Mirror of https://github.com/kevin1024/vcrpy.git, synced 2025-12-09 01:03:24 +00:00
Compare commits
62 Commits
| SHA1 |
|---|
| 53c55b13e7 |
| 365e7cb112 |
| e5d6327de9 |
| d86ffe7130 |
| d9fd563812 |
| 9e548718e5 |
| 83720793fb |
| 188326b10e |
| ff90190660 |
| 1d9f8b5f7c |
| 2454aa2eb0 |
| df5f6089af |
| 5738547288 |
| 8274b660c6 |
| a8f1a65d62 |
| 9c275dd86a |
| 1fbd65a702 |
| 31b0e825b5 |
| 973d8339b3 |
| c8db6cb731 |
| ecbc192fc4 |
| 76d365314a |
| 830a3c2e04 |
| 9c432c7e50 |
| 6f7f45d0a8 |
| 8e352feb6a |
| 57a934d14b |
| f9d7ccd33e |
| 265a158fe7 |
| c65ff0e7b3 |
| 066752aa0b |
| 9a5214888b |
| 609d8e35be |
| ce14de8251 |
| 574b22a62a |
| 1167b9ea4e |
| 77ae99bfda |
| 8851571ba7 |
| f71d28d10e |
| 3355bd01eb |
| 17afa82bf4 |
| f98684e8aa |
| 5a85e88a39 |
| d2368eb2c4 |
| 3a46616ba6 |
| 37665581e0 |
| 57df0c6921 |
| ddb29745a9 |
| ac7c9244cc |
| 6da7cd0ea5 |
| 24df79b75f |
| 0800b99214 |
| 3dad89df3f |
| 5c9b0b4ccb |
| 5a848d277e |
| c88c738df9 |
| 9a8067d8e7 |
| 787c6bdb77 |
| c3298c25a3 |
| 2f4c803678 |
| 60145983bf |
| b5c27f99d1 |
.travis.yml (15 changed lines)

```diff
@@ -16,15 +16,20 @@ env:
   - TOX_SUFFIX="requests1"
   - TOX_SUFFIX="httplib2"
   - TOX_SUFFIX="boto"
+  - TOX_SUFFIX="boto3"
   - TOX_SUFFIX="urllib317"
   - TOX_SUFFIX="urllib319"
   - TOX_SUFFIX="urllib3110"
   - TOX_SUFFIX="tornado3"
   - TOX_SUFFIX="tornado4"
+  - TOX_SUFFIX="aiohttp"
 matrix:
   allow_failures:
     - env: TOX_SUFFIX="boto"
+    - env: TOX_SUFFIX="boto3"
   exclude:
+    - env: TOX_SUFFIX="flakes"
+      python: 2.6
     - env: TOX_SUFFIX="boto"
       python: 3.3
     - env: TOX_SUFFIX="boto"
@@ -33,6 +38,16 @@ matrix:
       python: 3.4
     - env: TOX_SUFFIX="requests1"
       python: 3.5
+    - env: TOX_SUFFIX="aiohttp"
+      python: 2.6
+    - env: TOX_SUFFIX="aiohttp"
+      python: 2.7
+    - env: TOX_SUFFIX="aiohttp"
+      python: 3.3
+    - env: TOX_SUFFIX="aiohttp"
+      python: pypy
+    - env: TOX_SUFFIX="aiohttp"
+      python: pypy3
 python:
   - 2.6
   - 2.7
```
README.rst (16 changed lines)

```diff
@@ -13,7 +13,7 @@ Source code
     https://github.com/kevin1024/vcrpy

 Documentation
-    https://vcrpy.readthedocs.org/
+    https://vcrpy.readthedocs.io/

 Rationale
 ---------
@@ -41,6 +41,20 @@ VCR.py will detect the absence of a cassette file and once again record
 all HTTP interactions, which will update them to correspond to the new
 API.

+Support
+-------
+
+VCR.py works great with the following HTTP clients:
+
+- requests
+- aiohttp
+- urllib3
+- tornado
+- urllib2
+- boto
+- boto3
+
+
 License
 =======

```
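For readers skimming the compare view, the workflow the README describes boils down to wrapping an HTTP call in `vcr.use_cassette`. A minimal sketch with `requests` (the cassette path, URL, and asserted text are illustrative, not part of this changeset):

```python
import vcr
import requests

# First run: the request goes out over the network and is recorded into the
# cassette file. Later runs: the response is replayed from the cassette.
with vcr.use_cassette('fixtures/iana.yaml'):
    response = requests.get('http://www.iana.org/domains/reserved')
    assert 'Example domains' in response.text
```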
```diff
@@ -1,5 +1,18 @@
 Changelog
 ---------
+- 1.10.5 Added a fix to httplib2 (thanks @carlosds730), Fix an issue with
+  aiohttp (thanks @madninja), Add missing requirement yarl (thanks @lamenezes),
+  Remove duplicate mock triple (thanks @FooBarQuaxx)
+- 1.10.4 Fix an issue with asyncio aiohttp (thanks @madninja)
+- 1.10.3 Fix some issues with asyncio and params (thanks @anovikov1984 and
+  @lamenezes), Fix some issues with cassette serialize / deserialize and empty
+  response bodies (thanks @gRoussac and @dz0ny)
+- 1.10.2 Fix 1.10.1 release - add aiohttp support back in
+- 1.10.1 [bad release] Fix build for Fedora package + python2 (thanks @puiterwijk and @lamenezes)
+- 1.10.0 Add support for aiohttp (thanks @lamenezes)
+- 1.9.0 Add support for boto3 (thanks @desdm, @foorbarna). Fix deepcopy issue
+  for response headers when `decode_compressed_response` is enabled (thanks
+  @nickdirienzo)
 - 1.8.0 Fix for Serialization errors with JSON adapter (thanks
   @aliaksandrb). Avoid concatenating bytes with strings (thanks
   @jaysonsantos). Exclude __pycache__ dirs & compiled files in sdist
```
```diff
@@ -110,7 +110,7 @@ todo_include_todos = False
 # -- Options for HTML output ----------------------------------------------

 # The theme to use for HTML and HTML Help pages.
-# https://read-the-docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
+# https://read-the-docs.readthedocs.io/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
 if 'READTHEDOCS' not in os.environ:
     import sphinx_rtd_theme
     html_theme = 'sphinx_rtd_theme'
```
```diff
@@ -19,7 +19,7 @@ that has ``requests`` installed.

 Also, in order for the boto tests to run, you will need an AWS key.
 Refer to the `boto
-documentation <http://boto.readthedocs.org/en/latest/getting_started.html>`__
+documentation <https://boto.readthedocs.io/en/latest/getting_started.html>`__
 for how to set this up. I have marked the boto tests as optional in
 Travis so you don't have to worry about them failing if you submit a
 pull request.
```
setup.py (9 changed lines)

```diff
@@ -31,6 +31,7 @@ extras_require = {
     ':python_version in "2.4, 2.5, 2.6"':
     ['contextlib2', 'backport_collections', 'mock'],
     ':python_version in "2.7, 3.1, 3.2"': ['contextlib2', 'mock'],
+    ':python_version in "3.4, 3.5, 3.6"': ['yarl'],
 }


@@ -49,9 +50,13 @@ except Exception:
     install_requires.extend(value)


+excluded_packages = ["tests*"]
+if sys.version_info[0] == 2:
+    excluded_packages.append("vcr.stubs.aiohttp_stubs")
+
 setup(
     name='vcrpy',
-    version='1.8.0',
+    version='1.10.5',
     description=(
         "Automatically mock your HTTP interactions to simplify and "
         "speed up testing"
@@ -60,7 +65,7 @@ setup(
     author='Kevin McCarthy',
     author_email='me@kevinmccarthy.org',
     url='https://github.com/kevin1024/vcrpy',
-    packages=find_packages(exclude=("tests*",)),
+    packages=find_packages(exclude=excluded_packages),
     install_requires=install_requires,
     extras_require=extras_require,
     license='MIT',
```
tests/integration/aiohttp_utils.py (new file, 7 lines)

```diff
@@ -0,0 +1,7 @@
+import asyncio
+
+
+@asyncio.coroutine
+def aiohttp_request(session, method, url, as_text, **kwargs):
+    response = yield from session.request(method, url, **kwargs)  # NOQA: E999
+    return response, (yield from response.text()) if as_text else (yield from response.json())  # NOQA: E999
```
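The helper uses the pre-3.5 generator-based coroutine style (`@asyncio.coroutine` with `yield from`), which is Python 3-only syntax; that is why setup.py above excludes `vcr.stubs.aiohttp_stubs` from Python 2 installs. On Python 3.5+ the same helper could be written with native `async`/`await`; a hypothetical equivalent for comparison (an assumption, not part of this changeset):

```python
import aiohttp  # Python 3 only


async def aiohttp_request(session, method, url, as_text, **kwargs):
    # Same contract as the generator-based helper: return the response object
    # plus either its text or its decoded JSON body.
    response = await session.request(method, url, **kwargs)
    body = await (response.text() if as_text else response.json())
    return response, body
```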
tests/integration/test_aiohttp.py (new file, 117 lines)

```diff
@@ -0,0 +1,117 @@
+import pytest
+aiohttp = pytest.importorskip("aiohttp")
+
+import asyncio  # NOQA
+import sys  # NOQA
+
+import aiohttp  # NOQA
+import pytest  # NOQA
+import vcr  # NOQA
+
+from .aiohttp_utils import aiohttp_request  # NOQA
+
+
+def get(url, as_text=True, **kwargs):
+    loop = asyncio.get_event_loop()
+    with aiohttp.ClientSession() as session:
+        task = loop.create_task(aiohttp_request(session, 'GET', url, as_text, **kwargs))
+        return loop.run_until_complete(task)
+
+
+def post(url, as_text=True, **kwargs):
+    loop = asyncio.get_event_loop()
+    with aiohttp.ClientSession() as session:
+        task = loop.create_task(aiohttp_request(session, 'POST', url, as_text, **kwargs))
+        return loop.run_until_complete(task)
+
+
+@pytest.fixture(params=["https", "http"])
+def scheme(request):
+    '''Fixture that returns both http and https.'''
+    return request.param
+
+
+def test_status(tmpdir, scheme):
+    url = scheme + '://httpbin.org'
+    with vcr.use_cassette(str(tmpdir.join('status.yaml'))):
+        response, _ = get(url)
+
+    with vcr.use_cassette(str(tmpdir.join('status.yaml'))) as cassette:
+        cassette_response, _ = get(url)
+        assert cassette_response.status == response.status
+        assert cassette.play_count == 1
+
+
+def test_headers(tmpdir, scheme):
+    url = scheme + '://httpbin.org'
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
+        response, _ = get(url)
+
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cassette:
+        cassette_response, _ = get(url)
+        assert cassette_response.headers == response.headers
+        assert cassette.play_count == 1
+
+
+def test_text(tmpdir, scheme):
+    url = scheme + '://httpbin.org'
+    with vcr.use_cassette(str(tmpdir.join('text.yaml'))):
+        _, response_text = get(url)
+
+    with vcr.use_cassette(str(tmpdir.join('text.yaml'))) as cassette:
+        _, cassette_response_text = get(url)
+        assert cassette_response_text == response_text
+        assert cassette.play_count == 1
+
+
+def test_json(tmpdir, scheme):
+    url = scheme + '://httpbin.org/get'
+    with vcr.use_cassette(str(tmpdir.join('json.yaml'))):
+        _, response_json = get(url, as_text=False)
+
+    with vcr.use_cassette(str(tmpdir.join('json.yaml'))) as cassette:
+        _, cassette_response_json = get(url, as_text=False)
+        assert cassette_response_json == response_json
+        assert cassette.play_count == 1
+
+
+def test_post(tmpdir, scheme):
+    data = {'key1': 'value1', 'key2': 'value2'}
+    url = scheme + '://httpbin.org/post'
+    with vcr.use_cassette(str(tmpdir.join('post.yaml'))):
+        _, response_json = post(url, data=data)
+
+    with vcr.use_cassette(str(tmpdir.join('post.yaml'))) as cassette:
+        _, cassette_response_json = post(url, data=data)
+        assert cassette_response_json == response_json
+        assert cassette.play_count == 1
+
+
+def test_params(tmpdir, scheme):
+    url = scheme + '://httpbin.org/get'
+    params = {'a': 1, 'b': False, 'c': 'c'}
+    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
+        _, response_json = get(url, as_text=False, params=params)
+
+    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
+        _, cassette_response_json = get(url, as_text=False, params=params)
+        assert cassette_response_json == response_json
+        assert cassette.play_count == 1
+
+
+def test_params_same_url_distinct_params(tmpdir, scheme):
+    url = scheme + '://httpbin.org/get'
+    params = {'a': 1, 'b': False, 'c': 'c'}
+    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
+        _, response_json = get(url, as_text=False, params=params)
+
+    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
+        _, cassette_response_json = get(url, as_text=False, params=params)
+        assert cassette_response_json == response_json
+        assert cassette.play_count == 1
+
+    other_params = {'other': 'params'}
+    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
+        response, cassette_response_text = get(url, as_text=True, params=other_params)
+        assert 'No match for the request' in cassette_response_text
+        assert response.status == 599
```
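Outside of the test suite, the same record/replay flow with aiohttp looks roughly like this (a sketch in the same pre-3.5 coroutine style the tests use; the cassette path and URL are illustrative):

```python
import asyncio

import aiohttp
import vcr


@asyncio.coroutine
def fetch_status(url):
    with aiohttp.ClientSession() as session:
        response = yield from session.request('GET', url)
        return response.status


loop = asyncio.get_event_loop()
with vcr.use_cassette('fixtures/aiohttp_status.yaml'):
    # Recorded on the first run, replayed from the cassette afterwards.
    status = loop.run_until_complete(fetch_status('http://httpbin.org'))
```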
```diff
@@ -5,8 +5,12 @@ import boto  # NOQA
 import boto.iam  # NOQA
 from boto.s3.connection import S3Connection  # NOQA
 from boto.s3.key import Key  # NOQA
-from ConfigParser import DuplicateSectionError  # NOQA
 import vcr  # NOQA
+try:  # NOQA
+    from ConfigParser import DuplicateSectionError  # NOQA
+except ImportError:  # NOQA
+    # python3
+    from configparser import DuplicateSectionError  # NOQA


 def test_boto_stubs(tmpdir):
```
tests/integration/test_boto3.py (new file, 67 lines)

```diff
@@ -0,0 +1,67 @@
+import pytest
+boto3 = pytest.importorskip("boto3")
+
+import boto3  # NOQA
+import vcr  # NOQA
+
+bucket = 'boto3-demo-1337'  # a bucket you can access
+key = 'test/my_test.txt'  # key with r+w access
+content = 'hello world i am a string'  # content to put in the test file
+
+
+def test_boto_stubs(tmpdir):
+    with vcr.use_cassette(str(tmpdir.join('boto3-stubs.yml'))):
+        # Perform the imports within the patched context so that
+        # HTTPConnection, VerifiedHTTPSConnection refers to the patched version.
+        from botocore.vendored.requests.packages.urllib3.connectionpool import \
+            HTTPConnection, VerifiedHTTPSConnection
+        from vcr.stubs.boto3_stubs import VCRRequestsHTTPConnection, VCRRequestsHTTPSConnection
+        # Prove that the class was patched by the stub and that we can instantiate it.
+        assert issubclass(HTTPConnection, VCRRequestsHTTPConnection)
+        assert issubclass(VerifiedHTTPSConnection, VCRRequestsHTTPSConnection)
+        HTTPConnection('hostname.does.not.matter')
+        VerifiedHTTPSConnection('hostname.does.not.matter')
+
+
+def test_boto3_without_vcr():
+    s3_resource = boto3.resource('s3')
+    b = s3_resource.Bucket(bucket)
+    b.put_object(Key=key, Body=content)
+
+    # retrieve content to check it
+    o = s3_resource.Object(bucket, key).get()
+
+    # decode for python3
+    assert content == o['Body'].read().decode('utf-8')
+
+
+def test_boto_medium_difficulty(tmpdir):
+    s3_resource = boto3.resource('s3')
+    b = s3_resource.Bucket(bucket)
+    with vcr.use_cassette(str(tmpdir.join('boto3-medium.yml'))):
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+
+    with vcr.use_cassette(str(tmpdir.join('boto3-medium.yml'))) as cass:
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+        assert cass.all_played
+
+
+def test_boto_hardcore_mode(tmpdir):
+    with vcr.use_cassette(str(tmpdir.join('boto3-hardcore.yml'))):
+        s3_resource = boto3.resource('s3')
+        b = s3_resource.Bucket(bucket)
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+
+    with vcr.use_cassette(str(tmpdir.join('boto3-hardcore.yml'))) as cass:
+        s3_resource = boto3.resource('s3')
+        b = s3_resource.Bucket(bucket)
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+        assert cass.all_played
```
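The same pattern applies outside pytest: wrap the boto3 calls in `vcr.use_cassette` and the S3 round trip is recorded once and replayed afterwards. A sketch mirroring the tests above (the cassette path, bucket, and key names are placeholders you would need access to):

```python
import boto3
import vcr

s3 = boto3.resource('s3')
with vcr.use_cassette('fixtures/s3-roundtrip.yml'):
    # Recorded against the real bucket on the first run, replayed later.
    s3.Bucket('my-test-bucket').put_object(Key='test/hello.txt', Body='hello')
    body = s3.Object('my-test-bucket', 'test/hello.txt').get()['Body'].read()
    assert body.decode('utf-8') == 'hello'
```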
```diff
@@ -38,6 +38,18 @@ def test_body(tmpdir, httpbin_both):
         assert content == requests.get(url).content


+def test_get_empty_content_type_json(tmpdir, httpbin_both):
+    '''Ensure GET with application/json content-type and empty request body doesn't crash'''
+    url = httpbin_both + '/status/200'
+    headers = {'Content-Type': 'application/json'}
+
+    with vcr.use_cassette(str(tmpdir.join('get_empty_json.yaml')), match_on=('body',)):
+        status = requests.get(url, headers=headers).status_code
+
+    with vcr.use_cassette(str(tmpdir.join('get_empty_json.yaml')), match_on=('body',)):
+        assert status == requests.get(url, headers=headers).status_code
+
+
 def test_effective_url(tmpdir, httpbin_both):
     '''Ensure that the effective_url is captured'''
     url = httpbin_both.url + '/redirect-to?url=/html'
```
```diff
@@ -1,6 +1,9 @@
 import vcr
+import zlib
 import six.moves.http_client as httplib

+from assertions import assert_is_json
+

 def _headers_are_case_insensitive(host, port):
     conn = httplib.HTTPConnection(host, port)
@@ -44,3 +47,39 @@ def test_multiple_headers(tmpdir, httpbin):
     inside = _multiple_header_value(httpbin)

     assert outside == inside
+
+
+def test_original_decoded_response_is_not_modified(tmpdir, httpbin):
+    testfile = str(tmpdir.join('decoded_response.yml'))
+    host, port = httpbin.host, httpbin.port
+
+    conn = httplib.HTTPConnection(host, port)
+    conn.request('GET', '/gzip')
+    outside = conn.getresponse()
+
+    with vcr.use_cassette(testfile, decode_compressed_response=True):
+        conn = httplib.HTTPConnection(host, port)
+        conn.request('GET', '/gzip')
+        inside = conn.getresponse()
+
+        # Assert that we do not modify the original response while appending
+        # to the casssette.
+        assert 'gzip' == inside.headers['content-encoding']
+
+        # They should effectively be the same response.
+        inside_headers = (h for h in inside.headers.items() if h[0].lower() != 'date')
+        outside_headers = (h for h in outside.getheaders() if h[0].lower() != 'date')
+        assert set(inside_headers) == set(outside_headers)
+        inside = zlib.decompress(inside.read(), 16+zlib.MAX_WBITS)
+        outside = zlib.decompress(outside.read(), 16+zlib.MAX_WBITS)
+        assert inside == outside
+
+    # Even though the above are raw bytes, the JSON data should have been
+    # decoded and saved to the cassette.
+    with vcr.use_cassette(testfile):
+        conn = httplib.HTTPConnection(host, port)
+        conn.request('GET', '/gzip')
+        inside = conn.getresponse()
+
+        assert 'content-encoding' not in inside.headers
+        assert_is_json(inside.read())
```
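The new test checks two things about `decode_compressed_response=True`: the live response keeps its `gzip` content-encoding, while the body stored in the cassette is decompressed. From a user's point of view the option is just a cassette argument; a minimal usage sketch (path and URL are illustrative):

```python
import vcr
import requests

# The server still answers with gzip, but the cassette on disk stores the
# decoded body, which keeps the YAML readable and diff-friendly.
with vcr.use_cassette('fixtures/gzip.yaml', decode_compressed_response=True):
    requests.get('http://httpbin.org/gzip')
```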
```diff
@@ -76,7 +76,7 @@ def test_amazon_doctype(tmpdir):
     # amazon gzips its homepage. For some reason, in requests 2.7, it's not
     # getting gunzipped.
     with vcr.use_cassette(str(tmpdir.join('amz.yml'))):
-        r = requests.get('http://www.amazon.com')
+        r = requests.get('http://www.amazon.com', verify=False)
     assert 'html' in r.text


```
```diff
@@ -4,7 +4,7 @@ import pytest
 from vcr.compat import mock
 from vcr.request import Request
 from vcr.serialize import deserialize, serialize
-from vcr.serializers import yamlserializer, jsonserializer
+from vcr.serializers import yamlserializer, jsonserializer, compat


 def test_deserialize_old_yaml_cassette():
@@ -131,3 +131,9 @@ def test_serialize_binary_request():
         )
     except (UnicodeDecodeError, TypeError) as exc:
         assert msg in str(exc)
+
+
+def test_deserialize_no_body_string():
+    data = {'body': {'string': None}}
+    output = compat.convert_to_bytes(data)
+    assert data == output
```
```diff
@@ -1,4 +1,6 @@
 from vcr.stubs import VCRHTTPSConnection
+from vcr.compat import mock
+from vcr.cassette import Cassette


 class TestVCRConnection(object):
@@ -7,3 +9,10 @@ class TestVCRConnection(object):
         vcr_connection = VCRHTTPSConnection('www.examplehost.com')
         vcr_connection.ssl_version = 'example_ssl_version'
         assert vcr_connection.real_connection.ssl_version == 'example_ssl_version'
+
+    @mock.patch('vcr.cassette.Cassette.can_play_response_for', return_value=False)
+    def testing_connect(*args):
+        vcr_connection = VCRHTTPSConnection('www.google.com')
+        vcr_connection.cassette = Cassette('test', record_mode='all')
+        vcr_connection.real_connection.connect()
+        assert vcr_connection.real_connection.sock is not None
```
tox.ini (8 changed lines)

```diff
@@ -1,11 +1,11 @@
 [tox]
-envlist = {py26,py27,py33,py34,pypy,pypy3}-{flakes,requests27,requests26,requests25,requests24,requests23,requests22,requests1,httplib2,urllib317,urllib319,urllib3110,tornado3,tornado4,boto}
+envlist = {py26,py27,py33,py34,pypy,pypy3}-{flakes,requests27,requests26,requests25,requests24,requests23,requests22,requests1,httplib2,urllib317,urllib319,urllib3110,tornado3,tornado4,boto,boto3,aiohttp}

 [testenv:flakes]
 skipsdist = True
 commands =
     flake8 --version
-    flake8 --exclude="./docs/conf.py"
+    flake8 --exclude=./docs/conf.py,./.tox/
     pyflakes ./docs/conf.py
 deps = flake8

@@ -13,6 +13,8 @@ deps = flake8
 commands =
     ./runtests.sh {posargs}
 deps =
+    # httpbin fails with latest Flask, so we pin it
+    Flask==0.10.1
     mock
     pytest
     pytest-httpbin
@@ -35,6 +37,8 @@ deps =
     {py26,py27,py33,py34}-tornado3: pycurl
     {py26,py27,py33,py34}-tornado4: pycurl
     boto: boto
+    boto3: boto3
+    aiohttp: aiohttp

 [flake8]
 max_line_length = 110
```
```diff
@@ -146,9 +146,11 @@ def decode_response(response):
         else:  # encoding == 'deflate'
             return zlib.decompress(body)

+    # Deepcopy here in case `headers` contain objects that could
+    # be mutated by a shallow copy and corrupt the real response.
+    response = copy.deepcopy(response)
    headers = CaseInsensitiveDict(response['headers'])
    if is_compressed(headers):
-        response = copy.deepcopy(response)
        encoding = headers['content-encoding'][0]
        headers['content-encoding'].remove(encoding)
        if not headers['content-encoding']:
```
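Moving the `copy.deepcopy` above the header handling matches the deepcopy fix mentioned in the 1.9.0 changelog entry: the decoded copy must not share mutable header containers with the original response. A small standalone illustration of the hazard (not vcrpy code):

```python
import copy

original = {'headers': {'content-encoding': ['gzip']}}

shallow = copy.copy(original)
shallow['headers']['content-encoding'].remove('gzip')
print(original['headers']['content-encoding'])  # [] -- the original was corrupted

original = {'headers': {'content-encoding': ['gzip']}}
deep = copy.deepcopy(original)
deep['headers']['content-encoding'].remove('gzip')
print(original['headers']['content-encoding'])  # ['gzip'] -- original untouched
```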
```diff
@@ -49,7 +49,8 @@ def _transform_json(body):
     # Request body is always a byte string, but json.loads() wants a text
     # string. RFC 7159 says the default encoding is UTF-8 (although UTF-16
     # and UTF-32 are also allowed: hmmmmm).
-    return json.loads(body.decode('utf-8'))
+    if body:
+        return json.loads(body.decode('utf-8'))


 _xml_header_checker = _header_checker('text/xml')
```
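The `if body:` guard exists because an empty request body cannot be JSON-decoded; `json.loads('')` raises a `ValueError`. With the guard, the matcher simply treats an empty body as `None`, which is what the empty-body GET test earlier in this changeset exercises. A self-contained sketch of the guarded behaviour:

```python
import json


def transform_json(body):
    # Mirror of the guarded helper: decode only when there is a body at all.
    if body:
        return json.loads(body.decode('utf-8'))


assert transform_json(b'{"a": 1}') == {'a': 1}
assert transform_json(b'') is None
```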
```diff
@@ -164,5 +164,6 @@ def main():
         sys.stderr.write("[{0}] {1}\n".format(status, file_path))
     sys.stderr.write("Done.\n")

+
 if __name__ == '__main__':
     main()
```
vcr/patch.py (61 changed lines)

```diff
@@ -22,6 +22,16 @@ else:
     _cpoolHTTPConnection = cpool.HTTPConnection
     _cpoolHTTPSConnection = cpool.HTTPSConnection

+# Try to save the original types for boto3
+try:
+    import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
+except ImportError:  # pragma: no cover
+    pass
+else:
+    _Boto3VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
+    _cpoolBoto3HTTPConnection = cpool.HTTPConnection
+    _cpoolBoto3HTTPSConnection = cpool.HTTPSConnection
+
 # Try to save the original types for urllib3
 try:
@@ -70,6 +80,13 @@ else:
     _CurlAsyncHTTPClient_fetch_impl = \
         tornado.curl_httpclient.CurlAsyncHTTPClient.fetch_impl

+try:
+    import aiohttp.client
+except ImportError:  # pragma: no cover
+    pass
+else:
+    _AiohttpClientSessionRequest = aiohttp.client.ClientSession._request
+

 class CassettePatcherBuilder(object):

@@ -87,8 +104,8 @@ class CassettePatcherBuilder(object):

     def build(self):
         return itertools.chain(
-            self._httplib(), self._requests(), self._urllib3(),
-            self._httplib2(), self._boto(), self._tornado(),
+            self._httplib(), self._requests(), self._boto3(), self._urllib3(),
+            self._httplib2(), self._boto(), self._tornado(), self._aiohttp(),
             self._build_patchers_from_mock_triples(
                 self._cassette.custom_patches
             ),
@@ -165,6 +182,14 @@ class CassettePatcherBuilder(object):
         from .stubs import requests_stubs
         return self._urllib3_patchers(cpool, requests_stubs)

+    def _boto3(self):
+        try:
+            import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
+        except ImportError:  # pragma: no cover
+            return ()
+        from .stubs import boto3_stubs
+        return self._urllib3_patchers(cpool, boto3_stubs)
+
     def _patched_get_conn(self, connection_pool_class, connection_class_getter):
         get_conn = connection_pool_class._get_conn

@@ -255,6 +280,19 @@ class CassettePatcherBuilder(object):
             )
             yield curl.CurlAsyncHTTPClient, 'fetch_impl', new_fetch_impl

+    @_build_patchers_from_mock_triples_decorator
+    def _aiohttp(self):
+        try:
+            import aiohttp.client as client
+        except ImportError:  # pragma: no cover
+            pass
+        else:
+            from .stubs.aiohttp_stubs import vcr_request
+            new_request = vcr_request(
+                self._cassette, _AiohttpClientSessionRequest
+            )
+            yield client.ClientSession, '_request', new_request
+
     def _urllib3_patchers(self, cpool, stubs):
         http_connection_remover = ConnectionRemover(
             self._get_cassette_subclass(stubs.VCRRequestsHTTPConnection)
@@ -263,7 +301,6 @@ class CassettePatcherBuilder(object):
             self._get_cassette_subclass(stubs.VCRRequestsHTTPSConnection)
         )
         mock_triples = (
-            (cpool, 'VerifiedHTTPSConnection', stubs.VCRRequestsHTTPSConnection),
             (cpool, 'VerifiedHTTPSConnection', stubs.VCRRequestsHTTPSConnection),
             (cpool, 'HTTPConnection', stubs.VCRRequestsHTTPConnection),
             (cpool, 'HTTPSConnection', stubs.VCRRequestsHTTPSConnection),
@@ -353,6 +390,24 @@ def reset_patchers():
         yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls', _HTTPConnection)
         yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls', _HTTPSConnection)

+    try:
+        import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
+    except ImportError:  # pragma: no cover
+        pass
+    else:
+        # unpatch requests v1.x
+        yield mock.patch.object(cpool, 'VerifiedHTTPSConnection', _Boto3VerifiedHTTPSConnection)
+        yield mock.patch.object(cpool, 'HTTPConnection', _cpoolBoto3HTTPConnection)
+        # unpatch requests v2.x
+        if hasattr(cpool.HTTPConnectionPool, 'ConnectionCls'):
+            yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls',
+                                    _cpoolBoto3HTTPConnection)
+            yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls',
+                                    _cpoolBoto3HTTPSConnection)
+
+        if hasattr(cpool, 'HTTPSConnection'):
+            yield mock.patch.object(cpool, 'HTTPSConnection', _cpoolBoto3HTTPSConnection)
+
     try:
         import httplib2 as cpool
     except ImportError:  # pragma: no cover
```
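All of the client integrations in `vcr/patch.py`, including the new boto3 and aiohttp ones, follow the same shape: save the original class or method at import time, then have the builder emit (object, attribute, replacement) triples that are applied with `mock.patch.object` and undone afterwards. A simplified sketch of that mechanism (not the exact vcrpy internals):

```python
from unittest import mock


def build_patchers(mock_triples):
    # Each (obj, attribute, replacement) triple becomes a reversible patcher;
    # start() swaps the stub in, stop() restores the saved original.
    for obj, attribute, replacement in mock_triples:
        if hasattr(obj, attribute):
            yield mock.patch.object(obj, attribute, replacement)


# Usage sketch: apply, do the work, then undo in reverse order.
# patchers = list(build_patchers(triples))
# for p in patchers: p.start()
# ...
# for p in reversed(patchers): p.stop()
```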
```diff
@@ -24,7 +24,7 @@ def convert_body_to_bytes(resp):
     http://pyyaml.org/wiki/PyYAMLDocumentation#Python3support
     """
     try:
-        if not isinstance(resp['body']['string'], six.binary_type):
+        if resp['body']['string'] is not None and not isinstance(resp['body']['string'], six.binary_type):
             resp['body']['string'] = resp['body']['string'].encode('utf-8')
     except (KeyError, TypeError, UnicodeEncodeError):
         # The thing we were converting either wasn't a dictionary or didn't
```
```diff
@@ -287,7 +287,9 @@ class VCRConnection(object):
             # Cassette is write-protected, don't actually connect
             return

-        return self.real_connection.connect(*args, **kwargs)
+        from vcr.patch import force_reset
+        with force_reset():
+            return self.real_connection.connect(*args, **kwargs)

     @property
     def sock(self):
```
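The change to `VCRConnection.connect` wraps the real `connect()` call in `force_reset()`, so the outbound connection is made with the original, unpatched connection classes instead of recursing back into the VCR stubs. Conceptually, `force_reset` is a context manager built from the `reset_patchers()` generator shown above; a rough sketch of the idea only, not the actual implementation:

```python
import contextlib


@contextlib.contextmanager
def force_reset_sketch(reset_patchers):
    # Temporarily restore the original connection classes, let the caller do
    # real network work, then put the VCR patches back on exit.
    patchers = list(reset_patchers)
    for patcher in patchers:
        patcher.start()
    try:
        yield
    finally:
        for patcher in reversed(patchers):
            patcher.stop()
```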
vcr/stubs/aiohttp_stubs/__init__.py (new file, 81 lines)

```diff
@@ -0,0 +1,81 @@
+'''Stubs for aiohttp HTTP clients'''
+from __future__ import absolute_import
+
+import asyncio
+import functools
+import json
+
+from aiohttp import ClientResponse
+from yarl import URL
+
+from vcr.request import Request
+
+
+class MockClientResponse(ClientResponse):
+    # TODO: get encoding from header
+    @asyncio.coroutine
+    def json(self, *, encoding='utf-8', loads=json.loads):  # NOQA: E999
+        return loads(self.content.decode(encoding))
+
+    @asyncio.coroutine
+    def text(self, encoding='utf-8'):
+        return self.content.decode(encoding)
+
+    @asyncio.coroutine
+    def release(self):
+        pass
+
+
+def vcr_request(cassette, real_request):
+    @functools.wraps(real_request)
+    @asyncio.coroutine
+    def new_request(self, method, url, **kwargs):
+        headers = kwargs.get('headers')
+        headers = self._prepare_headers(headers)
+        data = kwargs.get('data')
+        params = kwargs.get('params')
+        if params:
+            for k, v in params.items():
+                params[k] = str(v)
+
+        request_url = URL(url).with_query(params)
+        vcr_request = Request(method, str(request_url), data, headers)
+
+        if cassette.can_play_response_for(vcr_request):
+            vcr_response = cassette.play_response(vcr_request)
+
+            response = MockClientResponse(method, URL(vcr_response.get('url')))
+            response.status = vcr_response['status']['code']
+            response.content = vcr_response['body']['string']
+            response.reason = vcr_response['status']['message']
+            response.headers = vcr_response['headers']
+
+            response.close()
+            return response
+
+        if cassette.write_protected and cassette.filter_request(vcr_request):
+            response = MockClientResponse(method, URL(url))
+            response.status = 599
+            msg = ("No match for the request {!r} was found. Can't overwrite "
+                   "existing cassette {!r} in your current record mode {!r}.")
+            msg = msg.format(vcr_request, cassette._path, cassette.record_mode)
+            response.content = msg.encode()
+            response.close()
+            return response
+
+        response = yield from real_request(self, method, url, **kwargs)  # NOQA: E999
+
+        vcr_response = {
+            'status': {
+                'code': response.status,
+                'message': response.reason,
+            },
+            'headers': dict(response.headers),
+            'body': {'string': (yield from response.text())},  # NOQA: E999
+            'url': response.url,
+        }
+        cassette.append(vcr_request, vcr_response)
+
+        return response
+
+    return new_request
```
vcr/stubs/boto3_stubs.py (new file, 15 lines)

```diff
@@ -0,0 +1,15 @@
+'''Stubs for boto3'''
+
+from botocore.vendored.requests.packages.urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
+from ..stubs import VCRHTTPConnection, VCRHTTPSConnection
+
+# urllib3 defines its own HTTPConnection classes, which boto3 goes ahead and assumes
+# you're using. It includes some polyfills for newer features missing in older pythons.
+
+
+class VCRRequestsHTTPConnection(VCRHTTPConnection, HTTPConnection):
+    _baseclass = HTTPConnection
+
+
+class VCRRequestsHTTPSConnection(VCRHTTPSConnection, VerifiedHTTPSConnection):
+    _baseclass = VerifiedHTTPSConnection
```