Mirror of https://github.com/kevin1024/vcrpy.git, synced 2025-12-09 09:13:23 +00:00
Compare commits
75 Commits
c341e48961
5be75692c4
b10b92bdbb
3009cbbbe9
f811b41ad9
140bc2ee74
867fd9ab4b
545c903ee2
cd864b5eca
689d68a0a2
709017ea46
8621427f46
7e695ff7bc
bd08e5119f
6ab508d67d
f1561ae0f8
f1f8ce2af4
26be756f47
f890709a20
d0ae5fa40b
1562bc7659
16b69aa2e5
d9caff107d
f317490eec
cf13805973
389cb4d6e3
7a82d70391
f3b9966a2a
5ba1c7fbb6
ad153bd733
42b3b16fe1
531dc02ca5
2156adb841
6caf7e962e
97fbd7e0bd
ead48b1907
1af4b2587e
82fa50c092
58d8980cfa
c111ebab0a
943a15a967
d0aa6bcc8d
04fd730a08
6156271c48
87666ba2e4
7915d07aff
095e272191
42762ec806
bfb38af8e1
894695d13b
a56a0726d4
c366852925
0cab15658f
c3ecf8c5b2
81d453f7d3
262ad903cb
ec60af0214
8cf8d3f69c
034aeb4f17
d59efbc6e0
b753a491c9
9092b34dd1
0a3aaddca2
c55d976277
47ccddafee
dcaf813657
ef727aaaaf
ee17233aa0
f88294a9e6
572da2084d
88bf8f0aac
9b59e02374
ba290a32d2
420c2ceb6f
ec786f2fd9
.gitignore (vendored): 1 changed line

@@ -1,6 +1,7 @@
 *.pyc
 .tox
 .cache
+.pytest_cache/
 build/
 dist/
 *.egg/
.travis.yml: 50 changed lines

@@ -7,61 +7,37 @@ env:
 - secure: LBSEg/gMj4u4Hrpo3zs6Y/1mTpd2RtcN49mZIFgTdbJ9IhpiNPqcEt647Lz94F9Eses2x2WbNuKqZKZZReY7QLbEzU1m0nN5jlaKrjcG5NR5clNABfFFyhgc0jBikyS4abAG8jc2efeaTrFuQwdoF4sE8YiVrkiVj2X5Xoi6sBk=
 matrix:
 - TOX_SUFFIX="flakes"
-- TOX_SUFFIX="requests22"
-- TOX_SUFFIX="requests23"
-- TOX_SUFFIX="requests24"
-- TOX_SUFFIX="requests25"
-- TOX_SUFFIX="requests26"
 - TOX_SUFFIX="requests27"
-- TOX_SUFFIX="requests213"
-- TOX_SUFFIX="requests1"
 - TOX_SUFFIX="httplib2"
-- TOX_SUFFIX="boto"
 - TOX_SUFFIX="boto3"
-- TOX_SUFFIX="urllib317"
+- TOX_SUFFIX="urllib3121"
-- TOX_SUFFIX="urllib319"
-- TOX_SUFFIX="urllib3110"
-- TOX_SUFFIX="tornado3"
 - TOX_SUFFIX="tornado4"
 - TOX_SUFFIX="aiohttp"
 matrix:
 allow_failures:
-- env: TOX_SUFFIX="boto"
 - env: TOX_SUFFIX="boto3"
-exclude:
-- env: TOX_SUFFIX="flakes"
-  python: 2.6
-- env: TOX_SUFFIX="boto"
-  python: 3.3
-- env: TOX_SUFFIX="boto"
-  python: 3.4
-- env: TOX_SUFFIX="boto"
-  python: 3.6
-- env: TOX_SUFFIX="requests1"
-  python: 3.4
-- env: TOX_SUFFIX="requests1"
-  python: 3.5
-- env: TOX_SUFFIX="requests1"
-  python: 3.6
 - env: TOX_SUFFIX="aiohttp"
-  python: 2.6
+  python: "pypy3.5-5.9.0"
+exclude:
+# Only run flakes on a single Python 2.x and a single 3.x
+- env: TOX_SUFFIX="flakes"
+  python: 3.4
+- env: TOX_SUFFIX="flakes"
+  python: 3.5
+- env: TOX_SUFFIX="flakes"
+  python: pypy
+- env: TOX_SUFFIX="flakes"
+  python: "pypy3.5-5.9.0"
 - env: TOX_SUFFIX="aiohttp"
   python: 2.7
-- env: TOX_SUFFIX="aiohttp"
-  python: 3.3
 - env: TOX_SUFFIX="aiohttp"
   python: pypy
-- env: TOX_SUFFIX="aiohttp"
-  python: pypy3
 python:
-- 2.6
 - 2.7
-- 3.3
-- 3.4
 - 3.5
 - 3.6
 - pypy
-- pypy3
+- "pypy3.5-5.9.0"
 install:
 - pip install tox-travis
 - if [[ $TOX_SUFFIX != 'flakes' ]]; then python setup.py install ; fi
@@ -1,4 +1,4 @@
-|PyPI| |Build Status| |Waffle Ready| |Gitter|
+|PyPI| |Python versions| |Build Status| |Waffle Ready| |Gitter|

 VCR.py
 ======
@@ -51,7 +51,6 @@ VCR.py works great with the following HTTP clients:
 - urllib3
 - tornado
 - urllib2
-- boto
 - boto3


@@ -63,6 +62,8 @@ more details

 .. |PyPI| image:: https://img.shields.io/pypi/v/vcrpy.svg
    :target: https://pypi.python.org/pypi/vcrpy-unittest
+.. |Python versions| image:: https://img.shields.io/pypi/pyversions/vcrpy-unittest.svg
+   :target: https://pypi.python.org/pypi/vcrpy-unittest
 .. |Build Status| image:: https://secure.travis-ci.org/kevin1024/vcrpy.png?branch=master
    :target: http://travis-ci.org/kevin1024/vcrpy
 .. |Waffle Ready| image:: https://badge.waffle.io/kevin1024/vcrpy.png?label=ready&title=waffle
@@ -1,5 +1,7 @@
 Changelog
 ---------
+- 1.12.0 - Fix support to latest aiohttp version (3.2.1), Adapted setup to PEP508, Support binary responses on aiohttp, Dropped support for EOL python versions (2.6 and 3.3)
+- 1.11.1 Fix compatibility with newest requests and urllib3 releases
 - 1.11.0 Allow injection of persistence methods + bugfixes (thanks @j-funk and @IvanMalison),
   Support python 3.6 + CI tests (thanks @derekbekoe and @graingert),
   Support pytest-asyncio coroutines (thanks @graingert)
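The 1.12.0 entry above includes binary response support on aiohttp. A minimal sketch of what that enables, not taken from this changeset (the URL and cassette path are illustrative):

```python
import asyncio

import aiohttp
import vcr


async def fetch_png(url):
    # Raw bytes; with 1.12.0 these can be stored in and replayed from a cassette.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return await resp.read()


with vcr.use_cassette('fixtures/image.yaml'):
    loop = asyncio.get_event_loop()
    png_bytes = loop.run_until_complete(fetch_png('https://httpbin.org/image/png'))
```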
@@ -9,7 +9,7 @@ with pip::
 Compatibility
 -------------

-VCR.py supports Python 2.6 and 2.7, 3.3, 3.4, and
+VCR.py supports Python 2.7 and 3.4+, and
 `pypy <http://pypy.org>`__.

 The following http libraries are supported:
@@ -40,7 +40,7 @@ rebuilding pyyaml.

    brew install libyaml # Mac with Homebrew
    apt-get install libyaml-dev # Ubuntu
-   dnf install libyaml-dev # Fedora
+   dnf install libyaml-devel # Fedora

 3. Rebuild pyyaml with libyaml::

@@ -57,7 +57,7 @@ once
 file.

 It is similar to the new\_episodes record mode, but will prevent new,
-unexpected requests from being made (i.e. because the request URI
+unexpected requests from being made (e.g. because the request URI
 changed).

 once is the default record mode, used when you do not set one.
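A minimal sketch of selecting the record mode described above (the cassette path is illustrative); since ``once`` is the default, the two blocks behave the same:

```python
import vcr

# Default record mode is 'once': record on the first run, replay afterwards,
# and refuse unexpected new requests.
with vcr.use_cassette('fixtures/synopsis.yaml'):
    pass  # make HTTP requests here

# Equivalent, with the mode spelled out explicitly.
with vcr.use_cassette('fixtures/synopsis.yaml', record_mode='once'):
    pass
```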
@@ -1,3 +1,3 @@
 #!/bin/bash

-REQUESTS_CA_BUNDLE=`python -m pytest_httpbin.certs` py.test $1
+REQUESTS_CA_BUNDLE=`python -m pytest_httpbin.certs` py.test $*
setup.py: 46 changed lines

@@ -1,11 +1,9 @@
 #!/usr/bin/env python

 import sys
-import logging

 from setuptools import setup, find_packages
 from setuptools.command.test import test as TestCommand
-import pkg_resources

 long_description = open('README.rst', 'r').read()

@@ -24,31 +22,14 @@ class PyTest(TestCommand):
         sys.exit(errno)


-install_requires = ['PyYAML', 'wrapt', 'six>=1.5']
-extras_require = {
-    ':python_version in "2.4, 2.5, 2.6"':
-        ['contextlib2', 'backport_collections', 'mock'],
-    ':python_version in "2.7, 3.1, 3.2"': ['contextlib2', 'mock'],
-    ':python_version in "3.4, 3.5, 3.6"': ['yarl'],
-}
-
-try:
-    if 'bdist_wheel' not in sys.argv:
-        for key, value in extras_require.items():
-            if key.startswith(':') and pkg_resources.evaluate_marker(key[1:]):
-                install_requires.extend(value)
-except Exception:
-    logging.getLogger(__name__).exception(
-        'Something went wrong calculating platform specific dependencies, so '
-        "you're getting them all!"
-    )
-    for key, value in extras_require.items():
-        if key.startswith(':'):
-            install_requires.extend(value)
+install_requires = [
+    'PyYAML',
+    'wrapt',
+    'six>=1.5',
+    'contextlib2; python_version=="2.7"',
+    'mock; python_version=="2.7"',
+    'yarl; python_version>="3.4"',
+]


 excluded_packages = ["tests*"]
 if sys.version_info[0] == 2:
@@ -56,7 +37,7 @@ if sys.version_info[0] == 2:

 setup(
     name='vcrpy',
-    version='1.11.0',
+    version='1.12.0',
     description=(
         "Automatically mock your HTTP interactions to simplify and "
         "speed up testing"
@@ -66,8 +47,8 @@ setup(
     author_email='me@kevinmccarthy.org',
     url='https://github.com/kevin1024/vcrpy',
     packages=find_packages(exclude=excluded_packages),
+    python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
     install_requires=install_requires,
-    extras_require=extras_require,
     license='MIT',
     tests_require=['pytest', 'mock', 'pytest-httpbin'],
     classifiers=[
@@ -75,7 +56,14 @@ setup(
         'Environment :: Console',
         'Intended Audience :: Developers',
         'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Programming Language :: Python :: 3.6',
+        'Programming Language :: Python :: Implementation :: CPython',
+        'Programming Language :: Python :: Implementation :: PyPy',
         'Topic :: Software Development :: Testing',
         'Topic :: Internet :: WWW/HTTP',
         'License :: OSI Approved :: MIT License',
@@ -1,13 +1,23 @@
+# flake8: noqa
 import asyncio

 import aiohttp


 @asyncio.coroutine
-def aiohttp_request(loop, method, url, as_text, **kwargs):
-    with aiohttp.ClientSession(loop=loop) as session:
-        response = yield from session.request(method, url, **kwargs)  # NOQA: E999
-        if as_text:
-            content = yield from response.text()  # NOQA: E999
-        else:
-            content = yield from response.json()  # NOQA: E999
-        return response, content
+def aiohttp_request(loop, method, url, output='text', encoding='utf-8', **kwargs):
+    session = aiohttp.ClientSession(loop=loop)
+    response_ctx = session.request(method, url, **kwargs)
+
+    response = yield from response_ctx.__aenter__()
+    if output == 'text':
+        content = yield from response.text()
+    elif output == 'json':
+        content = yield from response.json(encoding=encoding)
+    elif output == 'raw':
+        content = yield from response.read()
+
+    response_ctx._resp.close()
+    yield from session.close()
+
+    return response, content
@@ -1,13 +0,0 @@
-import aiohttp
-import pytest
-import vcr
-
-
-@vcr.use_cassette()
-@pytest.mark.asyncio
-async def test_http():  # noqa: E999
-    async with aiohttp.ClientSession() as session:
-        url = 'https://httpbin.org/get'
-        params = {'ham': 'spam'}
-        resp = await session.get(url, params=params)  # noqa: E999
-        assert (await resp.json())['args'] == {'ham': 'spam'}  # noqa: E999
@@ -1,19 +1,12 @@
+import contextlib

 import pytest
+asyncio = pytest.importorskip("asyncio")
 aiohttp = pytest.importorskip("aiohttp")

-import asyncio  # noqa: E402
-import contextlib  # noqa: E402

-import pytest  # noqa: E402
 import vcr  # noqa: E402

 from .aiohttp_utils import aiohttp_request  # noqa: E402

-try:
-    from .async_def import test_http  # noqa: F401
-except SyntaxError:
-    pass


 def run_in_loop(fn):
     with contextlib.closing(asyncio.new_event_loop()) as loop:
@@ -22,19 +15,19 @@ def run_in_loop(fn):
         return loop.run_until_complete(task)


-def request(method, url, as_text=True, **kwargs):
+def request(method, url, output='text', **kwargs):
     def run(loop):
-        return aiohttp_request(loop, method, url, as_text, **kwargs)
+        return aiohttp_request(loop, method, url, output=output, **kwargs)

     return run_in_loop(run)


-def get(url, as_text=True, **kwargs):
-    return request('GET', url, as_text, **kwargs)
+def get(url, output='text', **kwargs):
+    return request('GET', url, output=output, **kwargs)


-def post(url, as_text=True, **kwargs):
-    return request('POST', url, as_text, **kwargs)
+def post(url, output='text', **kwargs):
+    return request('POST', url, output='text', **kwargs)


 @pytest.fixture(params=["https", "http"])
@@ -78,15 +71,28 @@ def test_text(tmpdir, scheme):

 def test_json(tmpdir, scheme):
     url = scheme + '://httpbin.org/get'
+    headers = {'Content-Type': 'application/json'}

     with vcr.use_cassette(str(tmpdir.join('json.yaml'))):
-        _, response_json = get(url, as_text=False)
+        _, response_json = get(url, output='json', headers=headers)

     with vcr.use_cassette(str(tmpdir.join('json.yaml'))) as cassette:
-        _, cassette_response_json = get(url, as_text=False)
+        _, cassette_response_json = get(url, output='json', headers=headers)
         assert cassette_response_json == response_json
         assert cassette.play_count == 1


+def test_binary(tmpdir, scheme):
+    url = scheme + '://httpbin.org/image/png'
+    with vcr.use_cassette(str(tmpdir.join('binary.yaml'))):
+        _, response_binary = get(url, output='raw')
+
+    with vcr.use_cassette(str(tmpdir.join('binary.yaml'))) as cassette:
+        _, cassette_response_binary = get(url, output='raw')
+        assert cassette_response_binary == response_binary
+        assert cassette.play_count == 1
+
+
 def test_post(tmpdir, scheme):
     data = {'key1': 'value1', 'key2': 'value2'}
     url = scheme + '://httpbin.org/post'
@@ -101,29 +107,33 @@ def test_post(tmpdir, scheme):

 def test_params(tmpdir, scheme):
     url = scheme + '://httpbin.org/get'
+    headers = {'Content-Type': 'application/json'}
     params = {'a': 1, 'b': False, 'c': 'c'}
-    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
-        _, response_json = get(url, as_text=False, params=params)

     with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
-        _, cassette_response_json = get(url, as_text=False, params=params)
+        _, response_json = get(url, output='json', params=params, headers=headers)
+
+    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
+        _, cassette_response_json = get(url, output='json', params=params, headers=headers)
         assert cassette_response_json == response_json
         assert cassette.play_count == 1


 def test_params_same_url_distinct_params(tmpdir, scheme):
     url = scheme + '://httpbin.org/get'
+    headers = {'Content-Type': 'application/json'}
     params = {'a': 1, 'b': False, 'c': 'c'}
-    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
-        _, response_json = get(url, as_text=False, params=params)

     with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
-        _, cassette_response_json = get(url, as_text=False, params=params)
+        _, response_json = get(url, output='json', params=params, headers=headers)
+
+    with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
+        _, cassette_response_json = get(url, output='json', params=params, headers=headers)
         assert cassette_response_json == response_json
         assert cassette.play_count == 1

     other_params = {'other': 'params'}
     with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
-        response, cassette_response_text = get(url, as_text=True, params=other_params)
+        response, cassette_response_text = get(url, output='text', params=other_params)
         assert 'No match for the request' in cassette_response_text
         assert response.status == 599
@@ -26,9 +26,9 @@ def test_ignore_localhost(tmpdir, httpbin):
     with overridden_dns({'httpbin.org': '127.0.0.1'}):
         cass_file = str(tmpdir.join('filter_qs.yaml'))
         with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
-            urlopen('http://localhost:{0}/'.format(httpbin.port))
+            urlopen('http://localhost:{}/'.format(httpbin.port))
             assert len(cass) == 0
-            urlopen('http://httpbin.org:{0}/'.format(httpbin.port))
+            urlopen('http://httpbin.org:{}/'.format(httpbin.port))
             assert len(cass) == 1


@@ -39,9 +39,9 @@ def test_ignore_httpbin(tmpdir, httpbin):
             cass_file,
             ignore_hosts=['httpbin.org']
         ) as cass:
-            urlopen('http://httpbin.org:{0}/'.format(httpbin.port))
+            urlopen('http://httpbin.org:{}/'.format(httpbin.port))
             assert len(cass) == 0
-            urlopen('http://localhost:{0}/'.format(httpbin.port))
+            urlopen('http://localhost:{}/'.format(httpbin.port))
             assert len(cass) == 1


@@ -53,8 +53,8 @@ def test_ignore_localhost_and_httpbin(tmpdir, httpbin):
             ignore_hosts=['httpbin.org'],
             ignore_localhost=True
         ) as cass:
-            urlopen('http://httpbin.org:{0}'.format(httpbin.port))
-            urlopen('http://localhost:{0}'.format(httpbin.port))
+            urlopen('http://httpbin.org:{}'.format(httpbin.port))
+            urlopen('http://localhost:{}'.format(httpbin.port))
             assert len(cass) == 0


@@ -62,12 +62,12 @@ def test_ignore_localhost_twice(tmpdir, httpbin):
     with overridden_dns({'httpbin.org': '127.0.0.1'}):
         cass_file = str(tmpdir.join('filter_qs.yaml'))
         with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
-            urlopen('http://localhost:{0}'.format(httpbin.port))
+            urlopen('http://localhost:{}'.format(httpbin.port))
             assert len(cass) == 0
-            urlopen('http://httpbin.org:{0}'.format(httpbin.port))
+            urlopen('http://httpbin.org:{}'.format(httpbin.port))
             assert len(cass) == 1
         with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
             assert len(cass) == 1
-            urlopen('http://localhost:{0}'.format(httpbin.port))
-            urlopen('http://httpbin.org:{0}'.format(httpbin.port))
+            urlopen('http://localhost:{}'.format(httpbin.port))
+            urlopen('http://httpbin.org:{}'.format(httpbin.port))
             assert len(cass) == 1
@@ -10,10 +10,25 @@ import vcr
 from vcr.persisters.filesystem import FilesystemPersister


+class CustomFilesystemPersister(object):
+    '''Behaves just like default FilesystemPersister but adds .test extension
+    to the cassette file'''
+    @staticmethod
+    def load_cassette(cassette_path, serializer):
+        cassette_path += '.test'
+        return FilesystemPersister.load_cassette(cassette_path, serializer)
+
+    @staticmethod
+    def save_cassette(cassette_path, cassette_dict, serializer):
+        cassette_path += '.test'
+        FilesystemPersister.save_cassette(cassette_path, cassette_dict,
+                                          serializer)
+
+
 def test_save_cassette_with_custom_persister(tmpdir, httpbin):
     '''Ensure you can save a cassette using custom persister'''
     my_vcr = vcr.VCR()
-    my_vcr.register_persister(FilesystemPersister)
+    my_vcr.register_persister(CustomFilesystemPersister)

     # Check to make sure directory doesnt exist
     assert not os.path.exists(str(tmpdir.join('nonexistent')))
@@ -23,7 +38,7 @@ def test_save_cassette_with_custom_persister(tmpdir, httpbin):
         urlopen(httpbin.url).read()

     # Callback should have made the file and the directory
-    assert os.path.exists(str(tmpdir.join('nonexistent', 'cassette.yml')))
+    assert os.path.exists(str(tmpdir.join('nonexistent', 'cassette.yml.test')))


 def test_load_cassette_with_custom_persister(tmpdir, httpbin):
@@ -31,9 +46,9 @@ def test_load_cassette_with_custom_persister(tmpdir, httpbin):
     Ensure you can load a cassette using custom persister
     '''
     my_vcr = vcr.VCR()
-    my_vcr.register_persister(FilesystemPersister)
+    my_vcr.register_persister(CustomFilesystemPersister)

-    test_fixture = str(tmpdir.join('synopsis.json'))
+    test_fixture = str(tmpdir.join('synopsis.json.test'))

     with my_vcr.use_cassette(test_fixture, serializer='json'):
         response = urlopen(httpbin.url).read()
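A minimal usage sketch of the persister hook exercised by the tests above (cassette path illustrative): any object exposing static ``load_cassette`` and ``save_cassette`` methods can be registered.

```python
import vcr

my_vcr = vcr.VCR()
my_vcr.register_persister(CustomFilesystemPersister)

# Interactions recorded here end up in 'fixtures/synopsis.json.test',
# because the persister appends its '.test' suffix to the given path.
with my_vcr.use_cassette('fixtures/synopsis.json', serializer='json'):
    pass  # make HTTP requests here
```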
@@ -1,6 +1,8 @@
 # -*- coding: utf-8 -*-
 '''Test requests' interaction with vcr'''
+import platform
 import pytest
+import sys
 import vcr
 from assertions import assert_cassette_empty, assert_is_json

@@ -114,7 +116,10 @@ def test_post_chunked_binary(tmpdir, httpbin):
     assert req1 == req2


-@pytest.mark.xfail('sys.version_info >= (3, 6)', strict=True, raises=ConnectionError)
+@pytest.mark.xskip('sys.version_info >= (3, 6)', strict=True, raises=ConnectionError)
+@pytest.mark.xskip((3, 5) < sys.version_info < (3, 6) and
+                   platform.python_implementation() == 'CPython',
+                   reason='Fails on CPython 3.5')
 def test_post_chunked_binary_secure(tmpdir, httpbin_secure):
     '''Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str.'''
     data1 = iter([b'data', b'to', b'send'])
@@ -249,10 +254,8 @@ def test_nested_cassettes_with_session_created_before_nesting(httpbin_both, tmpd
 def test_post_file(tmpdir, httpbin_both):
     '''Ensure that we handle posting a file.'''
     url = httpbin_both + '/post'
-    with vcr.use_cassette(str(tmpdir.join('post_file.yaml'))) as cass:
-        # Don't use 2.7+ only style ',' separated with here because we support python 2.6
-        with open('tox.ini') as f:
-            original_response = requests.post(url, f).content
+    with vcr.use_cassette(str(tmpdir.join('post_file.yaml'))) as cass, open('tox.ini') as f:
+        original_response = requests.post(url, f).content

     # This also tests that we do the right thing with matching the body when they are files.
     with vcr.use_cassette(str(tmpdir.join('post_file.yaml')),
@@ -5,6 +5,7 @@
 import pytest
 import pytest_httpbin
 import vcr
+from vcr.patch import force_reset
 from assertions import assert_cassette_empty, assert_is_json
 urllib3 = pytest.importorskip("urllib3")

@@ -55,7 +56,7 @@ def test_body(tmpdir, httpbin_both, verify_pool_mgr):
 def test_auth(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure that we can handle basic auth'''
     auth = ('user', 'passwd')
-    headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(*auth))
+    headers = urllib3.util.make_headers(basic_auth='{}:{}'.format(*auth))
     url = httpbin_both.url + '/basic-auth/user/passwd'
     with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
         one = verify_pool_mgr.request('GET', url, headers=headers)
@@ -69,7 +70,7 @@ def test_auth(tmpdir, httpbin_both, verify_pool_mgr):
 def test_auth_failed(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure that we can save failed auth statuses'''
     auth = ('user', 'wrongwrongwrong')
-    headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(*auth))
+    headers = urllib3.util.make_headers(basic_auth='{}:{}'.format(*auth))
     url = httpbin_both.url + '/basic-auth/user/passwd'
     with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
         # Ensure that this is empty to begin with
@@ -138,3 +139,21 @@ def test_gzip(tmpdir, httpbin_both, verify_pool_mgr):
 def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure, pool_mgr):
     with vcr.use_cassette(str(tmpdir.join('cert_validation_disabled.yaml'))):
         pool_mgr.request('GET', httpbin_secure.url)
+
+
+def test_urllib3_force_reset():
+    cpool = urllib3.connectionpool
+    http_original = cpool.HTTPConnection
+    https_original = cpool.HTTPSConnection
+    verified_https_original = cpool.VerifiedHTTPSConnection
+    with vcr.use_cassette(path='test'):
+        first_cassette_HTTPConnection = cpool.HTTPConnection
+        first_cassette_HTTPSConnection = cpool.HTTPSConnection
+        first_cassette_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
+        with force_reset():
+            assert cpool.HTTPConnection is http_original
+            assert cpool.HTTPSConnection is https_original
+            assert cpool.VerifiedHTTPSConnection is verified_https_original
+        assert cpool.HTTPConnection is first_cassette_HTTPConnection
+        assert cpool.HTTPSConnection is first_cassette_HTTPSConnection
+        assert cpool.VerifiedHTTPSConnection is first_cassette_VerifiedHTTPSConnection
@@ -1,5 +1,6 @@
+import multiprocessing
 import pytest
-from six.moves import xmlrpc_client
+from six.moves import xmlrpc_client, xmlrpc_server

 requests = pytest.importorskip("requests")

@@ -80,13 +81,27 @@ def test_amazon_doctype(tmpdir):
     assert 'html' in r.text


-def test_xmlrpclib(tmpdir):
+@pytest.yield_fixture(scope='session')
+def rpc_server():
+    httpd = xmlrpc_server.SimpleXMLRPCServer(('', 0))
+    httpd.register_function(pow)
+    proxy_process = multiprocessing.Process(
+        target=httpd.serve_forever,
+    )
+    try:
+        proxy_process.start()
+        yield 'http://{}:{}'.format(*httpd.server_address)
+    finally:
+        proxy_process.terminate()
+
+
+def test_xmlrpclib(tmpdir, rpc_server):
     with vcr.use_cassette(str(tmpdir.join('xmlrpcvideo.yaml'))):
-        roundup_server = xmlrpc_client.ServerProxy('http://bugs.python.org/xmlrpc', allow_none=True)
-        original_schema = roundup_server.schema()
+        roundup_server = xmlrpc_client.ServerProxy(rpc_server, allow_none=True)
+        original_schema = roundup_server.pow(2, 4)

     with vcr.use_cassette(str(tmpdir.join('xmlrpcvideo.yaml'))):
-        roundup_server = xmlrpc_client.ServerProxy('http://bugs.python.org/xmlrpc', allow_none=True)
-        second_schema = roundup_server.schema()
+        roundup_server = xmlrpc_client.ServerProxy(rpc_server, allow_none=True)
+        second_schema = roundup_server.pow(2, 4)

     assert original_schema == second_schema
@@ -22,7 +22,7 @@ def assert_matcher(matcher_name):
     matcher = getattr(matchers, matcher_name)
     for k1, k2 in itertools.permutations(REQUESTS, 2):
         matched = matcher(REQUESTS[k1], REQUESTS[k2])
-        if matcher_name in set((k1, k2)):
+        if matcher_name in {k1, k2}:
             assert not matched
         else:
             assert matched
@@ -31,7 +31,7 @@ def assert_matcher(matcher_name):
 def test_uri_matcher():
     for k1, k2 in itertools.permutations(REQUESTS, 2):
         matched = matchers.uri(REQUESTS[k1], REQUESTS[k2])
-        if set((k1, k2)) != set(('base', 'method')):
+        if {k1, k2} != {'base', 'method'}:
             assert not matched
         else:
             assert matched
@@ -319,11 +319,11 @@ def test_additional_matchers():

     @vcr.use_cassette
     def function_defaults(cassette):
-        assert set(cassette._match_on) == set([vcr.matchers['uri']])
+        assert set(cassette._match_on) == {vcr.matchers['uri']}

     @vcr.use_cassette(additional_matchers=('body',))
     def function_additional(cassette):
-        assert set(cassette._match_on) == set([vcr.matchers['uri'], vcr.matchers['body']])
+        assert set(cassette._match_on) == {vcr.matchers['uri'], vcr.matchers['body']}

     function_defaults()
     function_additional()
tox.ini: 26 changed lines

@@ -1,5 +1,5 @@
 [tox]
-envlist = {py26,py27,py33,py34,py35,py36,pypy,pypy3}-{flakes,requests213,requests27,requests26,requests25,requests24,requests23,requests22,requests1,httplib2,urllib317,urllib319,urllib3110,tornado3,tornado4,boto,boto3,aiohttp}
+envlist = {py27,py35,py36,pypy}-{flakes,requests27,httplib2,urllib3121,tornado4,boto3,aiohttp}

 [testenv:flakes]
 skipsdist = True
@@ -13,31 +13,17 @@ deps = flake8
 commands =
     ./runtests.sh {posargs}
 deps =
-    # httpbin fails with latest Flask, so we pin it
-    Flask==0.10.1
+    Flask<1
     mock
     pytest
     pytest-httpbin
     PyYAML
-    requests1: requests==1.2.3
-    requests213: requests==2.13.0
     requests27: requests==2.7.0
-    requests26: requests==2.6.0
-    requests25: requests==2.5.0
-    requests24: requests==2.4.0
-    requests23: requests==2.3.0
-    requests22: requests==2.2.1
     httplib2: httplib2
-    urllib317: urllib3==1.7.1
-    urllib319: urllib3==1.9.1
-    urllib3110: urllib3==1.10.2
-    {py26,py27,py33,py34,py35,py36,pypy}-tornado3: tornado>=3,<4
-    {py26,py27,py33,py34,py35,py36,pypy}-tornado4: tornado>=4,<5
-    {py26,py27,py33,py34,py35,py36,pypy}-tornado3: pytest-tornado
-    {py26,py27,py33,py34,py35,py36,pypy}-tornado4: pytest-tornado
-    {py26,py27,py33,py34,py35,py36}-tornado3: pycurl
-    {py26,py27,py33,py34,py35,py36}-tornado4: pycurl
-    boto: boto
+    urllib3121: urllib3==1.21.1
+    {py27,py35,py36,pypy}-tornado4: tornado>=4,<5
+    {py27,py35,py36,pypy}-tornado4: pytest-tornado
+    {py27,py35,py36}-tornado4: pycurl
     boto3: boto3
     aiohttp: aiohttp
     aiohttp: pytest-asyncio
@@ -1,10 +1,11 @@
+import collections
 import sys
 import inspect
 import logging

 import wrapt

-from .compat import contextlib, collections
+from .compat import contextlib
 from .errors import UnhandledHTTPRequestError
 from .matchers import requests_match, uri, method
 from .patch import CassettePatcherBuilder
@@ -174,13 +175,13 @@ class Cassette(object):
     def use(cls, **kwargs):
         return CassetteContextDecorator.from_args(cls, **kwargs)

-    def __init__(self, path, serializer=yamlserializer, persister=FilesystemPersister, record_mode='once',
+    def __init__(self, path, serializer=None, persister=None, record_mode='once',
                  match_on=(uri, method), before_record_request=None,
                  before_record_response=None, custom_patches=(),
                  inject=False):
-        self._persister = persister
+        self._persister = persister or FilesystemPersister
         self._path = path
-        self._serializer = serializer
+        self._serializer = serializer or yamlserializer
         self._match_on = match_on
         self._before_record_request = before_record_request or (lambda x: x)
         self._before_record_response = before_record_response or (lambda x: x)
@@ -303,7 +304,7 @@ class Cassette(object):
         pass

     def __str__(self):
-        return "<Cassette containing {0} recorded response(s)>".format(
+        return "<Cassette containing {} recorded response(s)>".format(
             len(self)
         )

@@ -11,8 +11,4 @@ else:
 if not hasattr(contextlib, 'ExitStack'):
     import contextlib2 as contextlib

-import collections
-if not hasattr(collections, 'Counter'):
-    import backport_collections as collections
-
-__all__ = ['mock', 'contextlib', 'collections']
+__all__ = ['mock', 'contextlib']
@@ -1,4 +1,5 @@
 import copy
+import collections
 import functools
 import inspect
 import os
@@ -6,7 +7,6 @@ import types

 import six

-from .compat import collections
 from .cassette import Cassette
 from .serializers import yamlserializer, jsonserializer
 from .persisters.filesystem import FilesystemPersister
@@ -78,7 +78,7 @@ class VCR(object):
             serializer = self.serializers[serializer_name]
         except KeyError:
             raise KeyError(
-                "Serializer {0} doesn't exist or isn't registered".format(
+                "Serializer {} doesn't exist or isn't registered".format(
                     serializer_name
                 )
             )
@@ -91,7 +91,7 @@ class VCR(object):
                 matchers.append(self.matchers[m])
             except KeyError:
                 raise KeyError(
-                    "Matcher {0} doesn't exist or isn't registered".format(m)
+                    "Matcher {} doesn't exist or isn't registered".format(m)
                 )
         return matchers

@@ -145,6 +145,7 @@ class VCR(object):

         merged_config = {
             'serializer': self._get_serializer(serializer_name),
+            'persister': self.persister,
             'match_on': self._get_matchers(
                 tuple(matcher_names) + tuple(additional_matchers)
             ),
|
|||||||
differences = [m for m in matches if not m[0]]
|
differences = [m for m in matches if not m[0]]
|
||||||
if differences:
|
if differences:
|
||||||
log.debug(
|
log.debug(
|
||||||
"Requests {0} and {1} differ according to "
|
"Requests {} and {} differ according to "
|
||||||
"the following matchers: {2}".format(r1, r2, differences)
|
"the following matchers: {}".format(r1, r2, differences)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def requests_match(r1, r2, matchers):
|
def requests_match(r1, r2, matchers):
|
||||||
matches = [(m(r1, r2), m) for m in matchers]
|
matches = [(m(r1, r2), m) for m in matchers]
|
||||||
_log_matches(r1, r2, matches)
|
_log_matches(r1, r2, matches)
|
||||||
return all([m[0] for m in matches])
|
return all(m[0] for m in matches)
|
||||||
|
|||||||
@@ -59,7 +59,7 @@ def build_uri(**parts):
     port = parts['port']
     scheme = parts['protocol']
     default_port = {'https': 443, 'http': 80}[scheme]
-    parts['port'] = ':{0}'.format(port) if port != default_port else ''
+    parts['port'] = ':{}'.format(port) if port != default_port else ''
     return "{protocol}://{host}{port}{path}".format(**parts)


@@ -161,7 +161,7 @@ def main():
     for file_path in files:
         migrated = try_migrate(file_path)
         status = 'OK' if migrated else 'FAIL'
-        sys.stderr.write("[{0}] {1}\n".format(status, file_path))
+        sys.stderr.write("[{}] {}\n".format(status, file_path))
     sys.stderr.write("Done.\n")

vcr/patch.py: 58 changed lines

@@ -12,16 +12,6 @@ _HTTPConnection = httplib.HTTPConnection
 _HTTPSConnection = httplib.HTTPSConnection


-# Try to save the original types for requests
-try:
-    import requests.packages.urllib3.connectionpool as cpool
-except ImportError:  # pragma: no cover
-    pass
-else:
-    _VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
-    _cpoolHTTPConnection = cpool.HTTPConnection
-    _cpoolHTTPSConnection = cpool.HTTPSConnection
-
 # Try to save the original types for boto3
 try:
     import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
@@ -32,14 +22,27 @@ else:
     _cpoolBoto3HTTPConnection = cpool.HTTPConnection
     _cpoolBoto3HTTPSConnection = cpool.HTTPSConnection

+cpool = None
 # Try to save the original types for urllib3
 try:
-    import urllib3
+    import urllib3.connectionpool as cpool
 except ImportError:  # pragma: no cover
     pass
 else:
-    _VerifiedHTTPSConnection = urllib3.connectionpool.VerifiedHTTPSConnection
+    _VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
+    _cpoolHTTPConnection = cpool.HTTPConnection
+    _cpoolHTTPSConnection = cpool.HTTPSConnection
+
+# Try to save the original types for requests
+try:
+    if not cpool:
+        import requests.packages.urllib3.connectionpool as cpool
+except ImportError:  # pragma: no cover
+    pass
+else:
+    _VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
+    _cpoolHTTPConnection = cpool.HTTPConnection
+    _cpoolHTTPSConnection = cpool.HTTPSConnection


 # Try to save the original types for httplib2
@@ -166,7 +169,7 @@ class CassettePatcherBuilder(object):
         bases = (base_class,)
         if not issubclass(base_class, object):  # Check for old style class
             bases += (object,)
-        return type('{0}{1}'.format(base_class.__name__, self._cassette._path),
+        return type('{}{}'.format(base_class.__name__, self._cassette._path),
                     bases, dict(cassette=self._cassette))

     @_build_patchers_from_mock_triples_decorator
@@ -176,10 +179,9 @@ class CassettePatcherBuilder(object):

     def _requests(self):
         try:
-            import requests.packages.urllib3.connectionpool as cpool
+            from .stubs import requests_stubs
         except ImportError:  # pragma: no cover
             return ()
-        from .stubs import requests_stubs
         return self._urllib3_patchers(cpool, requests_stubs)

     def _boto3(self):
@@ -360,8 +362,22 @@ class ConnectionRemover(object):
 def reset_patchers():
     yield mock.patch.object(httplib, 'HTTPConnection', _HTTPConnection)
     yield mock.patch.object(httplib, 'HTTPSConnection', _HTTPSConnection)

     try:
-        import requests.packages.urllib3.connectionpool as cpool
+        import requests
+        if requests.__build__ < 0x021603:
+            # Avoid double unmock if requests 2.16.3
+            # First, this is pointless, requests.packages.urllib3 *IS* urllib3 (see packages.py)
+            # Second, this is unmocking twice the same classes with different namespaces
+            # and is creating weird issues and bugs:
+            # > AssertionError: assert <class 'urllib3.connection.HTTPConnection'>
+            # > is <class 'requests.packages.urllib3.connection.HTTPConnection'>
+            # This assert should work!!!
+            # Note that this also means that now, requests.packages is never imported
+            # if requests 2.16.3 or greater is used with VCRPy.
+            import requests.packages.urllib3.connectionpool as cpool
+        else:
+            raise ImportError("Skip requests not vendored anymore")
     except ImportError:  # pragma: no cover
         pass
     else:
@@ -384,11 +400,11 @@ def reset_patchers():
         pass
     else:
         yield mock.patch.object(cpool, 'VerifiedHTTPSConnection', _VerifiedHTTPSConnection)
-        yield mock.patch.object(cpool, 'HTTPConnection', _HTTPConnection)
-        yield mock.patch.object(cpool, 'HTTPSConnection', _HTTPSConnection)
+        yield mock.patch.object(cpool, 'HTTPConnection', _cpoolHTTPConnection)
+        yield mock.patch.object(cpool, 'HTTPSConnection', _cpoolHTTPSConnection)
         if hasattr(cpool.HTTPConnectionPool, 'ConnectionCls'):
-            yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls', _HTTPConnection)
-            yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls', _HTTPSConnection)
+            yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls', _cpoolHTTPConnection)
+            yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls', _cpoolHTTPSConnection)

     try:
         import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
|
|||||||
return self.scheme
|
return self.scheme
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return "<Request ({0}) {1}>".format(self.method, self.uri)
|
return "<Request ({}) {}>".format(self.method, self.uri)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return self.__str__()
|
return self.__str__()
|
||||||
|
|||||||
@@ -132,11 +132,11 @@ class VCRConnection(object):
         """
         port = self.real_connection.port
         default_port = {'https': 443, 'http': 80}[self._protocol]
-        return ':{0}'.format(port) if port != default_port else ''
+        return ':{}'.format(port) if port != default_port else ''

     def _uri(self, url):
         """Returns request absolute URI"""
-        uri = "{0}://{1}{2}{3}".format(
+        uri = "{}://{}{}{}".format(
             self._protocol,
             self.real_connection.host,
             self._port_postfix(),
@@ -146,7 +146,7 @@ class VCRConnection(object):

     def _url(self, uri):
         """Returns request selector url from absolute URI"""
-        prefix = "{0}://{1}{2}".format(
+        prefix = "{}://{}{}".format(
             self._protocol,
             self.real_connection.host,
             self._port_postfix(),
@@ -161,7 +161,7 @@ class VCRConnection(object):
             body=body,
             headers=headers or {}
         )
-        log.debug('Got {0}'.format(self._vcr_request))
+        log.debug('Got {}'.format(self._vcr_request))

         # Note: The request may not actually be finished at this point, so
         # I'm not sending the actual request until getresponse(). This
@@ -180,7 +180,7 @@ class VCRConnection(object):
             body="",
             headers={}
         )
-        log.debug('Got {0}'.format(self._vcr_request))
+        log.debug('Got {}'.format(self._vcr_request))

     def putheader(self, header, *values):
         self._vcr_request.headers[header] = values
@@ -214,7 +214,7 @@ class VCRConnection(object):
         # then return it
         if self.cassette.can_play_response_for(self._vcr_request):
             log.info(
-                "Playing response for {0} from cassette".format(
+                "Playing response for {} from cassette".format(
                     self._vcr_request
                 )
             )
@@ -236,7 +236,7 @@ class VCRConnection(object):
         # and return it.

         log.info(
-            "{0} not in cassette, sending to real server".format(
+            "{} not in cassette, sending to real server".format(
                 self._vcr_request
             )
         )
@@ -334,6 +334,18 @@ class VCRConnection(object):

             super(VCRConnection, self).__setattr__(name, value)

+    def __getattr__(self, name):
+        """
+        Send requests for weird attributes up to the real connection
+        (counterpart to __setattr above)
+        """
+        if self.__dict__.get('real_connection'):
+            # check in case real_connection has not been set yet, such as when
+            # we're setting the real_connection itself for the first time
+            return getattr(self.real_connection, name)
+
+        return super(VCRConnection, self).__getattr__(name)
+

 for k, v in HTTPConnection.__dict__.items():
     if isinstance(v, staticmethod):
@@ -12,6 +12,20 @@ from vcr.request import Request


 class MockClientResponse(ClientResponse):
+    def __init__(self, method, url):
+        super().__init__(
+            method=method,
+            url=url,
+            writer=None,
+            continue100=None,
+            timer=None,
+            request_info=None,
+            auto_decompress=None,
+            traces=None,
+            loop=asyncio.get_event_loop(),
+            session=None,
+        )
+
     # TODO: get encoding from header
     @asyncio.coroutine
     def json(self, *, encoding='utf-8', loads=json.loads):  # NOQA: E999
@@ -21,6 +35,10 @@ class MockClientResponse(ClientResponse):
     def text(self, encoding='utf-8'):
         return self.content.decode(encoding)

+    @asyncio.coroutine
+    def read(self):
+        return self.content
+
     @asyncio.coroutine
     def release(self):
         pass
@@ -71,7 +89,7 @@ def vcr_request(cassette, real_request):
             'message': response.reason,
         },
         'headers': dict(response.headers),
-        'body': {'string': (yield from response.text())},  # NOQA: E999
+        'body': {'string': (yield from response.read())},  # NOQA: E999
         'url': response.url,
     }
     cassette.append(vcr_request, vcr_response)
@@ -13,9 +13,7 @@ class VCRHTTPConnectionWithTimeout(VCRHTTPConnection,
        HTTPConnection.__init__.'''

         # Delete the keyword arguments that HTTPConnection would not recognize
-        safe_keys = set(
-            ('host', 'port', 'strict', 'timeout', 'source_address')
-        )
+        safe_keys = {'host', 'port', 'strict', 'timeout', 'source_address'}
         unknown_keys = set(kwargs.keys()) - safe_keys
         safe_kwargs = kwargs.copy()
         for kw in unknown_keys:
@@ -33,7 +31,7 @@ class VCRHTTPSConnectionWithTimeout(VCRHTTPSConnection,
     def __init__(self, *args, **kwargs):

         # Delete the keyword arguments that HTTPSConnection would not recognize
-        safe_keys = set((
+        safe_keys = {
             'host',
             'port',
             'key_file',
@@ -42,7 +40,7 @@ class VCRHTTPSConnectionWithTimeout(VCRHTTPSConnection,
             'timeout',
             'source_address',
             'ca_certs',
-        ))
+        }
         unknown_keys = set(kwargs.keys()) - safe_keys
         safe_kwargs = kwargs.copy()
         for kw in unknown_keys:
@@ -1,9 +1,9 @@
 '''Stubs for requests'''

 try:
-    from requests.packages.urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
-except ImportError:
     from urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
+except ImportError:
+    from requests.packages.urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection

 from ..stubs import VCRHTTPConnection, VCRHTTPSConnection
