Mirror of https://github.com/kevin1024/vcrpy.git
Synced 2025-12-09 01:03:24 +00:00

Compare commits: 101 commits

SHA1:
3a46616ba6 57df0c6921 ddb29745a9 ac7c9244cc 6da7cd0ea5 24df79b75f 0800b99214 3dad89df3f
5c9b0b4ccb 5a848d277e c88c738df9 9a8067d8e7 787c6bdb77 c3298c25a3 2f4c803678 60145983bf
b5c27f99d1 1ef099a13e 34d07406f9 e269c77670 889edccecb 37c8cbca91 9daf301deb 528c9e7b1a
4e36997e1a c571c932c9 d060a68ffd cfc483a08d 632af2e41a 7fdfce65ee 7cc513e1d2 4f3c5c0a6e
43b3411e6c 99d4150df8 8d5993eced 8a1b7c6532 3459d95d4f ebaae9bed7 d780bc04dd 31c358c035
573c6eee0b 70c92d05d9 5d866dd77c 2d08358b5c 64397d7ecc dc9cd4229b 6ae1b00207 54bb9aa27a
312ed2c234 20915a79c1 495afdddc8 dee580f971 6919c06b8c 77de8dc47e cb40a45eba 678586904b
ddbf0464f4 e14b94789b e6dba270ec 615cf8661a ce6656c4d5 8d083ba578 f0f5334c40 8de2312ccc
c3f5ae84b1 f6b8e4f8e7 2ac3fa9abe dd8b39b29e 1324feae99 7990c549d1 327797c4ff ac510097e0
00d973a0f5 79ff59feae 34252bc234 5f78657c52 00b4e451fe 44564ba39f 7f02a7e999 c28adea66d
3f006cc261 0eda8ba482 d620095c36 c8180326ad d55d593d1c 04f4a7fd2f 6fd04f3675 420f83b6b1
c6adcc83b3 dc61f5f520 4450cb992f 083b1ec686 97c924d8dd 65398131a4 7312229aef b62265c0ad
d00c60a4ad 4ddfb47c9c f0b7c3f1e0 646d12df94 eda64bc3be
.gitignore (vendored) · 3
@@ -7,4 +7,5 @@ dist/
*.egg-info/
pytestdebug.log

fixtures/
+/docs/_build
.travis.yml · 61
@@ -1,51 +1,50 @@
language: python
+sudo: false
before_install: openssl version
env:
  global:
    - secure: AifoKzwhjV94cmcQZrdQmqRu/9rkZZvWpwBv1daeAQpLOKFPGsOm3D+x2cSw9+iCfkgDZDfqQVv1kCaFVxTll8v8jTq5SJdqEY0NmGWbj/UkNtShh609oRDsuzLxAEwtVKYjf/h8K2BRea+bl1tGkwZ2vtmYS6dxNlAijjWOfds=
    - secure: LBSEg/gMj4u4Hrpo3zs6Y/1mTpd2RtcN49mZIFgTdbJ9IhpiNPqcEt647Lz94F9Eses2x2WbNuKqZKZZReY7QLbEzU1m0nN5jlaKrjcG5NR5clNABfFFyhgc0jBikyS4abAG8jc2efeaTrFuQwdoF4sE8YiVrkiVj2X5Xoi6sBk=
  matrix:
-    - WITH_LIB="requests2.2"
-    - WITH_LIB="requests2.3"
-    - WITH_LIB="requests2.4"
-    - WITH_LIB="requests2.5"
-    - WITH_LIB="requests2.7"
-    - WITH_LIB="requests1.x"
-    - WITH_LIB="httplib2"
-    - WITH_LIB="boto"
-    - WITH_LIB="urllib31.7"
-    - WITH_LIB="urllib31.9"
-    - WITH_LIB="urllib31.10"
-    - WITH_LIB="tornado"
+    - TOX_SUFFIX="flakes"
+    - TOX_SUFFIX="requests22"
+    - TOX_SUFFIX="requests23"
+    - TOX_SUFFIX="requests24"
+    - TOX_SUFFIX="requests25"
+    - TOX_SUFFIX="requests26"
+    - TOX_SUFFIX="requests27"
+    - TOX_SUFFIX="requests1"
+    - TOX_SUFFIX="httplib2"
+    - TOX_SUFFIX="boto"
+    - TOX_SUFFIX="boto3"
+    - TOX_SUFFIX="urllib317"
+    - TOX_SUFFIX="urllib319"
+    - TOX_SUFFIX="urllib3110"
+    - TOX_SUFFIX="tornado3"
+    - TOX_SUFFIX="tornado4"
matrix:
  allow_failures:
-    - env: WITH_LIB="boto"
+    - env: TOX_SUFFIX="boto"
+    - env: TOX_SUFFIX="boto3"
  exclude:
-    - env: WITH_LIB="boto"
+    - env: TOX_SUFFIX="boto"
      python: 3.3
-    - env: WITH_LIB="boto"
+    - env: TOX_SUFFIX="boto"
      python: 3.4
-    - env: WITH_LIB="requests1.x"
+    - env: TOX_SUFFIX="requests1"
      python: 3.4
+    - env: TOX_SUFFIX="requests1"
+      python: 3.5
python:
  - 2.6
  - 2.7
  - 3.3
  - 3.4
+  - 3.5
  - pypy
+  - pypy3
install:
-  - pip install .
-  - if [ $WITH_LIB = "requests1.x" ] ; then pip install requests==1.2.3; fi
-  - if [ $WITH_LIB = "requests2.2" ] ; then pip install requests==2.2.1; fi
-  - if [ $WITH_LIB = "requests2.3" ] ; then pip install requests==2.3.0; fi
-  - if [ $WITH_LIB = "requests2.4" ] ; then pip install requests==2.4.0; fi
-  - if [ $WITH_LIB = "requests2.5" ] ; then pip install requests==2.5.0; fi
-  - if [ $WITH_LIB = "requests2.7" ] ; then pip install requests==2.7.0; fi
-  - if [ $WITH_LIB = "httplib2" ] ; then pip install httplib2; fi
-  - if [ $WITH_LIB = "boto" ] ; then pip install boto; fi
-  - if [ $WITH_LIB = "urllib31.7" ] ; then pip install certifi urllib3==1.7.1; fi
-  - if [ $WITH_LIB = "urllib31.9" ] ; then pip install certifi urllib3==1.9.1; fi
-  - if [ $WITH_LIB = "urllib31.10" ] ; then pip install certifi urllib3==1.10.2; fi
-  - if [ $WITH_LIB = "tornado" ] ; then pip install tornado==4.2 pytest-tornado; fi
-  - if [ $WITH_LIB = "tornado" -a $TRAVIS_PYTHON_VERSION != "pypy" ] ; then pip install pycurl; fi
-script: python setup.py test
+  - pip install tox-travis
+  - if [[ $TOX_SUFFIX != 'flakes' ]]; then python setup.py install ; fi
+script:
+  - tox -e "${TOX_SUFFIX}"
MANIFEST.in
@@ -2,3 +2,5 @@ include README.rst
include LICENSE.txt
include tox.ini
recursive-include tests *
+recursive-exclude * __pycache__
+recursive-exclude * *.py[co]
README.rst · 736
@@ -1,25 +1,29 @@
+|PyPI| |Build Status| |Waffle Ready| |Gitter|
+
VCR.py
======

-.. figure:: https://raw.github.com/kevin1024/vcrpy/master/vcr.png
+.. image:: https://raw.github.com/kevin1024/vcrpy/master/vcr.png
   :alt: vcr.py

-   vcr.py
-
This is a Python version of `Ruby's VCR
library <https://github.com/vcr/vcr>`__.

-|Build Status| |Stories in Ready|
+Source code
+   https://github.com/kevin1024/vcrpy

-What it does
-------------
+Documentation
+   https://vcrpy.readthedocs.org/
+
+Rationale
+---------

VCR.py simplifies and speeds up tests that make HTTP requests. The
first time you run code that is inside a VCR.py context manager or
decorated function, VCR.py records all HTTP interactions that take
place through the libraries it supports and serializes and writes them
to a flat file (in yaml format by default). This flat file is called a
-cassette. When the relevant peice of code is executed again, VCR.py
+cassette. When the relevant piece of code is executed again, VCR.py
will read the serialized requests and responses from the
aforementioned cassette file, and intercept any HTTP requests that it
recognizes from the original test run and return the responses that
@@ -37,724 +41,18 @@ VCR.py will detect the absence of a cassette file and once again record
|
|||||||
all HTTP interactions, which will update them to correspond to the new
|
all HTTP interactions, which will update them to correspond to the new
|
||||||
API.
|
API.
|
||||||
|
|
||||||
Compatibility Notes
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
VCR.py supports Python 2.6 and 2.7, 3.3, 3.4, and
|
|
||||||
`pypy <http://pypy.org>`__.
|
|
||||||
|
|
||||||
The following http libraries are supported:
|
|
||||||
|
|
||||||
- urllib2
|
|
||||||
- urllib3
|
|
||||||
- http.client (python3)
|
|
||||||
- requests (both 1.x and 2.x versions)
|
|
||||||
- httplib2
|
|
||||||
- boto
|
|
||||||
- Tornado's AsyncHTTPClient
|
|
||||||
|
|
||||||
Usage
|
|
||||||
-----
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import vcr
|
|
||||||
import urllib2
|
|
||||||
|
|
||||||
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
|
|
||||||
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
|
|
||||||
assert 'Example domains' in response
|
|
||||||
|
|
||||||
Run this test once, and VCR.py will record the HTTP request to
|
|
||||||
``fixtures/vcr_cassettes/synopsis.yml``. Run it again, and VCR.py will
|
|
||||||
replay the response from iana.org when the http request is made. This
|
|
||||||
test is now fast (no real HTTP requests are made anymore), deterministic
|
|
||||||
(the test will continue to pass, even if you are offline, or iana.org
|
|
||||||
goes down for maintenance) and accurate (the response will contain the
|
|
||||||
same headers and body you get from a real request).
|
|
||||||
|
|
||||||
You can also use VCR.py as a decorator. The same request above would
|
|
||||||
look like this:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
@vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml')
|
|
||||||
def test_iana():
|
|
||||||
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
|
|
||||||
assert 'Example domains' in response
|
|
||||||
|
|
||||||
When using the decorator version of ``use_cassette``, it is possible to
|
|
||||||
omit the path to the cassette file.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
@vcr.use_cassette()
|
|
||||||
def test_iana():
|
|
||||||
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
|
|
||||||
assert 'Example domains' in response
|
|
||||||
|
|
||||||
In this case, the cassette file will be given the same name as the test
|
|
||||||
function, and it will be placed in the same directory as the file in
|
|
||||||
which the test is defined. See the Automatic Test Naming section below
|
|
||||||
for more details.
|
|
||||||
|
|
||||||
Configuration
|
|
||||||
-------------
|
|
||||||
|
|
||||||
If you don't like VCR's defaults, you can set options by instantiating a
|
|
||||||
``VCR`` class and setting the options on it.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
|
|
||||||
import vcr
|
|
||||||
|
|
||||||
my_vcr = vcr.VCR(
|
|
||||||
serializer='json',
|
|
||||||
cassette_library_dir='fixtures/cassettes',
|
|
||||||
record_mode='once',
|
|
||||||
match_on=['uri', 'method'],
|
|
||||||
)
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.json'):
|
|
||||||
# your http code here
|
|
||||||
|
|
||||||
Otherwise, you can override options each time you use a cassette.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
with vcr.use_cassette('test.yml', serializer='json', record_mode='once'):
|
|
||||||
# your http code here
|
|
||||||
|
|
||||||
Note: Per-cassette overrides take precedence over the global config.
|
|
||||||
|
|
||||||
Request matching
----------------

Request matching is configurable and allows you to change which requests
VCR considers identical. The default behavior is
``['method', 'scheme', 'host', 'port', 'path', 'query']``, which means
that requests with both the same URL and method (i.e., POST or GET) are
considered identical.

This can be configured by changing the ``match_on`` setting.

The following options are available:

- method (for example, POST or GET)
- uri (the full URI)
- host (the hostname of the server receiving the request)
- port (the port of the server receiving the request)
- path (the path of the request)
- query (the query string of the request)
- raw\_body (the entire request body as is)
- body (the entire request body unmarshalled by content-type,
  i.e. xmlrpc, json, form-urlencoded, falling back on raw\_body)
- headers (the headers of the request)

Backwards compatible matchers:

- url (the ``uri`` alias)

If these options don't work for you, you can also register your own
request matcher. This is described in the Advanced section of this
README.

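For example, to replay a recorded interaction even when the query string
differs, you can loosen ``match_on`` for a single cassette. A minimal sketch
(the cassette name and URL are only illustrative):

.. code:: python

    import vcr
    import urllib2

    my_vcr = vcr.VCR(cassette_library_dir='fixtures/cassettes')

    # Ignore scheme, port and query differences; match on method, host and path only.
    with my_vcr.use_cassette('reserved.yaml', match_on=['method', 'host', 'path']):
        urllib2.urlopen('http://www.iana.org/domains/reserved?utm_source=test').read()
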
Record Modes
------------

VCR supports 4 record modes (with the same behavior as Ruby's VCR):

once
~~~~

- Replay previously recorded interactions.
- Record new interactions if there is no cassette file.
- Cause an error to be raised for new requests if there is a cassette
  file.

It is similar to the new\_episodes record mode, but will prevent new,
unexpected requests from being made (i.e. because the request URI
changed).

once is the default record mode, used when you do not set one.

new\_episodes
~~~~~~~~~~~~~

- Record new interactions.
- Replay previously recorded interactions. It is similar to the once
  record mode, but will always record new interactions, even if you
  have an existing recorded one that is similar, but not identical.

This was the default behavior in versions < 0.3.0.

none
~~~~

- Replay previously recorded interactions.
- Cause an error to be raised for any new requests. This is useful when
  your code makes potentially dangerous HTTP requests. The none record
  mode guarantees that no new HTTP requests will be made.

all
~~~

- Record new interactions.
- Never replay previously recorded interactions. This can be
  temporarily used to force VCR to re-record a cassette (i.e. to ensure
  the responses are not out of date) or can be used when you simply
  want to log all HTTP requests.

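The mode is set with the ``record_mode`` option, either globally on a ``VCR``
instance or per cassette, as in the configuration examples above. A minimal
sketch (the cassette path is a placeholder):

.. code:: python

    import vcr

    # Default for this VCR instance: never record, only replay.
    my_vcr = vcr.VCR(record_mode='none')

    # Per-cassette override: force this one cassette to be re-recorded.
    with my_vcr.use_cassette('fixtures/vcr_cassettes/refresh.yaml', record_mode='all'):
        pass  # your http code here
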
Advanced Features
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
If you want, VCR.py can return information about the cassette it is
|
|
||||||
using to record your requests and responses. This will let you record
|
|
||||||
your requests and responses and make assertions on them, to make sure
|
|
||||||
that your code under test is generating the expected requests and
|
|
||||||
responses. This feature is not present in Ruby's VCR, but I think it is
|
|
||||||
a nice addition. Here's an example:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import vcr
|
|
||||||
import urllib2
|
|
||||||
|
|
||||||
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml') as cass:
|
|
||||||
response = urllib2.urlopen('http://www.zombo.com/').read()
|
|
||||||
# cass should have 1 request inside it
|
|
||||||
assert len(cass) == 1
|
|
||||||
# the request uri should have been http://www.zombo.com/
|
|
||||||
assert cass.requests[0].uri == 'http://www.zombo.com/'
|
|
||||||
|
|
||||||
The ``Cassette`` object exposes the following properties which I
|
|
||||||
consider part of the API. The fields are as follows:
|
|
||||||
|
|
||||||
- ``requests``: A list of vcr.Request objects corresponding to the http
|
|
||||||
requests that were made during the recording of the cassette. The
|
|
||||||
requests appear in the order that they were originally processed.
|
|
||||||
- ``responses``: A list of the responses made.
|
|
||||||
- ``play_count``: The number of times this cassette has played back a
|
|
||||||
response.
|
|
||||||
- ``all_played``: A boolean indicating whether all the responses have
|
|
||||||
been played back.
|
|
||||||
- ``responses_of(request)``: Access the responses that match a given
|
|
||||||
request
|
|
||||||
|
|
||||||
The ``Request`` object has the following properties:
|
|
||||||
|
|
||||||
- ``uri``: The full uri of the request. Example:
|
|
||||||
"https://google.com/?q=vcrpy"
|
|
||||||
- ``scheme``: The scheme used to make the request (http or https)
|
|
||||||
- ``host``: The host of the request, for example "www.google.com"
|
|
||||||
- ``port``: The port the request was made on
|
|
||||||
- ``path``: The path of the request. For example "/" or "/home.html"
|
|
||||||
- ``query``: The parsed query string of the request. Sorted list of
|
|
||||||
name, value pairs.
|
|
||||||
- ``method`` : The method used to make the request, for example "GET"
|
|
||||||
or "POST"
|
|
||||||
- ``body``: The body of the request, usually empty except for POST /
|
|
||||||
PUT / etc
|
|
||||||
|
|
||||||
Backwards compatible properties:
|
|
||||||
|
|
||||||
- ``url``: The ``uri`` alias
|
|
||||||
- ``protocol``: The ``scheme`` alias
|
|
||||||
|
|
||||||
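Building on the example above, here is a short sketch of these properties in
use, assuming the cassette already exists so the response is played back from
it:

.. code:: python

    import vcr
    import urllib2

    with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml') as cass:
        urllib2.urlopen('http://www.zombo.com/').read()

        # exactly one response was played back from the cassette
        assert cass.play_count == 1
        # and that was every response the cassette holds
        assert cass.all_played

        # look up the recorded response(s) for the first request
        assert len(cass.responses_of(cass.requests[0])) == 1
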
Register your own serializer
|
|
||||||
----------------------------
|
|
||||||
|
|
||||||
Don't like JSON or YAML? That's OK, VCR.py can serialize to any format
|
|
||||||
you would like. Create your own module or class instance with 2 methods:
|
|
||||||
|
|
||||||
- ``def deserialize(cassette_string)``
|
|
||||||
- ``def serialize(cassette_dict)``
|
|
||||||
|
|
||||||
Finally, register your class with VCR to use your new serializer.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import vcr
|
|
||||||
|
|
||||||
class BogoSerializer(object):
|
|
||||||
"""
|
|
||||||
Must implement serialize() and deserialize() methods
|
|
||||||
"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
my_vcr = vcr.VCR()
|
|
||||||
my_vcr.register_serializer('bogo', BogoSerializer())
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.bogo', serializer='bogo'):
|
|
||||||
# your http here
|
|
||||||
|
|
||||||
# After you register, you can set the default serializer to your new serializer
|
|
||||||
|
|
||||||
my_vcr.serializer = 'bogo'
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.bogo'):
|
|
||||||
# your http here
|
|
||||||
|
|
||||||
Register your own request matcher
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
Create your own method with the following signature
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
def my_matcher(r1, r2):
|
|
||||||
|
|
||||||
Your method receives the two requests and must return ``True`` if they
|
|
||||||
match, ``False`` if they don't.
|
|
||||||
|
|
||||||
Finally, register your method with VCR to use your new request matcher.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import vcr
|
|
||||||
|
|
||||||
def jurassic_matcher(r1, r2):
|
|
||||||
return r1.uri == r2.uri and 'JURASSIC PARK' in r1.body
|
|
||||||
|
|
||||||
my_vcr = vcr.VCR()
|
|
||||||
my_vcr.register_matcher('jurassic', jurassic_matcher)
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.yml', match_on=['jurassic']):
|
|
||||||
# your http here
|
|
||||||
|
|
||||||
# After you register, you can set the default match_on to use your new matcher
|
|
||||||
|
|
||||||
my_vcr.match_on = ['jurassic']
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.yml'):
|
|
||||||
# your http here
|
|
||||||
|
|
||||||
Filter sensitive data from the request
|
|
||||||
--------------------------------------
|
|
||||||
|
|
||||||
If you are checking your cassettes into source control, and are using
|
|
||||||
some form of authentication in your tests, you can filter out that
|
|
||||||
information so it won't appear in your cassette files. There are a few
|
|
||||||
ways to do this:
|
|
||||||
|
|
||||||
Filter information from HTTP Headers
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Use the ``filter_headers`` configuration option with a list of headers
|
|
||||||
to filter.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.yml', filter_headers=['authorization']):
|
|
||||||
# sensitive HTTP request goes here
|
|
||||||
|
|
||||||
Filter information from HTTP querystring
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Use the ``filter_query_parameters`` configuration option with a list of
|
|
||||||
query parameters to filter.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.yml', filter_query_parameters=['api_key']):
|
|
||||||
requests.get('http://api.com/getdata?api_key=secretstring')
|
|
||||||
|
|
||||||
Filter information from HTTP post data
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Use the ``filter_post_data_parameters`` configuration option with a list
|
|
||||||
of post data parameters to filter.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
with my_vcr.use_cassette('test.yml', filter_post_data_parameters=['client_secret']):
|
|
||||||
requests.post('http://api.com/postdata', data={'api_key': 'secretstring'})
|
|
||||||
|
|
||||||
Custom Request filtering
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
If none of these covers your request filtering needs, you can register a
|
|
||||||
callback that will manipulate the HTTP request before adding it to the
|
|
||||||
cassette. Use the ``before_record`` configuration option to do this.
|
|
||||||
Here is an example that will never record requests to the /login
|
|
||||||
endpoint.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
def before_record_cb(request):
|
|
||||||
if request.path != '/login':
|
|
||||||
return request
|
|
||||||
|
|
||||||
my_vcr = vcr.VCR(
|
|
||||||
before_record = before_record_cb,
|
|
||||||
)
|
|
||||||
with my_vcr.use_cassette('test.yml'):
|
|
||||||
# your http code here
|
|
||||||
|
|
||||||
You can also mutate the response using this callback. For example, you
|
|
||||||
could remove all query parameters from any requests to the ``'/login'``
|
|
||||||
path.
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
def scrub_login_request(request):
|
|
||||||
if request.path == '/login':
|
|
||||||
request.uri, _ = urllib.splitquery(request.uri)
|
|
||||||
return request
|
|
||||||
|
|
||||||
my_vcr = vcr.VCR(
|
|
||||||
before_record=scrub_login_request,
|
|
||||||
)
|
|
||||||
with my_vcr.use_cassette('test.yml'):
|
|
||||||
# your http code here
|
|
||||||
|
|
||||||
Custom Response Filtering
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
VCR.py also supports response filtering with the
``before_record_response`` keyword argument. Its usage is similar to
that of ``before_record``:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
def scrub_string(string, replacement=''):
|
|
||||||
def before_record_response(response):
|
|
||||||
response['body']['string'] = response['body']['string'].replace(string, replacement)
|
|
||||||
return response
|
|
||||||
return before_record_response
|
|
||||||
|
|
||||||
my_vcr = vcr.VCR(
|
|
||||||
before_record_response=scrub_string(settings.USERNAME, 'username'),
|
|
||||||
)
|
|
||||||
with my_vcr.use_cassette('test.yml'):
|
|
||||||
# your http code here
|
|
||||||
|
|
||||||
Ignore requests
---------------

If you would like to completely ignore certain requests, you can do it
in a few ways:

- Set the ``ignore_localhost`` option equal to True. This will not
  record any requests sent to (or responses from) localhost, 127.0.0.1,
  or 0.0.0.0.
- Set the ``ignore_hosts`` configuration option to a list of hosts to
  ignore.
- Add a ``before_record`` callback that returns None for requests you
  want to ignore.

Requests that are ignored by VCR will not be saved in a cassette, nor
played back from a cassette. VCR will completely ignore those requests
as if it didn't notice them at all, and they will continue to hit the
server as if VCR were not there.

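A minimal sketch of the first two options (the ignored host name is only an
example):

.. code:: python

    import vcr

    my_vcr = vcr.VCR(
        ignore_localhost=True,
        ignore_hosts=['telemetry.example.com'],
    )

    with my_vcr.use_cassette('test.yml'):
        # requests to localhost or telemetry.example.com pass straight through
        # and are never written to (or replayed from) the cassette
        pass  # your http code here
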
Custom Patches
|
|
||||||
--------------
|
|
||||||
|
|
||||||
If you use a custom ``HTTPConnection`` class, or otherwise make http
|
|
||||||
requests in a way that requires additional patching, you can use the
|
|
||||||
``custom_patches`` keyword argument of the ``VCR`` and ``Cassette``
|
|
||||||
objects to patch those objects whenever a cassette's context is entered.
|
|
||||||
To patch a custom version of ``HTTPConnection`` you can do something
|
|
||||||
like this:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
import where_the_custom_https_connection_lives
|
|
||||||
from vcr.stubs import VCRHTTPSConnection
|
|
||||||
my_vcr = config.VCR(custom_patches=((where_the_custom_https_connection_lives, 'CustomHTTPSConnection', VCRHTTPSConnection),))
|
|
||||||
|
|
||||||
@my_vcr.use_cassette(...)
|
|
||||||
|
|
||||||
Automatic Cassette Naming
|
|
||||||
-------------------------
|
|
||||||
|
|
||||||
VCR.py now allows the omission of the path argument to the use\_cassette
|
|
||||||
function. Both of the following are now legal and should work:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
@my_vcr.use_cassette
|
|
||||||
def my_test_function():
|
|
||||||
...
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
@my_vcr.use_cassette()
|
|
||||||
def my_test_function():
|
|
||||||
...
|
|
||||||
|
|
||||||
In both cases, VCR.py will use a path that is generated from the
|
|
||||||
provided test function's name. If no ``cassette_library_dir`` has been
|
|
||||||
set, the cassette will be in a file with the name of the test function
|
|
||||||
in directory of the file in which the test function is declared. If a
|
|
||||||
``cassette_library_dir`` has been set, the cassette will appear in that
|
|
||||||
directory in a file with the name of the decorated function.
|
|
||||||
|
|
||||||
It is possible to control the path produced by the automatic naming
|
|
||||||
machinery by customizing the ``path_transformer`` and
|
|
||||||
``func_path_generator`` vcr variables. To add an extension to all
|
|
||||||
cassette names, use ``VCR.ensure_suffix`` as follows:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
my_vcr = VCR(path_transformer=VCR.ensure_suffix('.yaml'))
|
|
||||||
|
|
||||||
@my_vcr.use_cassette
|
|
||||||
def my_test_function():
|
|
||||||
|
|
||||||
Installation
|
|
||||||
------------
|
|
||||||
|
|
||||||
VCR.py is a package on PyPI, so you can ``pip install vcrpy`` (first you
|
|
||||||
may need to ``brew install libyaml``
|
|
||||||
[`Homebrew <http://mxcl.github.com/homebrew/>`__\ ])
|
|
||||||
|
|
||||||
Ruby VCR compatibility
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
VCR.py does not aim to match the format of the Ruby VCR YAML files.
|
|
||||||
Cassettes generated by Ruby's VCR are not compatible with VCR.py.
|
|
||||||
|
|
||||||
Running VCR's test suite
|
|
||||||
------------------------
|
|
||||||
|
|
||||||
The tests are all run automatically on `Travis
|
|
||||||
CI <https://travis-ci.org/kevin1024/vcrpy>`__, but you can also run them
|
|
||||||
yourself using `py.test <http://pytest.org/>`__ and
|
|
||||||
`Tox <http://tox.testrun.org/>`__. Tox will automatically run them in
|
|
||||||
all environments VCR.py supports. The test suite is pretty big and slow,
|
|
||||||
but you can tell tox to only run specific tests like this:
|
|
||||||
|
|
||||||
``tox -e py27requests -- -v -k "'test_status_code or test_gzip'"``
|
|
||||||
|
|
||||||
This will run only tests that look like ``test_status_code`` or
|
|
||||||
``test_gzip`` in the test suite, and only in the python 2.7 environment
|
|
||||||
that has ``requests`` installed.
|
|
||||||
|
|
||||||
Also, in order for the boto tests to run, you will need an AWS key.
|
|
||||||
Refer to the `boto
|
|
||||||
documentation <http://boto.readthedocs.org/en/latest/getting_started.html>`__
|
|
||||||
for how to set this up. I have marked the boto tests as optional in
|
|
||||||
Travis so you don't have to worry about them failing if you submit a
|
|
||||||
pull request.
|
|
||||||
|
|
||||||
Logging
|
|
||||||
-------
|
|
||||||
|
|
||||||
VCR.py has a few log messages you can turn on to help you figure out if
|
|
||||||
HTTP requests are hitting a real server or not. You can turn them on
|
|
||||||
like this:
|
|
||||||
|
|
||||||
.. code:: python
|
|
||||||
|
|
||||||
import vcr
|
|
||||||
import requests
|
|
||||||
import logging
|
|
||||||
|
|
||||||
logging.basicConfig() # you need to initialize logging, otherwise you will not see anything from vcrpy
|
|
||||||
vcr_log = logging.getLogger("vcr")
|
|
||||||
vcr_log.setLevel(logging.INFO)
|
|
||||||
|
|
||||||
with vcr.use_cassette('headers.yml'):
|
|
||||||
requests.get('http://httpbin.org/headers')
|
|
||||||
|
|
||||||
The first time you run this, you will see:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
INFO:vcr.stubs:<Request (GET) http://httpbin.org/headers> not in cassette, sending to real server
|
|
||||||
|
|
||||||
The second time, you will see:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
INFO:vcr.stubs:Playing response for <Request (GET) http://httpbin.org/headers> from cassette
|
|
||||||
|
|
||||||
If you set the loglevel to DEBUG, you will also get information about
|
|
||||||
which matchers didn't match. This can help you with debugging custom
|
|
||||||
matchers.
|
|
||||||
|
|
||||||
Upgrade
|
|
||||||
-------
|
|
||||||
|
|
||||||
New Cassette Format
|
|
||||||
~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
The cassette format has changed in *VCR.py 1.x*; the *VCR.py 0.x*
|
|
||||||
cassettes cannot be used with *VCR.py 1.x*. The easiest way to upgrade
|
|
||||||
is to simply delete your cassettes and re-record all of them. VCR.py
|
|
||||||
also provides a migration script that attempts to upgrade your 0.x
|
|
||||||
cassettes to the new 1.x format. To use it, run the following command:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
python -m vcr.migration PATH
|
|
||||||
|
|
||||||
The PATH can be either a path to the directory with cassettes or the
|
|
||||||
path to a single cassette.
|
|
||||||
|
|
||||||
*Note*: Back up your cassettes files before migration. The migration
|
|
||||||
*should* only modify cassettes using the old 0.x format.
|
|
||||||
|
|
||||||
New serializer / deserializer API
|
|
||||||
---------------------------------
|
|
||||||
|
|
||||||
If you made a custom serializer, you will need to update it to match the
|
|
||||||
new API in version 1.0.x
|
|
||||||
|
|
||||||
- Serializers now take dicts and return strings.
|
|
||||||
- Deserializers take strings and return dicts (instead of requests,
|
|
||||||
responses pair)
|
|
||||||
|
|
||||||
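For instance, a 1.x-style serializer built on PyYAML could look like the
following sketch (the class name is only illustrative):

.. code:: python

    import yaml

    class MyYamlSerializer(object):
        def serialize(self, cassette_dict):
            # 1.x API: whole-cassette dict in, string out
            return yaml.safe_dump(cassette_dict)

        def deserialize(self, cassette_string):
            # 1.x API: string in, whole-cassette dict out
            return yaml.safe_load(cassette_string)
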
Changelog
|
|
||||||
---------
|
|
||||||
- 1.7.3 [#188] ``additional_matchers`` kwarg on ``use_cassette``.
|
|
||||||
[#191] Actually support passing multiple before_record_request
|
|
||||||
functions (thanks @agriffis).
|
|
||||||
- 1.7.2 [#186] Get effective_url in tornado (thanks @mvschaik), [#187]
|
|
||||||
Set request_time on Response object in tornado (thanks @abhinav).
|
|
||||||
- 1.7.1 [#183] Patch ``fetch_impl`` instead of the entire HTTPClient
|
|
||||||
class for Tornado (thanks @abhinav).
|
|
||||||
- 1.7.0 [#177] Properly support coroutine/generator decoration. [#178]
|
|
||||||
Support distribute (thanks @graingert). [#163] Make compatibility
|
|
||||||
between python2 and python3 recorded cassettes more robust (thanks
|
|
||||||
@gward).
|
|
||||||
- 1.6.1 [#169] Support conditional requirements in old versions of
|
|
||||||
pip, Fix RST parse errors generated by pandoc, [Tornado] Fix
|
|
||||||
unsupported features exception not being raised, [#166]
|
|
||||||
content-aware body matcher.
|
|
||||||
- 1.6.0 [#120] Tornado support (thanks @abhinav), [#147] packaging fixes
|
|
||||||
(thanks @graingert), [#158] allow filtering post params in requests
|
|
||||||
(thanks @MrJohz), [#140] add xmlrpclib support (thanks @Diaoul).
|
|
||||||
- 1.5.2 Fix crash when cassette path contains cassette library
|
|
||||||
directory (thanks @gazpachoking).
|
|
||||||
- 1.5.0 Automatic cassette naming and 'application/json' post data
|
|
||||||
filtering (thanks @marco-santamaria).
|
|
||||||
- 1.4.2 Fix a bug caused by requests 2.7 and chunked transfer encoding
|
|
||||||
- 1.4.1 Include README, tests, LICENSE in package. Thanks @ralphbean.
|
|
||||||
- 1.4.0 Filter post data parameters (thanks @eadmundo), support for
|
|
||||||
posting files through requests, inject\_cassette kwarg to access
|
|
||||||
cassette from ``use_cassette`` decorated function,
|
|
||||||
``with_current_defaults`` actually works (thanks @samstav).
|
|
||||||
- 1.3.0 Fix/add support for urllib3 (thanks @aisch), fix default port
|
|
||||||
for https (thanks @abhinav).
|
|
||||||
- 1.2.0 Add custom\_patches argument to VCR/Cassette objects to allow
|
|
||||||
users to stub custom classes when cassettes become active.
|
|
||||||
- 1.1.4 Add force reset around calls to actual connection from stubs,
|
|
||||||
to ensure compatibility with the version of httplib/urlib2 in python
|
|
||||||
2.7.9.
|
|
||||||
- 1.1.3 Fix python3 headers field (thanks @rtaboada), fix boto test
|
|
||||||
(thanks @telaviv), fix new\_episodes record mode (thanks @jashugan),
|
|
||||||
fix Windows connectionpool stub bug (thanks @gazpachoking), add
|
|
||||||
support for requests 2.5
|
|
||||||
- 1.1.2 Add urllib3==1.7.1 support. Make json serialize error handling
correct. Improve logging of match failures.
|
|
||||||
- 1.1.1 Use function signature preserving ``wrapt.decorator`` to write
|
|
||||||
the decorator version of use\_cassette in order to ensure
|
|
||||||
compatibility with py.test fixtures and python 2. Move all request
|
|
||||||
filtering into the ``before_record_callable``.
|
|
||||||
- 1.1.0 Add ``before_record_response``. Fix several bugs related to the
|
|
||||||
context management of cassettes.
|
|
||||||
- 1.0.3: Fix an issue with requests 2.4 and make sure case sensitivity
|
|
||||||
is consistent across python versions
|
|
||||||
- 1.0.2: Fix an issue with requests 2.3
|
|
||||||
- 1.0.1: Fix a bug with the new ignore requests feature and the once
|
|
||||||
record mode
|
|
||||||
- 1.0.0: *BACKWARDS INCOMPATIBLE*: Please see the 'upgrade' section in
|
|
||||||
the README. Take a look at the matcher section as well, you might
|
|
||||||
want to update your ``match_on`` settings. Add support for filtering
|
|
||||||
sensitive data from requests, matching query strings after the order
|
|
||||||
changes and improving the built-in matchers, (thanks to @mshytikov),
|
|
||||||
support for ignoring requests to certain hosts, bump supported
|
|
||||||
Python3 version to 3.4, fix some bugs with Boto support (thanks
|
|
||||||
@marusich), fix error with URL field capitalization in README (thanks
|
|
||||||
@simon-weber), added some log messages to help with debugging, added
|
|
||||||
``all_played`` property on cassette (thanks @mshytikov)
|
|
||||||
- 0.7.0: VCR.py now supports Python 3! (thanks @asundg) Also I
|
|
||||||
refactored the stub connections quite a bit to add support for the
|
|
||||||
putrequest and putheader calls. This version also adds support for
|
|
||||||
httplib2 (thanks @nilp0inter). I have added a couple tests for boto
|
|
||||||
since it is an http client in its own right. Finally, this version
|
|
||||||
includes a fix for a bug where requests wasn't being patched properly
|
|
||||||
(thanks @msabramo).
|
|
||||||
- 0.6.0: Store response headers as a list since a HTTP response can
|
|
||||||
have the same header twice (happens with set-cookie sometimes). This
|
|
||||||
has the added benefit of preserving the order of headers. Thanks
|
|
||||||
@smallcode for the bug report leading to this change. I have made an
|
|
||||||
effort to ensure backwards compatibility with the old cassettes'
|
|
||||||
header storage mechanism, but if you want to upgrade to the new
|
|
||||||
header storage, you should delete your cassettes and re-record them.
|
|
||||||
Also this release adds better error messages (thanks @msabramo) and
|
|
||||||
adds support for using VCR as a decorator (thanks @smallcode for the
|
|
||||||
motivation)
|
|
||||||
- 0.5.0: Change the ``response_of`` method to ``responses_of`` since
|
|
||||||
cassettes can now contain more than one response for a request. Since
|
|
||||||
this changes the API, I'm bumping the version. Also includes 2
|
|
||||||
bugfixes: a better error message when attempting to overwrite a
|
|
||||||
cassette file, and a fix for a bug with requests sessions (thanks
|
|
||||||
@msabramo)
|
|
||||||
- 0.4.0: Change default request recording behavior for multiple
|
|
||||||
requests. If you make the same request multiple times to the same
|
|
||||||
URL, the response might be different each time (maybe the response
|
|
||||||
has a timestamp in it or something), so this will make the same
|
|
||||||
request multiple times and save them all. Then, when you are
|
|
||||||
replaying the cassette, the responses will be played back in the same
|
|
||||||
order in which they were received. If you were making multiple
|
|
||||||
requests to the same URL in a cassette before version 0.4.0, you
|
|
||||||
might need to regenerate your cassette files. Also, removes support
|
|
||||||
for the cassette.play\_count counter API, since individual requests
|
|
||||||
aren't unique anymore. A cassette might contain the same request
|
|
||||||
several times. Also removes secure overwrite feature since that was
|
|
||||||
breaking overwriting files in Windows, and fixes a bug preventing
|
|
||||||
request's automatic body decompression from working.
|
|
||||||
- 0.3.5: Fix compatibility with requests 2.x
|
|
||||||
- 0.3.4: Bugfix: close file before renaming it. This fixes an issue on
|
|
||||||
Windows. Thanks @smallcode for the fix.
|
|
||||||
- 0.3.3: Bugfix for error message when an unregistered custom matcher
|
|
||||||
was used
|
|
||||||
- 0.3.2: Fix issue with new config syntax and the ``match_on``
|
|
||||||
parameter. Thanks, @chromy!
|
|
||||||
- 0.3.1: Fix issue causing full paths to be sent on the HTTP request
|
|
||||||
line.
|
|
||||||
- 0.3.0: *Backwards incompatible release* - Added support for record
|
|
||||||
modes, and changed the default recording behavior to the "once"
|
|
||||||
record mode. Please see the documentation on record modes for more.
|
|
||||||
Added support for custom request matching, and changed the default
|
|
||||||
request matching behavior to match only on the URL and method. Also,
|
|
||||||
improved the httplib mocking to add support for the
|
|
||||||
``HTTPConnection.send()`` method. This means that requests won't
|
|
||||||
actually be sent until the response is read, since I need to record
|
|
||||||
the entire request in order to match up the appropriate response. I
|
|
||||||
don't think this should cause any issues unless you are sending
|
|
||||||
requests without ever loading the response (which none of the
|
|
||||||
standard httplib wrappers do, as far as I know). Thanks to @fatuhoku
|
|
||||||
for some of the ideas and the motivation behind this release.
|
|
||||||
- 0.2.1: Fixed missing modules in setup.py
|
|
||||||
- 0.2.0: Added configuration API, which lets you configure some
|
|
||||||
settings on VCR (see the README). Also, VCR no longer saves cassettes
|
|
||||||
if they haven't changed at all and supports JSON as well as YAML
|
|
||||||
(thanks @sirpengi). Added amazing new skeumorphic logo, thanks
|
|
||||||
@hairarrow.
|
|
||||||
- 0.1.0: *backwards incompatible release - delete your old cassette
|
|
||||||
files*: This release adds the ability to access the cassette to make
|
|
||||||
assertions on it, as well as a major code refactor thanks to
|
|
||||||
@dlecocq. It also fixes a couple longstanding bugs with redirects and
|
|
||||||
HTTPS. [#3 and #4]
|
|
||||||
- 0.0.4: If you have libyaml installed, vcrpy will use the c bindings
|
|
||||||
instead. Speed up your tests! Thanks @dlecocq
|
|
||||||
- 0.0.3: Add support for requests 1.2.3. Support for older versions of
|
|
||||||
requests dropped (thanks @vitormazzi and @bryanhelmig)
|
|
||||||
- 0.0.2: Add support for requests / urllib3
|
|
||||||
- 0.0.1: Initial Release
|
|
||||||
License
=======

This library uses the MIT license. See `LICENSE.txt <LICENSE.txt>`__ for
more details

+.. |PyPI| image:: https://img.shields.io/pypi/v/vcrpy.svg
+   :target: https://pypi.python.org/pypi/vcrpy-unittest
.. |Build Status| image:: https://secure.travis-ci.org/kevin1024/vcrpy.png?branch=master
   :target: http://travis-ci.org/kevin1024/vcrpy
-.. |Stories in Ready| image:: https://badge.waffle.io/kevin1024/vcrpy.png?label=ready&title=Ready
+.. |Waffle Ready| image:: https://badge.waffle.io/kevin1024/vcrpy.png?label=ready&title=waffle
   :target: https://waffle.io/kevin1024/vcrpy
+.. |Gitter| image:: https://badges.gitter.im/Join%20Chat.svg
+   :alt: Join the chat at https://gitter.im/kevin1024/vcrpy
+   :target: https://gitter.im/kevin1024/vcrpy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
docs/Makefile · 192 (new file)
@@ -0,0 +1,192 @@
|
# Makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line.
|
||||||
|
SPHINXOPTS =
|
||||||
|
SPHINXBUILD = sphinx-build
|
||||||
|
PAPER =
|
||||||
|
BUILDDIR = _build
|
||||||
|
|
||||||
|
# User-friendly check for sphinx-build
|
||||||
|
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||||
|
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||||
|
endif
|
||||||
|
|
||||||
|
# Internal variables.
|
||||||
|
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||||
|
PAPEROPT_letter = -D latex_paper_size=letter
|
||||||
|
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||||
|
# the i18n builder cannot share the environment and doctrees with the others
|
||||||
|
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||||
|
|
||||||
|
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
|
||||||
|
|
||||||
|
help:
|
||||||
|
@echo "Please use \`make <target>' where <target> is one of"
|
||||||
|
@echo " html to make standalone HTML files"
|
||||||
|
@echo " dirhtml to make HTML files named index.html in directories"
|
||||||
|
@echo " singlehtml to make a single large HTML file"
|
||||||
|
@echo " pickle to make pickle files"
|
||||||
|
@echo " json to make JSON files"
|
||||||
|
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||||
|
@echo " qthelp to make HTML files and a qthelp project"
|
||||||
|
@echo " applehelp to make an Apple Help Book"
|
||||||
|
@echo " devhelp to make HTML files and a Devhelp project"
|
||||||
|
@echo " epub to make an epub"
|
||||||
|
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||||
|
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||||
|
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||||
|
@echo " text to make text files"
|
||||||
|
@echo " man to make manual pages"
|
||||||
|
@echo " texinfo to make Texinfo files"
|
||||||
|
@echo " info to make Texinfo files and run them through makeinfo"
|
||||||
|
@echo " gettext to make PO message catalogs"
|
||||||
|
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||||
|
@echo " xml to make Docutils-native XML files"
|
||||||
|
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||||
|
@echo " linkcheck to check all external links for integrity"
|
||||||
|
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||||
|
@echo " coverage to run coverage check of the documentation (if enabled)"
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm -rf $(BUILDDIR)/*
|
||||||
|
|
||||||
|
html:
|
||||||
|
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||||
|
|
||||||
|
dirhtml:
|
||||||
|
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||||
|
|
||||||
|
singlehtml:
|
||||||
|
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||||
|
|
||||||
|
pickle:
|
||||||
|
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can process the pickle files."
|
||||||
|
|
||||||
|
json:
|
||||||
|
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can process the JSON files."
|
||||||
|
|
||||||
|
htmlhelp:
|
||||||
|
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||||
|
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||||
|
|
||||||
|
qthelp:
|
||||||
|
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||||
|
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||||
|
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/vcrpy.qhcp"
|
||||||
|
@echo "To view the help file:"
|
||||||
|
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/vcrpy.qhc"
|
||||||
|
|
||||||
|
applehelp:
|
||||||
|
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
|
||||||
|
@echo "N.B. You won't be able to view it unless you put it in" \
|
||||||
|
"~/Library/Documentation/Help or install it in your application" \
|
||||||
|
"bundle."
|
||||||
|
|
||||||
|
devhelp:
|
||||||
|
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished."
|
||||||
|
@echo "To view the help file:"
|
||||||
|
@echo "# mkdir -p $$HOME/.local/share/devhelp/vcrpy"
|
||||||
|
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/vcrpy"
|
||||||
|
@echo "# devhelp"
|
||||||
|
|
||||||
|
epub:
|
||||||
|
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||||
|
|
||||||
|
latex:
|
||||||
|
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||||
|
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||||
|
"(use \`make latexpdf' here to do that automatically)."
|
||||||
|
|
||||||
|
latexpdf:
|
||||||
|
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||||
|
@echo "Running LaTeX files through pdflatex..."
|
||||||
|
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||||
|
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||||
|
|
||||||
|
latexpdfja:
|
||||||
|
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||||
|
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||||
|
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||||
|
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||||
|
|
||||||
|
text:
|
||||||
|
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||||
|
|
||||||
|
man:
|
||||||
|
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||||
|
|
||||||
|
texinfo:
|
||||||
|
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||||
|
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||||
|
"(use \`make info' here to do that automatically)."
|
||||||
|
|
||||||
|
info:
|
||||||
|
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||||
|
@echo "Running Texinfo files through makeinfo..."
|
||||||
|
make -C $(BUILDDIR)/texinfo info
|
||||||
|
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||||
|
|
||||||
|
gettext:
|
||||||
|
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||||
|
|
||||||
|
changes:
|
||||||
|
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||||
|
@echo
|
||||||
|
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||||
|
|
||||||
|
linkcheck:
|
||||||
|
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||||
|
@echo
|
||||||
|
@echo "Link check complete; look for any errors in the above output " \
|
||||||
|
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||||
|
|
||||||
|
doctest:
|
||||||
|
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||||
|
@echo "Testing of doctests in the sources finished, look at the " \
|
||||||
|
"results in $(BUILDDIR)/doctest/output.txt."
|
||||||
|
|
||||||
|
coverage:
|
||||||
|
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
|
||||||
|
@echo "Testing of coverage in the sources finished, look at the " \
|
||||||
|
"results in $(BUILDDIR)/coverage/python.txt."
|
||||||
|
|
||||||
|
xml:
|
||||||
|
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||||
|
|
||||||
|
pseudoxml:
|
||||||
|
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
||||||
docs/advanced.rst · 345 (new file)
@@ -0,0 +1,345 @@
|
Advanced Features
|
||||||
|
=================
|
||||||
|
|
||||||
|
If you want, VCR.py can return information about the cassette it is
|
||||||
|
using to record your requests and responses. This will let you record
|
||||||
|
your requests and responses and make assertions on them, to make sure
|
||||||
|
that your code under test is generating the expected requests and
|
||||||
|
responses. This feature is not present in Ruby's VCR, but I think it is
|
||||||
|
a nice addition. Here's an example:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
import vcr
|
||||||
|
import urllib2
|
||||||
|
|
||||||
|
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml') as cass:
|
||||||
|
response = urllib2.urlopen('http://www.zombo.com/').read()
|
||||||
|
# cass should have 1 request inside it
|
||||||
|
assert len(cass) == 1
|
||||||
|
# the request uri should have been http://www.zombo.com/
|
||||||
|
assert cass.requests[0].uri == 'http://www.zombo.com/'
|
||||||
|
|
||||||
|
The ``Cassette`` object exposes the following properties which I
|
||||||
|
consider part of the API. The fields are as follows:
|
||||||
|
|
||||||
|
- ``requests``: A list of vcr.Request objects corresponding to the http
|
||||||
|
requests that were made during the recording of the cassette. The
|
||||||
|
requests appear in the order that they were originally processed.
|
||||||
|
- ``responses``: A list of the responses made.
|
||||||
|
- ``play_count``: The number of times this cassette has played back a
|
||||||
|
response.
|
||||||
|
- ``all_played``: A boolean indicating whether all the responses have
|
||||||
|
been played back.
|
||||||
|
- ``responses_of(request)``: Access the responses that match a given
|
||||||
|
request
|
||||||
|
|
||||||
|
The ``Request`` object has the following properties:
|
||||||
|
|
||||||
|
- ``uri``: The full uri of the request. Example:
|
||||||
|
"https://google.com/?q=vcrpy"
|
||||||
|
- ``scheme``: The scheme used to make the request (http or https)
|
||||||
|
- ``host``: The host of the request, for example "www.google.com"
|
||||||
|
- ``port``: The port the request was made on
|
||||||
|
- ``path``: The path of the request. For example "/" or "/home.html"
|
||||||
|
- ``query``: The parsed query string of the request. Sorted list of
|
||||||
|
name, value pairs.
|
||||||
|
- ``method`` : The method used to make the request, for example "GET"
|
||||||
|
or "POST"
|
||||||
|
- ``body``: The body of the request, usually empty except for POST /
|
||||||
|
PUT / etc
|
||||||
|
|
||||||
|
Backwards compatible properties:
|
||||||
|
|
||||||
|
- ``url``: The ``uri`` alias
|
||||||
|
- ``protocol``: The ``scheme`` alias
|
||||||
|
|
||||||
|
Register your own serializer
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
Don't like JSON or YAML? That's OK, VCR.py can serialize to any format
|
||||||
|
you would like. Create your own module or class instance with 2 methods:
|
||||||
|
|
||||||
|
- ``def deserialize(cassette_string)``
|
||||||
|
- ``def serialize(cassette_dict)``
|
||||||
|
|
||||||
|
Finally, register your class with VCR to use your new serializer.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
import vcr
|
||||||
|
|
||||||
|
class BogoSerializer(object):
|
||||||
|
"""
|
||||||
|
Must implement serialize() and deserialize() methods
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
my_vcr = vcr.VCR()
|
||||||
|
my_vcr.register_serializer('bogo', BogoSerializer())
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.bogo', serializer='bogo'):
|
||||||
|
# your http here
|
||||||
|
|
||||||
|
# After you register, you can set the default serializer to your new serializer
|
||||||
|
|
||||||
|
my_vcr.serializer = 'bogo'
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.bogo'):
|
||||||
|
# your http here
|
||||||
|
|
||||||
|
Register your own request matcher
|
||||||
|
---------------------------------
|
||||||
|
|
||||||
|
Create your own method with the following signature
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
def my_matcher(r1, r2):
|
||||||
|
|
||||||
|
Your method receives the two requests and must return ``True`` if they
|
||||||
|
match, ``False`` if they don't.
|
||||||
|
|
||||||
|
Finally, register your method with VCR to use your new request matcher.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
import vcr
|
||||||
|
|
||||||
|
def jurassic_matcher(r1, r2):
|
||||||
|
return r1.uri == r2.uri and 'JURASSIC PARK' in r1.body
|
||||||
|
|
||||||
|
my_vcr = vcr.VCR()
|
||||||
|
my_vcr.register_matcher('jurassic', jurassic_matcher)
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.yml', match_on=['jurassic']):
|
||||||
|
# your http here
|
||||||
|
|
||||||
|
# After you register, you can set the default match_on to use your new matcher
|
||||||
|
|
||||||
|
my_vcr.match_on = ['jurassic']
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.yml'):
|
||||||
|
# your http here
|
||||||
|
|
||||||
|
Filter sensitive data from the request
|
||||||
|
--------------------------------------
|
||||||
|
|
||||||
|
If you are checking your cassettes into source control, and are using
|
||||||
|
some form of authentication in your tests, you can filter out that
|
||||||
|
information so it won't appear in your cassette files. There are a few
|
||||||
|
ways to do this:
|
||||||
|
|
||||||
|
Filter information from HTTP Headers
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Use the ``filter_headers`` configuration option with a list of headers
|
||||||
|
to filter.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.yml', filter_headers=['authorization']):
|
||||||
|
# sensitive HTTP request goes here
|
||||||
|
|
||||||
|
Filter information from HTTP querystring
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Use the ``filter_query_parameters`` configuration option with a list of
|
||||||
|
query parameters to filter.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.yml', filter_query_parameters=['api_key']):
|
||||||
|
requests.get('http://api.com/getdata?api_key=secretstring')
|
||||||
|
|
||||||
|
Filter information from HTTP post data
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
Use the ``filter_post_data_parameters`` configuration option with a list
|
||||||
|
of post data parameters to filter.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.yml', filter_post_data_parameters=['client_secret']):
|
||||||
|
requests.post('http://api.com/postdata', data={'api_key': 'secretstring'})
|
||||||
|
|
||||||
|
Advanced use of filter_headers, filter_query_parameters and filter_post_data_parameters
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
In all of the above cases, it's also possible to pass a list of ``(key, value)``
|
||||||
|
tuples where the value can be any of the following:
|
||||||
|
|
||||||
|
* A new value to replace the original value.
|
||||||
|
* ``None`` to remove the key/value pair. (Same as passing a simple key string.)
|
||||||
|
* A callable that returns a new value or ``None``.
|
||||||
|
|
||||||
|
So these two calls are the same:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
# original (still works)
|
||||||
|
vcr = VCR(filter_headers=['authorization'])
|
||||||
|
|
||||||
|
# new
|
||||||
|
vcr = VCR(filter_headers=[('authorization', None)])
|
||||||
|
|
||||||
|
Here are two examples of the new functionality:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
# replace with a static value (most common)
|
||||||
|
vcr = VCR(filter_headers=[('authorization', 'XXXXXX')])
|
||||||
|
|
||||||
|
# replace with a callable, for example when testing
|
||||||
|
# lots of different kinds of authorization.
|
||||||
|
def replace_auth(key, value, request):
|
||||||
|
auth_type = value.split(' ', 1)[0]
|
||||||
|
return '{} {}'.format(auth_type, 'XXXXXX')
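The callable is then passed exactly like a static replacement value; assuming
the ``replace_auth`` function above, the configuration looks like this:

.. code:: python

    vcr = VCR(filter_headers=[('authorization', replace_auth)])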
|
||||||
|
|
||||||
|
Custom Request filtering
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
If none of these covers your request filtering needs, you can register a
|
||||||
|
callback that will manipulate the HTTP request before adding it to the
|
||||||
|
cassette. Use the ``before_record`` configuration option to do this.
|
||||||
|
Here is an example that will never record requests to the /login
|
||||||
|
endpoint.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
def before_record_cb(request):
|
||||||
|
if request.path != '/login':
|
||||||
|
return request
|
||||||
|
|
||||||
|
my_vcr = vcr.VCR(
|
||||||
|
before_record = before_record_cb,
|
||||||
|
)
|
||||||
|
with my_vcr.use_cassette('test.yml'):
|
||||||
|
# your http code here
|
||||||
|
|
||||||
|
You can also mutate the request using this callback. For example, you
|
||||||
|
could remove all query parameters from any requests to the ``'/login'``
|
||||||
|
path.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
def scrub_login_request(request):
|
||||||
|
if request.path == '/login':
|
||||||
|
request.uri, _ = urllib.splitquery(request.uri)
|
||||||
|
return request
|
||||||
|
|
||||||
|
my_vcr = vcr.VCR(
|
||||||
|
before_record=scrub_login_request,
|
||||||
|
)
|
||||||
|
with my_vcr.use_cassette('test.yml'):
|
||||||
|
# your http code here
|
||||||
|
|
||||||
|
Custom Response Filtering
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
VCR.py also supports response filtering with the
|
||||||
|
``before_record_response`` keyword argument. Its usage is similar to
|
||||||
|
that of ``before_record``:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
def scrub_string(string, replacement=''):
|
||||||
|
def before_record_response(response):
|
||||||
|
response['body']['string'] = response['body']['string'].replace(string, replacement)
|
||||||
|
return response
|
||||||
|
return before_record_response
|
||||||
|
|
||||||
|
my_vcr = vcr.VCR(
|
||||||
|
before_record_response=scrub_string(settings.USERNAME, 'username'),
|
||||||
|
)
|
||||||
|
with my_vcr.use_cassette('test.yml'):
|
||||||
|
# your http code here
|
||||||
|
|
||||||
|
|
||||||
|
Decode compressed response
|
||||||
|
---------------------------
|
||||||
|
|
||||||
|
When the ``decode_compressed_response`` keyword argument of a ``VCR`` object
|
||||||
|
is set to True, VCR will decompress "gzip" and "deflate" response bodies
|
||||||
|
before recording. This ensures that these interactions become readable and
|
||||||
|
editable after being serialized.
|
||||||
|
|
||||||
|
.. note::
|
||||||
|
Decompression is done before any other specified `Custom Response Filtering`_.
|
||||||
|
|
||||||
|
This option should be avoided if the actual decompression of response bodies
|
||||||
|
is part of the functionality of the library or app being tested.
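A minimal sketch of turning the option on (the cassette name is arbitrary):

.. code:: python

    import vcr

    my_vcr = vcr.VCR(decode_compressed_response=True)

    with my_vcr.use_cassette('test.yml'):
        # gzip/deflate response bodies recorded here are stored decompressed
        pass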
|
||||||
|
|
||||||
|
Ignore requests
|
||||||
|
---------------
|
||||||
|
|
||||||
|
If you would like to completely ignore certain requests, you can do it
|
||||||
|
in a few ways:
|
||||||
|
|
||||||
|
- Set the ``ignore_localhost`` option equal to True. This will not
|
||||||
|
record any requests sent to (or responses from) localhost, 127.0.0.1,
|
||||||
|
or 0.0.0.0.
|
||||||
|
- Set the ``ignore_hosts`` configuration option to a list of hosts to
|
||||||
|
ignore
|
||||||
|
- Add a ``before_record`` callback that returns None for requests you
|
||||||
|
want to ignore
|
||||||
|
|
||||||
|
Requests that are ignored by VCR will not be saved in a cassette, nor
|
||||||
|
played back from a cassette. VCR will completely ignore those requests
|
||||||
|
as if it didn't notice them at all, and they will continue to hit the
|
||||||
|
server as if VCR were not there.
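For example, the first two options can be combined like this (the ignored
hostnames are just placeholders):

.. code:: python

    import vcr

    my_vcr = vcr.VCR(
        ignore_localhost=True,
        ignore_hosts=['analytics.example.com', 'telemetry.example.com'],
    )

    with my_vcr.use_cassette('test.yml'):
        # Requests to localhost or the ignored hosts go straight to the real
        # servers and are never written to (or played from) the cassette.
        pass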
|
||||||
|
|
||||||
|
Custom Patches
|
||||||
|
--------------
|
||||||
|
|
||||||
|
If you use a custom ``HTTPConnection`` class, or otherwise make http
|
||||||
|
requests in a way that requires additional patching, you can use the
|
||||||
|
``custom_patches`` keyword argument of the ``VCR`` and ``Cassette``
|
||||||
|
objects to patch those objects whenever a cassette's context is entered.
|
||||||
|
To patch a custom version of ``HTTPConnection`` you can do something
|
||||||
|
like this:
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
import vcr
import where_the_custom_https_connection_lives
|
||||||
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
my_vcr = vcr.VCR(custom_patches=((where_the_custom_https_connection_lives, 'CustomHTTPSConnection', VCRHTTPSConnection),))
|
||||||
|
|
||||||
|
@my_vcr.use_cassette(...)
|
||||||
|
|
||||||
|
Automatic Cassette Naming
|
||||||
|
-------------------------
|
||||||
|
|
||||||
|
VCR.py now allows the omission of the path argument to the use\_cassette
|
||||||
|
function. Both of the following forms are now legal and should work:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
@my_vcr.use_cassette
|
||||||
|
def my_test_function():
|
||||||
|
...
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
@my_vcr.use_cassette()
|
||||||
|
def my_test_function():
|
||||||
|
...
|
||||||
|
|
||||||
|
In both cases, VCR.py will use a path that is generated from the
|
||||||
|
provided test function's name. If no ``cassette_library_dir`` has been
|
||||||
|
set, the cassette will be in a file with the name of the test function
|
||||||
|
in the directory of the file in which the test function is declared. If a
|
||||||
|
``cassette_library_dir`` has been set, the cassette will appear in that
|
||||||
|
directory in a file with the name of the decorated function.
|
||||||
|
|
||||||
|
It is possible to control the path produced by the automatic naming
|
||||||
|
machinery by customizing the ``path_transformer`` and
|
||||||
|
``func_path_generator`` vcr variables. To add an extension to all
|
||||||
|
cassette names, use ``VCR.ensure_suffix`` as follows:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
my_vcr = VCR(path_transformer=VCR.ensure_suffix('.yaml'))
|
||||||
|
|
||||||
|
@my_vcr.use_cassette
|
||||||
|
def my_test_function():
|
||||||
docs/api.rst (new file, 51 lines)
@@ -0,0 +1,51 @@
|
|||||||
|
API
|
||||||
|
===
|
||||||
|
|
||||||
|
:mod:`~vcr.config`
|
||||||
|
------------------
|
||||||
|
|
||||||
|
.. automodule:: vcr.config
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
|
|
||||||
|
:mod:`~vcr.cassette`
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
.. automodule:: vcr.cassette
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
|
|
||||||
|
:mod:`~vcr.matchers`
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
.. automodule:: vcr.matchers
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
|
|
||||||
|
:mod:`~vcr.filters`
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
.. automodule:: vcr.filters
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
|
|
||||||
|
:mod:`~vcr.request`
|
||||||
|
-------------------
|
||||||
|
|
||||||
|
.. automodule:: vcr.request
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
|
|
||||||
|
:mod:`~vcr.serialize`
|
||||||
|
---------------------
|
||||||
|
|
||||||
|
.. automodule:: vcr.serialize
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
|
|
||||||
|
:mod:`~vcr.patch`
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
.. automodule:: vcr.patch
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
docs/changelog.rst (new file, 152 lines)
@@ -0,0 +1,152 @@
|
|||||||
|
Changelog
|
||||||
|
---------
|
||||||
|
- 1.9.0 Add support for boto3 (thanks @desdm, @foorbarna). Fix deepcopy issue
|
||||||
|
for response headers when `decode_compressed_response` is enabled (thanks
|
||||||
|
@nickdirienzo)
|
||||||
|
- 1.8.0 Fix for Serialization errors with JSON adapter (thanks
|
||||||
|
@aliaksandrb). Avoid concatenating bytes with strings (thanks
|
||||||
|
@jaysonsantos). Exclude __pycache__ dirs & compiled files in sdist
|
||||||
|
(thanks @koobs). Fix Tornado support behavior for Tornado 3 (thanks
|
||||||
|
@abhinav). decode_compressed_response option and filter (thanks
|
||||||
|
@jayvdb).
|
||||||
|
- 1.7.4 [#217] Make use_cassette decorated functions actually return a
|
||||||
|
value (thanks @bcen). [#199] Fix path transformation defaults.
|
||||||
|
Better headers dictionary management.
|
||||||
|
- 1.7.3 [#188] ``additional_matchers`` kwarg on ``use_cassette``.
|
||||||
|
[#191] Actually support passing multiple before_record_request
|
||||||
|
functions (thanks @agriffis).
|
||||||
|
- 1.7.2 [#186] Get effective_url in tornado (thanks @mvschaik), [#187]
|
||||||
|
Set request_time on Response object in tornado (thanks @abhinav).
|
||||||
|
- 1.7.1 [#183] Patch ``fetch_impl`` instead of the entire HTTPClient
|
||||||
|
class for Tornado (thanks @abhinav).
|
||||||
|
- 1.7.0 [#177] Properly support coroutine/generator decoration. [#178]
|
||||||
|
Support distribute (thanks @graingert). [#163] Make compatibility
|
||||||
|
between python2 and python3 recorded cassettes more robust (thanks
|
||||||
|
@gward).
|
||||||
|
- 1.6.1 [#169] Support conditional requirements in old versions of
|
||||||
|
pip, Fix RST parse errors generated by pandoc, [Tornado] Fix
|
||||||
|
unsupported features exception not being raised, [#166]
|
||||||
|
content-aware body matcher.
|
||||||
|
- 1.6.0 [#120] Tornado support (thanks @abhinav), [#147] packaging fixes
|
||||||
|
(thanks @graingert), [#158] allow filtering post params in requests
|
||||||
|
(thanks @MrJohz), [#140] add xmlrpclib support (thanks @Diaoul).
|
||||||
|
- 1.5.2 Fix crash when cassette path contains cassette library
|
||||||
|
directory (thanks @gazpachoking).
|
||||||
|
- 1.5.0 Automatic cassette naming and 'application/json' post data
|
||||||
|
filtering (thanks @marco-santamaria).
|
||||||
|
- 1.4.2 Fix a bug caused by requests 2.7 and chunked transfer encoding
|
||||||
|
- 1.4.1 Include README, tests, LICENSE in package. Thanks @ralphbean.
|
||||||
|
- 1.4.0 Filter post data parameters (thanks @eadmundo), support for
|
||||||
|
posting files through requests, inject\_cassette kwarg to access
|
||||||
|
cassette from ``use_cassette`` decorated function,
|
||||||
|
``with_current_defaults`` actually works (thanks @samstav).
|
||||||
|
- 1.3.0 Fix/add support for urllib3 (thanks @aisch), fix default port
|
||||||
|
for https (thanks @abhinav).
|
||||||
|
- 1.2.0 Add custom\_patches argument to VCR/Cassette objects to allow
|
||||||
|
users to stub custom classes when cassettes become active.
|
||||||
|
- 1.1.4 Add force reset around calls to actual connection from stubs,
|
||||||
|
to ensure compatibility with the version of httplib/urlib2 in python
|
||||||
|
2.7.9.
|
||||||
|
- 1.1.3 Fix python3 headers field (thanks @rtaboada), fix boto test
|
||||||
|
(thanks @telaviv), fix new\_episodes record mode (thanks @jashugan),
|
||||||
|
fix Windows connectionpool stub bug (thanks @gazpachoking), add
|
||||||
|
support for requests 2.5
|
||||||
|
- 1.1.2 Add urllib==1.7.1 support. Make json serialize error handling
|
||||||
|
correct. Improve logging of match failures.
|
||||||
|
- 1.1.1 Use function signature preserving ``wrapt.decorator`` to write
|
||||||
|
the decorator version of use\_cassette in order to ensure
|
||||||
|
compatibility with py.test fixtures and python 2. Move all request
|
||||||
|
filtering into the ``before_record_callable``.
|
||||||
|
- 1.1.0 Add ``before_record_response``. Fix several bugs related to the
|
||||||
|
context management of cassettes.
|
||||||
|
- 1.0.3: Fix an issue with requests 2.4 and make sure case sensitivity
|
||||||
|
is consistent across python versions
|
||||||
|
- 1.0.2: Fix an issue with requests 2.3
|
||||||
|
- 1.0.1: Fix a bug with the new ignore requests feature and the once
|
||||||
|
record mode
|
||||||
|
- 1.0.0: *BACKWARDS INCOMPATIBLE*: Please see the 'upgrade' section in
|
||||||
|
the README. Take a look at the matcher section as well, you might
|
||||||
|
want to update your ``match_on`` settings. Add support for filtering
|
||||||
|
sensitive data from requests, matching query strings after the order
|
||||||
|
changes and improving the built-in matchers, (thanks to @mshytikov),
|
||||||
|
support for ignoring requests to certain hosts, bump supported
|
||||||
|
Python3 version to 3.4, fix some bugs with Boto support (thanks
|
||||||
|
@marusich), fix error with URL field capitalization in README (thanks
|
||||||
|
@simon-weber), added some log messages to help with debugging, added
|
||||||
|
``all_played`` property on cassette (thanks @mshytikov)
|
||||||
|
- 0.7.0: VCR.py now supports Python 3! (thanks @asundg) Also I
|
||||||
|
refactored the stub connections quite a bit to add support for the
|
||||||
|
putrequest and putheader calls. This version also adds support for
|
||||||
|
httplib2 (thanks @nilp0inter). I have added a couple tests for boto
|
||||||
|
since it is an http client in its own right. Finally, this version
|
||||||
|
includes a fix for a bug where requests wasn't being patched properly
|
||||||
|
(thanks @msabramo).
|
||||||
|
- 0.6.0: Store response headers as a list since a HTTP response can
|
||||||
|
have the same header twice (happens with set-cookie sometimes). This
|
||||||
|
has the added benefit of preserving the order of headers. Thanks
|
||||||
|
@smallcode for the bug report leading to this change. I have made an
|
||||||
|
effort to ensure backwards compatibility with the old cassettes'
|
||||||
|
header storage mechanism, but if you want to upgrade to the new
|
||||||
|
header storage, you should delete your cassettes and re-record them.
|
||||||
|
Also this release adds better error messages (thanks @msabramo) and
|
||||||
|
adds support for using VCR as a decorator (thanks @smallcode for the
|
||||||
|
motivation)
|
||||||
|
- 0.5.0: Change the ``response_of`` method to ``responses_of`` since
|
||||||
|
cassettes can now contain more than one response for a request. Since
|
||||||
|
this changes the API, I'm bumping the version. Also includes 2
|
||||||
|
bugfixes: a better error message when attempting to overwrite a
|
||||||
|
cassette file, and a fix for a bug with requests sessions (thanks
|
||||||
|
@msabramo)
|
||||||
|
- 0.4.0: Change default request recording behavior for multiple
|
||||||
|
requests. If you make the same request multiple times to the same
|
||||||
|
URL, the response might be different each time (maybe the response
|
||||||
|
has a timestamp in it or something), so this will make the same
|
||||||
|
request multiple times and save them all. Then, when you are
|
||||||
|
replaying the cassette, the responses will be played back in the same
|
||||||
|
order in which they were received. If you were making multiple
|
||||||
|
requests to the same URL in a cassette before version 0.4.0, you
|
||||||
|
might need to regenerate your cassette files. Also, removes support
|
||||||
|
for the cassette.play\_count counter API, since individual requests
|
||||||
|
aren't unique anymore. A cassette might contain the same request
|
||||||
|
several times. Also removes secure overwrite feature since that was
|
||||||
|
breaking overwriting files in Windows, and fixes a bug preventing
|
||||||
|
request's automatic body decompression from working.
|
||||||
|
- 0.3.5: Fix compatibility with requests 2.x
|
||||||
|
- 0.3.4: Bugfix: close file before renaming it. This fixes an issue on
|
||||||
|
Windows. Thanks @smallcode for the fix.
|
||||||
|
- 0.3.3: Bugfix for error message when an unregistered custom matcher
|
||||||
|
was used
|
||||||
|
- 0.3.2: Fix issue with new config syntax and the ``match_on``
|
||||||
|
parameter. Thanks, @chromy!
|
||||||
|
- 0.3.1: Fix issue causing full paths to be sent on the HTTP request
|
||||||
|
line.
|
||||||
|
- 0.3.0: *Backwards incompatible release* - Added support for record
|
||||||
|
modes, and changed the default recording behavior to the "once"
|
||||||
|
record mode. Please see the documentation on record modes for more.
|
||||||
|
Added support for custom request matching, and changed the default
|
||||||
|
request matching behavior to match only on the URL and method. Also,
|
||||||
|
improved the httplib mocking to add support for the
|
||||||
|
``HTTPConnection.send()`` method. This means that requests won't
|
||||||
|
actually be sent until the response is read, since I need to record
|
||||||
|
the entire request in order to match up the appropriate response. I
|
||||||
|
don't think this should cause any issues unless you are sending
|
||||||
|
requests without ever loading the response (which none of the
|
||||||
|
standard httplib wrappers do, as far as I know). Thanks to @fatuhoku
|
||||||
|
for some of the ideas and the motivation behind this release.
|
||||||
|
- 0.2.1: Fixed missing modules in setup.py
|
||||||
|
- 0.2.0: Added configuration API, which lets you configure some
|
||||||
|
settings on VCR (see the README). Also, VCR no longer saves cassettes
|
||||||
|
if they haven't changed at all and supports JSON as well as YAML
|
||||||
|
(thanks @sirpengi). Added amazing new skeumorphic logo, thanks
|
||||||
|
@hairarrow.
|
||||||
|
- 0.1.0: *backwards incompatible release - delete your old cassette
|
||||||
|
files*: This release adds the ability to access the cassette to make
|
||||||
|
assertions on it, as well as a major code refactor thanks to
|
||||||
|
@dlecocq. It also fixes a couple longstanding bugs with redirects and
|
||||||
|
HTTPS. [#3 and #4]
|
||||||
|
- 0.0.4: If you have libyaml installed, vcrpy will use the c bindings
|
||||||
|
instead. Speed up your tests! Thanks @dlecocq
|
||||||
|
- 0.0.3: Add support for requests 1.2.3. Support for older versions of
|
||||||
|
requests dropped (thanks @vitormazzi and @bryanhelmig)
|
||||||
|
- 0.0.2: Add support for requests / urllib3
|
||||||
|
- 0.0.1: Initial Release
|
||||||
docs/conf.py (new file, 291 lines)
@@ -0,0 +1,291 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# vcrpy documentation build configuration file, created by
|
||||||
|
# sphinx-quickstart on Sun Sep 13 11:18:00 2015.
|
||||||
|
#
|
||||||
|
# This file is execfile()d with the current directory set to its
|
||||||
|
# containing dir.
|
||||||
|
#
|
||||||
|
# Note that not all possible configuration values are present in this
|
||||||
|
# autogenerated file.
|
||||||
|
#
|
||||||
|
# All configuration values have a default; values that are commented out
|
||||||
|
# serve to show the default.
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
|
#sys.path.insert(0, os.path.abspath('.'))
|
||||||
|
|
||||||
|
# -- General configuration ------------------------------------------------
|
||||||
|
|
||||||
|
# If your documentation needs a minimal Sphinx version, state it here.
|
||||||
|
#needs_sphinx = '1.0'
|
||||||
|
|
||||||
|
# Add any Sphinx extension module names here, as strings. They can be
|
||||||
|
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||||
|
# ones.
|
||||||
|
extensions = [
|
||||||
|
'sphinx.ext.autodoc',
|
||||||
|
'sphinx.ext.intersphinx',
|
||||||
|
'sphinx.ext.coverage',
|
||||||
|
'sphinx.ext.viewcode',
|
||||||
|
]
|
||||||
|
|
||||||
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
|
templates_path = ['_templates']
|
||||||
|
|
||||||
|
# The suffix(es) of source filenames.
|
||||||
|
# You can specify multiple suffix as a list of string:
|
||||||
|
# source_suffix = ['.rst', '.md']
|
||||||
|
source_suffix = '.rst'
|
||||||
|
|
||||||
|
# The encoding of source files.
|
||||||
|
#source_encoding = 'utf-8-sig'
|
||||||
|
|
||||||
|
# The master toctree document.
|
||||||
|
master_doc = 'index'
|
||||||
|
|
||||||
|
# General information about the project.
|
||||||
|
project = u'vcrpy'
|
||||||
|
copyright = u'2015, Kevin McCarthy'
|
||||||
|
author = u'Kevin McCarthy'
|
||||||
|
|
||||||
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
# built documents.
|
||||||
|
#
|
||||||
|
# The short X.Y version.
|
||||||
|
version = '1.7.4'
|
||||||
|
# The full version, including alpha/beta/rc tags.
|
||||||
|
release = '1.7.4'
|
||||||
|
|
||||||
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
|
# for a list of supported languages.
|
||||||
|
#
|
||||||
|
# This is also used if you do content translation via gettext catalogs.
|
||||||
|
# Usually you set "language" from the command line for these cases.
|
||||||
|
language = None
|
||||||
|
|
||||||
|
# There are two options for replacing |today|: either, you set today to some
|
||||||
|
# non-false value, then it is used:
|
||||||
|
#today = ''
|
||||||
|
# Else, today_fmt is used as the format for a strftime call.
|
||||||
|
#today_fmt = '%B %d, %Y'
|
||||||
|
|
||||||
|
# List of patterns, relative to source directory, that match files and
|
||||||
|
# directories to ignore when looking for source files.
|
||||||
|
exclude_patterns = ['_build']
|
||||||
|
|
||||||
|
# The reST default role (used for this markup: `text`) to use for all
|
||||||
|
# documents.
|
||||||
|
#default_role = None
|
||||||
|
|
||||||
|
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||||
|
#add_function_parentheses = True
|
||||||
|
|
||||||
|
# If true, the current module name will be prepended to all description
|
||||||
|
# unit titles (such as .. function::).
|
||||||
|
#add_module_names = True
|
||||||
|
|
||||||
|
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||||
|
# output. They are ignored by default.
|
||||||
|
#show_authors = False
|
||||||
|
|
||||||
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
|
# A list of ignored prefixes for module index sorting.
|
||||||
|
#modindex_common_prefix = []
|
||||||
|
|
||||||
|
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||||
|
#keep_warnings = False
|
||||||
|
|
||||||
|
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||||
|
todo_include_todos = False
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for HTML output ----------------------------------------------
|
||||||
|
|
||||||
|
# The theme to use for HTML and HTML Help pages.
|
||||||
|
# https://read-the-docs.readthedocs.org/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
|
||||||
|
if 'READTHEDOCS' not in os.environ:
|
||||||
|
import sphinx_rtd_theme
|
||||||
|
html_theme = 'sphinx_rtd_theme'
|
||||||
|
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||||
|
|
||||||
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
|
# further. For a list of options available for each theme, see the
|
||||||
|
# documentation.
|
||||||
|
#html_theme_options = {}
|
||||||
|
|
||||||
|
# The name for this set of Sphinx documents. If None, it defaults to
|
||||||
|
# "<project> v<release> documentation".
|
||||||
|
#html_title = None
|
||||||
|
|
||||||
|
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||||
|
#html_short_title = None
|
||||||
|
|
||||||
|
# The name of an image file (relative to this directory) to place at the top
|
||||||
|
# of the sidebar.
|
||||||
|
#html_logo = None
|
||||||
|
|
||||||
|
# The name of an image file (within the static path) to use as favicon of the
|
||||||
|
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||||
|
# pixels large.
|
||||||
|
#html_favicon = None
|
||||||
|
|
||||||
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
|
html_static_path = ['_static']
|
||||||
|
|
||||||
|
# Add any extra paths that contain custom files (such as robots.txt or
|
||||||
|
# .htaccess) here, relative to this directory. These files are copied
|
||||||
|
# directly to the root of the documentation.
|
||||||
|
#html_extra_path = []
|
||||||
|
|
||||||
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
|
# using the given strftime format.
|
||||||
|
#html_last_updated_fmt = '%b %d, %Y'
|
||||||
|
|
||||||
|
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||||
|
# typographically correct entities.
|
||||||
|
#html_use_smartypants = True
|
||||||
|
|
||||||
|
# Custom sidebar templates, maps document names to template names.
|
||||||
|
#html_sidebars = {}
|
||||||
|
|
||||||
|
# Additional templates that should be rendered to pages, maps page names to
|
||||||
|
# template names.
|
||||||
|
#html_additional_pages = {}
|
||||||
|
|
||||||
|
# If false, no module index is generated.
|
||||||
|
#html_domain_indices = True
|
||||||
|
|
||||||
|
# If false, no index is generated.
|
||||||
|
#html_use_index = True
|
||||||
|
|
||||||
|
# If true, the index is split into individual pages for each letter.
|
||||||
|
#html_split_index = False
|
||||||
|
|
||||||
|
# If true, links to the reST sources are added to the pages.
|
||||||
|
#html_show_sourcelink = True
|
||||||
|
|
||||||
|
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||||
|
#html_show_sphinx = True
|
||||||
|
|
||||||
|
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||||
|
#html_show_copyright = True
|
||||||
|
|
||||||
|
# If true, an OpenSearch description file will be output, and all pages will
|
||||||
|
# contain a <link> tag referring to it. The value of this option must be the
|
||||||
|
# base URL from which the finished HTML is served.
|
||||||
|
#html_use_opensearch = ''
|
||||||
|
|
||||||
|
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||||
|
#html_file_suffix = None
|
||||||
|
|
||||||
|
# Language to be used for generating the HTML full-text search index.
|
||||||
|
# Sphinx supports the following languages:
|
||||||
|
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
|
||||||
|
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
|
||||||
|
#html_search_language = 'en'
|
||||||
|
|
||||||
|
# A dictionary with options for the search language support, empty by default.
|
||||||
|
# Now only 'ja' uses this config value
|
||||||
|
#html_search_options = {'type': 'default'}
|
||||||
|
|
||||||
|
# The name of a javascript file (relative to the configuration directory) that
|
||||||
|
# implements a search results scorer. If empty, the default will be used.
|
||||||
|
#html_search_scorer = 'scorer.js'
|
||||||
|
|
||||||
|
# Output file base name for HTML help builder.
|
||||||
|
htmlhelp_basename = 'vcrpydoc'
|
||||||
|
|
||||||
|
# -- Options for LaTeX output ---------------------------------------------
|
||||||
|
|
||||||
|
latex_elements = {
|
||||||
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
|
#'papersize': 'letterpaper',
|
||||||
|
|
||||||
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
|
#'pointsize': '10pt',
|
||||||
|
|
||||||
|
# Additional stuff for the LaTeX preamble.
|
||||||
|
#'preamble': '',
|
||||||
|
|
||||||
|
# Latex figure (float) alignment
|
||||||
|
#'figure_align': 'htbp',
|
||||||
|
}
|
||||||
|
|
||||||
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
|
# (source start file, target name, title,
|
||||||
|
# author, documentclass [howto, manual, or own class]).
|
||||||
|
latex_documents = [
|
||||||
|
(master_doc, 'vcrpy.tex', u'vcrpy Documentation',
|
||||||
|
u'Kevin McCarthy', 'manual'),
|
||||||
|
]
|
||||||
|
|
||||||
|
# The name of an image file (relative to this directory) to place at the top of
|
||||||
|
# the title page.
|
||||||
|
#latex_logo = None
|
||||||
|
|
||||||
|
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||||
|
# not chapters.
|
||||||
|
#latex_use_parts = False
|
||||||
|
|
||||||
|
# If true, show page references after internal links.
|
||||||
|
#latex_show_pagerefs = False
|
||||||
|
|
||||||
|
# If true, show URL addresses after external links.
|
||||||
|
#latex_show_urls = False
|
||||||
|
|
||||||
|
# Documents to append as an appendix to all manuals.
|
||||||
|
#latex_appendices = []
|
||||||
|
|
||||||
|
# If false, no module index is generated.
|
||||||
|
#latex_domain_indices = True
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for manual page output ---------------------------------------
|
||||||
|
|
||||||
|
# One entry per manual page. List of tuples
|
||||||
|
# (source start file, name, description, authors, manual section).
|
||||||
|
man_pages = [
|
||||||
|
(master_doc, 'vcrpy', u'vcrpy Documentation',
|
||||||
|
[author], 1)
|
||||||
|
]
|
||||||
|
|
||||||
|
# If true, show URL addresses after external links.
|
||||||
|
#man_show_urls = False
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for Texinfo output -------------------------------------------
|
||||||
|
|
||||||
|
# Grouping the document tree into Texinfo files. List of tuples
|
||||||
|
# (source start file, target name, title, author,
|
||||||
|
# dir menu entry, description, category)
|
||||||
|
texinfo_documents = [
|
||||||
|
(master_doc, 'vcrpy', u'vcrpy Documentation',
|
||||||
|
author, 'vcrpy', 'One line description of project.',
|
||||||
|
'Miscellaneous'),
|
||||||
|
]
|
||||||
|
|
||||||
|
# Documents to append as an appendix to all manuals.
|
||||||
|
#texinfo_appendices = []
|
||||||
|
|
||||||
|
# If false, no module index is generated.
|
||||||
|
#texinfo_domain_indices = True
|
||||||
|
|
||||||
|
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||||
|
#texinfo_show_urls = 'footnote'
|
||||||
|
|
||||||
|
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||||
|
#texinfo_no_detailmenu = False
|
||||||
|
|
||||||
|
|
||||||
|
# Example configuration for intersphinx: refer to the Python standard library.
|
||||||
|
intersphinx_mapping = {'https://docs.python.org/': None}
|
||||||
docs/configuration.rst (new file, 60 lines)
@@ -0,0 +1,60 @@
|
|||||||
|
Configuration
|
||||||
|
=============
|
||||||
|
|
||||||
|
If you don't like VCR's defaults, you can set options by instantiating a
|
||||||
|
``VCR`` class and setting the options on it.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
|
||||||
|
import vcr
|
||||||
|
|
||||||
|
my_vcr = vcr.VCR(
|
||||||
|
serializer='json',
|
||||||
|
cassette_library_dir='fixtures/cassettes',
|
||||||
|
record_mode='once',
|
||||||
|
match_on=['uri', 'method'],
|
||||||
|
)
|
||||||
|
|
||||||
|
with my_vcr.use_cassette('test.json'):
|
||||||
|
# your http code here
|
||||||
|
|
||||||
|
Otherwise, you can override options each time you use a cassette.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
with vcr.use_cassette('test.yml', serializer='json', record_mode='once'):
|
||||||
|
# your http code here
|
||||||
|
|
||||||
|
Note: Per-cassette overrides take precedence over the global config.
|
||||||
|
|
||||||
|
Request matching
|
||||||
|
----------------
|
||||||
|
|
||||||
|
Request matching is configurable and allows you to change which requests
|
||||||
|
VCR considers identical. The default behavior is
|
||||||
|
``['method', 'scheme', 'host', 'port', 'path', 'query']`` which means
|
||||||
|
that requests with both the same URL and method (i.e. POST or GET) are
|
||||||
|
considered identical.
|
||||||
|
|
||||||
|
This can be configured by changing the ``match_on`` setting.
|
||||||
|
|
||||||
|
The following options are available:
|
||||||
|
|
||||||
|
- method (for example, POST or GET)
|
||||||
|
- uri (the full URI.)
|
||||||
|
- host (the hostname of the server receiving the request)
|
||||||
|
- port (the port of the server receiving the request)
|
||||||
|
- path (the path of the request)
|
||||||
|
- query (the query string of the request)
|
||||||
|
- raw\_body (the entire request body as is)
|
||||||
|
- body (the entire request body unmarshalled by content-type
|
||||||
|
i.e. xmlrpc, json, form-urlencoded, falling back on raw\_body)
|
||||||
|
- headers (the headers of the request)
|
||||||
|
|
||||||
|
Backwards compatible matchers:
|
||||||
|
- url (the ``uri`` alias)
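As a sketch, adding body matching on top of the defaults looks like this:

.. code:: python

    import vcr

    my_vcr = vcr.VCR(
        match_on=['method', 'scheme', 'host', 'port', 'path', 'query', 'body'],
    )

    with my_vcr.use_cassette('test.yml'):
        # your http code here
        pass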
|
||||||
|
|
||||||
|
If these options don't work for you, you can also register your own
|
||||||
|
request matcher. This is described in the Advanced section of this
|
||||||
|
README.
|
||||||
docs/contributing.rst (new file, 25 lines)
@@ -0,0 +1,25 @@
|
|||||||
|
Contributing
|
||||||
|
============
|
||||||
|
|
||||||
|
Running VCR's test suite
|
||||||
|
------------------------
|
||||||
|
|
||||||
|
The tests are all run automatically on `Travis
|
||||||
|
CI <https://travis-ci.org/kevin1024/vcrpy>`__, but you can also run them
|
||||||
|
yourself using `py.test <http://pytest.org/>`__ and
|
||||||
|
`Tox <http://tox.testrun.org/>`__. Tox will automatically run them in
|
||||||
|
all environments VCR.py supports. The test suite is pretty big and slow,
|
||||||
|
but you can tell tox to only run specific tests like this::
|
||||||
|
|
||||||
|
tox -e py27requests -- -v -k "'test_status_code or test_gzip'"
|
||||||
|
|
||||||
|
This will run only tests that look like ``test_status_code`` or
|
||||||
|
``test_gzip`` in the test suite, and only in the python 2.7 environment
|
||||||
|
that has ``requests`` installed.
|
||||||
|
|
||||||
|
Also, in order for the boto tests to run, you will need an AWS key.
|
||||||
|
Refer to the `boto
|
||||||
|
documentation <http://boto.readthedocs.org/en/latest/getting_started.html>`__
|
||||||
|
for how to set this up. I have marked the boto tests as optional in
|
||||||
|
Travis so you don't have to worry about them failing if you submit a
|
||||||
|
pull request.
|
||||||
docs/debugging.rst (new file, 31 lines)
@@ -0,0 +1,31 @@
|
|||||||
|
Debugging
|
||||||
|
=========
|
||||||
|
|
||||||
|
VCR.py has a few log messages you can turn on to help you figure out if
|
||||||
|
HTTP requests are hitting a real server or not. You can turn them on
|
||||||
|
like this:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
import vcr
|
||||||
|
import requests
|
||||||
|
import logging
|
||||||
|
|
||||||
|
logging.basicConfig() # you need to initialize logging, otherwise you will not see anything from vcrpy
|
||||||
|
vcr_log = logging.getLogger("vcr")
|
||||||
|
vcr_log.setLevel(logging.INFO)
|
||||||
|
|
||||||
|
with vcr.use_cassette('headers.yml'):
|
||||||
|
requests.get('http://httpbin.org/headers')
|
||||||
|
|
||||||
|
The first time you run this, you will see::
|
||||||
|
|
||||||
|
INFO:vcr.stubs:<Request (GET) http://httpbin.org/headers> not in cassette, sending to real server
|
||||||
|
|
||||||
|
The second time, you will see::
|
||||||
|
|
||||||
|
INFO:vcr.stubs:Playing response for <Request (GET) http://httpbin.org/headers> from cassette
|
||||||
|
|
||||||
|
If you set the loglevel to DEBUG, you will also get information about
|
||||||
|
which matchers didn't match. This can help you with debugging custom
|
||||||
|
matchers.
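To see that matcher detail, raise the level on the same logger used above:

.. code:: python

    vcr_log.setLevel(logging.DEBUG)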
|
||||||
docs/index.rst (new file, 24 lines)
@@ -0,0 +1,24 @@
|
|||||||
|
.. include:: ../README.rst
|
||||||
|
|
||||||
|
Contents
|
||||||
|
========
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
|
installation
|
||||||
|
usage
|
||||||
|
configuration
|
||||||
|
advanced
|
||||||
|
api
|
||||||
|
debugging
|
||||||
|
contributing
|
||||||
|
changelog
|
||||||
|
|
||||||
|
==================
|
||||||
|
Indices and tables
|
||||||
|
==================
|
||||||
|
|
||||||
|
* :ref:`genindex`
|
||||||
|
* :ref:`modindex`
|
||||||
|
* :ref:`search`
|
||||||
docs/installation.rst (new file, 84 lines)
@@ -0,0 +1,84 @@
|
|||||||
|
Installation
|
||||||
|
============
|
||||||
|
|
||||||
|
VCR.py is a package on `PyPI <https://pypi.python.org>`__, so you can install
|
||||||
|
with pip::
|
||||||
|
|
||||||
|
pip install vcrpy
|
||||||
|
|
||||||
|
Compatibility
|
||||||
|
-------------
|
||||||
|
|
||||||
|
VCR.py supports Python 2.6, 2.7, 3.3, 3.4, and
|
||||||
|
`pypy <http://pypy.org>`__.
|
||||||
|
|
||||||
|
The following http libraries are supported:
|
||||||
|
|
||||||
|
- urllib2
|
||||||
|
- urllib3
|
||||||
|
- http.client (python3)
|
||||||
|
- requests (both 1.x and 2.x versions)
|
||||||
|
- httplib2
|
||||||
|
- boto
|
||||||
|
- Tornado's AsyncHTTPClient
|
||||||
|
|
||||||
|
Speed
|
||||||
|
-----
|
||||||
|
|
||||||
|
VCR.py runs about 10x faster when `pyyaml <http://pyyaml.org>`__ can use the
|
||||||
|
`libyaml extensions <http://pyyaml.org/wiki/LibYAML>`__. In order for this to
|
||||||
|
work, libyaml needs to be available when pyyaml is built. Additionally, the
|
||||||
|
built package is cached by pip, so you might need to explicitly avoid the cache when
|
||||||
|
rebuilding pyyaml.
|
||||||
|
|
||||||
|
1. Test if pyyaml is built with libyaml. This should work::
|
||||||
|
|
||||||
|
python -c 'from yaml import CLoader'
|
||||||
|
|
||||||
|
2. Install libyaml according to your Linux distribution, or using `Homebrew
|
||||||
|
<http://mxcl.github.com/homebrew/>`__ on Mac::
|
||||||
|
|
||||||
|
brew install libyaml # Mac with Homebrew
|
||||||
|
apt-get install libyaml-dev # Ubuntu
|
||||||
|
dnf install libyaml-devel # Fedora
|
||||||
|
|
||||||
|
3. Rebuild pyyaml with libyaml::
|
||||||
|
|
||||||
|
pip uninstall pyyaml
|
||||||
|
pip --no-cache-dir install pyyaml
|
||||||
|
|
||||||
|
Upgrade
|
||||||
|
-------
|
||||||
|
|
||||||
|
New Cassette Format
|
||||||
|
~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
The cassette format has changed in *VCR.py 1.x*, the *VCR.py 0.x*
|
||||||
|
cassettes cannot be used with *VCR.py 1.x*. The easiest way to upgrade
|
||||||
|
is to simply delete your cassettes and re-record all of them. VCR.py
|
||||||
|
also provides a migration script that attempts to upgrade your 0.x
|
||||||
|
cassettes to the new 1.x format. To use it, run the following command::
|
||||||
|
|
||||||
|
python -m vcr.migration PATH
|
||||||
|
|
||||||
|
The PATH can be either a path to the directory with cassettes or the
|
||||||
|
path to a single cassette.
|
||||||
|
|
||||||
|
*Note*: Back up your cassette files before migration. The migration
|
||||||
|
*should* only modify cassettes using the old 0.x format.
|
||||||
|
|
||||||
|
New serializer / deserializer API
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
If you made a custom serializer, you will need to update it to match the
|
||||||
|
new API in version 1.0.x.
|
||||||
|
|
||||||
|
- Serializers now take dicts and return strings.
|
||||||
|
- Deserializers take strings and return dicts (instead of requests,
|
||||||
|
responses pair)
|
||||||
|
|
||||||
|
Ruby VCR compatibility
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
VCR.py does not aim to match the format of the Ruby VCR YAML files.
|
||||||
|
Cassettes generated by Ruby's VCR are not compatible with VCR.py.
|
||||||
docs/usage.rst (new file, 97 lines)
@@ -0,0 +1,97 @@
|
|||||||
|
Usage
|
||||||
|
=====
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
import vcr
|
||||||
|
import urllib2
|
||||||
|
|
||||||
|
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
|
||||||
|
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
|
||||||
|
assert 'Example domains' in response
|
||||||
|
|
||||||
|
Run this test once, and VCR.py will record the HTTP request to
|
||||||
|
``fixtures/vcr_cassettes/synopsis.yaml``. Run it again, and VCR.py will
|
||||||
|
replay the response from iana.org when the http request is made. This
|
||||||
|
test is now fast (no real HTTP requests are made anymore), deterministic
|
||||||
|
(the test will continue to pass, even if you are offline, or iana.org
|
||||||
|
goes down for maintenance) and accurate (the response will contain the
|
||||||
|
same headers and body you get from a real request).
|
||||||
|
|
||||||
|
You can also use VCR.py as a decorator. The same request above would
|
||||||
|
look like this:
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
@vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml')
|
||||||
|
def test_iana():
|
||||||
|
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
|
||||||
|
assert 'Example domains' in response
|
||||||
|
|
||||||
|
When using the decorator version of ``use_cassette``, it is possible to
|
||||||
|
omit the path to the cassette file.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
@vcr.use_cassette()
|
||||||
|
def test_iana():
|
||||||
|
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
|
||||||
|
assert 'Example domains' in response
|
||||||
|
|
||||||
|
In this case, the cassette file will be given the same name as the test
|
||||||
|
function, and it will be placed in the same directory as the file in
|
||||||
|
which the test is defined. See the Automatic Test Naming section below
|
||||||
|
for more details.
|
||||||
|
|
||||||
|
Record Modes
|
||||||
|
------------
|
||||||
|
|
||||||
|
VCR supports 4 record modes (with the same behavior as Ruby's VCR):
|
||||||
|
|
||||||
|
once
|
||||||
|
~~~~
|
||||||
|
|
||||||
|
- Replay previously recorded interactions.
|
||||||
|
- Record new interactions if there is no cassette file.
|
||||||
|
- Cause an error to be raised for new requests if there is a cassette
|
||||||
|
file.
|
||||||
|
|
||||||
|
It is similar to the new\_episodes record mode, but will prevent new,
|
||||||
|
unexpected requests from being made (i.e. because the request URI
|
||||||
|
changed).
|
||||||
|
|
||||||
|
once is the default record mode, used when you do not set one.
|
||||||
|
|
||||||
|
new\_episodes
|
||||||
|
~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
- Record new interactions.
|
||||||
|
- Replay previously recorded interactions. It is similar to the once
|
||||||
|
record mode, but will always record new interactions, even if you
|
||||||
|
have an existing recorded one that is similar, but not identical.
|
||||||
|
|
||||||
|
This was the default behavior in versions < 0.3.0
|
||||||
|
|
||||||
|
none
|
||||||
|
~~~~
|
||||||
|
|
||||||
|
- Replay previously recorded interactions.
|
||||||
|
- Cause an error to be raised for any new requests. This is useful when
|
||||||
|
your code makes potentially dangerous HTTP requests. The none record
|
||||||
|
mode guarantees that no new HTTP requests will be made.
|
||||||
|
|
||||||
|
all
|
||||||
|
~~~
|
||||||
|
|
||||||
|
- Record new interactions.
|
||||||
|
- Never replay previously recorded interactions. This can be
|
||||||
|
temporarily used to force VCR to re-record a cassette (i.e. to ensure
|
||||||
|
the responses are not out of date) or can be used when you simply
|
||||||
|
want to log all HTTP requests.
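Record modes are selected with the ``record_mode`` option, either on a ``VCR``
instance or per cassette; a short sketch:

.. code:: python

    import vcr

    my_vcr = vcr.VCR(record_mode='new_episodes')

    with my_vcr.use_cassette('test.yml'):
        # recorded/replayed according to new_episodes
        pass

    # Per-cassette override, e.g. to force a re-record:
    with my_vcr.use_cassette('rerecord.yml', record_mode='all'):
        pass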
|
||||||
|
|
||||||
|
Unittest Integration
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
While it's possible to use the context manager or decorator forms with unittest,
|
||||||
|
there's also a ``VCRTestCase`` provided separately by `vcrpy-unittest
|
||||||
|
<https://github.com/agriffis/vcrpy-unittest>`__.
|
||||||
runtests.sh (new executable file, 3 lines)
@@ -0,0 +1,3 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
REQUESTS_CA_BUNDLE=`python -m pytest_httpbin.certs` py.test $1
|
||||||
setup.py (5 lines changed)
@@ -51,7 +51,7 @@ except Exception:
|
|||||||
|
|
||||||
setup(
|
setup(
|
||||||
name='vcrpy',
|
name='vcrpy',
|
||||||
version='1.7.3',
|
version='1.9.0',
|
||||||
description=(
|
description=(
|
||||||
"Automatically mock your HTTP interactions to simplify and "
|
"Automatically mock your HTTP interactions to simplify and "
|
||||||
"speed up testing"
|
"speed up testing"
|
||||||
@@ -64,8 +64,7 @@ setup(
|
|||||||
install_requires=install_requires,
|
install_requires=install_requires,
|
||||||
extras_require=extras_require,
|
extras_require=extras_require,
|
||||||
license='MIT',
|
license='MIT',
|
||||||
tests_require=['pytest', 'mock', 'pytest-localserver'],
|
tests_require=['pytest', 'mock', 'pytest-httpbin'],
|
||||||
cmdclass={'test': PyTest},
|
|
||||||
classifiers=[
|
classifiers=[
|
||||||
'Development Status :: 4 - Beta',
|
'Development Status :: 4 - Beta',
|
||||||
'Environment :: Console',
|
'Environment :: Console',
|
||||||
|
|||||||
@@ -9,74 +9,63 @@ from six.moves.urllib.request import urlopen
|
|||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
|
|
||||||
def test_nonexistent_directory(tmpdir):
|
def test_nonexistent_directory(tmpdir, httpbin):
|
||||||
'''If we load a cassette in a nonexistent directory, it can save ok'''
|
'''If we load a cassette in a nonexistent directory, it can save ok'''
|
||||||
# Check to make sure directory doesnt exist
|
# Check to make sure directory doesnt exist
|
||||||
assert not os.path.exists(str(tmpdir.join('nonexistent')))
|
assert not os.path.exists(str(tmpdir.join('nonexistent')))
|
||||||
|
|
||||||
# Run VCR to create dir and cassette file
|
# Run VCR to create dir and cassette file
|
||||||
with vcr.use_cassette(str(tmpdir.join('nonexistent', 'cassette.yml'))):
|
with vcr.use_cassette(str(tmpdir.join('nonexistent', 'cassette.yml'))):
|
||||||
urlopen('http://httpbin.org/').read()
|
urlopen(httpbin.url).read()
|
||||||
|
|
||||||
# This should have made the file and the directory
|
# This should have made the file and the directory
|
||||||
assert os.path.exists(str(tmpdir.join('nonexistent', 'cassette.yml')))
|
assert os.path.exists(str(tmpdir.join('nonexistent', 'cassette.yml')))
|
||||||
|
|
||||||
|
|
||||||
def test_unpatch(tmpdir):
|
def test_unpatch(tmpdir, httpbin):
|
||||||
'''Ensure that our cassette gets unpatched when we're done'''
|
'''Ensure that our cassette gets unpatched when we're done'''
|
||||||
with vcr.use_cassette(str(tmpdir.join('unpatch.yaml'))) as cass:
|
with vcr.use_cassette(str(tmpdir.join('unpatch.yaml'))) as cass:
|
||||||
urlopen('http://httpbin.org/').read()
|
urlopen(httpbin.url).read()
|
||||||
|
|
||||||
# Make the same request, and assert that we haven't served any more
|
# Make the same request, and assert that we haven't served any more
|
||||||
# requests out of cache
|
# requests out of cache
|
||||||
urlopen('http://httpbin.org/').read()
|
urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 0
|
assert cass.play_count == 0
|
||||||
|
|
||||||
|
|
||||||
def test_basic_use(tmpdir):
|
def test_basic_json_use(tmpdir, httpbin):
|
||||||
'''
|
|
||||||
Copied from the docs
|
|
||||||
'''
|
|
||||||
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
|
|
||||||
response = urlopen(
|
|
||||||
'http://www.iana.org/domains/reserved'
|
|
||||||
).read()
|
|
||||||
assert b'Example domains' in response
|
|
||||||
|
|
||||||
|
|
||||||
def test_basic_json_use(tmpdir):
|
|
||||||
'''
|
'''
|
||||||
Ensure you can load a json serialized cassette
|
Ensure you can load a json serialized cassette
|
||||||
'''
|
'''
|
||||||
test_fixture = 'fixtures/vcr_cassettes/synopsis.json'
|
test_fixture = str(tmpdir.join('synopsis.json'))
|
||||||
with vcr.use_cassette(test_fixture, serializer='json'):
|
with vcr.use_cassette(test_fixture, serializer='json'):
|
||||||
response = urlopen('http://httpbin.org/').read()
|
response = urlopen(httpbin.url).read()
|
||||||
assert b'difficult sometimes' in response
|
assert b'difficult sometimes' in response
|
||||||
|
|
||||||
|
|
||||||
def test_patched_content(tmpdir):
|
def test_patched_content(tmpdir, httpbin):
|
||||||
'''
|
'''
|
||||||
Ensure that what you pull from a cassette is what came from the
|
Ensure that what you pull from a cassette is what came from the
|
||||||
request
|
request
|
||||||
'''
|
'''
|
||||||
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
|
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
|
||||||
response = urlopen('http://httpbin.org/').read()
|
response = urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 0
|
assert cass.play_count == 0
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
|
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
|
||||||
response2 = urlopen('http://httpbin.org/').read()
|
response2 = urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
cass._save(force=True)
|
cass._save(force=True)
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
|
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
|
||||||
response3 = urlopen('http://httpbin.org/').read()
|
response3 = urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
assert response == response2
|
assert response == response2
|
||||||
assert response2 == response3
|
assert response2 == response3
|
||||||
|
|
||||||
|
|
||||||
def test_patched_content_json(tmpdir):
|
def test_patched_content_json(tmpdir, httpbin):
|
||||||
'''
|
'''
|
||||||
Ensure that what you pull from a json cassette is what came from the
|
Ensure that what you pull from a json cassette is what came from the
|
||||||
request
|
request
|
||||||
@@ -85,16 +74,16 @@ def test_patched_content_json(tmpdir):
|
|||||||
testfile = str(tmpdir.join('synopsis.json'))
|
testfile = str(tmpdir.join('synopsis.json'))
|
||||||
|
|
||||||
with vcr.use_cassette(testfile) as cass:
|
with vcr.use_cassette(testfile) as cass:
|
||||||
response = urlopen('http://httpbin.org/').read()
|
response = urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 0
|
assert cass.play_count == 0
|
||||||
|
|
||||||
with vcr.use_cassette(testfile) as cass:
|
with vcr.use_cassette(testfile) as cass:
|
||||||
response2 = urlopen('http://httpbin.org/').read()
|
response2 = urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
cass._save(force=True)
|
cass._save(force=True)
|
||||||
|
|
||||||
with vcr.use_cassette(testfile) as cass:
|
with vcr.use_cassette(testfile) as cass:
|
||||||
response3 = urlopen('http://httpbin.org/').read()
|
response3 = urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
assert response == response2
|
assert response == response2
|
||||||
|
|||||||
@@ -1,11 +1,17 @@
 import pytest
 boto = pytest.importorskip("boto")
-import boto
-import boto.iam
-from boto.s3.connection import S3Connection
-from boto.s3.key import Key
-from ConfigParser import DuplicateSectionError
-import vcr
+import boto # NOQA
+import boto.iam # NOQA
+from boto.s3.connection import S3Connection # NOQA
+from boto.s3.key import Key # NOQA
+import vcr # NOQA
+try: # NOQA
+    from ConfigParser import DuplicateSectionError # NOQA
+except ImportError: # NOQA
+    # python3
+    from configparser import DuplicateSectionError # NOQA


 def test_boto_stubs(tmpdir):
     with vcr.use_cassette(str(tmpdir.join('boto-stubs.yml'))):
@@ -17,42 +23,45 @@ def test_boto_stubs(tmpdir):
         assert issubclass(CertValidatingHTTPSConnection, VCRCertValidatingHTTPSConnection)
         CertValidatingHTTPSConnection('hostname.does.not.matter')


 def test_boto_without_vcr():
     s3_conn = S3Connection()
     s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
     k = Key(s3_bucket)
     k.key = 'test.txt'
     k.set_contents_from_string('hello world i am a string')


 def test_boto_medium_difficulty(tmpdir):
     s3_conn = S3Connection()
     s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
-    with vcr.use_cassette(str(tmpdir.join('boto-medium.yml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('boto-medium.yml'))):
         k = Key(s3_bucket)
         k.key = 'test.txt'
         k.set_contents_from_string('hello world i am a string')

-    with vcr.use_cassette(str(tmpdir.join('boto-medium.yml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('boto-medium.yml'))):
         k = Key(s3_bucket)
         k.key = 'test.txt'
         k.set_contents_from_string('hello world i am a string')


 def test_boto_hardcore_mode(tmpdir):
-    with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))):
         s3_conn = S3Connection()
         s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
         k = Key(s3_bucket)
         k.key = 'test.txt'
         k.set_contents_from_string('hello world i am a string')

-    with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))):
         s3_conn = S3Connection()
         s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
         k = Key(s3_bucket)
         k.key = 'test.txt'
         k.set_contents_from_string('hello world i am a string')


 def test_boto_iam(tmpdir):
     try:
         boto.config.add_section('Boto')
@@ -63,10 +72,10 @@ def test_boto_iam(tmpdir):
     # Ensure that boto uses CertValidatingHTTPSConnection
     boto.config.set('Boto', 'https_validate_certificates', 'true')

-    with vcr.use_cassette(str(tmpdir.join('boto-iam.yml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('boto-iam.yml'))):
         iam_conn = boto.iam.connect_to_region('universal')
         iam_conn.get_all_users()

-    with vcr.use_cassette(str(tmpdir.join('boto-iam.yml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('boto-iam.yml'))):
         iam_conn = boto.iam.connect_to_region('universal')
         iam_conn.get_all_users()
67 tests/integration/test_boto3.py Normal file
@@ -0,0 +1,67 @@
+import pytest
+boto3 = pytest.importorskip("boto3")
+
+import boto3 # NOQA
+import vcr # NOQA
+
+bucket = 'boto3-demo-1337' # a bucket you can access
+key = 'test/my_test.txt' # key with r+w access
+content = 'hello world i am a string' # content to put in the test file
+
+
+def test_boto_stubs(tmpdir):
+    with vcr.use_cassette(str(tmpdir.join('boto3-stubs.yml'))):
+        # Perform the imports within the patched context so that
+        # HTTPConnection, VerifiedHTTPSConnection refers to the patched version.
+        from botocore.vendored.requests.packages.urllib3.connectionpool import \
+            HTTPConnection, VerifiedHTTPSConnection
+        from vcr.stubs.boto3_stubs import VCRRequestsHTTPConnection, VCRRequestsHTTPSConnection
+        # Prove that the class was patched by the stub and that we can instantiate it.
+        assert issubclass(HTTPConnection, VCRRequestsHTTPConnection)
+        assert issubclass(VerifiedHTTPSConnection, VCRRequestsHTTPSConnection)
+        HTTPConnection('hostname.does.not.matter')
+        VerifiedHTTPSConnection('hostname.does.not.matter')
+
+
+def test_boto3_without_vcr():
+    s3_resource = boto3.resource('s3')
+    b = s3_resource.Bucket(bucket)
+    b.put_object(Key=key, Body=content)
+
+    # retrieve content to check it
+    o = s3_resource.Object(bucket, key).get()
+
+    # decode for python3
+    assert content == o['Body'].read().decode('utf-8')
+
+
+def test_boto_medium_difficulty(tmpdir):
+    s3_resource = boto3.resource('s3')
+    b = s3_resource.Bucket(bucket)
+    with vcr.use_cassette(str(tmpdir.join('boto3-medium.yml'))):
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+
+    with vcr.use_cassette(str(tmpdir.join('boto3-medium.yml'))) as cass:
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+        assert cass.all_played
+
+
+def test_boto_hardcore_mode(tmpdir):
+    with vcr.use_cassette(str(tmpdir.join('boto3-hardcore.yml'))):
+        s3_resource = boto3.resource('s3')
+        b = s3_resource.Bucket(bucket)
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+
+    with vcr.use_cassette(str(tmpdir.join('boto3-hardcore.yml'))) as cass:
+        s3_resource = boto3.resource('s3')
+        b = s3_resource.Bucket(bucket)
+        b.put_object(Key=key, Body=content)
+        o = s3_resource.Object(bucket, key).get()
+        assert content == o['Body'].read().decode('utf-8')
+        assert cass.all_played
@@ -5,46 +5,46 @@ import vcr
 from six.moves.urllib.request import urlopen


-def test_set_serializer_default_config(tmpdir):
+def test_set_serializer_default_config(tmpdir, httpbin):
     my_vcr = vcr.VCR(serializer='json')

     with my_vcr.use_cassette(str(tmpdir.join('test.json'))):
         assert my_vcr.serializer == 'json'
-        urlopen('http://httpbin.org/get')
+        urlopen(httpbin.url + '/get')

     with open(str(tmpdir.join('test.json'))) as f:
         assert json.loads(f.read())


-def test_default_set_cassette_library_dir(tmpdir):
+def test_default_set_cassette_library_dir(tmpdir, httpbin):
     my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join('subdir')))

     with my_vcr.use_cassette('test.json'):
-        urlopen('http://httpbin.org/get')
+        urlopen(httpbin.url + '/get')

     assert os.path.exists(str(tmpdir.join('subdir').join('test.json')))


-def test_override_set_cassette_library_dir(tmpdir):
+def test_override_set_cassette_library_dir(tmpdir, httpbin):
     my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join('subdir')))

     cld = str(tmpdir.join('subdir2'))

     with my_vcr.use_cassette('test.json', cassette_library_dir=cld):
-        urlopen('http://httpbin.org/get')
+        urlopen(httpbin.url + '/get')

     assert os.path.exists(str(tmpdir.join('subdir2').join('test.json')))
     assert not os.path.exists(str(tmpdir.join('subdir').join('test.json')))


-def test_override_match_on(tmpdir):
+def test_override_match_on(tmpdir, httpbin):
     my_vcr = vcr.VCR(match_on=['method'])

     with my_vcr.use_cassette(str(tmpdir.join('test.json'))):
-        urlopen('http://httpbin.org/')
+        urlopen(httpbin.url)

     with my_vcr.use_cassette(str(tmpdir.join('test.json'))) as cass:
-        urlopen('http://httpbin.org/get')
+        urlopen(httpbin.url + '/get')

     assert len(cass) == 1
     assert cass.play_count == 1
@@ -10,19 +10,19 @@ from six.moves.urllib.request import urlopen
 import vcr


-def test_disk_saver_nowrite(tmpdir):
+def test_disk_saver_nowrite(tmpdir, httpbin):
     '''
     Ensure that when you close a cassette without changing it it doesn't
     rewrite the file
     '''
     fname = str(tmpdir.join('synopsis.yaml'))
     with vcr.use_cassette(fname) as cass:
-        urlopen('http://www.iana.org/domains/reserved').read()
+        urlopen(httpbin.url).read()
         assert cass.play_count == 0
     last_mod = os.path.getmtime(fname)

     with vcr.use_cassette(fname) as cass:
-        urlopen('http://www.iana.org/domains/reserved').read()
+        urlopen(httpbin.url).read()
         assert cass.play_count == 1
         assert cass.dirty is False
     last_mod2 = os.path.getmtime(fname)
@@ -30,14 +30,14 @@ def test_disk_saver_nowrite(tmpdir):
     assert last_mod == last_mod2


-def test_disk_saver_write(tmpdir):
+def test_disk_saver_write(tmpdir, httpbin):
     '''
     Ensure that when you close a cassette after changing it it does
     rewrite the file
     '''
     fname = str(tmpdir.join('synopsis.yaml'))
     with vcr.use_cassette(fname) as cass:
-        urlopen('http://www.iana.org/domains/reserved').read()
+        urlopen(httpbin.url).read()
         assert cass.play_count == 0
     last_mod = os.path.getmtime(fname)

@@ -46,8 +46,8 @@ def test_disk_saver_write(tmpdir):
     time.sleep(1)

     with vcr.use_cassette(fname, record_mode='any') as cass:
-        urlopen('http://www.iana.org/domains/reserved').read()
-        urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()
+        urlopen(httpbin.url + '/get').read()
         assert cass.play_count == 1
         assert cass.dirty
     last_mod2 = os.path.getmtime(fname)
@@ -5,6 +5,7 @@ from six.moves.urllib.parse import urlencode
 from six.moves.urllib.error import HTTPError
 import vcr
 import json
+from assertions import assert_cassette_has_one_response, assert_is_json


 def _request_with_auth(url, username, password):
@@ -17,15 +18,11 @@ def _request_with_auth(url, username, password):


 def _find_header(cassette, header):
-    for request in cassette.requests:
-        for k in request.headers:
-            if header.lower() == k.lower():
-                return True
-    return False
+    return any(header in request.headers for request in cassette.requests)


-def test_filter_basic_auth(tmpdir):
-    url = 'http://httpbin.org/basic-auth/user/passwd'
+def test_filter_basic_auth(tmpdir, httpbin):
+    url = httpbin.url + '/basic-auth/user/passwd'
     cass_file = str(tmpdir.join('basic_auth_filter.yaml'))
     my_vcr = vcr.VCR(match_on=['uri', 'method', 'headers'])
     # 2 requests, one with auth failure and one with auth success
@@ -47,8 +44,8 @@ def test_filter_basic_auth(tmpdir):
     assert len(cass) == 2


-def test_filter_querystring(tmpdir):
-    url = 'http://httpbin.org/?foo=bar'
+def test_filter_querystring(tmpdir, httpbin):
+    url = httpbin.url + '/?foo=bar'
     cass_file = str(tmpdir.join('filter_qs.yaml'))
     with vcr.use_cassette(cass_file, filter_query_parameters=['foo']):
         urlopen(url)
@@ -57,8 +54,8 @@ def test_filter_querystring(tmpdir):
     assert 'foo' not in cass.requests[0].url


-def test_filter_post_data(tmpdir):
-    url = 'http://httpbin.org/post'
+def test_filter_post_data(tmpdir, httpbin):
+    url = httpbin.url + '/post'
     data = urlencode({'id': 'secret', 'foo': 'bar'}).encode('utf-8')
     cass_file = str(tmpdir.join('filter_pd.yaml'))
     with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']):
@@ -67,9 +64,9 @@ def test_filter_post_data(tmpdir):
     assert b'id=secret' not in cass.requests[0].body


-def test_filter_json_post_data(tmpdir):
+def test_filter_json_post_data(tmpdir, httpbin):
     data = json.dumps({'id': 'secret', 'foo': 'bar'}).encode('utf-8')
-    request = Request('http://httpbin.org/post', data=data)
+    request = Request(httpbin.url + '/post', data=data)
     request.add_header('Content-Type', 'application/json')

     cass_file = str(tmpdir.join('filter_jpd.yaml'))
@@ -79,12 +76,14 @@ def test_filter_json_post_data(tmpdir):
     assert b'"id": "secret"' not in cass.requests[0].body


-def test_filter_callback(tmpdir):
-    url = 'http://httpbin.org/get'
+def test_filter_callback(tmpdir, httpbin):
+    url = httpbin.url + '/get'
     cass_file = str(tmpdir.join('basic_auth_filter.yaml'))

     def before_record_cb(request):
         if request.path != '/get':
             return request

     # Test the legacy keyword.
     my_vcr = vcr.VCR(before_record=before_record_cb)
     with my_vcr.use_cassette(cass_file, filter_headers=['authorization']) as cass:
@@ -95,3 +94,39 @@ def test_filter_callback(tmpdir):
     with my_vcr.use_cassette(cass_file, filter_headers=['authorization']) as cass:
         urlopen(url)
         assert len(cass) == 0
+
+
+def test_decompress_gzip(tmpdir, httpbin):
+    url = httpbin.url + '/gzip'
+    request = Request(url, headers={'Accept-Encoding': ['gzip, deflate']})
+    cass_file = str(tmpdir.join('gzip_response.yaml'))
+    with vcr.use_cassette(cass_file, decode_compressed_response=True):
+        urlopen(request)
+    with vcr.use_cassette(cass_file) as cass:
+        decoded_response = urlopen(url).read()
+        assert_cassette_has_one_response(cass)
+        assert_is_json(decoded_response)
+
+
+def test_decompress_deflate(tmpdir, httpbin):
+    url = httpbin.url + '/deflate'
+    request = Request(url, headers={'Accept-Encoding': ['gzip, deflate']})
+    cass_file = str(tmpdir.join('deflate_response.yaml'))
+    with vcr.use_cassette(cass_file, decode_compressed_response=True):
+        urlopen(request)
+    with vcr.use_cassette(cass_file) as cass:
+        decoded_response = urlopen(url).read()
+        assert_cassette_has_one_response(cass)
+        assert_is_json(decoded_response)
+
+
+def test_decompress_regular(tmpdir, httpbin):
+    """Test that it doesn't try to decompress content that isn't compressed"""
+    url = httpbin.url + '/get'
+    cass_file = str(tmpdir.join('noncompressed_response.yaml'))
+    with vcr.use_cassette(cass_file, decode_compressed_response=True):
+        urlopen(url)
+    with vcr.use_cassette(cass_file) as cass:
+        resp = urlopen(url).read()
+        assert_cassette_has_one_response(cass)
+        assert_is_json(resp)
@@ -4,6 +4,7 @@
 # External imports
 from six.moves.urllib_parse import urlencode
 import pytest
+import pytest_httpbin.certs

 # Internal imports
 import vcr
@@ -13,139 +14,141 @@ from assertions import assert_cassette_has_one_response
 httplib2 = pytest.importorskip("httplib2")


-@pytest.fixture(params=["https", "http"])
-def scheme(request):
+def http():
     """
-    Fixture that returns both http and https
+    Returns an httplib2 HTTP instance
+    with the certificate replaced by the httpbin one.
     """
-    return request.param
+    return httplib2.Http(ca_certs=pytest_httpbin.certs.where())


-def test_response_code(scheme, tmpdir):
+def test_response_code(tmpdir, httpbin_both):
     '''Ensure we can read a response code from a fetch'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both.url
-    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
-        resp, _ = httplib2.Http().request(url)
+        resp, _ = http().request(url)
         code = resp.status

-    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
-        resp, _ = httplib2.Http().request(url)
+        resp, _ = http().request(url)
         assert code == resp.status


-def test_random_body(scheme, tmpdir):
+def test_random_body(httpbin_both, tmpdir):
     '''Ensure we can read the content, and that it's served from cache'''
-    url = scheme + '://httpbin.org/bytes/1024'
+    url = httpbin_both.url + '/bytes/1024'
-    with vcr.use_cassette(str(tmpdir.join('body.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
-        _, content = httplib2.Http().request(url)
+        _, content = http().request(url)
         body = content

-    with vcr.use_cassette(str(tmpdir.join('body.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
-        _, content = httplib2.Http().request(url)
+        _, content = http().request(url)
         assert body == content


-def test_response_headers(scheme, tmpdir):
+def test_response_headers(tmpdir, httpbin_both):
     '''Ensure we can get information from the response'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both.url
-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
-        resp, _ = httplib2.Http().request(url)
+        resp, _ = http().request(url)
         headers = resp.items()

-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
-        resp, _ = httplib2.Http().request(url)
+        resp, _ = http().request(url)
         assert set(headers) == set(resp.items())


-def test_effective_url(scheme, tmpdir):
-    '''Ensure that the effective_url is captured'''
-    url = scheme + '://httpbin.org/redirect-to?url=/html'
-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
-        resp, _ = httplib2.Http().request(url)
-        effective_url = resp['content-location']
-        assert effective_url == scheme + '://httpbin.org/html'
-
-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
-        resp, _ = httplib2.Http().request(url)
+def test_effective_url(tmpdir, httpbin_both):
+    '''Ensure that the effective_url is captured'''
+    url = httpbin_both.url + '/redirect-to?url=/html'
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
+        resp, _ = http().request(url)
+        effective_url = resp['content-location']
+        assert effective_url == httpbin_both + '/html'
+
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
+        resp, _ = http().request(url)
         assert effective_url == resp['content-location']


-def test_multiple_requests(scheme, tmpdir):
+def test_multiple_requests(tmpdir, httpbin_both):
     '''Ensure that we can cache multiple requests'''
     urls = [
-        scheme + '://httpbin.org/',
-        scheme + '://httpbin.org/',
-        scheme + '://httpbin.org/get',
-        scheme + '://httpbin.org/bytes/1024'
+        httpbin_both.url,
+        httpbin_both.url,
+        httpbin_both.url + '/get',
+        httpbin_both.url + '/bytes/1024',
     ]
     with vcr.use_cassette(str(tmpdir.join('multiple.yaml'))) as cass:
-        [httplib2.Http().request(url) for url in urls]
+        [http().request(url) for url in urls]
         assert len(cass) == len(urls)


-def test_get_data(scheme, tmpdir):
+def test_get_data(tmpdir, httpbin_both):
     '''Ensure that it works with query data'''
     data = urlencode({'some': 1, 'data': 'here'})
-    url = scheme + '://httpbin.org/get?' + data
+    url = httpbin_both.url + '/get?' + data
-    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
-        _, res1 = httplib2.Http().request(url)
+        _, res1 = http().request(url)

-    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
-        _, res2 = httplib2.Http().request(url)
+        _, res2 = http().request(url)

     assert res1 == res2


-def test_post_data(scheme, tmpdir):
+def test_post_data(tmpdir, httpbin_both):
     '''Ensure that it works when posting data'''
     data = urlencode({'some': 1, 'data': 'here'})
-    url = scheme + '://httpbin.org/post'
+    url = httpbin_both.url + '/post'
-    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
-        _, res1 = httplib2.Http().request(url, "POST", data)
+        _, res1 = http().request(url, "POST", data)

     with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
-        _, res2 = httplib2.Http().request(url, "POST", data)
+        _, res2 = http().request(url, "POST", data)

     assert res1 == res2
     assert_cassette_has_one_response(cass)


-def test_post_unicode_data(scheme, tmpdir):
+def test_post_unicode_data(tmpdir, httpbin_both):
     '''Ensure that it works when posting unicode data'''
     data = urlencode({'snowman': u'☃'.encode('utf-8')})
-    url = scheme + '://httpbin.org/post'
+    url = httpbin_both.url + '/post'
-    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
+    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
-        _, res1 = httplib2.Http().request(url, "POST", data)
+        _, res1 = http().request(url, "POST", data)

     with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
-        _, res2 = httplib2.Http().request(url, "POST", data)
+        _, res2 = http().request(url, "POST", data)

     assert res1 == res2
     assert_cassette_has_one_response(cass)


-def test_cross_scheme(tmpdir):
+def test_cross_scheme(tmpdir, httpbin, httpbin_secure):
     '''Ensure that requests between schemes are treated separately'''
     # First fetch a url under https, and then again under https and then
     # ensure that we haven't served anything out of cache, and we have two
     # requests / response pairs in the cassette
     with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
-        httplib2.Http().request('https://httpbin.org/')
-        httplib2.Http().request('http://httpbin.org/')
+        http().request(httpbin_secure.url)
+        http().request(httpbin.url)
         assert len(cass) == 2
         assert cass.play_count == 0


-def test_decorator(scheme, tmpdir):
+def test_decorator(tmpdir, httpbin_both):
     '''Test the decorator version of VCR.py'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both.url

     @vcr.use_cassette(str(tmpdir.join('atts.yaml')))
     def inner1():
-        resp, _ = httplib2.Http().request(url)
+        resp, _ = http().request(url)
         return resp['status']

     @vcr.use_cassette(str(tmpdir.join('atts.yaml')))
     def inner2():
-        resp, _ = httplib2.Http().request(url)
+        resp, _ = http().request(url)
         return resp['status']

     assert inner1() == inner2()
@@ -1,55 +1,73 @@
-import base64
-import pytest
-from six.moves.urllib.request import urlopen, Request
-from six.moves.urllib.error import HTTPError
+from six.moves.urllib.request import urlopen
+import socket
+from contextlib import contextmanager
 import vcr


-def test_ignore_localhost(tmpdir, httpserver):
-    httpserver.serve_content('Hello!')
-    cass_file = str(tmpdir.join('filter_qs.yaml'))
-    with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
-        urlopen(httpserver.url)
-        assert len(cass) == 0
-        urlopen('http://httpbin.org')
-        assert len(cass) == 1
-
-
-def test_ignore_httpbin(tmpdir, httpserver):
-    httpserver.serve_content('Hello!')
-    cass_file = str(tmpdir.join('filter_qs.yaml'))
-    with vcr.use_cassette(
-        cass_file,
-        ignore_hosts=['httpbin.org']
-    ) as cass:
-        urlopen('http://httpbin.org')
-        assert len(cass) == 0
-        urlopen(httpserver.url)
-        assert len(cass) == 1
-
-
-def test_ignore_localhost_and_httpbin(tmpdir, httpserver):
-    httpserver.serve_content('Hello!')
-    cass_file = str(tmpdir.join('filter_qs.yaml'))
-    with vcr.use_cassette(
-        cass_file,
-        ignore_hosts=['httpbin.org'],
-        ignore_localhost=True
-    ) as cass:
-        urlopen('http://httpbin.org')
-        urlopen(httpserver.url)
-        assert len(cass) == 0
-
-def test_ignore_localhost_twice(tmpdir, httpserver):
-    httpserver.serve_content('Hello!')
-    cass_file = str(tmpdir.join('filter_qs.yaml'))
-    with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
-        urlopen(httpserver.url)
-        assert len(cass) == 0
-        urlopen('http://httpbin.org')
-        assert len(cass) == 1
-    with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
-        assert len(cass) == 1
-        urlopen(httpserver.url)
-        urlopen('http://httpbin.org')
-        assert len(cass) == 1
+@contextmanager
+def overridden_dns(overrides):
+    """
+    Monkeypatch socket.getaddrinfo() to override DNS lookups (name will resolve
+    to address)
+    """
+    real_getaddrinfo = socket.getaddrinfo
+
+    def fake_getaddrinfo(*args, **kwargs):
+        if args[0] in overrides:
+            address = overrides[args[0]]
+            return [(2, 1, 6, '', (address, args[1]))]
+        return real_getaddrinfo(*args, **kwargs)
+    socket.getaddrinfo = fake_getaddrinfo
+    yield
+    socket.getaddrinfo = real_getaddrinfo
+
+
+def test_ignore_localhost(tmpdir, httpbin):
+    with overridden_dns({'httpbin.org': '127.0.0.1'}):
+        cass_file = str(tmpdir.join('filter_qs.yaml'))
+        with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
+            urlopen('http://localhost:{0}/'.format(httpbin.port))
+            assert len(cass) == 0
+            urlopen('http://httpbin.org:{0}/'.format(httpbin.port))
+            assert len(cass) == 1
+
+
+def test_ignore_httpbin(tmpdir, httpbin):
+    with overridden_dns({'httpbin.org': '127.0.0.1'}):
+        cass_file = str(tmpdir.join('filter_qs.yaml'))
+        with vcr.use_cassette(
+            cass_file,
+            ignore_hosts=['httpbin.org']
+        ) as cass:
+            urlopen('http://httpbin.org:{0}/'.format(httpbin.port))
+            assert len(cass) == 0
+            urlopen('http://localhost:{0}/'.format(httpbin.port))
+            assert len(cass) == 1
+
+
+def test_ignore_localhost_and_httpbin(tmpdir, httpbin):
+    with overridden_dns({'httpbin.org': '127.0.0.1'}):
+        cass_file = str(tmpdir.join('filter_qs.yaml'))
+        with vcr.use_cassette(
+            cass_file,
+            ignore_hosts=['httpbin.org'],
+            ignore_localhost=True
+        ) as cass:
+            urlopen('http://httpbin.org:{0}'.format(httpbin.port))
+            urlopen('http://localhost:{0}'.format(httpbin.port))
+            assert len(cass) == 0
+
+
+def test_ignore_localhost_twice(tmpdir, httpbin):
+    with overridden_dns({'httpbin.org': '127.0.0.1'}):
+        cass_file = str(tmpdir.join('filter_qs.yaml'))
+        with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
+            urlopen('http://localhost:{0}'.format(httpbin.port))
+            assert len(cass) == 0
+            urlopen('http://httpbin.org:{0}'.format(httpbin.port))
+            assert len(cass) == 1
+        with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
+            assert len(cass) == 1
+            urlopen('http://localhost:{0}'.format(httpbin.port))
+            urlopen('http://httpbin.org:{0}'.format(httpbin.port))
+            assert len(cass) == 1
@@ -6,15 +6,21 @@ from six.moves.urllib.request import urlopen
 DEFAULT_URI = 'http://httpbin.org/get?p1=q1&p2=q2' # base uri for testing


+def _replace_httpbin(uri, httpbin, httpbin_secure):
+    return uri.replace('http://httpbin.org', httpbin.url).replace('https://httpbin.org', httpbin_secure.url)
+
+
 @pytest.fixture
-def cassette(tmpdir):
+def cassette(tmpdir, httpbin, httpbin_secure):
     """
     Helper fixture used to prepare the cassete
     returns path to the recorded cassette
     """
+    default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
+
     cassette_path = str(tmpdir.join('test.yml'))
     with vcr.use_cassette(cassette_path, record_mode='all'):
-        urlopen(DEFAULT_URI)
+        urlopen(default_uri)
     return cassette_path


@@ -28,20 +34,22 @@ def cassette(tmpdir):
     ('host',
      'https://httpbin.org/post?a=b',
      'http://google.com/get?p1=q1&p2=q2'),
-    ('port',
-     'https://google.com:80/post?a=b',
-     'http://httpbin.org:5000/get?p1=q1&p2=q2'),
     ('path',
      'https://google.com/get?a=b',
      'http://httpbin.org/post?p1=q1&p2=q2'),
     ('query',
      'https://google.com/get?p2=q2&p1=q1',
      'http://httpbin.org/get?p1=q1&a=b')
 ])
-def test_matchers(cassette, matcher, matching_uri, not_matching_uri):
+def test_matchers(httpbin, httpbin_secure, cassette, matcher, matching_uri, not_matching_uri):
+    matching_uri = _replace_httpbin(matching_uri, httpbin, httpbin_secure)
+    not_matching_uri = _replace_httpbin(not_matching_uri, httpbin, httpbin_secure)
+    default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)

     # play cassette with default uri
     with vcr.use_cassette(cassette, match_on=[matcher]) as cass:
-        urlopen(DEFAULT_URI)
+        urlopen(default_uri)
         assert cass.play_count == 1

     # play cassette with matching on uri
@@ -55,7 +63,9 @@ def test_matchers(cassette, matcher, matching_uri, not_matching_uri):
             urlopen(not_matching_uri)


-def test_method_matcher(cassette):
+def test_method_matcher(cassette, httpbin, httpbin_secure):
+    default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
+
     # play cassette with matching on method
     with vcr.use_cassette(cassette, match_on=['method']) as cass:
         urlopen('https://google.com/get?a=b')
@@ -65,7 +75,7 @@ def test_method_matcher(cassette):
     with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
         with vcr.use_cassette(cassette, match_on=['method']) as cass:
             # is a POST request
-            urlopen(DEFAULT_URI, data=b'')
+            urlopen(default_uri, data=b'')


 @pytest.mark.parametrize("uri", [
@@ -73,7 +83,10 @@ def test_method_matcher(cassette):
     'http://httpbin.org/get?p2=q2&p1=q1',
     'http://httpbin.org/get?p2=q2&p1=q1',
 ])
-def test_default_matcher_matches(cassette, uri):
+def test_default_matcher_matches(cassette, uri, httpbin, httpbin_secure):
+    uri = _replace_httpbin(uri, httpbin, httpbin_secure)
+
     with vcr.use_cassette(cassette) as cass:
         urlopen(uri)
         assert cass.play_count == 1
@@ -82,18 +95,19 @@ def test_default_matcher_matches(cassette, uri):
 @pytest.mark.parametrize("uri", [
     'https://httpbin.org/get?p1=q1&p2=q2',
     'http://google.com/get?p1=q1&p2=q2',
-    'http://httpbin.org:5000/get?p1=q1&p2=q2',
     'http://httpbin.org/post?p1=q1&p2=q2',
     'http://httpbin.org/get?p1=q1&a=b'
 ])
-def test_default_matcher_does_not_match(cassette, uri):
+def test_default_matcher_does_not_match(cassette, uri, httpbin, httpbin_secure):
+    uri = _replace_httpbin(uri, httpbin, httpbin_secure)
     with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
         with vcr.use_cassette(cassette):
             urlopen(uri)


-def test_default_matcher_does_not_match_on_method(cassette):
+def test_default_matcher_does_not_match_on_method(cassette, httpbin, httpbin_secure):
+    default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
     with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
         with vcr.use_cassette(cassette):
             # is a POST request
-            urlopen(DEFAULT_URI, data=b'')
+            urlopen(default_uri, data=b'')
@@ -3,18 +3,18 @@ import vcr
 from six.moves.urllib.request import urlopen


-def test_making_extra_request_raises_exception(tmpdir):
+def test_making_extra_request_raises_exception(tmpdir, httpbin):
     # make two requests in the first request that are considered
     # identical (since the match is based on method)
     with vcr.use_cassette(str(tmpdir.join('test.json')), match_on=['method']):
-        urlopen('http://httpbin.org/status/200')
-        urlopen('http://httpbin.org/status/201')
+        urlopen(httpbin.url + '/status/200')
+        urlopen(httpbin.url + '/status/201')

     # Now, try to make three requests. The first two should return the
     # correct status codes in order, and the third should raise an
     # exception.
     with vcr.use_cassette(str(tmpdir.join('test.json')), match_on=['method']):
-        assert urlopen('http://httpbin.org/status/200').getcode() == 200
-        assert urlopen('http://httpbin.org/status/201').getcode() == 201
+        assert urlopen(httpbin.url + '/status/200').getcode() == 200
+        assert urlopen(httpbin.url + '/status/201').getcode() == 201
         with pytest.raises(Exception):
-            urlopen('http://httpbin.org/status/200')
+            urlopen(httpbin.url + '/status/200')
@@ -1,65 +1,64 @@
-import os
 import pytest
 import vcr
 from six.moves.urllib.request import urlopen


-def test_once_record_mode(tmpdir):
+def test_once_record_mode(tmpdir, httpbin):
     testfile = str(tmpdir.join('recordmode.yml'))
     with vcr.use_cassette(testfile, record_mode="once"):
         # cassette file doesn't exist, so create.
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()

-    with vcr.use_cassette(testfile, record_mode="once") as cass:
+    with vcr.use_cassette(testfile, record_mode="once"):
         # make the same request again
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()

         # the first time, it's played from the cassette.
         # but, try to access something else from the same cassette, and an
         # exception is raised.
         with pytest.raises(Exception):
-            response = urlopen('http://httpbin.org/get').read()
+            urlopen(httpbin.url + '/get').read()


-def test_once_record_mode_two_times(tmpdir):
+def test_once_record_mode_two_times(tmpdir, httpbin):
     testfile = str(tmpdir.join('recordmode.yml'))
     with vcr.use_cassette(testfile, record_mode="once"):
         # get two of the same file
-        response1 = urlopen('http://httpbin.org/').read()
-        response2 = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()
+        urlopen(httpbin.url).read()

-    with vcr.use_cassette(testfile, record_mode="once") as cass:
+    with vcr.use_cassette(testfile, record_mode="once"):
         # do it again
-        response = urlopen('http://httpbin.org/').read()
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()
+        urlopen(httpbin.url).read()


-def test_once_mode_three_times(tmpdir):
+def test_once_mode_three_times(tmpdir, httpbin):
     testfile = str(tmpdir.join('recordmode.yml'))
     with vcr.use_cassette(testfile, record_mode="once"):
         # get three of the same file
-        response1 = urlopen('http://httpbin.org/').read()
-        response2 = urlopen('http://httpbin.org/').read()
-        response2 = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()
+        urlopen(httpbin.url).read()
+        urlopen(httpbin.url).read()


-def test_new_episodes_record_mode(tmpdir):
+def test_new_episodes_record_mode(tmpdir, httpbin):
     testfile = str(tmpdir.join('recordmode.yml'))

     with vcr.use_cassette(testfile, record_mode="new_episodes"):
         # cassette file doesn't exist, so create.
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()

     with vcr.use_cassette(testfile, record_mode="new_episodes") as cass:
         # make the same request again
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()

         # all responses have been played
         assert cass.all_played

         # in the "new_episodes" record mode, we can add more requests to
         # a cassette without repurcussions.
-        response = urlopen('http://httpbin.org/get').read()
+        urlopen(httpbin.url + '/get').read()

         # one of the responses has been played
         assert cass.play_count == 1
@@ -72,9 +71,9 @@ def test_new_episodes_record_mode(tmpdir):
         assert len(cass.responses) == 2


-def test_new_episodes_record_mode_two_times(tmpdir):
+def test_new_episodes_record_mode_two_times(tmpdir, httpbin):
     testfile = str(tmpdir.join('recordmode.yml'))
-    url = 'http://httpbin.org/bytes/1024'
+    url = httpbin.url + '/bytes/1024'
     with vcr.use_cassette(testfile, record_mode="new_episodes"):
         # cassette file doesn't exist, so create.
         original_first_response = urlopen(url).read()
@@ -97,20 +96,20 @@ def test_new_episodes_record_mode_two_times(tmpdir):
         urlopen(url).read()


-def test_all_record_mode(tmpdir):
+def test_all_record_mode(tmpdir, httpbin):
     testfile = str(tmpdir.join('recordmode.yml'))

     with vcr.use_cassette(testfile, record_mode="all"):
         # cassette file doesn't exist, so create.
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()

     with vcr.use_cassette(testfile, record_mode="all") as cass:
         # make the same request again
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()

         # in the "all" record mode, we can add more requests to
         # a cassette without repurcussions.
-        response = urlopen('http://httpbin.org/get').read()
+        urlopen(httpbin.url + '/get').read()

         # The cassette was never actually played, even though it existed.
         # that's because, in "all" mode, the requests all go directly to
@@ -118,26 +117,26 @@ def test_all_record_mode(tmpdir):
         assert cass.play_count == 0


-def test_none_record_mode(tmpdir):
+def test_none_record_mode(tmpdir, httpbin):
     # Cassette file doesn't exist, yet we are trying to make a request.
     # raise hell.
     testfile = str(tmpdir.join('recordmode.yml'))
     with vcr.use_cassette(testfile, record_mode="none"):
         with pytest.raises(Exception):
-            response = urlopen('http://httpbin.org/').read()
+            urlopen(httpbin.url).read()


-def test_none_record_mode_with_existing_cassette(tmpdir):
+def test_none_record_mode_with_existing_cassette(tmpdir, httpbin):
     # create a cassette file
     testfile = str(tmpdir.join('recordmode.yml'))

     with vcr.use_cassette(testfile, record_mode="all"):
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()

     # play from cassette file
     with vcr.use_cassette(testfile, record_mode="none") as cass:
-        response = urlopen('http://httpbin.org/').read()
+        urlopen(httpbin.url).read()
         assert cass.play_count == 1
         # but if I try to hit the net, raise an exception.
         with pytest.raises(Exception):
-            response = urlopen('http://httpbin.org/get').read()
+            urlopen(httpbin.url + '/get').read()
@@ -10,27 +10,27 @@ def false_matcher(r1, r2):
     return False


-def test_registered_true_matcher(tmpdir):
+def test_registered_true_matcher(tmpdir, httpbin):
     my_vcr = vcr.VCR()
     my_vcr.register_matcher('true', true_matcher)
     testfile = str(tmpdir.join('test.yml'))
-    with my_vcr.use_cassette(testfile, match_on=['true']) as cass:
+    with my_vcr.use_cassette(testfile, match_on=['true']):
         # These 2 different urls are stored as the same request
-        urlopen('http://httpbin.org/')
-        urlopen('https://httpbin.org/get')
+        urlopen(httpbin.url)
+        urlopen(httpbin.url + '/get')

-    with my_vcr.use_cassette(testfile, match_on=['true']) as cass:
+    with my_vcr.use_cassette(testfile, match_on=['true']):
         # I can get the response twice even though I only asked for it once
-        urlopen('http://httpbin.org/get')
-        urlopen('https://httpbin.org/get')
+        urlopen(httpbin.url + '/get')
+        urlopen(httpbin.url + '/get')


-def test_registered_false_matcher(tmpdir):
+def test_registered_false_matcher(tmpdir, httpbin):
     my_vcr = vcr.VCR()
     my_vcr.register_matcher('false', false_matcher)
     testfile = str(tmpdir.join('test.yml'))
     with my_vcr.use_cassette(testfile, match_on=['false']) as cass:
         # These 2 different urls are stored as different requests
-        urlopen('http://httpbin.org/')
-        urlopen('https://httpbin.org/get')
+        urlopen(httpbin.url)
+        urlopen(httpbin.url + '/get')
         assert len(cass) == 2
@@ -10,7 +10,7 @@ class MockSerializer(object):
     def deserialize(self, cassette_string):
         self.serialize_count += 1
         self.cassette_string = cassette_string
-        return {'interactions':[]}
+        return {'interactions': []}

     def serialize(self, cassette_dict):
         self.deserialize_count += 1
@@ -2,21 +2,18 @@ import vcr
 from six.moves.urllib.request import urlopen


-def test_recorded_request_uri_with_redirected_request(tmpdir):
+def test_recorded_request_uri_with_redirected_request(tmpdir, httpbin):
     with vcr.use_cassette(str(tmpdir.join('test.yml'))) as cass:
         assert len(cass) == 0
-        urlopen('http://httpbin.org/redirect/3')
-        assert cass.requests[0].uri == 'http://httpbin.org/redirect/3'
-        assert cass.requests[3].uri == 'http://httpbin.org/get'
+        urlopen(httpbin.url + '/redirect/3')
+        assert cass.requests[0].uri == httpbin.url + '/redirect/3'
+        assert cass.requests[3].uri == httpbin.url + '/get'
         assert len(cass) == 4


-def test_records_multiple_header_values(tmpdir, httpserver):
-    httpserver.serve_content('Hello!', headers=[('foo', 'bar'), ('foo', 'baz')])
+def test_records_multiple_header_values(tmpdir, httpbin):

     with vcr.use_cassette(str(tmpdir.join('test.yml'))) as cass:
         assert len(cass) == 0
-        urlopen(httpserver.url)
+        urlopen(httpbin.url + '/response-headers?foo=bar&foo=baz')
         assert len(cass) == 1
         assert cass.responses[0]['headers']['foo'] == ['bar', 'baz']
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 '''Test requests' interaction with vcr'''
-
 import pytest
 import vcr
 from assertions import assert_cassette_empty, assert_is_json
@@ -9,15 +8,9 @@ from assertions import assert_cassette_empty, assert_is_json
 requests = pytest.importorskip("requests")
 
 
-@pytest.fixture(params=["https", "http"])
-def scheme(request):
-    '''Fixture that returns both http and https.'''
-    return request.param
-
-
-def test_status_code(scheme, tmpdir):
+def test_status_code(httpbin_both, tmpdir):
     '''Ensure that we can read the status code'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both.url + '/'
     with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
         status_code = requests.get(url).status_code
 
@@ -25,9 +18,9 @@ def test_status_code(scheme, tmpdir):
     assert status_code == requests.get(url).status_code
 
 
-def test_headers(scheme, tmpdir):
+def test_headers(httpbin_both, tmpdir):
     '''Ensure that we can read the headers back'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both + '/'
     with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
         headers = requests.get(url).headers
 
@@ -35,29 +28,31 @@ def test_headers(scheme, tmpdir):
     assert headers == requests.get(url).headers
 
 
-def test_body(tmpdir, scheme):
+def test_body(tmpdir, httpbin_both):
     '''Ensure the responses are all identical enough'''
-    url = scheme + '://httpbin.org/bytes/1024'
+    url = httpbin_both + '/bytes/1024'
     with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
         content = requests.get(url).content
 
     with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
         assert content == requests.get(url).content
 
-def test_effective_url(scheme, tmpdir):
+
+def test_effective_url(tmpdir, httpbin_both):
     '''Ensure that the effective_url is captured'''
-    url = scheme + '://httpbin.org/redirect-to?url=/html'
+    url = httpbin_both.url + '/redirect-to?url=/html'
     with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
         effective_url = requests.get(url).url
-        assert effective_url == scheme + '://httpbin.org/html'
+        assert effective_url == httpbin_both.url + '/html'
 
     with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
         assert effective_url == requests.get(url).url
 
-def test_auth(tmpdir, scheme):
+
+def test_auth(tmpdir, httpbin_both):
     '''Ensure that we can handle basic auth'''
     auth = ('user', 'passwd')
-    url = scheme + '://httpbin.org/basic-auth/user/passwd'
+    url = httpbin_both + '/basic-auth/user/passwd'
     with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
         one = requests.get(url, auth=auth)
 
@@ -67,10 +62,10 @@ def test_auth(tmpdir, scheme):
     assert one.status_code == two.status_code
 
 
-def test_auth_failed(tmpdir, scheme):
+def test_auth_failed(tmpdir, httpbin_both):
     '''Ensure that we can save failed auth statuses'''
     auth = ('user', 'wrongwrongwrong')
-    url = scheme + '://httpbin.org/basic-auth/user/passwd'
+    url = httpbin_both + '/basic-auth/user/passwd'
     with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
         # Ensure that this is empty to begin with
         assert_cassette_empty(cass)
@@ -80,10 +75,10 @@ def test_auth_failed(tmpdir, scheme):
     assert one.status_code == two.status_code == 401
 
 
-def test_post(tmpdir, scheme):
+def test_post(tmpdir, httpbin_both):
     '''Ensure that we can post and cache the results'''
     data = {'key1': 'value1', 'key2': 'value2'}
-    url = scheme + '://httpbin.org/post'
+    url = httpbin_both + '/post'
     with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
         req1 = requests.post(url, data).content
 
@@ -93,9 +88,24 @@ def test_post(tmpdir, scheme):
     assert req1 == req2
 
 
-def test_redirects(tmpdir, scheme):
+def test_post_chunked_binary(tmpdir, httpbin_both):
+    '''Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str.'''
+    data1 = iter([b'data', b'to', b'send'])
+    data2 = iter([b'data', b'to', b'send'])
+    url = httpbin_both.url + '/post'
+    with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
+        req1 = requests.post(url, data1).content
+        print(req1)
+
+    with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
+        req2 = requests.post(url, data2).content
+
+    assert req1 == req2
+
+
+def test_redirects(tmpdir, httpbin_both):
     '''Ensure that we can handle redirects'''
-    url = scheme + '://httpbin.org/redirect-to?url=bytes/1024'
+    url = httpbin_both + '/redirect-to?url=bytes/1024'
     with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
         content = requests.get(url).content
 
@@ -107,24 +117,24 @@ def test_redirects(tmpdir, scheme):
     assert cass.play_count == 2
 
 
-def test_cross_scheme(tmpdir, scheme):
+def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
     '''Ensure that requests between schemes are treated separately'''
     # First fetch a url under http, and then again under https and then
     # ensure that we haven't served anything out of cache, and we have two
     # requests / response pairs in the cassette
     with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
-        requests.get('https://httpbin.org/')
-        requests.get('http://httpbin.org/')
+        requests.get(httpbin_secure + '/')
+        requests.get(httpbin + '/')
         assert cass.play_count == 0
         assert len(cass) == 2
 
 
-def test_gzip(tmpdir, scheme):
+def test_gzip(tmpdir, httpbin_both):
     '''
     Ensure that requests (actually urllib3) is able to automatically decompress
     the response body
     '''
-    url = scheme + '://httpbin.org/gzip'
+    url = httpbin_both + '/gzip'
     response = requests.get(url)
 
     with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
@@ -135,7 +145,7 @@ def test_gzip(tmpdir, scheme):
     assert_is_json(response.content)
 
 
-def test_session_and_connection_close(tmpdir, scheme):
+def test_session_and_connection_close(tmpdir, httpbin):
     '''
     This tests the issue in https://github.com/kevin1024/vcrpy/issues/48
 
@@ -146,29 +156,29 @@ def test_session_and_connection_close(tmpdir, scheme):
     with vcr.use_cassette(str(tmpdir.join('session_connection_closed.yaml'))):
         session = requests.session()
 
-        session.get('http://httpbin.org/get', headers={'Connection': 'close'})
-        session.get('http://httpbin.org/get', headers={'Connection': 'close'})
+        session.get(httpbin + '/get', headers={'Connection': 'close'})
+        session.get(httpbin + '/get', headers={'Connection': 'close'})
 
 
-def test_https_with_cert_validation_disabled(tmpdir):
+def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure):
     with vcr.use_cassette(str(tmpdir.join('cert_validation_disabled.yaml'))):
-        requests.get('https://httpbin.org', verify=False)
+        requests.get(httpbin_secure.url, verify=False)
 
 
-def test_session_can_make_requests_after_requests_unpatched(tmpdir):
+def test_session_can_make_requests_after_requests_unpatched(tmpdir, httpbin):
     with vcr.use_cassette(str(tmpdir.join('test_session_after_unpatched.yaml'))):
         session = requests.session()
-        session.get('http://httpbin.org/get')
+        session.get(httpbin + '/get')
 
     with vcr.use_cassette(str(tmpdir.join('test_session_after_unpatched.yaml'))):
         session = requests.session()
-        session.get('http://httpbin.org/get')
+        session.get(httpbin + '/get')
 
-    session.get('http://httpbin.org/status/200')
+    session.get(httpbin + '/status/200')
 
 
-def test_session_created_before_use_cassette_is_patched(tmpdir, scheme):
-    url = scheme + '://httpbin.org/bytes/1024'
+def test_session_created_before_use_cassette_is_patched(tmpdir, httpbin_both):
+    url = httpbin_both + '/bytes/1024'
     # Record arbitrary, random data to the cassette
     with vcr.use_cassette(str(tmpdir.join('session_created_outside.yaml'))):
         session = requests.session()
@@ -177,20 +187,20 @@ def test_session_created_before_use_cassette_is_patched(tmpdir, scheme):
     # Create a session outside of any cassette context manager
     session = requests.session()
     # Make a request to make sure that a connectionpool is instantiated
-    session.get(scheme + '://httpbin.org/get')
+    session.get(httpbin_both + '/get')
 
     with vcr.use_cassette(str(tmpdir.join('session_created_outside.yaml'))):
         # These should only be the same if the patching succeeded.
         assert session.get(url).content == body
 
 
-def test_nested_cassettes_with_session_created_before_nesting(scheme, tmpdir):
+def test_nested_cassettes_with_session_created_before_nesting(httpbin_both, tmpdir):
     '''
     This tests ensures that a session that was created while one cassette was
     active is patched to the use the responses of a second cassette when it
     is enabled.
     '''
-    url = scheme + '://httpbin.org/bytes/1024'
+    url = httpbin_both + '/bytes/1024'
     with vcr.use_cassette(str(tmpdir.join('first_nested.yaml'))):
         session = requests.session()
         first_body = session.get(url).content
@@ -206,12 +216,12 @@ def test_nested_cassettes_with_session_created_before_nesting(scheme, tmpdir):
         assert session.get(url).content == third_body
 
     # Make sure that the session can now get content normally.
-    session.get('http://www.reddit.com')
+    assert 'User-agent' in session.get(httpbin_both.url + '/robots.txt').text
 
 
-def test_post_file(tmpdir, scheme):
+def test_post_file(tmpdir, httpbin_both):
     '''Ensure that we handle posting a file.'''
-    url = scheme + '://httpbin.org/post'
+    url = httpbin_both + '/post'
     with vcr.use_cassette(str(tmpdir.join('post_file.yaml'))) as cass:
         # Don't use 2.7+ only style ',' separated with here because we support python 2.6
         with open('tox.ini') as f:
@@ -228,7 +238,7 @@ def test_post_file(tmpdir, scheme):
     assert original_response == new_response
 
 
-def test_filter_post_params(tmpdir, scheme):
+def test_filter_post_params(tmpdir, httpbin_both):
     '''
     This tests the issue in https://github.com/kevin1024/vcrpy/issues/158
 
@@ -236,10 +246,9 @@ def test_filter_post_params(tmpdir, scheme):
         with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']) as cass:
             assert b'id=secret' not in cass.requests[0].body
     '''
-    url = scheme + '://httpbin.org/post'
+    url = httpbin_both.url + '/post'
     cass_loc = str(tmpdir.join('filter_post_params.yaml'))
     with vcr.use_cassette(cass_loc, filter_post_data_parameters=['key']) as cass:
        requests.post(url, data={'key': 'value'})
     with vcr.use_cassette(cass_loc, filter_post_data_parameters=['key']) as cass:
        assert b'key=value' not in cass.requests[0].body
-
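test_filter_post_params above tracks issue #158: a filtered POST parameter must not leak into the recorded request body. Used in isolation, the option looks like this (parameter name and value are illustrative):

import requests
import vcr

def test_secret_is_scrubbed(tmpdir, httpbin):
    cassette = str(tmpdir.join('scrubbed.yaml'))
    with vcr.use_cassette(cassette, filter_post_data_parameters=['password']):
        requests.post(httpbin.url + '/post', data={'password': 'hunter2'})
    # Re-open the cassette and check the stored request body was scrubbed.
    with vcr.use_cassette(cassette, filter_post_data_parameters=['password']) as cass:
        assert b'hunter2' not in cass.requests[0].body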
@@ -1,42 +1,85 @@
 import vcr
+import zlib
 import six.moves.http_client as httplib
 
-def _headers_are_case_insensitive():
-    conn = httplib.HTTPConnection('httpbin.org')
+from assertions import assert_is_json
+
+
+def _headers_are_case_insensitive(host, port):
+    conn = httplib.HTTPConnection(host, port)
     conn.request('GET', "/cookies/set?k1=v1")
     r1 = conn.getresponse()
     cookie_data1 = r1.getheader('set-cookie')
-    conn = httplib.HTTPConnection('httpbin.org')
+    conn = httplib.HTTPConnection(host, port)
     conn.request('GET', "/cookies/set?k1=v1")
     r2 = conn.getresponse()
    cookie_data2 = r2.getheader('Set-Cookie')
    return cookie_data1 == cookie_data2
 
-def test_case_insensitivity(tmpdir):
+
+def test_case_insensitivity(tmpdir, httpbin):
     testfile = str(tmpdir.join('case_insensitivity.yml'))
     # check if headers are case insensitive outside of vcrpy
-    outside = _headers_are_case_insensitive()
+    host, port = httpbin.host, httpbin.port
+    outside = _headers_are_case_insensitive(host, port)
     with vcr.use_cassette(testfile):
         # check if headers are case insensitive inside of vcrpy
-        inside = _headers_are_case_insensitive()
+        inside = _headers_are_case_insensitive(host, port)
         # check if headers are case insensitive after vcrpy deserializes headers
-        inside2 = _headers_are_case_insensitive()
+        inside2 = _headers_are_case_insensitive(host, port)
 
     # behavior should be the same both inside and outside
     assert outside == inside == inside2
 
-def _multiple_header_value(httpserver):
-    conn = httplib.HTTPConnection('%s:%s' % httpserver.server_address)
-    conn.request('GET', "/")
+
+def _multiple_header_value(httpbin):
+    conn = httplib.HTTPConnection(httpbin.host, httpbin.port)
+    conn.request('GET', "/response-headers?foo=bar&foo=baz")
     r = conn.getresponse()
     return r.getheader('foo')
 
-def test_multiple_headers(tmpdir, httpserver):
+
+def test_multiple_headers(tmpdir, httpbin):
     testfile = str(tmpdir.join('multiple_headers.yaml'))
-    httpserver.serve_content('Hello!', headers=[('foo', 'bar'), ('foo', 'baz')])
-    outside = _multiple_header_value(httpserver)
+    outside = _multiple_header_value(httpbin)
 
     with vcr.use_cassette(testfile):
-        inside = _multiple_header_value(httpserver)
+        inside = _multiple_header_value(httpbin)
 
     assert outside == inside
 
+
+def test_original_decoded_response_is_not_modified(tmpdir, httpbin):
+    testfile = str(tmpdir.join('decoded_response.yml'))
+    host, port = httpbin.host, httpbin.port
+
+    conn = httplib.HTTPConnection(host, port)
+    conn.request('GET', '/gzip')
+    outside = conn.getresponse()
+
+    with vcr.use_cassette(testfile, decode_compressed_response=True):
+        conn = httplib.HTTPConnection(host, port)
+        conn.request('GET', '/gzip')
+        inside = conn.getresponse()
+
+        # Assert that we do not modify the original response while appending
+        # to the casssette.
+        assert 'gzip' == inside.headers['content-encoding']
+
+        # They should effectively be the same response.
+        inside_headers = (h for h in inside.headers.items() if h[0] != 'Date')
+        outside_headers = (h for h in outside.getheaders() if h[0] != 'Date')
+        assert set(inside_headers) == set(outside_headers)
+        inside = zlib.decompress(inside.read(), 16+zlib.MAX_WBITS)
+        outside = zlib.decompress(outside.read(), 16+zlib.MAX_WBITS)
+        assert inside == outside
+
+    # Even though the above are raw bytes, the JSON data should have been
+    # decoded and saved to the cassette.
+    with vcr.use_cassette(testfile):
+        conn = httplib.HTTPConnection(host, port)
+        conn.request('GET', '/gzip')
+        inside = conn.getresponse()
+
+        assert 'content-encoding' not in inside.headers
+        assert_is_json(inside.read())
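The new httplib test above checks that `decode_compressed_response=True` stores a decompressed body in the cassette without mutating the live response. A hedged sketch of the option on its own, mirroring the assertions in the diff (helper and cassette names are illustrative):

import vcr
import six.moves.http_client as httplib

def fetch_gzip(host, port):
    conn = httplib.HTTPConnection(host, port)
    conn.request('GET', '/gzip')
    return conn.getresponse()

def test_decode_compressed_option(tmpdir, httpbin):
    cassette = str(tmpdir.join('decoded.yml'))
    with vcr.use_cassette(cassette, decode_compressed_response=True):
        live = fetch_gzip(httpbin.host, httpbin.port)
        # The response handed back while recording is still gzip-encoded.
        assert live.headers['content-encoding'] == 'gzip'
    with vcr.use_cassette(cassette):
        replayed = fetch_gzip(httpbin.host, httpbin.port)
        # The cassette, however, stores the decoded body and drops the header.
        assert 'content-encoding' not in replayed.headers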
@@ -9,9 +9,13 @@ from vcr.errors import CannotOverwriteExistingCassetteException
 
 from assertions import assert_cassette_empty, assert_is_json
 
+tornado = pytest.importorskip("tornado")
 http = pytest.importorskip("tornado.httpclient")
 
+# whether the current version of Tornado supports the raise_error argument for
+# fetch().
+supports_raise_error = tornado.version_info >= (4,)
+
 
 @pytest.fixture(params=['simple', 'curl', 'default'])
 def get_client(request):
@@ -26,10 +30,13 @@ def get_client(request):
 
 
 def get(client, url, **kwargs):
-    raise_error = kwargs.pop('raise_error', True)
+    fetch_kwargs = {}
+    if supports_raise_error:
+        fetch_kwargs['raise_error'] = kwargs.pop('raise_error', True)
+
     return client.fetch(
         http.HTTPRequest(url, method='GET', **kwargs),
-        raise_error=raise_error,
+        **fetch_kwargs
     )
 
 
@@ -81,6 +88,7 @@ def test_body(get_client, tmpdir, scheme):
     assert content == (yield get(get_client(), url)).body
     assert 1 == cass.play_count
 
+
 @pytest.mark.gen_test
 def test_effective_url(get_client, scheme, tmpdir):
     '''Ensure that the effective_url is captured'''
@@ -93,6 +101,7 @@ def test_effective_url(get_client, scheme, tmpdir):
     assert effective_url == (yield get(get_client(), url)).effective_url
     assert 1 == cass.play_count
 
+
 @pytest.mark.gen_test
 def test_auth(get_client, tmpdir, scheme):
     '''Ensure that we can handle basic auth'''
@@ -120,22 +129,26 @@ def test_auth_failed(get_client, tmpdir, scheme):
     with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
         # Ensure that this is empty to begin with
         assert_cassette_empty(cass)
-        one = yield get(
-            get_client(),
-            url,
-            auth_username=auth[0],
-            auth_password=auth[1],
-            raise_error=False
-        )
+        with pytest.raises(http.HTTPError) as exc_info:
+            yield get(
+                get_client(),
+                url,
+                auth_username=auth[0],
+                auth_password=auth[1],
+            )
+        one = exc_info.value.response
+        assert exc_info.value.code == 401
 
     with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
-        two = yield get(
-            get_client(),
-            url,
-            auth_username=auth[0],
-            auth_password=auth[1],
-            raise_error=False
-        )
+        with pytest.raises(http.HTTPError) as exc_info:
+            two = yield get(
+                get_client(),
+                url,
+                auth_username=auth[0],
+                auth_password=auth[1],
+            )
+        two = exc_info.value.response
+        assert exc_info.value.code == 401
         assert one.body == two.body
         assert one.code == two.code == 401
         assert 1 == cass.play_count
@@ -195,12 +208,19 @@ def test_gzip(get_client, tmpdir, scheme):
     '''
     url = scheme + '://httpbin.org/gzip'
 
+    # use_gzip was renamed to decompress_response in 4.0
+    kwargs = {}
+    if tornado.version_info < (4,):
+        kwargs['use_gzip'] = True
+    else:
+        kwargs['decompress_response'] = True
+
     with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
-        response = yield get(get_client(), url, decompress_response=True)
+        response = yield get(get_client(), url, **kwargs)
         assert_is_json(response.body)
 
     with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))) as cass:
-        response = yield get(get_client(), url, decompress_response=True)
+        response = yield get(get_client(), url, **kwargs)
         assert_is_json(response.body)
         assert 1 == cass.play_count
 
@@ -236,6 +256,10 @@ def test_unsupported_features_raises_in_future(get_client, tmpdir):
     assert "not yet supported by VCR" in str(excinfo)
 
 
+@pytest.mark.skipif(
+    not supports_raise_error,
+    reason='raise_error unavailable in tornado <= 3',
+)
 @pytest.mark.gen_test
 def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
     '''Ensure that the exception for an AsyncHTTPClient feature not being
@@ -270,6 +294,10 @@ def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
         yield future
 
 
+@pytest.mark.skipif(
+    not supports_raise_error,
+    reason='raise_error unavailable in tornado <= 3',
+)
 @pytest.mark.gen_test
 def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
     '''Ensure that CannotOverwriteExistingCassetteException is not raised if
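The tornado changes above guard two API differences introduced in tornado 4.0: `use_gzip` became `decompress_response`, and `fetch(..., raise_error=False)` only exists from 4.0 on. The guarded-kwargs idiom, isolated as a sketch (helper names are illustrative):

import tornado

# Feature flag derived from the running tornado version, as in the test module.
supports_raise_error = tornado.version_info >= (4,)

def gzip_kwargs():
    # use_gzip was renamed to decompress_response in tornado 4.0,
    # so pick whichever keyword the installed version understands.
    if tornado.version_info < (4,):
        return {'use_gzip': True}
    return {'decompress_response': True}

# Usage sketch: client.fetch(request, **gzip_kwargs())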
@@ -1,9 +1,9 @@
 # -*- coding: utf-8 -*-
 '''Integration tests with urllib2'''
 
-import pytest
 from six.moves.urllib.request import urlopen
 from six.moves.urllib_parse import urlencode
+import pytest_httpbin.certs
 
 # Internal imports
 import vcr
@@ -11,127 +11,136 @@ import vcr
 from assertions import assert_cassette_has_one_response
 
 
-@pytest.fixture(params=["https", "http"])
-def scheme(request):
-    """
-    Fixture that returns both http and https
-    """
-    return request.param
+def urlopen_with_cafile(*args, **kwargs):
+    kwargs['cafile'] = pytest_httpbin.certs.where()
+    try:
+        return urlopen(*args, **kwargs)
+    except TypeError:
+        # python2/pypi don't let us override this
+        del kwargs['cafile']
+        return urlopen(*args, **kwargs)
 
 
-def test_response_code(scheme, tmpdir):
+def test_response_code(httpbin_both, tmpdir):
     '''Ensure we can read a response code from a fetch'''
-    url = scheme + '://httpbin.org/'
-    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))) as cass:
-        code = urlopen(url).getcode()
+    url = httpbin_both.url
+    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
+        code = urlopen_with_cafile(url).getcode()
 
-    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))) as cass:
-        assert code == urlopen(url).getcode()
+    with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
+        assert code == urlopen_with_cafile(url).getcode()
 
 
-def test_random_body(scheme, tmpdir):
+def test_random_body(httpbin_both, tmpdir):
     '''Ensure we can read the content, and that it's served from cache'''
-    url = scheme + '://httpbin.org/bytes/1024'
-    with vcr.use_cassette(str(tmpdir.join('body.yaml'))) as cass:
-        body = urlopen(url).read()
+    url = httpbin_both.url + '/bytes/1024'
+    with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
+        body = urlopen_with_cafile(url).read()
 
-    with vcr.use_cassette(str(tmpdir.join('body.yaml'))) as cass:
-        assert body == urlopen(url).read()
+    with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
+        assert body == urlopen_with_cafile(url).read()
 
 
-def test_response_headers(scheme, tmpdir):
+def test_response_headers(httpbin_both, tmpdir):
     '''Ensure we can get information from the response'''
-    url = scheme + '://httpbin.org/'
-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
-        open1 = urlopen(url).info().items()
+    url = httpbin_both.url
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
+        open1 = urlopen_with_cafile(url).info().items()
 
-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
-        open2 = urlopen(url).info().items()
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
+        open2 = urlopen_with_cafile(url).info().items()
+
     assert sorted(open1) == sorted(open2)
 
-def test_effective_url(scheme, tmpdir):
+
+def test_effective_url(httpbin_both, tmpdir):
     '''Ensure that the effective_url is captured'''
-    url = scheme + '://httpbin.org/redirect-to?url=/html'
-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
-        effective_url = urlopen(url).geturl()
-        assert effective_url == scheme + '://httpbin.org/html'
+    url = httpbin_both.url + '/redirect-to?url=/html'
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
+        effective_url = urlopen_with_cafile(url).geturl()
+        assert effective_url == httpbin_both.url + '/html'
 
-    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
-        assert effective_url == urlopen(url).geturl()
+    with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
+        assert effective_url == urlopen_with_cafile(url).geturl()
 
-def test_multiple_requests(scheme, tmpdir):
+
+def test_multiple_requests(httpbin_both, tmpdir):
     '''Ensure that we can cache multiple requests'''
     urls = [
-        scheme + '://httpbin.org/',
-        scheme + '://httpbin.org/',
-        scheme + '://httpbin.org/get',
-        scheme + '://httpbin.org/bytes/1024'
+        httpbin_both.url,
+        httpbin_both.url,
+        httpbin_both.url + '/get',
+        httpbin_both.url + '/bytes/1024',
     ]
     with vcr.use_cassette(str(tmpdir.join('multiple.yaml'))) as cass:
-        [urlopen(url) for url in urls]
+        [urlopen_with_cafile(url) for url in urls]
     assert len(cass) == len(urls)
 
 
-def test_get_data(scheme, tmpdir):
+def test_get_data(httpbin_both, tmpdir):
     '''Ensure that it works with query data'''
     data = urlencode({'some': 1, 'data': 'here'})
-    url = scheme + '://httpbin.org/get?' + data
-    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))) as cass:
-        res1 = urlopen(url).read()
+    url = httpbin_both.url + '/get?' + data
+    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
+        res1 = urlopen_with_cafile(url).read()
 
-    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))) as cass:
-        res2 = urlopen(url).read()
+    with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
+        res2 = urlopen_with_cafile(url).read()
     assert res1 == res2
 
 
-def test_post_data(scheme, tmpdir):
+def test_post_data(httpbin_both, tmpdir):
     '''Ensure that it works when posting data'''
     data = urlencode({'some': 1, 'data': 'here'}).encode('utf-8')
-    url = scheme + '://httpbin.org/post'
-    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
-        res1 = urlopen(url, data).read()
+    url = httpbin_both.url + '/post'
+    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
+        res1 = urlopen_with_cafile(url, data).read()
 
     with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
-        res2 = urlopen(url, data).read()
+        res2 = urlopen_with_cafile(url, data).read()
+        assert len(cass) == 1
 
     assert res1 == res2
     assert_cassette_has_one_response(cass)
 
 
-def test_post_unicode_data(scheme, tmpdir):
+def test_post_unicode_data(httpbin_both, tmpdir):
     '''Ensure that it works when posting unicode data'''
     data = urlencode({'snowman': u'☃'.encode('utf-8')}).encode('utf-8')
-    url = scheme + '://httpbin.org/post'
-    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
-        res1 = urlopen(url, data).read()
-    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
-        res2 = urlopen(url, data).read()
+    url = httpbin_both.url + '/post'
+    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
+        res1 = urlopen_with_cafile(url, data).read()
+
+    with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
+        res2 = urlopen_with_cafile(url, data).read()
+        assert len(cass) == 1
+
     assert res1 == res2
     assert_cassette_has_one_response(cass)
 
 
-def test_cross_scheme(tmpdir):
+def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
     '''Ensure that requests between schemes are treated separately'''
     # First fetch a url under https, and then again under https and then
     # ensure that we haven't served anything out of cache, and we have two
     # requests / response pairs in the cassette
     with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
-        urlopen('https://httpbin.org/')
-        urlopen('http://httpbin.org/')
+        urlopen_with_cafile(httpbin_secure.url)
+        urlopen_with_cafile(httpbin.url)
         assert len(cass) == 2
         assert cass.play_count == 0
 
-def test_decorator(scheme, tmpdir):
+
+def test_decorator(httpbin_both, tmpdir):
     '''Test the decorator version of VCR.py'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both.url
 
     @vcr.use_cassette(str(tmpdir.join('atts.yaml')))
     def inner1():
-        return urlopen(url).getcode()
+        return urlopen_with_cafile(url).getcode()
 
     @vcr.use_cassette(str(tmpdir.join('atts.yaml')))
     def inner2():
-        return urlopen(url).getcode()
+        return urlopen_with_cafile(url).getcode()
 
     assert inner1() == inner2()
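The urlopen_with_cafile helper exists because pytest-httpbin's HTTPS server presents a self-signed certificate: `pytest_httpbin.certs.where()` returns a CA bundle that trusts it, and older Python/PyPy urlopen signatures reject the cafile argument, hence the TypeError fallback. The same bundle works for other clients; a small sketch with requests, where verify accepts a CA bundle path (test name is illustrative):

import pytest_httpbin.certs
import requests

def test_secure_fixture_with_verification(httpbin_secure):
    # Point requests at the CA bundle that signed the local test server's certificate,
    # instead of disabling verification.
    response = requests.get(httpbin_secure.url + '/get',
                            verify=pytest_httpbin.certs.where())
    assert response.status_code == 200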
@@ -3,25 +3,17 @@
 # coding=utf-8
 
 import pytest
+import pytest_httpbin
 import vcr
 from assertions import assert_cassette_empty, assert_is_json
-certifi = pytest.importorskip("certifi")
 urllib3 = pytest.importorskip("urllib3")
 
 
-@pytest.fixture(params=["https", "http"])
-def scheme(request):
-    """
-    Fixture that returns both http and https
-    """
-    return request.param
-
-
 @pytest.fixture(scope='module')
 def verify_pool_mgr():
     return urllib3.PoolManager(
         cert_reqs='CERT_REQUIRED',  # Force certificate check.
-        ca_certs=certifi.where()
+        ca_certs=pytest_httpbin.certs.where()
     )
 
 
@@ -30,9 +22,9 @@ def pool_mgr():
     return urllib3.PoolManager()
 
 
-def test_status_code(scheme, tmpdir, verify_pool_mgr):
+def test_status_code(httpbin_both, tmpdir, verify_pool_mgr):
     '''Ensure that we can read the status code'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both.url
     with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
         status_code = verify_pool_mgr.request('GET', url).status
 
@@ -40,9 +32,9 @@ def test_status_code(scheme, tmpdir, verify_pool_mgr):
     assert status_code == verify_pool_mgr.request('GET', url).status
 
 
-def test_headers(scheme, tmpdir, verify_pool_mgr):
+def test_headers(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure that we can read the headers back'''
-    url = scheme + '://httpbin.org/'
+    url = httpbin_both.url
     with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
         headers = verify_pool_mgr.request('GET', url).headers
 
@@ -50,9 +42,9 @@ def test_headers(scheme, tmpdir, verify_pool_mgr):
     assert headers == verify_pool_mgr.request('GET', url).headers
 
 
-def test_body(tmpdir, scheme, verify_pool_mgr):
+def test_body(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure the responses are all identical enough'''
-    url = scheme + '://httpbin.org/bytes/1024'
+    url = httpbin_both.url + '/bytes/1024'
     with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
         content = verify_pool_mgr.request('GET', url).data
 
@@ -60,11 +52,11 @@ def test_body(tmpdir, scheme, verify_pool_mgr):
     assert content == verify_pool_mgr.request('GET', url).data
 
 
-def test_auth(tmpdir, scheme, verify_pool_mgr):
+def test_auth(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure that we can handle basic auth'''
     auth = ('user', 'passwd')
     headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(*auth))
-    url = scheme + '://httpbin.org/basic-auth/user/passwd'
+    url = httpbin_both.url + '/basic-auth/user/passwd'
     with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
         one = verify_pool_mgr.request('GET', url, headers=headers)
 
@@ -74,11 +66,11 @@ def test_auth(tmpdir, scheme, verify_pool_mgr):
     assert one.status == two.status
 
 
-def test_auth_failed(tmpdir, scheme, verify_pool_mgr):
+def test_auth_failed(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure that we can save failed auth statuses'''
     auth = ('user', 'wrongwrongwrong')
     headers = urllib3.util.make_headers(basic_auth='{0}:{1}'.format(*auth))
-    url = scheme + '://httpbin.org/basic-auth/user/passwd'
+    url = httpbin_both.url + '/basic-auth/user/passwd'
     with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
         # Ensure that this is empty to begin with
         assert_cassette_empty(cass)
@@ -88,10 +80,10 @@ def test_auth_failed(tmpdir, scheme, verify_pool_mgr):
     assert one.status == two.status == 401
 
 
-def test_post(tmpdir, scheme, verify_pool_mgr):
+def test_post(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure that we can post and cache the results'''
     data = {'key1': 'value1', 'key2': 'value2'}
-    url = scheme + '://httpbin.org/post'
+    url = httpbin_both.url + '/post'
     with vcr.use_cassette(str(tmpdir.join('verify_pool_mgr.yaml'))):
         req1 = verify_pool_mgr.request('POST', url, data).data
 
@@ -101,9 +93,9 @@ def test_post(tmpdir, scheme, verify_pool_mgr):
     assert req1 == req2
 
 
-def test_redirects(tmpdir, scheme, verify_pool_mgr):
+def test_redirects(tmpdir, httpbin_both, verify_pool_mgr):
     '''Ensure that we can handle redirects'''
-    url = scheme + '://httpbin.org/redirect-to?url=bytes/1024'
+    url = httpbin_both.url + '/redirect-to?url=bytes/1024'
     with vcr.use_cassette(str(tmpdir.join('verify_pool_mgr.yaml'))):
         content = verify_pool_mgr.request('GET', url).data
 
@@ -115,24 +107,24 @@ def test_redirects(tmpdir, scheme, verify_pool_mgr):
     assert cass.play_count == 2
 
 
-def test_cross_scheme(tmpdir, scheme, verify_pool_mgr):
+def test_cross_scheme(tmpdir, httpbin, httpbin_secure, verify_pool_mgr):
     '''Ensure that requests between schemes are treated separately'''
     # First fetch a url under http, and then again under https and then
     # ensure that we haven't served anything out of cache, and we have two
     # requests / response pairs in the cassette
     with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
-        verify_pool_mgr.request('GET', 'https://httpbin.org/')
-        verify_pool_mgr.request('GET', 'http://httpbin.org/')
+        verify_pool_mgr.request('GET', httpbin_secure.url)
+        verify_pool_mgr.request('GET', httpbin.url)
         assert cass.play_count == 0
         assert len(cass) == 2
 
 
-def test_gzip(tmpdir, scheme, verify_pool_mgr):
+def test_gzip(tmpdir, httpbin_both, verify_pool_mgr):
     '''
     Ensure that requests (actually urllib3) is able to automatically decompress
     the response body
     '''
-    url = scheme + '://httpbin.org/gzip'
+    url = httpbin_both.url + '/gzip'
     response = verify_pool_mgr.request('GET', url)
 
     with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
@@ -143,6 +135,6 @@ def test_gzip(tmpdir, scheme, verify_pool_mgr):
     assert_is_json(response.data)
 
 
-def test_https_with_cert_validation_disabled(tmpdir, pool_mgr):
+def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure, pool_mgr):
     with vcr.use_cassette(str(tmpdir.join('cert_validation_disabled.yaml'))):
-        pool_mgr.request('GET', 'https://httpbin.org')
+        pool_mgr.request('GET', httpbin_secure.url)
@@ -3,7 +3,7 @@ from six.moves import xmlrpc_client
 
 requests = pytest.importorskip("requests")
 
-import vcr
+import vcr  # NOQA
 
 try:
     import httplib
@@ -24,14 +24,14 @@ def test_domain_redirect():
     assert len(cass) == 2
 
 
-def test_flickr_multipart_upload():
+def test_flickr_multipart_upload(httpbin, tmpdir):
     """
     The python-flickr-api project does a multipart
     upload that confuses vcrpy
     """
     def _pretend_to_be_flickr_library():
         content_type, body = "text/plain", "HELLO WORLD"
-        h = httplib.HTTPConnection("httpbin.org")
+        h = httplib.HTTPConnection(httpbin.host, httpbin.port)
         headers = {
             "Content-Type": content_type,
             "content-length": str(len(body))
@@ -42,11 +42,14 @@ def test_flickr_multipart_upload():
         data = r.read()
         h.close()
 
-    with vcr.use_cassette('fixtures/vcr_cassettes/flickr.yaml') as cass:
+        return data
+
+    testfile = str(tmpdir.join('flickr.yml'))
+    with vcr.use_cassette(testfile) as cass:
         _pretend_to_be_flickr_library()
         assert len(cass) == 1
 
-    with vcr.use_cassette('fixtures/vcr_cassettes/flickr.yaml') as cass:
+    with vcr.use_cassette(testfile) as cass:
         assert len(cass) == 1
         _pretend_to_be_flickr_library()
         assert cass.play_count == 1
@@ -59,12 +62,13 @@ def test_flickr_should_respond_with_200(tmpdir):
     assert r.status_code == 200
 
 
-def test_cookies(tmpdir):
+def test_cookies(tmpdir, httpbin):
     testfile = str(tmpdir.join('cookies.yml'))
     with vcr.use_cassette(testfile):
         s = requests.Session()
-        r1 = s.get("http://httpbin.org/cookies/set?k1=v1&k2=v2")
-        r2 = s.get("http://httpbin.org/cookies")
+        s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2")
+
+        r2 = s.get(httpbin.url + "/cookies")
         assert len(r2.json()['cookies']) == 2
 
 
@@ -72,7 +76,7 @@ def test_amazon_doctype(tmpdir):
     # amazon gzips its homepage. For some reason, in requests 2.7, it's not
     # getting gunzipped.
     with vcr.use_cassette(str(tmpdir.join('amz.yml'))):
-        r = requests.get('http://www.amazon.com')
+        r = requests.get('http://www.amazon.com', verify=False)
     assert 'html' in r.text
 
 
@@ -81,9 +85,8 @@ def test_xmlrpclib(tmpdir):
     roundup_server = xmlrpc_client.ServerProxy('http://bugs.python.org/xmlrpc', allow_none=True)
     original_schema = roundup_server.schema()
-
-    with vcr.use_cassette(str(tmpdir.join('xmlrpcvideo.yaml'))) as cassette:
+    with vcr.use_cassette(str(tmpdir.join('xmlrpcvideo.yaml'))):
         roundup_server = xmlrpc_client.ServerProxy('http://bugs.python.org/xmlrpc', allow_none=True)
         second_schema = roundup_server.schema()
 
     assert original_schema == second_schema
 
@@ -89,7 +89,7 @@ def make_get_request():
 def test_function_decorated_with_use_cassette_can_be_invoked_multiple_times(*args):
     decorated_function = Cassette.use(path='test')(make_get_request)
     for i in range(4):
         decorated_function()
 
 
 def test_arg_getter_functionality():
@@ -160,7 +160,7 @@ def test_nesting_cassette_context_managers(*args):
     with contextlib.ExitStack() as exit_stack:
         first_cassette = exit_stack.enter_context(Cassette.use(path='test'))
         exit_stack.enter_context(mock.patch.object(first_cassette, 'play_response',
                                                    return_value=first_response))
         assert_get_response_body_is('first_response')
 
         # Make sure a second cassette can supercede the first
@@ -200,16 +200,17 @@ def test_custom_patchers():
                          custom_patches=((Test, 'attribute', VCRHTTPSConnection),)):
         assert issubclass(Test.attribute, VCRHTTPSConnection)
         assert VCRHTTPSConnection is not Test.attribute
         assert Test.attribute is not old_attribute
 
     assert issubclass(Test.attribute, VCRHTTPSConnection)
     assert VCRHTTPSConnection is not Test.attribute
     assert Test.attribute is old_attribute
 
+
 def test_decorated_functions_are_reentrant():
     info = {"second": False}
     original_conn = httplib.HTTPConnection
 
     @Cassette.use(path='whatever', inject=True)
     def test_function(cassette):
         if info['second']:
@@ -219,6 +220,7 @@ def test_decorated_functions_are_reentrant():
     info['second'] = True
     test_function()
     assert httplib.HTTPConnection is info['first_conn']
+
     test_function()
     assert httplib.HTTPConnection is original_conn
 
@@ -231,10 +233,13 @@ def test_cassette_use_called_without_path_uses_function_to_generate_path():
 
 
 def test_path_transformer_with_function_path():
-    path_transformer = lambda path: os.path.join('a', path)
+    def path_transformer(path):
+        return os.path.join('a', path)
+
     @Cassette.use(inject=True, path_transformer=path_transformer)
     def function_name(cassette):
         assert cassette._path == os.path.join('a', 'function_name')
 
     function_name()
 
 
@@ -245,18 +250,28 @@ def test_path_transformer_with_context_manager():
         assert cassette._path == 'a'
 
 
+def test_path_transformer_None():
+    with Cassette.use(
+        path='a', path_transformer=None,
+    ) as cassette:
+        assert cassette._path == 'a'
+
+
 def test_func_path_generator():
     def generator(function):
         return os.path.join(os.path.dirname(inspect.getfile(function)),
                             function.__name__)
 
     @Cassette.use(inject=True, func_path_generator=generator)
     def function_name(cassette):
         assert cassette._path == os.path.join(os.path.dirname(__file__), 'function_name')
 
     function_name()
 
 
 def test_use_as_decorator_on_coroutine():
     original_http_connetion = httplib.HTTPConnection
 
     @Cassette.use(inject=True)
     def test_function(cassette):
         assert httplib.HTTPConnection.cassette is cassette
@@ -267,6 +282,7 @@ def test_use_as_decorator_on_coroutine():
         assert httplib.HTTPConnection is not original_http_connetion
         value = yield 2
         assert value == 2
+
     coroutine = test_function()
     value = next(coroutine)
     while True:
@@ -278,6 +294,7 @@ def test_use_as_decorator_on_coroutine():
 
 def test_use_as_decorator_on_generator():
     original_http_connetion = httplib.HTTPConnection
+
     @Cassette.use(inject=True)
     def test_function(cassette):
         assert httplib.HTTPConnection.cassette is cassette
@@ -286,4 +303,5 @@ def test_use_as_decorator_on_generator():
         assert httplib.HTTPConnection.cassette is cassette
         assert httplib.HTTPConnection is not original_http_connetion
         yield 2
+
     assert list(test_function()) == [1, 2]
@@ -1,48 +1,149 @@
+ from six import BytesIO
from vcr.filters import (
- remove_headers,
+ remove_headers, replace_headers,
- remove_query_parameters,
+ remove_query_parameters, replace_query_parameters,
- remove_post_data_parameters
+ remove_post_data_parameters, replace_post_data_parameters,
+ decode_response
)
+ from vcr.compat import mock
from vcr.request import Request
+ import gzip
import json
+ import zlib


+ def test_replace_headers():
+ # This tests all of:
+ # 1. keeping a header
+ # 2. removing a header
+ # 3. replacing a header
+ # 4. replacing a header using a callable
+ # 5. removing a header using a callable
+ # 6. replacing a header that doesn't exist
+ headers = {
+ 'one': ['keep'],
+ 'two': ['lose'],
+ 'three': ['change'],
+ 'four': ['shout'],
+ 'five': ['whisper'],
+ }
+ request = Request('GET', 'http://google.com', '', headers)
+ replace_headers(request, [
+ ('two', None),
+ ('three', 'tada'),
+ ('four', lambda key, value, request: value.upper()),
+ ('five', lambda key, value, request: None),
+ ('six', 'doesntexist'),
+ ])
+ assert request.headers == {
+ 'one': 'keep',
+ 'three': 'tada',
+ 'four': 'SHOUT',
+ }


+ def test_replace_headers_empty():
+ headers = {'hello': 'goodbye', 'secret': 'header'}
+ request = Request('GET', 'http://google.com', '', headers)
+ replace_headers(request, [])
+ assert request.headers == headers


+ def test_replace_headers_callable():
+ # This goes beyond test_replace_headers() to ensure that the callable
+ # receives the expected arguments.
+ headers = {'hey': 'there'}
+ request = Request('GET', 'http://google.com', '', headers)
+ callme = mock.Mock(return_value='ho')
+ replace_headers(request, [('hey', callme)])
+ assert request.headers == {'hey': 'ho'}
+ assert callme.call_args == ((), {'request': request,
+ 'key': 'hey',
+ 'value': 'there'})


def test_remove_headers():
+ # Test the backward-compatible API wrapper.
headers = {'hello': ['goodbye'], 'secret': ['header']}
request = Request('GET', 'http://google.com', '', headers)
remove_headers(request, ['secret'])
assert request.headers == {'hello': 'goodbye'}


- def test_remove_headers_empty():
+ def test_replace_query_parameters():
- headers = {'hello': 'goodbye', 'secret': 'header'}
+ # This tests all of:
- request = Request('GET', 'http://google.com', '', headers)
+ # 1. keeping a parameter
- remove_headers(request, [])
+ # 2. removing a parameter
- assert request.headers == headers
+ # 3. replacing a parameter
+ # 4. replacing a parameter using a callable
+ # 5. removing a parameter using a callable
+ # 6. replacing a parameter that doesn't exist
+ uri = 'http://g.com/?one=keep&two=lose&three=change&four=shout&five=whisper'
+ request = Request('GET', uri, '', {})
+ replace_query_parameters(request, [
+ ('two', None),
+ ('three', 'tada'),
+ ('four', lambda key, value, request: value.upper()),
+ ('five', lambda key, value, request: None),
+ ('six', 'doesntexist'),
+ ])
+ assert request.query == [
+ ('four', 'SHOUT'),
+ ('one', 'keep'),
+ ('three', 'tada'),
+ ]


+ def test_remove_all_query_parameters():
+ uri = 'http://g.com/?q=cowboys&w=1'
+ request = Request('GET', uri, '', {})
+ replace_query_parameters(request, [('w', None), ('q', None)])
+ assert request.uri == 'http://g.com/'


+ def test_replace_query_parameters_callable():
+ # This goes beyond test_replace_query_parameters() to ensure that the
+ # callable receives the expected arguments.
+ uri = 'http://g.com/?hey=there'
+ request = Request('GET', uri, '', {})
+ callme = mock.Mock(return_value='ho')
+ replace_query_parameters(request, [('hey', callme)])
+ assert request.uri == 'http://g.com/?hey=ho'
+ assert callme.call_args == ((), {'request': request,
+ 'key': 'hey',
+ 'value': 'there'})


def test_remove_query_parameters():
+ # Test the backward-compatible API wrapper.
uri = 'http://g.com/?q=cowboys&w=1'
request = Request('GET', uri, '', {})
remove_query_parameters(request, ['w'])
assert request.uri == 'http://g.com/?q=cowboys'


- def test_remove_all_query_parameters():
+ def test_replace_post_data_parameters():
- uri = 'http://g.com/?q=cowboys&w=1'
+ # This tests all of:
- request = Request('GET', uri, '', {})
+ # 1. keeping a parameter
- remove_query_parameters(request, ['w', 'q'])
+ # 2. removing a parameter
- assert request.uri == 'http://g.com/'
+ # 3. replacing a parameter
+ # 4. replacing a parameter using a callable
+ # 5. removing a parameter using a callable
- def test_remove_nonexistent_query_parameters():
+ # 6. replacing a parameter that doesn't exist
- uri = 'http://g.com/'
+ body = b'one=keep&two=lose&three=change&four=shout&five=whisper'
- request = Request('GET', uri, '', {})
+ request = Request('POST', 'http://google.com', body, {})
- remove_query_parameters(request, ['w', 'q'])
+ replace_post_data_parameters(request, [
- assert request.uri == 'http://g.com/'
+ ('two', None),
+ ('three', 'tada'),
+ ('four', lambda key, value, request: value.upper()),
+ ('five', lambda key, value, request: None),
+ ('six', 'doesntexist'),
+ ])
+ assert request.body == b'one=keep&three=tada&four=SHOUT'


def test_remove_post_data_parameters():
+ # Test the backward-compatible API wrapper.
body = b'id=secret&foo=bar'
request = Request('POST', 'http://google.com', body, {})
remove_post_data_parameters(request, ['id'])
@@ -52,28 +153,45 @@ def test_remove_post_data_parameters():
def test_preserve_multiple_post_data_parameters():
body = b'id=secret&foo=bar&foo=baz'
request = Request('POST', 'http://google.com', body, {})
- remove_post_data_parameters(request, ['id'])
+ replace_post_data_parameters(request, [('id', None)])
assert request.body == b'foo=bar&foo=baz'


def test_remove_all_post_data_parameters():
body = b'id=secret&foo=bar'
request = Request('POST', 'http://google.com', body, {})
- remove_post_data_parameters(request, ['id', 'foo'])
+ replace_post_data_parameters(request, [('id', None), ('foo', None)])
assert request.body == b''


- def test_remove_nonexistent_post_data_parameters():
+ def test_replace_json_post_data_parameters():
- body = b''
+ # This tests all of:
+ # 1. keeping a parameter
+ # 2. removing a parameter
+ # 3. replacing a parameter
+ # 4. replacing a parameter using a callable
+ # 5. removing a parameter using a callable
+ # 6. replacing a parameter that doesn't exist
+ body = b'{"one": "keep", "two": "lose", "three": "change", "four": "shout", "five": "whisper"}'
request = Request('POST', 'http://google.com', body, {})
- remove_post_data_parameters(request, ['id'])
+ request.headers['Content-Type'] = 'application/json'
- assert request.body == b''
+ replace_post_data_parameters(request, [
+ ('two', None),
+ ('three', 'tada'),
+ ('four', lambda key, value, request: value.upper()),
+ ('five', lambda key, value, request: None),
+ ('six', 'doesntexist'),
+ ])
+ request_data = json.loads(request.body.decode('utf-8'))
+ expected_data = json.loads('{"one": "keep", "three": "tada", "four": "SHOUT"}')
+ assert request_data == expected_data


def test_remove_json_post_data_parameters():
+ # Test the backward-compatible API wrapper.
body = b'{"id": "secret", "foo": "bar", "baz": "qux"}'
request = Request('POST', 'http://google.com', body, {})
- request.add_header('Content-Type', 'application/json')
+ request.headers['Content-Type'] = 'application/json'
remove_post_data_parameters(request, ['id'])
request_body_json = json.loads(request.body.decode('utf-8'))
expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}'.decode('utf-8'))
@@ -83,14 +201,74 @@ def test_remove_json_post_data_parameters():
def test_remove_all_json_post_data_parameters():
body = b'{"id": "secret", "foo": "bar"}'
request = Request('POST', 'http://google.com', body, {})
- request.add_header('Content-Type', 'application/json')
+ request.headers['Content-Type'] = 'application/json'
- remove_post_data_parameters(request, ['id', 'foo'])
+ replace_post_data_parameters(request, [('id', None), ('foo', None)])
assert request.body == b'{}'


- def test_remove_nonexistent_json_post_data_parameters():
+ def test_decode_response_uncompressed():
- body = b'{}'
+ recorded_response = {
- request = Request('POST', 'http://google.com', body, {})
+ "status": {
- request.add_header('Content-Type', 'application/json')
+ "message": "OK",
- remove_post_data_parameters(request, ['id'])
+ "code": 200
- assert request.body == b'{}'
+ },
+ "headers": {
+ "content-length": ["10806"],
+ "date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
+ "content-type": ["text/html; charset=utf-8"],
+ },
+ "body": {
+ "string": b""
+ }
+ }
+ assert decode_response(recorded_response) == recorded_response


+ def test_decode_response_deflate():
+ body = b'deflate message'
+ deflate_response = {
+ 'body': {'string': zlib.compress(body)},
+ 'headers': {
+ 'access-control-allow-credentials': ['true'],
+ 'access-control-allow-origin': ['*'],
+ 'connection': ['keep-alive'],
+ 'content-encoding': ['deflate'],
+ 'content-length': ['177'],
+ 'content-type': ['application/json'],
+ 'date': ['Wed, 02 Dec 2015 19:44:32 GMT'],
+ 'server': ['nginx']
+ },
+ 'status': {'code': 200, 'message': 'OK'}
+ }
+ decoded_response = decode_response(deflate_response)
+ assert decoded_response['body']['string'] == body
+ assert decoded_response['headers']['content-length'] == [str(len(body))]


+ def test_decode_response_gzip():
+ body = b'gzip message'

+ buf = BytesIO()
+ f = gzip.GzipFile('a', fileobj=buf, mode='wb')
+ f.write(body)
+ f.close()

+ compressed_body = buf.getvalue()
+ buf.close()
+ gzip_response = {
+ 'body': {'string': compressed_body},
+ 'headers': {
+ 'access-control-allow-credentials': ['true'],
+ 'access-control-allow-origin': ['*'],
+ 'connection': ['keep-alive'],
+ 'content-encoding': ['gzip'],
+ 'content-length': ['177'],
+ 'content-type': ['application/json'],
+ 'date': ['Wed, 02 Dec 2015 19:44:32 GMT'],
+ 'server': ['nginx']
+ },
+ 'status': {'code': 200, 'message': 'OK'}
+ }
+ decoded_response = decode_response(gzip_response)
+ assert decoded_response['body']['string'] == body
+ assert decoded_response['headers']['content-length'] == [str(len(body))]
@@ -1,6 +1,6 @@
import pytest

- from vcr.request import Request
+ from vcr.request import Request, HeadersDict


def test_str():
@@ -12,11 +12,16 @@ def test_headers():
headers = {'X-Header1': ['h1'], 'X-Header2': 'h2'}
req = Request('GET', 'http://go.com/', '', headers)
assert req.headers == {'X-Header1': 'h1', 'X-Header2': 'h2'}
+ req.headers['X-Header1'] = 'h11'
- req.add_header('X-Header1', 'h11')
assert req.headers == {'X-Header1': 'h11', 'X-Header2': 'h2'}


+ def test_add_header_deprecated():
+ req = Request('GET', 'http://go.com/', '', {})
+ pytest.deprecated_call(req.add_header, 'foo', 'bar')
+ assert req.headers == {'foo': 'bar'}


@pytest.mark.parametrize("uri, expected_port", [
('http://go.com/', 80),
('http://go.com:80/', 80),
@@ -24,9 +29,9 @@ def test_headers():
('https://go.com/', 443),
('https://go.com:443/', 443),
('https://go.com:3000/', 3000),
])
def test_port(uri, expected_port):
req = Request('GET', uri, '', {})
assert req.port == expected_port


@@ -36,3 +41,30 @@ def test_uri():

req = Request('GET', 'http://go.com:80/', '', {})
assert req.uri == 'http://go.com:80/'


+ def test_HeadersDict():

+ # Simple test of CaseInsensitiveDict
+ h = HeadersDict()
+ assert h == {}
+ h['Content-Type'] = 'application/json'
+ assert h == {'Content-Type': 'application/json'}
+ assert h['content-type'] == 'application/json'
+ assert h['CONTENT-TYPE'] == 'application/json'

+ # Test feature of HeadersDict: devolve list to first element
+ h = HeadersDict()
+ assert h == {}
+ h['x'] = ['foo', 'bar']
+ assert h == {'x': 'foo'}

+ # Test feature of HeadersDict: preserve original key case
+ h = HeadersDict()
+ assert h == {}
+ h['Content-Type'] = 'application/json'
+ assert h == {'Content-Type': 'application/json'}
+ h['content-type'] = 'text/plain'
+ assert h == {'Content-Type': 'text/plain'}
+ h['CONtent-tyPE'] = 'whoa'
+ assert h == {'Content-Type': 'whoa'}
@@ -2,7 +2,8 @@
import pytest

from vcr.compat import mock
- from vcr.serialize import deserialize
+ from vcr.request import Request
+ from vcr.serialize import deserialize, serialize
from vcr.serializers import yamlserializer, jsonserializer


@@ -83,3 +84,50 @@ def test_deserialize_py2py3_yaml_cassette(tmpdir, req_body, expect):
def test_serialize_constructs_UnicodeDecodeError(mock_dumps):
with pytest.raises(UnicodeDecodeError):
jsonserializer.serialize({})


+ def test_serialize_empty_request():
+ request = Request(
+ method='POST',
+ uri='http://localhost/',
+ body='',
+ headers={},
+ )

+ serialize(
+ {'requests': [request], 'responses': [{}]},
+ jsonserializer
+ )


+ def test_serialize_json_request():
+ request = Request(
+ method='POST',
+ uri='http://localhost/',
+ body="{'hello': 'world'}",
+ headers={},
+ )

+ serialize(
+ {'requests': [request], 'responses': [{}]},
+ jsonserializer
+ )


+ def test_serialize_binary_request():
+ msg = "Does this HTTP interaction contain binary data?"

+ request = Request(
+ method='POST',
+ uri='http://localhost/',
+ body=b'\x8c',
+ headers={},
+ )

+ try:
+ serialize(
+ {'requests': [request], 'responses': [{}]},
+ jsonserializer
+ )
+ except (UnicodeDecodeError, TypeError) as exc:
+ assert msg in str(exc)
tests/unit/test_stubs.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+ from vcr.stubs import VCRHTTPSConnection


+ class TestVCRConnection(object):

+ def test_setting_of_attributes_get_propogated_to_real_connection(self):
+ vcr_connection = VCRHTTPSConnection('www.examplehost.com')
+ vcr_connection.ssl_version = 'example_ssl_version'
+ assert vcr_connection.real_connection.ssl_version == 'example_ssl_version'
@@ -1,11 +1,13 @@
import os

import pytest
+ from six.moves import http_client as httplib

from vcr import VCR, use_cassette
from vcr.compat import mock
from vcr.request import Request
from vcr.stubs import VCRHTTPSConnection
+ from vcr.patch import _HTTPConnection, force_reset


def test_vcr_use_cassette():
@@ -45,31 +47,44 @@ def test_vcr_before_record_request_params():
if request.path != '/get':
return request

- test_vcr = VCR(filter_headers=('cookie',), before_record_request=before_record_cb,
+ test_vcr = VCR(filter_headers=('cookie', ('bert', 'ernie')),
+ before_record_request=before_record_cb,
ignore_hosts=('www.test.com',), ignore_localhost=True,
- filter_query_parameters=('foo',))
+ filter_query_parameters=('foo', ('tom', 'jerry')),
+ filter_post_data_parameters=('posted', ('no', 'trespassing')))

with test_vcr.use_cassette('test') as cassette:
- assert cassette.filter_request(Request('GET', base_path + 'get', '', {})) is None
+ # Test explicit before_record_cb
- assert cassette.filter_request(Request('GET', base_path + 'get2', '', {})) is not None
+ request_get = Request('GET', base_path + 'get', '', {})
+ assert cassette.filter_request(request_get) is None
+ request = Request('GET', base_path + 'get2', '', {})
+ assert cassette.filter_request(request) is not None

- assert cassette.filter_request(Request('GET', base_path + '?foo=bar', '', {})).query == []
+ # Test filter_query_parameters
- assert cassette.filter_request(
+ request = Request('GET', base_path + '?foo=bar', '', {})
- Request('GET', base_path + '?foo=bar', '',
+ assert cassette.filter_request(request).query == []
- {'cookie': 'test', 'other': 'fun'})).headers == {'other': 'fun'}
+ request = Request('GET', base_path + '?tom=nobody', '', {})
- assert cassette.filter_request(
+ assert cassette.filter_request(request).query == [('tom', 'jerry')]
- Request(
- 'GET', base_path + '?foo=bar', '',
- {'cookie': 'test', 'other': 'fun'}
- )
- ).headers == {'other': 'fun'}

- assert cassette.filter_request(Request('GET', 'http://www.test.com' + '?foo=bar', '',
+ # Test filter_headers
- {'cookie': 'test', 'other': 'fun'})) is None
+ request = Request('GET', base_path + '?foo=bar', '',
+ {'cookie': 'test', 'other': 'fun', 'bert': 'nobody'})
+ assert (cassette.filter_request(request).headers ==
+ {'other': 'fun', 'bert': 'ernie'})

+ # Test ignore_hosts
+ request = Request('GET', 'http://www.test.com' + '?foo=bar', '',
+ {'cookie': 'test', 'other': 'fun'})
+ assert cassette.filter_request(request) is None

+ # Test ignore_localhost
+ request = Request('GET', 'http://localhost:8000' + '?foo=bar', '',
+ {'cookie': 'test', 'other': 'fun'})
+ assert cassette.filter_request(request) is None

with test_vcr.use_cassette('test', before_record_request=None) as cassette:
- # Test that before_record can be overwritten with
+ # Test that before_record can be overwritten in context manager.
- assert cassette.filter_request(Request('GET', base_path + 'get', '', {})) is not None
+ assert cassette.filter_request(request_get) is not None


def test_vcr_before_record_response_iterable():
@@ -98,6 +113,43 @@ def test_vcr_before_record_response_iterable():
assert mock_filter.call_count == 1


+ def test_before_record_response_as_filter():
+ request = Request('GET', '/', '', {})
+ response = object() # just can't be None

+ # Prevent actually saving the cassette
+ with mock.patch('vcr.cassette.save_cassette'):

+ filter_all = mock.Mock(return_value=None)
+ vcr = VCR(before_record_response=filter_all)
+ with vcr.use_cassette('test') as cassette:
+ cassette.append(request, response)
+ assert cassette.data == []
+ assert not cassette.dirty


+ def test_vcr_path_transformer():
+ # Regression test for #199

+ # Prevent actually saving the cassette
+ with mock.patch('vcr.cassette.save_cassette'):

+ # Baseline: path should be unchanged
+ vcr = VCR()
+ with vcr.use_cassette('test') as cassette:
+ assert cassette._path == 'test'

+ # Regression test: path_transformer=None should do the same.
+ vcr = VCR(path_transformer=None)
+ with vcr.use_cassette('test') as cassette:
+ assert cassette._path == 'test'

+ # and it should still work with cassette_library_dir
+ vcr = VCR(cassette_library_dir='/foo')
+ with vcr.use_cassette('test') as cassette:
+ assert cassette._path == '/foo/test'


@pytest.fixture
def random_fixture():
return 1
@@ -243,6 +295,7 @@ def test_path_transformer():

def test_cassette_name_generator_defaults_to_using_module_function_defined_in():
vcr = VCR(inject_cassette=True)

@vcr.use_cassette
def function_name(cassette):
assert cassette._path == os.path.join(os.path.dirname(__file__),
@@ -274,3 +327,40 @@ def test_additional_matchers():

function_defaults()
function_additional()


+ def test_decoration_should_respect_function_return_value():
+ vcr = VCR()
+ ret = 'a-return-value'

+ @vcr.use_cassette
+ def function_with_return():
+ return ret

+ assert ret == function_with_return()


+ class TestVCRClass(VCR().test_case()):

+ def no_decoration(self):
+ assert httplib.HTTPConnection == _HTTPConnection
+ self.test_dynamically_added()
+ assert httplib.HTTPConnection == _HTTPConnection

+ def test_one(self):
+ with force_reset():
+ self.no_decoration()
+ with force_reset():
+ self.test_two()
+ assert httplib.HTTPConnection != _HTTPConnection

+ def test_two(self):
+ assert httplib.HTTPConnection != _HTTPConnection


+ def test_dynamically_added(self):
+ assert httplib.HTTPConnection != _HTTPConnection


+ TestVCRClass.test_dynamically_added = test_dynamically_added
+ del test_dynamically_added
tox.ini (35 lines changed)
@@ -1,19 +1,23 @@
[tox]
- envlist = {py26,py27,py33,py34,pypy}-{requests27,requests26,requests25,requests24,requests23,requests22,requests1,httplib2,urllib317,urllib319,urllib3110,tornado,boto}
+ envlist = {py26,py27,py33,py34,pypy,pypy3}-{flakes,requests27,requests26,requests25,requests24,requests23,requests22,requests1,httplib2,urllib317,urllib319,urllib3110,tornado3,tornado4,boto,boto3}

+ [testenv:flakes]
+ skipsdist = True
+ commands =
+ flake8 --version
+ flake8 --exclude="./docs/conf.py"
+ pyflakes ./docs/conf.py
+ deps = flake8

[testenv]
commands =
- py.test {posargs}
+ ./runtests.sh {posargs}
- basepython =
- py26: python2.6
- py27: python2.7
- py33: python3.3
- py34: python3.4
- pypy: pypy
deps =
+ # httpbin fails with latest Flask, so we pin it
+ Flask==0.10.1
mock
pytest
- pytest-localserver
+ pytest-httpbin
PyYAML
requests1: requests==1.2.3
requests27: requests==2.7.0
@@ -26,7 +30,14 @@ deps =
urllib317: urllib3==1.7.1
urllib319: urllib3==1.9.1
urllib3110: urllib3==1.10.2
- {py26,py27,py33,py34,pypy}-tornado: tornado
+ {py26,py27,py33,py34,pypy}-tornado3: tornado>=3,<4
- {py26,py27,py33,py34,pypy}-tornado: pytest-tornado
+ {py26,py27,py33,py34,pypy}-tornado4: tornado>=4,<5
- {py26,py27,py33,py34}-tornado: pycurl
+ {py26,py27,py33,py34,pypy}-tornado3: pytest-tornado
+ {py26,py27,py33,py34,pypy}-tornado4: pytest-tornado
+ {py26,py27,py33,py34}-tornado3: pycurl
+ {py26,py27,py33,py34}-tornado4: pycurl
boto: boto
+ boto3: boto3

+ [flake8]
+ max_line_length = 110
@@ -75,7 +75,7 @@ class CassetteContextDecorator(object):
lambda key, _: key in self._non_cassette_arguments,
self._args_getter()
)
- if 'path_transformer' in other_kwargs:
+ if other_kwargs.get('path_transformer'):
transformer = other_kwargs['path_transformer']
cassette_kwargs['path'] = transformer(cassette_kwargs['path'])
self.__finish = self._patch_generator(self.cls.load(**cassette_kwargs))
@@ -127,7 +127,7 @@ class CassetteContextDecorator(object):

def _handle_function(self, function, args, kwargs):
with self as cassette:
- self.__handle_function(cassette, function, args, kwargs)
+ return self.__handle_function(cassette, function, args, kwargs)

@staticmethod
def get_function_name(function):
@@ -211,6 +211,8 @@ class Cassette(object):
if not request:
return
response = self._before_record_response(response)
+ if response is None:
+ return
self.data.append((request, response))
self.dirty = True

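A minimal usage sketch of the new early return above (not part of this change set): a before_record_response hook can now veto recording of an interaction by returning None. The hook name and size threshold below are illustrative only.

from vcr import VCR

def drop_large_responses(response):
    # `response` is the recorded-response dict; returning None makes
    # Cassette.append() skip the whole interaction instead of storing it.
    length = int(response['headers'].get('content-length', ['0'])[0])
    if length > 1000000:
        return None
    return response

my_vcr = VCR(before_record_response=drop_large_responses)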
@@ -2,19 +2,25 @@ import copy
import functools
import inspect
import os
+ import types

import six

from .compat import collections
from .cassette import Cassette
from .serializers import yamlserializer, jsonserializer
- from .util import compose
+ from .util import compose, auto_decorate
from . import matchers
from . import filters


class VCR(object):

+ @staticmethod
+ def is_test_method(method_name, function):
+ return method_name.startswith('test') and \
+ isinstance(function, types.FunctionType)

@staticmethod
def ensure_suffix(suffix):
def ensure(path):
@@ -23,13 +29,14 @@ class VCR(object):
return path
return ensure

- def __init__(self, path_transformer=lambda x: x, before_record_request=None,
+ def __init__(self, path_transformer=None, before_record_request=None,
custom_patches=(), filter_query_parameters=(), ignore_hosts=(),
record_mode="once", ignore_localhost=False, filter_headers=(),
before_record_response=None, filter_post_data_parameters=(),
match_on=('method', 'scheme', 'host', 'port', 'path', 'query'),
before_record=None, inject_cassette=False, serializer='yaml',
- cassette_library_dir=None, func_path_generator=None):
+ cassette_library_dir=None, func_path_generator=None,
+ decode_compressed_response=False):
self.serializer = serializer
self.match_on = match_on
self.cassette_library_dir = cassette_library_dir
@@ -61,6 +68,7 @@ class VCR(object):
self.inject_cassette = inject_cassette
self.path_transformer = path_transformer
self.func_path_generator = func_path_generator
+ self.decode_compressed_response = decode_compressed_response
self._custom_patches = tuple(custom_patches)

def _get_serializer(self, serializer_name):
@@ -108,7 +116,7 @@ class VCR(object):
matcher_names = kwargs.get('match_on', self.match_on)
path_transformer = kwargs.get(
'path_transformer',
- self.path_transformer or self.ensure_suffix('.yaml')
+ self.path_transformer
)
func_path_generator = kwargs.get(
'func_path_generator',
@@ -157,7 +165,12 @@ class VCR(object):
before_record_response = options.get(
'before_record_response', self.before_record_response
)
+ decode_compressed_response = options.get(
+ 'decode_compressed_response', self.decode_compressed_response
+ )
filter_functions = []
+ if decode_compressed_response:
+ filter_functions.append(filters.decode_response)
if before_record_response:
if not isinstance(before_record_response, collections.Iterable):
before_record_response = (before_record_response,)
@@ -193,22 +206,28 @@ class VCR(object):
'ignore_localhost', self.ignore_localhost
)
if filter_headers:
+ replacements = [h if isinstance(h, tuple) else (h, None)
+ for h in filter_headers]
filter_functions.append(
functools.partial(
- filters.remove_headers,
+ filters.replace_headers,
- headers_to_remove=filter_headers
+ replacements=replacements,
)
)
if filter_query_parameters:
+ replacements = [p if isinstance(p, tuple) else (p, None)
+ for p in filter_query_parameters]
filter_functions.append(functools.partial(
- filters.remove_query_parameters,
+ filters.replace_query_parameters,
- query_parameters_to_remove=filter_query_parameters
+ replacements=replacements,
))
if filter_post_data_parameters:
+ replacements = [p if isinstance(p, tuple) else (p, None)
+ for p in filter_post_data_parameters]
filter_functions.append(
functools.partial(
- filters.remove_post_data_parameters,
+ filters.replace_post_data_parameters,
- post_data_parameters_to_remove=filter_post_data_parameters
+ replacements=replacements,
)
)

@@ -250,3 +269,7 @@ class VCR(object):

def register_matcher(self, name, matcher):
self.matchers[name] = matcher

+ def test_case(self, predicate=None):
+ predicate = predicate or self.is_test_method
+ return six.with_metaclass(auto_decorate(self.use_cassette, predicate))
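Taken together, the config changes above let plain strings keep their old "remove this value" meaning while (name, replacement) tuples substitute a value instead, and they add the decode_compressed_response switch. A hedged usage sketch; the cassette path and the header and parameter names are made up for illustration:

from vcr import VCR

my_vcr = VCR(
    # a bare name still removes the header/parameter; a tuple replaces its value
    filter_headers=('cookie', ('authorization', 'XXX')),
    filter_query_parameters=('api_key', ('user', 'anonymous')),
    filter_post_data_parameters=('password',),
    # store gzip/deflate response bodies decompressed in the cassette
    decode_compressed_response=True,
)

with my_vcr.use_cassette('fixtures/example.yaml'):
    pass  # issue HTTP requests here; the filters run before recording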
vcr/filters.py (182 lines changed)
@@ -2,52 +2,162 @@ from six import BytesIO, text_type
from six.moves.urllib.parse import urlparse, urlencode, urlunparse
import copy
import json
+ import zlib

- from .compat import collections
+ from .util import CaseInsensitiveDict


+ def replace_headers(request, replacements):
+ """
+ Replace headers in request according to replacements. The replacements
+ should be a list of (key, value) pairs where the value can be any of:
+ 1. A simple replacement string value.
+ 2. None to remove the given header.
+ 3. A callable which accepts (key, value, request) and returns a string
+ value or None.
+ """
+ new_headers = request.headers.copy()
+ for k, rv in replacements:
+ if k in new_headers:
+ ov = new_headers.pop(k)
+ if callable(rv):
+ rv = rv(key=k, value=ov, request=request)
+ if rv is not None:
+ new_headers[k] = rv
+ request.headers = new_headers
+ return request


def remove_headers(request, headers_to_remove):
- headers = copy.copy(request.headers)
+ """
- headers_to_remove = [h.lower() for h in headers_to_remove]
+ Wrap replace_headers() for API backward compatibility.
- keys = [k for k in headers if k.lower() in headers_to_remove]
+ """
- if keys:
+ replacements = [(k, None) for k in headers_to_remove]
- for k in keys:
+ return replace_headers(request, replacements)
- headers.pop(k)
- request.headers = headers
+ def replace_query_parameters(request, replacements):
+ """
+ Replace query parameters in request according to replacements. The
+ replacements should be a list of (key, value) pairs where the value can be
+ any of:
+ 1. A simple replacement string value.
+ 2. None to remove the given header.
+ 3. A callable which accepts (key, value, request) and returns a string
+ value or None.
+ """
+ query = request.query
+ new_query = []
+ replacements = dict(replacements)
+ for k, ov in query:
+ if k not in replacements:
+ new_query.append((k, ov))
+ else:
+ rv = replacements[k]
+ if callable(rv):
+ rv = rv(key=k, value=ov, request=request)
+ if rv is not None:
+ new_query.append((k, rv))
+ uri_parts = list(urlparse(request.uri))
+ uri_parts[4] = urlencode(new_query)
+ request.uri = urlunparse(uri_parts)
return request


def remove_query_parameters(request, query_parameters_to_remove):
- query = request.query
+ """
- new_query = [(k, v) for (k, v) in query
+ Wrap replace_query_parameters() for API backward compatibility.
- if k not in query_parameters_to_remove]
+ """
- if len(new_query) != len(query):
+ replacements = [(k, None) for k in query_parameters_to_remove]
- uri_parts = list(urlparse(request.uri))
+ return replace_query_parameters(request, replacements)
- uri_parts[4] = urlencode(new_query)
- request.uri = urlunparse(uri_parts)
+ def replace_post_data_parameters(request, replacements):
+ """
+ Replace post data in request--either form data or json--according to
+ replacements. The replacements should be a list of (key, value) pairs where
+ the value can be any of:
+ 1. A simple replacement string value.
+ 2. None to remove the given header.
+ 3. A callable which accepts (key, value, request) and returns a string
+ value or None.
+ """
+ replacements = dict(replacements)
+ if request.method == 'POST' and not isinstance(request.body, BytesIO):
+ if request.headers.get('Content-Type') == 'application/json':
+ json_data = json.loads(request.body.decode('utf-8'))
+ for k, rv in replacements.items():
+ if k in json_data:
+ ov = json_data.pop(k)
+ if callable(rv):
+ rv = rv(key=k, value=ov, request=request)
+ if rv is not None:
+ json_data[k] = rv
+ request.body = json.dumps(json_data).encode('utf-8')
+ else:
+ if isinstance(request.body, text_type):
+ request.body = request.body.encode('utf-8')
+ splits = [p.partition(b'=') for p in request.body.split(b'&')]
+ new_splits = []
+ for k, sep, ov in splits:
+ if sep is None:
+ new_splits.append((k, sep, ov))
+ else:
+ rk = k.decode('utf-8')
+ if rk not in replacements:
+ new_splits.append((k, sep, ov))
+ else:
+ rv = replacements[rk]
+ if callable(rv):
+ rv = rv(key=rk, value=ov.decode('utf-8'),
+ request=request)
+ if rv is not None:
+ new_splits.append((k, sep, rv.encode('utf-8')))
+ request.body = b'&'.join(k if sep is None else b''.join([k, sep, v])
+ for k, sep, v in new_splits)
return request


def remove_post_data_parameters(request, post_data_parameters_to_remove):
- if request.method == 'POST' and not isinstance(request.body, BytesIO):
+ """
- if ('Content-Type' in request.headers and
+ Wrap replace_post_data_parameters() for API backward compatibility.
- request.headers['Content-Type'] == 'application/json'):
+ """
- json_data = json.loads(request.body.decode('utf-8'))
+ replacements = [(k, None) for k in post_data_parameters_to_remove]
- for k in list(json_data.keys()):
+ return replace_post_data_parameters(request, replacements)
- if k in post_data_parameters_to_remove:
- del json_data[k]
- request.body = json.dumps(json_data).encode('utf-8')
- else:
- post_data = collections.OrderedDict()
- if isinstance(request.body, text_type):
- request.body = request.body.encode('utf-8')

- for k, sep, v in (p.partition(b'=') for p in request.body.split(b'&')):
- if k in post_data:
+ def decode_response(response):
- post_data[k].append(v)
+ """
- elif len(k) > 0 and k.decode('utf-8') not in post_data_parameters_to_remove:
+ If the response is compressed with gzip or deflate:
- post_data[k] = [v]
+ 1. decompress the response body
- request.body = b'&'.join(
+ 2. delete the content-encoding header
- b'='.join([k, v])
+ 3. update content-length header to decompressed length
- for k, vals in post_data.items() for v in vals)
+ """
- return request
+ def is_compressed(headers):
+ encoding = headers.get('content-encoding', [])
+ return encoding and encoding[0] in ('gzip', 'deflate')

+ def decompress_body(body, encoding):
+ """Returns decompressed body according to encoding using zlib.
+ to (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16
+ """
+ if encoding == 'gzip':
+ return zlib.decompress(body, zlib.MAX_WBITS | 16)
+ else:  # encoding == 'deflate'
+ return zlib.decompress(body)

+ # Deepcopy here in case `headers` contain objects that could
+ # be mutated by a shallow copy and corrupt the real response.
+ response = copy.deepcopy(response)
+ headers = CaseInsensitiveDict(response['headers'])
+ if is_compressed(headers):
+ encoding = headers['content-encoding'][0]
+ headers['content-encoding'].remove(encoding)
+ if not headers['content-encoding']:
+ del headers['content-encoding']

+ new_body = decompress_body(response['body']['string'], encoding)
+ response['body']['string'] = new_body
+ headers['content-length'] = [str(len(new_body))]
+ response['headers'] = dict(headers)
+ return response
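For reference, the new replacement API added above can also be called directly on a vcr.request.Request, mirroring the unit tests earlier in this change set. A short sketch; the URL, header, and parameter values are illustrative only:

from vcr.filters import replace_headers, replace_query_parameters
from vcr.request import Request

request = Request('GET', 'http://example.com/?token=secret&page=2', '',
                  {'authorization': ['Bearer abc123']})

replace_headers(request, [('authorization', None)])    # None removes the header
replace_query_parameters(request, [('token', 'XXX')])  # a string masks the value

assert 'authorization' not in request.headers
assert ('token', 'XXX') in request.query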
@@ -1,6 +1,6 @@
import json
from six.moves import urllib, xmlrpc_client
- from .util import CaseInsensitiveDict, read_body
+ from .util import read_body
import logging


@@ -66,9 +66,9 @@ def _identity(x):


def _get_transformer(request):
- headers = CaseInsensitiveDict(request.headers)
for checker, transformer in _checker_transformer_pairs:
- if checker(headers): return transformer
+ if checker(request.headers):
+ return transformer
else:
return _identity

@@ -19,7 +19,7 @@ import sys
import tempfile
import yaml

- from .serializers import compat, yamlserializer, jsonserializer
+ from .serializers import yamlserializer, jsonserializer
from .serialize import serialize
from . import request
from .stubs.compat import get_httpmessage
63
vcr/patch.py
63
vcr/patch.py
@@ -22,6 +22,16 @@ else:
|
|||||||
_cpoolHTTPConnection = cpool.HTTPConnection
|
_cpoolHTTPConnection = cpool.HTTPConnection
|
||||||
_cpoolHTTPSConnection = cpool.HTTPSConnection
|
_cpoolHTTPSConnection = cpool.HTTPSConnection
|
||||||
|
|
||||||
|
# Try to save the original types for boto3
|
||||||
|
try:
|
||||||
|
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
_Boto3VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
|
||||||
|
_cpoolBoto3HTTPConnection = cpool.HTTPConnection
|
||||||
|
_cpoolBoto3HTTPSConnection = cpool.HTTPSConnection
|
||||||
|
|
||||||
|
|
||||||
# Try to save the original types for urllib3
|
# Try to save the original types for urllib3
|
||||||
try:
|
try:
|
||||||
@@ -59,7 +69,7 @@ except ImportError: # pragma: no cover
|
|||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
_SimpleAsyncHTTPClient_fetch_impl = \
|
_SimpleAsyncHTTPClient_fetch_impl = \
|
||||||
tornado.simple_httpclient.SimpleAsyncHTTPClient.fetch_impl
|
tornado.simple_httpclient.SimpleAsyncHTTPClient.fetch_impl
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -68,7 +78,7 @@ except ImportError: # pragma: no cover
|
|||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
_CurlAsyncHTTPClient_fetch_impl = \
|
_CurlAsyncHTTPClient_fetch_impl = \
|
||||||
tornado.curl_httpclient.CurlAsyncHTTPClient.fetch_impl
|
tornado.curl_httpclient.CurlAsyncHTTPClient.fetch_impl
|
||||||
|
|
||||||
|
|
||||||
class CassettePatcherBuilder(object):
|
class CassettePatcherBuilder(object):
|
||||||
@@ -87,7 +97,7 @@ class CassettePatcherBuilder(object):
|
|||||||
|
|
||||||
def build(self):
|
def build(self):
|
||||||
return itertools.chain(
|
return itertools.chain(
|
||||||
self._httplib(), self._requests(), self._urllib3(),
|
self._httplib(), self._requests(), self._boto3(), self._urllib3(),
|
||||||
self._httplib2(), self._boto(), self._tornado(),
|
self._httplib2(), self._boto(), self._tornado(),
|
||||||
self._build_patchers_from_mock_triples(
|
self._build_patchers_from_mock_triples(
|
||||||
self._cassette.custom_patches
|
self._cassette.custom_patches
|
||||||
@@ -127,13 +137,13 @@ class CassettePatcherBuilder(object):
|
|||||||
described in the previous paragraph.
|
described in the previous paragraph.
|
||||||
"""
|
"""
|
||||||
if isinstance(replacement_dict_or_obj, dict):
|
if isinstance(replacement_dict_or_obj, dict):
|
||||||
for key, replacement_obj in replacement_dict_or_obj.items():
|
for key, replacement_obj in replacement_dict_or_obj.items():
|
||||||
replacement_obj = self._recursively_apply_get_cassette_subclass(
|
replacement_obj = self._recursively_apply_get_cassette_subclass(
|
||||||
replacement_obj)
|
replacement_obj)
|
||||||
replacement_dict_or_obj[key] = replacement_obj
|
replacement_dict_or_obj[key] = replacement_obj
|
||||||
return replacement_dict_or_obj
|
return replacement_dict_or_obj
|
||||||
if hasattr(replacement_dict_or_obj, 'cassette'):
|
if hasattr(replacement_dict_or_obj, 'cassette'):
|
||||||
replacement_dict_or_obj = self._get_cassette_subclass(
|
replacement_dict_or_obj = self._get_cassette_subclass(
|
||||||
replacement_dict_or_obj)
|
replacement_dict_or_obj)
|
||||||
return replacement_dict_or_obj
|
return replacement_dict_or_obj
|
||||||
|
|
||||||
@@ -147,7 +157,7 @@ class CassettePatcherBuilder(object):
|
|||||||
|
|
||||||
def _build_cassette_subclass(self, base_class):
|
def _build_cassette_subclass(self, base_class):
|
||||||
bases = (base_class,)
|
bases = (base_class,)
|
||||||
if not issubclass(base_class, object): # Check for old style class
|
if not issubclass(base_class, object): # Check for old style class
|
||||||
bases += (object,)
|
bases += (object,)
|
||||||
return type('{0}{1}'.format(base_class.__name__, self._cassette._path),
|
return type('{0}{1}'.format(base_class.__name__, self._cassette._path),
|
||||||
bases, dict(cassette=self._cassette))
|
bases, dict(cassette=self._cassette))
|
||||||
@@ -165,13 +175,23 @@ class CassettePatcherBuilder(object):
|
|||||||
from .stubs import requests_stubs
|
from .stubs import requests_stubs
|
||||||
return self._urllib3_patchers(cpool, requests_stubs)
|
return self._urllib3_patchers(cpool, requests_stubs)
|
||||||
|
|
||||||
|
def _boto3(self):
|
||||||
|
try:
|
||||||
|
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
|
||||||
|
except ImportError: # pragma: no cover
|
||||||
|
return ()
|
||||||
|
from .stubs import boto3_stubs
|
||||||
|
return self._urllib3_patchers(cpool, boto3_stubs)
|
||||||
|
|
||||||
def _patched_get_conn(self, connection_pool_class, connection_class_getter):
|
def _patched_get_conn(self, connection_pool_class, connection_class_getter):
|
||||||
get_conn = connection_pool_class._get_conn
|
get_conn = connection_pool_class._get_conn
|
||||||
|
|
||||||
@functools.wraps(get_conn)
|
@functools.wraps(get_conn)
|
||||||
def patched_get_conn(pool, timeout=None):
|
def patched_get_conn(pool, timeout=None):
|
||||||
connection = get_conn(pool, timeout)
|
connection = get_conn(pool, timeout)
|
||||||
connection_class = pool.ConnectionCls if hasattr(pool, 'ConnectionCls') \
|
connection_class = (
|
||||||
else connection_class_getter()
|
pool.ConnectionCls if hasattr(pool, 'ConnectionCls')
|
||||||
|
else connection_class_getter())
|
||||||
# We need to make sure that we are actually providing a
|
# We need to make sure that we are actually providing a
|
||||||
# patched version of the connection class. This might not
|
# patched version of the connection class. This might not
|
||||||
# always be the case because the pool keeps previously
|
# always be the case because the pool keeps previously
|
||||||
@@ -181,15 +201,18 @@ class CassettePatcherBuilder(object):
             while not isinstance(connection, connection_class):
                 connection = get_conn(pool, timeout)
             return connection

         return patched_get_conn

     def _patched_new_conn(self, connection_pool_class, connection_remover):
         new_conn = connection_pool_class._new_conn

         @functools.wraps(new_conn)
         def patched_new_conn(pool):
             new_connection = new_conn(pool)
             connection_remover.add_connection_to_pool_entry(pool, new_connection)
             return new_connection

         return patched_new_conn

     def _urllib3(self):
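
Aside (illustrative, not part of the patch): the isinstance loop in patched_get_conn drains connections that the pool created before patching, so only connections of the patched class are handed back. The same idea in isolation, with a deque standing in for the pool:

    import collections

    class PatchedConnection(object):
        pass

    # a pool still holding two connections created before patching
    pool = collections.deque([object(), object(), PatchedConnection()])

    connection = pool.popleft()
    while not isinstance(connection, PatchedConnection):
        connection = pool.popleft()  # keep draining until a patched one appears

    assert isinstance(connection, PatchedConnection)
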
@@ -270,10 +293,10 @@ class CassettePatcherBuilder(object):
         # connections of the appropriate type.
         mock_triples += ((cpool.HTTPConnectionPool, '_get_conn',
                           self._patched_get_conn(cpool.HTTPConnectionPool,
-                                                 lambda : cpool.HTTPConnection)),
+                                                 lambda: cpool.HTTPConnection)),
                          (cpool.HTTPSConnectionPool, '_get_conn',
                           self._patched_get_conn(cpool.HTTPSConnectionPool,
-                                                 lambda : cpool.HTTPSConnection)),
+                                                 lambda: cpool.HTTPSConnection)),
                          (cpool.HTTPConnectionPool, '_new_conn',
                           self._patched_new_conn(cpool.HTTPConnectionPool,
                                                  http_connection_remover)),
@@ -348,6 +371,24 @@ def reset_patchers():
         yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls', _HTTPConnection)
         yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls', _HTTPSConnection)

+    try:
+        import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
+    except ImportError: # pragma: no cover
+        pass
+    else:
+        # unpatch requests v1.x
+        yield mock.patch.object(cpool, 'VerifiedHTTPSConnection', _Boto3VerifiedHTTPSConnection)
+        yield mock.patch.object(cpool, 'HTTPConnection', _cpoolBoto3HTTPConnection)
+        # unpatch requests v2.x
+        if hasattr(cpool.HTTPConnectionPool, 'ConnectionCls'):
+            yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls',
+                                    _cpoolBoto3HTTPConnection)
+            yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls',
+                                    _cpoolBoto3HTTPSConnection)
+
+        if hasattr(cpool, 'HTTPSConnection'):
+            yield mock.patch.object(cpool, 'HTTPSConnection', _cpoolBoto3HTTPSConnection)
+
     try:
         import httplib2 as cpool
     except ImportError: # pragma: no cover
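
For reference, the new boto3 support is driven through the same cassette entry point as everything else. A minimal usage sketch (assumes boto3/botocore with its vendored requests stack, plus AWS credentials on the first, recording run; the cassette path is made up):

    import boto3
    import vcr

    with vcr.use_cassette('fixtures/s3_list_buckets.yaml'):
        # first run records the botocore HTTP calls, later runs replay them
        buckets = boto3.client('s3').list_buckets()
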
@@ -382,7 +423,7 @@ def reset_patchers():
     else:
         yield mock.patch.object(
             curl.CurlAsyncHTTPClient,
             'fetch_impl',
             _CurlAsyncHTTPClient_fetch_impl,
         )

@@ -1,27 +1,12 @@
+import warnings
 from six import BytesIO, text_type
 from six.moves.urllib.parse import urlparse, parse_qsl
+from .util import CaseInsensitiveDict


 class Request(object):
     """
     VCR's representation of a request.
-
-    There is a weird quirk in HTTP. You can send the same header twice. For
-    this reason, headers are represented by a dict, with lists as the values.
-    However, it appears that HTTPlib is completely incapable of sending the
-    same header twice. This puts me in a weird position: I want to be able to
-    accurately represent HTTP headers in cassettes, but I don't want the extra
-    step of always having to do [0] in the general case, i.e.
-    request.headers['key'][0]
-
-    In addition, some servers sometimes send the same header more than once,
-    and httplib *can* deal with this situation.
-
-    Futhermore, I wanted to keep the request and response cassette format as
-    similar as possible.
-
-    For this reason, in cassettes I keep a dict with lists as keys, but once
-    deserialized into VCR, I keep them as plain, naked dicts.
     """

     def __init__(self, method, uri, body, headers):
@@ -32,9 +17,17 @@ class Request(object):
             self.body = body.read()
         else:
             self.body = body
-        self.headers = {}
-        for key in headers:
-            self.add_header(key, headers[key])
+        self.headers = headers
+
+    @property
+    def headers(self):
+        return self._headers
+
+    @headers.setter
+    def headers(self, value):
+        if not isinstance(value, HeadersDict):
+            value = HeadersDict(value)
+        self._headers = value

     @property
     def body(self):
@@ -47,11 +40,10 @@ class Request(object):
         self._body = value

     def add_header(self, key, value):
-        # see class docstring for an explanation
-        if isinstance(value, (tuple, list)):
-            self.headers[key] = value[0]
-        else:
-            self.headers[key] = value
+        warnings.warn("Request.add_header is deprecated. "
+                      "Please assign to request.headers instead.",
+                      DeprecationWarning)
+        self.headers[key] = value

     @property
     def scheme(self):
@@ -105,3 +97,35 @@ class Request(object):
     @classmethod
     def _from_dict(cls, dct):
         return Request(**dct)
+
+
+class HeadersDict(CaseInsensitiveDict):
+    """
+    There is a weird quirk in HTTP. You can send the same header twice. For
+    this reason, headers are represented by a dict, with lists as the values.
+    However, it appears that HTTPlib is completely incapable of sending the
+    same header twice. This puts me in a weird position: I want to be able to
+    accurately represent HTTP headers in cassettes, but I don't want the extra
+    step of always having to do [0] in the general case, i.e.
+    request.headers['key'][0]
+
+    In addition, some servers sometimes send the same header more than once,
+    and httplib *can* deal with this situation.
+
+    Futhermore, I wanted to keep the request and response cassette format as
+    similar as possible.
+
+    For this reason, in cassettes I keep a dict with lists as keys, but once
+    deserialized into VCR, I keep them as plain, naked dicts.
+    """
+
+    def __setitem__(self, key, value):
+        if isinstance(value, (tuple, list)):
+            value = value[0]
+
+        # Preserve the case from the first time this key was set.
+        old = self._store.get(key.lower())
+        if old:
+            key = old[0]
+
+        super(HeadersDict, self).__setitem__(key, value)
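
A sketch of the resulting header behaviour (illustrative; assumes the class is importable as vcr.request.Request, its usual location):

    from vcr.request import Request

    req = Request('GET', 'http://example.com/', None, {})
    req.headers = {'Content-Type': ['application/json']}  # plain dicts are coerced to HeadersDict
    req.headers['ACCEPT'] = ('text/html',)                # tuple/list values collapse to the first item

    assert req.headers['content-type'] == 'application/json'  # lookups are case-insensitive
    assert req.headers['accept'] == 'text/html'
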
@@ -50,7 +50,7 @@ def deserialize(cassette_string, serializer):

 def serialize(cassette_dict, serializer):
     interactions = ([{
-        'request': request._to_dict(),
+        'request': compat.convert_to_unicode(request._to_dict()),
         'response': compat.convert_to_unicode(response),
     } for request, response in zip(
         cassette_dict['requests'],
@@ -17,7 +17,7 @@ def convert_body_to_bytes(resp):

     By default yaml serializes to utf-8 encoded bytestrings.
     When this cassette is loaded by python3, it's automatically decoded
     into unicode strings. This makes sure that it stays a bytestring, since
     that's what all the internal httplib machinery is expecting.

     For more info on py3 yaml:
@@ -37,19 +37,43 @@ def convert_body_to_bytes(resp):
     return resp


+def _convert_string_to_unicode(string):
+    """
+    If the string is bytes, decode it to a string (for python3 support)
+    """
+    result = string
+
+    try:
+        if string is not None and not isinstance(string, six.text_type):
+            result = string.decode('utf-8')
+    except (TypeError, UnicodeDecodeError, AttributeError):
+        # Sometimes the string actually is binary or StringIO object,
+        # so if you can't decode it, just give up.
+        pass
+
+    return result
+
+
 def convert_body_to_unicode(resp):
     """
-    If the request body is bytes, decode it to a string (for python3 support)
+    If the request or responses body is bytes, decode it to a string
+    (for python3 support)
     """
-    try:
-        if not isinstance(resp['body']['string'], six.text_type):
-            resp['body']['string'] = resp['body']['string'].decode('utf-8')
-    except (KeyError, TypeError, UnicodeDecodeError):
-        # The thing we were converting either wasn't a dictionary or didn't
-        # have the keys we were expecting. Some of the tests just serialize
-        # and deserialize a string.
-
-        # Also, sometimes the thing actually is binary, so if you can't decode
-        # it, just give up.
-        pass
+    if type(resp) is not dict:
+        # Some of the tests just serialize and deserialize a string.
+        return _convert_string_to_unicode(resp)
+    else:
+        body = resp.get('body')
+
+        if body is not None:
+            try:
+                body['string'] = _convert_string_to_unicode(
+                    body['string']
+                )
+            except (KeyError, TypeError, AttributeError):
+                # The thing we were converting either wasn't a dictionary or
+                # didn't have the keys we were expecting.
+                # For example request object has no 'string' key.
+                resp['body'] = _convert_string_to_unicode(body)
+
     return resp
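
The helper's behaviour in isolation (illustrative; assumes the module is importable as vcr.serializers.compat, its usual location):

    from vcr.serializers import compat

    assert compat._convert_string_to_unicode(b'hello') == u'hello'        # bytes are decoded
    assert compat._convert_string_to_unicode(u'hello') == u'hello'        # text passes through
    assert compat._convert_string_to_unicode(None) is None                # so does None
    assert compat._convert_string_to_unicode(b'\xff\xfe') == b'\xff\xfe'  # undecodable input is left alone
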
@@ -9,16 +9,21 @@ def deserialize(cassette_string):


 def serialize(cassette_dict):
+    error_message = (
+        "Does this HTTP interaction contain binary data? "
+        "If so, use a different serializer (like the yaml serializer) "
+        "for this request?"
+    )
+
     try:
         return json.dumps(cassette_dict, indent=4)
-    except UnicodeDecodeError as original:
+    except UnicodeDecodeError as original:  # py2
         raise UnicodeDecodeError(
             original.encoding,
             b"Error serializing cassette to JSON",
             original.start,
             original.end,
-            original.args[-1] +
-            ("Does this HTTP interaction contain binary data? "
-             "If so, use a different serializer (like the yaml serializer) "
-             "for this request?")
+            original.args[-1] + error_message
         )
+    except TypeError as original:  # py3
+        raise TypeError(error_message)
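
What the new TypeError branch guards against (illustrative): on Python 3, json.dumps raises TypeError for bytes values, so a cassette containing a binary body cannot be serialized to JSON at all.

    import json

    try:
        json.dumps({'body': {'string': b'\x89PNG\r\n'}})  # binary response body
    except TypeError:
        print("Does this HTTP interaction contain binary data? "
              "If so, use a different serializer (like the yaml serializer) for this request?")
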
@@ -1,15 +1,10 @@
 '''Stubs for patching HTTP and HTTPS requests'''

-try:
-    import http.client
-except ImportError:
-    pass
 import logging
 import six
 from six.moves.http_client import (
     HTTPConnection,
     HTTPSConnection,
-    HTTPMessage,
     HTTPResponse,
 )
 from six import BytesIO
@@ -188,8 +183,7 @@ class VCRConnection(object):
         log.debug('Got {0}'.format(self._vcr_request))

     def putheader(self, header, *values):
-        for value in values:
-            self._vcr_request.add_header(header, value)
+        self._vcr_request.headers[header] = values

     def send(self, data):
         '''
@@ -197,7 +191,8 @@ class VCRConnection(object):
         body of the request. So if that happens, let's just append the data
         onto the most recent request in the cassette.
         '''
-        self._vcr_request.body = (self._vcr_request.body or '') + data
+        self._vcr_request.body = self._vcr_request.body + data \
+            if self._vcr_request.body else data

     def close(self):
         # Note: the real connection will only close if it's open, so
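
Why the send() change matters (illustrative): on Python 3 the old fallback concatenated the str '' with a bytes payload, which raises TypeError; the new form keeps the payload's own type.

    data = b'payload'
    body = None

    new_body = body + data if body else data  # new behaviour: bytes stay bytes
    assert new_body == b'payload'

    # old behaviour was roughly (body or '') + data, which raises TypeError
    # on Python 3 when data is bytes
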
@@ -307,7 +302,7 @@ class VCRConnection(object):

     def __init__(self, *args, **kwargs):
         if six.PY3:
             kwargs.pop('strict', None) # apparently this is gone in py3

         # need to temporarily reset here because the real connection
         # inherits from the thing that we are mocking out. Take out
@@ -316,6 +311,27 @@ class VCRConnection(object):
         with force_reset():
             self.real_connection = self._baseclass(*args, **kwargs)

+    def __setattr__(self, name, value):
+        """
+        We need to define this because any attributes that are set on the
+        VCRConnection need to be propogated to the real connection.
+
+        For example, urllib3 will set certain attributes on the connection,
+        such as 'ssl_version'. These attributes need to get set on the real
+        connection to have the correct and expected behavior.
+
+        TODO: Separately setting the attribute on the two instances is not
+        ideal. We should switch to a proxying implementation.
+        """
+        try:
+            setattr(self.real_connection, name, value)
+        except AttributeError:
+            # raised if real_connection has not been set yet, such as when
+            # we're setting the real_connection itself for the first time
+            pass
+
+        super(VCRConnection, self).__setattr__(name, value)
+
+
 class VCRHTTPConnection(VCRConnection):
     '''A Mocked class for HTTP requests'''
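
The pattern the new __setattr__ implements, reduced to a stand-in class (illustrative only, not vcrpy API):

    class MirroringWrapper(object):
        def __init__(self, real):
            # go through object.__setattr__ so the mirroring below is skipped
            # while the wrapped object itself is being attached
            object.__setattr__(self, 'real', real)

        def __setattr__(self, name, value):
            setattr(self.real, name, value)  # mirror onto the wrapped object
            super(MirroringWrapper, self).__setattr__(name, value)  # and keep a local copy

    class FakeConnection(object):
        pass

    wrapper = MirroringWrapper(FakeConnection())
    wrapper.ssl_version = 'TLSv1_2'
    assert wrapper.real.ssl_version == 'TLSv1_2'
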
vcr/stubs/boto3_stubs.py (new file, 15 lines)
@@ -0,0 +1,15 @@
+'''Stubs for boto3'''
+
+from botocore.vendored.requests.packages.urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
+from ..stubs import VCRHTTPConnection, VCRHTTPSConnection
+
+# urllib3 defines its own HTTPConnection classes, which boto3 goes ahead and assumes
+# you're using. It includes some polyfills for newer features missing in older pythons.
+
+
+class VCRRequestsHTTPConnection(VCRHTTPConnection, HTTPConnection):
+    _baseclass = HTTPConnection
+
+
+class VCRRequestsHTTPSConnection(VCRHTTPSConnection, VerifiedHTTPSConnection):
+    _baseclass = VerifiedHTTPSConnection
@@ -41,6 +41,7 @@ class VCRHTTPSConnectionWithTimeout(VCRHTTPSConnection,
             'strict',
             'timeout',
             'source_address',
+            'ca_certs',
         ))
         unknown_keys = set(kwargs.keys()) - safe_keys
         safe_kwargs = kwargs.copy()
@@ -1,13 +1,19 @@
 '''Stubs for requests'''

-from requests.packages.urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
+try:
+    from requests.packages.urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
+except ImportError:
+    from urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
+

 from ..stubs import VCRHTTPConnection, VCRHTTPSConnection

 # urllib3 defines its own HTTPConnection classes, which requests goes ahead and assumes
 # you're using. It includes some polyfills for newer features missing in older pythons.
+
+
 class VCRRequestsHTTPConnection(VCRHTTPConnection, HTTPConnection):
     _baseclass = HTTPConnection


 class VCRRequestsHTTPSConnection(VCRHTTPSConnection, VerifiedHTTPSConnection):
     _baseclass = VerifiedHTTPSConnection
@@ -15,7 +15,7 @@ def vcr_fetch_impl(cassette, real_fetch_impl):

     @functools.wraps(real_fetch_impl)
     def new_fetch_impl(self, request, callback):
-        headers = dict(request.headers)
+        headers = request.headers.copy()
         if request.user_agent:
             headers.setdefault('User-Agent', request.user_agent)

@@ -23,7 +23,7 @@ def vcr_fetch_impl(cassette, real_fetch_impl):
         # yet supported.

         unsupported_call = (
-            request.body_producer is not None or
+            getattr(request, 'body_producer', None) is not None or
             request.header_callback is not None or
             request.streaming_callback is not None
         )
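
The getattr guard keeps the unsupported-feature check working on Tornado versions whose HTTPRequest predates body_producer (illustrative stand-in object):

    class OldHTTPRequest(object):  # stands in for an older Tornado HTTPRequest
        header_callback = None
        streaming_callback = None

    request = OldHTTPRequest()
    unsupported_call = (
        getattr(request, 'body_producer', None) is not None or  # no AttributeError here
        request.header_callback is not None or
        request.streaming_callback is not None
    )
    assert unsupported_call is False
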
@@ -6,8 +6,10 @@ from ..stubs import VCRHTTPConnection, VCRHTTPSConnection
 # urllib3 defines its own HTTPConnection classes. It includes some polyfills
 # for newer features missing in older pythons.
+
+
 class VCRRequestsHTTPConnection(VCRHTTPConnection, HTTPConnection):
     _baseclass = HTTPConnection


 class VCRRequestsHTTPSConnection(VCRHTTPSConnection, VerifiedHTTPSConnection):
     _baseclass = VerifiedHTTPSConnection
vcr/util.py (36 changed lines)
@@ -1,4 +1,6 @@
 import collections
+import types
+

 # Shamelessly stolen from https://github.com/kennethreitz/requests/blob/master/requests/structures.py
 class CaseInsensitiveDict(collections.MutableMapping):
@@ -69,6 +71,7 @@ class CaseInsensitiveDict(collections.MutableMapping):
     def __repr__(self):
         return str(dict(self.items()))

+
 def partition_dict(predicate, dictionary):
     true_dict = {}
     false_dict = {}
@@ -81,12 +84,41 @@ def partition_dict(predicate, dictionary):
 def compose(*functions):
     def composed(incoming):
         res = incoming
-        for function in functions[::-1]:
-            res = function(res)
+        for function in reversed(functions):
+            if function:
+                res = function(res)
         return res
     return composed


 def read_body(request):
     if hasattr(request.body, 'read'):
         return request.body.read()
     return request.body
+
+
+def auto_decorate(
+    decorator,
+    predicate=lambda name, value: isinstance(value, types.FunctionType)
+):
+    def maybe_decorate(attribute, value):
+        if predicate(attribute, value):
+            value = decorator(value)
+        return value
+
+    class DecorateAll(type):
+
+        def __setattr__(cls, attribute, value):
+            return super(DecorateAll, cls).__setattr__(
+                attribute, maybe_decorate(attribute, value)
+            )
+
+        def __new__(cls, name, bases, attributes_dict):
+            new_attributes_dict = dict(
+                (attribute, maybe_decorate(attribute, value))
+                for attribute, value in attributes_dict.items()
+            )
+            return super(DecorateAll, cls).__new__(
+                cls, name, bases, new_attributes_dict
+            )
+    return DecorateAll
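
A usage sketch for the new auto_decorate helper (illustrative; the decorator and class here are made up):

    import six
    from vcr.util import auto_decorate

    def log_call(fn):
        def wrapper(*args, **kwargs):
            print('calling %s' % fn.__name__)
            return fn(*args, **kwargs)
        return wrapper

    @six.add_metaclass(auto_decorate(log_call))
    class Greeter(object):
        def hello(self):
            return 'hello'

    Greeter().hello()  # prints "calling hello", then returns 'hello'
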