mirror of
https://github.com/kevin1024/vcrpy.git
synced 2025-12-09 09:13:23 +00:00
Compare commits
98 Commits
v1.6.0
...
better_log
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f821fed418 | ||
|
|
50246791e3 | ||
|
|
8a5bf23d34 | ||
|
|
f6b8e4f8e7 | ||
|
|
2ac3fa9abe | ||
|
|
1324feae99 | ||
|
|
7990c549d1 | ||
|
|
327797c4ff | ||
|
|
ac510097e0 | ||
|
|
00d973a0f5 | ||
|
|
79ff59feae | ||
|
|
34252bc234 | ||
|
|
5f78657c52 | ||
|
|
00b4e451fe | ||
|
|
44564ba39f | ||
|
|
7f02a7e999 | ||
|
|
c28adea66d | ||
|
|
3f006cc261 | ||
|
|
0eda8ba482 | ||
|
|
d620095c36 | ||
|
|
c8180326ad | ||
|
|
d55d593d1c | ||
|
|
04f4a7fd2f | ||
|
|
6fd04f3675 | ||
|
|
420f83b6b1 | ||
|
|
c6adcc83b3 | ||
|
|
dc61f5f520 | ||
|
|
4450cb992f | ||
|
|
083b1ec686 | ||
|
|
97c924d8dd | ||
|
|
65398131a4 | ||
|
|
7312229aef | ||
|
|
b62265c0ad | ||
|
|
d00c60a4ad | ||
|
|
4ddfb47c9c | ||
|
|
f0b7c3f1e0 | ||
|
|
646d12df94 | ||
|
|
eda64bc3be | ||
|
|
efe6744eda | ||
|
|
58f4b98f7f | ||
|
|
3305f0ca7d | ||
|
|
7f02d65dd9 | ||
|
|
3e5553c56a | ||
|
|
a569dd4dc8 | ||
|
|
eb1cdad03a | ||
|
|
08bb3bd187 | ||
|
|
ae5580c8f9 | ||
|
|
f342f92f03 | ||
|
|
be3bf39161 | ||
|
|
29d37e410a | ||
|
|
8b7e6c0ab8 | ||
|
|
bd7c6ed03f | ||
|
|
1e414826e7 | ||
|
|
1e1c093b3c | ||
|
|
bb8f563135 | ||
|
|
ca3200d96e | ||
|
|
04b5978adc | ||
|
|
01f1f9fdc1 | ||
|
|
a82e8628c2 | ||
|
|
7d68f0577a | ||
|
|
d0aa5fddb7 | ||
|
|
e54aeadc68 | ||
|
|
c4a33d1cff | ||
|
|
8b59d73f25 | ||
|
|
eb394b90d9 | ||
|
|
14931dd47a | ||
|
|
89cdda86d1 | ||
|
|
ad48d71897 | ||
|
|
946ce17a97 | ||
|
|
4d438dac75 | ||
|
|
a234ad6b12 | ||
|
|
1d000ac652 | ||
|
|
21c176ee1e | ||
|
|
4fb5bef8e1 | ||
|
|
9717596e2c | ||
|
|
1660cc3a9f | ||
|
|
4beb023204 | ||
|
|
72eb5345d6 | ||
|
|
fe7d193d1a | ||
|
|
09b7ccf561 | ||
|
|
a4a80b431b | ||
|
|
025a3b422d | ||
|
|
bb05b2fcf7 | ||
|
|
f77ef81877 | ||
|
|
80ece7750f | ||
|
|
8a86d75dc5 | ||
|
|
33a4fb98c6 | ||
|
|
a046697567 | ||
|
|
c0286dfd97 | ||
|
|
cc9af1d5fb | ||
|
|
5f8407a8a1 | ||
|
|
c789c82c1d | ||
|
|
16b5b77bcd | ||
|
|
0a093786ed | ||
|
|
3986caf182 | ||
|
|
cc6c26646c | ||
|
|
3846a4ccef | ||
|
|
aae4ae255b |
1
.gitignore
vendored
1
.gitignore
vendored
@@ -8,3 +8,4 @@ dist/
|
|||||||
pytestdebug.log
|
pytestdebug.log
|
||||||
|
|
||||||
fixtures/
|
fixtures/
|
||||||
|
/docs/_build
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
language: python
|
language: python
|
||||||
|
sudo: false
|
||||||
before_install: openssl version
|
before_install: openssl version
|
||||||
env:
|
env:
|
||||||
global:
|
global:
|
||||||
@@ -9,6 +10,7 @@ env:
|
|||||||
- WITH_LIB="requests2.3"
|
- WITH_LIB="requests2.3"
|
||||||
- WITH_LIB="requests2.4"
|
- WITH_LIB="requests2.4"
|
||||||
- WITH_LIB="requests2.5"
|
- WITH_LIB="requests2.5"
|
||||||
|
- WITH_LIB="requests2.6"
|
||||||
- WITH_LIB="requests2.7"
|
- WITH_LIB="requests2.7"
|
||||||
- WITH_LIB="requests1.x"
|
- WITH_LIB="requests1.x"
|
||||||
- WITH_LIB="httplib2"
|
- WITH_LIB="httplib2"
|
||||||
@@ -40,6 +42,7 @@ install:
|
|||||||
- if [ $WITH_LIB = "requests2.3" ] ; then pip install requests==2.3.0; fi
|
- if [ $WITH_LIB = "requests2.3" ] ; then pip install requests==2.3.0; fi
|
||||||
- if [ $WITH_LIB = "requests2.4" ] ; then pip install requests==2.4.0; fi
|
- if [ $WITH_LIB = "requests2.4" ] ; then pip install requests==2.4.0; fi
|
||||||
- if [ $WITH_LIB = "requests2.5" ] ; then pip install requests==2.5.0; fi
|
- if [ $WITH_LIB = "requests2.5" ] ; then pip install requests==2.5.0; fi
|
||||||
|
- if [ $WITH_LIB = "requests2.6" ] ; then pip install requests==2.6.0; fi
|
||||||
- if [ $WITH_LIB = "requests2.7" ] ; then pip install requests==2.7.0; fi
|
- if [ $WITH_LIB = "requests2.7" ] ; then pip install requests==2.7.0; fi
|
||||||
- if [ $WITH_LIB = "httplib2" ] ; then pip install httplib2; fi
|
- if [ $WITH_LIB = "httplib2" ] ; then pip install httplib2; fi
|
||||||
- if [ $WITH_LIB = "boto" ] ; then pip install boto; fi
|
- if [ $WITH_LIB = "boto" ] ; then pip install boto; fi
|
||||||
|
|||||||
85
README.rst
85
README.rst
@@ -1,29 +1,29 @@
|
|||||||
|
|Build Status| |Stories in Ready| |Gitter|
|
||||||
|
|
||||||
VCR.py
|
VCR.py
|
||||||
======
|
======
|
||||||
|
|
||||||
.. figure:: https://raw.github.com/kevin1024/vcrpy/master/vcr.png
|
.. image:: https://raw.github.com/kevin1024/vcrpy/master/vcr.png
|
||||||
:alt: vcr.py
|
:alt: vcr.py
|
||||||
|
|
||||||
vcr.py
|
|
||||||
This is a Python version of `Ruby's VCR
|
This is a Python version of `Ruby's VCR
|
||||||
library <https://github.com/vcr/vcr>`__.
|
library <https://github.com/vcr/vcr>`__.
|
||||||
|
|
||||||
|Build Status| |Stories in Ready|
|
|
||||||
|
|
||||||
What it does
|
What it does
|
||||||
------------
|
------------
|
||||||
|
|
||||||
VCR.py simplifies and speeds up tests that make HTTP requests. The first
|
VCR.py simplifies and speeds up tests that make HTTP requests. The
|
||||||
time you run code that is inside a VCR.py context manager or decorated
|
first time you run code that is inside a VCR.py context manager or
|
||||||
function, VCR.py records all HTTP interactions that take place through
|
decorated function, VCR.py records all HTTP interactions that take
|
||||||
the libraries it supports and serializes and writes them to a flat file
|
place through the libraries it supports and serializes and writes them
|
||||||
(in yaml format by default). This flat file is called a cassette. When
|
to a flat file (in yaml format by default). This flat file is called a
|
||||||
the relevant piece of code is executed again, VCR.py will read the
|
cassette. When the relevant piece of code is executed again, VCR.py
|
||||||
serialized requests and responses from the aforementioned cassette file,
|
will read the serialized requests and responses from the
|
||||||
and intercept any HTTP requests that it recognizes from the original
|
aforementioned cassette file, and intercept any HTTP requests that it
|
||||||
test run and return responses that corresponded to those requests. This
|
recognizes from the original test run and return the responses that
|
||||||
means that the requests will not actually result in HTTP traffic, which
|
corresponded to those requests. This means that the requests will not
|
||||||
confers several benefits including:
|
actually result in HTTP traffic, which confers several benefits
|
||||||
|
including:
|
||||||
|
|
||||||
- The ability to work offline
|
- The ability to work offline
|
||||||
- Completely deterministic tests
|
- Completely deterministic tests
|
||||||
@@ -49,6 +49,7 @@ The following http libraries are supported:
|
|||||||
- requests (both 1.x and 2.x versions)
|
- requests (both 1.x and 2.x versions)
|
||||||
- httplib2
|
- httplib2
|
||||||
- boto
|
- boto
|
||||||
|
- Tornado's AsyncHTTPClient
|
||||||
|
|
||||||
Usage
|
Usage
|
||||||
-----
|
-----
|
||||||
@@ -144,7 +145,9 @@ The following options are available :
|
|||||||
- port (the port of the server receiving the request)
|
- port (the port of the server receiving the request)
|
||||||
- path (the path of the request)
|
- path (the path of the request)
|
||||||
- query (the query string of the request)
|
- query (the query string of the request)
|
||||||
- body (the entire request body)
|
- raw\_body (the entire request body as is)
|
||||||
|
- body (the entire request body unmarshalled by content-type
|
||||||
|
i.e. xmlrpc, json, form-urlencoded, falling back on raw\_body)
|
||||||
- headers (the headers of the request)
|
- headers (the headers of the request)
|
||||||
|
|
||||||
Backwards compatible matchers:
|
Backwards compatible matchers:
|
||||||
@@ -413,12 +416,13 @@ that of ``before_record``:
|
|||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
def scrub_string(string, replacement=''):
|
def scrub_string(string, replacement=''):
|
||||||
def before_record_reponse(response):
|
def before_record_response(response):
|
||||||
return response['body']['string'] = response['body']['string'].replace(string, replacement)
|
response['body']['string'] = response['body']['string'].replace(string, replacement)
|
||||||
return scrub_string
|
return response
|
||||||
|
return before_record_response
|
||||||
|
|
||||||
my_vcr = vcr.VCR(
|
my_vcr = vcr.VCR(
|
||||||
before_record=scrub_string(settings.USERNAME, 'username'),
|
before_record_response=scrub_string(settings.USERNAME, 'username'),
|
||||||
)
|
)
|
||||||
with my_vcr.use_cassette('test.yml'):
|
with my_vcr.use_cassette('test.yml'):
|
||||||
# your http code here
|
# your http code here
|
||||||
@@ -569,6 +573,24 @@ If you set the loglevel to DEBUG, you will also get information about
|
|||||||
which matchers didn't match. This can help you with debugging custom
|
which matchers didn't match. This can help you with debugging custom
|
||||||
matchers.
|
matchers.
|
||||||
|
|
||||||
|
Speed
|
||||||
|
-----
|
||||||
|
VCR.py runs about 10x faster when pyyaml can use the libyaml extensions. However, just installing ``libyaml`` (Mac) or ``libyaml-dev`` (Linux) is not enough, as pyyaml needs to be rebuilt with the proper flag. Note that this flag is cached by pip, so clear the cache first.
|
||||||
|
|
||||||
|
Are you using libyaml already? This should work:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
python -c 'from yaml import CLoader'
|
||||||
|
|
||||||
|
If not:
|
||||||
|
|
||||||
|
.. code:: sh
|
||||||
|
|
||||||
|
pip uninstall pyyaml
|
||||||
|
pip --no-cache-dir install pyyaml
|
||||||
|
|
||||||
|
|
||||||
Upgrade
|
Upgrade
|
||||||
-------
|
-------
|
||||||
|
|
||||||
@@ -603,7 +625,25 @@ new API in version 1.0.x
|
|||||||
|
|
||||||
Changelog
|
Changelog
|
||||||
---------
|
---------
|
||||||
- 1.6.0 [#120] Tornado support thanks (thanks @abhinav), [#147] packaging fixes
|
- 1.7.4 [#217] Make use_cassette decorated functions actually return a
|
||||||
|
value (thanks @bcen). [#199] Fix path transformation defaults.
|
||||||
|
Better headers dictionary management.
|
||||||
|
- 1.7.3 [#188] ``additional_matchers`` kwarg on ``use_cassette``.
|
||||||
|
[#191] Actually support passing multiple before_record_request
|
||||||
|
functions (thanks @agriffis).
|
||||||
|
- 1.7.2 [#186] Get effective_url in tornado (thanks @mvschaik), [#187]
|
||||||
|
Set request_time on Response object in tornado (thanks @abhinav).
|
||||||
|
- 1.7.1 [#183] Patch ``fetch_impl`` instead of the entire HTTPClient
|
||||||
|
class for Tornado (thanks @abhinav).
|
||||||
|
- 1.7.0 [#177] Properly support coroutine/generator decoration. [#178]
|
||||||
|
Support distribute (thanks @graingert). [#163] Make compatibility
|
||||||
|
between python2 and python3 recorded cassettes more robust (thanks
|
||||||
|
@gward).
|
||||||
|
- 1.6.1 [#169] Support conditional requirements in old versions of
|
||||||
|
pip, Fix RST parse errors generated by pandoc, [Tornado] Fix
|
||||||
|
unsupported features exception not being raised, [#166]
|
||||||
|
content-aware body matcher.
|
||||||
|
- 1.6.0 [#120] Tornado support (thanks @abhinav), [#147] packaging fixes
|
||||||
(thanks @graingert), [#158] allow filtering post params in requests
|
(thanks @graingert), [#158] allow filtering post params in requests
|
||||||
(thanks @MrJohz), [#140] add xmlrpclib support (thanks @Diaoul).
|
(thanks @MrJohz), [#140] add xmlrpclib support (thanks @Diaoul).
|
||||||
- 1.5.2 Fix crash when cassette path contains cassette library
|
- 1.5.2 Fix crash when cassette path contains cassette library
|
||||||
@@ -737,3 +777,6 @@ more details
|
|||||||
:target: http://travis-ci.org/kevin1024/vcrpy
|
:target: http://travis-ci.org/kevin1024/vcrpy
|
||||||
.. |Stories in Ready| image:: https://badge.waffle.io/kevin1024/vcrpy.png?label=ready&title=Ready
|
.. |Stories in Ready| image:: https://badge.waffle.io/kevin1024/vcrpy.png?label=ready&title=Ready
|
||||||
:target: https://waffle.io/kevin1024/vcrpy
|
:target: https://waffle.io/kevin1024/vcrpy
|
||||||
|
.. |Gitter| image:: https://badges.gitter.im/Join%20Chat.svg
|
||||||
|
:alt: Join the chat at https://gitter.im/kevin1024/vcrpy
|
||||||
|
:target: https://gitter.im/kevin1024/vcrpy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
|
||||||
|
|||||||
192
docs/Makefile
Normal file
192
docs/Makefile
Normal file
@@ -0,0 +1,192 @@
|
|||||||
|
# Makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line.
|
||||||
|
SPHINXOPTS =
|
||||||
|
SPHINXBUILD = sphinx-build
|
||||||
|
PAPER =
|
||||||
|
BUILDDIR = _build
|
||||||
|
|
||||||
|
# User-friendly check for sphinx-build
|
||||||
|
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||||
|
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||||
|
endif
|
||||||
|
|
||||||
|
# Internal variables.
|
||||||
|
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||||
|
PAPEROPT_letter = -D latex_paper_size=letter
|
||||||
|
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||||
|
# the i18n builder cannot share the environment and doctrees with the others
|
||||||
|
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||||
|
|
||||||
|
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
|
||||||
|
|
||||||
|
help:
|
||||||
|
@echo "Please use \`make <target>' where <target> is one of"
|
||||||
|
@echo " html to make standalone HTML files"
|
||||||
|
@echo " dirhtml to make HTML files named index.html in directories"
|
||||||
|
@echo " singlehtml to make a single large HTML file"
|
||||||
|
@echo " pickle to make pickle files"
|
||||||
|
@echo " json to make JSON files"
|
||||||
|
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||||
|
@echo " qthelp to make HTML files and a qthelp project"
|
||||||
|
@echo " applehelp to make an Apple Help Book"
|
||||||
|
@echo " devhelp to make HTML files and a Devhelp project"
|
||||||
|
@echo " epub to make an epub"
|
||||||
|
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||||
|
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||||
|
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||||
|
@echo " text to make text files"
|
||||||
|
@echo " man to make manual pages"
|
||||||
|
@echo " texinfo to make Texinfo files"
|
||||||
|
@echo " info to make Texinfo files and run them through makeinfo"
|
||||||
|
@echo " gettext to make PO message catalogs"
|
||||||
|
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||||
|
@echo " xml to make Docutils-native XML files"
|
||||||
|
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||||
|
@echo " linkcheck to check all external links for integrity"
|
||||||
|
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||||
|
@echo " coverage to run coverage check of the documentation (if enabled)"
|
||||||
|
|
||||||
|
clean:
|
||||||
|
rm -rf $(BUILDDIR)/*
|
||||||
|
|
||||||
|
html:
|
||||||
|
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||||
|
|
||||||
|
dirhtml:
|
||||||
|
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||||
|
|
||||||
|
singlehtml:
|
||||||
|
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||||
|
|
||||||
|
pickle:
|
||||||
|
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can process the pickle files."
|
||||||
|
|
||||||
|
json:
|
||||||
|
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can process the JSON files."
|
||||||
|
|
||||||
|
htmlhelp:
|
||||||
|
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||||
|
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||||
|
|
||||||
|
qthelp:
|
||||||
|
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||||
|
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||||
|
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/vcrpy.qhcp"
|
||||||
|
@echo "To view the help file:"
|
||||||
|
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/vcrpy.qhc"
|
||||||
|
|
||||||
|
applehelp:
|
||||||
|
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
|
||||||
|
@echo "N.B. You won't be able to view it unless you put it in" \
|
||||||
|
"~/Library/Documentation/Help or install it in your application" \
|
||||||
|
"bundle."
|
||||||
|
|
||||||
|
devhelp:
|
||||||
|
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||||
|
@echo
|
||||||
|
@echo "Build finished."
|
||||||
|
@echo "To view the help file:"
|
||||||
|
@echo "# mkdir -p $$HOME/.local/share/devhelp/vcrpy"
|
||||||
|
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/vcrpy"
|
||||||
|
@echo "# devhelp"
|
||||||
|
|
||||||
|
epub:
|
||||||
|
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||||
|
|
||||||
|
latex:
|
||||||
|
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||||
|
@echo
|
||||||
|
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||||
|
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||||
|
"(use \`make latexpdf' here to do that automatically)."
|
||||||
|
|
||||||
|
latexpdf:
|
||||||
|
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||||
|
@echo "Running LaTeX files through pdflatex..."
|
||||||
|
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||||
|
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||||
|
|
||||||
|
latexpdfja:
|
||||||
|
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||||
|
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||||
|
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||||
|
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||||
|
|
||||||
|
text:
|
||||||
|
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||||
|
|
||||||
|
man:
|
||||||
|
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||||
|
|
||||||
|
texinfo:
|
||||||
|
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||||
|
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||||
|
"(use \`make info' here to do that automatically)."
|
||||||
|
|
||||||
|
info:
|
||||||
|
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||||
|
@echo "Running Texinfo files through makeinfo..."
|
||||||
|
make -C $(BUILDDIR)/texinfo info
|
||||||
|
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||||
|
|
||||||
|
gettext:
|
||||||
|
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||||
|
|
||||||
|
changes:
|
||||||
|
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||||
|
@echo
|
||||||
|
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||||
|
|
||||||
|
linkcheck:
|
||||||
|
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||||
|
@echo
|
||||||
|
@echo "Link check complete; look for any errors in the above output " \
|
||||||
|
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||||
|
|
||||||
|
doctest:
|
||||||
|
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||||
|
@echo "Testing of doctests in the sources finished, look at the " \
|
||||||
|
"results in $(BUILDDIR)/doctest/output.txt."
|
||||||
|
|
||||||
|
coverage:
|
||||||
|
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
|
||||||
|
@echo "Testing of coverage in the sources finished, look at the " \
|
||||||
|
"results in $(BUILDDIR)/coverage/python.txt."
|
||||||
|
|
||||||
|
xml:
|
||||||
|
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||||
|
|
||||||
|
pseudoxml:
|
||||||
|
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||||
|
@echo
|
||||||
|
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
||||||
293
docs/conf.py
Normal file
293
docs/conf.py
Normal file
@@ -0,0 +1,293 @@
|
|||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
#
|
||||||
|
# vcrpy documentation build configuration file, created by
|
||||||
|
# sphinx-quickstart on Sun Sep 13 11:18:00 2015.
|
||||||
|
#
|
||||||
|
# This file is execfile()d with the current directory set to its
|
||||||
|
# containing dir.
|
||||||
|
#
|
||||||
|
# Note that not all possible configuration values are present in this
|
||||||
|
# autogenerated file.
|
||||||
|
#
|
||||||
|
# All configuration values have a default; values that are commented out
|
||||||
|
# serve to show the default.
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
import shlex
|
||||||
|
|
||||||
|
# If extensions (or modules to document with autodoc) are in another directory,
|
||||||
|
# add these directories to sys.path here. If the directory is relative to the
|
||||||
|
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||||
|
#sys.path.insert(0, os.path.abspath('.'))
|
||||||
|
|
||||||
|
# -- General configuration ------------------------------------------------
|
||||||
|
|
||||||
|
# If your documentation needs a minimal Sphinx version, state it here.
|
||||||
|
#needs_sphinx = '1.0'
|
||||||
|
|
||||||
|
# Add any Sphinx extension module names here, as strings. They can be
|
||||||
|
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||||
|
# ones.
|
||||||
|
extensions = [
|
||||||
|
'sphinx.ext.autodoc',
|
||||||
|
'sphinx.ext.intersphinx',
|
||||||
|
'sphinx.ext.coverage',
|
||||||
|
'sphinx.ext.viewcode',
|
||||||
|
]
|
||||||
|
|
||||||
|
# Add any paths that contain templates here, relative to this directory.
|
||||||
|
templates_path = ['_templates']
|
||||||
|
|
||||||
|
# The suffix(es) of source filenames.
|
||||||
|
# You can specify multiple suffix as a list of string:
|
||||||
|
# source_suffix = ['.rst', '.md']
|
||||||
|
source_suffix = '.rst'
|
||||||
|
|
||||||
|
# The encoding of source files.
|
||||||
|
#source_encoding = 'utf-8-sig'
|
||||||
|
|
||||||
|
# The master toctree document.
|
||||||
|
master_doc = 'index'
|
||||||
|
|
||||||
|
# General information about the project.
|
||||||
|
project = u'vcrpy'
|
||||||
|
copyright = u'2015, Kevin McCarthy'
|
||||||
|
author = u'Kevin McCarthy'
|
||||||
|
|
||||||
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
# built documents.
|
||||||
|
#
|
||||||
|
# The short X.Y version.
|
||||||
|
version = '1.7.3'
|
||||||
|
# The full version, including alpha/beta/rc tags.
|
||||||
|
release = '1.7.3'
|
||||||
|
|
||||||
|
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||||
|
# for a list of supported languages.
|
||||||
|
#
|
||||||
|
# This is also used if you do content translation via gettext catalogs.
|
||||||
|
# Usually you set "language" from the command line for these cases.
|
||||||
|
language = None
|
||||||
|
|
||||||
|
# There are two options for replacing |today|: either, you set today to some
|
||||||
|
# non-false value, then it is used:
|
||||||
|
#today = ''
|
||||||
|
# Else, today_fmt is used as the format for a strftime call.
|
||||||
|
#today_fmt = '%B %d, %Y'
|
||||||
|
|
||||||
|
# List of patterns, relative to source directory, that match files and
|
||||||
|
# directories to ignore when looking for source files.
|
||||||
|
exclude_patterns = ['_build']
|
||||||
|
|
||||||
|
# The reST default role (used for this markup: `text`) to use for all
|
||||||
|
# documents.
|
||||||
|
#default_role = None
|
||||||
|
|
||||||
|
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||||
|
#add_function_parentheses = True
|
||||||
|
|
||||||
|
# If true, the current module name will be prepended to all description
|
||||||
|
# unit titles (such as .. function::).
|
||||||
|
#add_module_names = True
|
||||||
|
|
||||||
|
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||||
|
# output. They are ignored by default.
|
||||||
|
#show_authors = False
|
||||||
|
|
||||||
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
|
# A list of ignored prefixes for module index sorting.
|
||||||
|
#modindex_common_prefix = []
|
||||||
|
|
||||||
|
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||||
|
#keep_warnings = False
|
||||||
|
|
||||||
|
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||||
|
todo_include_todos = False
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for HTML output ----------------------------------------------
|
||||||
|
|
||||||
|
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||||
|
# a list of builtin themes.
|
||||||
|
html_theme = 'alabaster'
|
||||||
|
|
||||||
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
|
# further. For a list of options available for each theme, see the
|
||||||
|
# documentation.
|
||||||
|
#html_theme_options = {}
|
||||||
|
|
||||||
|
# Add any paths that contain custom themes here, relative to this directory.
|
||||||
|
#html_theme_path = []
|
||||||
|
|
||||||
|
# The name for this set of Sphinx documents. If None, it defaults to
|
||||||
|
# "<project> v<release> documentation".
|
||||||
|
#html_title = None
|
||||||
|
|
||||||
|
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||||
|
#html_short_title = None
|
||||||
|
|
||||||
|
# The name of an image file (relative to this directory) to place at the top
|
||||||
|
# of the sidebar.
|
||||||
|
#html_logo = None
|
||||||
|
|
||||||
|
# The name of an image file (within the static path) to use as favicon of the
|
||||||
|
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||||
|
# pixels large.
|
||||||
|
#html_favicon = None
|
||||||
|
|
||||||
|
# Add any paths that contain custom static files (such as style sheets) here,
|
||||||
|
# relative to this directory. They are copied after the builtin static files,
|
||||||
|
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||||
|
html_static_path = ['_static']
|
||||||
|
|
||||||
|
# Add any extra paths that contain custom files (such as robots.txt or
|
||||||
|
# .htaccess) here, relative to this directory. These files are copied
|
||||||
|
# directly to the root of the documentation.
|
||||||
|
#html_extra_path = []
|
||||||
|
|
||||||
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
|
# using the given strftime format.
|
||||||
|
#html_last_updated_fmt = '%b %d, %Y'
|
||||||
|
|
||||||
|
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||||
|
# typographically correct entities.
|
||||||
|
#html_use_smartypants = True
|
||||||
|
|
||||||
|
# Custom sidebar templates, maps document names to template names.
|
||||||
|
#html_sidebars = {}
|
||||||
|
|
||||||
|
# Additional templates that should be rendered to pages, maps page names to
|
||||||
|
# template names.
|
||||||
|
#html_additional_pages = {}
|
||||||
|
|
||||||
|
# If false, no module index is generated.
|
||||||
|
#html_domain_indices = True
|
||||||
|
|
||||||
|
# If false, no index is generated.
|
||||||
|
#html_use_index = True
|
||||||
|
|
||||||
|
# If true, the index is split into individual pages for each letter.
|
||||||
|
#html_split_index = False
|
||||||
|
|
||||||
|
# If true, links to the reST sources are added to the pages.
|
||||||
|
#html_show_sourcelink = True
|
||||||
|
|
||||||
|
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||||
|
#html_show_sphinx = True
|
||||||
|
|
||||||
|
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||||
|
#html_show_copyright = True
|
||||||
|
|
||||||
|
# If true, an OpenSearch description file will be output, and all pages will
|
||||||
|
# contain a <link> tag referring to it. The value of this option must be the
|
||||||
|
# base URL from which the finished HTML is served.
|
||||||
|
#html_use_opensearch = ''
|
||||||
|
|
||||||
|
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||||
|
#html_file_suffix = None
|
||||||
|
|
||||||
|
# Language to be used for generating the HTML full-text search index.
|
||||||
|
# Sphinx supports the following languages:
|
||||||
|
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
|
||||||
|
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
|
||||||
|
#html_search_language = 'en'
|
||||||
|
|
||||||
|
# A dictionary with options for the search language support, empty by default.
|
||||||
|
# Now only 'ja' uses this config value
|
||||||
|
#html_search_options = {'type': 'default'}
|
||||||
|
|
||||||
|
# The name of a javascript file (relative to the configuration directory) that
|
||||||
|
# implements a search results scorer. If empty, the default will be used.
|
||||||
|
#html_search_scorer = 'scorer.js'
|
||||||
|
|
||||||
|
# Output file base name for HTML help builder.
|
||||||
|
htmlhelp_basename = 'vcrpydoc'
|
||||||
|
|
||||||
|
# -- Options for LaTeX output ---------------------------------------------
|
||||||
|
|
||||||
|
latex_elements = {
|
||||||
|
# The paper size ('letterpaper' or 'a4paper').
|
||||||
|
#'papersize': 'letterpaper',
|
||||||
|
|
||||||
|
# The font size ('10pt', '11pt' or '12pt').
|
||||||
|
#'pointsize': '10pt',
|
||||||
|
|
||||||
|
# Additional stuff for the LaTeX preamble.
|
||||||
|
#'preamble': '',
|
||||||
|
|
||||||
|
# Latex figure (float) alignment
|
||||||
|
#'figure_align': 'htbp',
|
||||||
|
}
|
||||||
|
|
||||||
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
|
# (source start file, target name, title,
|
||||||
|
# author, documentclass [howto, manual, or own class]).
|
||||||
|
latex_documents = [
|
||||||
|
(master_doc, 'vcrpy.tex', u'vcrpy Documentation',
|
||||||
|
u'Kevin McCarthy', 'manual'),
|
||||||
|
]
|
||||||
|
|
||||||
|
# The name of an image file (relative to this directory) to place at the top of
|
||||||
|
# the title page.
|
||||||
|
#latex_logo = None
|
||||||
|
|
||||||
|
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||||
|
# not chapters.
|
||||||
|
#latex_use_parts = False
|
||||||
|
|
||||||
|
# If true, show page references after internal links.
|
||||||
|
#latex_show_pagerefs = False
|
||||||
|
|
||||||
|
# If true, show URL addresses after external links.
|
||||||
|
#latex_show_urls = False
|
||||||
|
|
||||||
|
# Documents to append as an appendix to all manuals.
|
||||||
|
#latex_appendices = []
|
||||||
|
|
||||||
|
# If false, no module index is generated.
|
||||||
|
#latex_domain_indices = True
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for manual page output ---------------------------------------
|
||||||
|
|
||||||
|
# One entry per manual page. List of tuples
|
||||||
|
# (source start file, name, description, authors, manual section).
|
||||||
|
man_pages = [
|
||||||
|
(master_doc, 'vcrpy', u'vcrpy Documentation',
|
||||||
|
[author], 1)
|
||||||
|
]
|
||||||
|
|
||||||
|
# If true, show URL addresses after external links.
|
||||||
|
#man_show_urls = False
|
||||||
|
|
||||||
|
|
||||||
|
# -- Options for Texinfo output -------------------------------------------
|
||||||
|
|
||||||
|
# Grouping the document tree into Texinfo files. List of tuples
|
||||||
|
# (source start file, target name, title, author,
|
||||||
|
# dir menu entry, description, category)
|
||||||
|
texinfo_documents = [
|
||||||
|
(master_doc, 'vcrpy', u'vcrpy Documentation',
|
||||||
|
author, 'vcrpy', 'One line description of project.',
|
||||||
|
'Miscellaneous'),
|
||||||
|
]
|
||||||
|
|
||||||
|
# Documents to append as an appendix to all manuals.
|
||||||
|
#texinfo_appendices = []
|
||||||
|
|
||||||
|
# If false, no module index is generated.
|
||||||
|
#texinfo_domain_indices = True
|
||||||
|
|
||||||
|
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||||
|
#texinfo_show_urls = 'footnote'
|
||||||
|
|
||||||
|
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||||
|
#texinfo_no_detailmenu = False
|
||||||
|
|
||||||
|
|
||||||
|
# Example configuration for intersphinx: refer to the Python standard library.
|
||||||
|
intersphinx_mapping = {'https://docs.python.org/': None}
|
||||||
19
docs/index.rst
Normal file
19
docs/index.rst
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
vcrpy
|
||||||
|
=====
|
||||||
|
|
||||||
|
Contents:
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
|
vcr
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Indices and tables
|
||||||
|
==================
|
||||||
|
|
||||||
|
* :ref:`genindex`
|
||||||
|
* :ref:`modindex`
|
||||||
|
* :ref:`search`
|
||||||
|
|
||||||
6
docs/vcr.rst
Normal file
6
docs/vcr.rst
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
:mod:`~vcr.config`
|
||||||
|
=================
|
||||||
|
|
||||||
|
.. automodule:: vcr.config
|
||||||
|
:members:
|
||||||
|
:special-members: __init__
|
||||||
39
setup.py
39
setup.py
@@ -1,11 +1,15 @@
|
|||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
import logging
|
||||||
|
|
||||||
from setuptools import setup, find_packages
|
from setuptools import setup, find_packages
|
||||||
from setuptools.command.test import test as TestCommand
|
from setuptools.command.test import test as TestCommand
|
||||||
|
import pkg_resources
|
||||||
|
|
||||||
long_description = open('README.rst', 'r').read()
|
long_description = open('README.rst', 'r').read()
|
||||||
|
|
||||||
|
|
||||||
class PyTest(TestCommand):
|
class PyTest(TestCommand):
|
||||||
|
|
||||||
def finalize_options(self):
|
def finalize_options(self):
|
||||||
@@ -20,9 +24,34 @@ class PyTest(TestCommand):
|
|||||||
sys.exit(errno)
|
sys.exit(errno)
|
||||||
|
|
||||||
|
|
||||||
|
install_requires = ['PyYAML', 'wrapt', 'six>=1.5']
|
||||||
|
|
||||||
|
|
||||||
|
extras_require = {
|
||||||
|
':python_version in "2.4, 2.5, 2.6"':
|
||||||
|
['contextlib2', 'backport_collections', 'mock'],
|
||||||
|
':python_version in "2.7, 3.1, 3.2"': ['contextlib2', 'mock'],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
if 'bdist_wheel' not in sys.argv:
|
||||||
|
for key, value in extras_require.items():
|
||||||
|
if key.startswith(':') and pkg_resources.evaluate_marker(key[1:]):
|
||||||
|
install_requires.extend(value)
|
||||||
|
except Exception:
|
||||||
|
logging.getLogger(__name__).exception(
|
||||||
|
'Something went wrong calculating platform specific dependencies, so '
|
||||||
|
"you're getting them all!"
|
||||||
|
)
|
||||||
|
for key, value in extras_require.items():
|
||||||
|
if key.startswith(':'):
|
||||||
|
install_requires.extend(value)
|
||||||
|
|
||||||
|
|
||||||
setup(
|
setup(
|
||||||
name='vcrpy',
|
name='vcrpy',
|
||||||
version='1.6.0',
|
version='1.7.4',
|
||||||
description=(
|
description=(
|
||||||
"Automatically mock your HTTP interactions to simplify and "
|
"Automatically mock your HTTP interactions to simplify and "
|
||||||
"speed up testing"
|
"speed up testing"
|
||||||
@@ -32,12 +61,8 @@ setup(
|
|||||||
author_email='me@kevinmccarthy.org',
|
author_email='me@kevinmccarthy.org',
|
||||||
url='https://github.com/kevin1024/vcrpy',
|
url='https://github.com/kevin1024/vcrpy',
|
||||||
packages=find_packages(exclude=("tests*",)),
|
packages=find_packages(exclude=("tests*",)),
|
||||||
install_requires=['PyYAML', 'wrapt', 'six>=1.5'],
|
install_requires=install_requires,
|
||||||
extras_require = {
|
extras_require=extras_require,
|
||||||
':python_version in "2.4, 2.5, 2.6"':
|
|
||||||
['contextlib2', 'backport_collections', 'mock'],
|
|
||||||
':python_version in "2.7, 3.1, 3.2"': ['contextlib2', 'mock'],
|
|
||||||
},
|
|
||||||
license='MIT',
|
license='MIT',
|
||||||
tests_require=['pytest', 'mock', 'pytest-localserver'],
|
tests_require=['pytest', 'mock', 'pytest-localserver'],
|
||||||
cmdclass={'test': PyTest},
|
cmdclass={'test': PyTest},
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
import pytest
|
import pytest
|
||||||
boto = pytest.importorskip("boto")
|
boto = pytest.importorskip("boto")
|
||||||
|
|
||||||
import boto
|
import boto
|
||||||
import boto.iam
|
import boto.iam
|
||||||
from boto.s3.connection import S3Connection
|
from boto.s3.connection import S3Connection
|
||||||
@@ -7,6 +8,7 @@ from boto.s3.key import Key
|
|||||||
from ConfigParser import DuplicateSectionError
|
from ConfigParser import DuplicateSectionError
|
||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
|
|
||||||
def test_boto_stubs(tmpdir):
|
def test_boto_stubs(tmpdir):
|
||||||
with vcr.use_cassette(str(tmpdir.join('boto-stubs.yml'))):
|
with vcr.use_cassette(str(tmpdir.join('boto-stubs.yml'))):
|
||||||
# Perform the imports within the patched context so that
|
# Perform the imports within the patched context so that
|
||||||
@@ -17,6 +19,7 @@ def test_boto_stubs(tmpdir):
|
|||||||
assert issubclass(CertValidatingHTTPSConnection, VCRCertValidatingHTTPSConnection)
|
assert issubclass(CertValidatingHTTPSConnection, VCRCertValidatingHTTPSConnection)
|
||||||
CertValidatingHTTPSConnection('hostname.does.not.matter')
|
CertValidatingHTTPSConnection('hostname.does.not.matter')
|
||||||
|
|
||||||
|
|
||||||
def test_boto_without_vcr():
|
def test_boto_without_vcr():
|
||||||
s3_conn = S3Connection()
|
s3_conn = S3Connection()
|
||||||
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
|
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
|
||||||
@@ -24,6 +27,7 @@ def test_boto_without_vcr():
|
|||||||
k.key = 'test.txt'
|
k.key = 'test.txt'
|
||||||
k.set_contents_from_string('hello world i am a string')
|
k.set_contents_from_string('hello world i am a string')
|
||||||
|
|
||||||
|
|
||||||
def test_boto_medium_difficulty(tmpdir):
|
def test_boto_medium_difficulty(tmpdir):
|
||||||
s3_conn = S3Connection()
|
s3_conn = S3Connection()
|
||||||
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
|
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
|
||||||
@@ -53,6 +57,7 @@ def test_boto_hardcore_mode(tmpdir):
|
|||||||
k.key = 'test.txt'
|
k.key = 'test.txt'
|
||||||
k.set_contents_from_string('hello world i am a string')
|
k.set_contents_from_string('hello world i am a string')
|
||||||
|
|
||||||
|
|
||||||
def test_boto_iam(tmpdir):
|
def test_boto_iam(tmpdir):
|
||||||
try:
|
try:
|
||||||
boto.config.add_section('Boto')
|
boto.config.add_section('Boto')
|
||||||
|
|||||||
@@ -17,11 +17,7 @@ def _request_with_auth(url, username, password):
|
|||||||
|
|
||||||
|
|
||||||
def _find_header(cassette, header):
|
def _find_header(cassette, header):
|
||||||
for request in cassette.requests:
|
return any(header in request.headers for request in cassette.requests)
|
||||||
for k in request.headers:
|
|
||||||
if header.lower() == k.lower():
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def test_filter_basic_auth(tmpdir):
|
def test_filter_basic_auth(tmpdir):
|
||||||
|
|||||||
@@ -56,6 +56,17 @@ def test_response_headers(scheme, tmpdir):
|
|||||||
resp, _ = httplib2.Http().request(url)
|
resp, _ = httplib2.Http().request(url)
|
||||||
assert set(headers) == set(resp.items())
|
assert set(headers) == set(resp.items())
|
||||||
|
|
||||||
|
def test_effective_url(scheme, tmpdir):
|
||||||
|
'''Ensure that the effective_url is captured'''
|
||||||
|
url = scheme + '://httpbin.org/redirect-to?url=/html'
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
|
||||||
|
resp, _ = httplib2.Http().request(url)
|
||||||
|
effective_url = resp['content-location']
|
||||||
|
assert effective_url == scheme + '://httpbin.org/html'
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
|
||||||
|
resp, _ = httplib2.Http().request(url)
|
||||||
|
assert effective_url == resp['content-location']
|
||||||
|
|
||||||
def test_multiple_requests(scheme, tmpdir):
|
def test_multiple_requests(scheme, tmpdir):
|
||||||
'''Ensure that we can cache multiple requests'''
|
'''Ensure that we can cache multiple requests'''
|
||||||
|
|||||||
@@ -44,6 +44,15 @@ def test_body(tmpdir, scheme):
|
|||||||
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
|
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
|
||||||
assert content == requests.get(url).content
|
assert content == requests.get(url).content
|
||||||
|
|
||||||
|
def test_effective_url(scheme, tmpdir):
|
||||||
|
'''Ensure that the effective_url is captured'''
|
||||||
|
url = scheme + '://httpbin.org/redirect-to?url=/html'
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
|
||||||
|
effective_url = requests.get(url).url
|
||||||
|
assert effective_url == scheme + '://httpbin.org/html'
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
|
||||||
|
assert effective_url == requests.get(url).url
|
||||||
|
|
||||||
def test_auth(tmpdir, scheme):
|
def test_auth(tmpdir, scheme):
|
||||||
'''Ensure that we can handle basic auth'''
|
'''Ensure that we can handle basic auth'''
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ import json
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import vcr
|
import vcr
|
||||||
|
from vcr.errors import CannotOverwriteExistingCassetteException
|
||||||
|
|
||||||
from assertions import assert_cassette_empty, assert_is_json
|
from assertions import assert_cassette_empty, assert_is_json
|
||||||
|
|
||||||
@@ -80,6 +81,17 @@ def test_body(get_client, tmpdir, scheme):
|
|||||||
assert content == (yield get(get_client(), url)).body
|
assert content == (yield get(get_client(), url)).body
|
||||||
assert 1 == cass.play_count
|
assert 1 == cass.play_count
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_effective_url(get_client, scheme, tmpdir):
|
||||||
|
'''Ensure that the effective_url is captured'''
|
||||||
|
url = scheme + '://httpbin.org/redirect-to?url=/html'
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
|
||||||
|
effective_url = (yield get(get_client(), url)).effective_url
|
||||||
|
assert effective_url == scheme + '://httpbin.org/html'
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('url.yaml'))) as cass:
|
||||||
|
assert effective_url == (yield get(get_client(), url)).effective_url
|
||||||
|
assert 1 == cass.play_count
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.gen_test
|
||||||
def test_auth(get_client, tmpdir, scheme):
|
def test_auth(get_client, tmpdir, scheme):
|
||||||
@@ -203,3 +215,141 @@ def test_https_with_cert_validation_disabled(get_client, tmpdir):
|
|||||||
with vcr.use_cassette(cass_path) as cass:
|
with vcr.use_cassette(cass_path) as cass:
|
||||||
yield get(get_client(), 'https://httpbin.org', validate_cert=False)
|
yield get(get_client(), 'https://httpbin.org', validate_cert=False)
|
||||||
assert 1 == cass.play_count
|
assert 1 == cass.play_count
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_unsupported_features_raises_in_future(get_client, tmpdir):
|
||||||
|
'''Ensure that the exception for an AsyncHTTPClient feature not being
|
||||||
|
supported is raised inside the future.'''
|
||||||
|
|
||||||
|
def callback(chunk):
|
||||||
|
assert False, "Did not expect to be called."
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('invalid.yaml'))):
|
||||||
|
future = get(
|
||||||
|
get_client(), 'http://httpbin.org', streaming_callback=callback
|
||||||
|
)
|
||||||
|
|
||||||
|
with pytest.raises(Exception) as excinfo:
|
||||||
|
yield future
|
||||||
|
|
||||||
|
assert "not yet supported by VCR" in str(excinfo)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
|
||||||
|
'''Ensure that the exception for an AsyncHTTPClient feature not being
|
||||||
|
supported is not raised if raise_error=False.'''
|
||||||
|
|
||||||
|
def callback(chunk):
|
||||||
|
assert False, "Did not expect to be called."
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('invalid.yaml'))):
|
||||||
|
response = yield get(
|
||||||
|
get_client(),
|
||||||
|
'http://httpbin.org',
|
||||||
|
streaming_callback=callback,
|
||||||
|
raise_error=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert "not yet supported by VCR" in str(response.error)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
|
||||||
|
'''Ensure that CannotOverwriteExistingCassetteException is raised inside
|
||||||
|
the future.'''
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
|
||||||
|
yield get(get_client(), 'http://httpbin.org/get')
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
|
||||||
|
future = get(get_client(), 'http://httpbin.org/headers')
|
||||||
|
|
||||||
|
with pytest.raises(CannotOverwriteExistingCassetteException):
|
||||||
|
yield future
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
|
||||||
|
'''Ensure that CannotOverwriteExistingCassetteException is not raised if
|
||||||
|
raise_error=False in the fetch() call.'''
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
|
||||||
|
yield get(
|
||||||
|
get_client(), 'http://httpbin.org/get', raise_error=False
|
||||||
|
)
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
|
||||||
|
response = yield get(
|
||||||
|
get_client(), 'http://httpbin.org/headers', raise_error=False
|
||||||
|
)
|
||||||
|
|
||||||
|
assert isinstance(response.error, CannotOverwriteExistingCassetteException)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix('.yaml'))
|
||||||
|
def test_tornado_with_decorator_use_cassette(get_client):
|
||||||
|
response = yield get_client().fetch(
|
||||||
|
http.HTTPRequest('http://www.google.com/', method='GET')
|
||||||
|
)
|
||||||
|
assert response.body.decode('utf-8') == "not actually google"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix('.yaml'))
|
||||||
|
def test_tornado_exception_can_be_caught(get_client):
|
||||||
|
try:
|
||||||
|
yield get(get_client(), 'http://httpbin.org/status/500')
|
||||||
|
except http.HTTPError as e:
|
||||||
|
assert e.code == 500
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield get(get_client(), 'http://httpbin.org/status/404')
|
||||||
|
except http.HTTPError as e:
|
||||||
|
assert e.code == 404
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_existing_references_get_patched(tmpdir):
|
||||||
|
from tornado.httpclient import AsyncHTTPClient
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('data.yaml'))):
|
||||||
|
client = AsyncHTTPClient()
|
||||||
|
yield get(client, 'http://httpbin.org/get')
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('data.yaml'))) as cass:
|
||||||
|
yield get(client, 'http://httpbin.org/get')
|
||||||
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_existing_instances_get_patched(get_client, tmpdir):
|
||||||
|
'''Ensure that existing instances of AsyncHTTPClient get patched upon
|
||||||
|
entering VCR context.'''
|
||||||
|
|
||||||
|
client = get_client()
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('data.yaml'))):
|
||||||
|
yield get(client, 'http://httpbin.org/get')
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('data.yaml'))) as cass:
|
||||||
|
yield get(client, 'http://httpbin.org/get')
|
||||||
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.gen_test
|
||||||
|
def test_request_time_is_set(get_client, tmpdir):
|
||||||
|
'''Ensures that the request_time on HTTPResponses is set.'''
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('data.yaml'))):
|
||||||
|
client = get_client()
|
||||||
|
response = yield get(client, 'http://httpbin.org/get')
|
||||||
|
assert response.request_time is not None
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('data.yaml'))) as cass:
|
||||||
|
client = get_client()
|
||||||
|
response = yield get(client, 'http://httpbin.org/get')
|
||||||
|
assert response.request_time is not None
|
||||||
|
assert cass.play_count == 1
|
||||||
|
|||||||
62
tests/integration/test_tornado_exception_can_be_caught.yaml
Normal file
62
tests/integration/test_tornado_exception_can_be_caught.yaml
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: null
|
||||||
|
headers: {}
|
||||||
|
method: GET
|
||||||
|
uri: http://httpbin.org/status/500
|
||||||
|
response:
|
||||||
|
body: {string: !!python/unicode ''}
|
||||||
|
headers:
|
||||||
|
- !!python/tuple
|
||||||
|
- Content-Length
|
||||||
|
- ['0']
|
||||||
|
- !!python/tuple
|
||||||
|
- Server
|
||||||
|
- [nginx]
|
||||||
|
- !!python/tuple
|
||||||
|
- Connection
|
||||||
|
- [close]
|
||||||
|
- !!python/tuple
|
||||||
|
- Access-Control-Allow-Credentials
|
||||||
|
- ['true']
|
||||||
|
- !!python/tuple
|
||||||
|
- Date
|
||||||
|
- ['Thu, 30 Jul 2015 17:32:39 GMT']
|
||||||
|
- !!python/tuple
|
||||||
|
- Access-Control-Allow-Origin
|
||||||
|
- ['*']
|
||||||
|
- !!python/tuple
|
||||||
|
- Content-Type
|
||||||
|
- [text/html; charset=utf-8]
|
||||||
|
status: {code: 500, message: INTERNAL SERVER ERROR}
|
||||||
|
- request:
|
||||||
|
body: null
|
||||||
|
headers: {}
|
||||||
|
method: GET
|
||||||
|
uri: http://httpbin.org/status/404
|
||||||
|
response:
|
||||||
|
body: {string: !!python/unicode ''}
|
||||||
|
headers:
|
||||||
|
- !!python/tuple
|
||||||
|
- Content-Length
|
||||||
|
- ['0']
|
||||||
|
- !!python/tuple
|
||||||
|
- Server
|
||||||
|
- [nginx]
|
||||||
|
- !!python/tuple
|
||||||
|
- Connection
|
||||||
|
- [close]
|
||||||
|
- !!python/tuple
|
||||||
|
- Access-Control-Allow-Credentials
|
||||||
|
- ['true']
|
||||||
|
- !!python/tuple
|
||||||
|
- Date
|
||||||
|
- ['Thu, 30 Jul 2015 17:32:39 GMT']
|
||||||
|
- !!python/tuple
|
||||||
|
- Access-Control-Allow-Origin
|
||||||
|
- ['*']
|
||||||
|
- !!python/tuple
|
||||||
|
- Content-Type
|
||||||
|
- [text/html; charset=utf-8]
|
||||||
|
status: {code: 404, message: NOT FOUND}
|
||||||
|
version: 1
|
||||||
@@ -0,0 +1,53 @@
|
|||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: null
|
||||||
|
headers: {}
|
||||||
|
method: GET
|
||||||
|
uri: http://www.google.com/
|
||||||
|
response:
|
||||||
|
body: {string: !!python/unicode 'not actually google'}
|
||||||
|
headers:
|
||||||
|
- !!python/tuple
|
||||||
|
- Expires
|
||||||
|
- ['-1']
|
||||||
|
- !!python/tuple
|
||||||
|
- Connection
|
||||||
|
- [close]
|
||||||
|
- !!python/tuple
|
||||||
|
- P3p
|
||||||
|
- ['CP="This is not a P3P policy! See http://www.google.com/support/accounts/bin/answer.py?hl=en&answer=151657
|
||||||
|
for more info."']
|
||||||
|
- !!python/tuple
|
||||||
|
- Alternate-Protocol
|
||||||
|
- ['80:quic,p=0']
|
||||||
|
- !!python/tuple
|
||||||
|
- Accept-Ranges
|
||||||
|
- [none]
|
||||||
|
- !!python/tuple
|
||||||
|
- X-Xss-Protection
|
||||||
|
- [1; mode=block]
|
||||||
|
- !!python/tuple
|
||||||
|
- Vary
|
||||||
|
- [Accept-Encoding]
|
||||||
|
- !!python/tuple
|
||||||
|
- Date
|
||||||
|
- ['Thu, 30 Jul 2015 08:41:40 GMT']
|
||||||
|
- !!python/tuple
|
||||||
|
- Cache-Control
|
||||||
|
- ['private, max-age=0']
|
||||||
|
- !!python/tuple
|
||||||
|
- Content-Type
|
||||||
|
- [text/html; charset=ISO-8859-1]
|
||||||
|
- !!python/tuple
|
||||||
|
- Set-Cookie
|
||||||
|
- ['PREF=ID=1111111111111111:FF=0:TM=1438245700:LM=1438245700:V=1:S=GAzVO0ALebSpC_cJ;
|
||||||
|
expires=Sat, 29-Jul-2017 08:41:40 GMT; path=/; domain=.google.com', 'NID=69=Br7oRAwgmKoK__HC6FEnuxglTFDmFxqP6Md63lKhzW1w6WkDbp3U90CDxnUKvDP6wJH8yxY5Lk5ZnFf66Q1B0d4OsYoKgq0vjfBAYXuCIAWtOuGZEOsFXanXs7pt2Mjx;
|
||||||
|
expires=Fri, 29-Jan-2016 08:41:40 GMT; path=/; domain=.google.com; HttpOnly']
|
||||||
|
- !!python/tuple
|
||||||
|
- X-Frame-Options
|
||||||
|
- [SAMEORIGIN]
|
||||||
|
- !!python/tuple
|
||||||
|
- Server
|
||||||
|
- [gws]
|
||||||
|
status: {code: 200, message: OK}
|
||||||
|
version: 1
|
||||||
@@ -49,6 +49,15 @@ def test_response_headers(scheme, tmpdir):
|
|||||||
open2 = urlopen(url).info().items()
|
open2 = urlopen(url).info().items()
|
||||||
assert sorted(open1) == sorted(open2)
|
assert sorted(open1) == sorted(open2)
|
||||||
|
|
||||||
|
def test_effective_url(scheme, tmpdir):
|
||||||
|
'''Ensure that the effective_url is captured'''
|
||||||
|
url = scheme + '://httpbin.org/redirect-to?url=/html'
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
|
||||||
|
effective_url = urlopen(url).geturl()
|
||||||
|
assert effective_url == scheme + '://httpbin.org/html'
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
|
||||||
|
assert effective_url == urlopen(url).geturl()
|
||||||
|
|
||||||
def test_multiple_requests(scheme, tmpdir):
|
def test_multiple_requests(scheme, tmpdir):
|
||||||
'''Ensure that we can cache multiple requests'''
|
'''Ensure that we can cache multiple requests'''
|
||||||
|
|||||||
@@ -10,6 +10,7 @@ from vcr.compat import mock, contextlib
|
|||||||
from vcr.cassette import Cassette
|
from vcr.cassette import Cassette
|
||||||
from vcr.errors import UnhandledHTTPRequestError
|
from vcr.errors import UnhandledHTTPRequestError
|
||||||
from vcr.patch import force_reset
|
from vcr.patch import force_reset
|
||||||
|
from vcr.matchers import path, method, query, host
|
||||||
from vcr.stubs import VCRHTTPSConnection
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
|
||||||
|
|
||||||
@@ -245,6 +246,13 @@ def test_path_transformer_with_context_manager():
|
|||||||
assert cassette._path == 'a'
|
assert cassette._path == 'a'
|
||||||
|
|
||||||
|
|
||||||
|
def test_path_transformer_None():
|
||||||
|
with Cassette.use(
|
||||||
|
path='a', path_transformer=None,
|
||||||
|
) as cassette:
|
||||||
|
assert cassette._path == 'a'
|
||||||
|
|
||||||
|
|
||||||
def test_func_path_generator():
|
def test_func_path_generator():
|
||||||
def generator(function):
|
def generator(function):
|
||||||
return os.path.join(os.path.dirname(inspect.getfile(function)),
|
return os.path.join(os.path.dirname(inspect.getfile(function)),
|
||||||
@@ -253,3 +261,52 @@ def test_func_path_generator():
|
|||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join(os.path.dirname(__file__), 'function_name')
|
assert cassette._path == os.path.join(os.path.dirname(__file__), 'function_name')
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
|
def test_use_as_decorator_on_coroutine():
|
||||||
|
original_http_connetion = httplib.HTTPConnection
|
||||||
|
@Cassette.use(inject=True)
|
||||||
|
def test_function(cassette):
|
||||||
|
assert httplib.HTTPConnection.cassette is cassette
|
||||||
|
assert httplib.HTTPConnection is not original_http_connetion
|
||||||
|
value = yield 1
|
||||||
|
assert value == 1
|
||||||
|
assert httplib.HTTPConnection.cassette is cassette
|
||||||
|
assert httplib.HTTPConnection is not original_http_connetion
|
||||||
|
value = yield 2
|
||||||
|
assert value == 2
|
||||||
|
coroutine = test_function()
|
||||||
|
value = next(coroutine)
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
value = coroutine.send(value)
|
||||||
|
except StopIteration:
|
||||||
|
break
|
||||||
|
|
||||||
|
|
||||||
|
def test_use_as_decorator_on_generator():
|
||||||
|
original_http_connetion = httplib.HTTPConnection
|
||||||
|
@Cassette.use(inject=True)
|
||||||
|
def test_function(cassette):
|
||||||
|
assert httplib.HTTPConnection.cassette is cassette
|
||||||
|
assert httplib.HTTPConnection is not original_http_connetion
|
||||||
|
yield 1
|
||||||
|
assert httplib.HTTPConnection.cassette is cassette
|
||||||
|
assert httplib.HTTPConnection is not original_http_connetion
|
||||||
|
yield 2
|
||||||
|
assert list(test_function()) == [1, 2]
|
||||||
|
|
||||||
|
|
||||||
|
def test_similar_requests(tmpdir):
|
||||||
|
# WIP needs to be finished
|
||||||
|
@Cassette.use(inject=True, match_on=(path, query, host, method))
|
||||||
|
def test_function(cassette):
|
||||||
|
conn = httplib.HTTPConnection("www.python.org")
|
||||||
|
conn.request("GET", "/index.html?test=1")
|
||||||
|
|
||||||
|
conn = httplib.HTTPConnection("www.python.org")
|
||||||
|
conn.request("GET", "/index.html?test=0")
|
||||||
|
|
||||||
|
conn = httplib.HTTPConnection("www.cool.org")
|
||||||
|
conn.request("GET", "/index.html?test=0")
|
||||||
|
cassette.similar_requests()
|
||||||
|
|||||||
@@ -73,7 +73,7 @@ def test_remove_nonexistent_post_data_parameters():
|
|||||||
def test_remove_json_post_data_parameters():
|
def test_remove_json_post_data_parameters():
|
||||||
body = b'{"id": "secret", "foo": "bar", "baz": "qux"}'
|
body = b'{"id": "secret", "foo": "bar", "baz": "qux"}'
|
||||||
request = Request('POST', 'http://google.com', body, {})
|
request = Request('POST', 'http://google.com', body, {})
|
||||||
request.add_header('Content-Type', 'application/json')
|
request.headers['Content-Type'] = 'application/json'
|
||||||
remove_post_data_parameters(request, ['id'])
|
remove_post_data_parameters(request, ['id'])
|
||||||
request_body_json = json.loads(request.body.decode('utf-8'))
|
request_body_json = json.loads(request.body.decode('utf-8'))
|
||||||
expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}'.decode('utf-8'))
|
expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}'.decode('utf-8'))
|
||||||
@@ -83,7 +83,7 @@ def test_remove_json_post_data_parameters():
|
|||||||
def test_remove_all_json_post_data_parameters():
|
def test_remove_all_json_post_data_parameters():
|
||||||
body = b'{"id": "secret", "foo": "bar"}'
|
body = b'{"id": "secret", "foo": "bar"}'
|
||||||
request = Request('POST', 'http://google.com', body, {})
|
request = Request('POST', 'http://google.com', body, {})
|
||||||
request.add_header('Content-Type', 'application/json')
|
request.headers['Content-Type'] = 'application/json'
|
||||||
remove_post_data_parameters(request, ['id', 'foo'])
|
remove_post_data_parameters(request, ['id', 'foo'])
|
||||||
assert request.body == b'{}'
|
assert request.body == b'{}'
|
||||||
|
|
||||||
@@ -91,6 +91,6 @@ def test_remove_all_json_post_data_parameters():
|
|||||||
def test_remove_nonexistent_json_post_data_parameters():
|
def test_remove_nonexistent_json_post_data_parameters():
|
||||||
body = b'{}'
|
body = b'{}'
|
||||||
request = Request('POST', 'http://google.com', body, {})
|
request = Request('POST', 'http://google.com', body, {})
|
||||||
request.add_header('Content-Type', 'application/json')
|
request.headers['Content-Type'] = 'application/json'
|
||||||
remove_post_data_parameters(request, ['id'])
|
remove_post_data_parameters(request, ['id'])
|
||||||
assert request.body == b'{}'
|
assert request.body == b'{}'
|
||||||
|
|||||||
@@ -1,5 +1,7 @@
|
|||||||
import itertools
|
import itertools
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
from vcr import matchers
|
from vcr import matchers
|
||||||
from vcr import request
|
from vcr import request
|
||||||
|
|
||||||
@@ -35,6 +37,107 @@ def test_uri_matcher():
|
|||||||
assert matched
|
assert matched
|
||||||
|
|
||||||
|
|
||||||
|
req1_body = (b"<?xml version='1.0'?><methodCall><methodName>test</methodName>"
|
||||||
|
b"<params><param><value><array><data><value><struct>"
|
||||||
|
b"<member><name>a</name><value><string>1</string></value></member>"
|
||||||
|
b"<member><name>b</name><value><string>2</string></value></member>"
|
||||||
|
b"</struct></value></data></array></value></param></params></methodCall>")
|
||||||
|
req2_body = (b"<?xml version='1.0'?><methodCall><methodName>test</methodName>"
|
||||||
|
b"<params><param><value><array><data><value><struct>"
|
||||||
|
b"<member><name>b</name><value><string>2</string></value></member>"
|
||||||
|
b"<member><name>a</name><value><string>1</string></value></member>"
|
||||||
|
b"</struct></value></data></array></value></param></params></methodCall>")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("r1, r2", [
|
||||||
|
(
|
||||||
|
request.Request('POST', 'http://host.com/', '123', {}),
|
||||||
|
request.Request('POST', 'http://another-host.com/',
|
||||||
|
'123', {'Some-Header': 'value'})
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request('POST', 'http://host.com/', 'a=1&b=2',
|
||||||
|
{'Content-Type': 'application/x-www-form-urlencoded'}),
|
||||||
|
request.Request('POST', 'http://host.com/', 'b=2&a=1',
|
||||||
|
{'Content-Type': 'application/x-www-form-urlencoded'})
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request('POST', 'http://host.com/', '123', {}),
|
||||||
|
request.Request('POST', 'http://another-host.com/', '123', {'Some-Header': 'value'})
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', 'a=1&b=2',
|
||||||
|
{'Content-Type': 'application/x-www-form-urlencoded'}
|
||||||
|
),
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', 'b=2&a=1',
|
||||||
|
{'Content-Type': 'application/x-www-form-urlencoded'}
|
||||||
|
)
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', '{"a": 1, "b": 2}',
|
||||||
|
{'Content-Type': 'application/json'}
|
||||||
|
),
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', '{"b": 2, "a": 1}',
|
||||||
|
{'content-type': 'application/json'}
|
||||||
|
)
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', req1_body,
|
||||||
|
{'User-Agent': 'xmlrpclib', 'Content-Type': 'text/xml'}
|
||||||
|
),
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', req2_body,
|
||||||
|
{'user-agent': 'somexmlrpc', 'content-type': 'text/xml'}
|
||||||
|
)
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/',
|
||||||
|
'{"a": 1, "b": 2}', {'Content-Type': 'application/json'}
|
||||||
|
),
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/',
|
||||||
|
'{"b": 2, "a": 1}', {'content-type': 'application/json'}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
])
|
||||||
|
def test_body_matcher_does_match(r1, r2):
|
||||||
|
assert matchers.body(r1, r2)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize("r1, r2", [
|
||||||
|
(
|
||||||
|
request.Request('POST', 'http://host.com/', '{"a": 1, "b": 2}', {}),
|
||||||
|
request.Request('POST', 'http://host.com/', '{"b": 2, "a": 1}', {}),
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/',
|
||||||
|
'{"a": 1, "b": 3}', {'Content-Type': 'application/json'}
|
||||||
|
),
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/',
|
||||||
|
'{"b": 2, "a": 1}', {'content-type': 'application/json'}
|
||||||
|
)
|
||||||
|
),
|
||||||
|
(
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', req1_body, {'Content-Type': 'text/xml'}
|
||||||
|
),
|
||||||
|
request.Request(
|
||||||
|
'POST', 'http://host.com/', req2_body, {'content-type': 'text/xml'}
|
||||||
|
)
|
||||||
|
)
|
||||||
|
])
|
||||||
|
def test_body_match_does_not_match(r1, r2):
|
||||||
|
assert not matchers.body(r1, r2)
|
||||||
|
|
||||||
|
|
||||||
def test_query_matcher():
|
def test_query_matcher():
|
||||||
req1 = request.Request('GET', 'http://host.com/?a=b&c=d', '', {})
|
req1 = request.Request('GET', 'http://host.com/?a=b&c=d', '', {})
|
||||||
req2 = request.Request('GET', 'http://host.com/?c=d&a=b', '', {})
|
req2 = request.Request('GET', 'http://host.com/?c=d&a=b', '', {})
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vcr.request import Request
|
from vcr.request import Request, HeadersDict
|
||||||
|
|
||||||
|
|
||||||
def test_str():
|
def test_str():
|
||||||
@@ -12,11 +12,16 @@ def test_headers():
|
|||||||
headers = {'X-Header1': ['h1'], 'X-Header2': 'h2'}
|
headers = {'X-Header1': ['h1'], 'X-Header2': 'h2'}
|
||||||
req = Request('GET', 'http://go.com/', '', headers)
|
req = Request('GET', 'http://go.com/', '', headers)
|
||||||
assert req.headers == {'X-Header1': 'h1', 'X-Header2': 'h2'}
|
assert req.headers == {'X-Header1': 'h1', 'X-Header2': 'h2'}
|
||||||
|
req.headers['X-Header1'] = 'h11'
|
||||||
req.add_header('X-Header1', 'h11')
|
|
||||||
assert req.headers == {'X-Header1': 'h11', 'X-Header2': 'h2'}
|
assert req.headers == {'X-Header1': 'h11', 'X-Header2': 'h2'}
|
||||||
|
|
||||||
|
|
||||||
|
def test_add_header_deprecated():
|
||||||
|
req = Request('GET', 'http://go.com/', '', {})
|
||||||
|
pytest.deprecated_call(req.add_header, 'foo', 'bar')
|
||||||
|
assert req.headers == {'foo': 'bar'}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("uri, expected_port", [
|
@pytest.mark.parametrize("uri, expected_port", [
|
||||||
('http://go.com/', 80),
|
('http://go.com/', 80),
|
||||||
('http://go.com:80/', 80),
|
('http://go.com:80/', 80),
|
||||||
@@ -36,3 +41,30 @@ def test_uri():
|
|||||||
|
|
||||||
req = Request('GET', 'http://go.com:80/', '', {})
|
req = Request('GET', 'http://go.com:80/', '', {})
|
||||||
assert req.uri == 'http://go.com:80/'
|
assert req.uri == 'http://go.com:80/'
|
||||||
|
|
||||||
|
|
||||||
|
def test_HeadersDict():
|
||||||
|
|
||||||
|
# Simple test of CaseInsensitiveDict
|
||||||
|
h = HeadersDict()
|
||||||
|
assert h == {}
|
||||||
|
h['Content-Type'] = 'application/json'
|
||||||
|
assert h == {'Content-Type': 'application/json'}
|
||||||
|
assert h['content-type'] == 'application/json'
|
||||||
|
assert h['CONTENT-TYPE'] == 'application/json'
|
||||||
|
|
||||||
|
# Test feature of HeadersDict: devolve list to first element
|
||||||
|
h = HeadersDict()
|
||||||
|
assert h == {}
|
||||||
|
h['x'] = ['foo', 'bar']
|
||||||
|
assert h == {'x': 'foo'}
|
||||||
|
|
||||||
|
# Test feature of HeadersDict: preserve original key case
|
||||||
|
h = HeadersDict()
|
||||||
|
assert h == {}
|
||||||
|
h['Content-Type'] = 'application/json'
|
||||||
|
assert h == {'Content-Type': 'application/json'}
|
||||||
|
h['content-type'] = 'text/plain'
|
||||||
|
assert h == {'Content-Type': 'text/plain'}
|
||||||
|
h['CONtent-tyPE'] = 'whoa'
|
||||||
|
assert h == {'Content-Type': 'whoa'}
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
|
# -*- encoding: utf-8 -*-
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from vcr.compat import mock
|
from vcr.compat import mock
|
||||||
@@ -27,6 +28,55 @@ def test_deserialize_new_json_cassette():
|
|||||||
deserialize(f.read(), jsonserializer)
|
deserialize(f.read(), jsonserializer)
|
||||||
|
|
||||||
|
|
||||||
|
REQBODY_TEMPLATE = u'''\
|
||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: {req_body}
|
||||||
|
headers:
|
||||||
|
Content-Type: [application/x-www-form-urlencoded]
|
||||||
|
Host: [httpbin.org]
|
||||||
|
method: POST
|
||||||
|
uri: http://httpbin.org/post
|
||||||
|
response:
|
||||||
|
body: {{string: ""}}
|
||||||
|
headers:
|
||||||
|
content-length: ['0']
|
||||||
|
content-type: [application/json]
|
||||||
|
status: {{code: 200, message: OK}}
|
||||||
|
'''
|
||||||
|
|
||||||
|
|
||||||
|
# A cassette generated under Python 2 stores the request body as a string,
|
||||||
|
# but the same cassette generated under Python 3 stores it as "!!binary".
|
||||||
|
# Make sure we accept both forms, regardless of whether we're running under
|
||||||
|
# Python 2 or 3.
|
||||||
|
@pytest.mark.parametrize("req_body, expect", [
|
||||||
|
# Cassette written under Python 2 (pure ASCII body)
|
||||||
|
('x=5&y=2', b'x=5&y=2'),
|
||||||
|
# Cassette written under Python 3 (pure ASCII body)
|
||||||
|
('!!binary |\n eD01Jnk9Mg==', b'x=5&y=2'),
|
||||||
|
|
||||||
|
# Request body has non-ASCII chars (x=föo&y=2), encoded in UTF-8.
|
||||||
|
('!!python/str "x=f\\xF6o&y=2"', b'x=f\xc3\xb6o&y=2'),
|
||||||
|
('!!binary |\n eD1mw7ZvJnk9Mg==', b'x=f\xc3\xb6o&y=2'),
|
||||||
|
|
||||||
|
# Same request body, this time encoded in UTF-16. In this case, we
|
||||||
|
# write the same YAML file under both Python 2 and 3, so there's only
|
||||||
|
# one test case here.
|
||||||
|
('!!binary |\n //54AD0AZgD2AG8AJgB5AD0AMgA=',
|
||||||
|
b'\xff\xfex\x00=\x00f\x00\xf6\x00o\x00&\x00y\x00=\x002\x00'),
|
||||||
|
|
||||||
|
# Same again, this time encoded in ISO-8859-1.
|
||||||
|
('!!binary |\n eD1m9m8meT0y', b'x=f\xf6o&y=2'),
|
||||||
|
])
|
||||||
|
def test_deserialize_py2py3_yaml_cassette(tmpdir, req_body, expect):
|
||||||
|
cfile = tmpdir.join('test_cassette.yaml')
|
||||||
|
cfile.write(REQBODY_TEMPLATE.format(req_body=req_body))
|
||||||
|
with open(str(cfile)) as f:
|
||||||
|
(requests, responses) = deserialize(f.read(), yamlserializer)
|
||||||
|
assert requests[0].body == expect
|
||||||
|
|
||||||
|
|
||||||
@mock.patch.object(jsonserializer.json, 'dumps',
|
@mock.patch.object(jsonserializer.json, 'dumps',
|
||||||
side_effect=UnicodeDecodeError('utf-8', b'unicode error in serialization',
|
side_effect=UnicodeDecodeError('utf-8', b'unicode error in serialization',
|
||||||
0, 10, 'blew up'))
|
0, 10, 'blew up'))
|
||||||
|
|||||||
7
tests/unit/test_stubs.py
Normal file
7
tests/unit/test_stubs.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
|
||||||
|
class TestVCRConnection(object):
|
||||||
|
def test_setting_of_attributes_get_propogated_to_real_connection(self):
|
||||||
|
vcr_connection = VCRHTTPSConnection('www.examplehost.com')
|
||||||
|
vcr_connection.ssl_version = 'example_ssl_version'
|
||||||
|
assert vcr_connection.real_connection.ssl_version == 'example_ssl_version'
|
||||||
@@ -1,11 +1,13 @@
|
|||||||
import os
|
import os
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
from six.moves import http_client as httplib
|
||||||
|
|
||||||
from vcr import VCR, use_cassette
|
from vcr import VCR, use_cassette
|
||||||
from vcr.compat import mock
|
from vcr.compat import mock
|
||||||
from vcr.request import Request
|
from vcr.request import Request
|
||||||
from vcr.stubs import VCRHTTPSConnection
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
from vcr.patch import _HTTPConnection, force_reset
|
||||||
|
|
||||||
|
|
||||||
def test_vcr_use_cassette():
|
def test_vcr_use_cassette():
|
||||||
@@ -15,9 +17,11 @@ def test_vcr_use_cassette():
|
|||||||
'vcr.cassette.Cassette.load',
|
'vcr.cassette.Cassette.load',
|
||||||
return_value=mock.MagicMock(inject=False)
|
return_value=mock.MagicMock(inject=False)
|
||||||
) as mock_cassette_load:
|
) as mock_cassette_load:
|
||||||
|
|
||||||
@test_vcr.use_cassette('test')
|
@test_vcr.use_cassette('test')
|
||||||
def function():
|
def function():
|
||||||
pass
|
pass
|
||||||
|
|
||||||
assert mock_cassette_load.call_count == 0
|
assert mock_cassette_load.call_count == 0
|
||||||
function()
|
function()
|
||||||
assert mock_cassette_load.call_args[1]['record_mode'] is record_mode
|
assert mock_cassette_load.call_args[1]['record_mode'] is record_mode
|
||||||
@@ -38,9 +42,11 @@ def test_vcr_use_cassette():
|
|||||||
|
|
||||||
def test_vcr_before_record_request_params():
|
def test_vcr_before_record_request_params():
|
||||||
base_path = 'http://httpbin.org/'
|
base_path = 'http://httpbin.org/'
|
||||||
|
|
||||||
def before_record_cb(request):
|
def before_record_cb(request):
|
||||||
if request.path != '/get':
|
if request.path != '/get':
|
||||||
return request
|
return request
|
||||||
|
|
||||||
test_vcr = VCR(filter_headers=('cookie',), before_record_request=before_record_cb,
|
test_vcr = VCR(filter_headers=('cookie',), before_record_request=before_record_cb,
|
||||||
ignore_hosts=('www.test.com',), ignore_localhost=True,
|
ignore_hosts=('www.test.com',), ignore_localhost=True,
|
||||||
filter_query_parameters=('foo',))
|
filter_query_parameters=('foo',))
|
||||||
@@ -53,8 +59,12 @@ def test_vcr_before_record_request_params():
|
|||||||
assert cassette.filter_request(
|
assert cassette.filter_request(
|
||||||
Request('GET', base_path + '?foo=bar', '',
|
Request('GET', base_path + '?foo=bar', '',
|
||||||
{'cookie': 'test', 'other': 'fun'})).headers == {'other': 'fun'}
|
{'cookie': 'test', 'other': 'fun'})).headers == {'other': 'fun'}
|
||||||
assert cassette.filter_request(Request('GET', base_path + '?foo=bar', '',
|
assert cassette.filter_request(
|
||||||
{'cookie': 'test', 'other': 'fun'})).headers == {'other': 'fun'}
|
Request(
|
||||||
|
'GET', base_path + '?foo=bar', '',
|
||||||
|
{'cookie': 'test', 'other': 'fun'}
|
||||||
|
)
|
||||||
|
).headers == {'other': 'fun'}
|
||||||
|
|
||||||
assert cassette.filter_request(Request('GET', 'http://www.test.com' + '?foo=bar', '',
|
assert cassette.filter_request(Request('GET', 'http://www.test.com' + '?foo=bar', '',
|
||||||
{'cookie': 'test', 'other': 'fun'})) is None
|
{'cookie': 'test', 'other': 'fun'})) is None
|
||||||
@@ -64,6 +74,69 @@ def test_vcr_before_record_request_params():
|
|||||||
assert cassette.filter_request(Request('GET', base_path + 'get', '', {})) is not None
|
assert cassette.filter_request(Request('GET', base_path + 'get', '', {})) is not None
|
||||||
|
|
||||||
|
|
||||||
|
def test_vcr_before_record_response_iterable():
|
||||||
|
# Regression test for #191
|
||||||
|
|
||||||
|
request = Request('GET', '/', '', {})
|
||||||
|
response = object() # just can't be None
|
||||||
|
|
||||||
|
# Prevent actually saving the cassette
|
||||||
|
with mock.patch('vcr.cassette.save_cassette'):
|
||||||
|
|
||||||
|
# Baseline: non-iterable before_record_response should work
|
||||||
|
mock_filter = mock.Mock()
|
||||||
|
vcr = VCR(before_record_response=mock_filter)
|
||||||
|
with vcr.use_cassette('test') as cassette:
|
||||||
|
assert mock_filter.call_count == 0
|
||||||
|
cassette.append(request, response)
|
||||||
|
assert mock_filter.call_count == 1
|
||||||
|
|
||||||
|
# Regression test: iterable before_record_response should work too
|
||||||
|
mock_filter = mock.Mock()
|
||||||
|
vcr = VCR(before_record_response=(mock_filter,))
|
||||||
|
with vcr.use_cassette('test') as cassette:
|
||||||
|
assert mock_filter.call_count == 0
|
||||||
|
cassette.append(request, response)
|
||||||
|
assert mock_filter.call_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_before_record_response_as_filter():
|
||||||
|
request = Request('GET', '/', '', {})
|
||||||
|
response = object() # just can't be None
|
||||||
|
|
||||||
|
# Prevent actually saving the cassette
|
||||||
|
with mock.patch('vcr.cassette.save_cassette'):
|
||||||
|
|
||||||
|
filter_all = mock.Mock(return_value=None)
|
||||||
|
vcr = VCR(before_record_response=filter_all)
|
||||||
|
with vcr.use_cassette('test') as cassette:
|
||||||
|
cassette.append(request, response)
|
||||||
|
assert cassette.data == []e
|
||||||
|
assert not cassette.dirty
|
||||||
|
|
||||||
|
|
||||||
|
def test_vcr_path_transformer():
|
||||||
|
# Regression test for #199
|
||||||
|
|
||||||
|
# Prevent actually saving the cassette
|
||||||
|
with mock.patch('vcr.cassette.save_cassette'):
|
||||||
|
|
||||||
|
# Baseline: path should be unchanged
|
||||||
|
vcr = VCR()
|
||||||
|
with vcr.use_cassette('test') as cassette:
|
||||||
|
assert cassette._path == 'test'
|
||||||
|
|
||||||
|
# Regression test: path_transformer=None should do the same.
|
||||||
|
vcr = VCR(path_transformer=None)
|
||||||
|
with vcr.use_cassette('test') as cassette:
|
||||||
|
assert cassette._path == 'test'
|
||||||
|
|
||||||
|
# and it should still work with cassette_library_dir
|
||||||
|
vcr = VCR(cassette_library_dir='/foo')
|
||||||
|
with vcr.use_cassette('test') as cassette:
|
||||||
|
assert cassette._path == '/foo/test'
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def random_fixture():
|
def random_fixture():
|
||||||
return 1
|
return 1
|
||||||
@@ -103,6 +176,7 @@ def test_custom_patchers():
|
|||||||
|
|
||||||
def test_inject_cassette():
|
def test_inject_cassette():
|
||||||
vcr = VCR(inject_cassette=True)
|
vcr = VCR(inject_cassette=True)
|
||||||
|
|
||||||
@vcr.use_cassette('test', record_mode='once')
|
@vcr.use_cassette('test', record_mode='once')
|
||||||
def with_cassette_injected(cassette):
|
def with_cassette_injected(cassette):
|
||||||
assert cassette.record_mode == 'once'
|
assert cassette.record_mode == 'once'
|
||||||
@@ -117,9 +191,11 @@ def test_inject_cassette():
|
|||||||
|
|
||||||
def test_with_current_defaults():
|
def test_with_current_defaults():
|
||||||
vcr = VCR(inject_cassette=True, record_mode='once')
|
vcr = VCR(inject_cassette=True, record_mode='once')
|
||||||
|
|
||||||
@vcr.use_cassette('test', with_current_defaults=False)
|
@vcr.use_cassette('test', with_current_defaults=False)
|
||||||
def changing_defaults(cassette, checks):
|
def changing_defaults(cassette, checks):
|
||||||
checks(cassette)
|
checks(cassette)
|
||||||
|
|
||||||
@vcr.use_cassette('test', with_current_defaults=True)
|
@vcr.use_cassette('test', with_current_defaults=True)
|
||||||
def current_defaults(cassette, checks):
|
def current_defaults(cassette, checks):
|
||||||
checks(cassette)
|
checks(cassette)
|
||||||
@@ -141,27 +217,33 @@ def test_with_current_defaults():
|
|||||||
def test_cassette_library_dir_with_decoration_and_no_explicit_path():
|
def test_cassette_library_dir_with_decoration_and_no_explicit_path():
|
||||||
library_dir = '/libary_dir'
|
library_dir = '/libary_dir'
|
||||||
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
|
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
|
||||||
|
|
||||||
@vcr.use_cassette()
|
@vcr.use_cassette()
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join(library_dir, 'function_name')
|
assert cassette._path == os.path.join(library_dir, 'function_name')
|
||||||
|
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
def test_cassette_library_dir_with_decoration_and_explicit_path():
|
def test_cassette_library_dir_with_decoration_and_explicit_path():
|
||||||
library_dir = '/libary_dir'
|
library_dir = '/libary_dir'
|
||||||
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
|
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
|
||||||
|
|
||||||
@vcr.use_cassette(path='custom_name')
|
@vcr.use_cassette(path='custom_name')
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join(library_dir, 'custom_name')
|
assert cassette._path == os.path.join(library_dir, 'custom_name')
|
||||||
|
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
def test_cassette_library_dir_with_decoration_and_super_explicit_path():
|
def test_cassette_library_dir_with_decoration_and_super_explicit_path():
|
||||||
library_dir = '/libary_dir'
|
library_dir = '/libary_dir'
|
||||||
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
|
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
|
||||||
|
|
||||||
@vcr.use_cassette(path=os.path.join(library_dir, 'custom_name'))
|
@vcr.use_cassette(path=os.path.join(library_dir, 'custom_name'))
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join(library_dir, 'custom_name')
|
assert cassette._path == os.path.join(library_dir, 'custom_name')
|
||||||
|
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
@@ -169,31 +251,38 @@ def test_cassette_library_dir_with_path_transformer():
|
|||||||
library_dir = '/libary_dir'
|
library_dir = '/libary_dir'
|
||||||
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir,
|
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir,
|
||||||
path_transformer=lambda path: path + '.json')
|
path_transformer=lambda path: path + '.json')
|
||||||
|
|
||||||
@vcr.use_cassette()
|
@vcr.use_cassette()
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join(library_dir, 'function_name.json')
|
assert cassette._path == os.path.join(library_dir, 'function_name.json')
|
||||||
|
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
def test_use_cassette_with_no_extra_invocation():
|
def test_use_cassette_with_no_extra_invocation():
|
||||||
vcr = VCR(inject_cassette=True, cassette_library_dir='/')
|
vcr = VCR(inject_cassette=True, cassette_library_dir='/')
|
||||||
|
|
||||||
@vcr.use_cassette
|
@vcr.use_cassette
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join('/', 'function_name')
|
assert cassette._path == os.path.join('/', 'function_name')
|
||||||
|
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
def test_path_transformer():
|
def test_path_transformer():
|
||||||
vcr = VCR(inject_cassette=True, cassette_library_dir='/',
|
vcr = VCR(inject_cassette=True, cassette_library_dir='/',
|
||||||
path_transformer=lambda x: x + '_test')
|
path_transformer=lambda x: x + '_test')
|
||||||
|
|
||||||
@vcr.use_cassette
|
@vcr.use_cassette
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join('/', 'function_name_test')
|
assert cassette._path == os.path.join('/', 'function_name_test')
|
||||||
|
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
def test_cassette_name_generator_defaults_to_using_module_function_defined_in():
|
def test_cassette_name_generator_defaults_to_using_module_function_defined_in():
|
||||||
vcr = VCR(inject_cassette=True)
|
vcr = VCR(inject_cassette=True)
|
||||||
|
|
||||||
@vcr.use_cassette
|
@vcr.use_cassette
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join(os.path.dirname(__file__),
|
assert cassette._path == os.path.join(os.path.dirname(__file__),
|
||||||
@@ -203,8 +292,62 @@ def test_cassette_name_generator_defaults_to_using_module_function_defined_in():
|
|||||||
|
|
||||||
def test_ensure_suffix():
|
def test_ensure_suffix():
|
||||||
vcr = VCR(inject_cassette=True, path_transformer=VCR.ensure_suffix('.yaml'))
|
vcr = VCR(inject_cassette=True, path_transformer=VCR.ensure_suffix('.yaml'))
|
||||||
|
|
||||||
@vcr.use_cassette
|
@vcr.use_cassette
|
||||||
def function_name(cassette):
|
def function_name(cassette):
|
||||||
assert cassette._path == os.path.join(os.path.dirname(__file__),
|
assert cassette._path == os.path.join(os.path.dirname(__file__),
|
||||||
'function_name.yaml')
|
'function_name.yaml')
|
||||||
|
|
||||||
function_name()
|
function_name()
|
||||||
|
|
||||||
|
|
||||||
|
def test_additional_matchers():
|
||||||
|
vcr = VCR(match_on=('uri',), inject_cassette=True)
|
||||||
|
|
||||||
|
@vcr.use_cassette
|
||||||
|
def function_defaults(cassette):
|
||||||
|
assert set(cassette._match_on) == set([vcr.matchers['uri']])
|
||||||
|
|
||||||
|
@vcr.use_cassette(additional_matchers=('body',))
|
||||||
|
def function_additional(cassette):
|
||||||
|
assert set(cassette._match_on) == set([vcr.matchers['uri'], vcr.matchers['body']])
|
||||||
|
|
||||||
|
function_defaults()
|
||||||
|
function_additional()
|
||||||
|
|
||||||
|
|
||||||
|
def test_decoration_should_respect_function_return_value():
|
||||||
|
vcr = VCR()
|
||||||
|
ret = 'a-return-value'
|
||||||
|
|
||||||
|
@vcr.use_cassette
|
||||||
|
def function_with_return():
|
||||||
|
return ret
|
||||||
|
|
||||||
|
assert ret == function_with_return()
|
||||||
|
|
||||||
|
|
||||||
|
class TestVCRClass(VCR().test_case()):
|
||||||
|
|
||||||
|
def no_decoration(self):
|
||||||
|
assert httplib.HTTPConnection == _HTTPConnection
|
||||||
|
self.test_dynamically_added()
|
||||||
|
assert httplib.HTTPConnection == _HTTPConnection
|
||||||
|
|
||||||
|
def test_one(self):
|
||||||
|
with force_reset():
|
||||||
|
self.no_decoration()
|
||||||
|
with force_reset():
|
||||||
|
self.test_two()
|
||||||
|
assert httplib.HTTPConnection != _HTTPConnection
|
||||||
|
|
||||||
|
def test_two(self):
|
||||||
|
assert httplib.HTTPConnection != _HTTPConnection
|
||||||
|
|
||||||
|
|
||||||
|
def test_dynamically_added(self):
|
||||||
|
assert httplib.HTTPConnection != _HTTPConnection
|
||||||
|
|
||||||
|
|
||||||
|
TestVCRClass.test_dynamically_added = test_dynamically_added
|
||||||
|
del test_dynamically_added
|
||||||
|
|||||||
139
vcr/cassette.py
139
vcr/cassette.py
@@ -1,11 +1,10 @@
|
|||||||
"""The container for recorded requests and responses"""
|
import inspect
|
||||||
import functools
|
|
||||||
import logging
|
import logging
|
||||||
|
import operator
|
||||||
|
import sys
|
||||||
|
|
||||||
import wrapt
|
import wrapt
|
||||||
|
|
||||||
# Internal imports
|
|
||||||
from .compat import contextlib, collections
|
from .compat import contextlib, collections
|
||||||
from .errors import UnhandledHTTPRequestError
|
from .errors import UnhandledHTTPRequestError
|
||||||
from .matchers import requests_match, uri, method
|
from .matchers import requests_match, uri, method
|
||||||
@@ -22,10 +21,18 @@ class CassetteContextDecorator(object):
|
|||||||
"""Context manager/decorator that handles installing the cassette and
|
"""Context manager/decorator that handles installing the cassette and
|
||||||
removing cassettes.
|
removing cassettes.
|
||||||
|
|
||||||
This class defers the creation of a new cassette instance until the point at
|
This class defers the creation of a new cassette instance until
|
||||||
which it is installed by context manager or decorator. The fact that a new
|
the point at which it is installed by context manager or
|
||||||
cassette is used with each application prevents the state of any cassette
|
decorator. The fact that a new cassette is used with each
|
||||||
from interfering with another.
|
application prevents the state of any cassette from interfering
|
||||||
|
with another.
|
||||||
|
|
||||||
|
Instances of this class are NOT reentrant as context managers.
|
||||||
|
However, functions that are decorated by
|
||||||
|
``CassetteContextDecorator`` instances ARE reentrant. See the
|
||||||
|
implementation of ``__call__`` on this class for more details.
|
||||||
|
There is also a guard against attempts to reenter instances of
|
||||||
|
this class as a context manager in ``__exit__``.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
_non_cassette_arguments = ('path_transformer', 'func_path_generator')
|
_non_cassette_arguments = ('path_transformer', 'func_path_generator')
|
||||||
@@ -43,21 +50,18 @@ class CassetteContextDecorator(object):
|
|||||||
with contextlib.ExitStack() as exit_stack:
|
with contextlib.ExitStack() as exit_stack:
|
||||||
for patcher in CassettePatcherBuilder(cassette).build():
|
for patcher in CassettePatcherBuilder(cassette).build():
|
||||||
exit_stack.enter_context(patcher)
|
exit_stack.enter_context(patcher)
|
||||||
log.debug('Entered context for cassette at {0}.'.format(cassette._path))
|
log_format = '{action} context for cassette at {path}.'
|
||||||
|
log.debug(log_format.format(
|
||||||
|
action="Entering", path=cassette._path
|
||||||
|
))
|
||||||
yield cassette
|
yield cassette
|
||||||
log.debug('Exiting context for cassette at {0}.'.format(cassette._path))
|
log.debug(log_format.format(
|
||||||
|
action="Exiting", path=cassette._path
|
||||||
|
))
|
||||||
# TODO(@IvanMalison): Hmmm. it kind of feels like this should be
|
# TODO(@IvanMalison): Hmmm. it kind of feels like this should be
|
||||||
# somewhere else.
|
# somewhere else.
|
||||||
cassette._save()
|
cassette._save()
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def key_predicate(cls, key, value):
|
|
||||||
return key in cls._non_cassette_arguments
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def _split_keys(cls, kwargs):
|
|
||||||
return partition_dict(cls.key_predicate, kwargs)
|
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
# This assertion is here to prevent the dangerous behavior
|
# This assertion is here to prevent the dangerous behavior
|
||||||
# that would result from forgetting about a __finish before
|
# that would result from forgetting about a __finish before
|
||||||
@@ -68,8 +72,11 @@ class CassetteContextDecorator(object):
|
|||||||
# with context_decorator:
|
# with context_decorator:
|
||||||
# pass
|
# pass
|
||||||
assert self.__finish is None, "Cassette already open."
|
assert self.__finish is None, "Cassette already open."
|
||||||
other_kwargs, cassette_kwargs = self._split_keys(self._args_getter())
|
other_kwargs, cassette_kwargs = partition_dict(
|
||||||
if 'path_transformer' in other_kwargs:
|
lambda key, _: key in self._non_cassette_arguments,
|
||||||
|
self._args_getter()
|
||||||
|
)
|
||||||
|
if other_kwargs.get('path_transformer'):
|
||||||
transformer = other_kwargs['path_transformer']
|
transformer = other_kwargs['path_transformer']
|
||||||
cassette_kwargs['path'] = transformer(cassette_kwargs['path'])
|
cassette_kwargs['path'] = transformer(cassette_kwargs['path'])
|
||||||
self.__finish = self._patch_generator(self.cls.load(**cassette_kwargs))
|
self.__finish = self._patch_generator(self.cls.load(**cassette_kwargs))
|
||||||
@@ -84,36 +91,88 @@ class CassetteContextDecorator(object):
|
|||||||
# This awkward cloning thing is done to ensure that decorated
|
# This awkward cloning thing is done to ensure that decorated
|
||||||
# functions are reentrant. This is required for thread
|
# functions are reentrant. This is required for thread
|
||||||
# safety and the correct operation of recursive functions.
|
# safety and the correct operation of recursive functions.
|
||||||
args_getter = self._build_args_getter_for_decorator(
|
args_getter = self._build_args_getter_for_decorator(function)
|
||||||
function, self._args_getter
|
return type(self)(self.cls, args_getter)._execute_function(
|
||||||
|
function, args, kwargs
|
||||||
)
|
)
|
||||||
clone = type(self)(self.cls, args_getter)
|
|
||||||
with clone as cassette:
|
def _execute_function(self, function, args, kwargs):
|
||||||
|
if inspect.isgeneratorfunction(function):
|
||||||
|
handler = self._handle_coroutine
|
||||||
|
else:
|
||||||
|
handler = self._handle_function
|
||||||
|
return handler(function, args, kwargs)
|
||||||
|
|
||||||
|
def _handle_coroutine(self, function, args, kwargs):
|
||||||
|
"""Wraps a coroutine so that we're inside the cassette context for the
|
||||||
|
duration of the coroutine.
|
||||||
|
"""
|
||||||
|
with self as cassette:
|
||||||
|
coroutine = self.__handle_function(cassette, function, args, kwargs)
|
||||||
|
# We don't need to catch StopIteration. The caller (Tornado's
|
||||||
|
# gen.coroutine, for example) will handle that.
|
||||||
|
to_yield = next(coroutine)
|
||||||
|
while True:
|
||||||
|
try:
|
||||||
|
to_send = yield to_yield
|
||||||
|
except Exception:
|
||||||
|
to_yield = coroutine.throw(*sys.exc_info())
|
||||||
|
else:
|
||||||
|
to_yield = coroutine.send(to_send)
|
||||||
|
|
||||||
|
def __handle_function(self, cassette, function, args, kwargs):
|
||||||
if cassette.inject:
|
if cassette.inject:
|
||||||
return function(cassette, *args, **kwargs)
|
return function(cassette, *args, **kwargs)
|
||||||
else:
|
else:
|
||||||
return function(*args, **kwargs)
|
return function(*args, **kwargs)
|
||||||
|
|
||||||
|
def _handle_function(self, function, args, kwargs):
|
||||||
|
with self as cassette:
|
||||||
|
return self.__handle_function(cassette, function, args, kwargs)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_function_name(function):
|
def get_function_name(function):
|
||||||
return function.__name__
|
return function.__name__
|
||||||
|
|
||||||
@classmethod
|
def _build_args_getter_for_decorator(self, function):
|
||||||
def _build_args_getter_for_decorator(cls, function, args_getter):
|
|
||||||
def new_args_getter():
|
def new_args_getter():
|
||||||
kwargs = args_getter()
|
kwargs = self._args_getter()
|
||||||
if 'path' not in kwargs:
|
if 'path' not in kwargs:
|
||||||
name_generator = (kwargs.get('func_path_generator') or
|
name_generator = (kwargs.get('func_path_generator') or
|
||||||
cls.get_function_name)
|
self.get_function_name)
|
||||||
path = name_generator(function)
|
path = name_generator(function)
|
||||||
kwargs['path'] = path
|
kwargs['path'] = path
|
||||||
return kwargs
|
return kwargs
|
||||||
return new_args_getter
|
return new_args_getter
|
||||||
|
|
||||||
|
|
||||||
|
class SimilarityScorer(object):
|
||||||
|
|
||||||
|
def __init__(self, matchers, request, ascending=False):
|
||||||
|
self._matchers = matchers
|
||||||
|
self._request = request
|
||||||
|
self._ascending = False
|
||||||
|
|
||||||
|
def score(self, candidate, play_count):
|
||||||
|
value = 1
|
||||||
|
total = 0
|
||||||
|
if play_count < 1:
|
||||||
|
total += value
|
||||||
|
if self._ascending:
|
||||||
|
value *= 2
|
||||||
|
for matcher in self._matchers[::-1]:
|
||||||
|
if matcher(self._request, candidate):
|
||||||
|
total += value
|
||||||
|
if self._ascending:
|
||||||
|
value *= 2
|
||||||
|
return total
|
||||||
|
|
||||||
|
|
||||||
class Cassette(object):
|
class Cassette(object):
|
||||||
"""A container for recorded requests and responses"""
|
"""A container for recorded requests and responses"""
|
||||||
|
|
||||||
|
max_playcount = 1
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def load(cls, **kwargs):
|
def load(cls, **kwargs):
|
||||||
"""Instantiate and load the cassette stored at the specified path."""
|
"""Instantiate and load the cassette stored at the specified path."""
|
||||||
@@ -131,14 +190,15 @@ class Cassette(object):
|
|||||||
|
|
||||||
def __init__(self, path, serializer=yamlserializer, record_mode='once',
|
def __init__(self, path, serializer=yamlserializer, record_mode='once',
|
||||||
match_on=(uri, method), before_record_request=None,
|
match_on=(uri, method), before_record_request=None,
|
||||||
before_record_response=None, custom_patches=(),
|
before_record_response=None, custom_patches=(), inject=False,
|
||||||
inject=False):
|
similarity_scorer_factory=None):
|
||||||
|
|
||||||
self._path = path
|
self._path = path
|
||||||
self._serializer = serializer
|
self._serializer = serializer
|
||||||
self._match_on = match_on
|
self._match_on = match_on
|
||||||
self._before_record_request = before_record_request or (lambda x: x)
|
self._before_record_request = before_record_request or (lambda x: x)
|
||||||
self._before_record_response = before_record_response or (lambda x: x)
|
self._before_record_response = before_record_response or (lambda x: x)
|
||||||
|
self._similarity_scorer_factory = similarity_scorer_factory or SimilarityScorer
|
||||||
self.inject = inject
|
self.inject = inject
|
||||||
self.record_mode = record_mode
|
self.record_mode = record_mode
|
||||||
self.custom_patches = custom_patches
|
self.custom_patches = custom_patches
|
||||||
@@ -176,8 +236,9 @@ class Cassette(object):
|
|||||||
request = self._before_record_request(request)
|
request = self._before_record_request(request)
|
||||||
if not request:
|
if not request:
|
||||||
return
|
return
|
||||||
if self._before_record_response:
|
|
||||||
response = self._before_record_response(response)
|
response = self._before_record_response(response)
|
||||||
|
if response is None:
|
||||||
|
return
|
||||||
self.data.append((request, response))
|
self.data.append((request, response))
|
||||||
self.dirty = True
|
self.dirty = True
|
||||||
|
|
||||||
@@ -194,6 +255,20 @@ class Cassette(object):
|
|||||||
if requests_match(request, stored_request, self._match_on):
|
if requests_match(request, stored_request, self._match_on):
|
||||||
yield index, response
|
yield index, response
|
||||||
|
|
||||||
|
def failing_matchers(self, a, b):
|
||||||
|
return [matcher for matcher in self._match_on if not matcher(a, b)]
|
||||||
|
|
||||||
|
def similar_requests(self, request):
|
||||||
|
scorer = self._similarity_scorer_factory(self._match_on, request).score
|
||||||
|
scored_requests = [
|
||||||
|
(
|
||||||
|
stored_request,
|
||||||
|
scorer(stored_request, self.play_counts[index])
|
||||||
|
)
|
||||||
|
for index, (stored_request, response) in enumerate(self.data)
|
||||||
|
]
|
||||||
|
return sorted(scored_requests, key=operator.itemgetter(1), reverse=True)
|
||||||
|
|
||||||
def can_play_response_for(self, request):
|
def can_play_response_for(self, request):
|
||||||
request = self._before_record_request(request)
|
request = self._before_record_request(request)
|
||||||
return request and request in self and \
|
return request and request in self and \
|
||||||
@@ -206,7 +281,7 @@ class Cassette(object):
|
|||||||
hasn't been played back before, and mark it as played
|
hasn't been played back before, and mark it as played
|
||||||
"""
|
"""
|
||||||
for index, response in self._responses(request):
|
for index, response in self._responses(request):
|
||||||
if self.play_counts[index] == 0:
|
if self.play_counts[index] < self.max_playcount:
|
||||||
self.play_counts[index] += 1
|
self.play_counts[index] += 1
|
||||||
return response
|
return response
|
||||||
# The cassette doesn't contain the request asked for.
|
# The cassette doesn't contain the request asked for.
|
||||||
|
|||||||
@@ -2,19 +2,25 @@ import copy
|
|||||||
import functools
|
import functools
|
||||||
import inspect
|
import inspect
|
||||||
import os
|
import os
|
||||||
|
import types
|
||||||
|
|
||||||
import six
|
import six
|
||||||
|
|
||||||
from .compat import collections
|
from .compat import collections
|
||||||
from .cassette import Cassette
|
from .cassette import Cassette
|
||||||
from .serializers import yamlserializer, jsonserializer
|
from .serializers import yamlserializer, jsonserializer
|
||||||
from .util import compose
|
from .util import compose, auto_decorate
|
||||||
from . import matchers
|
from . import matchers
|
||||||
from . import filters
|
from . import filters
|
||||||
|
|
||||||
|
|
||||||
class VCR(object):
|
class VCR(object):
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_test_method(method_name, function):
|
||||||
|
return method_name.startswith('test') and \
|
||||||
|
isinstance(function, types.FunctionType)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def ensure_suffix(suffix):
|
def ensure_suffix(suffix):
|
||||||
def ensure(path):
|
def ensure(path):
|
||||||
@@ -23,7 +29,7 @@ class VCR(object):
|
|||||||
return path
|
return path
|
||||||
return ensure
|
return ensure
|
||||||
|
|
||||||
def __init__(self, path_transformer=lambda x: x, before_record_request=None,
|
def __init__(self, path_transformer=None, before_record_request=None,
|
||||||
custom_patches=(), filter_query_parameters=(), ignore_hosts=(),
|
custom_patches=(), filter_query_parameters=(), ignore_hosts=(),
|
||||||
record_mode="once", ignore_localhost=False, filter_headers=(),
|
record_mode="once", ignore_localhost=False, filter_headers=(),
|
||||||
before_record_response=None, filter_post_data_parameters=(),
|
before_record_response=None, filter_post_data_parameters=(),
|
||||||
@@ -47,6 +53,7 @@ class VCR(object):
|
|||||||
'path': matchers.path,
|
'path': matchers.path,
|
||||||
'query': matchers.query,
|
'query': matchers.query,
|
||||||
'headers': matchers.headers,
|
'headers': matchers.headers,
|
||||||
|
'raw_body': matchers.raw_body,
|
||||||
'body': matchers.body,
|
'body': matchers.body,
|
||||||
}
|
}
|
||||||
self.record_mode = record_mode
|
self.record_mode = record_mode
|
||||||
@@ -66,10 +73,11 @@ class VCR(object):
|
|||||||
try:
|
try:
|
||||||
serializer = self.serializers[serializer_name]
|
serializer = self.serializers[serializer_name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
print("Serializer {0} doesn't exist or isn't registered".format(
|
raise KeyError(
|
||||||
|
"Serializer {0} doesn't exist or isn't registered".format(
|
||||||
serializer_name
|
serializer_name
|
||||||
))
|
)
|
||||||
raise KeyError
|
)
|
||||||
return serializer
|
return serializer
|
||||||
|
|
||||||
def _get_matchers(self, matcher_names):
|
def _get_matchers(self, matcher_names):
|
||||||
@@ -116,12 +124,16 @@ class VCR(object):
|
|||||||
'cassette_library_dir',
|
'cassette_library_dir',
|
||||||
self.cassette_library_dir
|
self.cassette_library_dir
|
||||||
)
|
)
|
||||||
|
additional_matchers = kwargs.get('additional_matchers', ())
|
||||||
|
|
||||||
if cassette_library_dir:
|
if cassette_library_dir:
|
||||||
def add_cassette_library_dir(path):
|
def add_cassette_library_dir(path):
|
||||||
if not path.startswith(cassette_library_dir):
|
if not path.startswith(cassette_library_dir):
|
||||||
return os.path.join(cassette_library_dir, path)
|
return os.path.join(cassette_library_dir, path)
|
||||||
return path
|
return path
|
||||||
path_transformer = compose(add_cassette_library_dir, path_transformer)
|
path_transformer = compose(
|
||||||
|
add_cassette_library_dir, path_transformer
|
||||||
|
)
|
||||||
elif not func_path_generator:
|
elif not func_path_generator:
|
||||||
# If we don't have a library dir, use the functions
|
# If we don't have a library dir, use the functions
|
||||||
# location to build a full path for cassettes.
|
# location to build a full path for cassettes.
|
||||||
@@ -129,12 +141,12 @@ class VCR(object):
|
|||||||
|
|
||||||
merged_config = {
|
merged_config = {
|
||||||
'serializer': self._get_serializer(serializer_name),
|
'serializer': self._get_serializer(serializer_name),
|
||||||
'match_on': self._get_matchers(matcher_names),
|
'match_on': self._get_matchers(
|
||||||
|
tuple(matcher_names) + tuple(additional_matchers)
|
||||||
|
),
|
||||||
'record_mode': kwargs.get('record_mode', self.record_mode),
|
'record_mode': kwargs.get('record_mode', self.record_mode),
|
||||||
'before_record_request': self._build_before_record_request(kwargs),
|
'before_record_request': self._build_before_record_request(kwargs),
|
||||||
'before_record_response': self._build_before_record_response(
|
'before_record_response': self._build_before_record_response(kwargs),
|
||||||
kwargs
|
|
||||||
),
|
|
||||||
'custom_patches': self._custom_patches + kwargs.get(
|
'custom_patches': self._custom_patches + kwargs.get(
|
||||||
'custom_patches', ()
|
'custom_patches', ()
|
||||||
),
|
),
|
||||||
@@ -152,11 +164,11 @@ class VCR(object):
|
|||||||
'before_record_response', self.before_record_response
|
'before_record_response', self.before_record_response
|
||||||
)
|
)
|
||||||
filter_functions = []
|
filter_functions = []
|
||||||
if before_record_response and not isinstance(before_record_response,
|
if before_record_response:
|
||||||
collections.Iterable):
|
if not isinstance(before_record_response, collections.Iterable):
|
||||||
before_record_response = (before_record_response,)
|
before_record_response = (before_record_response,)
|
||||||
for function in before_record_response:
|
filter_functions.extend(before_record_response)
|
||||||
filter_functions.append(function)
|
|
||||||
def before_record_response(response):
|
def before_record_response(response):
|
||||||
for function in filter_functions:
|
for function in filter_functions:
|
||||||
if response is None:
|
if response is None:
|
||||||
@@ -177,7 +189,8 @@ class VCR(object):
|
|||||||
'filter_post_data_parameters', self.filter_post_data_parameters
|
'filter_post_data_parameters', self.filter_post_data_parameters
|
||||||
)
|
)
|
||||||
before_record_request = options.get(
|
before_record_request = options.get(
|
||||||
"before_record_request", options.get("before_record", self.before_record_request)
|
"before_record_request",
|
||||||
|
options.get("before_record", self.before_record_request)
|
||||||
)
|
)
|
||||||
ignore_hosts = options.get(
|
ignore_hosts = options.get(
|
||||||
'ignore_hosts', self.ignore_hosts
|
'ignore_hosts', self.ignore_hosts
|
||||||
@@ -186,28 +199,36 @@ class VCR(object):
|
|||||||
'ignore_localhost', self.ignore_localhost
|
'ignore_localhost', self.ignore_localhost
|
||||||
)
|
)
|
||||||
if filter_headers:
|
if filter_headers:
|
||||||
filter_functions.append(functools.partial(filters.remove_headers,
|
filter_functions.append(
|
||||||
headers_to_remove=filter_headers))
|
functools.partial(
|
||||||
|
filters.remove_headers,
|
||||||
|
headers_to_remove=filter_headers
|
||||||
|
)
|
||||||
|
)
|
||||||
if filter_query_parameters:
|
if filter_query_parameters:
|
||||||
filter_functions.append(functools.partial(filters.remove_query_parameters,
|
filter_functions.append(functools.partial(
|
||||||
query_parameters_to_remove=filter_query_parameters))
|
filters.remove_query_parameters,
|
||||||
|
query_parameters_to_remove=filter_query_parameters
|
||||||
|
))
|
||||||
if filter_post_data_parameters:
|
if filter_post_data_parameters:
|
||||||
filter_functions.append(functools.partial(filters.remove_post_data_parameters,
|
filter_functions.append(
|
||||||
post_data_parameters_to_remove=filter_post_data_parameters))
|
functools.partial(
|
||||||
|
filters.remove_post_data_parameters,
|
||||||
|
post_data_parameters_to_remove=filter_post_data_parameters
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
hosts_to_ignore = list(ignore_hosts)
|
hosts_to_ignore = set(ignore_hosts)
|
||||||
if ignore_localhost:
|
if ignore_localhost:
|
||||||
hosts_to_ignore.extend(('localhost', '0.0.0.0', '127.0.0.1'))
|
hosts_to_ignore.update(('localhost', '0.0.0.0', '127.0.0.1'))
|
||||||
|
|
||||||
if hosts_to_ignore:
|
if hosts_to_ignore:
|
||||||
hosts_to_ignore = set(hosts_to_ignore)
|
|
||||||
filter_functions.append(self._build_ignore_hosts(hosts_to_ignore))
|
filter_functions.append(self._build_ignore_hosts(hosts_to_ignore))
|
||||||
|
|
||||||
if before_record_request:
|
if before_record_request:
|
||||||
if not isinstance(before_record_request, collections.Iterable):
|
if not isinstance(before_record_request, collections.Iterable):
|
||||||
before_record_request = (before_record_request,)
|
before_record_request = (before_record_request,)
|
||||||
for function in before_record_request:
|
filter_functions.extend(before_record_request)
|
||||||
filter_functions.append(function)
|
|
||||||
def before_record_request(request):
|
def before_record_request(request):
|
||||||
request = copy.copy(request)
|
request = copy.copy(request)
|
||||||
for function in filter_functions:
|
for function in filter_functions:
|
||||||
@@ -215,7 +236,6 @@ class VCR(object):
|
|||||||
break
|
break
|
||||||
request = function(request)
|
request = function(request)
|
||||||
return request
|
return request
|
||||||
|
|
||||||
return before_record_request
|
return before_record_request
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -236,3 +256,7 @@ class VCR(object):
|
|||||||
|
|
||||||
def register_matcher(self, name, matcher):
|
def register_matcher(self, name, matcher):
|
||||||
self.matchers[name] = matcher
|
self.matchers[name] = matcher
|
||||||
|
|
||||||
|
def test_case(self, predicate=None):
|
||||||
|
predicate = predicate or self.is_test_method
|
||||||
|
return six.with_metaclass(auto_decorate(self.use_cassette, predicate))
|
||||||
|
|||||||
@@ -3,8 +3,5 @@ class CannotOverwriteExistingCassetteException(Exception):
|
|||||||
|
|
||||||
|
|
||||||
class UnhandledHTTPRequestError(KeyError):
|
class UnhandledHTTPRequestError(KeyError):
|
||||||
'''
|
"""Raised when a cassette does not contain the request we want."""
|
||||||
Raised when a cassette does not c
|
|
||||||
ontain the request we want
|
|
||||||
'''
|
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -1,19 +1,16 @@
|
|||||||
from six import BytesIO, text_type
|
from six import BytesIO, text_type
|
||||||
from six.moves.urllib.parse import urlparse, urlencode, urlunparse
|
from six.moves.urllib.parse import urlparse, urlencode, urlunparse
|
||||||
import copy
|
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from .compat import collections
|
from .compat import collections
|
||||||
|
|
||||||
|
|
||||||
def remove_headers(request, headers_to_remove):
|
def remove_headers(request, headers_to_remove):
|
||||||
headers = copy.copy(request.headers)
|
new_headers = request.headers.copy()
|
||||||
headers_to_remove = [h.lower() for h in headers_to_remove]
|
for k in headers_to_remove:
|
||||||
keys = [k for k in headers if k.lower() in headers_to_remove]
|
if k in new_headers:
|
||||||
if keys:
|
del new_headers[k]
|
||||||
for k in keys:
|
request.headers = new_headers
|
||||||
headers.pop(k)
|
|
||||||
request.headers = headers
|
|
||||||
return request
|
return request
|
||||||
|
|
||||||
|
|
||||||
@@ -30,8 +27,7 @@ def remove_query_parameters(request, query_parameters_to_remove):
|
|||||||
|
|
||||||
def remove_post_data_parameters(request, post_data_parameters_to_remove):
|
def remove_post_data_parameters(request, post_data_parameters_to_remove):
|
||||||
if request.method == 'POST' and not isinstance(request.body, BytesIO):
|
if request.method == 'POST' and not isinstance(request.body, BytesIO):
|
||||||
if ('Content-Type' in request.headers and
|
if request.headers.get('Content-Type') == 'application/json':
|
||||||
request.headers['Content-Type'] == 'application/json'):
|
|
||||||
json_data = json.loads(request.body.decode('utf-8'))
|
json_data = json.loads(request.body.decode('utf-8'))
|
||||||
for k in list(json_data.keys()):
|
for k in list(json_data.keys()):
|
||||||
if k in post_data_parameters_to_remove:
|
if k in post_data_parameters_to_remove:
|
||||||
|
|||||||
@@ -1,4 +1,9 @@
|
|||||||
|
import json
|
||||||
|
from six.moves import urllib, xmlrpc_client
|
||||||
|
from .util import read_body
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -30,10 +35,49 @@ def query(r1, r2):
|
|||||||
return r1.query == r2.query
|
return r1.query == r2.query
|
||||||
|
|
||||||
|
|
||||||
|
def raw_body(r1, r2):
|
||||||
|
return read_body(r1) == read_body(r2)
|
||||||
|
|
||||||
|
|
||||||
|
def _header_checker(value, header='Content-Type'):
|
||||||
|
def checker(headers):
|
||||||
|
return value in headers.get(header, '').lower()
|
||||||
|
return checker
|
||||||
|
|
||||||
|
|
||||||
|
def _transform_json(body):
|
||||||
|
# Request body is always a byte string, but json.loads() wants a text
|
||||||
|
# string. RFC 7159 says the default encoding is UTF-8 (although UTF-16
|
||||||
|
# and UTF-32 are also allowed: hmmmmm).
|
||||||
|
return json.loads(body.decode('utf-8'))
|
||||||
|
|
||||||
|
|
||||||
|
_xml_header_checker = _header_checker('text/xml')
|
||||||
|
_xmlrpc_header_checker = _header_checker('xmlrpc', header='User-Agent')
|
||||||
|
_checker_transformer_pairs = (
|
||||||
|
(_header_checker('application/x-www-form-urlencoded'), urllib.parse.parse_qs),
|
||||||
|
(_header_checker('application/json'), _transform_json),
|
||||||
|
(lambda request: _xml_header_checker(request) and _xmlrpc_header_checker(request), xmlrpc_client.loads),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _identity(x):
|
||||||
|
return x
|
||||||
|
|
||||||
|
|
||||||
|
def _get_transformer(request):
|
||||||
|
for checker, transformer in _checker_transformer_pairs:
|
||||||
|
if checker(request.headers): return transformer
|
||||||
|
else:
|
||||||
|
return _identity
|
||||||
|
|
||||||
|
|
||||||
def body(r1, r2):
|
def body(r1, r2):
|
||||||
if hasattr(r1.body, 'read') and hasattr(r2.body, 'read'):
|
transformer = _get_transformer(r1)
|
||||||
return r1.body.read() == r2.body.read()
|
r2_transformer = _get_transformer(r2)
|
||||||
return r1.body == r2.body
|
if transformer != r2_transformer:
|
||||||
|
transformer = _identity
|
||||||
|
return transformer(read_body(r1)) == transformer(read_body(r2))
|
||||||
|
|
||||||
|
|
||||||
def headers(r1, r2):
|
def headers(r1, r2):
|
||||||
|
|||||||
40
vcr/patch.py
40
vcr/patch.py
@@ -54,13 +54,12 @@ else:
|
|||||||
|
|
||||||
# Try to save the original types for Tornado
|
# Try to save the original types for Tornado
|
||||||
try:
|
try:
|
||||||
import tornado.httpclient
|
|
||||||
import tornado.simple_httpclient
|
import tornado.simple_httpclient
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
_AsyncHTTPClient = tornado.httpclient.AsyncHTTPClient
|
_SimpleAsyncHTTPClient_fetch_impl = \
|
||||||
_SimpleAsyncHTTPClient = tornado.simple_httpclient.SimpleAsyncHTTPClient
|
tornado.simple_httpclient.SimpleAsyncHTTPClient.fetch_impl
|
||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
@@ -68,7 +67,8 @@ try:
|
|||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
_CurlAsyncHTTPClient = tornado.curl_httpclient.CurlAsyncHTTPClient
|
_CurlAsyncHTTPClient_fetch_impl = \
|
||||||
|
tornado.curl_httpclient.CurlAsyncHTTPClient.fetch_impl
|
||||||
|
|
||||||
|
|
||||||
class CassettePatcherBuilder(object):
|
class CassettePatcherBuilder(object):
|
||||||
@@ -228,23 +228,27 @@ class CassettePatcherBuilder(object):
|
|||||||
@_build_patchers_from_mock_triples_decorator
|
@_build_patchers_from_mock_triples_decorator
|
||||||
def _tornado(self):
|
def _tornado(self):
|
||||||
try:
|
try:
|
||||||
import tornado.httpclient as http
|
|
||||||
import tornado.simple_httpclient as simple
|
import tornado.simple_httpclient as simple
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
from .stubs.tornado_stubs import VCRAsyncHTTPClient
|
from .stubs.tornado_stubs import vcr_fetch_impl
|
||||||
from .stubs.tornado_stubs import VCRSimpleAsyncHTTPClient
|
|
||||||
|
|
||||||
yield http, 'AsyncHTTPClient', VCRAsyncHTTPClient
|
new_fetch_impl = vcr_fetch_impl(
|
||||||
yield simple, 'SimpleAsyncHTTPClient', VCRSimpleAsyncHTTPClient
|
self._cassette, _SimpleAsyncHTTPClient_fetch_impl
|
||||||
|
)
|
||||||
|
yield simple.SimpleAsyncHTTPClient, 'fetch_impl', new_fetch_impl
|
||||||
try:
|
try:
|
||||||
import tornado.curl_httpclient as curl
|
import tornado.curl_httpclient as curl
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
from .stubs.tornado_stubs import VCRCurlAsyncHTTPClient
|
from .stubs.tornado_stubs import vcr_fetch_impl
|
||||||
yield curl, 'CurlAsyncHTTPClient', VCRCurlAsyncHTTPClient
|
|
||||||
|
new_fetch_impl = vcr_fetch_impl(
|
||||||
|
self._cassette, _CurlAsyncHTTPClient_fetch_impl
|
||||||
|
)
|
||||||
|
yield curl.CurlAsyncHTTPClient, 'fetch_impl', new_fetch_impl
|
||||||
|
|
||||||
def _urllib3_patchers(self, cpool, stubs):
|
def _urllib3_patchers(self, cpool, stubs):
|
||||||
http_connection_remover = ConnectionRemover(
|
http_connection_remover = ConnectionRemover(
|
||||||
@@ -362,19 +366,25 @@ def reset_patchers():
|
|||||||
_CertValidatingHTTPSConnection)
|
_CertValidatingHTTPSConnection)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import tornado.httpclient as http
|
|
||||||
import tornado.simple_httpclient as simple
|
import tornado.simple_httpclient as simple
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
yield mock.patch.object(http, 'AsyncHTTPClient', _AsyncHTTPClient)
|
yield mock.patch.object(
|
||||||
yield mock.patch.object(simple, 'SimpleAsyncHTTPClient', _SimpleAsyncHTTPClient)
|
simple.SimpleAsyncHTTPClient,
|
||||||
|
'fetch_impl',
|
||||||
|
_SimpleAsyncHTTPClient_fetch_impl,
|
||||||
|
)
|
||||||
try:
|
try:
|
||||||
import tornado.curl_httpclient as curl
|
import tornado.curl_httpclient as curl
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
yield mock.patch.object(curl, 'CurlAsyncHTTPClient', _CurlAsyncHTTPClient)
|
yield mock.patch.object(
|
||||||
|
curl.CurlAsyncHTTPClient,
|
||||||
|
'fetch_impl',
|
||||||
|
_CurlAsyncHTTPClient_fetch_impl,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
|
|||||||
@@ -1,27 +1,12 @@
|
|||||||
from six import BytesIO, binary_type
|
import warnings
|
||||||
|
from six import BytesIO, text_type
|
||||||
from six.moves.urllib.parse import urlparse, parse_qsl
|
from six.moves.urllib.parse import urlparse, parse_qsl
|
||||||
|
from .util import CaseInsensitiveDict
|
||||||
|
|
||||||
|
|
||||||
class Request(object):
|
class Request(object):
|
||||||
"""
|
"""
|
||||||
VCR's representation of a request.
|
VCR's representation of a request.
|
||||||
|
|
||||||
There is a weird quirk in HTTP. You can send the same header twice. For
|
|
||||||
this reason, headers are represented by a dict, with lists as the values.
|
|
||||||
However, it appears that HTTPlib is completely incapable of sending the
|
|
||||||
same header twice. This puts me in a weird position: I want to be able to
|
|
||||||
accurately represent HTTP headers in cassettes, but I don't want the extra
|
|
||||||
step of always having to do [0] in the general case, i.e.
|
|
||||||
request.headers['key'][0]
|
|
||||||
|
|
||||||
In addition, some servers sometimes send the same header more than once,
|
|
||||||
and httplib *can* deal with this situation.
|
|
||||||
|
|
||||||
Futhermore, I wanted to keep the request and response cassette format as
|
|
||||||
similar as possible.
|
|
||||||
|
|
||||||
For this reason, in cassettes I keep a dict with lists as keys, but once
|
|
||||||
deserialized into VCR, I keep them as plain, naked dicts.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self, method, uri, body, headers):
|
def __init__(self, method, uri, body, headers):
|
||||||
@@ -29,14 +14,20 @@ class Request(object):
|
|||||||
self.uri = uri
|
self.uri = uri
|
||||||
self._was_file = hasattr(body, 'read')
|
self._was_file = hasattr(body, 'read')
|
||||||
if self._was_file:
|
if self._was_file:
|
||||||
self._body = body.read()
|
self.body = body.read()
|
||||||
if not isinstance(self._body, binary_type):
|
|
||||||
self._body = self._body.encode('utf-8')
|
|
||||||
else:
|
else:
|
||||||
self._body = body
|
self.body = body
|
||||||
self.headers = {}
|
self.headers = headers
|
||||||
for key in headers:
|
|
||||||
self.add_header(key, headers[key])
|
@property
|
||||||
|
def headers(self):
|
||||||
|
return self._headers
|
||||||
|
|
||||||
|
@headers.setter
|
||||||
|
def headers(self, value):
|
||||||
|
if not isinstance(value, HeadersDict):
|
||||||
|
value = HeadersDict(value)
|
||||||
|
self._headers = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def body(self):
|
def body(self):
|
||||||
@@ -44,13 +35,14 @@ class Request(object):
|
|||||||
|
|
||||||
@body.setter
|
@body.setter
|
||||||
def body(self, value):
|
def body(self, value):
|
||||||
|
if isinstance(value, text_type):
|
||||||
|
value = value.encode('utf-8')
|
||||||
self._body = value
|
self._body = value
|
||||||
|
|
||||||
def add_header(self, key, value):
|
def add_header(self, key, value):
|
||||||
# see class docstring for an explanation
|
warnings.warn("Request.add_header is deprecated. "
|
||||||
if isinstance(value, (tuple, list)):
|
"Please assign to request.headers instead.",
|
||||||
self.headers[key] = value[0]
|
DeprecationWarning)
|
||||||
else:
|
|
||||||
self.headers[key] = value
|
self.headers[key] = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -105,3 +97,35 @@ class Request(object):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def _from_dict(cls, dct):
|
def _from_dict(cls, dct):
|
||||||
return Request(**dct)
|
return Request(**dct)
|
||||||
|
|
||||||
|
|
||||||
|
class HeadersDict(CaseInsensitiveDict):
|
||||||
|
"""
|
||||||
|
There is a weird quirk in HTTP. You can send the same header twice. For
|
||||||
|
this reason, headers are represented by a dict, with lists as the values.
|
||||||
|
However, it appears that HTTPlib is completely incapable of sending the
|
||||||
|
same header twice. This puts me in a weird position: I want to be able to
|
||||||
|
accurately represent HTTP headers in cassettes, but I don't want the extra
|
||||||
|
step of always having to do [0] in the general case, i.e.
|
||||||
|
request.headers['key'][0]
|
||||||
|
|
||||||
|
In addition, some servers sometimes send the same header more than once,
|
||||||
|
and httplib *can* deal with this situation.
|
||||||
|
|
||||||
|
Futhermore, I wanted to keep the request and response cassette format as
|
||||||
|
similar as possible.
|
||||||
|
|
||||||
|
For this reason, in cassettes I keep a dict with lists as keys, but once
|
||||||
|
deserialized into VCR, I keep them as plain, naked dicts.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
if isinstance(value, (tuple, list)):
|
||||||
|
value = value[0]
|
||||||
|
|
||||||
|
# Preserve the case from the first time this key was set.
|
||||||
|
old = self._store.get(key.lower())
|
||||||
|
if old:
|
||||||
|
key = old[0]
|
||||||
|
|
||||||
|
super(HeadersDict, self).__setitem__(key, value)
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ import six
|
|||||||
from six.moves.http_client import (
|
from six.moves.http_client import (
|
||||||
HTTPConnection,
|
HTTPConnection,
|
||||||
HTTPSConnection,
|
HTTPSConnection,
|
||||||
HTTPMessage,
|
|
||||||
HTTPResponse,
|
HTTPResponse,
|
||||||
)
|
)
|
||||||
from six import BytesIO
|
from six import BytesIO
|
||||||
@@ -188,8 +187,7 @@ class VCRConnection(object):
|
|||||||
log.debug('Got {0}'.format(self._vcr_request))
|
log.debug('Got {0}'.format(self._vcr_request))
|
||||||
|
|
||||||
def putheader(self, header, *values):
|
def putheader(self, header, *values):
|
||||||
for value in values:
|
self._vcr_request.headers[header] = values
|
||||||
self._vcr_request.add_header(header, value)
|
|
||||||
|
|
||||||
def send(self, data):
|
def send(self, data):
|
||||||
'''
|
'''
|
||||||
@@ -229,12 +227,26 @@ class VCRConnection(object):
|
|||||||
if self.cassette.write_protected and self.cassette.filter_request(
|
if self.cassette.write_protected and self.cassette.filter_request(
|
||||||
self._vcr_request
|
self._vcr_request
|
||||||
):
|
):
|
||||||
|
most_similar_request = None
|
||||||
|
failing_matchers = None
|
||||||
|
most_similar_request_info = None
|
||||||
|
try:
|
||||||
|
most_similar_request_info = self.cassette.similar_requests(self._vcr_request)
|
||||||
|
most_similar_request = most_similar_request_info[0][0]
|
||||||
|
failing_matchers = self.cassette.failing_matchers(
|
||||||
|
self._vcr_request, most_similar_request
|
||||||
|
)
|
||||||
|
except Exception as err:
|
||||||
|
print "XXXX {0}".format(err)
|
||||||
|
import ipdb; ipdb.set_trace()
|
||||||
raise CannotOverwriteExistingCassetteException(
|
raise CannotOverwriteExistingCassetteException(
|
||||||
"No match for the request (%r) was found. "
|
"No match for the request (%r) was found. "
|
||||||
"Can't overwrite existing cassette (%r) in "
|
"Can't overwrite existing cassette (%r) in "
|
||||||
"your current record mode (%r)."
|
"your current record mode (%r). Most similar request was (%r). "
|
||||||
|
"It differed from the request according to (%r). \n\n\n(%r)"
|
||||||
% (self._vcr_request, self.cassette._path,
|
% (self._vcr_request, self.cassette._path,
|
||||||
self.cassette.record_mode)
|
self.cassette.record_mode, most_similar_request,
|
||||||
|
failing_matchers, most_similar_request_info)
|
||||||
)
|
)
|
||||||
|
|
||||||
# Otherwise, we should send the request, then get the response
|
# Otherwise, we should send the request, then get the response
|
||||||
@@ -316,6 +328,27 @@ class VCRConnection(object):
|
|||||||
with force_reset():
|
with force_reset():
|
||||||
self.real_connection = self._baseclass(*args, **kwargs)
|
self.real_connection = self._baseclass(*args, **kwargs)
|
||||||
|
|
||||||
|
def __setattr__(self, name, value):
|
||||||
|
"""
|
||||||
|
We need to define this because any attributes that are set on the
|
||||||
|
VCRConnection need to be propogated to the real connection.
|
||||||
|
|
||||||
|
For example, urllib3 will set certain attributes on the connection,
|
||||||
|
such as 'ssl_version'. These attributes need to get set on the real
|
||||||
|
connection to have the correct and expected behavior.
|
||||||
|
|
||||||
|
TODO: Separately setting the attribute on the two instances is not
|
||||||
|
ideal. We should switch to a proxying implementation.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
setattr(self.real_connection, name, value)
|
||||||
|
except AttributeError:
|
||||||
|
# raised if real_connection has not been set yet, such as when
|
||||||
|
# we're setting the real_connection itself for the first time
|
||||||
|
pass
|
||||||
|
|
||||||
|
super(VCRConnection, self).__setattr__(name, value)
|
||||||
|
|
||||||
|
|
||||||
class VCRHTTPConnection(VCRConnection):
|
class VCRHTTPConnection(VCRConnection):
|
||||||
'''A Mocked class for HTTP requests'''
|
'''A Mocked class for HTTP requests'''
|
||||||
|
|||||||
@@ -1,49 +1,21 @@
|
|||||||
'''Stubs for tornado HTTP clients'''
|
'''Stubs for tornado HTTP clients'''
|
||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
|
|
||||||
|
import functools
|
||||||
from six import BytesIO
|
from six import BytesIO
|
||||||
|
|
||||||
from tornado import httputil
|
from tornado import httputil
|
||||||
from tornado.httpclient import AsyncHTTPClient
|
|
||||||
from tornado.httpclient import HTTPResponse
|
from tornado.httpclient import HTTPResponse
|
||||||
from tornado.simple_httpclient import SimpleAsyncHTTPClient
|
|
||||||
|
|
||||||
from vcr.errors import CannotOverwriteExistingCassetteException
|
from vcr.errors import CannotOverwriteExistingCassetteException
|
||||||
from vcr.request import Request
|
from vcr.request import Request
|
||||||
|
|
||||||
|
|
||||||
class _VCRAsyncClient(object):
|
def vcr_fetch_impl(cassette, real_fetch_impl):
|
||||||
cassette = None
|
|
||||||
|
|
||||||
def __new__(cls, *args, **kwargs):
|
@functools.wraps(real_fetch_impl)
|
||||||
from vcr.patch import force_reset
|
def new_fetch_impl(self, request, callback):
|
||||||
with force_reset():
|
headers = request.headers.copy()
|
||||||
return super(_VCRAsyncClient, cls).__new__(cls, *args, **kwargs)
|
|
||||||
|
|
||||||
def initialize(self, *args, **kwargs):
|
|
||||||
from vcr.patch import force_reset
|
|
||||||
with force_reset():
|
|
||||||
self.real_client = self._baseclass(*args, **kwargs)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def io_loop(self):
|
|
||||||
return self.real_client.io_loop
|
|
||||||
|
|
||||||
@property
|
|
||||||
def _closed(self):
|
|
||||||
return self.real_client._closed
|
|
||||||
|
|
||||||
@property
|
|
||||||
def defaults(self):
|
|
||||||
return self.real_client.defaults
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
from vcr.patch import force_reset
|
|
||||||
with force_reset():
|
|
||||||
self.real_client.close()
|
|
||||||
|
|
||||||
def fetch_impl(self, request, callback):
|
|
||||||
headers = dict(request.headers)
|
|
||||||
if request.user_agent:
|
if request.user_agent:
|
||||||
headers.setdefault('User-Agent', request.user_agent)
|
headers.setdefault('User-Agent', request.user_agent)
|
||||||
|
|
||||||
@@ -64,7 +36,9 @@ class _VCRAsyncClient(object):
|
|||||||
"that is not yet supported by VCR.py. Please make the "
|
"that is not yet supported by VCR.py. Please make the "
|
||||||
"request outside a VCR.py context." % repr(request)
|
"request outside a VCR.py context." % repr(request)
|
||||||
),
|
),
|
||||||
|
request_time=self.io_loop.time() - request.start_time,
|
||||||
)
|
)
|
||||||
|
return callback(response)
|
||||||
|
|
||||||
vcr_request = Request(
|
vcr_request = Request(
|
||||||
request.method,
|
request.method,
|
||||||
@@ -73,8 +47,8 @@ class _VCRAsyncClient(object):
|
|||||||
headers,
|
headers,
|
||||||
)
|
)
|
||||||
|
|
||||||
if self.cassette.can_play_response_for(vcr_request):
|
if cassette.can_play_response_for(vcr_request):
|
||||||
vcr_response = self.cassette.play_response(vcr_request)
|
vcr_response = cassette.play_response(vcr_request)
|
||||||
headers = httputil.HTTPHeaders()
|
headers = httputil.HTTPHeaders()
|
||||||
|
|
||||||
recorded_headers = vcr_response['headers']
|
recorded_headers = vcr_response['headers']
|
||||||
@@ -89,10 +63,12 @@ class _VCRAsyncClient(object):
|
|||||||
reason=vcr_response['status']['message'],
|
reason=vcr_response['status']['message'],
|
||||||
headers=headers,
|
headers=headers,
|
||||||
buffer=BytesIO(vcr_response['body']['string']),
|
buffer=BytesIO(vcr_response['body']['string']),
|
||||||
|
effective_url=vcr_response.get('url'),
|
||||||
|
request_time=self.io_loop.time() - request.start_time,
|
||||||
)
|
)
|
||||||
callback(response)
|
return callback(response)
|
||||||
else:
|
else:
|
||||||
if self.cassette.write_protected and self.cassette.filter_request(
|
if cassette.write_protected and cassette.filter_request(
|
||||||
vcr_request
|
vcr_request
|
||||||
):
|
):
|
||||||
response = HTTPResponse(
|
response = HTTPResponse(
|
||||||
@@ -102,11 +78,11 @@ class _VCRAsyncClient(object):
|
|||||||
"No match for the request (%r) was found. "
|
"No match for the request (%r) was found. "
|
||||||
"Can't overwrite existing cassette (%r) in "
|
"Can't overwrite existing cassette (%r) in "
|
||||||
"your current record mode (%r)."
|
"your current record mode (%r)."
|
||||||
% (vcr_request, self.cassette._path,
|
% (vcr_request, cassette._path, cassette.record_mode)
|
||||||
self.cassette.record_mode)
|
|
||||||
),
|
),
|
||||||
|
request_time=self.io_loop.time() - request.start_time,
|
||||||
)
|
)
|
||||||
callback(response)
|
return callback(response)
|
||||||
|
|
||||||
def new_callback(response):
|
def new_callback(response):
|
||||||
headers = [
|
headers = [
|
||||||
@@ -121,27 +97,11 @@ class _VCRAsyncClient(object):
|
|||||||
},
|
},
|
||||||
'headers': headers,
|
'headers': headers,
|
||||||
'body': {'string': response.body},
|
'body': {'string': response.body},
|
||||||
|
'url': response.effective_url,
|
||||||
}
|
}
|
||||||
self.cassette.append(vcr_request, vcr_response)
|
cassette.append(vcr_request, vcr_response)
|
||||||
callback(response)
|
return callback(response)
|
||||||
|
|
||||||
from vcr.patch import force_reset
|
real_fetch_impl(self, request, new_callback)
|
||||||
with force_reset():
|
|
||||||
self.real_client.fetch_impl(request, new_callback)
|
|
||||||
|
|
||||||
|
return new_fetch_impl
|
||||||
class VCRAsyncHTTPClient(_VCRAsyncClient, AsyncHTTPClient):
|
|
||||||
_baseclass = AsyncHTTPClient
|
|
||||||
|
|
||||||
|
|
||||||
class VCRSimpleAsyncHTTPClient(_VCRAsyncClient, SimpleAsyncHTTPClient):
|
|
||||||
_baseclass = SimpleAsyncHTTPClient
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
from tornado.curl_httpclient import CurlAsyncHTTPClient
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
VCRCurlAsyncHTTPClient = None
|
|
||||||
else:
|
|
||||||
class VCRCurlAsyncHTTPClient(_VCRAsyncClient, CurlAsyncHTTPClient):
|
|
||||||
_baseclass = CurlAsyncHTTPClient
|
|
||||||
|
|||||||
108
vcr/util.py
108
vcr/util.py
@@ -1,3 +1,76 @@
|
|||||||
|
import collections
|
||||||
|
import types
|
||||||
|
|
||||||
|
|
||||||
|
# Shamelessly stolen from https://github.com/kennethreitz/requests/blob/master/requests/structures.py
|
||||||
|
class CaseInsensitiveDict(collections.MutableMapping):
|
||||||
|
"""
|
||||||
|
A case-insensitive ``dict``-like object.
|
||||||
|
Implements all methods and operations of
|
||||||
|
``collections.MutableMapping`` as well as dict's ``copy``. Also
|
||||||
|
provides ``lower_items``.
|
||||||
|
All keys are expected to be strings. The structure remembers the
|
||||||
|
case of the last key to be set, and ``iter(instance)``,
|
||||||
|
``keys()``, ``items()``, ``iterkeys()``, and ``iteritems()``
|
||||||
|
will contain case-sensitive keys. However, querying and contains
|
||||||
|
testing is case insensitive::
|
||||||
|
cid = CaseInsensitiveDict()
|
||||||
|
cid['Accept'] = 'application/json'
|
||||||
|
cid['aCCEPT'] == 'application/json' # True
|
||||||
|
list(cid) == ['Accept'] # True
|
||||||
|
For example, ``headers['content-encoding']`` will return the
|
||||||
|
value of a ``'Content-Encoding'`` response header, regardless
|
||||||
|
of how the header name was originally stored.
|
||||||
|
If the constructor, ``.update``, or equality comparison
|
||||||
|
operations are given keys that have equal ``.lower()``s, the
|
||||||
|
behavior is undefined.
|
||||||
|
"""
|
||||||
|
def __init__(self, data=None, **kwargs):
|
||||||
|
self._store = dict()
|
||||||
|
if data is None:
|
||||||
|
data = {}
|
||||||
|
self.update(data, **kwargs)
|
||||||
|
|
||||||
|
def __setitem__(self, key, value):
|
||||||
|
# Use the lowercased key for lookups, but store the actual
|
||||||
|
# key alongside the value.
|
||||||
|
self._store[key.lower()] = (key, value)
|
||||||
|
|
||||||
|
def __getitem__(self, key):
|
||||||
|
return self._store[key.lower()][1]
|
||||||
|
|
||||||
|
def __delitem__(self, key):
|
||||||
|
del self._store[key.lower()]
|
||||||
|
|
||||||
|
def __iter__(self):
|
||||||
|
return (casedkey for casedkey, mappedvalue in self._store.values())
|
||||||
|
|
||||||
|
def __len__(self):
|
||||||
|
return len(self._store)
|
||||||
|
|
||||||
|
def lower_items(self):
|
||||||
|
"""Like iteritems(), but with all lowercase keys."""
|
||||||
|
return (
|
||||||
|
(lowerkey, keyval[1])
|
||||||
|
for (lowerkey, keyval)
|
||||||
|
in self._store.items()
|
||||||
|
)
|
||||||
|
|
||||||
|
def __eq__(self, other):
|
||||||
|
if isinstance(other, collections.Mapping):
|
||||||
|
other = CaseInsensitiveDict(other)
|
||||||
|
else:
|
||||||
|
return NotImplemented
|
||||||
|
# Compare insensitively
|
||||||
|
return dict(self.lower_items()) == dict(other.lower_items())
|
||||||
|
|
||||||
|
# Copy is required
|
||||||
|
def copy(self):
|
||||||
|
return CaseInsensitiveDict(self._store.values())
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
return str(dict(self.items()))
|
||||||
|
|
||||||
def partition_dict(predicate, dictionary):
|
def partition_dict(predicate, dictionary):
|
||||||
true_dict = {}
|
true_dict = {}
|
||||||
false_dict = {}
|
false_dict = {}
|
||||||
@@ -10,7 +83,40 @@ def partition_dict(predicate, dictionary):
|
|||||||
def compose(*functions):
|
def compose(*functions):
|
||||||
def composed(incoming):
|
def composed(incoming):
|
||||||
res = incoming
|
res = incoming
|
||||||
for function in functions[::-1]:
|
for function in reversed(functions):
|
||||||
|
if function:
|
||||||
res = function(res)
|
res = function(res)
|
||||||
return res
|
return res
|
||||||
return composed
|
return composed
|
||||||
|
|
||||||
|
def read_body(request):
|
||||||
|
if hasattr(request.body, 'read'):
|
||||||
|
return request.body.read()
|
||||||
|
return request.body
|
||||||
|
|
||||||
|
|
||||||
|
def auto_decorate(
|
||||||
|
decorator,
|
||||||
|
predicate=lambda name, value: isinstance(value, types.FunctionType)
|
||||||
|
):
|
||||||
|
def maybe_decorate(attribute, value):
|
||||||
|
if predicate(attribute, value):
|
||||||
|
value = decorator(value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
class DecorateAll(type):
|
||||||
|
|
||||||
|
def __setattr__(cls, attribute, value):
|
||||||
|
return super(DecorateAll, cls).__setattr__(
|
||||||
|
attribute, maybe_decorate(attribute, value)
|
||||||
|
)
|
||||||
|
|
||||||
|
def __new__(cls, name, bases, attributes_dict):
|
||||||
|
new_attributes_dict = dict(
|
||||||
|
(attribute, maybe_decorate(attribute, value))
|
||||||
|
for attribute, value in attributes_dict.items()
|
||||||
|
)
|
||||||
|
return super(DecorateAll, cls).__new__(
|
||||||
|
cls, name, bases, new_attributes_dict
|
||||||
|
)
|
||||||
|
return DecorateAll
|
||||||
|
|||||||
Reference in New Issue
Block a user