Mirror of https://github.com/kevin1024/vcrpy.git (synced 2025-12-09 17:15:35 +00:00)

Compare commits: v1.7.3...better_log (38 commits)
| SHA1 |
|---|
| f821fed418 |
| 50246791e3 |
| 8a5bf23d34 |
| f6b8e4f8e7 |
| 2ac3fa9abe |
| 1324feae99 |
| 7990c549d1 |
| 327797c4ff |
| ac510097e0 |
| 00d973a0f5 |
| 79ff59feae |
| 34252bc234 |
| 5f78657c52 |
| 00b4e451fe |
| 44564ba39f |
| 7f02a7e999 |
| c28adea66d |
| 3f006cc261 |
| 0eda8ba482 |
| d620095c36 |
| c8180326ad |
| d55d593d1c |
| 04f4a7fd2f |
| 6fd04f3675 |
| 420f83b6b1 |
| c6adcc83b3 |
| dc61f5f520 |
| 4450cb992f |
| 083b1ec686 |
| 97c924d8dd |
| 65398131a4 |
| 7312229aef |
| b62265c0ad |
| d00c60a4ad |
| 4ddfb47c9c |
| f0b7c3f1e0 |
| 646d12df94 |
| eda64bc3be |
3  .gitignore (vendored)

@@ -7,4 +7,5 @@ dist/
 *.egg-info/
 pytestdebug.log
 
 fixtures/
+/docs/_build
@@ -1,4 +1,5 @@
 language: python
+sudo: false
 before_install: openssl version
 env:
 global:
@@ -9,6 +10,7 @@ env:
 - WITH_LIB="requests2.3"
 - WITH_LIB="requests2.4"
 - WITH_LIB="requests2.5"
+- WITH_LIB="requests2.6"
 - WITH_LIB="requests2.7"
 - WITH_LIB="requests1.x"
 - WITH_LIB="httplib2"
@@ -40,6 +42,7 @@ install:
 - if [ $WITH_LIB = "requests2.3" ] ; then pip install requests==2.3.0; fi
 - if [ $WITH_LIB = "requests2.4" ] ; then pip install requests==2.4.0; fi
 - if [ $WITH_LIB = "requests2.5" ] ; then pip install requests==2.5.0; fi
+- if [ $WITH_LIB = "requests2.6" ] ; then pip install requests==2.6.0; fi
 - if [ $WITH_LIB = "requests2.7" ] ; then pip install requests==2.7.0; fi
 - if [ $WITH_LIB = "httplib2" ] ; then pip install httplib2; fi
 - if [ $WITH_LIB = "boto" ] ; then pip install boto; fi
34  README.rst

@@ -1,16 +1,14 @@
+|Build Status| |Stories in Ready| |Gitter|
+
 VCR.py
 ======
 
-.. figure:: https://raw.github.com/kevin1024/vcrpy/master/vcr.png
+.. image:: https://raw.github.com/kevin1024/vcrpy/master/vcr.png
    :alt: vcr.py
 
-   vcr.py
-
 This is a Python version of `Ruby's VCR
 library <https://github.com/vcr/vcr>`__.
 
-|Build Status| |Stories in Ready|
-
 What it does
 ------------
 
@@ -575,6 +573,24 @@ If you set the loglevel to DEBUG, you will also get information about
 which matchers didn't match. This can help you with debugging custom
 matchers.
 
+Speed
+-----
+
+VCR.py runs about 10x faster when pyyaml can use the libyaml extensions. However, just installing ``libyaml`` (Mac) or ``libyaml-dev`` (Linux) is not enough, as pyyaml needs to be rebuilt with the proper flag. Note that pip caches the previously built package, so clear the cache (or bypass it) first.
+
+Are you using libyaml already? This should work:
+
+.. code:: sh
+
+    python -c 'from yaml import CLoader'
+
+If not:
+
+.. code:: sh
+
+    pip uninstall pyyaml
+    pip --no-cache-dir install pyyaml
+
+
 Upgrade
 -------
 
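A quick way to confirm that the reinstall actually picked up libyaml: pyyaml exposes a flag for this. The snippet below is not part of the README diff above, just a hedged convenience check using standard pyyaml attributes.

```python
# Sanity check after "pip --no-cache-dir install pyyaml":
# yaml.__with_libyaml__ is True only when the C extension (libyaml) is in use,
# which is what gives VCR.py the ~10x serialization speedup mentioned above.
import yaml

print("libyaml enabled:", yaml.__with_libyaml__)

try:
    from yaml import CLoader  # importable only when the C extension is present
    print("CLoader available")
except ImportError:
    print("falling back to the pure-Python loader")
```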
@@ -609,7 +625,10 @@ new API in version 1.0.x
 
 Changelog
 ---------
-- 1.7.3 [#188] ``additional_matchers`` kwarg on ``use_casstte``.
+- 1.7.4 [#217] Make use_cassette decorated functions actually return a
+  value (thanks @bcen). [#199] Fix path transformation defaults.
+  Better headers dictionary management.
+- 1.7.3 [#188] ``additional_matchers`` kwarg on ``use_cassette``.
   [#191] Actually support passing multiple before_record_request
   functions (thanks @agriffis).
 - 1.7.2 [#186] Get effective_url in tornado (thanks @mvschaik), [#187]
@@ -758,3 +777,6 @@ more details
    :target: http://travis-ci.org/kevin1024/vcrpy
 .. |Stories in Ready| image:: https://badge.waffle.io/kevin1024/vcrpy.png?label=ready&title=Ready
    :target: https://waffle.io/kevin1024/vcrpy
+.. |Gitter| image:: https://badges.gitter.im/Join%20Chat.svg
+   :alt: Join the chat at https://gitter.im/kevin1024/vcrpy
+   :target: https://gitter.im/kevin1024/vcrpy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
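The 1.7.4 entries above are easier to follow with a short usage sketch. The cassette paths and the `body` matcher name are illustrative, not taken from the diff; `additional_matchers` is assumed to take registered matcher names, the same way `match_on` does.

```python
import vcr

my_vcr = vcr.VCR()

@my_vcr.use_cassette('fixtures/return-value.yaml')
def fetch_status():
    # [#217] use_cassette-decorated functions now propagate their return
    # value instead of silently dropping it.
    return 'computed under the cassette'

assert fetch_status() == 'computed under the cassette'

# [#188] additional_matchers extends the default matchers for one call,
# instead of replacing them wholesale the way match_on does.
@my_vcr.use_cassette('fixtures/strict.yaml', additional_matchers=('body',))
def fetch_strictly():
    pass
```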
192  docs/Makefile (new file)

@@ -0,0 +1,192 @@
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo " html to make standalone HTML files"
	@echo " dirhtml to make HTML files named index.html in directories"
	@echo " singlehtml to make a single large HTML file"
	@echo " pickle to make pickle files"
	@echo " json to make JSON files"
	@echo " htmlhelp to make HTML files and a HTML help project"
	@echo " qthelp to make HTML files and a qthelp project"
	@echo " applehelp to make an Apple Help Book"
	@echo " devhelp to make HTML files and a Devhelp project"
	@echo " epub to make an epub"
	@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo " latexpdf to make LaTeX files and run them through pdflatex"
	@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo " text to make text files"
	@echo " man to make manual pages"
	@echo " texinfo to make Texinfo files"
	@echo " info to make Texinfo files and run them through makeinfo"
	@echo " gettext to make PO message catalogs"
	@echo " changes to make an overview of all changed/added/deprecated items"
	@echo " xml to make Docutils-native XML files"
	@echo " pseudoxml to make pseudoxml-XML files for display purposes"
	@echo " linkcheck to check all external links for integrity"
	@echo " doctest to run all doctests embedded in the documentation (if enabled)"
	@echo " coverage to run coverage check of the documentation (if enabled)"

clean:
	rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/vcrpy.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/vcrpy.qhc"

applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	"~/Library/Documentation/Help or install it in your application" \
	"bundle."

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/vcrpy"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/vcrpy"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	"(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	"(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	"or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	"results in $(BUILDDIR)/doctest/output.txt."

coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	"results in $(BUILDDIR)/coverage/python.txt."

xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
293  docs/conf.py (new file)

@@ -0,0 +1,293 @@
# -*- coding: utf-8 -*-
#
# vcrpy documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 13 11:18:00 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
import shlex

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.intersphinx',
    'sphinx.ext.coverage',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'vcrpy'
copyright = u'2015, Kevin McCarthy'
author = u'Kevin McCarthy'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.7.3'
# The full version, including alpha/beta/rc tags.
release = '1.7.3'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'vcrpydoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',

# Latex figure (float) alignment
#'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  (master_doc, 'vcrpy.tex', u'vcrpy Documentation',
   u'Kevin McCarthy', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'vcrpy', u'vcrpy Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'vcrpy', u'vcrpy Documentation',
     author, 'vcrpy', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False


# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
19  docs/index.rst (new file)

@@ -0,0 +1,19 @@
vcrpy
=====

Contents:

.. toctree::
   :maxdepth: 2

   vcr



Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
6  docs/vcr.rst (new file)

@@ -0,0 +1,6 @@
:mod:`~vcr.config`
=================

.. automodule:: vcr.config
   :members:
   :special-members: __init__
2  setup.py

@@ -51,7 +51,7 @@ except Exception:
 
 setup(
     name='vcrpy',
-    version='1.7.3',
+    version='1.7.4',
     description=(
         "Automatically mock your HTTP interactions to simplify and "
         "speed up testing"
@@ -1,5 +1,6 @@
 import pytest
 boto = pytest.importorskip("boto")
+
 import boto
 import boto.iam
 from boto.s3.connection import S3Connection
@@ -7,6 +8,7 @@ from boto.s3.key import Key
 from ConfigParser import DuplicateSectionError
 import vcr
 
+
 def test_boto_stubs(tmpdir):
     with vcr.use_cassette(str(tmpdir.join('boto-stubs.yml'))):
         # Perform the imports within the patched context so that
@@ -17,16 +19,18 @@ def test_boto_stubs(tmpdir):
         assert issubclass(CertValidatingHTTPSConnection, VCRCertValidatingHTTPSConnection)
         CertValidatingHTTPSConnection('hostname.does.not.matter')
 
+
 def test_boto_without_vcr():
     s3_conn = S3Connection()
     s3_bucket = s3_conn.get_bucket('boto-demo-1394171994')  # a bucket you can access
     k = Key(s3_bucket)
     k.key = 'test.txt'
     k.set_contents_from_string('hello world i am a string')
 
+
 def test_boto_medium_difficulty(tmpdir):
     s3_conn = S3Connection()
     s3_bucket = s3_conn.get_bucket('boto-demo-1394171994')  # a bucket you can access
     with vcr.use_cassette(str(tmpdir.join('boto-medium.yml'))) as cass:
         k = Key(s3_bucket)
         k.key = 'test.txt'
@@ -41,18 +45,19 @@ def test_boto_medium_difficulty(tmpdir):
 def test_boto_hardcore_mode(tmpdir):
     with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))) as cass:
         s3_conn = S3Connection()
         s3_bucket = s3_conn.get_bucket('boto-demo-1394171994')  # a bucket you can access
         k = Key(s3_bucket)
         k.key = 'test.txt'
         k.set_contents_from_string('hello world i am a string')
 
     with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))) as cass:
         s3_conn = S3Connection()
         s3_bucket = s3_conn.get_bucket('boto-demo-1394171994')  # a bucket you can access
         k = Key(s3_bucket)
         k.key = 'test.txt'
         k.set_contents_from_string('hello world i am a string')
 
+
 def test_boto_iam(tmpdir):
     try:
         boto.config.add_section('Boto')
@@ -17,11 +17,7 @@ def _request_with_auth(url, username, password):
 
 
 def _find_header(cassette, header):
-    for request in cassette.requests:
-        for k in request.headers:
-            if header.lower() == k.lower():
-                return True
-    return False
+    return any(header in request.headers for request in cassette.requests)
 
 
 def test_filter_basic_auth(tmpdir):
@@ -10,6 +10,7 @@ from vcr.compat import mock, contextlib
 from vcr.cassette import Cassette
 from vcr.errors import UnhandledHTTPRequestError
 from vcr.patch import force_reset
+from vcr.matchers import path, method, query, host
 from vcr.stubs import VCRHTTPSConnection
 
 
@@ -245,6 +246,13 @@ def test_path_transformer_with_context_manager():
         assert cassette._path == 'a'
 
 
+def test_path_transformer_None():
+    with Cassette.use(
+        path='a', path_transformer=None,
+    ) as cassette:
+        assert cassette._path == 'a'
+
+
 def test_func_path_generator():
     def generator(function):
         return os.path.join(os.path.dirname(inspect.getfile(function)),
@@ -287,3 +295,18 @@ def test_use_as_decorator_on_generator():
         assert httplib.HTTPConnection is not original_http_connetion
         yield 2
     assert list(test_function()) == [1, 2]
+
+
+def test_similar_requests(tmpdir):
+    # WIP needs to be finished
+    @Cassette.use(inject=True, match_on=(path, query, host, method))
+    def test_function(cassette):
+        conn = httplib.HTTPConnection("www.python.org")
+        conn.request("GET", "/index.html?test=1")
+
+        conn = httplib.HTTPConnection("www.python.org")
+        conn.request("GET", "/index.html?test=0")
+
+        conn = httplib.HTTPConnection("www.cool.org")
+        conn.request("GET", "/index.html?test=0")
+        cassette.similar_requests()
@@ -73,7 +73,7 @@ def test_remove_nonexistent_post_data_parameters():
 def test_remove_json_post_data_parameters():
     body = b'{"id": "secret", "foo": "bar", "baz": "qux"}'
     request = Request('POST', 'http://google.com', body, {})
-    request.add_header('Content-Type', 'application/json')
+    request.headers['Content-Type'] = 'application/json'
     remove_post_data_parameters(request, ['id'])
     request_body_json = json.loads(request.body.decode('utf-8'))
     expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}'.decode('utf-8'))
@@ -83,7 +83,7 @@ def test_remove_json_post_data_parameters():
 def test_remove_all_json_post_data_parameters():
     body = b'{"id": "secret", "foo": "bar"}'
     request = Request('POST', 'http://google.com', body, {})
-    request.add_header('Content-Type', 'application/json')
+    request.headers['Content-Type'] = 'application/json'
     remove_post_data_parameters(request, ['id', 'foo'])
     assert request.body == b'{}'
 
@@ -91,6 +91,6 @@ def test_remove_all_json_post_data_parameters():
 def test_remove_nonexistent_json_post_data_parameters():
     body = b'{}'
     request = Request('POST', 'http://google.com', body, {})
-    request.add_header('Content-Type', 'application/json')
+    request.headers['Content-Type'] = 'application/json'
     remove_post_data_parameters(request, ['id'])
     assert request.body == b'{}'
@@ -1,6 +1,6 @@
 import pytest
 
-from vcr.request import Request
+from vcr.request import Request, HeadersDict
 
 
 def test_str():
@@ -12,11 +12,16 @@ def test_headers():
     headers = {'X-Header1': ['h1'], 'X-Header2': 'h2'}
     req = Request('GET', 'http://go.com/', '', headers)
     assert req.headers == {'X-Header1': 'h1', 'X-Header2': 'h2'}
-    req.add_header('X-Header1', 'h11')
+    req.headers['X-Header1'] = 'h11'
     assert req.headers == {'X-Header1': 'h11', 'X-Header2': 'h2'}
 
 
+def test_add_header_deprecated():
+    req = Request('GET', 'http://go.com/', '', {})
+    pytest.deprecated_call(req.add_header, 'foo', 'bar')
+    assert req.headers == {'foo': 'bar'}
+
+
 @pytest.mark.parametrize("uri, expected_port", [
     ('http://go.com/', 80),
     ('http://go.com:80/', 80),
@@ -36,3 +41,30 @@ def test_uri():
 
     req = Request('GET', 'http://go.com:80/', '', {})
     assert req.uri == 'http://go.com:80/'
+
+
+def test_HeadersDict():
+
+    # Simple test of CaseInsensitiveDict
+    h = HeadersDict()
+    assert h == {}
+    h['Content-Type'] = 'application/json'
+    assert h == {'Content-Type': 'application/json'}
+    assert h['content-type'] == 'application/json'
+    assert h['CONTENT-TYPE'] == 'application/json'
+
+    # Test feature of HeadersDict: devolve list to first element
+    h = HeadersDict()
+    assert h == {}
+    h['x'] = ['foo', 'bar']
+    assert h == {'x': 'foo'}
+
+    # Test feature of HeadersDict: preserve original key case
+    h = HeadersDict()
+    assert h == {}
+    h['Content-Type'] = 'application/json'
+    assert h == {'Content-Type': 'application/json'}
+    h['content-type'] = 'text/plain'
+    assert h == {'Content-Type': 'text/plain'}
+    h['CONtent-tyPE'] = 'whoa'
+    assert h == {'Content-Type': 'whoa'}
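The `test_HeadersDict` additions above pin down three behaviours of the new headers container: case-insensitive lookup, collapsing list values to their first element, and preserving the casing of the first key seen. The real class lives in `vcr.request`; the stand-in below is only an illustrative sketch that satisfies those assertions.

```python
class HeadersDictSketch(dict):
    """Illustrative stand-in for vcr.request.HeadersDict (not the real code)."""

    def __setitem__(self, key, value):
        # Cassettes store multi-valued headers as lists; devolve to the first element.
        if isinstance(value, (list, tuple)):
            value = value[0] if value else ''
        # Preserve the casing of the first key seen for this header.
        for existing in self:
            if existing.lower() == key.lower():
                key = existing
                break
        dict.__setitem__(self, key, value)

    def __getitem__(self, key):
        for existing in self:
            if existing.lower() == key.lower():
                return dict.__getitem__(self, existing)
        raise KeyError(key)


h = HeadersDictSketch()
h['Content-Type'] = 'application/json'
assert h['content-type'] == 'application/json'
h['CONtent-tyPE'] = 'whoa'
assert h == {'Content-Type': 'whoa'}

h2 = HeadersDictSketch()
h2['x'] = ['foo', 'bar']
assert h2 == {'x': 'foo'}
```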
7  tests/unit/test_stubs.py (new file)

@@ -0,0 +1,7 @@
from vcr.stubs import VCRHTTPSConnection

class TestVCRConnection(object):
    def test_setting_of_attributes_get_propogated_to_real_connection(self):
        vcr_connection = VCRHTTPSConnection('www.examplehost.com')
        vcr_connection.ssl_version = 'example_ssl_version'
        assert vcr_connection.real_connection.ssl_version == 'example_ssl_version'
@@ -1,11 +1,13 @@
 import os
 
 import pytest
+from six.moves import http_client as httplib
 
 from vcr import VCR, use_cassette
 from vcr.compat import mock
 from vcr.request import Request
 from vcr.stubs import VCRHTTPSConnection
+from vcr.patch import _HTTPConnection, force_reset
 
 
 def test_vcr_use_cassette():
@@ -98,6 +100,43 @@ def test_vcr_before_record_response_iterable():
         assert mock_filter.call_count == 1
 
 
+def test_before_record_response_as_filter():
+    request = Request('GET', '/', '', {})
+    response = object()  # just can't be None
+
+    # Prevent actually saving the cassette
+    with mock.patch('vcr.cassette.save_cassette'):
+
+        filter_all = mock.Mock(return_value=None)
+        vcr = VCR(before_record_response=filter_all)
+        with vcr.use_cassette('test') as cassette:
+            cassette.append(request, response)
+            assert cassette.data == []
+            assert not cassette.dirty
+
+
+def test_vcr_path_transformer():
+    # Regression test for #199
+
+    # Prevent actually saving the cassette
+    with mock.patch('vcr.cassette.save_cassette'):
+
+        # Baseline: path should be unchanged
+        vcr = VCR()
+        with vcr.use_cassette('test') as cassette:
+            assert cassette._path == 'test'
+
+        # Regression test: path_transformer=None should do the same.
+        vcr = VCR(path_transformer=None)
+        with vcr.use_cassette('test') as cassette:
+            assert cassette._path == 'test'
+
+        # and it should still work with cassette_library_dir
+        vcr = VCR(cassette_library_dir='/foo')
+        with vcr.use_cassette('test') as cassette:
+            assert cassette._path == '/foo/test'
+
+
 @pytest.fixture
 def random_fixture():
     return 1
@@ -243,6 +282,7 @@ def test_path_transformer():
 
 def test_cassette_name_generator_defaults_to_using_module_function_defined_in():
     vcr = VCR(inject_cassette=True)
+
     @vcr.use_cassette
     def function_name(cassette):
         assert cassette._path == os.path.join(os.path.dirname(__file__),
@@ -274,3 +314,40 @@ def test_additional_matchers():
 
     function_defaults()
     function_additional()
+
+
+def test_decoration_should_respect_function_return_value():
+    vcr = VCR()
+    ret = 'a-return-value'
+
+    @vcr.use_cassette
+    def function_with_return():
+        return ret
+
+    assert ret == function_with_return()
+
+
+class TestVCRClass(VCR().test_case()):
+
+    def no_decoration(self):
+        assert httplib.HTTPConnection == _HTTPConnection
+        self.test_dynamically_added()
+        assert httplib.HTTPConnection == _HTTPConnection
+
+    def test_one(self):
+        with force_reset():
+            self.no_decoration()
+        with force_reset():
+            self.test_two()
+        assert httplib.HTTPConnection != _HTTPConnection
+
+    def test_two(self):
+        assert httplib.HTTPConnection != _HTTPConnection
+
+
+def test_dynamically_added(self):
+    assert httplib.HTTPConnection != _HTTPConnection
+
+
+TestVCRClass.test_dynamically_added = test_dynamically_added
+del test_dynamically_added
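Together with the `Cassette.append` change further down (which skips recording when the response filter returns `None`), `before_record_response` can now act as a real filter. A small sketch; the response-dict layout (`response['status']['code']`) follows vcrpy's cassette format, and the cassette path is illustrative.

```python
import vcr

def drop_server_errors(response):
    # Returning None tells Cassette.append to skip this request/response
    # pair entirely, so 5xx responses never end up in the cassette.
    if response['status']['code'] >= 500:
        return None
    return response

my_vcr = vcr.VCR(before_record_response=drop_server_errors)

with my_vcr.use_cassette('fixtures/no-errors.yaml'):
    pass  # make HTTP requests here; server errors are not persisted
```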
@@ -1,6 +1,7 @@
-import sys
 import inspect
 import logging
+import operator
+import sys
 
 import wrapt
 
@@ -75,7 +76,7 @@ class CassetteContextDecorator(object):
             lambda key, _: key in self._non_cassette_arguments,
             self._args_getter()
         )
-        if 'path_transformer' in other_kwargs:
+        if other_kwargs.get('path_transformer'):
             transformer = other_kwargs['path_transformer']
             cassette_kwargs['path'] = transformer(cassette_kwargs['path'])
         self.__finish = self._patch_generator(self.cls.load(**cassette_kwargs))
@@ -127,7 +128,7 @@ class CassetteContextDecorator(object):
 
     def _handle_function(self, function, args, kwargs):
         with self as cassette:
-            self.__handle_function(cassette, function, args, kwargs)
+            return self.__handle_function(cassette, function, args, kwargs)
 
     @staticmethod
     def get_function_name(function):
@@ -145,9 +146,33 @@ class CassetteContextDecorator(object):
         return new_args_getter
 
 
+class SimilarityScorer(object):
+
+    def __init__(self, matchers, request, ascending=False):
+        self._matchers = matchers
+        self._request = request
+        self._ascending = False
+
+    def score(self, candidate, play_count):
+        value = 1
+        total = 0
+        if play_count < 1:
+            total += value
+        if self._ascending:
+            value *= 2
+        for matcher in self._matchers[::-1]:
+            if matcher(self._request, candidate):
+                total += value
+            if self._ascending:
+                value *= 2
+        return total
+
+
 class Cassette(object):
     """A container for recorded requests and responses"""
 
+    max_playcount = 1
+
     @classmethod
     def load(cls, **kwargs):
         """Instantiate and load the cassette stored at the specified path."""
@@ -165,14 +190,15 @@ class Cassette(object):
 
     def __init__(self, path, serializer=yamlserializer, record_mode='once',
                  match_on=(uri, method), before_record_request=None,
-                 before_record_response=None, custom_patches=(),
-                 inject=False):
+                 before_record_response=None, custom_patches=(), inject=False,
+                 similarity_scorer_factory=None):
 
         self._path = path
         self._serializer = serializer
         self._match_on = match_on
         self._before_record_request = before_record_request or (lambda x: x)
         self._before_record_response = before_record_response or (lambda x: x)
+        self._similarity_scorer_factory = similarity_scorer_factory or SimilarityScorer
         self.inject = inject
         self.record_mode = record_mode
         self.custom_patches = custom_patches
@@ -211,6 +237,8 @@ class Cassette(object):
         if not request:
             return
         response = self._before_record_response(response)
+        if response is None:
+            return
         self.data.append((request, response))
         self.dirty = True
 
@@ -227,6 +255,20 @@ class Cassette(object):
             if requests_match(request, stored_request, self._match_on):
                 yield index, response
 
+    def failing_matchers(self, a, b):
+        return [matcher for matcher in self._match_on if not matcher(a, b)]
+
+    def similar_requests(self, request):
+        scorer = self._similarity_scorer_factory(self._match_on, request).score
+        scored_requests = [
+            (
+                stored_request,
+                scorer(stored_request, self.play_counts[index])
+            )
+            for index, (stored_request, response) in enumerate(self.data)
+        ]
+        return sorted(scored_requests, key=operator.itemgetter(1), reverse=True)
+
     def can_play_response_for(self, request):
         request = self._before_record_request(request)
         return request and request in self and \
@@ -239,7 +281,7 @@ class Cassette(object):
         hasn't been played back before, and mark it as played
         """
         for index, response in self._responses(request):
-            if self.play_counts[index] == 0:
+            if self.play_counts[index] < self.max_playcount:
                 self.play_counts[index] += 1
                 return response
         # The cassette doesn't contain the request asked for.
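A sketch of how the new debugging helpers might be used when playback fails to find a match. Note that, as captured, `SimilarityScorer.__init__` stores `self._ascending = False` regardless of the `ascending` argument, so the weighting is effectively always flat. The cassette path and the request below are illustrative.

```python
from vcr.cassette import Cassette
from vcr.request import Request

# A request that (hypothetically) failed to play back from the cassette.
request = Request('GET', 'http://www.python.org/index.html?test=2', None, {})

with Cassette.use(path='fixtures/example.yaml') as cassette:
    # Which of the configured matchers reject each stored request?
    for stored_request, _ in cassette.data:
        print(stored_request, cassette.failing_matchers(request, stored_request))

    # Stored requests ranked by similarity to `request`: unplayed entries
    # and every passing matcher add to the score.
    for stored_request, score in cassette.similar_requests(request):
        print(score, stored_request)
```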
@@ -2,19 +2,25 @@ import copy
 import functools
 import inspect
 import os
+import types
 
 import six
 
 from .compat import collections
 from .cassette import Cassette
 from .serializers import yamlserializer, jsonserializer
-from .util import compose
+from .util import compose, auto_decorate
 from . import matchers
 from . import filters
 
 
 class VCR(object):
 
+    @staticmethod
+    def is_test_method(method_name, function):
+        return method_name.startswith('test') and \
+            isinstance(function, types.FunctionType)
+
     @staticmethod
     def ensure_suffix(suffix):
         def ensure(path):
@@ -23,7 +29,7 @@ class VCR(object):
             return path
         return ensure
 
-    def __init__(self, path_transformer=lambda x: x, before_record_request=None,
+    def __init__(self, path_transformer=None, before_record_request=None,
                  custom_patches=(), filter_query_parameters=(), ignore_hosts=(),
                  record_mode="once", ignore_localhost=False, filter_headers=(),
                  before_record_response=None, filter_post_data_parameters=(),
@@ -108,7 +114,7 @@ class VCR(object):
         matcher_names = kwargs.get('match_on', self.match_on)
         path_transformer = kwargs.get(
             'path_transformer',
-            self.path_transformer or self.ensure_suffix('.yaml')
+            self.path_transformer
         )
         func_path_generator = kwargs.get(
             'func_path_generator',
@@ -202,7 +208,7 @@ class VCR(object):
         if filter_query_parameters:
             filter_functions.append(functools.partial(
                 filters.remove_query_parameters,
                 query_parameters_to_remove=filter_query_parameters
             ))
         if filter_post_data_parameters:
             filter_functions.append(
@@ -250,3 +256,7 @@ class VCR(object):
 
     def register_matcher(self, name, matcher):
         self.matchers[name] = matcher
+
+    def test_case(self, predicate=None):
+        predicate = predicate or self.is_test_method
+        return six.with_metaclass(auto_decorate(self.use_cassette, predicate))
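Two consequences of the `config.py` changes above, sketched with illustrative names: since the default `path_transformer` is now `None`, the implicit `.yaml` suffix is gone unless you opt back in via `ensure_suffix`, and the new `test_case()` hook auto-decorates `test*` methods of a subclass with `use_cassette`.

```python
import vcr

# Opt back in to the old automatic '.yaml' suffix.
my_vcr = vcr.VCR(path_transformer=vcr.VCR.ensure_suffix('.yaml'))

@my_vcr.use_cassette('fixtures/suffixed')  # stored as fixtures/suffixed.yaml
def fetch():
    pass

# Every method whose name starts with 'test' is wrapped in
# my_vcr.use_cassette automatically (via auto_decorate).
class MyHTTPTests(my_vcr.test_case()):

    def test_something(self):
        pass  # runs with vcrpy's patching in place
```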
@@ -1,19 +1,16 @@
|
|||||||
from six import BytesIO, text_type
|
from six import BytesIO, text_type
|
||||||
from six.moves.urllib.parse import urlparse, urlencode, urlunparse
|
from six.moves.urllib.parse import urlparse, urlencode, urlunparse
|
||||||
import copy
|
|
||||||
import json
|
import json
|
||||||
|
|
||||||
from .compat import collections
|
from .compat import collections
|
||||||
|
|
||||||
|
|
||||||
def remove_headers(request, headers_to_remove):
|
def remove_headers(request, headers_to_remove):
|
||||||
headers = copy.copy(request.headers)
|
new_headers = request.headers.copy()
|
||||||
headers_to_remove = [h.lower() for h in headers_to_remove]
|
for k in headers_to_remove:
|
||||||
keys = [k for k in headers if k.lower() in headers_to_remove]
|
if k in new_headers:
|
||||||
if keys:
|
del new_headers[k]
|
||||||
for k in keys:
|
request.headers = new_headers
|
||||||
headers.pop(k)
|
|
||||||
request.headers = headers
|
|
||||||
return request
|
return request
|
||||||
|
|
||||||
|
|
||||||
@@ -30,8 +27,7 @@ def remove_query_parameters(request, query_parameters_to_remove):
 
 def remove_post_data_parameters(request, post_data_parameters_to_remove):
     if request.method == 'POST' and not isinstance(request.body, BytesIO):
-        if ('Content-Type' in request.headers and
-                request.headers['Content-Type'] == 'application/json'):
+        if request.headers.get('Content-Type') == 'application/json':
             json_data = json.loads(request.body.decode('utf-8'))
             for k in list(json_data.keys()):
                 if k in post_data_parameters_to_remove:
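The ``Content-Type`` check is likewise simplified to a plain ``.get()``, again leaning on case-insensitive headers. The remainder of the function (deleting the matched keys and re-serialising the JSON body) is not shown in this hunk, so the sketch below only checks that the parameter disappears::

    import json

    from vcr.filters import remove_post_data_parameters
    from vcr.request import Request

    req = Request(
        method='POST',
        uri='http://example.com/login',
        body=json.dumps({'user': 'kevin', 'api_key': 'secret'}).encode('utf-8'),
        headers={'content-type': 'application/json'},   # any casing should do
    )

    remove_post_data_parameters(req, ['api_key'])
    body = req.body if isinstance(req.body, str) else req.body.decode('utf-8')
    assert 'api_key' not in json.loads(body)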
@@ -1,6 +1,6 @@
 import json
 from six.moves import urllib, xmlrpc_client
-from .util import CaseInsensitiveDict, read_body
+from .util import read_body
 import logging
 
 
@@ -66,9 +66,8 @@ def _identity(x):
 
 
 def _get_transformer(request):
-    headers = CaseInsensitiveDict(request.headers)
     for checker, transformer in _checker_transformer_pairs:
-        if checker(headers): return transformer
+        if checker(request.headers): return transformer
     else:
         return _identity
 
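``_get_transformer`` stops wrapping the headers itself because ``request.headers`` is already case-insensitive. The checkers paired in ``_checker_transformer_pairs`` are not shown here; the helper below is only an illustration of the kind of header check that now works regardless of casing::

    from vcr.request import Request


    def _header_checker(value, header='Content-Type'):
        # illustrative stand-in for the checkers used by the body matcher
        def checker(headers):
            return value in headers.get(header, '')
        return checker


    is_json = _header_checker('application/json')
    req = Request('GET', 'http://example.com/', '', {'content-type': 'application/json'})
    assert is_json(req.headers)      # matches despite the lower-case header name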
@@ -1,27 +1,12 @@
+import warnings
 from six import BytesIO, text_type
 from six.moves.urllib.parse import urlparse, parse_qsl
+from .util import CaseInsensitiveDict
 
 
 class Request(object):
     """
     VCR's representation of a request.
-
-    There is a weird quirk in HTTP. You can send the same header twice. For
-    this reason, headers are represented by a dict, with lists as the values.
-    However, it appears that HTTPlib is completely incapable of sending the
-    same header twice. This puts me in a weird position: I want to be able to
-    accurately represent HTTP headers in cassettes, but I don't want the extra
-    step of always having to do [0] in the general case, i.e.
-    request.headers['key'][0]
-
-    In addition, some servers sometimes send the same header more than once,
-    and httplib *can* deal with this situation.
-
-    Futhermore, I wanted to keep the request and response cassette format as
-    similar as possible.
-
-    For this reason, in cassettes I keep a dict with lists as keys, but once
-    deserialized into VCR, I keep them as plain, naked dicts.
     """
 
     def __init__(self, method, uri, body, headers):
@@ -32,9 +17,17 @@ class Request(object):
             self.body = body.read()
         else:
             self.body = body
-        self.headers = {}
-        for key in headers:
-            self.add_header(key, headers[key])
+        self.headers = headers
+
+    @property
+    def headers(self):
+        return self._headers
+
+    @headers.setter
+    def headers(self, value):
+        if not isinstance(value, HeadersDict):
+            value = HeadersDict(value)
+        self._headers = value
 
     @property
     def body(self):
@@ -47,11 +40,10 @@ class Request(object):
         self._body = value
 
     def add_header(self, key, value):
-        # see class docstring for an explanation
-        if isinstance(value, (tuple, list)):
-            self.headers[key] = value[0]
-        else:
-            self.headers[key] = value
+        warnings.warn("Request.add_header is deprecated. "
+                      "Please assign to request.headers instead.",
+                      DeprecationWarning)
+        self.headers[key] = value
 
     @property
     def scheme(self):
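``Request.headers`` is now a property backed by ``HeadersDict``: assigning any mapping wraps it, list values collapse to a single string, and ``add_header`` survives only as a deprecated shim. A short sketch of the new API::

    import warnings

    from vcr.request import Request

    req = Request('GET', 'http://example.com/', '', {'Accept': ['application/json']})

    # list values are collapsed and lookups are case-insensitive
    assert req.headers['accept'] == 'application/json'

    # plain item assignment is the supported way to add a header
    req.headers['User-Agent'] = 'vcrpy-test'

    # add_header still works, but warns
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        req.add_header('X-Token', 'abc')
    assert any(issubclass(w.category, DeprecationWarning) for w in caught)
    assert req.headers['x-token'] == 'abc'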
@@ -105,3 +97,35 @@ class Request(object):
     @classmethod
     def _from_dict(cls, dct):
         return Request(**dct)
+
+
+class HeadersDict(CaseInsensitiveDict):
+    """
+    There is a weird quirk in HTTP. You can send the same header twice. For
+    this reason, headers are represented by a dict, with lists as the values.
+    However, it appears that HTTPlib is completely incapable of sending the
+    same header twice. This puts me in a weird position: I want to be able to
+    accurately represent HTTP headers in cassettes, but I don't want the extra
+    step of always having to do [0] in the general case, i.e.
+    request.headers['key'][0]
+
+    In addition, some servers sometimes send the same header more than once,
+    and httplib *can* deal with this situation.
+
+    Futhermore, I wanted to keep the request and response cassette format as
+    similar as possible.
+
+    For this reason, in cassettes I keep a dict with lists as keys, but once
+    deserialized into VCR, I keep them as plain, naked dicts.
+    """
+
+    def __setitem__(self, key, value):
+        if isinstance(value, (tuple, list)):
+            value = value[0]
+
+        # Preserve the case from the first time this key was set.
+        old = self._store.get(key.lower())
+        if old:
+            key = old[0]
+
+        super(HeadersDict, self).__setitem__(key, value)
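``HeadersDict`` is where the single-value policy and the casing rules live. Assuming it stays in the same module as ``Request``, as the hunk suggests, its behaviour can be summarised as::

    from vcr.request import HeadersDict

    h = HeadersDict()
    h['Content-Type'] = ['application/json']   # list collapses to its first element
    h['content-type'] = 'text/plain'           # updates the value, keeps 'Content-Type'

    assert h['CONTENT-TYPE'] == 'text/plain'
    assert list(h.keys()) == ['Content-Type']  # casing from the first assignment wins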
@@ -9,7 +9,6 @@ import six
 from six.moves.http_client import (
     HTTPConnection,
     HTTPSConnection,
-    HTTPMessage,
    HTTPResponse,
 )
 from six import BytesIO
@@ -188,8 +187,7 @@ class VCRConnection(object):
         log.debug('Got {0}'.format(self._vcr_request))
 
     def putheader(self, header, *values):
-        for value in values:
-            self._vcr_request.add_header(header, value)
+        self._vcr_request.headers[header] = values
 
     def send(self, data):
         '''
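``putheader`` now assigns the whole ``*values`` tuple straight into the request's headers; ``HeadersDict.__setitem__`` keeps only the first element, which matches the single-value policy described in its docstring but does drop any additional values. A stand-alone sketch of the effect::

    from vcr.request import HeadersDict

    headers = HeadersDict()


    def putheader(header, *values):   # simplified stand-in for VCRConnection.putheader
        headers[header] = values


    putheader('Accept', 'application/json', 'text/plain')
    assert headers['accept'] == 'application/json'   # only the first value is kept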
@@ -229,12 +227,26 @@ class VCRConnection(object):
         if self.cassette.write_protected and self.cassette.filter_request(
             self._vcr_request
         ):
+            most_similar_request = None
+            failing_matchers = None
+            most_similar_request_info = None
+            try:
+                most_similar_request_info = self.cassette.similar_requests(self._vcr_request)
+                most_similar_request = most_similar_request_info[0][0]
+                failing_matchers = self.cassette.failing_matchers(
+                    self._vcr_request, most_similar_request
+                )
+            except Exception as err:
+                print "XXXX {0}".format(err)
+                import ipdb; ipdb.set_trace()
             raise CannotOverwriteExistingCassetteException(
                 "No match for the request (%r) was found. "
                 "Can't overwrite existing cassette (%r) in "
-                "your current record mode (%r)."
+                "your current record mode (%r). Most similar request was (%r). "
+                "It differed from the request according to (%r). \n\n\n(%r)"
                 % (self._vcr_request, self.cassette._path,
-                   self.cassette.record_mode)
+                   self.cassette.record_mode, most_similar_request,
+                   failing_matchers, most_similar_request_info)
             )
 
         # Otherwise, we should send the request, then get the response
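This is the heart of the ``better_log`` branch: when playback fails, the exception now reports the most similar recorded request and the matchers that rejected it, via the ``similar_requests`` and ``failing_matchers`` cassette helpers this branch introduces. Note that the hunk still carries Python 2-style ``print`` and ``ipdb`` debugging lines, so it is clearly work in progress. From the caller's side the change looks roughly like this (cassette path and URL are illustrative)::

    import requests
    import vcr
    from vcr.errors import CannotOverwriteExistingCassetteException

    try:
        with vcr.use_cassette('fixtures/cassettes/example.yaml', record_mode='none'):
            requests.get('http://httpbin.org/headers')   # not in the cassette
    except CannotOverwriteExistingCassetteException as exc:
        # The message now also names the most similar recorded request and the
        # matchers that rejected it, instead of just "no match was found".
        print(exc)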
@@ -307,7 +319,7 @@ class VCRConnection(object):
 
     def __init__(self, *args, **kwargs):
         if six.PY3:
             kwargs.pop('strict', None)  # apparently this is gone in py3
 
         # need to temporarily reset here because the real connection
         # inherits from the thing that we are mocking out. Take out
@@ -316,6 +328,27 @@ class VCRConnection(object):
         with force_reset():
             self.real_connection = self._baseclass(*args, **kwargs)
+
+    def __setattr__(self, name, value):
+        """
+        We need to define this because any attributes that are set on the
+        VCRConnection need to be propogated to the real connection.
+
+        For example, urllib3 will set certain attributes on the connection,
+        such as 'ssl_version'. These attributes need to get set on the real
+        connection to have the correct and expected behavior.
+
+        TODO: Separately setting the attribute on the two instances is not
+        ideal. We should switch to a proxying implementation.
+        """
+        try:
+            setattr(self.real_connection, name, value)
+        except AttributeError:
+            # raised if real_connection has not been set yet, such as when
+            # we're setting the real_connection itself for the first time
+            pass
+
+        super(VCRConnection, self).__setattr__(name, value)
 
 
 class VCRHTTPConnection(VCRConnection):
     '''A Mocked class for HTTP requests'''
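The new ``__setattr__`` mirrors every attribute onto the wrapped real connection, so settings that client libraries apply to the connection (urllib3's ``ssl_version``, for example) are not silently lost. The pattern in isolation, outside vcrpy::

    class RealConnection(object):
        pass


    class ProxyingConnection(object):
        def __init__(self):
            self.real_connection = RealConnection()

        def __setattr__(self, name, value):
            try:
                setattr(self.real_connection, name, value)
            except AttributeError:
                # real_connection itself is being set for the first time
                pass
            super(ProxyingConnection, self).__setattr__(name, value)


    conn = ProxyingConnection()
    conn.ssl_version = 'TLSv1_2'
    assert conn.real_connection.ssl_version == 'TLSv1_2'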
@@ -15,7 +15,7 @@ def vcr_fetch_impl(cassette, real_fetch_impl):
 
     @functools.wraps(real_fetch_impl)
     def new_fetch_impl(self, request, callback):
-        headers = dict(request.headers)
+        headers = request.headers.copy()
         if request.user_agent:
             headers.setdefault('User-Agent', request.user_agent)
 
vcr/util.py (34 changed lines)
@@ -1,4 +1,6 @@
 import collections
+import types
+
 
 # Shamelessly stolen from https://github.com/kennethreitz/requests/blob/master/requests/structures.py
 class CaseInsensitiveDict(collections.MutableMapping):
@@ -81,8 +83,9 @@ def partition_dict(predicate, dictionary):
 def compose(*functions):
     def composed(incoming):
         res = incoming
-        for function in functions[::-1]:
-            res = function(res)
+        for function in reversed(functions):
+            if function:
+                res = function(res)
         return res
     return composed
 
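``compose`` now skips falsy entries, so an optional transformer that was never configured (for instance a ``path_transformer`` left as ``None``, as in the config change at the top of this diff) can be passed straight through without a guard::

    from vcr.util import compose


    def add_yaml_suffix(path):
        return path + '.yaml'


    maybe_transform = None                   # e.g. a transformer the user did not set
    build_path = compose(add_yaml_suffix, maybe_transform)
    assert build_path('cassette') == 'cassette.yaml'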
@@ -90,3 +93,30 @@ def read_body(request):
     if hasattr(request.body, 'read'):
         return request.body.read()
     return request.body
+
+
+def auto_decorate(
+    decorator,
+    predicate=lambda name, value: isinstance(value, types.FunctionType)
+):
+    def maybe_decorate(attribute, value):
+        if predicate(attribute, value):
+            value = decorator(value)
+        return value
+
+    class DecorateAll(type):
+
+        def __setattr__(cls, attribute, value):
+            return super(DecorateAll, cls).__setattr__(
+                attribute, maybe_decorate(attribute, value)
+            )
+
+        def __new__(cls, name, bases, attributes_dict):
+            new_attributes_dict = dict(
+                (attribute, maybe_decorate(attribute, value))
+                for attribute, value in attributes_dict.items()
+            )
+            return super(DecorateAll, cls).__new__(
+                cls, name, bases, new_attributes_dict
+            )
+    return DecorateAll
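``auto_decorate`` is the machinery behind ``VCR.test_case()``: it returns a metaclass that passes every function attribute of a class through the given decorator. A self-contained sketch with a toy decorator::

    import six

    from vcr.util import auto_decorate


    def shout(func):
        def wrapper(*args, **kwargs):
            return func(*args, **kwargs).upper()
        return wrapper


    class Greeter(six.with_metaclass(auto_decorate(shout))):

        def greet(self):
            return 'hello'


    assert Greeter().greet() == 'HELLO'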