mirror of https://github.com/kevin1024/vcrpy.git synced 2025-12-09 17:15:35 +00:00

Compare commits


1 Commit

Author SHA1 Message Date
Luiz Menezes  4ce937978e  Add pytest-xdist  2018-05-06 19:23:56 -03:00
111 changed files with 3340 additions and 6564 deletions


@@ -1,7 +0,0 @@
coverage:
  status:
    project:
      default:
        target: 75
        # Allow 0% coverage regression
        threshold: 0


@@ -1,14 +0,0 @@
root = true
[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[Makefile]
indent_style = tab
[*.{yml,yaml}]
indent_size = 2


@@ -1,11 +0,0 @@
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: weekly
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: weekly


@@ -1,22 +0,0 @@
---
name: Codespell
on:
push:
branches: [master]
pull_request:
branches: [master]
permissions:
contents: read
jobs:
codespell:
name: Check for spelling errors
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Codespell
uses: codespell-project/actions-codespell@v2


@@ -1,23 +0,0 @@
name: Validate docs
on:
push:
paths:
- 'docs/**'
jobs:
validate:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/setup-python@v6
with:
python-version: "3.12"
- name: Install build dependencies
run: pip install -r docs/requirements.txt
- name: Rendering HTML documentation
run: sphinx-build -b html docs/ html
- name: Inspect html rendered
run: cat html/index.html


@@ -1,61 +0,0 @@
name: Test
on:
push:
branches:
- master
pull_request:
schedule:
- cron: "0 16 * * 5" # Every Friday 4pm
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-24.04
strategy:
fail-fast: false
matrix:
python-version:
- "3.10"
- "3.11"
- "3.12"
- "3.13"
- "pypy-3.11"
steps:
- uses: actions/checkout@v5
- name: Install uv
uses: astral-sh/setup-uv@v7
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python-version }}
allow-prereleases: true
- name: Install project dependencies
run: |
uv pip install --system --upgrade pip setuptools
uv pip install --system codecov '.[tests]'
uv pip check
- name: Allow creation of user namespaces (e.g. to the unshare command)
run: |
# .. so that we don't get error:
# unshare: write failed /proc/self/uid_map: Operation not permitted
# Idea from https://github.com/YoYoGames/GameMaker-Bugs/issues/6015#issuecomment-2135552784 .
sudo sysctl kernel.apparmor_restrict_unprivileged_userns=0
- name: Run online tests
run: ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m online
- name: Run offline tests with no access to the Internet
run: |
# We're using unshare to take Internet access
# away so that we'll notice whenever some new test
# is missing @pytest.mark.online decoration in the future
unshare --map-root-user --net -- \
sh -c 'ip link set lo up; ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m "not online"'
- name: Run coverage
run: codecov


@@ -1,62 +0,0 @@
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
# Licensed under the MIT license
name: Detect outdated pre-commit hooks
on:
schedule:
- cron: '0 16 * * 5' # Every Friday 4pm
# NOTE: This will drop all permissions from GITHUB_TOKEN except metadata read,
# and then (re)add the ones listed below:
permissions:
contents: write
pull-requests: write
jobs:
pre_commit_detect_outdated:
name: Detect outdated pre-commit hooks
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- name: Set up Python 3.12
uses: actions/setup-python@v6
with:
python-version: 3.12
- name: Install pre-commit
run: |-
pip install \
--disable-pip-version-check \
--no-warn-script-location \
--user \
pre-commit
echo "PATH=${HOME}/.local/bin:${PATH}" >> "${GITHUB_ENV}"
- name: Check for outdated hooks
run: |-
pre-commit autoupdate
git diff -- .pre-commit-config.yaml
- name: Create pull request from changes (if any)
id: create-pull-request
uses: peter-evans/create-pull-request@v7
with:
author: 'pre-commit <pre-commit@tools.invalid>'
base: master
body: |-
For your consideration.
:warning: Please **CLOSE AND RE-OPEN** this pull request so that [further workflow runs get triggered](https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs) for this pull request.
branch: precommit-autoupdate
commit-message: "pre-commit: Autoupdate"
delete-branch: true
draft: true
labels: enhancement
title: "pre-commit: Autoupdate"
- name: Log pull request URL
if: "${{ steps.create-pull-request.outputs.pull-request-url }}"
run: |
echo "Pull request URL is: ${{ steps.create-pull-request.outputs.pull-request-url }}"


@@ -1,20 +0,0 @@
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
# Licensed under the MIT license
name: Run pre-commit
on:
- pull_request
- push
- workflow_dispatch
jobs:
pre-commit:
name: Run pre-commit
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/setup-python@v6
with:
python-version: 3.12
- uses: pre-commit/action@v3.0.1

5  .gitignore vendored

@@ -1,17 +1,12 @@
*.pyc
.tox
.cache
.pytest_cache/
build/
dist/
*.egg/
.coverage
coverage.xml
htmlcov/
*.egg-info/
pytestdebug.log
pip-wheel-metadata/
.python-version
fixtures/
/docs/_build


@@ -1,17 +0,0 @@
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
# Licensed under the MIT license
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.14.8
    hooks:
      - id: ruff
        args: ["--output-format=full"]
      - id: ruff-format
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v6.0.0
    hooks:
      - id: check-merge-conflict
      - id: end-of-file-fixer
      - id: trailing-whitespace


@@ -1,24 +0,0 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Set the version of Python and other tools you might need
build:
os: ubuntu-24.04
tools:
python: "3.12"
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/conf.py
# We recommend specifying your dependencies to enable reproducible builds:
# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
install:
- requirements: docs/requirements.txt
- method: pip
path: .

43  .travis.yml Normal file

@@ -0,0 +1,43 @@
language: python
sudo: false
before_install: openssl version
env:
global:
- secure: AifoKzwhjV94cmcQZrdQmqRu/9rkZZvWpwBv1daeAQpLOKFPGsOm3D+x2cSw9+iCfkgDZDfqQVv1kCaFVxTll8v8jTq5SJdqEY0NmGWbj/UkNtShh609oRDsuzLxAEwtVKYjf/h8K2BRea+bl1tGkwZ2vtmYS6dxNlAijjWOfds=
- secure: LBSEg/gMj4u4Hrpo3zs6Y/1mTpd2RtcN49mZIFgTdbJ9IhpiNPqcEt647Lz94F9Eses2x2WbNuKqZKZZReY7QLbEzU1m0nN5jlaKrjcG5NR5clNABfFFyhgc0jBikyS4abAG8jc2efeaTrFuQwdoF4sE8YiVrkiVj2X5Xoi6sBk=
matrix:
- TOX_SUFFIX="flakes"
- TOX_SUFFIX="requests27"
- TOX_SUFFIX="httplib2"
- TOX_SUFFIX="boto3"
- TOX_SUFFIX="urllib3121"
- TOX_SUFFIX="tornado4"
- TOX_SUFFIX="aiohttp"
matrix:
allow_failures:
- env: TOX_SUFFIX="boto3"
exclude:
# Only run flakes on a single Python 2.x and a single 3.x
- env: TOX_SUFFIX="flakes"
python: 3.4
- env: TOX_SUFFIX="flakes"
python: 3.5
- env: TOX_SUFFIX="flakes"
python: pypy
- env: TOX_SUFFIX="flakes"
python: "pypy3.5-5.9.0"
- env: TOX_SUFFIX="aiohttp"
python: 2.7
- env: TOX_SUFFIX="aiohttp"
python: pypy
python:
- 2.7
- 3.5
- 3.6
- pypy
- "pypy3.5-5.9.0"
install:
- pip install tox-travis
- if [[ $TOX_SUFFIX != 'flakes' ]]; then python setup.py install ; fi
script:
- tox -e "${TOX_SUFFIX}"


@@ -1,5 +1,6 @@
include README.rst
include LICENSE.txt
include tox.ini
recursive-include tests *
recursive-exclude * __pycache__
recursive-exclude * *.py[co]


@@ -1,16 +1,10 @@
|PyPI| |Python versions| |Build Status| |Waffle Ready| |Gitter|
###########
VCR.py 📼
###########
|PyPI| |Python versions| |Build Status| |CodeCov| |Gitter|
----
.. image:: https://vcrpy.readthedocs.io/en/latest/_images/vcr.svg
:alt: vcr.py logo
VCR.py
======
.. image:: https://raw.github.com/kevin1024/vcrpy/master/vcr.png
:alt: vcr.py
This is a Python version of `Ruby's VCR
library <https://github.com/vcr/vcr>`__.
@@ -47,26 +41,33 @@ VCR.py will detect the absence of a cassette file and once again record
all HTTP interactions, which will update them to correspond to the new
API.
Usage with Pytest
-----------------
Support
-------
VCR.py works great with the following HTTP clients:
- requests
- aiohttp
- urllib3
- tornado
- urllib2
- boto3
There is a library that provides pytest fixtures for VCR.py, called pytest-recording: https://github.com/kiwicom/pytest-recording
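For reference (not part of this diff), VCR.py's own ``use_cassette`` decorator also works directly on pytest test functions; a minimal sketch, with the cassette path and target URL purely illustrative:

.. code:: python

    import requests
    import vcr

    @vcr.use_cassette('fixtures/vcr_cassettes/pytest_example.yaml')
    def test_example():
        # The first run records the HTTP interaction; later runs replay it from the cassette.
        response = requests.get('https://httpbin.org/get')
        assert response.status_code == 200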
License
-------
=======
This library uses the MIT license. See `LICENSE.txt <LICENSE.txt>`__ for
more details
.. |PyPI| image:: https://img.shields.io/pypi/v/vcrpy.svg
:target: https://pypi.python.org/pypi/vcrpy
.. |Python versions| image:: https://img.shields.io/pypi/pyversions/vcrpy.svg
:target: https://pypi.python.org/pypi/vcrpy
.. |Build Status| image:: https://github.com/kevin1024/vcrpy/actions/workflows/main.yml/badge.svg
:target: https://github.com/kevin1024/vcrpy/actions
:target: https://pypi.python.org/pypi/vcrpy-unittest
.. |Python versions| image:: https://img.shields.io/pypi/pyversions/vcrpy-unittest.svg
:target: https://pypi.python.org/pypi/vcrpy-unittest
.. |Build Status| image:: https://secure.travis-ci.org/kevin1024/vcrpy.png?branch=master
:target: http://travis-ci.org/kevin1024/vcrpy
.. |Waffle Ready| image:: https://badge.waffle.io/kevin1024/vcrpy.png?label=ready&title=waffle
:target: https://waffle.io/kevin1024/vcrpy
.. |Gitter| image:: https://badges.gitter.im/Join%20Chat.svg
:alt: Join the chat at https://gitter.im/kevin1024/vcrpy
:target: https://gitter.im/kevin1024/vcrpy?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge
.. |CodeCov| image:: https://codecov.io/gh/kevin1024/vcrpy/branch/master/graph/badge.svg
:target: https://codecov.io/gh/kevin1024/vcrpy
:alt: Code Coverage Status

BIN  docs/_static/vcr.png vendored

Binary file not shown. (Before: 114 KiB)

27  docs/_static/vcr.svg vendored

@@ -1,27 +0,0 @@
<svg width="634" height="346" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M0 10C0 4.477 4.477 0 10 0h614c5.523 0 10 4.477 10 10v326c0 5.523-4.477 10-10 10H10c-5.523 0-10-4.477-10-10V10z" fill="#181B19"/>
<rect x="36" y="83" width="561" height="234" rx="19" fill="#262927"/>
<mask id="a" maskUnits="userSpaceOnUse" x="66" y="104" width="503" height="192">
<path d="M161.434 120h310.431m-229.757 80c0 44.183-35.841 80-80.054 80S82 244.183 82 200s35.841-80 80.054-80 80.054 35.817 80.054 80zm310.432 0c0 44.183-35.841 80-80.054 80s-80.054-35.817-80.054-80 35.841-80 80.054-80 80.054 35.817 80.054 80z" stroke="#27DD7C" stroke-width="31" stroke-dasharray="503" stroke-dashoffset="0">
<animate attributeName="stroke-dashoffset" to="503" begin="1s" dur="1s"/>
<animate attributeName="stroke" to="#0" begin="1s" dur="1s"/>
<animate attributeName="stroke-dashoffset" from="503" to="0" begin="2s" dur="3s" />
<animate attributeName="stroke" from="#000" to="#27DD7C" begin="2s" dur="3s"/>
</path>
</mask>
<g mask="url(#a)">
<path fill="url(#paint0_linear)" d="M64 102h507v196H64z"/>
</g>
<path d="M0 10C0 4.477 4.477 0 10 0h614c5.523 0 10 4.477 10 10v40H0V10z" fill="#262927"/>
<rect x="189" y="6" width="256" height="38" rx="2" fill="#fff"/>
<path stroke="#E6E8E6" d="M198 14.5h238M198 25.5h238M198 36.5h238"/>
<path d="M261.207 18.61c-.443 0-.762-.098-.957-.294-.182-.195-.273-.54-.273-1.035 0-.494.091-.84.273-1.035.195-.195.514-.293.957-.293h6.914c.443 0 .755.098.938.293.195.195.293.54.293 1.035 0 .495-.098.84-.293 1.035-.183.196-.495.293-.938.293h-2.5l5.332 12.735 5.391-12.735h-1.699c-.443 0-.762-.097-.957-.293-.183-.195-.274-.54-.274-1.035 0-.494.091-.84.274-1.035.195-.195.514-.293.957-.293h6.132c.443 0 .756.098.938.293.195.195.293.54.293 1.035 0 .495-.098.84-.293 1.035-.182.196-.495.293-.938.293h-1.23l-6.309 14.551c-.182.443-.449.762-.8.957-.352.209-.853.313-1.504.313s-1.146-.105-1.485-.313c-.338-.208-.599-.527-.781-.957l-6.25-14.55h-1.211zm38.136 3.847a3.73 3.73 0 00-.352-1.621 3.392 3.392 0 00-1.054-1.27c-.456-.364-1.022-.644-1.7-.84-.677-.208-1.464-.312-2.363-.312-.95 0-1.829.163-2.637.488a6.03 6.03 0 00-2.05 1.367 6.354 6.354 0 00-1.348 2.09c-.325.795-.488 1.667-.488 2.618 0 .95.15 1.829.449 2.636a6.197 6.197 0 001.27 2.07 5.858 5.858 0 002.011 1.368c.781.325 1.647.488 2.598.488 1.198 0 2.357-.182 3.476-.547a10.295 10.295 0 003.086-1.62c.339-.261.658-.359.957-.294.3.065.586.326.86.781.221.352.312.716.273 1.094-.026.378-.228.703-.605.977-1.055.78-2.285 1.393-3.692 1.836a14.417 14.417 0 01-4.355.664c-1.432 0-2.728-.235-3.887-.703-1.159-.482-2.148-1.146-2.969-1.993a8.805 8.805 0 01-1.894-2.988c-.443-1.159-.664-2.415-.664-3.77 0-1.354.228-2.604.683-3.75a9.008 9.008 0 011.914-3.007 8.783 8.783 0 012.969-1.973c1.159-.482 2.442-.723 3.848-.723 1.185 0 2.259.215 3.223.645a6.591 6.591 0 012.441 1.797v-1.172c0-.456.104-.781.312-.977.222-.195.606-.293 1.153-.293s.924.098 1.133.293c.221.196.332.521.332.977v5.664c0 .456-.111.781-.332.977-.209.195-.586.293-1.133.293s-.931-.098-1.153-.293c-.208-.196-.312-.521-.312-.977zm12.394-3.848h-2.481c-.442 0-.761-.097-.957-.293-.182-.195-.273-.54-.273-1.035 0-.494.091-.84.273-1.035.196-.195.515-.293.957-.293h3.985c.442 0 .755.098.937.293.195.195.293.54.293 1.035v3.946a13.604 13.604 0 011.641-2.364 11.627 11.627 0 011.992-1.797 8.754 8.754 0 012.187-1.132 6.569 6.569 0 012.246-.41 5.53 5.53 0 012.852.78c.378.222.619.495.723.821.104.326.065.73-.117 1.21-.183.47-.417.802-.704.997-.273.195-.599.182-.976-.039a4.257 4.257 0 00-2.051-.527c-.586 0-1.185.11-1.797.332a7.489 7.489 0 00-1.758.898c-.56.378-1.087.84-1.582 1.387a10.586 10.586 0 00-1.289 1.777 9.542 9.542 0 00-.859 2.09 8.5 8.5 0 00-.313 2.305v3.789h6.582c.443 0 .756.097.938.293.195.195.293.54.293 1.035 0 .495-.098.84-.293 1.035-.182.195-.495.293-.938.293H308.67c-.442 0-.761-.098-.957-.293-.182-.195-.273-.54-.273-1.035 0-.495.091-.84.273-1.035.196-.196.515-.293.957-.293h3.067V18.609zm19.561 0h-1.601c-.443 0-.762-.097-.957-.293-.182-.195-.274-.54-.274-1.035 0-.494.092-.84.274-1.035.195-.195.514-.293.957-.293h3.105c.443 0 .756.098.938.293.195.195.293.54.293 1.035v1.563c.807-1.055 1.745-1.869 2.812-2.442 1.081-.586 2.272-.879 3.575-.879 1.21 0 2.33.209 3.359.625a7.4 7.4 0 012.656 1.758c.755.768 1.348 1.706 1.778 2.813.429 1.107.644 2.363.644 3.77 0 1.405-.215 2.662-.644 3.769-.43 1.107-1.023 2.044-1.778 2.812a7.62 7.62 0 01-2.656 1.778c-1.029.403-2.149.605-3.359.605-1.303 0-2.468-.26-3.497-.781-1.015-.534-1.914-1.29-2.695-2.266v8.614h4.336c.443 0 .755.097.938.292.195.196.293.54.293 1.036 0 .494-.098.84-.293 1.035-.183.195-.495.293-.938.293h-9.453c-.443 0-.762-.098-.957-.293-.182-.196-.274-.54-.274-1.035 0-.495.092-.84.274-1.035.195-.196.514-.294.957-.294h2.187V18.61zm2.93 5.88c0 .963.15 1.822.449 2.577.3.756.716 1.394 1.25 1.915a5.536 5.536 0 001.895 1.171c.742.274 1.543.41 2.402.41.886 0 1.673-.15 2.364-.449.69-.3 
1.269-.716 1.738-1.25a5.696 5.696 0 001.074-1.933 7.692 7.692 0 00.371-2.442c0-.885-.124-1.699-.371-2.441a5.485 5.485 0 00-1.074-1.914 4.738 4.738 0 00-1.738-1.27c-.691-.3-1.478-.449-2.364-.449-.859 0-1.66.137-2.402.41a5.391 5.391 0 00-1.895 1.192c-.534.507-.95 1.139-1.25 1.894-.299.755-.449 1.615-.449 2.578zm37.687-8.536c.443 0 .755.098.937.293.196.195.293.54.293 1.035 0 .495-.097.84-.293 1.035-.182.196-.494.293-.937.293h-1.211l-7.988 16.368c-.625 1.276-1.237 2.356-1.836 3.242-.599.885-1.244 1.608-1.934 2.168s-1.452.97-2.285 1.23c-.82.274-1.764.43-2.832.469-.378.013-.697-.072-.957-.254-.248-.182-.397-.534-.449-1.055-.052-.573.013-.983.195-1.23.195-.247.469-.384.82-.41.899-.052 1.66-.176 2.285-.371a4.375 4.375 0 001.739-.977c.521-.469 1.028-1.12 1.523-1.953.508-.82 1.087-1.888 1.738-3.203l-7.148-14.024h-1.23c-.443 0-.762-.097-.958-.293-.182-.195-.273-.54-.273-1.035 0-.494.091-.84.273-1.035.196-.195.515-.293.958-.293h6.914c.442 0 .755.098.937.293.195.195.293.54.293 1.035 0 .495-.098.84-.293 1.035-.182.196-.495.293-.937.293h-2.5l5.507 10.977 5.254-10.977h-1.738c-.443 0-.762-.097-.957-.293-.182-.195-.273-.54-.273-1.035 0-.494.091-.84.273-1.035.195-.195.514-.293.957-.293h6.133z" fill="#262927"/>
<defs>
<linearGradient id="paint0_linear" x1="64" y1="298" x2="544.524" y2="50.579" gradientUnits="userSpaceOnUse">
<stop stop-color="#27DD70"/>
<stop offset="1" stop-color="#27DDA6"/>
</linearGradient>
</defs>
</svg>

(Before: 6.2 KiB)


@@ -33,8 +33,6 @@ consider part of the API. The fields are as follows:
been played back.
- ``responses_of(request)``: Access the responses that match a given
request
- ``allow_playback_repeats``: A boolean indicating whether responses
can be played back more than once.
The ``Request`` object has the following properties:
@@ -71,7 +69,7 @@ Finally, register your class with VCR to use your new serializer.
import vcr
class BogoSerializer:
class BogoSerializer(object):
"""
Must implement serialize() and deserialize() methods
"""
@@ -99,12 +97,8 @@ Create your own method with the following signature
def my_matcher(r1, r2):
Your method receives the two requests and can either:
- Use an ``assert`` statement: return None if they match and raise ``AssertionError`` if not.
- Return a boolean: ``True`` if they match, ``False`` if not.
Note: in order to have good feedback when a matcher fails, we recommend using an ``assert`` statement with a clear error message.
Your method receives the two requests and must return ``True`` if they
match, ``False`` if they don't.
Finally, register your method with VCR to use your new request matcher.
@@ -113,8 +107,7 @@ Finally, register your method with VCR to use your new request matcher.
import vcr
def jurassic_matcher(r1, r2):
    assert r1.uri == r2.uri and 'JURASSIC PARK' in r1.body, \
        'required string (JURASSIC PARK) not found in request body'
    return r1.uri == r2.uri and 'JURASSIC PARK' in r1.body
my_vcr = vcr.VCR()
my_vcr.register_matcher('jurassic', jurassic_matcher)
@@ -132,12 +125,11 @@ Finally, register your method with VCR to use your new request matcher.
Register your own cassette persister
------------------------------------
Create your own persistence class, see the example below:
Create your own persistence class, see the :ref:`persister_example`.
Your custom persister must implement both ``load_cassette`` and ``save_cassette``
methods. The ``load_cassette`` method must return a deserialized cassette or raise
either ``CassetteNotFoundError`` if no cassette is found, or ``CassetteDecodeError``
if the cassette cannot be successfully deserialized.
``ValueError`` if no cassette is found.
Once the persister class is defined, register with VCR like so...
@@ -146,7 +138,7 @@ Once the persister class is defined, register with VCR like so...
import vcr
my_vcr = vcr.VCR()
class CustomerPersister:
class CustomerPersister(object):
    # implement Persister methods...
my_vcr.register_persister(CustomPersister)
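A slightly fuller sketch of the persister interface described above (not part of this diff): the method signatures follow the built-in filesystem persister as I understand it, the import path for ``CassetteNotFoundError`` is an assumption, and the fallback covers older releases that expect a plain ``ValueError``.

.. code:: python

    import os
    import vcr

    try:
        # Newer vcrpy releases; exact import location assumed here.
        from vcr.persisters.filesystem import CassetteNotFoundError
    except ImportError:
        # Older releases expect a plain ValueError when no cassette exists.
        CassetteNotFoundError = ValueError

    class DiskPersister:
        """Illustrative persister that stores cassettes as plain files on disk."""

        @classmethod
        def load_cassette(cls, cassette_path, serializer):
            if not os.path.exists(cassette_path):
                raise CassetteNotFoundError(cassette_path)
            with open(cassette_path) as f:
                return serializer.deserialize(f.read())

        @classmethod
        def save_cassette(cls, cassette_path, cassette_dict, serializer):
            os.makedirs(os.path.dirname(cassette_path) or '.', exist_ok=True)
            with open(cassette_path, 'w') as f:
                f.write(serializer.serialize(cassette_dict))

    my_vcr = vcr.VCR()
    my_vcr.register_persister(DiskPersister)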
@@ -189,7 +181,7 @@ of post data parameters to filter.
.. code:: python
with my_vcr.use_cassette('test.yml', filter_post_data_parameters=['api_key']):
with my_vcr.use_cassette('test.yml', filter_post_data_parameters=['client_secret']):
    requests.post('http://api.com/postdata', data={'api_key': 'secretstring'})
Advanced use of filter_headers, filter_query_parameters and filter_post_data_parameters
@@ -229,25 +221,24 @@ Custom Request filtering
~~~~~~~~~~~~~~~~~~~~~~~~
If none of these covers your request filtering needs, you can register a
callback with the ``before_record_request`` configuration option to
manipulate the HTTP request before adding it to the cassette, or return
``None`` to ignore it entirely. Here is an example that will never record
requests to the ``'/login'`` path:
callback that will manipulate the HTTP request before adding it to the
cassette. Use the ``before_record_request`` configuration option to do this.
Here is an example that will never record requests to the /login
endpoint.
.. code:: python
def before_record_cb(request):
    if request.path == '/login':
        return None
    return request
    if request.path != '/login':
        return request
my_vcr = vcr.VCR(
    before_record_request=before_record_cb,
    before_record_request = before_record_cb,
)
with my_vcr.use_cassette('test.yml'):
    # your http code here
You can also mutate the request using this callback. For example, you
You can also mutate the response using this callback. For example, you
could remove all query parameters from any requests to the ``'/login'``
path.
@@ -255,7 +246,7 @@ path.
def scrub_login_request(request):
    if request.path == '/login':
        request.uri, _ = urllib.splitquery(request.uri)
        request.uri, _ = urllib.splitquery(response.uri)
    return request
my_vcr = vcr.VCR(
@@ -267,12 +258,9 @@ path.
Custom Response Filtering
~~~~~~~~~~~~~~~~~~~~~~~~~
You can also do response filtering with the
``before_record_response`` configuration option. Its usage is
similar to the above ``before_record_request`` - you can
mutate the response, or return ``None`` to avoid recording
the request and response altogether. For example to hide
sensitive data from the response body:
VCR.py also supports response filtering with the
``before_record_response`` keyword argument. Its usage is similar to
that of ``before_record``:
.. code:: python
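The example body falls outside this hunk; a minimal sketch of such a callback follows (the dictionary keys assume the recorded-cassette structure, and the token value is purely illustrative):

.. code:: python

    import vcr

    def scrub_response(response):
        # The recorded response is a plain dict; redact a secret from its body.
        # The body may be bytes or str depending on the client and serializer.
        body = response['body']['string']
        if isinstance(body, bytes):
            response['body']['string'] = body.replace(b'secret-token', b'REDACTED')
        elif isinstance(body, str):
            response['body']['string'] = body.replace('secret-token', 'REDACTED')
        return response

    my_vcr = vcr.VCR(
        before_record_response=scrub_response,
    )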
@@ -314,8 +302,8 @@ in a few ways:
or 0.0.0.0.
- Set the ``ignore_hosts`` configuration option to a list of hosts to
ignore
- Add a ``before_record_request`` or ``before_record_response`` callback
that returns ``None`` for requests you want to ignore (see above).
- Add a ``before_record`` callback that returns None for requests you
want to ignore
Requests that are ignored by VCR will not be saved in a cassette, nor
played back from a cassette. VCR will completely ignore those requests
@@ -376,67 +364,3 @@ cassette names, use ``VCR.ensure_suffix`` as follows:
@my_vcr.use_cassette
def my_test_function():
Rewind Cassette
---------------
VCR.py allows you to rewind a cassette in order to replay it inside the same function/test.
.. code:: python
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml') as cass:
    response = urllib2.urlopen('http://www.zombo.com/').read()
    assert cass.all_played
    cass.rewind()
    assert not cass.all_played
Playback Repeats
----------------
By default, each response in a cassette can only be matched and played back
once while the cassette is in use, unless the cassette is rewound.
If you want to allow playback repeats without rewinding the cassette, use
the Cassette ``allow_playback_repeats`` option.
.. code:: python
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml', allow_playback_repeats=True) as cass:
    for x in range(10):
        response = urllib2.urlopen('http://www.zombo.com/').read()
    assert cass.all_played
Discards Cassette on Errors
---------------------------
By default, VCR will save the cassette file even when an error occurs inside
the enclosing context/test.
If you want to save the cassette only when the test succeeds, set the Cassette
``record_on_exception`` option to ``False``.
.. code:: python
try:
    my_vcr = VCR(record_on_exception=False)
    with my_vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml') as cass:
        response = urllib2.urlopen('http://www.zombo.com/').read()
        raise RuntimeError("Oops, something happened")
except RuntimeError:
    pass
# Since there was an exception, the cassette file hasn't been created.
assert not os.path.exists('fixtures/vcr_cassettes/synopsis.yaml')
Drop unused requests
--------------------
Even if an HTTP request is changed or removed from tests, previously recorded
interactions remain in the cassette file. If you set the ``drop_unused_requests``
option to ``True``, VCR will not save old HTTP interactions that are no longer used.
.. code:: python
my_vcr = VCR(drop_unused_requests=True)
with my_vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
    ... # your HTTP interactions here


@@ -1,325 +1,166 @@
Changelog
---------
For a full list of triaged issues, bugs and PRs and what release they are targeted for please see the following link.
`ROADMAP MILESTONES <https://github.com/kevin1024/vcrpy/milestones>`_
All help in providing PRs to close out bug issues is appreciated, even if that is just providing a repo that fully replicates an issue. We have very generous contributors who have added these to bug issues, which meant another contributor could pick up the bug and close it out.
- 8.0.0
- BREAKING: Drop support for Python 3.9 (major version bump) - thanks @jairhenrique
- BREAKING: Drop support for urllib3 < 2 - fixes CVE warnings from urllib3 1.x (#926, #880) - thanks @jairhenrique
- New feature: ``drop_unused_requests`` option to remove unused interactions from cassettes (#763) - thanks @danielnsilva
- Rewrite httpx support to patch httpcore instead of httpx (#943) - thanks @seowalex
- Fixes ``httpx.ResponseNotRead`` exceptions (#832, #834)
- Fixes ``KeyError: 'follow_redirects'`` (#945)
- Adds support for custom httpx transports
- Fix HTTPS proxy handling - proxy address no longer ends up in cassette URIs (#809, #914) - thanks @alga
- Fix ``iscoroutinefunction`` deprecation warning on Python 3.14 - thanks @kloczek
- Only log message if response is appended - thanks @talfus-laddus
- Optimize urllib.parse calls - thanks @Martin-Brunthaler
- Fix CI for Ubuntu 24.04 - thanks @hartwork
- Various CI improvements: migrate to uv, update GitHub Actions - thanks @jairhenrique
- Various linting and test improvements - thanks @jairhenrique and @hartwork
- 7.0.0
- Drop support for python 3.8 (major version bump) - thanks @jairhenrique
- Various linting and test fixes - thanks @jairhenrique
- Bugfix for urllib2>=2.3.0 - missing version_string (#888)
- Bugfix for asyncio.run - thanks @alekeik1
- 6.0.2
- Ensure body is consumed only once (#846) - thanks @sathieu
- Permit urllib3 2.x for non-PyPy Python >=3.10
- Fix typos in test commands - thanks @chuckwondo
- Several test and workflow improvements - thanks @hartwork and @graingert
- 6.0.1
- Bugfix to the Tornado cassette generator (thanks @graingert)
- 6.0.0
- BREAKING: Fix issue with httpx support (thanks @parkerhancock) in #784. NOTE: You may have to recreate some of your cassettes produced in previous releases due to the binary format being saved incorrectly in previous releases
- BREAKING: Drop support for `boto` (vcrpy still supports boto3, but is dropping the deprecated `boto` support in this release. (thanks @jairhenrique)
- Fix compatibility issue with Python 3.12 (thanks @hartwork)
- Drop simplejson (fixes some compatibility issues) (thanks @jairhenrique)
- Run CI on Python 3.12 and PyPy 3.9-3.10 (thanks @mgorny)
- Various linting and docs improvements (thanks @jairhenrique)
- Tornado fixes (thanks @graingert)
- 5.1.0
- Use ruff for linting (instead of current flake8/isort/pyflakes) - thanks @jairhenrique
- Enable rule B (flake8-bugbear) on ruff - thanks @jairhenrique
- Configure read the docs V2 - thanks @jairhenrique
- Fix typo in docs - thanks @quasimik
- Make json.loads of Python >=3.6 decode bytes by itself - thanks @hartwork
- Fix body matcher for chunked requests (fixes #734) - thanks @hartwork
- Fix query param filter for aiohttp (fixes #517) - thanks @hartwork and @salomvary
- Remove unnecessary dependency on six. - thanks @charettes
- build(deps): update sphinx requirement from <7 to <8 - thanks @jairhenrique
- Add action to validate docs - thanks @jairhenrique
- Add editorconfig file - thanks @jairhenrique
- Drop iscoroutinefunction fallback function for unsupported python thanks @jairhenrique
- 5.0.0
- BREAKING CHANGE: Drop support for Python 3.7. 3.7 is EOL as of 6/27/23 Thanks @jairhenrique
- BREAKING CHANGE: Custom Cassette persisters no longer catch ValueError. If you have implemented a custom persister (has anyone implemented a custom persister? Let us know!) then you will need to throw a CassetteNotFoundError when unable to find a cassette. See #681 for discussion and reason for this change. Thanks @amosjyng for the PR and the review from @hartwork
- 4.4.0
- HUGE thanks to @hartwork for all the work done on this release!
- Bring vcr/unittest in to vcrpy as a full feature of vcr instead of a separate library. Big thanks to @hartwork for doing this and to @agriffis for originally creating the library
- Make decompression robust towards already decompressed input (thanks @hartwork)
- Bugfix: Add read1 method (fixes compatibility with biopython), thanks @mghantous
- Bugfix: Prevent filters from corrupting request (thanks @abramclark)
- Bugfix: Add support for `response.raw.stream()` to fix urllib v2 compat
- Bugfix: Replace `assert` with `raise AssertionError`: fixes support for `PYTHONOPTIMIZE=1`
- Add pytest.mark.online to run test suite offline, thanks @jspricke
- use python3 and pip3 binaries to ease debian packaging (thanks @hartwork)
- Add codespell (thanks @mghantous)
- 4.3.1
- Support urllib3 v1 and v2. NOTE: there is an issue running urllib3 v2 on
Python older than 3.10, so this is currently blocked in the requirements.
Hopefully we can resolve this situation in the future. Thanks to @shifqu,
hartwork, jairhenrique, pquentin, and vEpiphyte for your work on this.
- 4.3.0
- Add support for Python 3.11 (Thanks @evgeni)
- Drop support for botocore <1.11.0 and requests <2.16.2 (thanks @hartwork)
- Bugfix: decode_compressed_response raises exception on empty responses. Thanks @CharString
- Don't save requests from decorated tests if decorated test fails (thanks @dan-passaro)
- Fix not calling all the exit stack when record_on_exception is False (thanks @Terseus)
- Various CI, documentation, testing, and formatting improvements (Thanks @jairhenrique, @dan-passaro, @hartwork, and Terseus)
- 4.2.1
- Fix a bug where the first request in a redirect chain was not being recorded with aiohttp
- Various typos and small fixes, thanks @jairhenrique, @timgates42
- 4.2.0
- Drop support for python < 3.7, thanks @jairhenrique, @IvanMalison, @AthulMuralidhar
- Various aiohttp bugfixes (thanks @pauloromeira and boechat107)
- Bugfix: filter_post_data_parameters not working with aiohttp. Thank you @vprakashplanview, @scop, @jairhenrique, and @cinemascop89
- Bugfix: Some random misspellings (thanks @scop)
- Migrate the CI suite to Github Actions from Travis (thanks @jairhenrique and @cclauss)
- Various documentation and code misspelling fixes (thanks @scop and @Justintime50)
- Bugfix: httpx support (select between allow_redirects/follow_redirects) (thanks @immerrr)
- 4.1.1
- Fix HTTPX support for versions greater than 0.15 (thanks @jairhenrique)
- Include a trailing newline on json cassettes (thanks @AaronRobson)
- 4.1.0
- Add support for httpx!! (thanks @herdigiorgi)
- Add the new `allow_playback_repeats` option (thanks @tysonholub)
- Several aiohttp improvements (cookie support, multiple headers with same key) (Thanks @pauloromeira)
- Use enums for record modes (thanks @aaronbannin)
- Bugfix: Do not redirect on 304 in aiohttp (Thanks @royjs)
- Bugfix: Fix test suite by switching to mockbin (thanks @jairhenrique)
- 4.0.2
- Fix mock imports as reported in #504 by @llybin. Thank you.
- 4.0.1
- Fix logo alignment for PyPI
- 4.0.0
- Remove Python2 support (@hugovk)
- Add Python 3.8 TravisCI support (@neozenith)
- Updated the logo to a modern material design (@sean0x42)
- 3.0.0
- This release is a breaking change as it changes how aiohttp follows redirects and your cassettes may need to be re-recorded with this update.
- Fix multiple requests being replayed per single request in aiohttp stub #495 (@nickdirienzo)
- Add support for `request_info` on mocked responses in aiohttp stub #495 (@nickdirienzo)
- doc: fixed variable name (a -> cass) in an example for rewind #492 (@yarikoptic)
- 2.1.1
- Format code with black (@neozenith)
- Use latest pypy3 in Travis (@hugovk)
- Improve documentation about custom matchers (@gward)
- Fix exception when body is empty (@keithprickett)
- Add `pytest-recording` to the documentation as an alternative Pytest plugin (@Stranger6667)
- Fix yarl and python3.5 version issue (@neozenith)
- Fix header matcher for boto3 - fixes #474 (@simahawk)
- 2.1.0
- Add a `rewind` method to reset a cassette (thanks @khamidou)
- New error message with more details on why the cassette failed to play a request (thanks @arthurHamon2, @neozenith)
- Handle connect tunnel URI (thanks @jeking3)
- Add code coverage to the project (thanks @neozenith)
- Drop support to python 3.4
- Add deprecation warning on python 2.7, next major release will drop python 2.7 support
- Fix build problems on requests tests (thanks to @dunossauro)
- Fix matching on 'body' failing when Unicode symbols are present in them (thanks @valgur)
- Fix bugs on aiohttp integration (thanks @graingert, @steinnes, @stj, @lamenezes, @lmazuel)
- Fix Biopython incompatibility (thanks @rishab121)
- Fix Boto3 integration (thanks @1oglop1, @arthurHamon2)
- 2.0.1
- Fix bug when using vcrpy with python 3.4
- 2.0.0
- Support python 3.7 (fix httplib2 and urllib2, thanks @felixonmars)
- [#356] Fixes `before_record_response` so the original response isn't changed (thanks @kgraves)
- Fix requests stub when using proxy (thanks @samuelfekete @daneoshiga)
- (only for aiohttp stub) Drop support for python 3.4 asyncio.coroutine (aiohttp doesn't support it anymore)
- Fix aiohttp stub to work with aiohttp client (thanks @stj)
- Fix aiohttp stub to accept content type passed
- Improve docs (thanks @adamchainz)
- 1.13.0
- Fix support to latest aiohttp version (3.3.2). Fix content-type bug in aiohttp stub. Save URL with query params properly when using aiohttp.
- 1.12.0
- Fix support to latest aiohttp version (3.2.1), Adapted setup to PEP508, Support binary responses on aiohttp, Dropped support for EOL python versions (2.6 and 3.3)
- 1.11.1
- Fix compatibility with newest requests and urllib3 releases
- 1.11.0
- Allow injection of persistence methods + bugfixes (thanks @j-funk and @IvanMalison),
- Support python 3.6 + CI tests (thanks @derekbekoe and @graingert),
- Support pytest-asyncio coroutines (thanks @graingert)
- 1.10.5
- Added a fix to httplib2 (thanks @carlosds730), Fix an issue with
- aiohttp (thanks @madninja), Add missing requirement yarl (thanks @lamenezes),
- Remove duplicate mock triple (thanks @FooBarQuaxx)
- 1.10.4
- Fix an issue with asyncio aiohttp (thanks @madninja)
- 1.10.3
- Fix some issues with asyncio and params (thanks @anovikov1984 and @lamenezes)
- Fix some issues with cassette serialize / deserialize and empty response bodies (thanks @gRoussac and @dz0ny)
- 1.10.2
- Fix 1.10.1 release - add aiohttp support back in
- 1.10.1
- [bad release] Fix build for Fedora package + python2 (thanks @puiterwijk and @lamenezes)
- 1.10.0
- Add support for aiohttp (thanks @lamenezes)
- 1.9.0
- Add support for boto3 (thanks @desdm, @foorbarna).
- Fix deepcopy issue for response headers when `decode_compressed_response` is enabled (thanks @nickdirienzo)
- 1.8.0
- Fix for Serialization errors with JSON adapter (thanks @aliaksandrb).
- Avoid concatenating bytes with strings (thanks @jaysonsantos).
- Exclude __pycache__ dirs & compiled files in sdist (thanks @koobs).
- Fix Tornado support behavior for Tornado 3 (thanks @abhinav).
- decode_compressed_response option and filter (thanks @jayvdb).
- 1.7.4 [#217]
- Make use_cassette decorated functions actually return a value (thanks @bcen).
- [#199] Fix path transformation defaults.
- Better headers dictionary management.
- 1.7.3 [#188]
- ``additional_matchers`` kwarg on ``use_cassette``.
- [#191] Actually support passing multiple before_record_request functions (thanks @agriffis).
- 1.7.2
- [#186] Get effective_url in tornado (thanks @mvschaik)
- [#187] Set request_time on Response object in tornado (thanks @abhinav).
- 1.7.1
- [#183] Patch ``fetch_impl`` instead of the entire HTTPClient class for Tornado (thanks @abhinav).
- 1.7.0
- [#177] Properly support coroutine/generator decoration.
- [#178] Support distribute (thanks @graingert). [#163] Make compatibility between python2 and python3 recorded cassettes more robust (thanks @gward).
- 1.6.1
- [#169] Support conditional requirements in old versions of pip
- Fix RST parse errors generated by pandoc
- [Tornado] Fix unsupported features exception not being raised
- [#166] content-aware body matcher.
- 1.6.0
- [#120] Tornado support (thanks @abhinav)
- [#147] packaging fixes (thanks @graingert)
- [#158] allow filtering post params in requests (thanks @MrJohz)
- [#140] add xmlrpclib support (thanks @Diaoul).
- 1.5.2
- Fix crash when cassette path contains cassette library directory (thanks @gazpachoking).
- 1.5.0
- Automatic cassette naming and 'application/json' post data filtering (thanks @marco-santamaria).
- 1.4.2
- Fix a bug caused by requests 2.7 and chunked transfer encoding
- 1.4.1
- Include README, tests, LICENSE in package. Thanks @ralphbean.
- 1.4.0
- Filter post data parameters (thanks @eadmundo)
- Support for posting files through requests, inject\_cassette kwarg to access cassette from ``use_cassette`` decorated function, ``with_current_defaults`` actually works (thanks @samstav).
- 1.3.0
- Fix/add support for urllib3 (thanks @aisch)
- Fix default port for https (thanks @abhinav).
- 1.2.0
- Add custom\_patches argument to VCR/Cassette objects to allow users to stub custom classes when cassettes become active.
- 1.1.4
- Add force reset around calls to actual connection from stubs, to ensure compatibility with the version of httplib/urlib2 in python 2.7.9.
- 1.1.3
- Fix python3 headers field (thanks @rtaboada)
- fix boto test (thanks @telaviv)
- fix new\_episodes record mode (thanks @jashugan),
- fix Windows connectionpool stub bug (thanks @gazpachoking)
- add support for requests 2.5
- 1.1.2
- Add urllib==1.7.1 support.
- Make json serialize error handling correct
- Improve logging of match failures.
- 1.1.1
- Use function signature preserving ``wrapt.decorator`` to write the decorator version of use\_cassette in order to ensure compatibility with py.test fixtures and python 2.
- Move all request filtering into the ``before_record_callable``.
- 1.1.0
- Add ``before_record_response``. Fix several bugs related to the context management of cassettes.
- 1.0.3
- Fix an issue with requests 2.4 and make sure case sensitivity is consistent across python versions
- 1.0.2
- Fix an issue with requests 2.3
- 1.0.1
- Fix a bug with the new ignore requests feature and the once record mode
- 1.0.0
- *BACKWARDS INCOMPATIBLE*: Please see the 'upgrade' section in the README. Take a look at the matcher section as well, you might want to update your ``match_on`` settings.
- Add support for filtering sensitive data from requests, matching query strings after the order changes and improving the built-in matchers, (thanks to @mshytikov)
- Support for ignoring requests to certain hosts, bump supported Python3 version to 3.4, fix some bugs with Boto support (thanks @marusich)
- Fix error with URL field capitalization in README (thanks @simon-weber)
- Added some log messages to help with debugging
- Added ``all_played`` property on cassette (thanks @mshytikov)
- 0.7.0
- VCR.py now supports Python 3! (thanks @asundg)
- Also I refactored the stub connections quite a bit to add support for the putrequest and putheader calls.
- This version also adds support for httplib2 (thanks @nilp0inter).
- I have added a couple tests for boto since it is an http client in its own right.
- Finally, this version includes a fix for a bug where requests wasn't being patched properly (thanks @msabramo).
- 0.6.0
- Store response headers as a list since a HTTP response can have the same header twice (happens with set-cookie sometimes).
- This has the added benefit of preserving the order of headers.
- Thanks @smallcode for the bug report leading to this change.
- I have made an effort to ensure backwards compatibility with the old cassettes' header storage mechanism, but if you want to upgrade to the new header storage, you should delete your cassettes and re-record them.
- Also this release adds better error messages (thanks @msabramo)
- and adds support for using VCR as a decorator (thanks @smallcode for the motivation)
- 0.5.0
- Change the ``response_of`` method to ``responses_of`` since cassettes can now contain more than one response for a request.
- Since this changes the API, I'm bumping the version.
- Also includes 2 bugfixes:
- a better error message when attempting to overwrite a cassette file,
- and a fix for a bug with requests sessions (thanks @msabramo)
- 0.4.0
- Change default request recording behavior for multiple requests.
- If you make the same request multiple times to the same URL, the response might be different each time (maybe the response has a timestamp in it or something), so this will make the same request multiple times and save them all.
- Then, when you are replaying the cassette, the responses will be played back in the same order in which they were received.
- If you were making multiple requests to the same URL in a cassette before version 0.4.0, you might need to regenerate your cassette files.
- Also, removes support for the cassette.play\_count counter API, since individual requests aren't unique anymore.
- A cassette might contain the same request several times.
- Also removes secure overwrite feature since that was breaking overwriting files in Windows
- And fixes a bug preventing request's automatic body decompression from working.
- 0.3.5
- Fix compatibility with requests 2.x
- 0.3.4
- Bugfix: close file before renaming it. This fixes an issue on Windows. Thanks @smallcode for the fix.
- 0.3.3
- Bugfix for error message when an unregistered custom matcher was used
- 0.3.2
- Fix issue with new config syntax and the ``match_on`` parameter. Thanks, @chromy!
- 0.3.1
- Fix issue causing full paths to be sent on the HTTP request line.
- 0.3.0
- *Backwards incompatible release*
- Added support for record modes, and changed the default recording behavior to the "once" record mode. Please see the documentation on record modes for more.
- Added support for custom request matching, and changed the default request matching behavior to match only on the URL and method.
- Also, improved the httplib mocking to add support for the ``HTTPConnection.send()`` method.
- This means that requests won't actually be sent until the response is read, since I need to record the entire request in order to match up the appropriate response.
- I don't think this should cause any issues unless you are sending requests without ever loading the response (which none of the standard httplib wrappers do, as far as I know).
- Thanks to @fatuhoku for some of the ideas and the motivation behind this release.
- 0.2.1
- Fixed missing modules in setup.py
- 0.2.0
- Added configuration API, which lets you configure some settings on VCR (see the README).
- Also, VCR no longer saves cassettes if they haven't changed at all and supports JSON as well as YAML (thanks @sirpengi).
- Added amazing new skeumorphic logo, thanks @hairarrow.
- 0.1.0
- *backwards incompatible release - delete your old cassette files*
- This release adds the ability to access the cassette to make assertions on it
- as well as a major code refactor thanks to @dlecocq.
- It also fixes a couple longstanding bugs with redirects and HTTPS. [#3 and #4]
- 0.0.4
- If you have libyaml installed, vcrpy will use the c bindings instead. Speed up your tests! Thanks @dlecocq
- 0.0.3
- Add support for requests 1.2.3. Support for older versions of requests dropped (thanks @vitormazzi and @bryanhelmig)
- 0.0.2
- Add support for requests / urllib3
- 0.0.1
- Initial Release
- 1.11.1 Fix compatibility with newest requests and urllib3 releases
- 1.11.0 Allow injection of persistence methods + bugfixes (thanks @j-funk and @IvanMalison),
Support python 3.6 + CI tests (thanks @derekbekoe and @graingert),
Support pytest-asyncio coroutines (thanks @graingert)
- 1.10.5 Added a fix to httplib2 (thanks @carlosds730), Fix an issue with
aiohttp (thanks @madninja), Add missing requirement yarl (thanks @lamenezes),
Remove duplicate mock triple (thanks @FooBarQuaxx)
- 1.10.4 Fix an issue with asyncio aiohttp (thanks @madninja)
- 1.10.3 Fix some issues with asyncio and params (thanks @anovikov1984 and
@lamenezes), Fix some issues with cassette serialize / deserialize and empty
response bodies (thanks @gRoussac and @dz0ny)
- 1.10.2 Fix 1.10.1 release - add aiohttp support back in
- 1.10.1 [bad release] Fix build for Fedora package + python2 (thanks @puiterwijk and @lamenezes)
- 1.10.0 Add support for aiohttp (thanks @lamenezes)
- 1.9.0 Add support for boto3 (thanks @desdm, @foorbarna). Fix deepcopy issue
for response headers when `decode_compressed_response` is enabled (thanks
@nickdirienzo)
- 1.8.0 Fix for Serialization errors with JSON adapter (thanks
@aliaksandrb). Avoid concatenating bytes with strings (thanks
@jaysonsantos). Exclude __pycache__ dirs & compiled files in sdist
(thanks @koobs). Fix Tornado support behavior for Tornado 3 (thanks
@abhinav). decode_compressed_response option and filter (thanks
@jayvdb).
- 1.7.4 [#217] Make use_cassette decorated functions actually return a
value (thanks @bcen). [#199] Fix path transformation defaults.
Better headers dictionary management.
- 1.7.3 [#188] ``additional_matchers`` kwarg on ``use_cassette``.
[#191] Actually support passing multiple before_record_request
functions (thanks @agriffis).
- 1.7.2 [#186] Get effective_url in tornado (thanks @mvschaik), [#187]
Set request_time on Response object in tornado (thanks @abhinav).
- 1.7.1 [#183] Patch ``fetch_impl`` instead of the entire HTTPClient
class for Tornado (thanks @abhinav).
- 1.7.0 [#177] Properly support coroutine/generator decoration. [#178]
Support distribute (thanks @graingert). [#163] Make compatibility
between python2 and python3 recorded cassettes more robust (thanks
@gward).
- 1.6.1 [#169] Support conditional requirements in old versions of
pip, Fix RST parse errors generated by pandoc, [Tornado] Fix
unsupported features exception not being raised, [#166]
content-aware body matcher.
- 1.6.0 [#120] Tornado support (thanks @abhinav), [#147] packaging fixes
(thanks @graingert), [#158] allow filtering post params in requests
(thanks @MrJohz), [#140] add xmlrpclib support (thanks @Diaoul).
- 1.5.2 Fix crash when cassette path contains cassette library
directory (thanks @gazpachoking).
- 1.5.0 Automatic cassette naming and 'application/json' post data
filtering (thanks @marco-santamaria).
- 1.4.2 Fix a bug caused by requests 2.7 and chunked transfer encoding
- 1.4.1 Include README, tests, LICENSE in package. Thanks @ralphbean.
- 1.4.0 Filter post data parameters (thanks @eadmundo), support for
posting files through requests, inject\_cassette kwarg to access
cassette from ``use_cassette`` decorated function,
``with_current_defaults`` actually works (thanks @samstav).
- 1.3.0 Fix/add support for urllib3 (thanks @aisch), fix default port
for https (thanks @abhinav).
- 1.2.0 Add custom\_patches argument to VCR/Cassette objects to allow
users to stub custom classes when cassettes become active.
- 1.1.4 Add force reset around calls to actual connection from stubs,
to ensure compatibility with the version of httplib/urlib2 in python
2.7.9.
- 1.1.3 Fix python3 headers field (thanks @rtaboada), fix boto test
(thanks @telaviv), fix new\_episodes record mode (thanks @jashugan),
fix Windows connectionpool stub bug (thanks @gazpachoking), add
support for requests 2.5
- 1.1.2 Add urllib==1.7.1 support. Make json serialize error handling
correct Improve logging of match failures.
- 1.1.1 Use function signature preserving ``wrapt.decorator`` to write
the decorator version of use\_cassette in order to ensure
compatibility with py.test fixtures and python 2. Move all request
filtering into the ``before_record_callable``.
- 1.1.0 Add ``before_record_response``. Fix several bugs related to the
context management of cassettes.
- 1.0.3: Fix an issue with requests 2.4 and make sure case sensitivity
is consistent across python versions
- 1.0.2: Fix an issue with requests 2.3
- 1.0.1: Fix a bug with the new ignore requests feature and the once
record mode
- 1.0.0: *BACKWARDS INCOMPATIBLE*: Please see the 'upgrade' section in
the README. Take a look at the matcher section as well, you might
want to update your ``match_on`` settings. Add support for filtering
sensitive data from requests, matching query strings after the order
changes and improving the built-in matchers, (thanks to @mshytikov),
support for ignoring requests to certain hosts, bump supported
Python3 version to 3.4, fix some bugs with Boto support (thanks
@marusich), fix error with URL field capitalization in README (thanks
@simon-weber), added some log messages to help with debugging, added
``all_played`` property on cassette (thanks @mshytikov)
- 0.7.0: VCR.py now supports Python 3! (thanks @asundg) Also I
refactored the stub connections quite a bit to add support for the
putrequest and putheader calls. This version also adds support for
httplib2 (thanks @nilp0inter). I have added a couple tests for boto
since it is an http client in its own right. Finally, this version
includes a fix for a bug where requests wasn't being patched properly
(thanks @msabramo).
- 0.6.0: Store response headers as a list since a HTTP response can
have the same header twice (happens with set-cookie sometimes). This
has the added benefit of preserving the order of headers. Thanks
@smallcode for the bug report leading to this change. I have made an
effort to ensure backwards compatibility with the old cassettes'
header storage mechanism, but if you want to upgrade to the new
header storage, you should delete your cassettes and re-record them.
Also this release adds better error messages (thanks @msabramo) and
adds support for using VCR as a decorator (thanks @smallcode for the
motivation)
- 0.5.0: Change the ``response_of`` method to ``responses_of`` since
cassettes can now contain more than one response for a request. Since
this changes the API, I'm bumping the version. Also includes 2
bugfixes: a better error message when attempting to overwrite a
cassette file, and a fix for a bug with requests sessions (thanks
@msabramo)
- 0.4.0: Change default request recording behavior for multiple
requests. If you make the same request multiple times to the same
URL, the response might be different each time (maybe the response
has a timestamp in it or something), so this will make the same
request multiple times and save them all. Then, when you are
replaying the cassette, the responses will be played back in the same
order in which they were received. If you were making multiple
requests to the same URL in a cassette before version 0.4.0, you
might need to regenerate your cassette files. Also, removes support
for the cassette.play\_count counter API, since individual requests
aren't unique anymore. A cassette might contain the same request
several times. Also removes secure overwrite feature since that was
breaking overwriting files in Windows, and fixes a bug preventing
request's automatic body decompression from working.
- 0.3.5: Fix compatibility with requests 2.x
- 0.3.4: Bugfix: close file before renaming it. This fixes an issue on
Windows. Thanks @smallcode for the fix.
- 0.3.3: Bugfix for error message when an unregistered custom matcher
was used
- 0.3.2: Fix issue with new config syntax and the ``match_on``
parameter. Thanks, @chromy!
- 0.3.1: Fix issue causing full paths to be sent on the HTTP request
line.
- 0.3.0: *Backwards incompatible release* - Added support for record
modes, and changed the default recording behavior to the "once"
record mode. Please see the documentation on record modes for more.
Added support for custom request matching, and changed the default
request matching behavior to match only on the URL and method. Also,
improved the httplib mocking to add support for the
``HTTPConnection.send()`` method. This means that requests won't
actually be sent until the response is read, since I need to record
the entire request in order to match up the appropriate response. I
don't think this should cause any issues unless you are sending
requests without ever loading the response (which none of the
standard httplib wrappers do, as far as I know. Thanks to @fatuhoku
for some of the ideas and the motivation behind this release.
- 0.2.1: Fixed missing modules in setup.py
- 0.2.0: Added configuration API, which lets you configure some
settings on VCR (see the README). Also, VCR no longer saves cassettes
if they haven't changed at all and supports JSON as well as YAML
(thanks @sirpengi). Added amazing new skeumorphic logo, thanks
@hairarrow.
- 0.1.0: *backwards incompatible release - delete your old cassette
files*: This release adds the ability to access the cassette to make
assertions on it, as well as a major code refactor thanks to
@dlecocq. It also fixes a couple longstanding bugs with redirects and
HTTPS. [#3 and #4]
- 0.0.4: If you have libyaml installed, vcrpy will use the c bindings
instead. Speed up your tests! Thanks @dlecocq
- 0.0.3: Add support for requests 1.2.3. Support for older versions of
requests dropped (thanks @vitormazzi and @bryanhelmig)
- 0.0.2: Add support for requests / urllib3
- 0.0.1: Initial Release


@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
#
# vcrpy documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 13 11:18:00 2015.
@@ -11,123 +12,96 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
import codecs
import os
import re
here = os.path.abspath(os.path.dirname(__file__))
def read(*parts):
    # intentionally *not* adding an encoding option to open, See:
    # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
    with codecs.open(os.path.join(here, *parts), "r") as fp:
        return fp.read()
def find_version(*file_paths):
    version_file = read(*file_paths)
    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
    if version_match:
        return version_match.group(1)
    raise RuntimeError("Unable to find version string.")
autodoc_default_options = {
"members": None,
"undoc-members": None,
}
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
"sphinx.ext.viewcode",
"sphinx.ext.todo",
"sphinx.ext.githubpages",
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = ".rst"
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
master_doc = 'index'
# General information about the project.
project = "vcrpy"
copyright = "2015, Kevin McCarthy"
author = "Kevin McCarthy"
project = u'vcrpy'
copyright = u'2015, Kevin McCarthy'
author = u'Kevin McCarthy'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# version = "1.7.4"
version = '1.7.4'
# The full version, including alpha/beta/rc tags.
version = release = find_version("..", "vcr", "__init__.py")
release = '1.7.4'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = "en"
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
#today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
@@ -137,152 +111,156 @@ todo_include_todos = False
# The theme to use for HTML and HTML Help pages.
# https://read-the-docs.readthedocs.io/en/latest/theme.html#how-do-i-use-this-locally-and-on-read-the-docs
# if "READTHEDOCS" not in os.environ:
# import sphinx_rtd_theme
#
# html_theme = "sphinx_rtd_theme"
# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
if 'READTHEDOCS' not in os.environ:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
#html_theme_options = {}
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
# html_extra_path = []
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
html_sidebars = {"**": ["globaltoc.html", "relations.html", "sourcelink.html", "searchbox.html"]}
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
#html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
#html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
#html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
#html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
# html_search_language = 'en'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
# html_search_options = {'type': 'default'}
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
# html_search_scorer = 'scorer.js'
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = "vcrpydoc"
htmlhelp_basename = 'vcrpydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
# Latex figure (float) alignment
# 'figure_align': 'htbp',
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "vcrpy.tex", "vcrpy Documentation", "Kevin McCarthy", "manual"),
(master_doc, 'vcrpy.tex', u'vcrpy Documentation',
u'Kevin McCarthy', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
#latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
#latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "vcrpy", "vcrpy Documentation", [author], 1)]
man_pages = [
(master_doc, 'vcrpy', u'vcrpy Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
@@ -291,30 +269,23 @@ man_pages = [(master_doc, "vcrpy", "vcrpy Documentation", [author], 1)]
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
master_doc,
"vcrpy",
"vcrpy Documentation",
author,
"vcrpy",
"One line description of project.",
"Miscellaneous",
),
(master_doc, 'vcrpy', u'vcrpy Documentation',
author, 'vcrpy', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
#texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
# texinfo_no_detailmenu = False
#texinfo_no_detailmenu = False
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
html_theme = "alabaster"
intersphinx_mapping = {'https://docs.python.org/': None}

View File

@@ -42,8 +42,7 @@ This can be configured by changing the ``match_on`` setting.
The following options are available (a short configuration sketch follows this list):
- method (for example, POST or GET)
- uri (the full URI)
- scheme (for example, HTTP or HTTPS)
- uri (the full URI.)
- host (the hostname of the server receiving the request)
- port (the port of the server receiving the request)
- path (the path of the request)
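A minimal sketch of selecting a subset of these matchers, assuming the standard ``vcr.VCR`` API (the cassette path is a placeholder):
.. code:: python
import vcr
# Hypothetical configuration: only compare method, host and path when matching requests.
my_vcr = vcr.VCR(match_on=['method', 'host', 'path'])
with my_vcr.use_cassette('fixtures/vcr_cassettes/example.yaml'):
    ...  # requests made here are matched against the cassette using only those matchers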

View File

@@ -1,140 +1,25 @@
Contributing
============
.. image:: _static/vcr.svg
:alt: vcr.py logo
:align: right
🚀 Milestones
--------------
For anyone interested in the roadmap and projected release milestones please see the following link:
`MILESTONES <https://github.com/kevin1024/vcrpy/milestones>`_
----
🎁 Contributing Issues and PRs
-------------------------------
- Issues and PRs will get triaged and assigned to the appropriate milestone.
- PRs get priority over issues.
- The maintainers have limited bandwidth and do so **voluntarily**.
So whilst reporting issues is valuable, please consider:
- contributing an issue with a toy repo that replicates the issue.
- contributing PRs is a more valuable donation of your time and effort.
Thanks again for your interest and support in VCRpy.
We really appreciate it.
----
👥 Collaborators
-----------------
We also have a large test matrix to cover and would like members to volunteer covering these roles.
============ ==================== ================= ================== ======================
**Library** **Issue Triager(s)** **Maintainer(s)** **PR Reviewer(s)** **Release Manager(s)**
------------ -------------------- ----------------- ------------------ ----------------------
``core`` Needs support Needs support Needs support @neozenith
``requests`` @neozenith Needs support @neozenith @neozenith
``aiohttp`` Needs support Needs support Needs support @neozenith
``urllib3`` Needs support Needs support Needs support @neozenith
``httplib2`` Needs support Needs support Needs support @neozenith
``tornado4`` Needs support Needs support Needs support @neozenith
``boto3`` Needs support Needs support Needs support @neozenith
============ ==================== ================= ================== ======================
Role Descriptions
~~~~~~~~~~~~~~~~~
**Issue Triager:**
Simply adding these three labels for incoming issues means a lot for maintaining this project:
- ``bug`` or ``enhancement``
- Which library does it affect? ``core``, ``aiohttp``, ``requests``, ``urllib3``, ``tornado4``, ``httplib2``
- If it is a bug, is it ``Verified Can Replicate`` or ``Requires Help Replicating``
- Thanking people for raising issues. Feedback is always appreciated.
- Politely asking if they are able to link to an example repo that replicates the issue if they haven't already. Being able to *clone and go* helps the next person and we like that. 😃
**Maintainer:**
This involves creating PRs to address bugs and enhancement requests. It also means maintaining the test suite, docstrings and documentation.
**PR Reviewer:**
The PR reviewer is a second set of eyes to see if:
- Are there tests covering the code paths added/modified?
- Do the tests and modifications make sense and seem appropriate?
- Add specific feedback, even on approvals, on why it is accepted, e.g. "I like how you use a context manager there. 😄"
- Also make sure they add a line to `docs/changelog.rst` to claim credit for their contribution.
**Release Manager:**
- Ensure CI is passing.
- Create a release on github and tag it with the changelog release notes.
- ``python3 setup.py build sdist bdist_wheel``
- ``twine upload dist/*``
- Go to ReadTheDocs build page and trigger a build https://readthedocs.org/projects/vcrpy/builds/
----
Running VCR's test suite
------------------------
The tests are all run automatically on `Github Actions CI <https://github.com/kevin1024/vcrpy/actions>`__,
but you can also run them yourself using `pytest <http://pytest.org/>`__.
The tests are all run automatically on `Travis
CI <https://travis-ci.org/kevin1024/vcrpy>`__, but you can also run them
yourself using `py.test <http://pytest.org/>`__ and
`Tox <http://tox.testrun.org/>`__. Tox will automatically run them in
all environments VCR.py supports. The test suite is pretty big and slow,
but you can tell tox to only run specific tests like this::
In order for the boto3 tests to run, you will need an AWS key.
Refer to the `boto3
documentation <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/index.html>`__
for how to set this up. I have marked the boto3 tests as optional in
tox -e py27requests -- -v -k "'test_status_code or test_gzip'"
This will run only tests that look like ``test_status_code`` or
``test_gzip`` in the test suite, and only in the python 2.7 environment
that has ``requests`` installed.
Also, in order for the boto tests to run, you will need an AWS key.
Refer to the `boto
documentation <https://boto.readthedocs.io/en/latest/getting_started.html>`__
for how to set this up. I have marked the boto tests as optional in
Travis so you don't have to worry about them failing if you submit a
pull request.
Using Pyenv with VCR's test suite
---------------------------------
Pyenv is a tool for managing multiple installation of python on your system.
See the full documentation at their `github <https://github.com/pyenv/pyenv>`_,
or follow this example::
git clone https://github.com/pyenv/pyenv ~/.pyenv
# Add ~/.pyenv/bin to your PATH
export PATH="$PATH:~/.pyenv/bin"
# Setup shim paths
eval "$(pyenv init -)"
# Install supported versions (at time of writing), this does not activate them
pyenv install 3.12.0 pypy3.10
# This activates them
pyenv local 3.12.0 pypy3.10
# Run the whole test suite
pip install .[tests]
./runtests.sh
Troubleshooting on MacOSX
-------------------------
If you have this kind of error when running tests:
.. code:: python
__main__.ConfigurationError: Curl is configured to use SSL, but we have
not been able to determine which SSL backend it is using. Please see PycURL documentation for how to specify the SSL backend manually.
Then you need to define some environment variables:
.. code:: bash
export PYCURL_SSL_LIBRARY=openssl
export LDFLAGS=-L/usr/local/opt/openssl/lib
export CPPFLAGS=-I/usr/local/opt/openssl/include
Reference: `stackoverflow issue <https://stackoverflow.com/questions/51019622/curl-is-configured-to-use-ssl-but-we-have-not-been-able-to-determine-which-ssl>`__

View File

@@ -29,29 +29,3 @@ The second time, you will see::
If you set the loglevel to DEBUG, you will also get information about
which matchers didn't match. This can help you with debugging custom
matchers.
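As a rough sketch, DEBUG logging for VCR.py can be switched on with the standard library (assuming the logger is named ``vcr``):
.. code:: python
import logging
logging.basicConfig()  # send log records to stderr
vcr_log = logging.getLogger("vcr")
vcr_log.setLevel(logging.DEBUG)  # also reports which matchers did not match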
CannotOverwriteExistingCassetteException
----------------------------------------
When a request cannot be found in an existing cassette,
VCR.py tries to find the recorded request(s) that are most similar to the one being searched for.
The goal is to show which matcher(s) failed and what part of the failed request may have changed.
It can return multiple similar requests with:
- the matchers that succeeded
- the matchers that failed
- for each failed matcher, the assertion message explaining why it failed
A ``CannotOverwriteExistingCassetteException`` message looks like this (a short handling sketch follows the example):
.. code::
CannotOverwriteExistingCassetteException: Can't overwrite existing cassette ('cassette.yaml') in your current record mode ('once').
No match for the request (<Request (GET) https://www.googleapis.com/?alt=json&maxResults=200>) was found.
Found 1 similar requests with 1 different matchers :
1 - (<Request (GET) https://www.googleapis.com/?alt=json&maxResults=500>).
Matchers succeeded : ['method', 'scheme', 'host', 'port', 'path']
Matchers failed :
query - assertion failure :
[('alt', 'json'), ('maxResults', '200')] != [('alt', 'json'), ('maxResults', '500')]
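A hedged example of how this exception can surface in a test; the cassette path, URL and record mode below are placeholders:
.. code:: python
import urllib.request
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException
try:
    with vcr.use_cassette('cassette.yaml', record_mode='none'):
        urllib.request.urlopen('https://www.googleapis.com/?alt=json&maxResults=200')
except CannotOverwriteExistingCassetteException as exc:
    print(exc)  # the message lists similar requests and the matchers that failed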

View File

@@ -4,7 +4,7 @@ Contents
========
.. toctree::
:maxdepth: 3
:maxdepth: 2
installation
usage
@@ -15,7 +15,6 @@ Contents
contributing
changelog
==================
Indices and tables
==================

View File

@@ -4,25 +4,23 @@ Installation
VCR.py is a package on `PyPI <https://pypi.python.org>`__, so you can install
with pip::
pip3 install vcrpy
pip install vcrpy
Compatibility
-------------
VCR.py supports Python 3.9+, and `pypy <http://pypy.org>`__.
VCR.py supports Python 2.7 and 3.4+, and
`pypy <http://pypy.org>`__.
The following HTTP libraries are supported:
The following http libraries are supported:
- ``aiohttp``
- ``boto3``
- ``http.client``
- ``httplib2``
- ``requests`` (>=2.16.2 versions)
- ``tornado.httpclient``
- ``urllib2``
- ``urllib3``
- ``httpx``
- ``httpcore``
- urllib2
- urllib3
- http.client (python3)
- requests (both 1.x and 2.x versions)
- httplib2
- boto
- Tornado's AsyncHTTPClient
Speed
-----
@@ -35,7 +33,7 @@ rebuilding pyyaml.
1. Test if pyyaml is built with libyaml. This should work::
python3 -c 'from yaml import CLoader'
python -c 'from yaml import CLoader'
2. Install libyaml according to your Linux distribution, or using `Homebrew
<http://mxcl.github.com/homebrew/>`__ on Mac::
@@ -46,8 +44,8 @@ rebuilding pyyaml.
3. Rebuild pyyaml with libyaml::
pip3 uninstall pyyaml
pip3 --no-cache-dir install pyyaml
pip uninstall pyyaml
pip --no-cache-dir install pyyaml
Upgrade
-------
@@ -61,7 +59,7 @@ is to simply delete your cassettes and re-record all of them. VCR.py
also provides a migration script that attempts to upgrade your 0.x
cassettes to the new 1.x format. To use it, run the following command::
python3 -m vcr.migration PATH
python -m vcr.migration PATH
The PATH can be either a path to the directory with cassettes or the
path to a single cassette.

View File

@@ -1,2 +0,0 @@
sphinx<9
sphinx_rtd_theme==3.0.2

View File

@@ -4,14 +4,14 @@ Usage
.. code:: python
import vcr
import urllib.request
import urllib2
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
response = urllib.request.urlopen('http://www.iana.org/domains/reserved').read()
assert b'Example domains' in response
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
assert 'Example domains' in response
Run this test once, and VCR.py will record the HTTP request to
``fixtures/vcr_cassettes/synopsis.yaml``. Run it again, and VCR.py will
``fixtures/vcr_cassettes/synopsis.yml``. Run it again, and VCR.py will
replay the response from iana.org when the http request is made. This
test is now fast (no real HTTP requests are made anymore), deterministic
(the test will continue to pass, even if you are offline, or iana.org
@@ -25,8 +25,8 @@ look like this:
@vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml')
def test_iana():
response = urllib.request.urlopen('http://www.iana.org/domains/reserved').read()
assert b'Example domains' in response
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
assert 'Example domains' in response
When using the decorator version of ``use_cassette``, it is possible to
omit the path to the cassette file.
@@ -35,8 +35,8 @@ omit the path to the cassette file.
@vcr.use_cassette()
def test_iana():
response = urllib.request.urlopen('http://www.iana.org/domains/reserved').read()
assert b'Example domains' in response
response = urllib2.urlopen('http://www.iana.org/domains/reserved').read()
assert 'Example domains' in response
In this case, the cassette file will be given the same name as the test
function, and it will be placed in the same directory as the file in
@@ -92,79 +92,6 @@ all
Unittest Integration
--------------------
Inherit from ``VCRTestCase`` for automatic recording and playback of HTTP
interactions.
.. code:: python
from vcr.unittest import VCRTestCase
import requests
class MyTestCase(VCRTestCase):
def test_something(self):
response = requests.get('http://example.com')
Similar to how VCR.py returns the cassette from the context manager,
``VCRTestCase`` makes the cassette available as ``self.cassette``:
.. code:: python
self.assertEqual(len(self.cassette), 1)
self.assertEqual(self.cassette.requests[0].uri, 'http://example.com')
By default cassettes will be placed in the ``cassettes`` subdirectory next to the
test, named according to the test class and method. For example, the above test
would read from and write to ``cassettes/MyTestCase.test_something.yaml``
The configuration can be modified by overriding methods on your subclass:
``_get_vcr_kwargs``, ``_get_cassette_library_dir`` and ``_get_cassette_name``.
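For instance, a cassette directory override might look roughly like this (the folder name is an assumption):
.. code:: python
import os
from vcr.unittest import VCRTestCase
class MyTestCase(VCRTestCase):
    def _get_cassette_library_dir(self):
        # store cassettes in a custom folder next to this test file
        return os.path.join(os.path.dirname(__file__), 'my_cassettes')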
To modify the ``VCR`` object after instantiation, for example to add a matcher,
you can hook on ``_get_vcr``, for example:
.. code:: python
class MyTestCase(VCRTestCase):
def _get_vcr(self, **kwargs):
myvcr = super(MyTestCase, self)._get_vcr(**kwargs)
myvcr.register_matcher('mymatcher', mymatcher)
myvcr.match_on = ['mymatcher']
return myvcr
See
`the source
<https://github.com/kevin1024/vcrpy/blob/master/vcr/unittest.py>`__
for the default implementations of these methods.
If you implement a ``setUp`` method on your test class then make sure to call
the parent version ``super().setUp()`` in your own in order to continue getting
the cassettes produced.
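A small sketch of such a ``setUp`` override (the extra attribute is purely illustrative):
.. code:: python
from vcr.unittest import VCRTestCase
class MyTestCase(VCRTestCase):
    def setUp(self):
        super().setUp()  # keeps the cassette recording/playback active
        self.extra_state = {}  # hypothetical per-test state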
VCRMixin
~~~~~~~~
In case inheriting from ``VCRTestCase`` is difficult because of an existing
class hierarchy containing tests in the base classes, inherit from ``VCRMixin``
instead.
.. code:: python
from vcr.unittest import VCRMixin
import requests
import unittest
class MyTestMixin(VCRMixin):
def test_something(self):
response = requests.get(self.url)
class MyTestCase(MyTestMixin, unittest.TestCase):
url = 'http://example.com'
Pytest Integration
------------------
A Pytest plugin is available here : `pytest-vcr
<https://github.com/ktosiek/pytest-vcr>`__.
Alternative plugin, that also provides network access blocking: `pytest-recording
<https://github.com/kiwicom/pytest-recording>`__.
While it's possible to use the context manager or decorator forms with unittest,
there's also a ``VCRTestCase`` provided separately by `vcrpy-unittest
<https://github.com/agriffis/vcrpy-unittest>`__.
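A hedged sketch of how the pytest-vcr plugin is typically used, assuming its ``vcr`` marker:
.. code:: python
import pytest
import requests
@pytest.mark.vcr
def test_example():
    # the first run records to a cassette, later runs replay it
    response = requests.get('http://example.com')
    assert response.status_code == 200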

View File

@@ -1,32 +0,0 @@
[tool.codespell]
skip = '.git,*.pdf,*.svg,.tox'
ignore-regex = "\\\\[fnrstv]"
[tool.pytest]
addopts = ["--strict-config", "--strict-markers"]
asyncio_default_fixture_loop_scope = "session"
asyncio_default_test_loop_scope = "session"
markers = ["online"]
[tool.ruff]
line-length = 110
target-version = "py310"
[tool.ruff.lint]
select = [
"B", # flake8-bugbear
"C4", # flake8-comprehensions
"COM", # flake8-commas
"E", # pycodestyle error
"F", # pyflakes
"I", # isort
"ISC", # flake8-implicit-str-concat
"PIE", # flake8-pie
"RUF", # Ruff-specific rules
"UP", # pyupgrade
"W", # pycodestyle warning
"SIM",
]
[tool.ruff.lint.isort]
known-first-party = ["vcr"]

View File

@@ -1,5 +1,3 @@
#!/bin/bash
# If you are getting an INVOCATION ERROR for this script then there is a good chance you are running on Windows.
# You can and should use WSL for running tests on Windows when it calls bash scripts.
REQUESTS_CA_BUNDLE=`python3 -m pytest_httpbin.certs` exec pytest "$@"
REQUESTS_CA_BUNDLE=`python -m pytest_httpbin.certs` py.test $*

setup.py
View File

@@ -1,89 +1,71 @@
#!/usr/bin/env python
import codecs
import os
import re
from pathlib import Path
import sys
from setuptools import find_packages, setup
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
long_description = Path("README.rst").read_text()
here = os.path.abspath(os.path.dirname(__file__))
long_description = open('README.rst', 'r').read()
def read(*parts):
# intentionally *not* adding an encoding option to open, See:
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
with codecs.open(os.path.join(here, *parts), "r") as fp:
return fp.read()
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def find_version(*file_paths):
version_file = read(*file_paths)
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
install_requires = [
"PyYAML",
"wrapt",
'PyYAML',
'wrapt',
'six>=1.5',
'contextlib2; python_version=="2.7"',
'mock; python_version=="2.7"',
'yarl; python_version>="3.4"',
]
extras_require = {
"tests": [
"aiohttp",
"boto3",
"cryptography",
"httpbin",
"httpcore",
"httplib2",
"httpx",
"pycurl; platform_python_implementation !='PyPy'",
"pytest",
"pytest-aiohttp",
"pytest-asyncio",
"pytest-cov",
"pytest-httpbin",
"requests>=2.22.0",
"tornado",
"urllib3",
"werkzeug==2.0.3",
],
}
excluded_packages = ["tests*"]
if sys.version_info[0] == 2:
excluded_packages.append("vcr.stubs.aiohttp_stubs")
setup(
name="vcrpy",
version=find_version("vcr", "__init__.py"),
description=("Automatically mock your HTTP interactions to simplify and speed up testing"),
name='vcrpy',
version='1.11.1',
description=(
"Automatically mock your HTTP interactions to simplify and "
"speed up testing"
),
long_description=long_description,
long_description_content_type="text/x-rst",
author="Kevin McCarthy",
author_email="me@kevinmccarthy.org",
url="https://github.com/kevin1024/vcrpy",
packages=find_packages(exclude=["tests*"]),
python_requires=">=3.10",
author='Kevin McCarthy',
author_email='me@kevinmccarthy.org',
url='https://github.com/kevin1024/vcrpy',
packages=find_packages(exclude=excluded_packages),
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
install_requires=install_requires,
license="MIT",
extras_require=extras_require,
tests_require=extras_require["tests"],
license='MIT',
tests_require=['pytest', 'mock', 'pytest-httpbin'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"Intended Audience :: Developers",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Topic :: Software Development :: Testing",
"Topic :: Internet :: WWW/HTTP",
"License :: OSI Approved :: MIT License",
],
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Testing',
'Topic :: Internet :: WWW/HTTP',
'License :: OSI Approved :: MIT License',
]
)

View File

@@ -11,12 +11,9 @@ def assert_cassette_has_one_response(cass):
assert cass.play_count == 1
def assert_is_json_bytes(b: bytes):
assert isinstance(b, bytes)
def assert_is_json(a_string):
try:
json.loads(b)
except Exception as error:
raise AssertionError() from error
json.loads(a_string.decode('utf-8'))
except Exception:
assert False
assert True

View File

@@ -9,7 +9,7 @@ interactions:
method: GET
uri: http://httpbin.org/ip
response:
body: {string: "{\n \"origin\": \"217.122.164.194\"\n}"}
body: {string: !!python/unicode "{\n \"origin\": \"217.122.164.194\"\n}"}
headers:
access-control-allow-origin: ['*']
content-type: [application/json]

View File

@@ -1,42 +1,15 @@
# flake8: noqa
import asyncio
import aiohttp
async def aiohttp_request(loop, method, url, output="text", encoding="utf-8", content_type=None, **kwargs):
async with aiohttp.ClientSession(loop=loop) as session:
response_ctx = session.request(method, url, **kwargs)
response = await response_ctx.__aenter__()
if output == "text":
content = await response.text()
elif output == "json":
content_type = content_type or "application/json"
content = await response.json(encoding=encoding, content_type=content_type)
elif output == "raw":
content = await response.read()
elif output == "stream":
content = await response.content.read()
response_ctx._resp.close()
await session.close()
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', **kwargs):
with aiohttp.ClientSession(loop=loop) as session:
response = yield from session.request(method, url, **kwargs) # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json() # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
return response, content
def aiohttp_app():
async def hello(request):
return aiohttp.web.Response(text="hello")
async def json(request):
return aiohttp.web.json_response({})
async def json_empty_body(request):
return aiohttp.web.json_response()
app = aiohttp.web.Application()
app.router.add_get("/", hello)
app.router.add_get("/json", json)
app.router.add_get("/json/empty", json_empty_body)
return app

View File

@@ -0,0 +1,13 @@
import aiohttp
import pytest
import vcr
@vcr.use_cassette()
@pytest.mark.asyncio
async def test_http(): # noqa: E999
async with aiohttp.ClientSession() as session:
url = 'https://httpbin.org/get'
params = {'ham': 'spam'}
resp = await session.get(url, params=params) # noqa: E999
assert (await resp.json())['args'] == {'ham': 'spam'} # noqa: E999

View File

@@ -1,41 +0,0 @@
interactions:
- request:
body: ''
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
host:
- httpbin.org
user-agent:
- python-httpx/0.23.0
method: GET
uri: https://httpbin.org/gzip
response:
content: "{\n \"gzipped\": true, \n \"headers\": {\n \"Accept\": \"*/*\",
\n \"Accept-Encoding\": \"gzip, deflate, br\", \n \"Host\": \"httpbin.org\",
\n \"User-Agent\": \"python-httpx/0.23.0\", \n \"X-Amzn-Trace-Id\": \"Root=1-62a62a8d-5f39b5c50c744da821d6ea99\"\n
\ }, \n \"method\": \"GET\", \n \"origin\": \"146.200.25.115\"\n}\n"
headers:
Access-Control-Allow-Credentials:
- 'true'
Access-Control-Allow-Origin:
- '*'
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Length:
- '230'
Content-Type:
- application/json
Date:
- Sun, 12 Jun 2022 18:03:57 GMT
Server:
- gunicorn/19.9.0
http_version: HTTP/1.1
status_code: 200
version: 1

View File

@@ -1,42 +0,0 @@
interactions:
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate, br
Connection:
- keep-alive
User-Agent:
- python-requests/2.28.0
method: GET
uri: https://httpbin.org/gzip
response:
body:
string: !!binary |
H4sIAKwrpmIA/z2OSwrCMBCG956izLIkfQSxkl2RogfQA9R2bIM1iUkqaOndnYDIrGa+/zELDB9l
LfYgg5uRwYhtj86DXKDuOrQBJKR5Cuy38kZ3pld6oHu0sqTH29QGZMnVkepgtMYuKKNJcEe0vJ3U
C4mcjI9hpaiygqaUW7ETFYGLR8frAXXE9h1Go7nD54w++FxkYp8VsDJ4IBH6E47NmVzGqUHFkn8g
rJsvp2omYs8AAAA=
headers:
Access-Control-Allow-Credentials:
- 'true'
Access-Control-Allow-Origin:
- '*'
Connection:
- Close
Content-Encoding:
- gzip
Content-Length:
- '182'
Content-Type:
- application/json
Date:
- Sun, 12 Jun 2022 18:08:44 GMT
Server:
- Pytest-HTTPBIN/0.1.0
status:
code: 200
message: great
version: 1

View File

@@ -1,41 +0,0 @@
interactions:
- request:
body: ''
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
host:
- httpbin.org
user-agent:
- python-httpx/0.12.1
method: GET
uri: https://mockbin.org/headers
response:
content: "{\n \"headers\": {\n \"Accept\": \"*/*\", \n \"Accept-Encoding\"\
: \"gzip, deflate, br\", \n \"Host\": \"httpbin.org\", \n \"User-Agent\"\
: \"python-httpx/0.12.1\", \n \"X-Amzn-Trace-Id\": \"Root=1-5ea778c9-ea76170da792abdbf7614067\"\
\n }\n}\n"
headers:
access-control-allow-credentials:
- 'true'
access-control-allow-origin:
- '*'
connection:
- keep-alive
content-length:
- '226'
content-type:
- application/json
date:
- Tue, 28 Apr 2020 00:28:57 GMT
server:
- gunicorn/19.9.0
via:
- my_own_proxy
http_version: HTTP/1.1
status_code: 200
version: 1

View File

@@ -1,464 +1,140 @@
import logging
import ssl
import urllib.parse
import pytest
import pytest_httpbin.certs
import yarl
import vcr
asyncio = pytest.importorskip("asyncio")
aiohttp = pytest.importorskip("aiohttp")
import asyncio # noqa: E402
import contextlib # noqa: E402
from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402
import pytest # noqa: E402
import vcr # noqa: E402
HTTPBIN_SSL_CONTEXT = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
from .aiohttp_utils import aiohttp_request # noqa: E402
try:
from .async_def import test_http # noqa: F401
except SyntaxError:
pass
def run_in_loop(fn):
async def wrapper():
return await fn(asyncio.get_running_loop())
return asyncio.run(wrapper())
with contextlib.closing(asyncio.new_event_loop()) as loop:
asyncio.set_event_loop(loop)
task = loop.create_task(fn(loop))
return loop.run_until_complete(task)
def request(method, url, output="text", **kwargs):
def request(method, url, output='text', **kwargs):
def run(loop):
return aiohttp_request(loop, method, url, output=output, **kwargs)
return run_in_loop(run)
def get(url, output="text", **kwargs):
return request("GET", url, output=output, **kwargs)
def get(url, output='text', **kwargs):
return request('GET', url, output=output, **kwargs)
def post(url, output="text", **kwargs):
return request("POST", url, output="text", **kwargs)
def post(url, output='text', **kwargs):
return request('POST', url, output='text', **kwargs)
@pytest.mark.online
def test_status(tmpdir, httpbin):
url = httpbin.url
@pytest.fixture(params=["https", "http"])
def scheme(request):
'''Fixture that returns both http and https.'''
return request.param
with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
def test_status(tmpdir, scheme):
url = scheme + '://httpbin.org'
with vcr.use_cassette(str(tmpdir.join('status.yaml'))):
response, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("status.yaml"))) as cassette:
with vcr.use_cassette(str(tmpdir.join('status.yaml'))) as cassette:
cassette_response, _ = get(url)
assert cassette_response.status == response.status
assert cassette.play_count == 1
@pytest.mark.online
@pytest.mark.parametrize("auth", [None, aiohttp.BasicAuth("vcrpy", "test")])
def test_headers(tmpdir, auth, httpbin):
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
response, _ = get(url, auth=auth)
def test_headers(tmpdir, scheme):
url = scheme + '://httpbin.org'
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
response, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cassette:
if auth is not None:
request = cassette.requests[0]
assert "AUTHORIZATION" in request.headers
cassette_response, _ = get(url, auth=auth)
assert cassette_response.headers.items() == response.headers.items()
assert cassette.play_count == 1
assert "istr" not in cassette.data[0]
assert "yarl.URL" not in cassette.data[0]
@pytest.mark.online
def test_case_insensitive_headers(tmpdir, httpbin):
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
_, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))) as cassette:
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cassette:
cassette_response, _ = get(url)
assert "Content-Type" in cassette_response.headers
assert "content-type" in cassette_response.headers
assert cassette_response.headers == response.headers
assert cassette.play_count == 1
@pytest.mark.online
def test_text(tmpdir, httpbin):
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
def test_text(tmpdir, scheme):
url = scheme + '://httpbin.org'
with vcr.use_cassette(str(tmpdir.join('text.yaml'))):
_, response_text = get(url)
with vcr.use_cassette(str(tmpdir.join("text.yaml"))) as cassette:
with vcr.use_cassette(str(tmpdir.join('text.yaml'))) as cassette:
_, cassette_response_text = get(url)
assert cassette_response_text == response_text
assert cassette.play_count == 1
@pytest.mark.online
def test_json(tmpdir, httpbin):
url = httpbin.url + "/json"
headers = {"Content-Type": "application/json"}
def test_json(tmpdir, scheme):
url = scheme + '://httpbin.org/get'
with vcr.use_cassette(str(tmpdir.join('json.yaml'))):
_, response_json = get(url, output='json')
with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
_, response_json = get(url, output="json", headers=headers)
with vcr.use_cassette(str(tmpdir.join("json.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", headers=headers)
with vcr.use_cassette(str(tmpdir.join('json.yaml'))) as cassette:
_, cassette_response_json = get(url, output='json')
assert cassette_response_json == response_json
assert cassette.play_count == 1
@pytest.mark.online
def test_binary(tmpdir, httpbin):
url = httpbin.url + "/image/png"
with vcr.use_cassette(str(tmpdir.join("binary.yaml"))):
_, response_binary = get(url, output="raw")
def test_binary(tmpdir, scheme):
url = scheme + '://httpbin.org/image/png'
with vcr.use_cassette(str(tmpdir.join('binary.yaml'))):
_, response_binary = get(url, output='raw')
with vcr.use_cassette(str(tmpdir.join("binary.yaml"))) as cassette:
_, cassette_response_binary = get(url, output="raw")
with vcr.use_cassette(str(tmpdir.join('binary.yaml'))) as cassette:
_, cassette_response_binary = get(url, output='raw')
assert cassette_response_binary == response_binary
assert cassette.play_count == 1
@pytest.mark.online
def test_stream(tmpdir, httpbin):
url = httpbin.url
def test_post(tmpdir, scheme):
data = {'key1': 'value1', 'key2': 'value2'}
url = scheme + '://httpbin.org/post'
with vcr.use_cassette(str(tmpdir.join('post.yaml'))):
_, response_json = post(url, data=data)
with vcr.use_cassette(str(tmpdir.join("stream.yaml"))):
_, body = get(url, output="raw") # Do not use stream here, as the stream is exhausted by vcr
with vcr.use_cassette(str(tmpdir.join("stream.yaml"))) as cassette:
_, cassette_body = get(url, output="stream")
assert cassette_body == body
assert cassette.play_count == 1
@pytest.mark.online
@pytest.mark.parametrize("body", ["data", "json"])
def test_post(tmpdir, body, caplog, httpbin):
caplog.set_level(logging.INFO)
data = {"key1": "value1", "key2": "value2"}
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
_, response_json = post(url, **{body: data})
with vcr.use_cassette(str(tmpdir.join("post.yaml"))) as cassette:
request = cassette.requests[0]
assert request.body == data
_, cassette_response_json = post(url, **{body: data})
assert cassette_response_json == response_json
assert cassette.play_count == 1
assert next(
(
log
for log in caplog.records
if log.getMessage() == f"<Request (POST) {url}> not in cassette, sending to real server"
),
None,
), "Log message not found."
@pytest.mark.online
def test_params(tmpdir, httpbin):
url = httpbin.url + "/get?d=d"
headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": 2, "c": "c"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, response_json = get(url, output="json", params=params, headers=headers)
assert response_json["args"] == {"a": "1", "b": "2", "c": "c", "d": "d"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", params=params, headers=headers)
with vcr.use_cassette(str(tmpdir.join('post.yaml'))) as cassette:
_, cassette_response_json = post(url, data=data)
assert cassette_response_json == response_json
assert cassette.play_count == 1
@pytest.mark.online
def test_params_same_url_distinct_params(tmpdir, httpbin):
url = httpbin.url + "/json"
headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": 2, "c": "c"}
def test_params(tmpdir, scheme):
url = scheme + '://httpbin.org/get'
params = {'a': 1, 'b': False, 'c': 'c'}
with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
_, response_json = get(url, output='json', params=params)
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, response_json = get(url, output="json", params=params, headers=headers)
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", params=params, headers=headers)
assert cassette_response_json == response_json
assert cassette.play_count == 1
other_params = {"other": "params"}
with (
vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette,
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
):
get(url, output="text", params=other_params)
@pytest.mark.online
def test_params_on_url(tmpdir, httpbin):
url = httpbin.url + "/get?a=1&b=foo"
headers = {"Content-Type": "application/json"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, response_json = get(url, output="json", headers=headers)
request = cassette.requests[0]
assert request.url == url
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", headers=headers)
request = cassette.requests[0]
assert request.url == url
with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
_, cassette_response_json = get(url, output='json', params=params)
assert cassette_response_json == response_json
assert cassette.play_count == 1
def test_aiohttp_test_client(aiohttp_client, tmpdir):
loop = asyncio.get_event_loop()
app = aiohttp_app()
url = "/"
client = loop.run_until_complete(aiohttp_client(app))
def test_params_same_url_distinct_params(tmpdir, scheme):
url = scheme + '://httpbin.org/get'
params = {'a': 1, 'b': False, 'c': 'c'}
with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
_, response_json = get(url, output='json', params=params)
with vcr.use_cassette(str(tmpdir.join("get.yaml"))):
response = loop.run_until_complete(client.get(url))
assert response.status == 200
response_text = loop.run_until_complete(response.text())
assert response_text == "hello"
response_text = loop.run_until_complete(response.text(errors="replace"))
assert response_text == "hello"
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
response = loop.run_until_complete(client.get(url))
request = cassette.requests[0]
assert request.url == str(client.make_url(url))
response_text = loop.run_until_complete(response.text())
assert response_text == "hello"
assert cassette.play_count == 1
def test_aiohttp_test_client_json(aiohttp_client, tmpdir):
loop = asyncio.get_event_loop()
app = aiohttp_app()
url = "/json/empty"
client = loop.run_until_complete(aiohttp_client(app))
with vcr.use_cassette(str(tmpdir.join("get.yaml"))):
response = loop.run_until_complete(client.get(url))
assert response.status == 200
response_json = loop.run_until_complete(response.json())
assert response_json is None
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
response = loop.run_until_complete(client.get(url))
request = cassette.requests[0]
assert request.url == str(client.make_url(url))
response_json = loop.run_until_complete(response.json())
assert response_json is None
assert cassette.play_count == 1
@pytest.mark.online
def test_redirect(tmpdir, httpbin):
url = httpbin.url + "/redirect/2"
with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))):
response, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))) as cassette:
cassette_response, _ = get(url)
assert cassette_response.status == response.status
assert len(cassette_response.history) == len(response.history)
assert len(cassette) == 3
assert cassette.play_count == 3
# Assert that the real response and the cassette response have a similar
# looking request_info.
assert cassette_response.request_info.url == response.request_info.url
assert cassette_response.request_info.method == response.request_info.method
assert cassette_response.request_info.headers.items() == response.request_info.headers.items()
assert cassette_response.request_info.real_url == response.request_info.real_url
@pytest.mark.online
def test_not_modified(tmpdir, httpbin):
"""It doesn't try to redirect on 304"""
url = httpbin.url + "/status/304"
with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))):
response, _ = get(url)
with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))) as cassette:
cassette_response, _ = get(url)
assert cassette_response.status == 304
assert response.status == 304
assert len(cassette_response.history) == len(response.history)
assert len(cassette) == 1
with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
_, cassette_response_json = get(url, output='json', params=params)
assert cassette_response_json == response_json
assert cassette.play_count == 1
@pytest.mark.online
def test_double_requests(tmpdir, httpbin):
"""We should capture, record, and replay all requests and response chains,
even if there are duplicate ones.
We should replay in the order we saw them.
"""
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
_, response_text1 = get(url, output="text")
_, response_text2 = get(url, output="text")
with vcr.use_cassette(str(tmpdir.join("text.yaml"))) as cassette:
resp, cassette_response_text = get(url, output="text")
assert resp.status == 200
assert cassette_response_text == response_text1
# We made only one request, so we should only play 1 recording.
assert cassette.play_count == 1
# Now make the second test to url
resp, cassette_response_text = get(url, output="text")
assert resp.status == 200
assert cassette_response_text == response_text2
# Now that we made both requests, we should have played both.
assert cassette.play_count == 2
def test_cookies(httpbin_both, tmpdir):
async def run(loop):
cookies_url = httpbin_both.url + (
"/response-headers?"
"set-cookie=" + urllib.parse.quote("cookie_1=val_1; Path=/") + "&"
"Set-Cookie=" + urllib.parse.quote("Cookie_2=Val_2; Path=/")
)
home_url = httpbin_both.url + "/"
tmp = str(tmpdir.join("cookies.yaml"))
req_cookies = {"Cookie_3": "Val_3"}
req_headers = {"Cookie": "Cookie_4=Val_4"}
# ------------------------- Record -------------------------- #
with vcr.use_cassette(tmp) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
home_resp = await session.get(
home_url,
cookies=req_cookies,
headers=req_headers,
ssl=HTTPBIN_SSL_CONTEXT,
)
assert cassette.play_count == 0
assert_responses(cookies_resp, home_resp)
# -------------------------- Play --------------------------- #
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
home_resp = await session.get(
home_url,
cookies=req_cookies,
headers=req_headers,
ssl=HTTPBIN_SSL_CONTEXT,
)
assert cassette.play_count == 2
assert_responses(cookies_resp, home_resp)
def assert_responses(cookies_resp, home_resp):
assert cookies_resp.cookies.get("cookie_1").value == "val_1"
assert cookies_resp.cookies.get("Cookie_2").value == "Val_2"
request_cookies = home_resp.request_info.headers["cookie"]
assert "cookie_1=val_1" in request_cookies
assert "Cookie_2=Val_2" in request_cookies
assert "Cookie_3=Val_3" in request_cookies
assert "Cookie_4=Val_4" in request_cookies
run_in_loop(run)
def test_cookies_redirect(httpbin_both, tmpdir):
async def run(loop):
# Sets cookie as provided by the query string and redirects
cookies_url = httpbin_both.url + "/cookies/set?Cookie_1=Val_1"
tmp = str(tmpdir.join("cookies.yaml"))
# ------------------------- Record -------------------------- #
with vcr.use_cassette(tmp) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
assert cookies["Cookie_1"].value == "Val_1"
assert cassette.play_count == 0
assert cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
# -------------------------- Play --------------------------- #
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
assert cookies["Cookie_1"].value == "Val_1"
assert cassette.play_count == 2
assert cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
# Assert that it's ignoring expiration date
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
cassette.responses[0]["headers"]["set-cookie"] = [
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
]
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
assert cookies["Cookie_1"].value == "Val_1"
run_in_loop(run)
@pytest.mark.online
def test_not_allow_redirects(tmpdir, httpbin):
url = httpbin + "/redirect-to?url=.%2F&status_code=308"
path = str(tmpdir.join("redirects.yaml"))
with vcr.use_cassette(path):
response, _ = get(url, allow_redirects=False)
assert response.url.path == "/redirect-to"
assert response.status == 308
with vcr.use_cassette(path) as cassette:
response, _ = get(url, allow_redirects=False)
assert response.url.path == "/redirect-to"
assert response.status == 308
assert cassette.play_count == 1
def test_filter_query_parameters(tmpdir, httpbin):
url = httpbin + "?password=secret"
path = str(tmpdir.join("query_param_filter.yaml"))
with vcr.use_cassette(path, filter_query_parameters=["password"]) as cassette:
get(url)
assert "password" not in cassette.requests[0].url
assert "secret" not in cassette.requests[0].url
with open(path) as f:
cassette_content = f.read()
assert "password" not in cassette_content
assert "secret" not in cassette_content
other_params = {'other': 'params'}
with vcr.use_cassette(str(tmpdir.join('get.yaml'))) as cassette:
response, cassette_response_text = get(url, output='text', params=other_params)
assert 'No match for the request' in cassette_response_text
assert response.status == 599

View File

@@ -1,29 +1,30 @@
"""Basic tests for cassettes"""
# -*- coding: utf-8 -*-
'''Basic tests for cassettes'''
# External imports
import os
from urllib.request import urlopen
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
def test_nonexistent_directory(tmpdir, httpbin):
"""If we load a cassette in a nonexistent directory, it can save ok"""
# Check to make sure directory doesn't exist
assert not os.path.exists(str(tmpdir.join("nonexistent")))
'''If we load a cassette in a nonexistent directory, it can save ok'''
# Check to make sure directory doesnt exist
assert not os.path.exists(str(tmpdir.join('nonexistent')))
# Run VCR to create dir and cassette file
with vcr.use_cassette(str(tmpdir.join("nonexistent", "cassette.yml"))):
with vcr.use_cassette(str(tmpdir.join('nonexistent', 'cassette.yml'))):
urlopen(httpbin.url).read()
# This should have made the file and the directory
assert os.path.exists(str(tmpdir.join("nonexistent", "cassette.yml")))
assert os.path.exists(str(tmpdir.join('nonexistent', 'cassette.yml')))
def test_unpatch(tmpdir, httpbin):
"""Ensure that our cassette gets unpatched when we're done"""
with vcr.use_cassette(str(tmpdir.join("unpatch.yaml"))) as cass:
'''Ensure that our cassette gets unpatched when we're done'''
with vcr.use_cassette(str(tmpdir.join('unpatch.yaml'))) as cass:
urlopen(httpbin.url).read()
# Make the same request, and assert that we haven't served any more
@@ -33,30 +34,30 @@ def test_unpatch(tmpdir, httpbin):
def test_basic_json_use(tmpdir, httpbin):
"""
'''
Ensure you can load a json serialized cassette
"""
test_fixture = str(tmpdir.join("synopsis.json"))
with vcr.use_cassette(test_fixture, serializer="json"):
'''
test_fixture = str(tmpdir.join('synopsis.json'))
with vcr.use_cassette(test_fixture, serializer='json'):
response = urlopen(httpbin.url).read()
assert b"HTTP Request &amp; Response Service" in response
assert b'difficult sometimes' in response
def test_patched_content(tmpdir, httpbin):
"""
'''
Ensure that what you pull from a cassette is what came from the
request
"""
with vcr.use_cassette(str(tmpdir.join("synopsis.yaml"))) as cass:
'''
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
response = urlopen(httpbin.url).read()
assert cass.play_count == 0
with vcr.use_cassette(str(tmpdir.join("synopsis.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
response2 = urlopen(httpbin.url).read()
assert cass.play_count == 1
cass._save(force=True)
with vcr.use_cassette(str(tmpdir.join("synopsis.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('synopsis.yaml'))) as cass:
response3 = urlopen(httpbin.url).read()
assert cass.play_count == 1
@@ -65,12 +66,12 @@ def test_patched_content(tmpdir, httpbin):
def test_patched_content_json(tmpdir, httpbin):
"""
'''
Ensure that what you pull from a json cassette is what came from the
request
"""
'''
testfile = str(tmpdir.join("synopsis.json"))
testfile = str(tmpdir.join('synopsis.json'))
with vcr.use_cassette(testfile) as cass:
response = urlopen(httpbin.url).read()

View File

@@ -0,0 +1,81 @@
import pytest
boto = pytest.importorskip("boto")
import boto # NOQA
import boto.iam # NOQA
from boto.s3.connection import S3Connection # NOQA
from boto.s3.key import Key # NOQA
import vcr # NOQA
try: # NOQA
from ConfigParser import DuplicateSectionError # NOQA
except ImportError: # NOQA
# python3
from configparser import DuplicateSectionError # NOQA
def test_boto_stubs(tmpdir):
with vcr.use_cassette(str(tmpdir.join('boto-stubs.yml'))):
# Perform the imports within the patched context so that
# CertValidatingHTTPSConnection refers to the patched version.
from boto.https_connection import CertValidatingHTTPSConnection
from vcr.stubs.boto_stubs import VCRCertValidatingHTTPSConnection
# Prove that the class was patched by the stub and that we can instantiate it.
assert issubclass(CertValidatingHTTPSConnection, VCRCertValidatingHTTPSConnection)
CertValidatingHTTPSConnection('hostname.does.not.matter')
def test_boto_without_vcr():
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
k = Key(s3_bucket)
k.key = 'test.txt'
k.set_contents_from_string('hello world i am a string')
def test_boto_medium_difficulty(tmpdir):
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
with vcr.use_cassette(str(tmpdir.join('boto-medium.yml'))):
k = Key(s3_bucket)
k.key = 'test.txt'
k.set_contents_from_string('hello world i am a string')
with vcr.use_cassette(str(tmpdir.join('boto-medium.yml'))):
k = Key(s3_bucket)
k.key = 'test.txt'
k.set_contents_from_string('hello world i am a string')
def test_boto_hardcore_mode(tmpdir):
with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))):
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
k = Key(s3_bucket)
k.key = 'test.txt'
k.set_contents_from_string('hello world i am a string')
with vcr.use_cassette(str(tmpdir.join('boto-hardcore.yml'))):
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket('boto-demo-1394171994') # a bucket you can access
k = Key(s3_bucket)
k.key = 'test.txt'
k.set_contents_from_string('hello world i am a string')
def test_boto_iam(tmpdir):
try:
boto.config.add_section('Boto')
except DuplicateSectionError:
pass
# Ensure that boto uses HTTPS
boto.config.set('Boto', 'is_secure', 'true')
# Ensure that boto uses CertValidatingHTTPSConnection
boto.config.set('Boto', 'https_validate_certificates', 'true')
with vcr.use_cassette(str(tmpdir.join('boto-iam.yml'))):
iam_conn = boto.iam.connect_to_region('universal')
iam_conn.get_all_users()
with vcr.use_cassette(str(tmpdir.join('boto-iam.yml'))):
iam_conn = boto.iam.connect_to_region('universal')
iam_conn.get_all_users()
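The boto tests above always drive vcr.use_cassette as a context manager; the same calls can also be wrapped with its decorator form. A hedged sketch (the cassette path is hypothetical, the 'universal' region and get_all_users() call are taken from the IAM test above, and credentials are assumed to come from the environment):

import boto.iam
import vcr

@vcr.use_cassette("/tmp/boto-iam-sketch.yml")  # hypothetical path
def list_iam_users():
    # Recorded on the first call, replayed from the cassette on later calls.
    return boto.iam.connect_to_region("universal").get_all_users()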

@@ -1,104 +1,67 @@
import os
import pytest
import vcr
boto3 = pytest.importorskip("boto3")
import botocore # noqa
import boto3 # NOQA
import vcr # NOQA
try:
from botocore import awsrequest # noqa
botocore_awsrequest = True
except ImportError:
botocore_awsrequest = False
# skip tests if boto does not use vendored requests anymore
# https://github.com/boto/botocore/pull/1495
boto3_skip_vendored_requests = pytest.mark.skipif(
botocore_awsrequest,
reason=f"botocore version {botocore.__version__} does not use vendored requests anymore.",
)
boto3_skip_awsrequest = pytest.mark.skipif(
not botocore_awsrequest,
reason=f"botocore version {botocore.__version__} still uses vendored requests.",
)
IAM_USER_NAME = "vcrpy"
bucket = 'boto3-demo-1337' # a bucket you can access
key = 'test/my_test.txt' # key with r+w access
content = 'hello world i am a string' # content to put in the test file
@pytest.fixture
def iam_client():
def _iam_client(boto3_session=None):
if boto3_session is None:
boto3_session = boto3.Session(
aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID", "default"),
aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY", "default"),
aws_session_token=None,
region_name=os.environ.get("AWS_DEFAULT_REGION", "default"),
)
return boto3_session.client("iam")
return _iam_client
def test_boto_stubs(tmpdir):
with vcr.use_cassette(str(tmpdir.join('boto3-stubs.yml'))):
# Perform the imports within the patched context so that
# HTTPConnection, VerifiedHTTPSConnection refers to the patched version.
from botocore.vendored.requests.packages.urllib3.connectionpool import \
HTTPConnection, VerifiedHTTPSConnection
from vcr.stubs.boto3_stubs import VCRRequestsHTTPConnection, VCRRequestsHTTPSConnection
# Prove that the class was patched by the stub and that we can instantiate it.
assert issubclass(HTTPConnection, VCRRequestsHTTPConnection)
assert issubclass(VerifiedHTTPSConnection, VCRRequestsHTTPSConnection)
HTTPConnection('hostname.does.not.matter')
VerifiedHTTPSConnection('hostname.does.not.matter')
@pytest.fixture
def get_user(iam_client):
def _get_user(client=None, user_name=IAM_USER_NAME):
if client is None:
# Default client set with fixture `iam_client`
client = iam_client()
return client.get_user(UserName=user_name)
def test_boto3_without_vcr():
s3_resource = boto3.resource('s3')
b = s3_resource.Bucket(bucket)
b.put_object(Key=key, Body=content)
return _get_user
# retrieve content to check it
o = s3_resource.Object(bucket, key).get()
# decode for python3
assert content == o['Body'].read().decode('utf-8')
@pytest.mark.skipif(
os.environ.get("TRAVIS_PULL_REQUEST") != "false",
reason="Encrypted Environment Variables from Travis Repository Settings"
" are disabled on PRs from forks. "
"https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions",
)
def test_boto_medium_difficulty(tmpdir, get_user):
with vcr.use_cassette(str(tmpdir.join("boto3-medium.yml"))):
response = get_user()
assert response["User"]["UserName"] == IAM_USER_NAME
def test_boto_medium_difficulty(tmpdir):
s3_resource = boto3.resource('s3')
b = s3_resource.Bucket(bucket)
with vcr.use_cassette(str(tmpdir.join('boto3-medium.yml'))):
b.put_object(Key=key, Body=content)
o = s3_resource.Object(bucket, key).get()
assert content == o['Body'].read().decode('utf-8')
with vcr.use_cassette(str(tmpdir.join("boto3-medium.yml"))) as cass:
response = get_user()
assert response["User"]["UserName"] == IAM_USER_NAME
with vcr.use_cassette(str(tmpdir.join('boto3-medium.yml'))) as cass:
b.put_object(Key=key, Body=content)
o = s3_resource.Object(bucket, key).get()
assert content == o['Body'].read().decode('utf-8')
assert cass.all_played
@pytest.mark.skipif(
os.environ.get("TRAVIS_PULL_REQUEST") != "false",
reason="Encrypted Environment Variables from Travis Repository Settings"
" are disabled on PRs from forks. "
"https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions",
)
def test_boto_hardcore_mode(tmpdir, iam_client, get_user):
with vcr.use_cassette(str(tmpdir.join("boto3-hardcore.yml"))):
ses = boto3.Session(
aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"),
aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
region_name=os.environ.get("AWS_DEFAULT_REGION"),
)
client = iam_client(ses)
response = get_user(client=client)
assert response["User"]["UserName"] == IAM_USER_NAME
def test_boto_hardcore_mode(tmpdir):
with vcr.use_cassette(str(tmpdir.join('boto3-hardcore.yml'))):
s3_resource = boto3.resource('s3')
b = s3_resource.Bucket(bucket)
b.put_object(Key=key, Body=content)
o = s3_resource.Object(bucket, key).get()
assert content == o['Body'].read().decode('utf-8')
with vcr.use_cassette(str(tmpdir.join("boto3-hardcore.yml"))) as cass:
ses = boto3.Session(
aws_access_key_id=os.environ.get("AWS_ACCESS_KEY_ID"),
aws_secret_access_key=os.environ.get("AWS_SECRET_ACCESS_KEY"),
aws_session_token=None,
region_name=os.environ.get("AWS_DEFAULT_REGION"),
)
client = iam_client(ses)
response = get_user(client=client)
assert response["User"]["UserName"] == IAM_USER_NAME
with vcr.use_cassette(str(tmpdir.join('boto3-hardcore.yml'))) as cass:
s3_resource = boto3.resource('s3')
b = s3_resource.Bucket(bucket)
b.put_object(Key=key, Body=content)
o = s3_resource.Object(bucket, key).get()
assert content == o['Body'].read().decode('utf-8')
assert cass.all_played
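Condensed, the boto3 tests above check a single property: the first cassette block records the IAM call, the second replays it and every recorded interaction gets used. A hedged sketch (the cassette path and user name are placeholders, credentials are assumed to come from the environment):

import boto3
import vcr

cassette_path = "/tmp/boto3-sketch.yml"   # hypothetical path

with vcr.use_cassette(cassette_path):
    boto3.Session().client("iam").get_user(UserName="vcrpy")   # recorded
with vcr.use_cassette(cassette_path) as cass:
    boto3.Session().client("iam").get_user(UserName="vcrpy")   # replayed
    assert cass.all_played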

@@ -1,59 +1,50 @@
import json
import os
from urllib.request import urlopen
import json
import pytest
import vcr
from vcr.cassette import Cassette
from six.moves.urllib.request import urlopen
@pytest.mark.online
def test_set_serializer_default_config(tmpdir, httpbin):
my_vcr = vcr.VCR(serializer="json")
my_vcr = vcr.VCR(serializer='json')
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
assert my_vcr.serializer == "json"
urlopen(httpbin.url)
with my_vcr.use_cassette(str(tmpdir.join('test.json'))):
assert my_vcr.serializer == 'json'
urlopen(httpbin.url + '/get')
with open(str(tmpdir.join("test.json"))) as f:
file_content = f.read()
assert file_content.endswith("\n")
assert json.loads(file_content)
with open(str(tmpdir.join('test.json'))) as f:
assert json.loads(f.read())
@pytest.mark.online
def test_default_set_cassette_library_dir(tmpdir, httpbin):
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join('subdir')))
with my_vcr.use_cassette("test.json"):
urlopen(httpbin.url)
with my_vcr.use_cassette('test.json'):
urlopen(httpbin.url + '/get')
assert os.path.exists(str(tmpdir.join("subdir").join("test.json")))
assert os.path.exists(str(tmpdir.join('subdir').join('test.json')))
@pytest.mark.online
def test_override_set_cassette_library_dir(tmpdir, httpbin):
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join('subdir')))
cld = str(tmpdir.join("subdir2"))
cld = str(tmpdir.join('subdir2'))
with my_vcr.use_cassette("test.json", cassette_library_dir=cld):
urlopen(httpbin.url)
with my_vcr.use_cassette('test.json', cassette_library_dir=cld):
urlopen(httpbin.url + '/get')
assert os.path.exists(str(tmpdir.join("subdir2").join("test.json")))
assert not os.path.exists(str(tmpdir.join("subdir").join("test.json")))
assert os.path.exists(str(tmpdir.join('subdir2').join('test.json')))
assert not os.path.exists(str(tmpdir.join('subdir').join('test.json')))
@pytest.mark.online
def test_override_match_on(tmpdir, httpbin):
my_vcr = vcr.VCR(match_on=["method"])
my_vcr = vcr.VCR(match_on=['method'])
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
with my_vcr.use_cassette(str(tmpdir.join('test.json'))):
urlopen(httpbin.url)
with my_vcr.use_cassette(str(tmpdir.join("test.json"))) as cass:
urlopen(httpbin.url)
with my_vcr.use_cassette(str(tmpdir.join('test.json'))) as cass:
urlopen(httpbin.url + '/get')
assert len(cass) == 1
assert cass.play_count == 1
@@ -62,43 +53,6 @@ def test_override_match_on(tmpdir, httpbin):
def test_missing_matcher():
my_vcr = vcr.VCR()
my_vcr.register_matcher("awesome", object)
with pytest.raises(KeyError), my_vcr.use_cassette("test.yaml", match_on=["notawesome"]):
pass
@pytest.mark.online
def test_dont_record_on_exception(tmpdir, httpbin):
my_vcr = vcr.VCR(record_on_exception=False)
@my_vcr.use_cassette(str(tmpdir.join("dontsave.yml")))
def some_test():
assert b"Not in content" in urlopen(httpbin.url)
with pytest.raises(AssertionError):
some_test()
assert not os.path.exists(str(tmpdir.join("dontsave.yml")))
# Make sure context decorator has the same behavior
with pytest.raises(AssertionError), my_vcr.use_cassette(str(tmpdir.join("dontsave2.yml"))):
assert b"Not in content" in urlopen(httpbin.url).read()
assert not os.path.exists(str(tmpdir.join("dontsave2.yml")))
def test_set_drop_unused_requests(tmpdir, httpbin):
my_vcr = vcr.VCR(drop_unused_requests=True)
file = str(tmpdir.join("test.yaml"))
with my_vcr.use_cassette(file):
urlopen(httpbin.url)
urlopen(httpbin.url + "/get")
cassette = Cassette.load(path=file)
assert len(cassette) == 2
with my_vcr.use_cassette(file):
urlopen(httpbin.url)
cassette = Cassette.load(path=file)
assert len(cassette) == 1
with pytest.raises(KeyError):
with my_vcr.use_cassette("test.yaml", match_on=['notawesome']):
pass
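The configuration tests above all exercise the same cascade: options passed to vcr.VCR() become instance-wide defaults, and keyword arguments to use_cassette() override them for a single cassette. A hedged sketch of that cascade (paths and values are illustrative only):

import vcr

my_vcr = vcr.VCR(
    serializer="json",                 # instance-wide default serializer
    cassette_library_dir="cassettes",  # default directory for relative cassette names
    match_on=["method"],               # default request matching
)

# Per-cassette keyword arguments win over the instance defaults:
with my_vcr.use_cassette("example.yaml", serializer="yaml", match_on=["uri", "method"]):
    pass  # requests made here use the overridden serializer and matchers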

@@ -1,23 +1,21 @@
"""Basic tests about save behavior"""
# -*- coding: utf-8 -*-
'''Basic tests about save behavior'''
# External imports
import os
import time
from urllib.request import urlopen
import pytest
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
@pytest.mark.online
def test_disk_saver_nowrite(tmpdir, httpbin):
"""
'''
Ensure that when you close a cassette without changing it, it doesn't
rewrite the file
"""
fname = str(tmpdir.join("synopsis.yaml"))
'''
fname = str(tmpdir.join('synopsis.yaml'))
with vcr.use_cassette(fname) as cass:
urlopen(httpbin.url).read()
assert cass.play_count == 0
@@ -32,13 +30,12 @@ def test_disk_saver_nowrite(tmpdir, httpbin):
assert last_mod == last_mod2
@pytest.mark.online
def test_disk_saver_write(tmpdir, httpbin):
"""
'''
Ensure that when you close a cassette after changing it, it does
rewrite the file
"""
fname = str(tmpdir.join("synopsis.yaml"))
'''
fname = str(tmpdir.join('synopsis.yaml'))
with vcr.use_cassette(fname) as cass:
urlopen(httpbin.url).read()
assert cass.play_count == 0
@@ -48,9 +45,9 @@ def test_disk_saver_write(tmpdir, httpbin):
# the mtime doesn't change
time.sleep(1)
with vcr.use_cassette(fname, record_mode=vcr.mode.ANY) as cass:
with vcr.use_cassette(fname, record_mode='any') as cass:
urlopen(httpbin.url).read()
urlopen(httpbin.url + "/get").read()
urlopen(httpbin.url + '/get').read()
assert cass.play_count == 1
assert cass.dirty
last_mod2 = os.path.getmtime(fname)
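The two tests above pin down when a cassette file is rewritten: only when the session recorded something new, i.e. when the cassette is dirty. A hedged sketch of that behaviour (the path and URL are placeholders):

import os
from urllib.request import urlopen

import vcr

fname = "/tmp/synopsis-sketch.yaml"      # hypothetical path
url = "http://httpbin.org/get"           # stand-in URL

with vcr.use_cassette(fname):
    urlopen(url).read()                  # recorded -> cassette is dirty -> file written
first_mtime = os.path.getmtime(fname)

with vcr.use_cassette(fname) as cass:
    urlopen(url).read()                  # replayed -> nothing new recorded
    assert not cass.dirty
assert os.path.getmtime(fname) == first_mtime   # file was not rewritten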

@@ -1,19 +1,18 @@
import base64
import json
from urllib.error import HTTPError
from urllib.parse import urlencode
from urllib.request import Request, urlopen
import pytest
from six.moves.urllib.request import urlopen, Request
from six.moves.urllib.parse import urlencode
from six.moves.urllib.error import HTTPError
import vcr
from ..assertions import assert_cassette_has_one_response, assert_is_json_bytes
import json
from assertions import assert_cassette_has_one_response, assert_is_json
def _request_with_auth(url, username, password):
request = Request(url)
base64string = base64.b64encode(username.encode("ascii") + b":" + password.encode("ascii"))
base64string = base64.b64encode(
username.encode('ascii') + b':' + password.encode('ascii')
)
request.add_header(b"Authorization", b"Basic " + base64string)
return urlopen(request)
@@ -23,149 +22,111 @@ def _find_header(cassette, header):
def test_filter_basic_auth(tmpdir, httpbin):
url = httpbin.url + "/basic-auth/user/passwd"
cass_file = str(tmpdir.join("basic_auth_filter.yaml"))
my_vcr = vcr.VCR(match_on=["uri", "method", "headers"])
url = httpbin.url + '/basic-auth/user/passwd'
cass_file = str(tmpdir.join('basic_auth_filter.yaml'))
my_vcr = vcr.VCR(match_on=['uri', 'method', 'headers'])
# 2 requests, one with auth failure and one with auth success
with my_vcr.use_cassette(cass_file, filter_headers=["authorization"]):
with my_vcr.use_cassette(cass_file, filter_headers=['authorization']):
with pytest.raises(HTTPError):
resp = _request_with_auth(url, "user", "wrongpasswd")
resp = _request_with_auth(url, 'user', 'wrongpasswd')
assert resp.getcode() == 401
resp = _request_with_auth(url, "user", "passwd")
resp = _request_with_auth(url, 'user', 'passwd')
assert resp.getcode() == 200
# make same 2 requests, this time both served from cassette.
with my_vcr.use_cassette(cass_file, filter_headers=["authorization"]) as cass:
with my_vcr.use_cassette(cass_file, filter_headers=['authorization']) as cass:
with pytest.raises(HTTPError):
resp = _request_with_auth(url, "user", "wrongpasswd")
resp = _request_with_auth(url, 'user', 'wrongpasswd')
assert resp.getcode() == 401
resp = _request_with_auth(url, "user", "passwd")
resp = _request_with_auth(url, 'user', 'passwd')
assert resp.getcode() == 200
# authorization header should not have been recorded
assert not _find_header(cass, "authorization")
assert not _find_header(cass, 'authorization')
assert len(cass) == 2
def test_filter_querystring(tmpdir, httpbin):
url = httpbin.url + "/?password=secret"
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, filter_query_parameters=["password"]):
url = httpbin.url + '/?foo=bar'
cass_file = str(tmpdir.join('filter_qs.yaml'))
with vcr.use_cassette(cass_file, filter_query_parameters=['foo']):
urlopen(url)
with vcr.use_cassette(cass_file, filter_query_parameters=["password"]) as cass:
with vcr.use_cassette(cass_file, filter_query_parameters=['foo']) as cass:
urlopen(url)
assert "password" not in cass.requests[0].url
assert "secret" not in cass.requests[0].url
with open(cass_file) as f:
cassette_content = f.read()
assert "password" not in cassette_content
assert "secret" not in cassette_content
assert 'foo' not in cass.requests[0].url
def test_filter_post_data(tmpdir, httpbin):
url = httpbin.url + "/post"
data = urlencode({"id": "secret", "foo": "bar"}).encode("utf-8")
cass_file = str(tmpdir.join("filter_pd.yaml"))
with vcr.use_cassette(cass_file, filter_post_data_parameters=["id"]):
url = httpbin.url + '/post'
data = urlencode({'id': 'secret', 'foo': 'bar'}).encode('utf-8')
cass_file = str(tmpdir.join('filter_pd.yaml'))
with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']):
urlopen(url, data)
with vcr.use_cassette(cass_file, filter_post_data_parameters=["id"]) as cass:
assert b"id=secret" not in cass.requests[0].body
with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']) as cass:
assert b'id=secret' not in cass.requests[0].body
def test_filter_json_post_data(tmpdir, httpbin):
data = json.dumps({"id": "secret", "foo": "bar"}).encode("utf-8")
request = Request(httpbin.url + "/post", data=data)
request.add_header("Content-Type", "application/json")
data = json.dumps({'id': 'secret', 'foo': 'bar'}).encode('utf-8')
request = Request(httpbin.url + '/post', data=data)
request.add_header('Content-Type', 'application/json')
cass_file = str(tmpdir.join("filter_jpd.yaml"))
with vcr.use_cassette(cass_file, filter_post_data_parameters=["id"]):
cass_file = str(tmpdir.join('filter_jpd.yaml'))
with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']):
urlopen(request)
with vcr.use_cassette(cass_file, filter_post_data_parameters=["id"]) as cass:
with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']) as cass:
assert b'"id": "secret"' not in cass.requests[0].body
def test_filter_callback(tmpdir, httpbin):
url = httpbin.url + "/get"
cass_file = str(tmpdir.join("basic_auth_filter.yaml"))
url = httpbin.url + '/get'
cass_file = str(tmpdir.join('basic_auth_filter.yaml'))
def before_record_cb(request):
if request.path != "/get":
if request.path != '/get':
return request
# Test the legacy keyword.
my_vcr = vcr.VCR(before_record=before_record_cb)
with my_vcr.use_cassette(cass_file, filter_headers=["authorization"]) as cass:
with my_vcr.use_cassette(cass_file, filter_headers=['authorization']) as cass:
urlopen(url)
assert len(cass) == 0
my_vcr = vcr.VCR(before_record_request=before_record_cb)
with my_vcr.use_cassette(cass_file, filter_headers=["authorization"]) as cass:
with my_vcr.use_cassette(cass_file, filter_headers=['authorization']) as cass:
urlopen(url)
assert len(cass) == 0
def test_decompress_gzip(tmpdir, httpbin):
url = httpbin.url + "/gzip"
request = Request(url, headers={"Accept-Encoding": ["gzip, deflate"]})
cass_file = str(tmpdir.join("gzip_response.yaml"))
url = httpbin.url + '/gzip'
request = Request(url, headers={'Accept-Encoding': ['gzip, deflate']})
cass_file = str(tmpdir.join('gzip_response.yaml'))
with vcr.use_cassette(cass_file, decode_compressed_response=True):
urlopen(request)
with vcr.use_cassette(cass_file) as cass:
decoded_response = urlopen(url).read()
assert_cassette_has_one_response(cass)
assert_is_json_bytes(decoded_response)
def test_decompress_empty_body(tmpdir, httpbin):
url = httpbin.url + "/gzip"
request = Request(url, headers={"Accept-Encoding": ["gzip, deflate"]}, method="HEAD")
cass_file = str(tmpdir.join("gzip_empty_response.yaml"))
with vcr.use_cassette(cass_file, decode_compressed_response=True):
response = urlopen(request).read()
with vcr.use_cassette(cass_file) as cass:
decoded_response = urlopen(request).read()
assert_cassette_has_one_response(cass)
assert decoded_response == response
assert_is_json(decoded_response)
def test_decompress_deflate(tmpdir, httpbin):
url = httpbin.url + "/deflate"
request = Request(url, headers={"Accept-Encoding": ["gzip, deflate"]})
cass_file = str(tmpdir.join("deflate_response.yaml"))
url = httpbin.url + '/deflate'
request = Request(url, headers={'Accept-Encoding': ['gzip, deflate']})
cass_file = str(tmpdir.join('deflate_response.yaml'))
with vcr.use_cassette(cass_file, decode_compressed_response=True):
urlopen(request)
with vcr.use_cassette(cass_file) as cass:
decoded_response = urlopen(url).read()
assert_cassette_has_one_response(cass)
assert_is_json_bytes(decoded_response)
assert_is_json(decoded_response)
def test_decompress_regular(tmpdir, httpbin):
"""Test that it doesn't try to decompress content that isn't compressed"""
url = httpbin.url + "/get"
cass_file = str(tmpdir.join("noncompressed_response.yaml"))
url = httpbin.url + '/get'
cass_file = str(tmpdir.join('noncompressed_response.yaml'))
with vcr.use_cassette(cass_file, decode_compressed_response=True):
urlopen(url)
with vcr.use_cassette(cass_file) as cass:
resp = urlopen(url).read()
assert_cassette_has_one_response(cass)
assert_is_json_bytes(resp)
def test_before_record_request_corruption(tmpdir, httpbin):
"""Modifying request in before_record_request should not affect outgoing request"""
def before_record(request):
request.headers.clear()
request.body = b""
return request
req = Request(
httpbin.url + "/post",
data=urlencode({"test": "exists"}).encode(),
headers={"X-Test": "exists"},
)
cass_file = str(tmpdir.join("modified_response.yaml"))
with vcr.use_cassette(cass_file, before_record_request=before_record):
resp = json.loads(urlopen(req).read())
assert resp["headers"]["X-Test"] == "exists"
assert resp["form"]["test"] == "exists"
assert_is_json(resp)
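All of the filter tests above share one idea: sensitive data is scrubbed before the interaction is written to the cassette, while the outgoing request itself is left untouched. A hedged sketch combining the header, query, post-data and callback filters (the field names and the /login path are illustrative, not taken from the tests):

import vcr

def drop_login_requests(request):
    # Returning None tells VCR not to record this request at all.
    if request.path == "/login":
        return None
    return request

my_vcr = vcr.VCR(
    filter_headers=["authorization"],          # header removed from recordings
    filter_query_parameters=["password"],      # query parameter removed
    filter_post_data_parameters=["api_key"],   # form field removed
    before_record_request=drop_login_requests,
)

with my_vcr.use_cassette("/tmp/filtered-sketch.yaml"):
    pass  # requests made here are recorded with the fields above stripped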

@@ -1,13 +1,15 @@
"""Integration tests with httplib2"""
from urllib.parse import urlencode
# -*- coding: utf-8 -*-
'''Integration tests with httplib2'''
# External imports
from six.moves.urllib_parse import urlencode
import pytest
import pytest_httpbin.certs
# Internal imports
import vcr
from ..assertions import assert_cassette_has_one_response
from assertions import assert_cassette_has_one_response
httplib2 = pytest.importorskip("httplib2")
@@ -17,90 +19,92 @@ def http():
Returns an httplib2 HTTP instance
with the certificate replaced by the httpbin one.
"""
kwargs = {"ca_certs": pytest_httpbin.certs.where()}
return httplib2.Http(**kwargs)
return httplib2.Http(ca_certs=pytest_httpbin.certs.where())
def test_response_code(tmpdir, httpbin_both):
"""Ensure we can read a response code from a fetch"""
'''Ensure we can read a response code from a fetch'''
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
resp, _ = http().request(url)
code = resp.status
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
resp, _ = http().request(url)
assert code == resp.status
def test_random_body(httpbin_both, tmpdir):
"""Ensure we can read the content, and that it's served from cache"""
url = httpbin_both.url + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
'''Ensure we can read the content, and that it's served from cache'''
url = httpbin_both.url + '/bytes/1024'
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
_, content = http().request(url)
body = content
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
_, content = http().request(url)
assert body == content
def test_response_headers(tmpdir, httpbin_both):
"""Ensure we can get information from the response"""
'''Ensure we can get information from the response'''
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
resp, _ = http().request(url)
headers = resp.items()
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
resp, _ = http().request(url)
assert set(headers) == set(resp.items())
@pytest.mark.online
def test_effective_url(tmpdir, httpbin):
"""Ensure that the effective_url is captured"""
url = httpbin.url + "/redirect-to?url=.%2F&status_code=301"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
def test_effective_url(tmpdir, httpbin_both):
'''Ensure that the effective_url is captured'''
url = httpbin_both.url + '/redirect-to?url=/html'
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
resp, _ = http().request(url)
effective_url = resp["content-location"]
assert effective_url == httpbin.url + "/"
effective_url = resp['content-location']
assert effective_url == httpbin_both + '/html'
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
resp, _ = http().request(url)
assert effective_url == resp["content-location"]
assert effective_url == resp['content-location']
def test_multiple_requests(tmpdir, httpbin_both):
"""Ensure that we can cache multiple requests"""
urls = [httpbin_both.url, httpbin_both.url, httpbin_both.url + "/get", httpbin_both.url + "/bytes/1024"]
with vcr.use_cassette(str(tmpdir.join("multiple.yaml"))) as cass:
'''Ensure that we can cache multiple requests'''
urls = [
httpbin_both.url,
httpbin_both.url,
httpbin_both.url + '/get',
httpbin_both.url + '/bytes/1024',
]
with vcr.use_cassette(str(tmpdir.join('multiple.yaml'))) as cass:
[http().request(url) for url in urls]
assert len(cass) == len(urls)
def test_get_data(tmpdir, httpbin_both):
"""Ensure that it works with query data"""
data = urlencode({"some": 1, "data": "here"})
url = httpbin_both.url + "/get?" + data
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
'''Ensure that it works with query data'''
data = urlencode({'some': 1, 'data': 'here'})
url = httpbin_both.url + '/get?' + data
with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
_, res1 = http().request(url)
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
_, res2 = http().request(url)
assert res1 == res2
def test_post_data(tmpdir, httpbin_both):
"""Ensure that it works when posting data"""
data = urlencode({"some": 1, "data": "here"})
url = httpbin_both.url + "/post"
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
'''Ensure that it works when posting data'''
data = urlencode({'some': 1, 'data': 'here'})
url = httpbin_both.url + '/post'
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
_, res1 = http().request(url, "POST", data)
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
_, res2 = http().request(url, "POST", data)
assert res1 == res2
@@ -108,13 +112,13 @@ def test_post_data(tmpdir, httpbin_both):
def test_post_unicode_data(tmpdir, httpbin_both):
"""Ensure that it works when posting unicode data"""
data = urlencode({"snowman": "☃".encode()})
url = httpbin_both.url + "/post"
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
'''Ensure that it works when posting unicode data'''
data = urlencode({'snowman': u'☃'.encode('utf-8')})
url = httpbin_both.url + '/post'
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
_, res1 = http().request(url, "POST", data)
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
_, res2 = http().request(url, "POST", data)
assert res1 == res2
@@ -122,11 +126,11 @@ def test_post_unicode_data(tmpdir, httpbin_both):
def test_cross_scheme(tmpdir, httpbin, httpbin_secure):
"""Ensure that requests between schemes are treated separately"""
'''Ensure that requests between schemes are treated separately'''
# First fetch a url under https, and then again under http, and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
http().request(httpbin_secure.url)
http().request(httpbin.url)
assert len(cass) == 2
@@ -134,17 +138,17 @@ def test_cross_scheme(tmpdir, httpbin, httpbin_secure):
def test_decorator(tmpdir, httpbin_both):
"""Test the decorator version of VCR.py"""
'''Test the decorator version of VCR.py'''
url = httpbin_both.url
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
@vcr.use_cassette(str(tmpdir.join('atts.yaml')))
def inner1():
resp, _ = http().request(url)
return resp["status"]
return resp['status']
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
@vcr.use_cassette(str(tmpdir.join('atts.yaml')))
def inner2():
resp, _ = http().request(url)
return resp["status"]
return resp['status']
assert inner1() == inner2()

@@ -1,362 +0,0 @@
import os
import pytest
import vcr
from ..assertions import assert_is_json_bytes
asyncio = pytest.importorskip("asyncio")
httpx = pytest.importorskip("httpx")
@pytest.fixture(params=["https", "http"])
def scheme(request):
"""Fixture that returns both http and https."""
return request.param
class BaseDoRequest:
_client_class = None
def __init__(self, *args, **kwargs):
self._client_args = args
self._client_kwargs = kwargs
self._client_kwargs["follow_redirects"] = self._client_kwargs.get("follow_redirects", True)
def _make_client(self):
return self._client_class(*self._client_args, **self._client_kwargs)
class DoSyncRequest(BaseDoRequest):
_client_class = httpx.Client
def __enter__(self):
self._client = self._make_client()
return self
def __exit__(self, *args):
self._client.close()
del self._client
@property
def client(self):
try:
return self._client
except AttributeError as e:
raise ValueError('To access sync client, use "with do_request() as client"') from e
def __call__(self, *args, **kwargs):
if hasattr(self, "_client"):
return self.client.request(*args, timeout=60, **kwargs)
# Use one-time context and dispose of the client afterwards
with self:
return self.client.request(*args, timeout=60, **kwargs)
def stream(self, *args, **kwargs):
if hasattr(self, "_client"):
with self.client.stream(*args, **kwargs) as response:
return b"".join(response.iter_bytes())
# Use one-time context and dispose of the client afterwards
with self, self.client.stream(*args, **kwargs) as response:
return b"".join(response.iter_bytes())
class DoAsyncRequest(BaseDoRequest):
_client_class = httpx.AsyncClient
def __enter__(self):
# Need to manage both loop and client, because client's implementation
# will fail if the loop is closed before the client's end of life.
self._loop = asyncio.new_event_loop()
asyncio.set_event_loop(self._loop)
self._client = self._make_client()
self._loop.run_until_complete(self._client.__aenter__())
return self
def __exit__(self, *args):
try:
self._loop.run_until_complete(self._client.__aexit__(*args))
finally:
del self._client
self._loop.close()
del self._loop
@property
def client(self):
try:
return self._client
except AttributeError as e:
raise ValueError('To access async client, use "with do_request() as client"') from e
def __call__(self, *args, **kwargs):
if hasattr(self, "_loop"):
return self._loop.run_until_complete(self.client.request(*args, **kwargs))
# Use one-time context and dispose of the loop/client afterwards
with self:
return self._loop.run_until_complete(self.client.request(*args, **kwargs))
async def _get_stream(self, *args, **kwargs):
async with self.client.stream(*args, **kwargs) as response:
content = b""
async for c in response.aiter_bytes():
content += c
return content
def stream(self, *args, **kwargs):
if hasattr(self, "_loop"):
return self._loop.run_until_complete(self._get_stream(*args, **kwargs))
# Use one-time context and dispose of the loop/client afterwards
with self:
return self._loop.run_until_complete(self._get_stream(*args, **kwargs))
def pytest_generate_tests(metafunc):
if "do_request" in metafunc.fixturenames:
metafunc.parametrize("do_request", [DoAsyncRequest, DoSyncRequest])
@pytest.fixture
def yml(tmpdir, request):
return str(tmpdir.join(request.function.__name__ + ".yaml"))
@pytest.mark.online
def test_status(tmpdir, httpbin, do_request):
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
response = do_request()("GET", url)
with vcr.use_cassette(str(tmpdir.join("status.yaml"))) as cassette:
cassette_response = do_request()("GET", url)
assert cassette_response.status_code == response.status_code
assert cassette.play_count == 1
@pytest.mark.online
def test_case_insensitive_headers(tmpdir, httpbin, do_request):
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
do_request()("GET", url)
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))) as cassette:
cassette_response = do_request()("GET", url)
assert "Content-Type" in cassette_response.headers
assert "content-type" in cassette_response.headers
assert cassette.play_count == 1
@pytest.mark.online
def test_content(tmpdir, httpbin, do_request):
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("content.yaml"))):
response = do_request()("GET", url)
with vcr.use_cassette(str(tmpdir.join("content.yaml"))) as cassette:
cassette_response = do_request()("GET", url)
assert cassette_response.content == response.content
assert cassette.play_count == 1
@pytest.mark.online
def test_json(tmpdir, httpbin, do_request):
url = httpbin.url + "/json"
with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
response = do_request()("GET", url)
with vcr.use_cassette(str(tmpdir.join("json.yaml"))) as cassette:
cassette_response = do_request()("GET", url)
assert cassette_response.json() == response.json()
assert cassette.play_count == 1
@pytest.mark.online
def test_params_same_url_distinct_params(tmpdir, httpbin, do_request):
url = httpbin.url + "/get"
headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": False, "c": "c"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
response = do_request()("GET", url, params=params, headers=headers)
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
cassette_response = do_request()("GET", url, params=params, headers=headers)
assert cassette_response.request.url == response.request.url
assert cassette_response.json() == response.json()
assert cassette.play_count == 1
params = {"other": "params"}
with (
vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette,
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
):
do_request()("GET", url, params=params, headers=headers)
@pytest.mark.online
def test_redirect(httpbin, yml, do_request):
url = httpbin.url + "/redirect-to"
response = do_request()("GET", url)
with vcr.use_cassette(yml):
response = do_request()("GET", url, params={"url": "./get", "status_code": 302})
with vcr.use_cassette(yml) as cassette:
cassette_response = do_request()("GET", url, params={"url": "./get", "status_code": 302})
assert cassette_response.status_code == response.status_code
assert len(cassette_response.history) == len(response.history)
assert len(cassette) == 2
assert cassette.play_count == 2
# Assert that the real response and the cassette response have a similar
# looking request_info.
assert cassette_response.request.url == response.request.url
assert cassette_response.request.method == response.request.method
assert cassette_response.request.headers.items() == response.request.headers.items()
@pytest.mark.online
@pytest.mark.parametrize("url", ["https://github.com/kevin1024/vcrpy/issues/" + str(i) for i in range(3, 6)])
def test_simple_fetching(do_request, yml, url):
with vcr.use_cassette(yml):
do_request()("GET", url)
with vcr.use_cassette(yml) as cassette:
cassette_response = do_request()("GET", url)
assert str(cassette_response.request.url) == url
assert cassette.play_count == 1
@pytest.mark.online
def test_cookies(tmpdir, httpbin, do_request):
def client_cookies(client):
return list(client.client.cookies)
def response_cookies(response):
return list(response.cookies)
url = httpbin.url + "/cookies/set"
params = {"k1": "v1", "k2": "v2"}
with do_request(params=params, follow_redirects=False) as client:
assert client_cookies(client) == []
testfile = str(tmpdir.join("cookies.yml"))
with vcr.use_cassette(testfile):
r1 = client("GET", url)
assert response_cookies(r1) == ["k1", "k2"]
r2 = client("GET", url)
assert response_cookies(r2) == ["k1", "k2"]
assert client_cookies(client) == ["k1", "k2"]
with do_request(params=params, follow_redirects=False) as new_client:
assert client_cookies(new_client) == []
with vcr.use_cassette(testfile) as cassette:
cassette_response = new_client("GET", url)
assert cassette.play_count == 1
assert response_cookies(cassette_response) == ["k1", "k2"]
assert client_cookies(new_client) == ["k1", "k2"]
@pytest.mark.online
def test_stream(tmpdir, httpbin, do_request):
url = httpbin.url + "/stream-bytes/512"
testfile = str(tmpdir.join("stream.yml"))
with vcr.use_cassette(testfile):
response_content = do_request().stream("GET", url)
assert len(response_content) == 512
with vcr.use_cassette(testfile) as cassette:
cassette_content = do_request().stream("GET", url)
assert cassette_content == response_content
assert len(cassette_content) == 512
assert cassette.play_count == 1
# Regular cassette formats support the status reason,
# but the old HTTPX cassette format does not.
@pytest.mark.parametrize(
"cassette_name,reason",
[
("requests", "great"),
("httpx_old_format", "OK"),
],
)
def test_load_cassette_format(do_request, cassette_name, reason):
mydir = os.path.dirname(os.path.realpath(__file__))
yml = f"{mydir}/cassettes/gzip_{cassette_name}.yaml"
url = "https://httpbin.org/gzip"
with vcr.use_cassette(yml) as cassette:
cassette_response = do_request()("GET", url)
assert str(cassette_response.request.url) == url
assert cassette.play_count == 1
# Should be able to load up the JSON inside,
# regardless of whether the content is gzipped
# in the cassette or not.
json = cassette_response.json()
assert json["method"] == "GET", json
assert cassette_response.status_code == 200
assert cassette_response.reason_phrase == reason
def test_gzip__decode_compressed_response_false(tmpdir, httpbin, do_request):
"""
Ensure that httpx is able to automatically decompress the response body.
"""
for _ in range(2): # one for recording, one for re-playing
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cassette:
response = do_request()("GET", httpbin + "/gzip")
assert response.headers["content-encoding"] == "gzip" # i.e. not removed
# The content stored in the cassette should be gzipped.
assert cassette.responses[0]["body"]["string"][:2] == b"\x1f\x8b"
assert_is_json_bytes(response.content) # i.e. uncompressed bytes
def test_gzip__decode_compressed_response_true(do_request, tmpdir, httpbin):
url = httpbin + "/gzip"
expected_response = do_request()("GET", url)
expected_content = expected_response.content
assert expected_response.headers["content-encoding"] == "gzip" # self-test
with vcr.use_cassette(
str(tmpdir.join("decode_compressed.yaml")),
decode_compressed_response=True,
) as cassette:
r = do_request()("GET", url)
assert r.headers["content-encoding"] == "gzip" # i.e. not removed
content_length = r.headers["content-length"]
assert r.content == expected_content
# Has the cassette body been decompressed?
cassette_response_body = cassette.responses[0]["body"]["string"]
assert isinstance(cassette_response_body, str)
# Content should be JSON.
assert cassette_response_body[0:1] == "{"
with vcr.use_cassette(str(tmpdir.join("decode_compressed.yaml")), decode_compressed_response=True):
r = httpx.get(url)
assert "content-encoding" not in r.headers # i.e. removed
assert r.content == expected_content
# As the content is uncompressed, it should have a bigger
# length than the compressed version.
assert r.headers["content-length"] > content_length
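The two gzip tests above distinguish what ends up inside the cassette. A hedged sketch of both cases (the URL is a stand-in gzip-encoded endpoint; httpx transparently decompresses the body it hands back either way):

import httpx
import vcr

url = "https://httpbin.org/gzip"   # stand-in gzip-encoded endpoint

# Default: the cassette stores the compressed bytes as received on the wire.
with vcr.use_cassette("/tmp/gzip-raw-sketch.yaml") as cassette:
    httpx.get(url)
    assert cassette.responses[0]["body"]["string"][:2] == b"\x1f\x8b"   # gzip magic bytes

# decode_compressed_response=True: the cassette stores the decoded text instead.
with vcr.use_cassette("/tmp/gzip-decoded-sketch.yaml", decode_compressed_response=True) as cassette:
    httpx.get(url)
    assert isinstance(cassette.responses[0]["body"]["string"], str)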

@@ -1,7 +1,6 @@
from six.moves.urllib.request import urlopen
import socket
from contextlib import contextmanager
from urllib.request import urlopen
import vcr
@@ -16,53 +15,59 @@ def overridden_dns(overrides):
def fake_getaddrinfo(*args, **kwargs):
if args[0] in overrides:
address = overrides[args[0]]
return [(2, 1, 6, "", (address, args[1]))]
return [(2, 1, 6, '', (address, args[1]))]
return real_getaddrinfo(*args, **kwargs)
socket.getaddrinfo = fake_getaddrinfo
yield
socket.getaddrinfo = real_getaddrinfo
def test_ignore_localhost(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with overridden_dns({'httpbin.org': '127.0.0.1'}):
cass_file = str(tmpdir.join('filter_qs.yaml'))
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
urlopen(f"http://localhost:{httpbin.port}/")
urlopen('http://localhost:{}/'.format(httpbin.port))
assert len(cass) == 0
urlopen(f"http://httpbin.org:{httpbin.port}/")
urlopen('http://httpbin.org:{}/'.format(httpbin.port))
assert len(cass) == 1
def test_ignore_httpbin(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_hosts=["httpbin.org"]) as cass:
urlopen(f"http://httpbin.org:{httpbin.port}/")
with overridden_dns({'httpbin.org': '127.0.0.1'}):
cass_file = str(tmpdir.join('filter_qs.yaml'))
with vcr.use_cassette(
cass_file,
ignore_hosts=['httpbin.org']
) as cass:
urlopen('http://httpbin.org:{}/'.format(httpbin.port))
assert len(cass) == 0
urlopen(f"http://localhost:{httpbin.port}/")
urlopen('http://localhost:{}/'.format(httpbin.port))
assert len(cass) == 1
def test_ignore_localhost_and_httpbin(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_hosts=["httpbin.org"], ignore_localhost=True) as cass:
urlopen(f"http://httpbin.org:{httpbin.port}")
urlopen(f"http://localhost:{httpbin.port}")
with overridden_dns({'httpbin.org': '127.0.0.1'}):
cass_file = str(tmpdir.join('filter_qs.yaml'))
with vcr.use_cassette(
cass_file,
ignore_hosts=['httpbin.org'],
ignore_localhost=True
) as cass:
urlopen('http://httpbin.org:{}'.format(httpbin.port))
urlopen('http://localhost:{}'.format(httpbin.port))
assert len(cass) == 0
def test_ignore_localhost_twice(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with overridden_dns({'httpbin.org': '127.0.0.1'}):
cass_file = str(tmpdir.join('filter_qs.yaml'))
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
urlopen(f"http://localhost:{httpbin.port}")
urlopen('http://localhost:{}'.format(httpbin.port))
assert len(cass) == 0
urlopen(f"http://httpbin.org:{httpbin.port}")
urlopen('http://httpbin.org:{}'.format(httpbin.port))
assert len(cass) == 1
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
assert len(cass) == 1
urlopen(f"http://localhost:{httpbin.port}")
urlopen(f"http://httpbin.org:{httpbin.port}")
urlopen('http://localhost:{}'.format(httpbin.port))
urlopen('http://httpbin.org:{}'.format(httpbin.port))
assert len(cass) == 1
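A hedged sketch of the ignore options those tests exercise: traffic to ignored hosts is neither recorded nor matched, so it never shows up in the cassette. The hosts and port below are placeholders:

from urllib.request import urlopen

import vcr

with vcr.use_cassette(
    "/tmp/ignore-sketch.yaml",
    ignore_localhost=True,
    ignore_hosts=["internal.example.com"],   # hypothetical host to keep out of cassettes
) as cass:
    urlopen("http://localhost:8080/")        # hypothetical local service; not recorded
    urlopen("http://example.com/")           # recorded as usual
    assert len(cass) == 1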

@@ -1,41 +1,48 @@
from urllib.request import urlopen
import pytest
import vcr
import pytest
from six.moves.urllib.request import urlopen
DEFAULT_URI = "http://httpbin.org/get?p1=q1&p2=q2" # base uri for testing
DEFAULT_URI = 'http://httpbin.org/get?p1=q1&p2=q2' # base uri for testing
def _replace_httpbin(uri, httpbin, httpbin_secure):
return uri.replace("http://httpbin.org", httpbin.url).replace("https://httpbin.org", httpbin_secure.url)
return uri.replace('http://httpbin.org', httpbin.url).replace('https://httpbin.org', httpbin_secure.url)
@pytest.fixture
def cassette(tmpdir, httpbin, httpbin_secure):
"""
Helper fixture used to prepare the cassette
Helper fixture used to prepare the cassete
returns path to the recorded cassette
"""
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
cassette_path = str(tmpdir.join("test.yml"))
with vcr.use_cassette(cassette_path, record_mode=vcr.mode.ALL):
cassette_path = str(tmpdir.join('test.yml'))
with vcr.use_cassette(cassette_path, record_mode='all'):
urlopen(default_uri)
return cassette_path
@pytest.mark.parametrize(
"matcher, matching_uri, not_matching_uri",
[
("uri", "http://httpbin.org/get?p1=q1&p2=q2", "http://httpbin.org/get?p2=q2&p1=q1"),
("scheme", "http://google.com/post?a=b", "https://httpbin.org/get?p1=q1&p2=q2"),
("host", "https://httpbin.org/post?a=b", "http://google.com/get?p1=q1&p2=q2"),
("path", "https://google.com/get?a=b", "http://httpbin.org/post?p1=q1&p2=q2"),
("query", "https://google.com/get?p2=q2&p1=q1", "http://httpbin.org/get?p1=q1&a=b"),
],
)
@pytest.mark.parametrize("matcher, matching_uri, not_matching_uri", [
('uri',
'http://httpbin.org/get?p1=q1&p2=q2',
'http://httpbin.org/get?p2=q2&p1=q1'),
('scheme',
'http://google.com/post?a=b',
'https://httpbin.org/get?p1=q1&p2=q2'),
('host',
'https://httpbin.org/post?a=b',
'http://google.com/get?p1=q1&p2=q2'),
('path',
'https://google.com/get?a=b',
'http://httpbin.org/post?p1=q1&p2=q2'),
('query',
'https://google.com/get?p2=q2&p1=q1',
'http://httpbin.org/get?p1=q1&a=b')
])
def test_matchers(httpbin, httpbin_secure, cassette, matcher, matching_uri, not_matching_uri):
matching_uri = _replace_httpbin(matching_uri, httpbin, httpbin_secure)
not_matching_uri = _replace_httpbin(not_matching_uri, httpbin, httpbin_secure)
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
@@ -51,39 +58,33 @@ def test_matchers(httpbin, httpbin_secure, cassette, matcher, matching_uri, not_
assert cass.play_count == 1
# play cassette with not matching on uri, it should fail
with (
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
vcr.use_cassette(cassette, match_on=[matcher]) as cass,
):
urlopen(not_matching_uri)
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette, match_on=[matcher]) as cass:
urlopen(not_matching_uri)
def test_method_matcher(cassette, httpbin, httpbin_secure):
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
# play cassette with matching on method
with vcr.use_cassette(cassette, match_on=["method"]) as cass:
urlopen("https://google.com/get?a=b")
with vcr.use_cassette(cassette, match_on=['method']) as cass:
urlopen('https://google.com/get?a=b')
assert cass.play_count == 1
# should fail if method does not match
with (
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
vcr.use_cassette(cassette, match_on=["method"]) as cass,
):
# is a POST request
urlopen(default_uri, data=b"")
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette, match_on=['method']) as cass:
# is a POST request
urlopen(default_uri, data=b'')
@pytest.mark.parametrize(
"uri",
(
DEFAULT_URI,
"http://httpbin.org/get?p2=q2&p1=q1",
"http://httpbin.org/get?p2=q2&p1=q1",
),
)
@pytest.mark.parametrize("uri", [
DEFAULT_URI,
'http://httpbin.org/get?p2=q2&p1=q1',
'http://httpbin.org/get?p2=q2&p1=q1',
])
def test_default_matcher_matches(cassette, uri, httpbin, httpbin_secure):
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
with vcr.use_cassette(cassette) as cass:
@@ -91,23 +92,22 @@ def test_default_matcher_matches(cassette, uri, httpbin, httpbin_secure):
assert cass.play_count == 1
@pytest.mark.parametrize(
"uri",
[
"https://httpbin.org/get?p1=q1&p2=q2",
"http://google.com/get?p1=q1&p2=q2",
"http://httpbin.org/post?p1=q1&p2=q2",
"http://httpbin.org/get?p1=q1&a=b",
],
)
@pytest.mark.parametrize("uri", [
'https://httpbin.org/get?p1=q1&p2=q2',
'http://google.com/get?p1=q1&p2=q2',
'http://httpbin.org/post?p1=q1&p2=q2',
'http://httpbin.org/get?p1=q1&a=b'
])
def test_default_matcher_does_not_match(cassette, uri, httpbin, httpbin_secure):
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException), vcr.use_cassette(cassette):
urlopen(uri)
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette):
urlopen(uri)
def test_default_matcher_does_not_match_on_method(cassette, httpbin, httpbin_secure):
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException), vcr.use_cassette(cassette):
# is a POST request
urlopen(default_uri, data=b"")
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
with vcr.use_cassette(cassette):
# is a POST request
urlopen(default_uri, data=b'')
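A hedged sketch of the matcher behaviour these tests cover: a recorded interaction is replayed only when every matcher listed in match_on agrees, and a non-matching request against an existing cassette raises CannotOverwriteExistingCassetteException. The cassette path and URLs below are placeholders:

from urllib.request import urlopen

import pytest
import vcr

cassette = "/tmp/matchers-sketch.yml"        # hypothetical path
with vcr.use_cassette(cassette, record_mode=vcr.mode.ALL):
    urlopen("http://httpbin.org/get?p1=q1")  # record one interaction

# Matching only on host: a different path on the same host is still replayed.
with vcr.use_cassette(cassette, match_on=["host"]) as cass:
    urlopen("http://httpbin.org/anything")
    assert cass.play_count == 1

# Matching on query: a different query string cannot be served from the cassette.
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
    with vcr.use_cassette(cassette, match_on=["query"]):
        urlopen("http://httpbin.org/get?other=1")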

@@ -1,23 +1,20 @@
from urllib.request import urlopen
import pytest
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException
from six.moves.urllib.request import urlopen
def test_making_extra_request_raises_exception(tmpdir, httpbin):
# make two requests in the first request that are considered
# identical (since the match is based on method)
with vcr.use_cassette(str(tmpdir.join("test.json")), match_on=["method"]):
urlopen(httpbin.url + "/status/200")
urlopen(httpbin.url + "/status/201")
with vcr.use_cassette(str(tmpdir.join('test.json')), match_on=['method']):
urlopen(httpbin.url + '/status/200')
urlopen(httpbin.url + '/status/201')
# Now, try to make three requests. The first two should return the
# correct status codes in order, and the third should raise an
# exception.
with vcr.use_cassette(str(tmpdir.join("test.json")), match_on=["method"]):
assert urlopen(httpbin.url + "/status/200").getcode() == 200
assert urlopen(httpbin.url + "/status/201").getcode() == 201
with pytest.raises(CannotOverwriteExistingCassetteException):
urlopen(httpbin.url + "/status/200")
with vcr.use_cassette(str(tmpdir.join('test.json')), match_on=['method']):
assert urlopen(httpbin.url + '/status/200').getcode() == 200
assert urlopen(httpbin.url + '/status/201').getcode() == 201
with pytest.raises(Exception):
urlopen(httpbin.url + '/status/200')

@@ -1,107 +0,0 @@
"""Test using a proxy."""
import asyncio
import http.server
import socketserver
import threading
from urllib.request import urlopen
import pytest
import vcr
# Conditional imports
requests = pytest.importorskip("requests")
class Proxy(http.server.SimpleHTTPRequestHandler):
"""
Simple proxy server.
(Inspired by: http://effbot.org/librarybook/simplehttpserver.htm).
"""
def do_GET(self):
upstream_response = urlopen(self.path)
try:
status = upstream_response.status
headers = upstream_response.headers.items()
except AttributeError:
# In Python 2 the response is an addinfourl instance.
status = upstream_response.code
headers = upstream_response.info().items()
self.log_request(status)
self.send_response_only(status, upstream_response.msg)
for header in headers:
self.send_header(*header)
self.end_headers()
self.copyfile(upstream_response, self.wfile)
def do_CONNECT(self):
host, port = self.path.split(":")
asyncio.run(self._tunnel(host, port, self.connection))
async def _tunnel(self, host, port, client_sock):
target_r, target_w = await asyncio.open_connection(host=host, port=port)
self.send_response(http.HTTPStatus.OK)
self.end_headers()
source_r, source_w = await asyncio.open_connection(sock=client_sock)
async def channel(reader, writer):
while True:
data = await reader.read(1024)
if not data:
break
writer.write(data)
await writer.drain()
writer.close()
await writer.wait_closed()
await asyncio.gather(
channel(target_r, source_w),
channel(source_r, target_w),
)
@pytest.fixture(scope="session")
def proxy_server():
with socketserver.ThreadingTCPServer(("", 0), Proxy) as httpd:
proxy_process = threading.Thread(target=httpd.serve_forever)
proxy_process.start()
yield "http://{}:{}".format(*httpd.server_address)
httpd.shutdown()
proxy_process.join()
def test_use_proxy(tmpdir, httpbin, proxy_server):
"""Ensure that it works with a proxy."""
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
response = requests.get(httpbin.url, proxies={"http": proxy_server})
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})
assert cassette_response.headers == response.headers
assert cassette.play_count == 1
def test_use_https_proxy(tmpdir, httpbin_secure, proxy_server):
"""Ensure that it works with an HTTPS proxy."""
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
response = requests.get(httpbin_secure.url, proxies={"https": proxy_server})
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
cassette_response = requests.get(
httpbin_secure.url,
proxies={"https": proxy_server},
)
assert cassette_response.headers == response.headers
assert cassette.play_count == 1
# The cassette URL points to httpbin, not the proxy
assert cassette.requests[0].url == httpbin_secure.url + "/"
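The proxy tests boil down to one property: the proxy is only a transport detail, so the cassette records the target URL rather than the proxy address. A hedged sketch (the proxy address is a placeholder for a reachable HTTP proxy):

import requests
import vcr

proxies = {"http": "http://127.0.0.1:3128"}   # hypothetical local proxy

with vcr.use_cassette("/tmp/proxy-sketch.yaml") as cassette:
    requests.get("http://example.com/", proxies=proxies)
    assert cassette.requests[0].url == "http://example.com/"   # target URL, not the proxy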

@@ -1,44 +1,41 @@
from urllib.request import urlopen
import pytest
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException
from six.moves.urllib.request import urlopen
def test_once_record_mode(tmpdir, httpbin):
testfile = str(tmpdir.join("recordmode.yml"))
with vcr.use_cassette(testfile, record_mode=vcr.mode.ONCE):
testfile = str(tmpdir.join('recordmode.yml'))
with vcr.use_cassette(testfile, record_mode="once"):
# cassette file doesn't exist, so create.
urlopen(httpbin.url).read()
with vcr.use_cassette(testfile, record_mode=vcr.mode.ONCE):
with vcr.use_cassette(testfile, record_mode="once"):
# make the same request again
urlopen(httpbin.url).read()
# the first time, it's played from the cassette.
# but, try to access something else from the same cassette, and an
# exception is raised.
with pytest.raises(CannotOverwriteExistingCassetteException):
urlopen(httpbin.url + "/get").read()
with pytest.raises(Exception):
urlopen(httpbin.url + '/get').read()
def test_once_record_mode_two_times(tmpdir, httpbin):
testfile = str(tmpdir.join("recordmode.yml"))
with vcr.use_cassette(testfile, record_mode=vcr.mode.ONCE):
testfile = str(tmpdir.join('recordmode.yml'))
with vcr.use_cassette(testfile, record_mode="once"):
# get two of the same file
urlopen(httpbin.url).read()
urlopen(httpbin.url).read()
with vcr.use_cassette(testfile, record_mode=vcr.mode.ONCE):
with vcr.use_cassette(testfile, record_mode="once"):
# do it again
urlopen(httpbin.url).read()
urlopen(httpbin.url).read()
def test_once_mode_three_times(tmpdir, httpbin):
testfile = str(tmpdir.join("recordmode.yml"))
with vcr.use_cassette(testfile, record_mode=vcr.mode.ONCE):
testfile = str(tmpdir.join('recordmode.yml'))
with vcr.use_cassette(testfile, record_mode="once"):
# get three of the same file
urlopen(httpbin.url).read()
urlopen(httpbin.url).read()
@@ -46,13 +43,13 @@ def test_once_mode_three_times(tmpdir, httpbin):
def test_new_episodes_record_mode(tmpdir, httpbin):
testfile = str(tmpdir.join("recordmode.yml"))
testfile = str(tmpdir.join('recordmode.yml'))
with vcr.use_cassette(testfile, record_mode=vcr.mode.NEW_EPISODES):
with vcr.use_cassette(testfile, record_mode="new_episodes"):
# cassette file doesn't exist, so create.
urlopen(httpbin.url).read()
with vcr.use_cassette(testfile, record_mode=vcr.mode.NEW_EPISODES) as cass:
with vcr.use_cassette(testfile, record_mode="new_episodes") as cass:
# make the same request again
urlopen(httpbin.url).read()
@@ -60,8 +57,8 @@ def test_new_episodes_record_mode(tmpdir, httpbin):
assert cass.all_played
# in the "new_episodes" record mode, we can add more requests to
# a cassette without repercussions.
urlopen(httpbin.url + "/get").read()
# a cassette without repurcussions.
urlopen(httpbin.url + '/get').read()
# one of the responses has been played
assert cass.play_count == 1
@@ -69,19 +66,19 @@ def test_new_episodes_record_mode(tmpdir, httpbin):
# not all responses have been played
assert not cass.all_played
with vcr.use_cassette(testfile, record_mode=vcr.mode.NEW_EPISODES) as cass:
with vcr.use_cassette(testfile, record_mode="new_episodes") as cass:
# the cassette should now have 2 responses
assert len(cass.responses) == 2
def test_new_episodes_record_mode_two_times(tmpdir, httpbin):
testfile = str(tmpdir.join("recordmode.yml"))
url = httpbin.url + "/bytes/1024"
with vcr.use_cassette(testfile, record_mode=vcr.mode.NEW_EPISODES):
testfile = str(tmpdir.join('recordmode.yml'))
url = httpbin.url + '/bytes/1024'
with vcr.use_cassette(testfile, record_mode="new_episodes"):
# cassette file doesn't exist, so create.
original_first_response = urlopen(url).read()
with vcr.use_cassette(testfile, record_mode=vcr.mode.NEW_EPISODES):
with vcr.use_cassette(testfile, record_mode="new_episodes"):
# make the same request again
assert urlopen(url).read() == original_first_response
@@ -89,30 +86,30 @@ def test_new_episodes_record_mode_two_times(tmpdir, httpbin):
# to the cassette without repercussions
original_second_response = urlopen(url).read()
with vcr.use_cassette(testfile, record_mode=vcr.mode.ONCE):
with vcr.use_cassette(testfile, record_mode="once"):
# make the same request again
assert urlopen(url).read() == original_first_response
assert urlopen(url).read() == original_second_response
# now that we are back in once mode, this should raise
# an error.
with pytest.raises(CannotOverwriteExistingCassetteException):
with pytest.raises(Exception):
urlopen(url).read()
def test_all_record_mode(tmpdir, httpbin):
testfile = str(tmpdir.join("recordmode.yml"))
testfile = str(tmpdir.join('recordmode.yml'))
with vcr.use_cassette(testfile, record_mode=vcr.mode.ALL):
with vcr.use_cassette(testfile, record_mode="all"):
# cassette file doesn't exist, so create.
urlopen(httpbin.url).read()
with vcr.use_cassette(testfile, record_mode=vcr.mode.ALL) as cass:
with vcr.use_cassette(testfile, record_mode="all") as cass:
# make the same request again
urlopen(httpbin.url).read()
# in the "all" record mode, we can add more requests to
# a cassette without repercussions.
urlopen(httpbin.url + "/get").read()
# a cassette without repurcussions.
urlopen(httpbin.url + '/get').read()
# The cassette was never actually played, even though it existed.
# that's because, in "all" mode, the requests all go directly to
@@ -123,25 +120,23 @@ def test_all_record_mode(tmpdir, httpbin):
def test_none_record_mode(tmpdir, httpbin):
# Cassette file doesn't exist, yet we are trying to make a request.
# raise hell.
testfile = str(tmpdir.join("recordmode.yml"))
with (
vcr.use_cassette(testfile, record_mode=vcr.mode.NONE),
pytest.raises(CannotOverwriteExistingCassetteException),
):
urlopen(httpbin.url).read()
testfile = str(tmpdir.join('recordmode.yml'))
with vcr.use_cassette(testfile, record_mode="none"):
with pytest.raises(Exception):
urlopen(httpbin.url).read()
def test_none_record_mode_with_existing_cassette(tmpdir, httpbin):
# create a cassette file
testfile = str(tmpdir.join("recordmode.yml"))
testfile = str(tmpdir.join('recordmode.yml'))
with vcr.use_cassette(testfile, record_mode=vcr.mode.ALL):
with vcr.use_cassette(testfile, record_mode="all"):
urlopen(httpbin.url).read()
# play from cassette file
with vcr.use_cassette(testfile, record_mode=vcr.mode.NONE) as cass:
with vcr.use_cassette(testfile, record_mode="none") as cass:
urlopen(httpbin.url).read()
assert cass.play_count == 1
# but if I try to hit the net, raise an exception.
with pytest.raises(CannotOverwriteExistingCassetteException):
urlopen(httpbin.url + "/get").read()
with pytest.raises(Exception):
urlopen(httpbin.url + '/get').read()
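For reference, a hedged summary of the four record modes these tests walk through, written against the vcr.mode enum that the updated tests use:

import vcr

# vcr.mode.ONCE          record only if the cassette file does not exist, otherwise replay
# vcr.mode.NEW_EPISODES  replay known requests and append any unknown ones
# vcr.mode.ALL           always hit the network and re-record everything
# vcr.mode.NONE          never record; unknown requests raise
#                        CannotOverwriteExistingCassetteException

with vcr.use_cassette("/tmp/modes-sketch.yml", record_mode=vcr.mode.NEW_EPISODES):
    pass  # requests here replay when known and are appended when new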

@@ -1,8 +1,5 @@
from urllib.request import urlopen
import pytest
import vcr
from six.moves.urllib.request import urlopen
def true_matcher(r1, r2):
@@ -13,29 +10,27 @@ def false_matcher(r1, r2):
return False
@pytest.mark.online
def test_registered_true_matcher(tmpdir, httpbin):
my_vcr = vcr.VCR()
my_vcr.register_matcher("true", true_matcher)
testfile = str(tmpdir.join("test.yml"))
with my_vcr.use_cassette(testfile, match_on=["true"]):
my_vcr.register_matcher('true', true_matcher)
testfile = str(tmpdir.join('test.yml'))
with my_vcr.use_cassette(testfile, match_on=['true']):
# These 2 different urls are stored as the same request
urlopen(httpbin.url)
urlopen(httpbin.url + "/get")
urlopen(httpbin.url + '/get')
with my_vcr.use_cassette(testfile, match_on=["true"]):
with my_vcr.use_cassette(testfile, match_on=['true']):
# I can get the response twice even though I only asked for it once
urlopen(httpbin.url)
urlopen(httpbin.url)
urlopen(httpbin.url + '/get')
urlopen(httpbin.url + '/get')
@pytest.mark.online
def test_registered_false_matcher(tmpdir, httpbin):
my_vcr = vcr.VCR()
my_vcr.register_matcher("false", false_matcher)
testfile = str(tmpdir.join("test.yml"))
with my_vcr.use_cassette(testfile, match_on=["false"]) as cass:
my_vcr.register_matcher('false', false_matcher)
testfile = str(tmpdir.join('test.yml'))
with my_vcr.use_cassette(testfile, match_on=['false']) as cass:
# These 2 different urls are stored as different requests
urlopen(httpbin.url)
urlopen(httpbin.url + "/get")
urlopen(httpbin.url + '/get')
assert len(cass) == 2
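The matcher tests above register callables by name; a minimal sketch of the same pattern with a hypothetical URI-only matcher (cassette path and URL are illustrative):

import vcr
from urllib.request import urlopen

def uri_only(r1, r2):
    # A matcher receives both requests and returns True when they should
    # be treated as the same recorded interaction.
    return r1.uri == r2.uri

my_vcr = vcr.VCR()
my_vcr.register_matcher("uri_only", uri_only)
with my_vcr.use_cassette("fixtures/custom_matcher.yaml", match_on=["uri_only"]):
    urlopen("http://httpbin.org/get")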

View File

@@ -1,87 +1,55 @@
"""Tests for cassettes with custom persistence"""
# -*- coding: utf-8 -*-
'''Tests for cassettes with custom persistence'''
# External imports
import os
from urllib.request import urlopen
import pytest
from six.moves.urllib.request import urlopen
# Internal imports
import vcr
from vcr.persisters.filesystem import CassetteDecodeError, CassetteNotFoundError, FilesystemPersister
from vcr.persisters.filesystem import FilesystemPersister
class CustomFilesystemPersister:
"""Behaves just like default FilesystemPersister but adds .test extension
to the cassette file"""
class CustomFilesystemPersister(object):
'''Behaves just like default FilesystemPersister but adds .test extension
to the cassette file'''
@staticmethod
def load_cassette(cassette_path, serializer):
cassette_path += ".test"
cassette_path += '.test'
return FilesystemPersister.load_cassette(cassette_path, serializer)
@staticmethod
def save_cassette(cassette_path, cassette_dict, serializer):
cassette_path += ".test"
FilesystemPersister.save_cassette(cassette_path, cassette_dict, serializer)
class BadPersister(FilesystemPersister):
"""A bad persister that raises different errors."""
@staticmethod
def load_cassette(cassette_path, serializer):
if "nonexistent" in cassette_path:
raise CassetteNotFoundError()
elif "encoding" in cassette_path:
raise CassetteDecodeError()
else:
raise ValueError("buggy persister")
cassette_path += '.test'
FilesystemPersister.save_cassette(cassette_path, cassette_dict,
serializer)
def test_save_cassette_with_custom_persister(tmpdir, httpbin):
"""Ensure you can save a cassette using custom persister"""
'''Ensure you can save a cassette using custom persister'''
my_vcr = vcr.VCR()
my_vcr.register_persister(CustomFilesystemPersister)
# Check to make sure directory doesn't exist
assert not os.path.exists(str(tmpdir.join("nonexistent")))
# Check to make sure directory doesn't exist
assert not os.path.exists(str(tmpdir.join('nonexistent')))
# Run VCR to create dir and cassette file using new save_cassette callback
with my_vcr.use_cassette(str(tmpdir.join("nonexistent", "cassette.yml"))):
with my_vcr.use_cassette(str(tmpdir.join('nonexistent', 'cassette.yml'))):
urlopen(httpbin.url).read()
# Callback should have made the file and the directory
assert os.path.exists(str(tmpdir.join("nonexistent", "cassette.yml.test")))
assert os.path.exists(str(tmpdir.join('nonexistent', 'cassette.yml.test')))
def test_load_cassette_with_custom_persister(tmpdir, httpbin):
"""
'''
Ensure you can load a cassette using custom persister
"""
'''
my_vcr = vcr.VCR()
my_vcr.register_persister(CustomFilesystemPersister)
test_fixture = str(tmpdir.join("synopsis.json.test"))
test_fixture = str(tmpdir.join('synopsis.json.test'))
with my_vcr.use_cassette(test_fixture, serializer="json"):
with my_vcr.use_cassette(test_fixture, serializer='json'):
response = urlopen(httpbin.url).read()
assert b"HTTP Request &amp; Response Service" in response
def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
"""
Ensure expected errors from persister are swallowed while unexpected ones
are passed up the call stack.
"""
my_vcr = vcr.VCR()
my_vcr.register_persister(BadPersister)
with my_vcr.use_cassette("bad/nonexistent") as cass:
assert len(cass) == 0
with my_vcr.use_cassette("bad/encoding") as cass:
assert len(cass) == 0
with pytest.raises(ValueError), my_vcr.use_cassette("bad/buggy") as cass:
pass
assert b'difficult sometimes' in response
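A condensed sketch of the persister hook these tests rely on, mirroring CustomFilesystemPersister above; the ".bak" suffix is chosen purely for illustration:

import vcr
from vcr.persisters.filesystem import FilesystemPersister

class SuffixPersister:
    # Delegates to the default filesystem persister but stores cassettes
    # under an extra suffix.
    @staticmethod
    def load_cassette(cassette_path, serializer):
        return FilesystemPersister.load_cassette(cassette_path + ".bak", serializer)

    @staticmethod
    def save_cassette(cassette_path, cassette_dict, serializer):
        FilesystemPersister.save_cassette(cassette_path + ".bak", cassette_dict, serializer)

my_vcr = vcr.VCR()
my_vcr.register_persister(SuffixPersister)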

View File

@@ -1,7 +1,7 @@
import vcr
class MockSerializer:
class MockSerializer(object):
def __init__(self):
self.serialize_count = 0
self.deserialize_count = 0
@@ -10,7 +10,7 @@ class MockSerializer:
def deserialize(self, cassette_string):
self.serialize_count += 1
self.cassette_string = cassette_string
return {"interactions": []}
return {'interactions': []}
def serialize(self, cassette_dict):
self.deserialize_count += 1
@@ -20,13 +20,13 @@ class MockSerializer:
def test_registered_serializer(tmpdir):
ms = MockSerializer()
my_vcr = vcr.VCR()
my_vcr.register_serializer("mock", ms)
tmpdir.join("test.mock").write("test_data")
with my_vcr.use_cassette(str(tmpdir.join("test.mock")), serializer="mock"):
my_vcr.register_serializer('mock', ms)
tmpdir.join('test.mock').write('test_data')
with my_vcr.use_cassette(str(tmpdir.join('test.mock')), serializer='mock'):
# Serializer deserialized once
assert ms.serialize_count == 1
# and serialized the test data string
assert ms.cassette_string == "test_data"
assert ms.cassette_string == 'test_data'
# and hasn't serialized yet
assert ms.deserialize_count == 0
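For reference, a toy serializer in the same shape as MockSerializer above; it assumes the cassette data handed to serialize()/deserialize() is already plain, JSON-friendly data, and the registered name "jsonlike" is illustrative:

import json
import vcr

class JsonLikeSerializer:
    # A serializer only needs serialize() and deserialize().
    def serialize(self, cassette_dict):
        return json.dumps(cassette_dict)

    def deserialize(self, cassette_string):
        return json.loads(cassette_string)

my_vcr = vcr.VCR()
my_vcr.register_serializer("jsonlike", JsonLikeSerializer())
# Cassettes are then read and written with serializer="jsonlike".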

View File

@@ -1,20 +1,19 @@
from urllib.request import urlopen
import vcr
from six.moves.urllib.request import urlopen
def test_recorded_request_uri_with_redirected_request(tmpdir, httpbin):
with vcr.use_cassette(str(tmpdir.join("test.yml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('test.yml'))) as cass:
assert len(cass) == 0
urlopen(httpbin.url + "/redirect/3")
assert cass.requests[0].uri == httpbin.url + "/redirect/3"
assert cass.requests[3].uri == httpbin.url + "/get"
urlopen(httpbin.url + '/redirect/3')
assert cass.requests[0].uri == httpbin.url + '/redirect/3'
assert cass.requests[3].uri == httpbin.url + '/get'
assert len(cass) == 4
def test_records_multiple_header_values(tmpdir, httpbin):
with vcr.use_cassette(str(tmpdir.join("test.yml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('test.yml'))) as cass:
assert len(cass) == 0
urlopen(httpbin.url + "/response-headers?foo=bar&foo=baz")
urlopen(httpbin.url + '/response-headers?foo=bar&foo=baz')
assert len(cass) == 1
assert cass.responses[0]["headers"]["foo"] == ["bar", "baz"]
assert cass.responses[0]['headers']['foo'] == ['bar', 'baz']
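These tests lean on the cassette's request/response inspection API; a minimal sketch of the same introspection (cassette path and URL are illustrative):

import vcr
from urllib.request import urlopen

with vcr.use_cassette("fixtures/redirects.yaml") as cass:
    urlopen("http://httpbin.org/redirect/3")
    # Every hop of the redirect chain is stored as its own interaction.
    print(len(cass), cass.requests[0].uri, cass.requests[-1].uri)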

View File

@@ -1,85 +1,86 @@
"""Test requests' interaction with vcr"""
# -*- coding: utf-8 -*-
'''Test requests' interaction with vcr'''
import platform
import pytest
import sys
import vcr
from ..assertions import assert_cassette_empty, assert_is_json_bytes
from assertions import assert_cassette_empty, assert_is_json
requests = pytest.importorskip("requests")
from requests.exceptions import ConnectionError # noqa E402
def test_status_code(httpbin_both, tmpdir):
"""Ensure that we can read the status code"""
url = httpbin_both.url + "/"
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
'''Ensure that we can read the status code'''
url = httpbin_both.url + '/'
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
status_code = requests.get(url).status_code
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
assert status_code == requests.get(url).status_code
def test_headers(httpbin_both, tmpdir):
"""Ensure that we can read the headers back"""
url = httpbin_both + "/"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
'''Ensure that we can read the headers back'''
url = httpbin_both + '/'
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
headers = requests.get(url).headers
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
assert headers == requests.get(url).headers
def test_body(tmpdir, httpbin_both):
"""Ensure the responses are all identical enough"""
url = httpbin_both + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
'''Ensure the responses are all identical enough'''
url = httpbin_both + '/bytes/1024'
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
content = requests.get(url).content
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
assert content == requests.get(url).content
def test_get_empty_content_type_json(tmpdir, httpbin_both):
"""Ensure GET with application/json content-type and empty request body doesn't crash"""
url = httpbin_both + "/status/200"
headers = {"Content-Type": "application/json"}
'''Ensure GET with application/json content-type and empty request body doesn't crash'''
url = httpbin_both + '/status/200'
headers = {'Content-Type': 'application/json'}
with vcr.use_cassette(str(tmpdir.join("get_empty_json.yaml")), match_on=("body",)):
with vcr.use_cassette(str(tmpdir.join('get_empty_json.yaml')), match_on=('body',)):
status = requests.get(url, headers=headers).status_code
with vcr.use_cassette(str(tmpdir.join("get_empty_json.yaml")), match_on=("body",)):
with vcr.use_cassette(str(tmpdir.join('get_empty_json.yaml')), match_on=('body',)):
assert status == requests.get(url, headers=headers).status_code
def test_effective_url(tmpdir, httpbin_both):
"""Ensure that the effective_url is captured"""
url = httpbin_both.url + "/redirect-to?url=/html"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
'''Ensure that the effective_url is captured'''
url = httpbin_both.url + '/redirect-to?url=/html'
with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
effective_url = requests.get(url).url
assert effective_url == httpbin_both.url + "/html"
assert effective_url == httpbin_both.url + '/html'
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
assert effective_url == requests.get(url).url
def test_auth(tmpdir, httpbin_both):
"""Ensure that we can handle basic auth"""
auth = ("user", "passwd")
url = httpbin_both + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
'''Ensure that we can handle basic auth'''
auth = ('user', 'passwd')
url = httpbin_both + '/basic-auth/user/passwd'
with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
one = requests.get(url, auth=auth)
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
two = requests.get(url, auth=auth)
assert one.content == two.content
assert one.status_code == two.status_code
def test_auth_failed(tmpdir, httpbin_both):
"""Ensure that we can save failed auth statuses"""
auth = ("user", "wrongwrongwrong")
url = httpbin_both + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
'''Ensure that we can save failed auth statuses'''
auth = ('user', 'wrongwrongwrong')
url = httpbin_both + '/basic-auth/user/passwd'
with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
# Ensure that this is empty to begin with
assert_cassette_empty(cass)
one = requests.get(url, auth=auth)
@@ -89,39 +90,58 @@ def test_auth_failed(tmpdir, httpbin_both):
def test_post(tmpdir, httpbin_both):
"""Ensure that we can post and cache the results"""
data = {"key1": "value1", "key2": "value2"}
url = httpbin_both + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
'''Ensure that we can post and cache the results'''
data = {'key1': 'value1', 'key2': 'value2'}
url = httpbin_both + '/post'
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
req1 = requests.post(url, data).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
req2 = requests.post(url, data).content
assert req1 == req2
def test_post_chunked_binary(tmpdir, httpbin):
"""Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str."""
data1 = iter([b"data", b"to", b"send"])
data2 = iter([b"data", b"to", b"send"])
url = httpbin.url + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
'''Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str.'''
data1 = iter([b'data', b'to', b'send'])
data2 = iter([b'data', b'to', b'send'])
url = httpbin.url + '/post'
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
req1 = requests.post(url, data1).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
req2 = requests.post(url, data2).content
assert req1 == req2
@pytest.mark.xfail('sys.version_info >= (3, 6)', strict=True, raises=ConnectionError)
@pytest.mark.xfail((3, 5) < sys.version_info < (3, 6) and
platform.python_implementation() == 'CPython',
reason='Fails on CPython 3.5')
def test_post_chunked_binary_secure(tmpdir, httpbin_secure):
'''Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str.'''
data1 = iter([b'data', b'to', b'send'])
data2 = iter([b'data', b'to', b'send'])
url = httpbin_secure.url + '/post'
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
req1 = requests.post(url, data1).content
print(req1)
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
req2 = requests.post(url, data2).content
assert req1 == req2
def test_redirects(tmpdir, httpbin_both):
"""Ensure that we can handle redirects"""
url = httpbin_both + "/redirect-to?url=bytes/1024"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
'''Ensure that we can handle redirects'''
url = httpbin_both + '/redirect-to?url=bytes/1024'
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
content = requests.get(url).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))) as cass:
assert content == requests.get(url).content
# Ensure that we've now cached *two* responses. One for the redirect
# and one for the final fetch
@@ -129,184 +149,136 @@ def test_redirects(tmpdir, httpbin_both):
assert cass.play_count == 2
def test_raw_stream(tmpdir, httpbin):
expected_response = requests.get(httpbin.url, stream=True)
expected_content = b"".join(expected_response.raw.stream())
for _ in range(2): # one for recording, one for cassette reply
with vcr.use_cassette(str(tmpdir.join("raw_stream.yaml"))):
actual_response = requests.get(httpbin.url, stream=True)
actual_content = b"".join(actual_response.raw.stream())
assert actual_content == expected_content
def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
"""Ensure that requests between schemes are treated separately"""
'''Ensure that requests between schemes are treated separately'''
# First fetch a url under http, and then again under https and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
requests.get(httpbin_secure + "/")
requests.get(httpbin + "/")
with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
requests.get(httpbin_secure + '/')
requests.get(httpbin + '/')
assert cass.play_count == 0
assert len(cass) == 2
def test_gzip__decode_compressed_response_false(tmpdir, httpbin_both):
"""
def test_gzip(tmpdir, httpbin_both):
'''
Ensure that requests (actually urllib3) is able to automatically decompress
the response body
"""
for _ in range(2): # one for recording, one for re-playing
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = requests.get(httpbin_both + "/gzip")
assert response.headers["content-encoding"] == "gzip" # i.e. not removed
assert_is_json_bytes(response.content) # i.e. uncompressed bytes
'''
url = httpbin_both + '/gzip'
response = requests.get(url)
with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
response = requests.get(url)
assert_is_json(response.content)
def test_gzip__decode_compressed_response_true(tmpdir, httpbin_both):
url = httpbin_both + "/gzip"
expected_response = requests.get(url)
expected_content = expected_response.content
assert expected_response.headers["content-encoding"] == "gzip" # self-test
with vcr.use_cassette(
str(tmpdir.join("decode_compressed.yaml")),
decode_compressed_response=True,
) as cassette:
r = requests.get(url)
assert r.headers["content-encoding"] == "gzip" # i.e. not removed
assert r.content == expected_content
# Has the cassette body been decompressed?
cassette_response_body = cassette.responses[0]["body"]["string"]
assert isinstance(cassette_response_body, str)
with vcr.use_cassette(str(tmpdir.join("decode_compressed.yaml")), decode_compressed_response=True):
r = requests.get(url)
assert "content-encoding" not in r.headers # i.e. removed
assert r.content == expected_content
with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
assert_is_json(response.content)
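The gzip tests above toggle decode_compressed_response; a minimal sketch of what that option does to the stored body (cassette path and URL are illustrative):

import requests
import vcr

# With decode_compressed_response=True the body is written to the cassette
# already decompressed, so the recorded YAML stays human-readable.
with vcr.use_cassette("fixtures/gzip.yaml", decode_compressed_response=True) as cass:
    requests.get("http://httpbin.org/gzip")
    assert isinstance(cass.responses[0]["body"]["string"], str)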
def test_session_and_connection_close(tmpdir, httpbin):
"""
'''
This tests the issue in https://github.com/kevin1024/vcrpy/issues/48
If you use a requests.session and the connection is closed, then an
exception is raised in the urllib3 module vendored into requests:
`AttributeError: 'NoneType' object has no attribute 'settimeout'`
"""
with vcr.use_cassette(str(tmpdir.join("session_connection_closed.yaml"))):
'''
with vcr.use_cassette(str(tmpdir.join('session_connection_closed.yaml'))):
session = requests.session()
session.get(httpbin + "/get", headers={"Connection": "close"})
session.get(httpbin + "/get", headers={"Connection": "close"})
session.get(httpbin + '/get', headers={'Connection': 'close'})
session.get(httpbin + '/get', headers={'Connection': 'close'})
def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure):
with vcr.use_cassette(str(tmpdir.join("cert_validation_disabled.yaml"))):
with vcr.use_cassette(str(tmpdir.join('cert_validation_disabled.yaml'))):
requests.get(httpbin_secure.url, verify=False)
def test_session_can_make_requests_after_requests_unpatched(tmpdir, httpbin):
with vcr.use_cassette(str(tmpdir.join("test_session_after_unpatched.yaml"))):
with vcr.use_cassette(str(tmpdir.join('test_session_after_unpatched.yaml'))):
session = requests.session()
session.get(httpbin + "/get")
session.get(httpbin + '/get')
with vcr.use_cassette(str(tmpdir.join("test_session_after_unpatched.yaml"))):
with vcr.use_cassette(str(tmpdir.join('test_session_after_unpatched.yaml'))):
session = requests.session()
session.get(httpbin + "/get")
session.get(httpbin + '/get')
session.get(httpbin + "/status/200")
session.get(httpbin + '/status/200')
def test_session_created_before_use_cassette_is_patched(tmpdir, httpbin_both):
url = httpbin_both + "/bytes/1024"
url = httpbin_both + '/bytes/1024'
# Record arbitrary, random data to the cassette
with vcr.use_cassette(str(tmpdir.join("session_created_outside.yaml"))):
with vcr.use_cassette(str(tmpdir.join('session_created_outside.yaml'))):
session = requests.session()
body = session.get(url).content
# Create a session outside of any cassette context manager
session = requests.session()
# Make a request to make sure that a connectionpool is instantiated
session.get(httpbin_both + "/get")
session.get(httpbin_both + '/get')
with vcr.use_cassette(str(tmpdir.join("session_created_outside.yaml"))):
with vcr.use_cassette(str(tmpdir.join('session_created_outside.yaml'))):
# These should only be the same if the patching succeeded.
assert session.get(url).content == body
def test_nested_cassettes_with_session_created_before_nesting(httpbin_both, tmpdir):
"""
'''
This tests ensures that a session that was created while one cassette was
active is patched to the use the responses of a second cassette when it
is enabled.
"""
url = httpbin_both + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("first_nested.yaml"))):
'''
url = httpbin_both + '/bytes/1024'
with vcr.use_cassette(str(tmpdir.join('first_nested.yaml'))):
session = requests.session()
first_body = session.get(url).content
with vcr.use_cassette(str(tmpdir.join("second_nested.yaml"))):
with vcr.use_cassette(str(tmpdir.join('second_nested.yaml'))):
second_body = session.get(url).content
third_body = requests.get(url).content
with vcr.use_cassette(str(tmpdir.join("second_nested.yaml"))):
with vcr.use_cassette(str(tmpdir.join('second_nested.yaml'))):
session = requests.session()
assert session.get(url).content == second_body
with vcr.use_cassette(str(tmpdir.join("first_nested.yaml"))):
with vcr.use_cassette(str(tmpdir.join('first_nested.yaml'))):
assert session.get(url).content == first_body
assert session.get(url).content == third_body
# Make sure that the session can now get content normally.
assert "User-agent" in session.get(httpbin_both.url + "/robots.txt").text
assert 'User-agent' in session.get(httpbin_both.url + '/robots.txt').text
def test_post_file(tmpdir, httpbin_both):
"""Ensure that we handle posting a file."""
url = httpbin_both + "/post"
with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open(".editorconfig", "rb") as f:
'''Ensure that we handle posting a file.'''
url = httpbin_both + '/post'
with vcr.use_cassette(str(tmpdir.join('post_file.yaml'))) as cass, open('tox.ini') as f:
original_response = requests.post(url, f).content
# This also tests that we do the right thing with matching the body when they are files.
with vcr.use_cassette(
str(tmpdir.join("post_file.yaml")),
match_on=("method", "scheme", "host", "port", "path", "query", "body"),
) as cass:
with open(".editorconfig", "rb") as f:
editorconfig = f.read()
assert cass.requests[0].body.read() == editorconfig
with open(".editorconfig", "rb") as f:
with vcr.use_cassette(str(tmpdir.join('post_file.yaml')),
match_on=('method', 'scheme', 'host', 'port', 'path', 'query', 'body')) as cass:
with open('tox.ini', 'rb') as f:
tox_content = f.read()
assert cass.requests[0].body.read() == tox_content
with open('tox.ini', 'rb') as f:
new_response = requests.post(url, f).content
assert original_response == new_response
def test_filter_post_params(tmpdir, httpbin_both):
"""
'''
This tests the issue in https://github.com/kevin1024/vcrpy/issues/158
Ensure that a post request made through requests can still be filtered.
with vcr.use_cassette(cass_file, filter_post_data_parameters=['id']) as cass:
assert b'id=secret' not in cass.requests[0].body
"""
url = httpbin_both.url + "/post"
cass_loc = str(tmpdir.join("filter_post_params.yaml"))
with vcr.use_cassette(cass_loc, filter_post_data_parameters=["key"]) as cass:
requests.post(url, data={"key": "value"})
with vcr.use_cassette(cass_loc, filter_post_data_parameters=["key"]) as cass:
assert b"key=value" not in cass.requests[0].body
def test_post_unicode_match_on_body(tmpdir, httpbin_both):
"""Ensure that matching on POST body that contains Unicode characters works."""
data = {"key1": "value1", "●‿●": "٩(●̮̮̃•̃)۶"}
url = httpbin_both + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml")), additional_matchers=("body",)):
req1 = requests.post(url, data).content
with vcr.use_cassette(str(tmpdir.join("requests.yaml")), additional_matchers=("body",)):
req2 = requests.post(url, data).content
assert req1 == req2
'''
url = httpbin_both.url + '/post'
cass_loc = str(tmpdir.join('filter_post_params.yaml'))
with vcr.use_cassette(cass_loc, filter_post_data_parameters=['key']) as cass:
requests.post(url, data={'key': 'value'})
with vcr.use_cassette(cass_loc, filter_post_data_parameters=['key']) as cass:
assert b'key=value' not in cass.requests[0].body
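A compact sketch of the post-data filtering shown in test_filter_post_params; the parameter name "password" and the cassette path are illustrative:

import requests
import vcr

cassette = "fixtures/filtered.yaml"
with vcr.use_cassette(cassette, filter_post_data_parameters=["password"]):
    requests.post("http://httpbin.org/post", data={"password": "hunter2"})
# On replay the filtered parameter is absent from the stored request body.
with vcr.use_cassette(cassette, filter_post_data_parameters=["password"]) as cass:
    assert b"password=hunter2" not in cass.requests[0].body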

View File

@@ -1,26 +1,24 @@
import http.client as httplib
import json
import zlib
import vcr
import zlib
import six.moves.http_client as httplib
from ..assertions import assert_is_json_bytes
from assertions import assert_is_json
def _headers_are_case_insensitive(host, port):
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/cookies/set?k1=v1")
conn.request('GET', "/cookies/set?k1=v1")
r1 = conn.getresponse()
cookie_data1 = r1.getheader("set-cookie")
cookie_data1 = r1.getheader('set-cookie')
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/cookies/set?k1=v1")
conn.request('GET', "/cookies/set?k1=v1")
r2 = conn.getresponse()
cookie_data2 = r2.getheader("Set-Cookie")
cookie_data2 = r2.getheader('Set-Cookie')
return cookie_data1 == cookie_data2
def test_case_insensitivity(tmpdir, httpbin):
testfile = str(tmpdir.join("case_insensitivity.yml"))
testfile = str(tmpdir.join('case_insensitivity.yml'))
# check if headers are case insensitive outside of vcrpy
host, port = httpbin.host, httpbin.port
outside = _headers_are_case_insensitive(host, port)
@@ -36,13 +34,13 @@ def test_case_insensitivity(tmpdir, httpbin):
def _multiple_header_value(httpbin):
conn = httplib.HTTPConnection(httpbin.host, httpbin.port)
conn.request("GET", "/response-headers?foo=bar&foo=baz")
conn.request('GET', "/response-headers?foo=bar&foo=baz")
r = conn.getresponse()
return r.getheader("foo")
return r.getheader('foo')
def test_multiple_headers(tmpdir, httpbin):
testfile = str(tmpdir.join("multiple_headers.yaml"))
testfile = str(tmpdir.join('multiple_headers.yaml'))
outside = _multiple_header_value(httpbin)
with vcr.use_cassette(testfile):
@@ -52,84 +50,36 @@ def test_multiple_headers(tmpdir, httpbin):
def test_original_decoded_response_is_not_modified(tmpdir, httpbin):
testfile = str(tmpdir.join("decoded_response.yml"))
testfile = str(tmpdir.join('decoded_response.yml'))
host, port = httpbin.host, httpbin.port
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/gzip")
conn.request('GET', '/gzip')
outside = conn.getresponse()
with vcr.use_cassette(testfile, decode_compressed_response=True):
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/gzip")
conn.request('GET', '/gzip')
inside = conn.getresponse()
# Assert that we do not modify the original response while appending
# to the cassette.
assert inside.headers["content-encoding"] == "gzip"
# to the cassette.
assert 'gzip' == inside.headers['content-encoding']
# They should effectively be the same response.
inside_headers = (h for h in inside.headers.items() if h[0].lower() != "date")
outside_headers = (h for h in outside.getheaders() if h[0].lower() != "date")
inside_headers = (h for h in inside.headers.items() if h[0].lower() != 'date')
outside_headers = (h for h in outside.getheaders() if h[0].lower() != 'date')
assert set(inside_headers) == set(outside_headers)
inside = zlib.decompress(inside.read(), 16 + zlib.MAX_WBITS)
outside = zlib.decompress(outside.read(), 16 + zlib.MAX_WBITS)
inside = zlib.decompress(inside.read(), 16+zlib.MAX_WBITS)
outside = zlib.decompress(outside.read(), 16+zlib.MAX_WBITS)
assert inside == outside
# Even though the above are raw bytes, the JSON data should have been
# decoded and saved to the cassette.
with vcr.use_cassette(testfile):
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/gzip")
conn.request('GET', '/gzip')
inside = conn.getresponse()
assert "content-encoding" not in inside.headers
assert_is_json_bytes(inside.read())
def _make_before_record_response(fields, replacement="[REDACTED]"):
def before_record_response(response):
string_body = response["body"]["string"].decode("utf8")
body = json.loads(string_body)
for field in fields:
if field in body:
body[field] = replacement
response["body"]["string"] = json.dumps(body).encode()
return response
return before_record_response
def test_original_response_is_not_modified_by_before_filter(tmpdir, httpbin):
testfile = str(tmpdir.join("sensitive_data_scrubbed_response.yml"))
host, port = httpbin.host, httpbin.port
field_to_scrub = "url"
replacement = "[YOU_CANT_HAVE_THE_MANGO]"
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/get")
outside = conn.getresponse()
callback = _make_before_record_response([field_to_scrub], replacement)
with vcr.use_cassette(testfile, before_record_response=callback):
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/get")
inside = conn.getresponse()
# The scrubbed field should be the same, because no cassette existed.
# Furthermore, the responses should be identical.
inside_body = json.loads(inside.read())
outside_body = json.loads(outside.read())
assert inside_body[field_to_scrub] != replacement
assert inside_body[field_to_scrub] == outside_body[field_to_scrub]
# Ensure that when a cassette exists, the scrubbed response is returned.
with vcr.use_cassette(testfile, before_record_response=callback):
conn = httplib.HTTPConnection(host, port)
conn.request("GET", "/get")
inside = conn.getresponse()
inside_body = json.loads(inside.read())
assert inside_body[field_to_scrub] == replacement
assert 'content-encoding' not in inside.headers
assert_is_json(inside.read())
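The scrubbing test in this file builds its callback with _make_before_record_response; a minimal stand-alone sketch of the same hook, where the scrubbed field name "origin" and the cassette path are illustrative:

import json

import requests
import vcr

def scrub_origin(response):
    # before_record_response receives the response dict about to be written
    # to the cassette and may return a modified version of it.
    body = json.loads(response["body"]["string"].decode("utf8"))
    body.pop("origin", None)
    response["body"]["string"] = json.dumps(body).encode()
    return response

with vcr.use_cassette("fixtures/scrubbed.yaml", before_record_response=scrub_origin):
    requests.get("http://httpbin.org/get")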

View File

@@ -1,272 +1,254 @@
"""Test requests' interaction with vcr"""
# -*- coding: utf-8 -*-
'''Test requests' interaction with vcr'''
import asyncio
import functools
import inspect
import json
import os
import ssl
import pytest
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException
from ..assertions import assert_cassette_empty, assert_is_json_bytes
from assertions import assert_cassette_empty, assert_is_json
tornado = pytest.importorskip("tornado")
gen = pytest.importorskip("tornado.gen")
http = pytest.importorskip("tornado.httpclient")
# whether the current version of Tornado supports the raise_error argument for
# fetch().
supports_raise_error = tornado.version_info >= (4,)
raise_error_for_response_code_only = tornado.version_info >= (6,)
def gen_test(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
async def coro():
return await gen.coroutine(func)(*args, **kwargs)
return asyncio.run(coro())
# Patch the signature so pytest can inject fixtures
# we can't use wrapt.decorator because it returns a generator function
wrapper.__signature__ = inspect.signature(func)
return wrapper
@pytest.fixture(params=["simple", "curl", "default"])
@pytest.fixture(params=['simple', 'curl', 'default'])
def get_client(request):
ca_bundle_path = os.environ.get("REQUESTS_CA_BUNDLE")
ssl_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
ssl_ctx.load_verify_locations(cafile=ca_bundle_path)
ssl_ctx.verify_mode = ssl.CERT_REQUIRED
if request.param == "simple":
if request.param == 'simple':
from tornado import simple_httpclient as simple
return lambda: simple.SimpleAsyncHTTPClient(defaults={"ssl_options": ssl_ctx})
if request.param == "curl":
return (lambda: simple.SimpleAsyncHTTPClient())
elif request.param == 'curl':
curl = pytest.importorskip("tornado.curl_httpclient")
return lambda: curl.CurlAsyncHTTPClient(defaults={"ca_certs": ca_bundle_path})
return lambda: http.AsyncHTTPClient(defaults={"ssl_options": ssl_ctx})
return (lambda: curl.CurlAsyncHTTPClient())
else:
return (lambda: http.AsyncHTTPClient())
def get(client, url, **kwargs):
fetch_kwargs = {}
if supports_raise_error:
fetch_kwargs["raise_error"] = kwargs.pop("raise_error", True)
fetch_kwargs['raise_error'] = kwargs.pop('raise_error', True)
return client.fetch(http.HTTPRequest(url, method="GET", **kwargs), **fetch_kwargs)
return client.fetch(
http.HTTPRequest(url, method='GET', **kwargs),
**fetch_kwargs
)
def post(client, url, data=None, **kwargs):
if data:
kwargs["body"] = json.dumps(data)
return client.fetch(http.HTTPRequest(url, method="POST", **kwargs))
kwargs['body'] = json.dumps(data)
return client.fetch(http.HTTPRequest(url, method='POST', **kwargs))
@pytest.mark.online
@gen_test
def test_status_code(get_client, tmpdir, httpbin_both):
"""Ensure that we can read the status code"""
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
@pytest.fixture(params=["https", "http"])
def scheme(request):
'''Fixture that returns both http and https.'''
return request.param
@pytest.mark.gen_test
def test_status_code(get_client, scheme, tmpdir):
'''Ensure that we can read the status code'''
url = scheme + '://httpbin.org/'
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
status_code = (yield get(get_client(), url)).code
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))) as cass:
assert status_code == (yield get(get_client(), url)).code
assert cass.play_count == 1
assert 1 == cass.play_count
@pytest.mark.online
@gen_test
def test_headers(get_client, httpbin_both, tmpdir):
"""Ensure that we can read the headers back"""
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
@pytest.mark.gen_test
def test_headers(get_client, scheme, tmpdir):
'''Ensure that we can read the headers back'''
url = scheme + '://httpbin.org/'
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
headers = (yield get(get_client(), url)).headers
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))) as cass:
assert headers == (yield get(get_client(), url)).headers
assert cass.play_count == 1
assert 1 == cass.play_count
@pytest.mark.online
@gen_test
def test_body(get_client, tmpdir, httpbin_both):
"""Ensure the responses are all identical enough"""
@pytest.mark.gen_test
def test_body(get_client, tmpdir, scheme):
'''Ensure the responses are all identical enough'''
url = httpbin_both.url + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
url = scheme + '://httpbin.org/bytes/1024'
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
content = (yield get(get_client(), url)).body
with vcr.use_cassette(str(tmpdir.join("body.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('body.yaml'))) as cass:
assert content == (yield get(get_client(), url)).body
assert cass.play_count == 1
assert 1 == cass.play_count
@gen_test
def test_effective_url(get_client, tmpdir, httpbin):
"""Ensure that the effective_url is captured"""
url = httpbin.url + "/redirect/1"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
@pytest.mark.gen_test
def test_effective_url(get_client, scheme, tmpdir):
'''Ensure that the effective_url is captured'''
url = scheme + '://httpbin.org/redirect-to?url=/html'
with vcr.use_cassette(str(tmpdir.join('url.yaml'))):
effective_url = (yield get(get_client(), url)).effective_url
assert effective_url == httpbin.url + "/get"
assert effective_url == scheme + '://httpbin.org/html'
with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('url.yaml'))) as cass:
assert effective_url == (yield get(get_client(), url)).effective_url
assert cass.play_count == 1
assert 1 == cass.play_count
@pytest.mark.online
@gen_test
def test_auth(get_client, tmpdir, httpbin_both):
"""Ensure that we can handle basic auth"""
auth = ("user", "passwd")
url = httpbin_both.url + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
one = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
@pytest.mark.gen_test
def test_auth(get_client, tmpdir, scheme):
'''Ensure that we can handle basic auth'''
auth = ('user', 'passwd')
url = scheme + '://httpbin.org/basic-auth/user/passwd'
with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
one = yield get(
get_client(), url, auth_username=auth[0], auth_password=auth[1]
)
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))) as cass:
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
with vcr.use_cassette(str(tmpdir.join('auth.yaml'))) as cass:
two = yield get(
get_client(), url, auth_username=auth[0], auth_password=auth[1]
)
assert one.body == two.body
assert one.code == two.code
assert cass.play_count == 1
assert 1 == cass.play_count
@pytest.mark.online
@gen_test
def test_auth_failed(get_client, tmpdir, httpbin_both):
"""Ensure that we can save failed auth statuses"""
auth = ("user", "wrongwrongwrong")
url = httpbin_both.url + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
@pytest.mark.gen_test
def test_auth_failed(get_client, tmpdir, scheme):
'''Ensure that we can save failed auth statuses'''
auth = ('user', 'wrongwrongwrong')
url = scheme + '://httpbin.org/basic-auth/user/passwd'
with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
# Ensure that this is empty to begin with
assert_cassette_empty(cass)
with pytest.raises(http.HTTPError) as exc_info:
yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
yield get(
get_client(),
url,
auth_username=auth[0],
auth_password=auth[1],
)
one = exc_info.value.response
assert exc_info.value.code == 401
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
with pytest.raises(http.HTTPError) as exc_info:
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
two = yield get(
get_client(),
url,
auth_username=auth[0],
auth_password=auth[1],
)
two = exc_info.value.response
assert exc_info.value.code == 401
assert one.body == two.body
assert one.code == two.code == 401
assert cass.play_count == 1
assert 1 == cass.play_count
@pytest.mark.online
@gen_test
def test_post(get_client, tmpdir, httpbin_both):
"""Ensure that we can post and cache the results"""
data = {"key1": "value1", "key2": "value2"}
url = httpbin_both.url + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
@pytest.mark.gen_test
def test_post(get_client, tmpdir, scheme):
'''Ensure that we can post and cache the results'''
data = {'key1': 'value1', 'key2': 'value2'}
url = scheme + '://httpbin.org/post'
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
req1 = (yield post(get_client(), url, data)).body
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))) as cass:
req2 = (yield post(get_client(), url, data)).body
assert req1 == req2
assert cass.play_count == 1
assert 1 == cass.play_count
@gen_test
def test_redirects(get_client, tmpdir, httpbin):
"""Ensure that we can handle redirects"""
url = httpbin + "/redirect-to?url=bytes/1024&status_code=301"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
@pytest.mark.gen_test
def test_redirects(get_client, tmpdir, scheme):
'''Ensure that we can handle redirects'''
url = scheme + '://httpbin.org/redirect-to?url=bytes/1024'
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))):
content = (yield get(get_client(), url)).body
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('requests.yaml'))) as cass:
assert content == (yield get(get_client(), url)).body
assert cass.play_count == 1
@pytest.mark.online
@gen_test
def test_cross_scheme(get_client, tmpdir, httpbin, httpbin_secure):
"""Ensure that requests between schemes are treated separately"""
@pytest.mark.gen_test
def test_cross_scheme(get_client, tmpdir, scheme):
'''Ensure that requests between schemes are treated separately'''
# First fetch a url under http, and then again under https and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
url = httpbin.url
url_secure = httpbin_secure.url
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
yield get(get_client(), url)
yield get(get_client(), url_secure)
with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
yield get(get_client(), 'https://httpbin.org/')
yield get(get_client(), 'http://httpbin.org/')
assert cass.play_count == 0
assert len(cass) == 2
# Then repeat the same requests and ensure both were replayed.
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
yield get(get_client(), url)
yield get(get_client(), url_secure)
with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
yield get(get_client(), 'https://httpbin.org/')
yield get(get_client(), 'http://httpbin.org/')
assert cass.play_count == 2
@pytest.mark.online
@gen_test
def test_gzip(get_client, tmpdir, httpbin_both):
"""
@pytest.mark.gen_test
def test_gzip(get_client, tmpdir, scheme):
'''
Ensure that httpclient is able to automatically decompress the response
body
"""
url = httpbin_both + "/gzip"
'''
url = scheme + '://httpbin.org/gzip'
# use_gzip was renamed to decompress_response in 4.0
kwargs = {}
if tornado.version_info < (4,):
kwargs["use_gzip"] = True
kwargs['use_gzip'] = True
else:
kwargs["decompress_response"] = True
kwargs['decompress_response'] = True
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
response = yield get(get_client(), url, **kwargs)
assert_is_json_bytes(response.body)
assert_is_json(response.body)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))) as cass:
response = yield get(get_client(), url, **kwargs)
assert_is_json_bytes(response.body)
assert cass.play_count == 1
assert_is_json(response.body)
assert 1 == cass.play_count
@pytest.mark.online
@gen_test
def test_https_with_cert_validation_disabled(get_client, tmpdir, httpbin_secure):
cass_path = str(tmpdir.join("cert_validation_disabled.yaml"))
url = httpbin_secure.url
@pytest.mark.gen_test
def test_https_with_cert_validation_disabled(get_client, tmpdir):
cass_path = str(tmpdir.join('cert_validation_disabled.yaml'))
with vcr.use_cassette(cass_path):
yield get(get_client(), url, validate_cert=False)
yield get(get_client(), 'https://httpbin.org', validate_cert=False)
with vcr.use_cassette(cass_path) as cass:
yield get(get_client(), url, validate_cert=False)
assert cass.play_count == 1
yield get(get_client(), 'https://httpbin.org', validate_cert=False)
assert 1 == cass.play_count
@gen_test
def test_unsupported_features_raises_in_future(get_client, tmpdir, httpbin):
"""Ensure that the exception for an AsyncHTTPClient feature not being
supported is raised inside the future."""
@pytest.mark.gen_test
def test_unsupported_features_raises_in_future(get_client, tmpdir):
'''Ensure that the exception for an AsyncHTTPClient feature not being
supported is raised inside the future.'''
def callback(chunk):
raise AssertionError("Did not expect to be called.")
assert False, "Did not expect to be called."
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
future = get(get_client(), httpbin.url, streaming_callback=callback)
with vcr.use_cassette(str(tmpdir.join('invalid.yaml'))):
future = get(
get_client(), 'http://httpbin.org', streaming_callback=callback
)
with pytest.raises(Exception) as excinfo:
yield future
@@ -274,23 +256,22 @@ def test_unsupported_features_raises_in_future(get_client, tmpdir, httpbin):
assert "not yet supported by VCR" in str(excinfo)
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
@pytest.mark.skipif(
raise_error_for_response_code_only,
reason="raise_error only ignores HTTPErrors due to response code",
not supports_raise_error,
reason='raise_error unavailable in tornado <= 3',
)
@gen_test
@pytest.mark.gen_test
def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
"""Ensure that the exception for an AsyncHTTPClient feature not being
supported is not raised if raise_error=False."""
'''Ensure that the exception for an AsyncHTTPClient feature not being
supported is not raised if raise_error=False.'''
def callback(chunk):
raise AssertionError("Did not expect to be called.")
assert False, "Did not expect to be called."
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
with vcr.use_cassette(str(tmpdir.join('invalid.yaml'))):
response = yield get(
get_client(),
"http://httpbin.org",
'http://httpbin.org',
streaming_callback=callback,
raise_error=False,
)
@@ -298,113 +279,105 @@ def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
assert "not yet supported by VCR" in str(response.error)
@pytest.mark.online
@gen_test
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir, httpbin):
"""Ensure that CannotOverwriteExistingCassetteException is raised inside
the future."""
@pytest.mark.gen_test
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
'''Ensure that CannotOverwriteExistingCassetteException is raised inside
the future.'''
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
yield get(get_client(), 'http://httpbin.org/get')
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
yield get(get_client(), url + "/get")
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
future = get(get_client(), url + "/headers")
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
future = get(get_client(), 'http://httpbin.org/headers')
with pytest.raises(CannotOverwriteExistingCassetteException):
yield future
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
@pytest.mark.skipif(
raise_error_for_response_code_only,
reason="raise_error only ignores HTTPErrors due to response code",
not supports_raise_error,
reason='raise_error unavailable in tornado <= 3',
)
@gen_test
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir, httpbin):
"""Ensure that CannotOverwriteExistingCassetteException is not raised if
raise_error=False in the fetch() call."""
@pytest.mark.gen_test
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
'''Ensure that CannotOverwriteExistingCassetteException is not raised if
raise_error=False in the fetch() call.'''
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
yield get(
get_client(), 'http://httpbin.org/get', raise_error=False
)
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
yield get(get_client(), url + "/get", raise_error=False)
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
response = yield get(get_client(), url + "/headers", raise_error=False)
with vcr.use_cassette(str(tmpdir.join('overwrite.yaml'))):
response = yield get(
get_client(), 'http://httpbin.org/headers', raise_error=False
)
assert isinstance(response.error, CannotOverwriteExistingCassetteException)
@gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
@pytest.mark.gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix('.yaml'))
def test_tornado_with_decorator_use_cassette(get_client):
response = yield get_client().fetch(http.HTTPRequest("http://www.google.com/", method="GET"))
assert response.body.decode("utf-8") == "not actually google"
response = yield get_client().fetch(
http.HTTPRequest('http://www.google.com/', method='GET')
)
assert response.body.decode('utf-8') == "not actually google"
@gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
@pytest.mark.gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix('.yaml'))
def test_tornado_exception_can_be_caught(get_client):
try:
yield get(get_client(), "http://httpbin.org/status/500")
yield get(get_client(), 'http://httpbin.org/status/500')
except http.HTTPError as e:
assert e.code == 500
try:
yield get(get_client(), "http://httpbin.org/status/404")
yield get(get_client(), 'http://httpbin.org/status/404')
except http.HTTPError as e:
assert e.code == 404
@pytest.mark.online
@gen_test
def test_existing_references_get_patched(tmpdir, httpbin):
@pytest.mark.gen_test
def test_existing_references_get_patched(tmpdir):
from tornado.httpclient import AsyncHTTPClient
url = httpbin.url + "/get"
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
with vcr.use_cassette(str(tmpdir.join('data.yaml'))):
client = AsyncHTTPClient()
yield get(client, url)
yield get(client, 'http://httpbin.org/get')
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
yield get(client, url)
with vcr.use_cassette(str(tmpdir.join('data.yaml'))) as cass:
yield get(client, 'http://httpbin.org/get')
assert cass.play_count == 1
@pytest.mark.online
@gen_test
def test_existing_instances_get_patched(get_client, tmpdir, httpbin):
"""Ensure that existing instances of AsyncHTTPClient get patched upon
entering VCR context."""
@pytest.mark.gen_test
def test_existing_instances_get_patched(get_client, tmpdir):
'''Ensure that existing instances of AsyncHTTPClient get patched upon
entering VCR context.'''
url = httpbin.url + "/get"
client = get_client()
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
yield get(client, url)
with vcr.use_cassette(str(tmpdir.join('data.yaml'))):
yield get(client, 'http://httpbin.org/get')
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
yield get(client, url)
with vcr.use_cassette(str(tmpdir.join('data.yaml'))) as cass:
yield get(client, 'http://httpbin.org/get')
assert cass.play_count == 1
@pytest.mark.online
@gen_test
def test_request_time_is_set(get_client, tmpdir, httpbin):
"""Ensures that the request_time on HTTPResponses is set."""
@pytest.mark.gen_test
def test_request_time_is_set(get_client, tmpdir):
'''Ensures that the request_time on HTTPResponses is set.'''
url = httpbin.url + "/get"
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
with vcr.use_cassette(str(tmpdir.join('data.yaml'))):
client = get_client()
response = yield get(client, url)
response = yield get(client, 'http://httpbin.org/get')
assert response.request_time is not None
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
with vcr.use_cassette(str(tmpdir.join('data.yaml'))) as cass:
client = get_client()
response = yield get(client, url)
response = yield get(client, 'http://httpbin.org/get')
assert response.request_time is not None
assert cass.play_count == 1
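The decorator form used in test_tornado_with_decorator_use_cassette also works for plain functions; a minimal sketch, where the function name (and hence the derived cassette path) is purely illustrative:

import vcr

@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
def fetch_homepage():
    # With no explicit path the cassette name is derived from the decorated
    # function, and ensure_suffix(".yaml") appends the extension when missing.
    ...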

View File

@@ -0,0 +1,146 @@
# -*- coding: utf-8 -*-
'''Integration tests with urllib2'''
from six.moves.urllib.request import urlopen
from six.moves.urllib_parse import urlencode
import pytest_httpbin.certs
# Internal imports
import vcr
from assertions import assert_cassette_has_one_response
def urlopen_with_cafile(*args, **kwargs):
kwargs['cafile'] = pytest_httpbin.certs.where()
try:
return urlopen(*args, **kwargs)
except TypeError:
# python2/pypy don't let us override this
del kwargs['cafile']
return urlopen(*args, **kwargs)
def test_response_code(httpbin_both, tmpdir):
'''Ensure we can read a response code from a fetch'''
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
code = urlopen_with_cafile(url).getcode()
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
assert code == urlopen_with_cafile(url).getcode()
def test_random_body(httpbin_both, tmpdir):
'''Ensure we can read the content, and that it's served from cache'''
url = httpbin_both.url + '/bytes/1024'
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
body = urlopen_with_cafile(url).read()
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
assert body == urlopen_with_cafile(url).read()
def test_response_headers(httpbin_both, tmpdir):
'''Ensure we can get information from the response'''
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
open1 = urlopen_with_cafile(url).info().items()
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
open2 = urlopen_with_cafile(url).info().items()
assert sorted(open1) == sorted(open2)
def test_effective_url(httpbin_both, tmpdir):
'''Ensure that the effective_url is captured'''
url = httpbin_both.url + '/redirect-to?url=/html'
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
effective_url = urlopen_with_cafile(url).geturl()
assert effective_url == httpbin_both.url + '/html'
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
assert effective_url == urlopen_with_cafile(url).geturl()
def test_multiple_requests(httpbin_both, tmpdir):
'''Ensure that we can cache multiple requests'''
urls = [
httpbin_both.url,
httpbin_both.url,
httpbin_both.url + '/get',
httpbin_both.url + '/bytes/1024',
]
with vcr.use_cassette(str(tmpdir.join('multiple.yaml'))) as cass:
[urlopen_with_cafile(url) for url in urls]
assert len(cass) == len(urls)
def test_get_data(httpbin_both, tmpdir):
'''Ensure that it works with query data'''
data = urlencode({'some': 1, 'data': 'here'})
url = httpbin_both.url + '/get?' + data
with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
res1 = urlopen_with_cafile(url).read()
with vcr.use_cassette(str(tmpdir.join('get_data.yaml'))):
res2 = urlopen_with_cafile(url).read()
assert res1 == res2
def test_post_data(httpbin_both, tmpdir):
'''Ensure that it works when posting data'''
data = urlencode({'some': 1, 'data': 'here'}).encode('utf-8')
url = httpbin_both.url + '/post'
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
res1 = urlopen_with_cafile(url, data).read()
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
res2 = urlopen_with_cafile(url, data).read()
assert len(cass) == 1
assert res1 == res2
assert_cassette_has_one_response(cass)
def test_post_unicode_data(httpbin_both, tmpdir):
'''Ensure that it works when posting unicode data'''
data = urlencode({'snowman': u'☃'.encode('utf-8')}).encode('utf-8')
url = httpbin_both.url + '/post'
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))):
res1 = urlopen_with_cafile(url, data).read()
with vcr.use_cassette(str(tmpdir.join('post_data.yaml'))) as cass:
res2 = urlopen_with_cafile(url, data).read()
assert len(cass) == 1
assert res1 == res2
assert_cassette_has_one_response(cass)
def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
'''Ensure that requests between schemes are treated separately'''
# First fetch a url under https, and then again under http and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
urlopen_with_cafile(httpbin_secure.url)
urlopen_with_cafile(httpbin.url)
assert len(cass) == 2
assert cass.play_count == 0
def test_decorator(httpbin_both, tmpdir):
'''Test the decorator version of VCR.py'''
url = httpbin_both.url
@vcr.use_cassette(str(tmpdir.join('atts.yaml')))
def inner1():
return urlopen_with_cafile(url).getcode()
@vcr.use_cassette(str(tmpdir.join('atts.yaml')))
def inner2():
return urlopen_with_cafile(url).getcode()
assert inner1() == inner2()

View File

@@ -1,167 +1,159 @@
"""Integration tests with urllib3"""
'''Integration tests with urllib3'''
# coding=utf-8
import pytest
import pytest_httpbin
import vcr
from vcr.patch import force_reset
from vcr.stubs.compat import get_headers
from ..assertions import assert_cassette_empty, assert_is_json_bytes
from assertions import assert_cassette_empty, assert_is_json
urllib3 = pytest.importorskip("urllib3")
@pytest.fixture(scope="module")
@pytest.fixture(scope='module')
def verify_pool_mgr():
return urllib3.PoolManager(
cert_reqs="CERT_REQUIRED",
ca_certs=pytest_httpbin.certs.where(), # Force certificate check.
cert_reqs='CERT_REQUIRED', # Force certificate check.
ca_certs=pytest_httpbin.certs.where()
)
@pytest.fixture(scope="module")
@pytest.fixture(scope='module')
def pool_mgr():
return urllib3.PoolManager(cert_reqs="CERT_NONE")
return urllib3.PoolManager()
def test_status_code(httpbin_both, tmpdir, verify_pool_mgr):
"""Ensure that we can read the status code"""
'''Ensure that we can read the status code'''
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
status_code = verify_pool_mgr.request("GET", url).status
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
status_code = verify_pool_mgr.request('GET', url).status
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
assert status_code == verify_pool_mgr.request("GET", url).status
with vcr.use_cassette(str(tmpdir.join('atts.yaml'))):
assert status_code == verify_pool_mgr.request('GET', url).status
def test_headers(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can read the headers back"""
'''Ensure that we can read the headers back'''
url = httpbin_both.url
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
headers = verify_pool_mgr.request("GET", url).headers
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
headers = verify_pool_mgr.request('GET', url).headers
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
new_headers = verify_pool_mgr.request("GET", url).headers
assert sorted(get_headers(headers)) == sorted(get_headers(new_headers))
with vcr.use_cassette(str(tmpdir.join('headers.yaml'))):
assert headers == verify_pool_mgr.request('GET', url).headers
def test_body(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure the responses are all identical enough"""
url = httpbin_both.url + "/bytes/1024"
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
content = verify_pool_mgr.request("GET", url).data
'''Ensure the responses are all identical enough'''
url = httpbin_both.url + '/bytes/1024'
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
content = verify_pool_mgr.request('GET', url).data
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
assert content == verify_pool_mgr.request("GET", url).data
with vcr.use_cassette(str(tmpdir.join('body.yaml'))):
assert content == verify_pool_mgr.request('GET', url).data
def test_auth(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can handle basic auth"""
auth = ("user", "passwd")
headers = urllib3.util.make_headers(basic_auth="{}:{}".format(*auth))
url = httpbin_both.url + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
one = verify_pool_mgr.request("GET", url, headers=headers)
'''Ensure that we can handle basic auth'''
auth = ('user', 'passwd')
headers = urllib3.util.make_headers(basic_auth='{}:{}'.format(*auth))
url = httpbin_both.url + '/basic-auth/user/passwd'
with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
one = verify_pool_mgr.request('GET', url, headers=headers)
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
two = verify_pool_mgr.request("GET", url, headers=headers)
with vcr.use_cassette(str(tmpdir.join('auth.yaml'))):
two = verify_pool_mgr.request('GET', url, headers=headers)
assert one.data == two.data
assert one.status == two.status
def test_auth_failed(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can save failed auth statuses"""
auth = ("user", "wrongwrongwrong")
headers = urllib3.util.make_headers(basic_auth="{}:{}".format(*auth))
url = httpbin_both.url + "/basic-auth/user/passwd"
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
'''Ensure that we can save failed auth statuses'''
auth = ('user', 'wrongwrongwrong')
headers = urllib3.util.make_headers(basic_auth='{}:{}'.format(*auth))
url = httpbin_both.url + '/basic-auth/user/passwd'
with vcr.use_cassette(str(tmpdir.join('auth-failed.yaml'))) as cass:
# Ensure that this is empty to begin with
assert_cassette_empty(cass)
one = verify_pool_mgr.request("GET", url, headers=headers)
two = verify_pool_mgr.request("GET", url, headers=headers)
one = verify_pool_mgr.request('GET', url, headers=headers)
two = verify_pool_mgr.request('GET', url, headers=headers)
assert one.data == two.data
assert one.status == two.status == 401
def test_post(tmpdir, httpbin_both, verify_pool_mgr):
"""Ensure that we can post and cache the results"""
data = {"key1": "value1", "key2": "value2"}
url = httpbin_both.url + "/post"
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
req1 = verify_pool_mgr.request("POST", url, data).data
'''Ensure that we can post and cache the results'''
data = {'key1': 'value1', 'key2': 'value2'}
url = httpbin_both.url + '/post'
with vcr.use_cassette(str(tmpdir.join('verify_pool_mgr.yaml'))):
req1 = verify_pool_mgr.request('POST', url, data).data
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
req2 = verify_pool_mgr.request("POST", url, data).data
with vcr.use_cassette(str(tmpdir.join('verify_pool_mgr.yaml'))):
req2 = verify_pool_mgr.request('POST', url, data).data
assert req1 == req2
@pytest.mark.online
def test_redirects(tmpdir, verify_pool_mgr, httpbin):
"""Ensure that we can handle redirects"""
url = httpbin.url + "/redirect/1"
def test_redirects(tmpdir, httpbin_both, verify_pool_mgr):
'''Ensure that we can handle redirects'''
url = httpbin_both.url + '/redirect-to?url=bytes/1024'
with vcr.use_cassette(str(tmpdir.join('verify_pool_mgr.yaml'))):
content = verify_pool_mgr.request('GET', url).data
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
content = verify_pool_mgr.request("GET", url).data
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))) as cass:
assert content == verify_pool_mgr.request("GET", url).data
with vcr.use_cassette(str(tmpdir.join('verify_pool_mgr.yaml'))) as cass:
assert content == verify_pool_mgr.request('GET', url).data
# Ensure that we've now cached *two* responses. One for the redirect
# and one for the final fetch
assert len(cass) == 2
assert cass.play_count == 2
assert len(cass) == 2
assert cass.play_count == 2
def test_cross_scheme(tmpdir, httpbin, httpbin_secure, verify_pool_mgr):
"""Ensure that requests between schemes are treated separately"""
'''Ensure that requests between schemes are treated separately'''
# First fetch a url under https, and then again under http and then
# ensure that we haven't served anything out of cache, and we have two
# requests / response pairs in the cassette
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
verify_pool_mgr.request("GET", httpbin_secure.url)
verify_pool_mgr.request("GET", httpbin.url)
with vcr.use_cassette(str(tmpdir.join('cross_scheme.yaml'))) as cass:
verify_pool_mgr.request('GET', httpbin_secure.url)
verify_pool_mgr.request('GET', httpbin.url)
assert cass.play_count == 0
assert len(cass) == 2
def test_gzip(tmpdir, httpbin_both, verify_pool_mgr):
"""
'''
Ensure that requests (actually urllib3) is able to automatically decompress
the response body
"""
url = httpbin_both.url + "/gzip"
response = verify_pool_mgr.request("GET", url)
'''
url = httpbin_both.url + '/gzip'
response = verify_pool_mgr.request('GET', url)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = verify_pool_mgr.request("GET", url)
assert_is_json_bytes(response.data)
with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
response = verify_pool_mgr.request('GET', url)
assert_is_json(response.data)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
assert_is_json_bytes(response.data)
with vcr.use_cassette(str(tmpdir.join('gzip.yaml'))):
assert_is_json(response.data)
def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure, pool_mgr):
with vcr.use_cassette(str(tmpdir.join("cert_validation_disabled.yaml"))):
pool_mgr.request("GET", httpbin_secure.url)
with vcr.use_cassette(str(tmpdir.join('cert_validation_disabled.yaml'))):
pool_mgr.request('GET', httpbin_secure.url)
def test_urllib3_force_reset():
conn = urllib3.connection
http_original = conn.HTTPConnection
https_original = conn.HTTPSConnection
verified_https_original = conn.VerifiedHTTPSConnection
with vcr.use_cassette(path="test"):
first_cassette_HTTPConnection = conn.HTTPConnection
first_cassette_HTTPSConnection = conn.HTTPSConnection
first_cassette_VerifiedHTTPSConnection = conn.VerifiedHTTPSConnection
cpool = urllib3.connectionpool
http_original = cpool.HTTPConnection
https_original = cpool.HTTPSConnection
verified_https_original = cpool.VerifiedHTTPSConnection
with vcr.use_cassette(path='test'):
first_cassette_HTTPConnection = cpool.HTTPConnection
first_cassette_HTTPSConnection = cpool.HTTPSConnection
first_cassette_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
with force_reset():
assert conn.HTTPConnection is http_original
assert conn.HTTPSConnection is https_original
assert conn.VerifiedHTTPSConnection is verified_https_original
assert conn.HTTPConnection is first_cassette_HTTPConnection
assert conn.HTTPSConnection is first_cassette_HTTPSConnection
assert conn.VerifiedHTTPSConnection is first_cassette_VerifiedHTTPSConnection
assert cpool.HTTPConnection is http_original
assert cpool.HTTPSConnection is https_original
assert cpool.VerifiedHTTPSConnection is verified_https_original
assert cpool.HTTPConnection is first_cassette_HTTPConnection
assert cpool.HTTPSConnection is first_cassette_HTTPSConnection
assert cpool.VerifiedHTTPSConnection is first_cassette_VerifiedHTTPSConnection

View File

@@ -1,23 +1,25 @@
import http.client as httplib
import multiprocessing
from xmlrpc.client import ServerProxy
from xmlrpc.server import SimpleXMLRPCServer
import pytest
import vcr
from six.moves import xmlrpc_client, xmlrpc_server
requests = pytest.importorskip("requests")
import vcr # NOQA
try:
import httplib
except ImportError:
import http.client as httplib
def test_domain_redirect():
"""Ensure that redirects across domains are considered unique"""
'''Ensure that redirects across domains are considered unique'''
# In this example, seomoz.org redirects to moz.com, and if those
# requests are considered identical, then we'll be stuck in a redirect
# loop.
url = "http://seomoz.org/"
with vcr.use_cassette("tests/fixtures/wild/domain_redirect.yaml") as cass:
requests.get(url, headers={"User-Agent": "vcrpy-test"})
url = 'http://seomoz.org/'
with vcr.use_cassette('tests/fixtures/wild/domain_redirect.yaml') as cass:
requests.get(url, headers={'User-Agent': 'vcrpy-test'})
# Ensure that we've now served two responses. One for the original
# redirect, and a second for the actual fetch
assert len(cass) == 2
@@ -28,11 +30,13 @@ def test_flickr_multipart_upload(httpbin, tmpdir):
The python-flickr-api project does a multipart
upload that confuses vcrpy
"""
def _pretend_to_be_flickr_library():
content_type, body = "text/plain", "HELLO WORLD"
h = httplib.HTTPConnection(httpbin.host, httpbin.port)
headers = {"Content-Type": content_type, "content-length": str(len(body))}
headers = {
"Content-Type": content_type,
"content-length": str(len(body))
}
h.request("POST", "/post/", headers=headers)
h.send(body)
r = h.getresponse()
@@ -41,7 +45,7 @@ def test_flickr_multipart_upload(httpbin, tmpdir):
return data
testfile = str(tmpdir.join("flickr.yml"))
testfile = str(tmpdir.join('flickr.yml'))
with vcr.use_cassette(testfile) as cass:
_pretend_to_be_flickr_library()
assert len(cass) == 1
@@ -52,58 +56,52 @@ def test_flickr_multipart_upload(httpbin, tmpdir):
assert cass.play_count == 1
@pytest.mark.online
def test_flickr_should_respond_with_200(tmpdir):
testfile = str(tmpdir.join("flickr.yml"))
testfile = str(tmpdir.join('flickr.yml'))
with vcr.use_cassette(testfile):
r = requests.post("https://api.flickr.com/services/upload", verify=False)
r = requests.post("http://api.flickr.com/services/upload")
assert r.status_code == 200
def test_cookies(tmpdir, httpbin):
testfile = str(tmpdir.join("cookies.yml"))
with vcr.use_cassette(testfile), requests.Session() as s:
testfile = str(tmpdir.join('cookies.yml'))
with vcr.use_cassette(testfile):
s = requests.Session()
s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2")
assert s.cookies.keys() == ["k1", "k2"]
r2 = s.get(httpbin.url + "/cookies")
assert sorted(r2.json()["cookies"].keys()) == ["k1", "k2"]
assert len(r2.json()['cookies']) == 2
@pytest.mark.online
def test_amazon_doctype(tmpdir):
# amazon gzips its homepage. For some reason, in requests 2.7, it's not
# getting gunzipped.
with vcr.use_cassette(str(tmpdir.join("amz.yml"))):
r = requests.get("http://www.amazon.com", verify=False)
assert "html" in r.text
with vcr.use_cassette(str(tmpdir.join('amz.yml'))):
r = requests.get('http://www.amazon.com', verify=False)
assert 'html' in r.text
def start_rpc_server(q):
httpd = SimpleXMLRPCServer(("127.0.0.1", 0))
httpd.register_function(pow)
q.put("http://{}:{}".format(*httpd.server_address))
httpd.serve_forever()
@pytest.fixture(scope="session")
@pytest.yield_fixture(scope='session')
def rpc_server():
q = multiprocessing.Queue()
proxy_process = multiprocessing.Process(target=start_rpc_server, args=(q,))
httpd = xmlrpc_server.SimpleXMLRPCServer(('', 0))
httpd.register_function(pow)
proxy_process = multiprocessing.Process(
target=httpd.serve_forever,
)
try:
proxy_process.start()
yield q.get()
yield 'http://{}:{}'.format(*httpd.server_address)
finally:
proxy_process.terminate()
def test_xmlrpclib(tmpdir, rpc_server):
with vcr.use_cassette(str(tmpdir.join("xmlrpcvideo.yaml"))):
roundup_server = ServerProxy(rpc_server, allow_none=True)
with vcr.use_cassette(str(tmpdir.join('xmlrpcvideo.yaml'))):
roundup_server = xmlrpc_client.ServerProxy(rpc_server, allow_none=True)
original_schema = roundup_server.pow(2, 4)
with vcr.use_cassette(str(tmpdir.join("xmlrpcvideo.yaml"))):
roundup_server = ServerProxy(rpc_server, allow_none=True)
with vcr.use_cassette(str(tmpdir.join('xmlrpcvideo.yaml'))):
roundup_server = xmlrpc_client.ServerProxy(rpc_server, allow_none=True)
second_schema = roundup_server.pow(2, 4)
assert original_schema == second_schema

View File

@@ -1,67 +1,44 @@
import contextlib
import copy
import http.client as httplib
import inspect
import os
from unittest import mock
from six.moves import http_client as httplib
import pytest
import yaml
from vcr.compat import mock, contextlib
from vcr.cassette import Cassette
from vcr.errors import UnhandledHTTPRequestError
from vcr.patch import force_reset
from vcr.request import Request
from vcr.stubs import VCRHTTPSConnection
def test_cassette_load(tmpdir):
a_file = tmpdir.join("test_cassette.yml")
a_file.write(
yaml.dump(
{
"interactions": [
{
"request": {"body": "", "uri": "foo", "method": "GET", "headers": {}},
"response": "bar",
},
],
},
),
)
a_file = tmpdir.join('test_cassette.yml')
a_file.write(yaml.dump({'interactions': [
{'request': {'body': '', 'uri': 'foo', 'method': 'GET', 'headers': {}},
'response': 'bar'}
]}))
a_cassette = Cassette.load(path=str(a_file))
assert len(a_cassette) == 1
def test_cassette_load_nonexistent():
a_cassette = Cassette.load(path="something/nonexistent.yml")
assert len(a_cassette) == 0
def test_cassette_load_invalid_encoding(tmpdir):
a_file = tmpdir.join("invalid_encoding.yml")
with open(a_file, "wb") as fd:
fd.write(b"\xda")
a_cassette = Cassette.load(path=str(a_file))
assert len(a_cassette) == 0
def test_cassette_not_played():
a = Cassette("test")
a = Cassette('test')
assert not a.play_count
def test_cassette_append():
a = Cassette("test")
a.append("foo", "bar")
assert a.requests == ["foo"]
assert a.responses == ["bar"]
a = Cassette('test')
a.append('foo', 'bar')
assert a.requests == ['foo']
assert a.responses == ['bar']
def test_cassette_len():
a = Cassette("test")
a.append("foo", "bar")
a.append("foo2", "bar2")
a = Cassette('test')
a.append('foo', 'bar')
a.append('foo2', 'bar2')
assert len(a) == 2
@@ -69,34 +46,34 @@ def _mock_requests_match(request1, request2, matchers):
return request1 == request2
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
@mock.patch('vcr.cassette.requests_match', _mock_requests_match)
def test_cassette_contains():
a = Cassette("test")
a.append("foo", "bar")
assert "foo" in a
a = Cassette('test')
a.append('foo', 'bar')
assert 'foo' in a
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
@mock.patch('vcr.cassette.requests_match', _mock_requests_match)
def test_cassette_responses_of():
a = Cassette("test")
a.append("foo", "bar")
assert a.responses_of("foo") == ["bar"]
a = Cassette('test')
a.append('foo', 'bar')
assert a.responses_of('foo') == ['bar']
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
@mock.patch('vcr.cassette.requests_match', _mock_requests_match)
def test_cassette_get_missing_response():
a = Cassette("test")
a = Cassette('test')
with pytest.raises(UnhandledHTTPRequestError):
a.responses_of("foo")
a.responses_of('foo')
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
@mock.patch('vcr.cassette.requests_match', _mock_requests_match)
def test_cassette_cant_read_same_request_twice():
a = Cassette("test")
a.append("foo", "bar")
a.play_response("foo")
a = Cassette('test')
a.append('foo', 'bar')
a.play_response('foo')
with pytest.raises(UnhandledHTTPRequestError):
a.play_response("foo")
a.play_response('foo')
def make_get_request():
@@ -105,146 +82,105 @@ def make_get_request():
return conn.getresponse()
@mock.patch("vcr.cassette.requests_match", return_value=True)
@mock.patch(
"vcr.cassette.FilesystemPersister.load_cassette",
classmethod(lambda *args, **kwargs: (("foo",), (mock.MagicMock(),))),
)
@mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=True)
@mock.patch("vcr.stubs.VCRHTTPResponse")
@mock.patch('vcr.cassette.requests_match', return_value=True)
@mock.patch('vcr.cassette.FilesystemPersister.load_cassette',
classmethod(lambda *args, **kwargs: (('foo',), (mock.MagicMock(),))))
@mock.patch('vcr.cassette.Cassette.can_play_response_for', return_value=True)
@mock.patch('vcr.stubs.VCRHTTPResponse')
def test_function_decorated_with_use_cassette_can_be_invoked_multiple_times(*args):
decorated_function = Cassette.use(path="test")(make_get_request)
for _ in range(4):
decorated_function = Cassette.use(path='test')(make_get_request)
for i in range(4):
decorated_function()
def test_arg_getter_functionality():
arg_getter = mock.Mock(return_value={"path": "test"})
arg_getter = mock.Mock(return_value={'path': 'test'})
context_decorator = Cassette.use_arg_getter(arg_getter)
with context_decorator as cassette:
assert cassette._path == "test"
assert cassette._path == 'test'
arg_getter.return_value = {"path": "other"}
arg_getter.return_value = {'path': 'other'}
with context_decorator as cassette:
assert cassette._path == "other"
assert cassette._path == 'other'
arg_getter.return_value = {"path": "other", "filter_headers": ("header_name",)}
arg_getter.return_value = {'path': 'other', 'filter_headers': ('header_name',)}
@context_decorator
def function():
pass
with mock.patch.object(Cassette, "load", return_value=mock.MagicMock(inject=False)) as cassette_load:
with mock.patch.object(
Cassette, 'load',
return_value=mock.MagicMock(inject=False)
) as cassette_load:
function()
cassette_load.assert_called_once_with(**arg_getter.return_value)
def test_cassette_not_all_played():
a = Cassette("test")
a.append("foo", "bar")
a = Cassette('test')
a.append('foo', 'bar')
assert not a.all_played
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
@mock.patch('vcr.cassette.requests_match', _mock_requests_match)
def test_cassette_all_played():
a = Cassette("test")
a.append("foo", "bar")
a.play_response("foo")
a = Cassette('test')
a.append('foo', 'bar')
a.play_response('foo')
assert a.all_played
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
def test_cassette_allow_playback_repeats():
a = Cassette("test", allow_playback_repeats=True)
a.append("foo", "bar")
a.append("other", "resp")
for _ in range(10):
assert a.play_response("foo") == "bar"
assert a.play_count == 10
assert a.all_played is False
assert a.play_response("other") == "resp"
assert a.play_count == 11
assert a.all_played
a.allow_playback_repeats = False
with pytest.raises(UnhandledHTTPRequestError) as e:
a.play_response("foo")
assert str(e.value) == "\"The cassette ('test') doesn't contain the request ('foo') asked for\""
a.rewind()
assert a.all_played is False
assert a.play_response("foo") == "bar"
assert a.all_played is False
assert a.play_response("other") == "resp"
assert a.all_played
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
def test_cassette_rewound():
a = Cassette("test")
a.append("foo", "bar")
a.play_response("foo")
assert a.all_played
a.rewind()
assert not a.all_played
def test_before_record_response():
before_record_response = mock.Mock(return_value="mutated")
cassette = Cassette("test", before_record_response=before_record_response)
cassette.append("req", "res")
before_record_response = mock.Mock(return_value='mutated')
cassette = Cassette('test', before_record_response=before_record_response)
cassette.append('req', 'res')
before_record_response.assert_called_once_with("res")
assert cassette.responses[0] == "mutated"
before_record_response.assert_called_once_with('res')
assert cassette.responses[0] == 'mutated'
def assert_get_response_body_is(value):
conn = httplib.HTTPConnection("www.python.org")
conn.request("GET", "/index.html")
assert conn.getresponse().read().decode("utf8") == value
assert conn.getresponse().read().decode('utf8') == value
@mock.patch("vcr.cassette.requests_match", _mock_requests_match)
@mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=True)
@mock.patch("vcr.cassette.Cassette._save", return_value=True)
@mock.patch('vcr.cassette.requests_match', _mock_requests_match)
@mock.patch('vcr.cassette.Cassette.can_play_response_for', return_value=True)
@mock.patch('vcr.cassette.Cassette._save', return_value=True)
def test_nesting_cassette_context_managers(*args):
first_response = {
"body": {"string": b"first_response"},
"headers": {},
"status": {"message": "m", "code": 200},
}
first_response = {'body': {'string': b'first_response'}, 'headers': {},
'status': {'message': 'm', 'code': 200}}
second_response = copy.deepcopy(first_response)
second_response["body"]["string"] = b"second_response"
second_response['body']['string'] = b'second_response'
with contextlib.ExitStack() as exit_stack:
first_cassette = exit_stack.enter_context(Cassette.use(path="test"))
exit_stack.enter_context(
mock.patch.object(first_cassette, "play_response", return_value=first_response),
)
assert_get_response_body_is("first_response")
first_cassette = exit_stack.enter_context(Cassette.use(path='test'))
exit_stack.enter_context(mock.patch.object(first_cassette, 'play_response',
return_value=first_response))
assert_get_response_body_is('first_response')
# Make sure a second cassette can supersede the first
with (
Cassette.use(path="test") as second_cassette,
mock.patch.object(second_cassette, "play_response", return_value=second_response),
):
assert_get_response_body_is("second_response")
# Make sure a second cassette can supercede the first
with Cassette.use(path='test') as second_cassette:
with mock.patch.object(second_cassette, 'play_response', return_value=second_response):
assert_get_response_body_is('second_response')
# Now the first cassette should be back in effect
assert_get_response_body_is("first_response")
assert_get_response_body_is('first_response')
def test_nesting_context_managers_by_checking_references_of_http_connection():
original = httplib.HTTPConnection
with Cassette.use(path="test"):
with Cassette.use(path='test'):
first_cassette_HTTPConnection = httplib.HTTPConnection
with Cassette.use(path="test"):
with Cassette.use(path='test'):
second_cassette_HTTPConnection = httplib.HTTPConnection
assert second_cassette_HTTPConnection is not first_cassette_HTTPConnection
with Cassette.use(path="test"):
with Cassette.use(path='test'):
assert httplib.HTTPConnection is not second_cassette_HTTPConnection
with force_reset():
assert httplib.HTTPConnection is original
@@ -253,15 +189,16 @@ def test_nesting_context_managers_by_checking_references_of_http_connection():
def test_custom_patchers():
class Test:
class Test(object):
attribute = None
with Cassette.use(path="custom_patches", custom_patches=((Test, "attribute", VCRHTTPSConnection),)):
with Cassette.use(path='custom_patches',
custom_patches=((Test, 'attribute', VCRHTTPSConnection),)):
assert issubclass(Test.attribute, VCRHTTPSConnection)
assert VCRHTTPSConnection is not Test.attribute
old_attribute = Test.attribute
with Cassette.use(path="custom_patches", custom_patches=((Test, "attribute", VCRHTTPSConnection),)):
with Cassette.use(path='custom_patches',
custom_patches=((Test, 'attribute', VCRHTTPSConnection),)):
assert issubclass(Test.attribute, VCRHTTPSConnection)
assert VCRHTTPSConnection is not Test.attribute
assert Test.attribute is not old_attribute
@@ -275,15 +212,15 @@ def test_decorated_functions_are_reentrant():
info = {"second": False}
original_conn = httplib.HTTPConnection
@Cassette.use(path="whatever", inject=True)
@Cassette.use(path='whatever', inject=True)
def test_function(cassette):
if info["second"]:
assert httplib.HTTPConnection is not info["first_conn"]
if info['second']:
assert httplib.HTTPConnection is not info['first_conn']
else:
info["first_conn"] = httplib.HTTPConnection
info["second"] = True
info['first_conn'] = httplib.HTTPConnection
info['second'] = True
test_function()
assert httplib.HTTPConnection is info["first_conn"]
assert httplib.HTTPConnection is info['first_conn']
test_function()
assert httplib.HTTPConnection is original_conn
@@ -292,54 +229,58 @@ def test_decorated_functions_are_reentrant():
def test_cassette_use_called_without_path_uses_function_to_generate_path():
@Cassette.use(inject=True)
def function_name(cassette):
assert cassette._path == "function_name"
assert cassette._path == 'function_name'
function_name()
def test_path_transformer_with_function_path():
def path_transformer(path):
return os.path.join("a", path)
return os.path.join('a', path)
@Cassette.use(inject=True, path_transformer=path_transformer)
def function_name(cassette):
assert cassette._path == os.path.join("a", "function_name")
assert cassette._path == os.path.join('a', 'function_name')
function_name()
def test_path_transformer_with_context_manager():
with Cassette.use(path="b", path_transformer=lambda *args: "a") as cassette:
assert cassette._path == "a"
with Cassette.use(
path='b', path_transformer=lambda *args: 'a'
) as cassette:
assert cassette._path == 'a'
def test_path_transformer_None():
with Cassette.use(path="a", path_transformer=None) as cassette:
assert cassette._path == "a"
with Cassette.use(
path='a', path_transformer=None,
) as cassette:
assert cassette._path == 'a'
def test_func_path_generator():
def generator(function):
return os.path.join(os.path.dirname(inspect.getfile(function)), function.__name__)
return os.path.join(os.path.dirname(inspect.getfile(function)),
function.__name__)
@Cassette.use(inject=True, func_path_generator=generator)
def function_name(cassette):
assert cassette._path == os.path.join(os.path.dirname(__file__), "function_name")
assert cassette._path == os.path.join(os.path.dirname(__file__), 'function_name')
function_name()
def test_use_as_decorator_on_coroutine():
original_http_connection = httplib.HTTPConnection
original_http_connetion = httplib.HTTPConnection
@Cassette.use(inject=True)
def test_function(cassette):
assert httplib.HTTPConnection.cassette is cassette
assert httplib.HTTPConnection is not original_http_connection
assert httplib.HTTPConnection is not original_http_connetion
value = yield 1
assert value == 1
assert httplib.HTTPConnection.cassette is cassette
assert httplib.HTTPConnection is not original_http_connection
assert httplib.HTTPConnection is not original_http_connetion
value = yield 2
assert value == 2
@@ -353,85 +294,15 @@ def test_use_as_decorator_on_coroutine():
def test_use_as_decorator_on_generator():
original_http_connection = httplib.HTTPConnection
original_http_connetion = httplib.HTTPConnection
@Cassette.use(inject=True)
def test_function(cassette):
assert httplib.HTTPConnection.cassette is cassette
assert httplib.HTTPConnection is not original_http_connection
assert httplib.HTTPConnection is not original_http_connetion
yield 1
assert httplib.HTTPConnection.cassette is cassette
assert httplib.HTTPConnection is not original_http_connection
assert httplib.HTTPConnection is not original_http_connetion
yield 2
assert list(test_function()) == [1, 2]
@mock.patch("vcr.cassette.get_matchers_results")
def test_find_requests_with_most_matches_one_similar_request(mock_get_matchers_results):
mock_get_matchers_results.side_effect = [
(["method"], [("path", "failed : path"), ("query", "failed : query")]),
(["method", "path"], [("query", "failed : query")]),
([], [("method", "failed : method"), ("path", "failed : path"), ("query", "failed : query")]),
]
cassette = Cassette("test")
for request in range(1, 4):
cassette.append(request, "response")
result = cassette.find_requests_with_most_matches("fake request")
assert result == [(2, ["method", "path"], [("query", "failed : query")])]
@mock.patch("vcr.cassette.get_matchers_results")
def test_find_requests_with_most_matches_no_similar_requests(mock_get_matchers_results):
mock_get_matchers_results.side_effect = [
([], [("path", "failed : path"), ("query", "failed : query")]),
([], [("path", "failed : path"), ("query", "failed : query")]),
([], [("path", "failed : path"), ("query", "failed : query")]),
]
cassette = Cassette("test")
for request in range(1, 4):
cassette.append(request, "response")
result = cassette.find_requests_with_most_matches("fake request")
assert result == []
@mock.patch("vcr.cassette.get_matchers_results")
def test_find_requests_with_most_matches_many_similar_requests(mock_get_matchers_results):
mock_get_matchers_results.side_effect = [
(["method", "path"], [("query", "failed : query")]),
(["method"], [("path", "failed : path"), ("query", "failed : query")]),
(["method", "path"], [("query", "failed : query")]),
]
cassette = Cassette("test")
for request in range(1, 4):
cassette.append(request, "response")
result = cassette.find_requests_with_most_matches("fake request")
assert result == [
(1, ["method", "path"], [("query", "failed : query")]),
(3, ["method", "path"], [("query", "failed : query")]),
]
def test_used_interactions(tmpdir):
interactions = [
{"request": {"body": "", "uri": "foo1", "method": "GET", "headers": {}}, "response": "bar1"},
{"request": {"body": "", "uri": "foo2", "method": "GET", "headers": {}}, "response": "bar2"},
{"request": {"body": "", "uri": "foo3", "method": "GET", "headers": {}}, "response": "bar3"},
]
file = tmpdir.join("test_cassette.yml")
file.write(yaml.dump({"interactions": [interactions[0], interactions[1]]}))
cassette = Cassette.load(path=str(file))
request = Request._from_dict(interactions[1]["request"])
cassette.play_response(request)
assert len(cassette._played_interactions) < len(cassette._old_interactions)
request = Request._from_dict(interactions[2]["request"])
cassette.append(request, interactions[2]["response"])
assert len(cassette._new_interactions()) == 1
used_interactions = cassette._played_interactions + cassette._new_interactions()
assert len(used_interactions) == 2

View File

@@ -1,72 +0,0 @@
from unittest import mock
import pytest
from vcr import errors
from vcr.cassette import Cassette
@mock.patch("vcr.cassette.Cassette.find_requests_with_most_matches")
@pytest.mark.parametrize(
"most_matches, expected_message",
[
# No request match found
([], "No similar requests, that have not been played, found."),
# One matcher failed
(
[("similar request", ["method", "path"], [("query", "failed : query")])],
"Found 1 similar requests with 1 different matcher(s) :\n"
"\n1 - ('similar request').\n"
"Matchers succeeded : ['method', 'path']\n"
"Matchers failed :\n"
"query - assertion failure :\n"
"failed : query\n",
),
# Multiple failed matchers
(
[("similar request", ["method"], [("query", "failed : query"), ("path", "failed : path")])],
"Found 1 similar requests with 2 different matcher(s) :\n"
"\n1 - ('similar request').\n"
"Matchers succeeded : ['method']\n"
"Matchers failed :\n"
"query - assertion failure :\n"
"failed : query\n"
"path - assertion failure :\n"
"failed : path\n",
),
# Multiple similar requests
(
[
("similar request", ["method"], [("query", "failed : query")]),
("similar request 2", ["method"], [("query", "failed : query 2")]),
],
"Found 2 similar requests with 1 different matcher(s) :\n"
"\n1 - ('similar request').\n"
"Matchers succeeded : ['method']\n"
"Matchers failed :\n"
"query - assertion failure :\n"
"failed : query\n"
"\n2 - ('similar request 2').\n"
"Matchers succeeded : ['method']\n"
"Matchers failed :\n"
"query - assertion failure :\n"
"failed : query 2\n",
),
],
)
def test_CannotOverwriteExistingCassetteException_get_message(
mock_find_requests_with_most_matches,
most_matches,
expected_message,
):
mock_find_requests_with_most_matches.return_value = most_matches
cassette = Cassette("path")
failed_request = "request"
exception_message = errors.CannotOverwriteExistingCassetteException._get_message(cassette, "request")
expected = (
f"Can't overwrite existing cassette ({cassette._path!r}) "
f"in your current record mode ({cassette.record_mode!r}).\n"
f"No match for the request ({failed_request!r}) was found.\n"
f"{expected_message}"
)
assert exception_message == expected

View File

@@ -1,19 +1,15 @@
from six import BytesIO
from vcr.filters import (
remove_headers, replace_headers,
remove_query_parameters, replace_query_parameters,
remove_post_data_parameters, replace_post_data_parameters,
decode_response
)
from vcr.compat import mock
from vcr.request import Request
import gzip
import json
import zlib
from io import BytesIO
from unittest import mock
from vcr.filters import (
decode_response,
remove_headers,
remove_post_data_parameters,
remove_query_parameters,
replace_headers,
replace_post_data_parameters,
replace_query_parameters,
)
from vcr.request import Request
def test_replace_headers():
@@ -24,24 +20,31 @@ def test_replace_headers():
# 4. replacing a header using a callable
# 5. removing a header using a callable
# 6. replacing a header that doesn't exist
headers = {"one": ["keep"], "two": ["lose"], "three": ["change"], "four": ["shout"], "five": ["whisper"]}
request = Request("GET", "http://google.com", "", headers)
replace_headers(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.headers == {"one": "keep", "three": "tada", "four": "SHOUT"}
headers = {
'one': ['keep'],
'two': ['lose'],
'three': ['change'],
'four': ['shout'],
'five': ['whisper'],
}
request = Request('GET', 'http://google.com', '', headers)
replace_headers(request, [
('two', None),
('three', 'tada'),
('four', lambda key, value, request: value.upper()),
('five', lambda key, value, request: None),
('six', 'doesntexist'),
])
assert request.headers == {
'one': 'keep',
'three': 'tada',
'four': 'SHOUT',
}
def test_replace_headers_empty():
headers = {"hello": "goodbye", "secret": "header"}
request = Request("GET", "http://google.com", "", headers)
headers = {'hello': 'goodbye', 'secret': 'header'}
request = Request('GET', 'http://google.com', '', headers)
replace_headers(request, [])
assert request.headers == headers
@@ -49,20 +52,22 @@ def test_replace_headers_empty():
def test_replace_headers_callable():
# This goes beyond test_replace_headers() to ensure that the callable
# receives the expected arguments.
headers = {"hey": "there"}
request = Request("GET", "http://google.com", "", headers)
callme = mock.Mock(return_value="ho")
replace_headers(request, [("hey", callme)])
assert request.headers == {"hey": "ho"}
assert callme.call_args == ((), {"request": request, "key": "hey", "value": "there"})
headers = {'hey': 'there'}
request = Request('GET', 'http://google.com', '', headers)
callme = mock.Mock(return_value='ho')
replace_headers(request, [('hey', callme)])
assert request.headers == {'hey': 'ho'}
assert callme.call_args == ((), {'request': request,
'key': 'hey',
'value': 'there'})
def test_remove_headers():
# Test the backward-compatible API wrapper.
headers = {"hello": ["goodbye"], "secret": ["header"]}
request = Request("GET", "http://google.com", "", headers)
remove_headers(request, ["secret"])
assert request.headers == {"hello": "goodbye"}
headers = {'hello': ['goodbye'], 'secret': ['header']}
request = Request('GET', 'http://google.com', '', headers)
remove_headers(request, ['secret'])
assert request.headers == {'hello': 'goodbye'}
def test_replace_query_parameters():
@@ -73,45 +78,48 @@ def test_replace_query_parameters():
# 4. replacing a parameter using a callable
# 5. removing a parameter using a callable
# 6. replacing a parameter that doesn't exist
uri = "http://g.com/?one=keep&two=lose&three=change&four=shout&five=whisper"
request = Request("GET", uri, "", {})
replace_query_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.query == [("four", "SHOUT"), ("one", "keep"), ("three", "tada")]
uri = 'http://g.com/?one=keep&two=lose&three=change&four=shout&five=whisper'
request = Request('GET', uri, '', {})
replace_query_parameters(request, [
('two', None),
('three', 'tada'),
('four', lambda key, value, request: value.upper()),
('five', lambda key, value, request: None),
('six', 'doesntexist'),
])
assert request.query == [
('four', 'SHOUT'),
('one', 'keep'),
('three', 'tada'),
]
def test_remove_all_query_parameters():
uri = "http://g.com/?q=cowboys&w=1"
request = Request("GET", uri, "", {})
replace_query_parameters(request, [("w", None), ("q", None)])
assert request.uri == "http://g.com/"
uri = 'http://g.com/?q=cowboys&w=1'
request = Request('GET', uri, '', {})
replace_query_parameters(request, [('w', None), ('q', None)])
assert request.uri == 'http://g.com/'
def test_replace_query_parameters_callable():
# This goes beyond test_replace_query_parameters() to ensure that the
# callable receives the expected arguments.
uri = "http://g.com/?hey=there"
request = Request("GET", uri, "", {})
callme = mock.Mock(return_value="ho")
replace_query_parameters(request, [("hey", callme)])
assert request.uri == "http://g.com/?hey=ho"
assert callme.call_args == ((), {"request": request, "key": "hey", "value": "there"})
uri = 'http://g.com/?hey=there'
request = Request('GET', uri, '', {})
callme = mock.Mock(return_value='ho')
replace_query_parameters(request, [('hey', callme)])
assert request.uri == 'http://g.com/?hey=ho'
assert callme.call_args == ((), {'request': request,
'key': 'hey',
'value': 'there'})
def test_remove_query_parameters():
# Test the backward-compatible API wrapper.
uri = "http://g.com/?q=cowboys&w=1"
request = Request("GET", uri, "", {})
remove_query_parameters(request, ["w"])
assert request.uri == "http://g.com/?q=cowboys"
uri = 'http://g.com/?q=cowboys&w=1'
request = Request('GET', uri, '', {})
remove_query_parameters(request, ['w'])
assert request.uri == 'http://g.com/?q=cowboys'
def test_replace_post_data_parameters():
@@ -122,58 +130,38 @@ def test_replace_post_data_parameters():
# 4. replacing a parameter using a callable
# 5. removing a parameter using a callable
# 6. replacing a parameter that doesn't exist
body = b"one=keep&two=lose&three=change&four=shout&five=whisper"
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.body == b"one=keep&three=tada&four=SHOUT"
def test_replace_post_data_parameters_empty_body():
# Ensure that replace_post_data_parameters doesn't raise an exception when the body is empty.
body = None
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
assert request.body is None
body = b'one=keep&two=lose&three=change&four=shout&five=whisper'
request = Request('POST', 'http://google.com', body, {})
replace_post_data_parameters(request, [
('two', None),
('three', 'tada'),
('four', lambda key, value, request: value.upper()),
('five', lambda key, value, request: None),
('six', 'doesntexist'),
])
assert request.body == b'one=keep&three=tada&four=SHOUT'
def test_remove_post_data_parameters():
# Test the backward-compatible API wrapper.
body = b"id=secret&foo=bar"
request = Request("POST", "http://google.com", body, {})
remove_post_data_parameters(request, ["id"])
assert request.body == b"foo=bar"
body = b'id=secret&foo=bar'
request = Request('POST', 'http://google.com', body, {})
remove_post_data_parameters(request, ['id'])
assert request.body == b'foo=bar'
def test_preserve_multiple_post_data_parameters():
body = b"id=secret&foo=bar&foo=baz"
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(request, [("id", None)])
assert request.body == b"foo=bar&foo=baz"
body = b'id=secret&foo=bar&foo=baz'
request = Request('POST', 'http://google.com', body, {})
replace_post_data_parameters(request, [('id', None)])
assert request.body == b'foo=bar&foo=baz'
def test_remove_all_post_data_parameters():
body = b"id=secret&foo=bar"
request = Request("POST", "http://google.com", body, {})
replace_post_data_parameters(request, [("id", None), ("foo", None)])
assert request.body == b""
body = b'id=secret&foo=bar'
request = Request('POST', 'http://google.com', body, {})
replace_post_data_parameters(request, [('id', None), ('foo', None)])
assert request.body == b''
def test_replace_json_post_data_parameters():
@@ -185,19 +173,16 @@ def test_replace_json_post_data_parameters():
# 5. removing a parameter using a callable
# 6. replacing a parameter that doesn't exist
body = b'{"one": "keep", "two": "lose", "three": "change", "four": "shout", "five": "whisper"}'
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/json"
replace_post_data_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
request_data = json.loads(request.body)
request = Request('POST', 'http://google.com', body, {})
request.headers['Content-Type'] = 'application/json'
replace_post_data_parameters(request, [
('two', None),
('three', 'tada'),
('four', lambda key, value, request: value.upper()),
('five', lambda key, value, request: None),
('six', 'doesntexist'),
])
request_data = json.loads(request.body.decode('utf-8'))
expected_data = json.loads('{"one": "keep", "three": "tada", "four": "SHOUT"}')
assert request_data == expected_data
@@ -205,147 +190,85 @@ def test_replace_json_post_data_parameters():
def test_remove_json_post_data_parameters():
# Test the backward-compatible API wrapper.
body = b'{"id": "secret", "foo": "bar", "baz": "qux"}'
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/json"
remove_post_data_parameters(request, ["id"])
request_body_json = json.loads(request.body)
expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}')
request = Request('POST', 'http://google.com', body, {})
request.headers['Content-Type'] = 'application/json'
remove_post_data_parameters(request, ['id'])
request_body_json = json.loads(request.body.decode('utf-8'))
expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}'.decode('utf-8'))
assert request_body_json == expected_json
def test_remove_all_json_post_data_parameters():
body = b'{"id": "secret", "foo": "bar"}'
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/json"
replace_post_data_parameters(request, [("id", None), ("foo", None)])
assert request.body == b"{}"
def test_replace_dict_post_data_parameters():
# This tests all of:
# 1. keeping a parameter
# 2. removing a parameter
# 3. replacing a parameter
# 4. replacing a parameter using a callable
# 5. removing a parameter using a callable
# 6. replacing a parameter that doesn't exist
body = {"one": "keep", "two": "lose", "three": "change", "four": "shout", "five": "whisper"}
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/x-www-form-urlencoded"
replace_post_data_parameters(
request,
[
("two", None),
("three", "tada"),
("four", lambda key, value, request: value.upper()),
("five", lambda key, value, request: None),
("six", "doesntexist"),
],
)
expected_data = {"one": "keep", "three": "tada", "four": "SHOUT"}
assert request.body == expected_data
def test_remove_dict_post_data_parameters():
# Test the backward-compatible API wrapper.
body = {"id": "secret", "foo": "bar", "baz": "qux"}
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/x-www-form-urlencoded"
remove_post_data_parameters(request, ["id"])
expected_data = {"foo": "bar", "baz": "qux"}
assert request.body == expected_data
def test_remove_all_dict_post_data_parameters():
body = {"id": "secret", "foo": "bar"}
request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/x-www-form-urlencoded"
replace_post_data_parameters(request, [("id", None), ("foo", None)])
assert request.body == {}
request = Request('POST', 'http://google.com', body, {})
request.headers['Content-Type'] = 'application/json'
replace_post_data_parameters(request, [('id', None), ('foo', None)])
assert request.body == b'{}'
def test_decode_response_uncompressed():
recorded_response = {
"status": {"message": "OK", "code": 200},
"status": {
"message": "OK",
"code": 200
},
"headers": {
"content-length": ["10806"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {"string": b""},
"body": {
"string": b""
}
}
assert decode_response(recorded_response) == recorded_response
def test_decode_response_deflate():
body = b"deflate message"
body = b'deflate message'
deflate_response = {
"body": {"string": zlib.compress(body)},
"headers": {
"access-control-allow-credentials": ["true"],
"access-control-allow-origin": ["*"],
"connection": ["keep-alive"],
"content-encoding": ["deflate"],
"content-length": ["177"],
"content-type": ["application/json"],
"date": ["Wed, 02 Dec 2015 19:44:32 GMT"],
"server": ["nginx"],
'body': {'string': zlib.compress(body)},
'headers': {
'access-control-allow-credentials': ['true'],
'access-control-allow-origin': ['*'],
'connection': ['keep-alive'],
'content-encoding': ['deflate'],
'content-length': ['177'],
'content-type': ['application/json'],
'date': ['Wed, 02 Dec 2015 19:44:32 GMT'],
'server': ['nginx']
},
"status": {"code": 200, "message": "OK"},
'status': {'code': 200, 'message': 'OK'}
}
decoded_response = decode_response(deflate_response)
assert decoded_response["body"]["string"] == body
assert decoded_response["headers"]["content-length"] == [str(len(body))]
def test_decode_response_deflate_already_decompressed():
body = b"deflate message"
gzip_response = {
"body": {"string": body},
"headers": {
"content-encoding": ["deflate"],
},
}
decoded_response = decode_response(gzip_response)
assert decoded_response["body"]["string"] == body
assert decoded_response['body']['string'] == body
assert decoded_response['headers']['content-length'] == [str(len(body))]
def test_decode_response_gzip():
body = b"gzip message"
body = b'gzip message'
buf = BytesIO()
f = gzip.GzipFile("a", fileobj=buf, mode="wb")
f = gzip.GzipFile('a', fileobj=buf, mode='wb')
f.write(body)
f.close()
compressed_body = buf.getvalue()
buf.close()
gzip_response = {
"body": {"string": compressed_body},
"headers": {
"access-control-allow-credentials": ["true"],
"access-control-allow-origin": ["*"],
"connection": ["keep-alive"],
"content-encoding": ["gzip"],
"content-length": ["177"],
"content-type": ["application/json"],
"date": ["Wed, 02 Dec 2015 19:44:32 GMT"],
"server": ["nginx"],
'body': {'string': compressed_body},
'headers': {
'access-control-allow-credentials': ['true'],
'access-control-allow-origin': ['*'],
'connection': ['keep-alive'],
'content-encoding': ['gzip'],
'content-length': ['177'],
'content-type': ['application/json'],
'date': ['Wed, 02 Dec 2015 19:44:32 GMT'],
'server': ['nginx']
},
"status": {"code": 200, "message": "OK"},
'status': {'code': 200, 'message': 'OK'}
}
decoded_response = decode_response(gzip_response)
assert decoded_response["body"]["string"] == body
assert decoded_response["headers"]["content-length"] == [str(len(body))]
def test_decode_response_gzip_already_decompressed():
body = b"gzip message"
gzip_response = {
"body": {"string": body},
"headers": {
"content-encoding": ["gzip"],
},
}
decoded_response = decode_response(gzip_response)
assert decoded_response["body"]["string"] == body
assert decoded_response['body']['string'] == body
assert decoded_response['headers']['content-length'] == [str(len(body))]

View File

@@ -1,18 +1,19 @@
import pytest
from vcr.request import Request
from vcr.serializers.jsonserializer import serialize
from vcr.request import Request
def test_serialize_binary():
request = Request(method="GET", uri="http://localhost/", body="", headers={})
cassette = {"requests": [request], "responses": [{"body": b"\x8c"}]}
request = Request(
method='GET',
uri='http://localhost/',
body='',
headers={},
)
cassette = {'requests': [request], 'responses': [{'body': b'\x8c'}]}
with pytest.raises(Exception) as e:
serialize(cassette)
assert (
e.message
== "Error serializing cassette to JSON. Does this \
assert e.message == "Error serializing cassette to JSON. Does this \
HTTP interaction contain binary data? If so, use a different \
serializer (like the yaml serializer) for this request"
)

View File

@@ -1,339 +1,159 @@
import itertools
from unittest import mock
import pytest
from vcr import matchers, request
from vcr import matchers
from vcr import request
# the dict maps each key to a request that differs from the 'base'
# request in exactly that one attribute.
REQUESTS = {
"base": request.Request("GET", "http://host.com/p?a=b", "", {}),
"method": request.Request("POST", "http://host.com/p?a=b", "", {}),
"scheme": request.Request("GET", "https://host.com:80/p?a=b", "", {}),
"host": request.Request("GET", "http://another-host.com/p?a=b", "", {}),
"port": request.Request("GET", "http://host.com:90/p?a=b", "", {}),
"path": request.Request("GET", "http://host.com/x?a=b", "", {}),
"query": request.Request("GET", "http://host.com/p?c=d", "", {}),
'base': request.Request('GET', 'http://host.com/p?a=b', '', {}),
'method': request.Request('POST', 'http://host.com/p?a=b', '', {}),
'scheme': request.Request('GET', 'https://host.com:80/p?a=b', '', {}),
'host': request.Request('GET', 'http://another-host.com/p?a=b', '', {}),
'port': request.Request('GET', 'http://host.com:90/p?a=b', '', {}),
'path': request.Request('GET', 'http://host.com/x?a=b', '', {}),
'query': request.Request('GET', 'http://host.com/p?c=d', '', {}),
}
def assert_matcher(matcher_name):
matcher = getattr(matchers, matcher_name)
for k1, k2 in itertools.permutations(REQUESTS, 2):
expecting_assertion_error = matcher_name in {k1, k2}
if expecting_assertion_error:
with pytest.raises(AssertionError):
matcher(REQUESTS[k1], REQUESTS[k2])
matched = matcher(REQUESTS[k1], REQUESTS[k2])
if matcher_name in {k1, k2}:
assert not matched
else:
assert matcher(REQUESTS[k1], REQUESTS[k2]) is None
assert matched
def test_uri_matcher():
for k1, k2 in itertools.permutations(REQUESTS, 2):
expecting_assertion_error = {k1, k2} != {"base", "method"}
if expecting_assertion_error:
with pytest.raises(AssertionError):
matchers.uri(REQUESTS[k1], REQUESTS[k2])
matched = matchers.uri(REQUESTS[k1], REQUESTS[k2])
if {k1, k2} != {'base', 'method'}:
assert not matched
else:
assert matchers.uri(REQUESTS[k1], REQUESTS[k2]) is None
assert matched
req1_body = (
b"<?xml version='1.0'?><methodCall><methodName>test</methodName>"
b"<params><param><value><array><data><value><struct>"
b"<member><name>a</name><value><string>1</string></value></member>"
b"<member><name>b</name><value><string>2</string></value></member>"
b"</struct></value></data></array></value></param></params></methodCall>"
)
req2_body = (
b"<?xml version='1.0'?><methodCall><methodName>test</methodName>"
b"<params><param><value><array><data><value><struct>"
b"<member><name>b</name><value><string>2</string></value></member>"
b"<member><name>a</name><value><string>1</string></value></member>"
b"</struct></value></data></array></value></param></params></methodCall>"
)
boto3_bytes_headers = {
"X-Amz-Content-SHA256": b"UNSIGNED-PAYLOAD",
"Cache-Control": b"max-age=31536000, public",
"X-Amz-Date": b"20191102T143910Z",
"User-Agent": b"Boto3/1.9.102 Python/3.5.3 Linux/4.15.0-54-generic Botocore/1.12.253 Resource",
"Content-MD5": b"GQqjEXsRqrPyxfTl99nkAg==",
"Content-Type": b"text/plain",
"Expect": b"100-continue",
"Content-Length": "21",
}
chunked_headers = {
"Transfer-Encoding": "chunked",
}
req1_body = (b"<?xml version='1.0'?><methodCall><methodName>test</methodName>"
b"<params><param><value><array><data><value><struct>"
b"<member><name>a</name><value><string>1</string></value></member>"
b"<member><name>b</name><value><string>2</string></value></member>"
b"</struct></value></data></array></value></param></params></methodCall>")
req2_body = (b"<?xml version='1.0'?><methodCall><methodName>test</methodName>"
b"<params><param><value><array><data><value><struct>"
b"<member><name>b</name><value><string>2</string></value></member>"
b"<member><name>a</name><value><string>1</string></value></member>"
b"</struct></value></data></array></value></param></params></methodCall>")
@pytest.mark.parametrize(
"r1, r2",
[
(
request.Request("POST", "http://host.com/", "123", {}),
request.Request("POST", "http://another-host.com/", "123", {"Some-Header": "value"}),
@pytest.mark.parametrize("r1, r2", [
(
request.Request('POST', 'http://host.com/', '123', {}),
request.Request('POST', 'http://another-host.com/',
'123', {'Some-Header': 'value'})
),
(
request.Request('POST', 'http://host.com/', 'a=1&b=2',
{'Content-Type': 'application/x-www-form-urlencoded'}),
request.Request('POST', 'http://host.com/', 'b=2&a=1',
{'Content-Type': 'application/x-www-form-urlencoded'})
),
(
request.Request('POST', 'http://host.com/', '123', {}),
request.Request('POST', 'http://another-host.com/', '123', {'Some-Header': 'value'})
),
(
request.Request(
'POST', 'http://host.com/', 'a=1&b=2',
{'Content-Type': 'application/x-www-form-urlencoded'}
),
(
request.Request(
"POST",
"http://host.com/",
"a=1&b=2",
{"Content-Type": "application/x-www-form-urlencoded"},
),
request.Request(
"POST",
"http://host.com/",
"b=2&a=1",
{"Content-Type": "application/x-www-form-urlencoded"},
),
request.Request(
'POST', 'http://host.com/', 'b=2&a=1',
{'Content-Type': 'application/x-www-form-urlencoded'}
)
),
(
request.Request(
'POST', 'http://host.com/', '{"a": 1, "b": 2}',
{'Content-Type': 'application/json'}
),
(
request.Request("POST", "http://host.com/", "123", {}),
request.Request("POST", "http://another-host.com/", "123", {"Some-Header": "value"}),
request.Request(
'POST', 'http://host.com/', '{"b": 2, "a": 1}',
{'content-type': 'application/json'}
)
),
(
request.Request(
'POST', 'http://host.com/', req1_body,
{'User-Agent': 'xmlrpclib', 'Content-Type': 'text/xml'}
),
(
request.Request(
"POST",
"http://host.com/",
"a=1&b=2",
{"Content-Type": "application/x-www-form-urlencoded"},
),
request.Request(
"POST",
"http://host.com/",
"b=2&a=1",
{"Content-Type": "application/x-www-form-urlencoded"},
),
request.Request(
'POST', 'http://host.com/', req2_body,
{'user-agent': 'somexmlrpc', 'content-type': 'text/xml'}
)
),
(
request.Request(
'POST', 'http://host.com/',
'{"a": 1, "b": 2}', {'Content-Type': 'application/json'}
),
(
request.Request(
"POST",
"http://host.com/",
'{"a": 1, "b": 2}',
{"Content-Type": "application/json"},
),
request.Request(
"POST",
"http://host.com/",
'{"b": 2, "a": 1}',
{"content-type": "application/json"},
),
),
(
request.Request(
"POST",
"http://host.com/",
req1_body,
{"User-Agent": "xmlrpclib", "Content-Type": "text/xml"},
),
request.Request(
"POST",
"http://host.com/",
req2_body,
{"user-agent": "somexmlrpc", "content-type": "text/xml"},
),
),
(
request.Request(
"POST",
"http://host.com/",
'{"a": 1, "b": 2}',
{"Content-Type": "application/json"},
),
request.Request(
"POST",
"http://host.com/",
'{"b": 2, "a": 1}',
{"content-type": "application/json"},
),
),
(
# special case for boto3 bytes headers
request.Request("POST", "http://aws.custom.com/", b"123", boto3_bytes_headers),
request.Request("POST", "http://aws.custom.com/", b"123", boto3_bytes_headers),
),
(
# chunked transfer encoding: decoded bytes versus encoded bytes
request.Request("POST", "scheme1://host1.test/", b"123456789_123456", chunked_headers),
request.Request(
"GET",
"scheme2://host2.test/",
b"10\r\n123456789_123456\r\n0\r\n\r\n",
chunked_headers,
),
),
(
# chunked transfer encoding: bytes iterator versus string iterator
request.Request(
"POST",
"scheme1://host1.test/",
iter([b"123456789_", b"123456"]),
chunked_headers,
),
request.Request("GET", "scheme2://host2.test/", iter(["123456789_", "123456"]), chunked_headers),
),
(
# chunked transfer encoding: bytes iterator versus single byte iterator
request.Request(
"POST",
"scheme1://host1.test/",
iter([b"123456789_", b"123456"]),
chunked_headers,
),
request.Request("GET", "scheme2://host2.test/", iter(b"123456789_123456"), chunked_headers),
),
],
)
request.Request(
'POST', 'http://host.com/',
'{"b": 2, "a": 1}', {'content-type': 'application/json'}
)
)
])
def test_body_matcher_does_match(r1, r2):
assert matchers.body(r1, r2) is None
assert matchers.body(r1, r2)
@pytest.mark.parametrize(
"r1, r2",
[
(
request.Request("POST", "http://host.com/", '{"a": 1, "b": 2}', {}),
request.Request("POST", "http://host.com/", '{"b": 2, "a": 1}', {}),
@pytest.mark.parametrize("r1, r2", [
(
request.Request('POST', 'http://host.com/', '{"a": 1, "b": 2}', {}),
request.Request('POST', 'http://host.com/', '{"b": 2, "a": 1}', {}),
),
(
request.Request(
'POST', 'http://host.com/',
'{"a": 1, "b": 3}', {'Content-Type': 'application/json'}
),
(
request.Request(
"POST",
"http://host.com/",
'{"a": 1, "b": 3}',
{"Content-Type": "application/json"},
),
request.Request(
"POST",
"http://host.com/",
'{"b": 2, "a": 1}',
{"content-type": "application/json"},
),
request.Request(
'POST', 'http://host.com/',
'{"b": 2, "a": 1}', {'content-type': 'application/json'}
)
),
(
request.Request(
'POST', 'http://host.com/', req1_body, {'Content-Type': 'text/xml'}
),
(
request.Request("POST", "http://host.com/", req1_body, {"Content-Type": "text/xml"}),
request.Request("POST", "http://host.com/", req2_body, {"content-type": "text/xml"}),
),
],
)
request.Request(
'POST', 'http://host.com/', req2_body, {'content-type': 'text/xml'}
)
)
])
def test_body_match_does_not_match(r1, r2):
with pytest.raises(AssertionError):
matchers.body(r1, r2)
assert not matchers.body(r1, r2)
def test_query_matcher():
req1 = request.Request("GET", "http://host.com/?a=b&c=d", "", {})
req2 = request.Request("GET", "http://host.com/?c=d&a=b", "", {})
assert matchers.query(req1, req2) is None
req1 = request.Request('GET', 'http://host.com/?a=b&c=d', '', {})
req2 = request.Request('GET', 'http://host.com/?c=d&a=b', '', {})
assert matchers.query(req1, req2)
req1 = request.Request("GET", "http://host.com/?a=b&a=b&c=d", "", {})
req2 = request.Request("GET", "http://host.com/?a=b&c=d&a=b", "", {})
req3 = request.Request("GET", "http://host.com/?c=d&a=b&a=b", "", {})
assert matchers.query(req1, req2) is None
assert matchers.query(req1, req3) is None
req1 = request.Request('GET', 'http://host.com/?a=b&a=b&c=d', '', {})
req2 = request.Request('GET', 'http://host.com/?a=b&c=d&a=b', '', {})
req3 = request.Request('GET', 'http://host.com/?c=d&a=b&a=b', '', {})
assert matchers.query(req1, req2)
assert matchers.query(req1, req3)
def test_matchers():
assert_matcher("method")
assert_matcher("scheme")
assert_matcher("host")
assert_matcher("port")
assert_matcher("path")
assert_matcher("query")
def test_evaluate_matcher_does_match():
def bool_matcher(r1, r2):
return True
def assertion_matcher(r1, r2):
assert 1 == 1
r1, r2 = None, None
for matcher in [bool_matcher, assertion_matcher]:
match, assertion_msg = matchers._evaluate_matcher(matcher, r1, r2)
assert match is True
assert assertion_msg is None
def test_evaluate_matcher_does_not_match():
def bool_matcher(r1, r2):
return False
def assertion_matcher(r1, r2):
# This is like the "assert" statement preventing pytest to recompile it
raise AssertionError()
r1, r2 = None, None
for matcher in [bool_matcher, assertion_matcher]:
match, assertion_msg = matchers._evaluate_matcher(matcher, r1, r2)
assert match is False
assert not assertion_msg
def test_evaluate_matcher_does_not_match_with_assert_message():
def assertion_matcher(r1, r2):
# This is like the "assert" statement preventing pytest to recompile it
raise AssertionError("Failing matcher")
r1, r2 = None, None
match, assertion_msg = matchers._evaluate_matcher(assertion_matcher, r1, r2)
assert match is False
assert assertion_msg == "Failing matcher"
def test_get_assertion_message():
assert matchers.get_assertion_message(None) is None
assert matchers.get_assertion_message("") == ""
def test_get_assertion_message_with_details():
assertion_msg = "q1=1 != q2=1"
expected = assertion_msg
assert matchers.get_assertion_message(assertion_msg) == expected
@pytest.mark.parametrize(
"r1, r2, expected_successes, expected_failures",
[
(
request.Request("GET", "http://host.com/p?a=b", "", {}),
request.Request("GET", "http://host.com/p?a=b", "", {}),
["method", "path"],
[],
),
(
request.Request("GET", "http://host.com/p?a=b", "", {}),
request.Request("POST", "http://host.com/p?a=b", "", {}),
["path"],
["method"],
),
(
request.Request("GET", "http://host.com/p?a=b", "", {}),
request.Request("POST", "http://host.com/path?a=b", "", {}),
[],
["method", "path"],
),
],
)
def test_get_matchers_results(r1, r2, expected_successes, expected_failures):
successes, failures = matchers.get_matchers_results(r1, r2, [matchers.method, matchers.path])
assert successes == expected_successes
assert len(failures) == len(expected_failures)
for i, expected_failure in enumerate(expected_failures):
assert failures[i][0] == expected_failure
assert failures[i][1] is not None
@mock.patch("vcr.matchers.get_matchers_results")
@pytest.mark.parametrize(
"successes, failures, expected_match",
[(["method", "path"], [], True), (["method"], ["path"], False), ([], ["method", "path"], False)],
)
def test_requests_match(mock_get_matchers_results, successes, failures, expected_match):
mock_get_matchers_results.return_value = (successes, failures)
r1 = request.Request("GET", "http://host.com/p?a=b", "", {})
r2 = request.Request("GET", "http://host.com/p?a=b", "", {})
match = matchers.requests_match(r1, r2, [matchers.method, matchers.path])
assert match is expected_match
def test_metchers():
assert_matcher('method')
assert_matcher('scheme')
assert_matcher('host')
assert_matcher('port')
assert_matcher('path')
assert_matcher('query')
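
The matcher tests above exercise vcr.matchers and the assertion-style matcher protocol (return None on a match, raise AssertionError on a mismatch). A minimal usage sketch of how such matchers are typically wired into a VCR instance follows; the cassette path and the "query_and_body" matcher name are illustrative assumptions, not code from this changeset.

import vcr
from vcr import matchers

def query_and_body(r1, r2):
    # Assertion-style matcher, as exercised by _evaluate_matcher above:
    # succeed by returning None, fail by raising AssertionError with details.
    matchers.query(r1, r2)
    matchers.body(r1, r2)

my_vcr = vcr.VCR(match_on=["method", "host", "path"])
my_vcr.register_matcher("query_and_body", query_and_body)

@my_vcr.use_cassette("example_cassette.yaml", match_on=["method", "query_and_body"])
def fetch_something():
    ...  # perform the HTTP request here; it is matched against the cassette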


@@ -1,48 +1,41 @@
import filecmp
import json
import shutil
import yaml
import vcr.migration
# Use the libYAML versions if possible
try:
from yaml import CLoader as Loader
except ImportError:
from yaml import Loader
def test_try_migrate_with_json(tmpdir):
cassette = tmpdir.join("cassette.json").strpath
shutil.copy("tests/fixtures/migration/old_cassette.json", cassette)
cassette = tmpdir.join('cassette.json').strpath
shutil.copy('tests/fixtures/migration/old_cassette.json', cassette)
assert vcr.migration.try_migrate(cassette)
with open("tests/fixtures/migration/new_cassette.json") as f:
with open('tests/fixtures/migration/new_cassette.json', 'r') as f:
expected_json = json.load(f)
with open(cassette) as f:
with open(cassette, 'r') as f:
actual_json = json.load(f)
assert actual_json == expected_json
def test_try_migrate_with_yaml(tmpdir):
cassette = tmpdir.join("cassette.yaml").strpath
shutil.copy("tests/fixtures/migration/old_cassette.yaml", cassette)
cassette = tmpdir.join('cassette.yaml').strpath
shutil.copy('tests/fixtures/migration/old_cassette.yaml', cassette)
assert vcr.migration.try_migrate(cassette)
with open("tests/fixtures/migration/new_cassette.yaml") as f:
expected_yaml = yaml.load(f, Loader=Loader)
with open(cassette) as f:
actual_yaml = yaml.load(f, Loader=Loader)
with open('tests/fixtures/migration/new_cassette.yaml', 'r') as f:
expected_yaml = yaml.load(f)
with open(cassette, 'r') as f:
actual_yaml = yaml.load(f)
assert actual_yaml == expected_yaml
def test_try_migrate_with_invalid_or_new_cassettes(tmpdir):
cassette = tmpdir.join("cassette").strpath
cassette = tmpdir.join('cassette').strpath
files = [
"tests/fixtures/migration/not_cassette.txt",
"tests/fixtures/migration/new_cassette.yaml",
"tests/fixtures/migration/new_cassette.json",
'tests/fixtures/migration/not_cassette.txt',
'tests/fixtures/migration/new_cassette.yaml',
'tests/fixtures/migration/new_cassette.json',
]
for file_path in files:
shutil.copy(file_path, cassette)
assert not vcr.migration.try_migrate(cassette)
assert filecmp.cmp(cassette, file_path) # should not change file
assert filecmp.cmp(cassette, file_path) # shold not change file
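
Outside the test suite, the same entry point upgrades cassettes in place; a minimal sketch, assuming an illustrative cassette path:

import vcr.migration

if vcr.migration.try_migrate("cassettes/old_cassette.yaml"):
    print("Cassette upgraded to the new format")
else:
    print("Nothing to do: already migrated, or not a cassette at all")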


@@ -4,26 +4,20 @@ from vcr.persisters.filesystem import FilesystemPersister
from vcr.serializers import jsonserializer, yamlserializer
@pytest.mark.parametrize(
"cassette_path, serializer",
[
("tests/fixtures/migration/old_cassette.json", jsonserializer),
("tests/fixtures/migration/old_cassette.yaml", yamlserializer),
],
)
@pytest.mark.parametrize("cassette_path, serializer", [
('tests/fixtures/migration/old_cassette.json', jsonserializer),
('tests/fixtures/migration/old_cassette.yaml', yamlserializer),
])
def test_load_cassette_with_old_cassettes(cassette_path, serializer):
with pytest.raises(ValueError) as excinfo:
FilesystemPersister.load_cassette(cassette_path, serializer)
assert "run the migration script" in excinfo.exconly()
@pytest.mark.parametrize(
"cassette_path, serializer",
[
("tests/fixtures/migration/not_cassette.txt", jsonserializer),
("tests/fixtures/migration/not_cassette.txt", yamlserializer),
],
)
@pytest.mark.parametrize("cassette_path, serializer", [
('tests/fixtures/migration/not_cassette.txt', jsonserializer),
('tests/fixtures/migration/not_cassette.txt', yamlserializer),
])
def test_load_cassette_with_invalid_cassettes(cassette_path, serializer):
with pytest.raises(Exception) as excinfo:
FilesystemPersister.load_cassette(cassette_path, serializer)


@@ -1,85 +1,70 @@
import pytest
from vcr.request import HeadersDict, Request
from vcr.request import Request, HeadersDict
@pytest.mark.parametrize(
"method, uri, expected_str",
[
("GET", "http://www.google.com/", "<Request (GET) http://www.google.com/>"),
("OPTIONS", "*", "<Request (OPTIONS) *>"),
("CONNECT", "host.some.where:1234", "<Request (CONNECT) host.some.where:1234>"),
],
)
def test_str(method, uri, expected_str):
assert str(Request(method, uri, "", {})) == expected_str
def test_str():
req = Request('GET', 'http://www.google.com/', '', {})
str(req) == '<Request (GET) http://www.google.com/>'
def test_headers():
headers = {"X-Header1": ["h1"], "X-Header2": "h2"}
req = Request("GET", "http://go.com/", "", headers)
assert req.headers == {"X-Header1": "h1", "X-Header2": "h2"}
req.headers["X-Header1"] = "h11"
assert req.headers == {"X-Header1": "h11", "X-Header2": "h2"}
headers = {'X-Header1': ['h1'], 'X-Header2': 'h2'}
req = Request('GET', 'http://go.com/', '', headers)
assert req.headers == {'X-Header1': 'h1', 'X-Header2': 'h2'}
req.headers['X-Header1'] = 'h11'
assert req.headers == {'X-Header1': 'h11', 'X-Header2': 'h2'}
def test_add_header_deprecated():
req = Request("GET", "http://go.com/", "", {})
pytest.deprecated_call(req.add_header, "foo", "bar")
assert req.headers == {"foo": "bar"}
req = Request('GET', 'http://go.com/', '', {})
pytest.deprecated_call(req.add_header, 'foo', 'bar')
assert req.headers == {'foo': 'bar'}
@pytest.mark.parametrize(
"uri, expected_port",
[
("http://go.com/", 80),
("http://go.com:80/", 80),
("http://go.com:3000/", 3000),
("https://go.com/", 443),
("https://go.com:443/", 443),
("https://go.com:3000/", 3000),
("*", None),
],
)
@pytest.mark.parametrize("uri, expected_port", [
('http://go.com/', 80),
('http://go.com:80/', 80),
('http://go.com:3000/', 3000),
('https://go.com/', 443),
('https://go.com:443/', 443),
('https://go.com:3000/', 3000),
])
def test_port(uri, expected_port):
req = Request("GET", uri, "", {})
req = Request('GET', uri, '', {})
assert req.port == expected_port
@pytest.mark.parametrize(
"method, uri",
[
("GET", "http://go.com/"),
("GET", "http://go.com:80/"),
("CONNECT", "localhost:1234"),
("OPTIONS", "*"),
],
)
def test_uri(method, uri):
assert Request(method, uri, "", {}).uri == uri
def test_uri():
req = Request('GET', 'http://go.com/', '', {})
assert req.uri == 'http://go.com/'
req = Request('GET', 'http://go.com:80/', '', {})
assert req.uri == 'http://go.com:80/'
def test_HeadersDict():
# Simple test of CaseInsensitiveDict
h = HeadersDict()
assert h == {}
h["Content-Type"] = "application/json"
assert h == {"Content-Type": "application/json"}
assert h["content-type"] == "application/json"
assert h["CONTENT-TYPE"] == "application/json"
h['Content-Type'] = 'application/json'
assert h == {'Content-Type': 'application/json'}
assert h['content-type'] == 'application/json'
assert h['CONTENT-TYPE'] == 'application/json'
# Test feature of HeadersDict: devolve list to first element
h = HeadersDict()
assert h == {}
h["x"] = ["foo", "bar"]
assert h == {"x": "foo"}
h['x'] = ['foo', 'bar']
assert h == {'x': 'foo'}
# Test feature of HeadersDict: preserve original key case
h = HeadersDict()
assert h == {}
h["Content-Type"] = "application/json"
assert h == {"Content-Type": "application/json"}
h["content-type"] = "text/plain"
assert h == {"Content-Type": "text/plain"}
h["CONtent-tyPE"] = "whoa"
assert h == {"Content-Type": "whoa"}
h['Content-Type'] = 'application/json'
assert h == {'Content-Type': 'application/json'}
h['content-type'] = 'text/plain'
assert h == {'Content-Type': 'text/plain'}
h['CONtent-tyPE'] = 'whoa'
assert h == {'Content-Type': 'whoa'}


@@ -1,66 +1,13 @@
import io
# coding: UTF-8
from vcr.stubs import VCRHTTPResponse
def test_response_should_have_headers_field():
recorded_response = {
"status": {"message": "OK", "code": 200},
"headers": {
"content-length": ["0"],
"server": ["gunicorn/18.0"],
"connection": ["Close"],
"access-control-allow-credentials": ["true"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"access-control-allow-origin": ["*"],
"content-type": ["text/html; charset=utf-8"],
"status": {
"message": "OK",
"code": 200
},
"body": {"string": b""},
}
response = VCRHTTPResponse(recorded_response)
assert response.headers is not None
def test_response_headers_should_be_equal_to_msg():
recorded_response = {
"status": {"message": b"OK", "code": 200},
"headers": {
"content-length": ["0"],
"server": ["gunicorn/18.0"],
"connection": ["Close"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {"string": b""},
}
response = VCRHTTPResponse(recorded_response)
assert response.headers == response.msg
def test_response_headers_should_have_correct_values():
recorded_response = {
"status": {"message": "OK", "code": 200},
"headers": {
"content-length": ["10806"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {"string": b""},
}
response = VCRHTTPResponse(recorded_response)
assert response.headers.get("content-length") == "10806"
assert response.headers.get("date") == "Fri, 24 Oct 2014 18:35:37 GMT"
def test_response_parses_correctly_and_fp_attribute_error_is_not_thrown():
"""
Regression test for https://github.com/kevin1024/vcrpy/issues/440
:return:
"""
recorded_response = {
"status": {"message": "OK", "code": 200},
"headers": {
"content-length": ["0"],
"server": ["gunicorn/18.0"],
@@ -71,28 +18,51 @@ def test_response_parses_correctly_and_fp_attribute_error_is_not_thrown():
"content-type": ["text/html; charset=utf-8"],
},
"body": {
"string": b"\nPMID- 19416910\nOWN - NLM\nSTAT- MEDLINE\nDA - 20090513\nDCOM- "
b"20090622\nLR - "
b"20141209\nIS - 1091-6490 (Electronic)\nIS - 0027-8424 (Linking)\nVI - "
b"106\nIP - "
b"19\nDP - 2009 May 12\nTI - Genetic dissection of histone deacetylase "
b"requirement in "
b"tumor cells.\nPG - 7751-5\nLID - 10.1073/pnas.0903139106 [doi]\nAB - "
b"Histone "
b"deacetylase inhibitors (HDACi) represent a new group of drugs currently\n "
b" being "
b"tested in a wide variety of clinical applications. They are especially\n "
b" effective "
b"in preclinical models of cancer where they show antiproliferative\n "
b"action in many "
b"different types of cancer cells. Recently, the first HDACi was\n "
b"approved for the "
b"treatment of cutaneous T cell lymphomas. Most HDACi currently in\n "
b"clinical ",
},
"string": b""
}
}
vcr_response = VCRHTTPResponse(recorded_response)
handle = io.TextIOWrapper(vcr_response, encoding="utf-8")
handle = iter(handle)
articles = list(handle)
assert len(articles) > 1
response = VCRHTTPResponse(recorded_response)
assert response.headers is not None
def test_response_headers_should_be_equal_to_msg():
recorded_response = {
"status": {
"message": b"OK",
"code": 200
},
"headers": {
"content-length": ["0"],
"server": ["gunicorn/18.0"],
"connection": ["Close"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {
"string": b""
}
}
response = VCRHTTPResponse(recorded_response)
assert response.headers == response.msg
def test_response_headers_should_have_correct_values():
recorded_response = {
"status": {
"message": "OK",
"code": 200
},
"headers": {
"content-length": ["10806"],
"date": ["Fri, 24 Oct 2014 18:35:37 GMT"],
"content-type": ["text/html; charset=utf-8"],
},
"body": {
"string": b""
}
}
response = VCRHTTPResponse(recorded_response)
assert response.headers.get('content-length') == "10806"
assert response.headers.get('date') == "Fri, 24 Oct 2014 18:35:37 GMT"


@@ -1,33 +1,35 @@
from unittest import mock
# -*- encoding: utf-8 -*-
import pytest
from vcr.compat import mock
from vcr.request import Request
from vcr.serialize import deserialize, serialize
from vcr.serializers import compat, jsonserializer, yamlserializer
from vcr.serializers import yamlserializer, jsonserializer, compat
def test_deserialize_old_yaml_cassette():
with open("tests/fixtures/migration/old_cassette.yaml") as f, pytest.raises(ValueError):
deserialize(f.read(), yamlserializer)
with open('tests/fixtures/migration/old_cassette.yaml', 'r') as f:
with pytest.raises(ValueError):
deserialize(f.read(), yamlserializer)
def test_deserialize_old_json_cassette():
with open("tests/fixtures/migration/old_cassette.json") as f, pytest.raises(ValueError):
deserialize(f.read(), jsonserializer)
with open('tests/fixtures/migration/old_cassette.json', 'r') as f:
with pytest.raises(ValueError):
deserialize(f.read(), jsonserializer)
def test_deserialize_new_yaml_cassette():
with open("tests/fixtures/migration/new_cassette.yaml") as f:
with open('tests/fixtures/migration/new_cassette.yaml', 'r') as f:
deserialize(f.read(), yamlserializer)
def test_deserialize_new_json_cassette():
with open("tests/fixtures/migration/new_cassette.json") as f:
with open('tests/fixtures/migration/new_cassette.json', 'r') as f:
deserialize(f.read(), jsonserializer)
REQBODY_TEMPLATE = """\
REQBODY_TEMPLATE = u'''\
interactions:
- request:
body: {req_body}
@@ -42,76 +44,96 @@ interactions:
content-length: ['0']
content-type: [application/json]
status: {{code: 200, message: OK}}
"""
'''
# A cassette generated under Python 2 stores the request body as a string,
# but the same cassette generated under Python 3 stores it as "!!binary".
# Make sure we accept both forms, regardless of whether we're running under
# Python 2 or 3.
@pytest.mark.parametrize(
"req_body, expect",
[
# Cassette written under Python 2 (pure ASCII body)
("x=5&y=2", b"x=5&y=2"),
# Cassette written under Python 3 (pure ASCII body)
("!!binary |\n eD01Jnk9Mg==", b"x=5&y=2"),
# Request body has non-ASCII chars (x=föo&y=2), encoded in UTF-8.
('!!python/str "x=f\\xF6o&y=2"', b"x=f\xc3\xb6o&y=2"),
("!!binary |\n eD1mw7ZvJnk9Mg==", b"x=f\xc3\xb6o&y=2"),
# Same request body, this time encoded in UTF-16. In this case, we
# write the same YAML file under both Python 2 and 3, so there's only
# one test case here.
(
"!!binary |\n //54AD0AZgD2AG8AJgB5AD0AMgA=",
b"\xff\xfex\x00=\x00f\x00\xf6\x00o\x00&\x00y\x00=\x002\x00",
),
# Same again, this time encoded in ISO-8859-1.
("!!binary |\n eD1m9m8meT0y", b"x=f\xf6o&y=2"),
],
)
@pytest.mark.parametrize("req_body, expect", [
# Cassette written under Python 2 (pure ASCII body)
('x=5&y=2', b'x=5&y=2'),
# Cassette written under Python 3 (pure ASCII body)
('!!binary |\n eD01Jnk9Mg==', b'x=5&y=2'),
# Request body has non-ASCII chars (x=föo&y=2), encoded in UTF-8.
('!!python/str "x=f\\xF6o&y=2"', b'x=f\xc3\xb6o&y=2'),
('!!binary |\n eD1mw7ZvJnk9Mg==', b'x=f\xc3\xb6o&y=2'),
# Same request body, this time encoded in UTF-16. In this case, we
# write the same YAML file under both Python 2 and 3, so there's only
# one test case here.
('!!binary |\n //54AD0AZgD2AG8AJgB5AD0AMgA=',
b'\xff\xfex\x00=\x00f\x00\xf6\x00o\x00&\x00y\x00=\x002\x00'),
# Same again, this time encoded in ISO-8859-1.
('!!binary |\n eD1m9m8meT0y', b'x=f\xf6o&y=2'),
])
def test_deserialize_py2py3_yaml_cassette(tmpdir, req_body, expect):
cfile = tmpdir.join("test_cassette.yaml")
cfile = tmpdir.join('test_cassette.yaml')
cfile.write(REQBODY_TEMPLATE.format(req_body=req_body))
with open(str(cfile)) as f:
(requests, _) = deserialize(f.read(), yamlserializer)
(requests, responses) = deserialize(f.read(), yamlserializer)
assert requests[0].body == expect
@mock.patch.object(
jsonserializer.json,
"dumps",
side_effect=UnicodeDecodeError("utf-8", b"unicode error in serialization", 0, 10, "blew up"),
)
@mock.patch.object(jsonserializer.json, 'dumps',
side_effect=UnicodeDecodeError('utf-8', b'unicode error in serialization',
0, 10, 'blew up'))
def test_serialize_constructs_UnicodeDecodeError(mock_dumps):
with pytest.raises(UnicodeDecodeError):
jsonserializer.serialize({})
def test_serialize_empty_request():
request = Request(method="POST", uri="http://localhost/", body="", headers={})
request = Request(
method='POST',
uri='http://localhost/',
body='',
headers={},
)
serialize({"requests": [request], "responses": [{}]}, jsonserializer)
serialize(
{'requests': [request], 'responses': [{}]},
jsonserializer
)
def test_serialize_json_request():
request = Request(method="POST", uri="http://localhost/", body="{'hello': 'world'}", headers={})
request = Request(
method='POST',
uri='http://localhost/',
body="{'hello': 'world'}",
headers={},
)
serialize({"requests": [request], "responses": [{}]}, jsonserializer)
serialize(
{'requests': [request], 'responses': [{}]},
jsonserializer
)
def test_serialize_binary_request():
msg = "Does this HTTP interaction contain binary data?"
request = Request(method="POST", uri="http://localhost/", body=b"\x8c", headers={})
request = Request(
method='POST',
uri='http://localhost/',
body=b'\x8c',
headers={},
)
try:
serialize({"requests": [request], "responses": [{}]}, jsonserializer)
serialize(
{'requests': [request], 'responses': [{}]},
jsonserializer
)
except (UnicodeDecodeError, TypeError) as exc:
assert msg in str(exc)
def test_deserialize_no_body_string():
data = {"body": {"string": None}}
data = {'body': {'string': None}}
output = compat.convert_to_bytes(data)
assert data == output
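
For orientation, a minimal sketch of the serialization API covered above, mirroring test_serialize_empty_request with illustrative request values:

from vcr.request import Request
from vcr.serialize import serialize
from vcr.serializers import jsonserializer

request = Request(method="POST", uri="http://localhost/", body="", headers={})
cassette_text = serialize({"requests": [request], "responses": [{}]}, jsonserializer)
print(cassette_text)  # JSON document containing one interaction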


@@ -1,75 +1,18 @@
import contextlib
import http.client as httplib
from io import BytesIO
from tempfile import NamedTemporaryFile
from unittest import mock
from pytest import mark
from vcr import mode, use_cassette
from vcr.cassette import Cassette
from vcr.stubs import VCRHTTPSConnection
from vcr.compat import mock
from vcr.cassette import Cassette
class TestVCRConnection:
def test_setting_of_attributes_get_propagated_to_real_connection(self):
vcr_connection = VCRHTTPSConnection("www.examplehost.com")
vcr_connection.ssl_version = "example_ssl_version"
assert vcr_connection.real_connection.ssl_version == "example_ssl_version"
class TestVCRConnection(object):
@mark.online
@mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=False)
def test_setting_of_attributes_get_propogated_to_real_connection(self):
vcr_connection = VCRHTTPSConnection('www.examplehost.com')
vcr_connection.ssl_version = 'example_ssl_version'
assert vcr_connection.real_connection.ssl_version == 'example_ssl_version'
@mock.patch('vcr.cassette.Cassette.can_play_response_for', return_value=False)
def testing_connect(*args):
with contextlib.closing(VCRHTTPSConnection("www.google.com")) as vcr_connection:
vcr_connection.cassette = Cassette("test", record_mode=mode.ALL)
vcr_connection.real_connection.connect()
assert vcr_connection.real_connection.sock is not None
def test_body_consumed_once_stream(self, tmpdir, httpbin):
self._test_body_consumed_once(
tmpdir,
httpbin,
BytesIO(b"1234567890"),
BytesIO(b"9876543210"),
BytesIO(b"9876543210"),
)
def test_body_consumed_once_iterator(self, tmpdir, httpbin):
self._test_body_consumed_once(
tmpdir,
httpbin,
iter([b"1234567890"]),
iter([b"9876543210"]),
iter([b"9876543210"]),
)
# data2 and data3 should serve the same data, potentially as iterators
def _test_body_consumed_once(
self,
tmpdir,
httpbin,
data1,
data2,
data3,
):
with NamedTemporaryFile(dir=tmpdir, suffix=".yml") as f:
testpath = f.name
            # NOTE: ``use_cassette`` is not okay with the file already
            #       existing, so we use ``.close()`` to not only close
            #       but also delete the empty file before we start.
f.close()
host, port = httpbin.host, httpbin.port
match_on = ["method", "uri", "body"]
with use_cassette(testpath, match_on=match_on):
conn1 = httplib.HTTPConnection(host, port)
conn1.request("POST", "/anything", body=data1)
conn1.getresponse()
conn2 = httplib.HTTPConnection(host, port)
conn2.request("POST", "/anything", body=data2)
conn2.getresponse()
with use_cassette(testpath, match_on=match_on) as cass:
conn3 = httplib.HTTPConnection(host, port)
conn3.request("POST", "/anything", body=data3)
conn3.getresponse()
assert cass.play_counts[0] == 0
assert cass.play_counts[1] == 1
vcr_connection = VCRHTTPSConnection('www.google.com')
vcr_connection.cassette = Cassette('test', record_mode='all')
vcr_connection.real_connection.connect()
assert vcr_connection.real_connection.sock is not None


@@ -1,199 +0,0 @@
import os
from unittest import TextTestRunner, defaultTestLoader
from unittest.mock import MagicMock
from urllib.request import urlopen
import pytest
from vcr.unittest import VCRTestCase
def test_defaults():
class MyTest(VCRTestCase):
def test_foo(self):
pass
test = run_testcase(MyTest)[0][0]
expected_path = os.path.join(os.path.dirname(__file__), "cassettes")
expected_name = "MyTest.test_foo.yaml"
assert os.path.dirname(test.cassette._path) == expected_path
assert os.path.basename(test.cassette._path) == expected_name
def test_disabled():
# Baseline vcr_enabled = True
class MyTest(VCRTestCase):
def test_foo(self):
pass
test = run_testcase(MyTest)[0][0]
assert hasattr(test, "cassette")
# Test vcr_enabled = False
class MyTest(VCRTestCase):
vcr_enabled = False
def test_foo(self):
pass
test = run_testcase(MyTest)[0][0]
assert not hasattr(test, "cassette")
def test_cassette_library_dir():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_cassette_library_dir(self):
return "/testing"
test = run_testcase(MyTest)[0][0]
assert test.cassette._path.startswith("/testing/")
def test_cassette_name():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_cassette_name(self):
return "my-custom-name"
test = run_testcase(MyTest)[0][0]
assert os.path.basename(test.cassette._path) == "my-custom-name"
def test_vcr_kwargs_overridden():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
kwargs = super()._get_vcr_kwargs()
kwargs["record_mode"] = "new_episodes"
return kwargs
test = run_testcase(MyTest)[0][0]
assert test.cassette.record_mode == "new_episodes"
def test_vcr_kwargs_passed():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
return super()._get_vcr_kwargs(
record_mode="new_episodes",
)
test = run_testcase(MyTest)[0][0]
assert test.cassette.record_mode == "new_episodes"
def test_vcr_kwargs_cassette_dir():
# Test that _get_cassette_library_dir applies if cassette_library_dir
# is absent from vcr kwargs.
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
return {
"record_mode": "new_episodes",
}
_get_cassette_library_dir = MagicMock(return_value="/testing")
test = run_testcase(MyTest)[0][0]
assert test.cassette._path.startswith("/testing/")
assert test._get_cassette_library_dir.call_count == 1
# Test that _get_cassette_library_dir is ignored if cassette_library_dir
# is present in vcr kwargs.
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
return {
"cassette_library_dir": "/testing",
}
_get_cassette_library_dir = MagicMock(return_value="/ignored")
test = run_testcase(MyTest)[0][0]
assert test.cassette._path.startswith("/testing/")
assert test._get_cassette_library_dir.call_count == 0
@pytest.mark.online
def test_get_vcr_with_matcher(tmpdir):
cassette_dir = tmpdir.mkdir("cassettes")
assert len(cassette_dir.listdir()) == 0
mock_matcher = MagicMock(return_value=True, __name__="MockMatcher")
class MyTest(VCRTestCase):
def test_foo(self):
self.response = urlopen("http://example.com").read()
def _get_vcr(self):
myvcr = super()._get_vcr()
myvcr.register_matcher("mymatcher", mock_matcher)
myvcr.match_on = ["mymatcher"]
return myvcr
def _get_cassette_library_dir(self):
return str(cassette_dir)
# First run to fill cassette.
test = run_testcase(MyTest)[0][0]
assert len(test.cassette.requests) == 1
assert not mock_matcher.called # nothing in cassette
# Second run to call matcher.
test = run_testcase(MyTest)[0][0]
assert len(test.cassette.requests) == 1
assert mock_matcher.called
assert (
repr(mock_matcher.mock_calls[0])
== "call(<Request (GET) http://example.com>, <Request (GET) http://example.com>)"
)
@pytest.mark.online
def test_testcase_playback(tmpdir):
cassette_dir = tmpdir.mkdir("cassettes")
assert len(cassette_dir.listdir()) == 0
# First test actually reads from the web.
class MyTest(VCRTestCase):
def test_foo(self):
self.response = urlopen("http://example.com").read()
def _get_cassette_library_dir(self):
return str(cassette_dir)
test = run_testcase(MyTest)[0][0]
assert b"Example Domain" in test.response
assert len(test.cassette.requests) == 1
assert test.cassette.play_count == 0
# Second test reads from cassette.
test2 = run_testcase(MyTest)[0][0]
assert test.cassette is not test2.cassette
assert b"Example Domain" in test.response
assert len(test2.cassette.requests) == 1
assert test2.cassette.play_count == 1
def run_testcase(testcase_class):
"""Run all the tests in a TestCase and return them."""
suite = defaultTestLoader.loadTestsFromTestCase(testcase_class)
tests = list(suite._tests)
result = TextTestRunner().run(suite)
return tests, result


@@ -1,33 +0,0 @@
from io import BytesIO, StringIO
import pytest
from vcr import request
from vcr.util import read_body
@pytest.mark.parametrize(
"input_, expected_output",
[
(BytesIO(b"Stream"), b"Stream"),
(StringIO("Stream"), b"Stream"),
(iter(["StringIter"]), b"StringIter"),
(iter(["String", "Iter"]), b"StringIter"),
(iter([b"BytesIter"]), b"BytesIter"),
(iter([b"Bytes", b"Iter"]), b"BytesIter"),
(iter([70, 111, 111]), b"Foo"),
(iter([]), b""),
("String", b"String"),
(b"Bytes", b"Bytes"),
],
)
def test_read_body(input_, expected_output):
r = request.Request("POST", "http://host.com/", input_, {})
assert read_body(r) == expected_output
def test_unsupported_read_body():
r = request.Request("POST", "http://host.com/", iter([[]]), {})
with pytest.raises(ValueError) as excinfo:
assert read_body(r)
assert excinfo.value.args == ("Body type <class 'list'> not supported",)


@@ -1,93 +1,88 @@
import http.client as httplib
import os
from pathlib import Path
from unittest import mock
import pytest
from six.moves import http_client as httplib
from vcr import VCR, mode, use_cassette
from vcr.patch import _HTTPConnection, force_reset
from vcr import VCR, use_cassette
from vcr.compat import mock
from vcr.request import Request
from vcr.stubs import VCRHTTPSConnection
from vcr.patch import _HTTPConnection, force_reset
def test_vcr_use_cassette():
record_mode = mock.Mock()
test_vcr = VCR(record_mode=record_mode)
with mock.patch(
"vcr.cassette.Cassette.load",
return_value=mock.MagicMock(inject=False),
'vcr.cassette.Cassette.load',
return_value=mock.MagicMock(inject=False)
) as mock_cassette_load:
@test_vcr.use_cassette("test")
@test_vcr.use_cassette('test')
def function():
pass
assert mock_cassette_load.call_count == 0
function()
assert mock_cassette_load.call_args[1]["record_mode"] is record_mode
assert mock_cassette_load.call_args[1]['record_mode'] is record_mode
# Make sure that calls to function now use cassettes with the
# new filter_header_settings
test_vcr.record_mode = mock.Mock()
function()
assert mock_cassette_load.call_args[1]["record_mode"] == test_vcr.record_mode
assert mock_cassette_load.call_args[1]['record_mode'] == test_vcr.record_mode
# Ensure that explicitly provided arguments still supersede
# Ensure that explicitly provided arguments still supercede
# those on the vcr.
new_record_mode = mock.Mock()
with test_vcr.use_cassette("test", record_mode=new_record_mode) as cassette:
with test_vcr.use_cassette('test', record_mode=new_record_mode) as cassette:
assert cassette.record_mode == new_record_mode
def test_vcr_before_record_request_params():
base_path = "http://whatever.test/"
base_path = 'http://httpbin.org/'
def before_record_cb(request):
if request.path != "/get":
if request.path != '/get':
return request
test_vcr = VCR(
filter_headers=("cookie", ("bert", "ernie")),
before_record_request=before_record_cb,
ignore_hosts=("www.test.com",),
ignore_localhost=True,
filter_query_parameters=("foo", ("tom", "jerry")),
filter_post_data_parameters=("posted", ("no", "trespassing")),
)
test_vcr = VCR(filter_headers=('cookie', ('bert', 'ernie')),
before_record_request=before_record_cb,
ignore_hosts=('www.test.com',), ignore_localhost=True,
filter_query_parameters=('foo', ('tom', 'jerry')),
filter_post_data_parameters=('posted', ('no', 'trespassing')))
with test_vcr.use_cassette("test") as cassette:
with test_vcr.use_cassette('test') as cassette:
# Test explicit before_record_cb
request_get = Request("GET", base_path + "get", "", {})
request_get = Request('GET', base_path + 'get', '', {})
assert cassette.filter_request(request_get) is None
request = Request("GET", base_path + "get2", "", {})
request = Request('GET', base_path + 'get2', '', {})
assert cassette.filter_request(request) is not None
# Test filter_query_parameters
request = Request("GET", base_path + "?foo=bar", "", {})
request = Request('GET', base_path + '?foo=bar', '', {})
assert cassette.filter_request(request).query == []
request = Request("GET", base_path + "?tom=nobody", "", {})
assert cassette.filter_request(request).query == [("tom", "jerry")]
request = Request('GET', base_path + '?tom=nobody', '', {})
assert cassette.filter_request(request).query == [('tom', 'jerry')]
# Test filter_headers
request = Request(
"GET",
base_path + "?foo=bar",
"",
{"cookie": "test", "other": "fun", "bert": "nobody"},
)
assert cassette.filter_request(request).headers == {"other": "fun", "bert": "ernie"}
request = Request('GET', base_path + '?foo=bar', '',
{'cookie': 'test', 'other': 'fun', 'bert': 'nobody'})
assert (cassette.filter_request(request).headers ==
{'other': 'fun', 'bert': 'ernie'})
# Test ignore_hosts
request = Request("GET", "http://www.test.com?foo=bar", "", {"cookie": "test", "other": "fun"})
request = Request('GET', 'http://www.test.com' + '?foo=bar', '',
{'cookie': 'test', 'other': 'fun'})
assert cassette.filter_request(request) is None
# Test ignore_localhost
request = Request("GET", "http://localhost:8000?foo=bar", "", {"cookie": "test", "other": "fun"})
request = Request('GET', 'http://localhost:8000' + '?foo=bar', '',
{'cookie': 'test', 'other': 'fun'})
assert cassette.filter_request(request) is None
with test_vcr.use_cassette("test", before_record_request=None) as cassette:
with test_vcr.use_cassette('test', before_record_request=None) as cassette:
# Test that before_record can be overwritten in context manager.
assert cassette.filter_request(request_get) is not None
@@ -95,15 +90,16 @@ def test_vcr_before_record_request_params():
def test_vcr_before_record_response_iterable():
# Regression test for #191
request = Request("GET", "/", "", {})
request = Request('GET', '/', '', {})
response = object() # just can't be None
# Prevent actually saving the cassette
with mock.patch("vcr.cassette.FilesystemPersister.save_cassette"):
with mock.patch('vcr.cassette.FilesystemPersister.save_cassette'):
# Baseline: non-iterable before_record_response should work
mock_filter = mock.Mock()
vcr = VCR(before_record_response=mock_filter)
with vcr.use_cassette("test") as cassette:
with vcr.use_cassette('test') as cassette:
assert mock_filter.call_count == 0
cassette.append(request, response)
assert mock_filter.call_count == 1
@@ -111,21 +107,22 @@ def test_vcr_before_record_response_iterable():
# Regression test: iterable before_record_response should work too
mock_filter = mock.Mock()
vcr = VCR(before_record_response=(mock_filter,))
with vcr.use_cassette("test") as cassette:
with vcr.use_cassette('test') as cassette:
assert mock_filter.call_count == 0
cassette.append(request, response)
assert mock_filter.call_count == 1
def test_before_record_response_as_filter():
request = Request("GET", "/", "", {})
request = Request('GET', '/', '', {})
response = object() # just can't be None
# Prevent actually saving the cassette
with mock.patch("vcr.cassette.FilesystemPersister.save_cassette"):
with mock.patch('vcr.cassette.FilesystemPersister.save_cassette'):
filter_all = mock.Mock(return_value=None)
vcr = VCR(before_record_response=filter_all)
with vcr.use_cassette("test") as cassette:
with vcr.use_cassette('test') as cassette:
cassette.append(request, response)
assert cassette.data == []
assert not cassette.dirty
@@ -135,21 +132,22 @@ def test_vcr_path_transformer():
# Regression test for #199
# Prevent actually saving the cassette
with mock.patch("vcr.cassette.FilesystemPersister.save_cassette"):
with mock.patch('vcr.cassette.FilesystemPersister.save_cassette'):
# Baseline: path should be unchanged
vcr = VCR()
with vcr.use_cassette("test") as cassette:
assert cassette._path == "test"
with vcr.use_cassette('test') as cassette:
assert cassette._path == 'test'
# Regression test: path_transformer=None should do the same.
vcr = VCR(path_transformer=None)
with vcr.use_cassette("test") as cassette:
assert cassette._path == "test"
with vcr.use_cassette('test') as cassette:
assert cassette._path == 'test'
# and it should still work with cassette_library_dir
vcr = VCR(cassette_library_dir="/foo")
with vcr.use_cassette("test") as cassette:
assert os.path.abspath(cassette._path) == os.path.abspath("/foo/test")
vcr = VCR(cassette_library_dir='/foo')
with vcr.use_cassette('test') as cassette:
assert cassette._path == '/foo/test'
@pytest.fixture
@@ -157,7 +155,7 @@ def random_fixture():
return 1
@use_cassette("test")
@use_cassette('test')
def test_fixtures_with_use_cassette(random_fixture):
    # Applying a decorator to a test function that requests fixtures can cause
# problems if the decorator does not preserve the signature of the original
@@ -172,16 +170,18 @@ def test_fixtures_with_use_cassette(random_fixture):
def test_custom_patchers():
class Test:
class Test(object):
attribute = None
attribute2 = None
test_vcr = VCR(custom_patches=((Test, "attribute", VCRHTTPSConnection),))
with test_vcr.use_cassette("custom_patches"):
test_vcr = VCR(custom_patches=((Test, 'attribute', VCRHTTPSConnection),))
with test_vcr.use_cassette('custom_patches'):
assert issubclass(Test.attribute, VCRHTTPSConnection)
assert VCRHTTPSConnection is not Test.attribute
with test_vcr.use_cassette("custom_patches", custom_patches=((Test, "attribute2", VCRHTTPSConnection),)):
with test_vcr.use_cassette(
'custom_patches',
custom_patches=((Test, 'attribute2', VCRHTTPSConnection),)
):
assert issubclass(Test.attribute, VCRHTTPSConnection)
assert VCRHTTPSConnection is not Test.attribute
assert Test.attribute is Test.attribute2
@@ -190,11 +190,11 @@ def test_custom_patchers():
def test_inject_cassette():
vcr = VCR(inject_cassette=True)
@vcr.use_cassette("test", record_mode=mode.ONCE)
@vcr.use_cassette('test', record_mode='once')
def with_cassette_injected(cassette):
assert cassette.record_mode == mode.ONCE
assert cassette.record_mode == 'once'
@vcr.use_cassette("test", record_mode=mode.ONCE, inject_cassette=False)
@vcr.use_cassette('test', record_mode='once', inject_cassette=False)
def without_cassette_injected():
pass
@@ -203,94 +203,92 @@ def test_inject_cassette():
def test_with_current_defaults():
vcr = VCR(inject_cassette=True, record_mode=mode.ONCE)
vcr = VCR(inject_cassette=True, record_mode='once')
@vcr.use_cassette("test", with_current_defaults=False)
@vcr.use_cassette('test', with_current_defaults=False)
def changing_defaults(cassette, checks):
checks(cassette)
@vcr.use_cassette("test", with_current_defaults=True)
@vcr.use_cassette('test', with_current_defaults=True)
def current_defaults(cassette, checks):
checks(cassette)
def assert_record_mode_once(cassette):
assert cassette.record_mode == mode.ONCE
assert cassette.record_mode == 'once'
def assert_record_mode_all(cassette):
assert cassette.record_mode == mode.ALL
assert cassette.record_mode == 'all'
changing_defaults(assert_record_mode_once)
current_defaults(assert_record_mode_once)
vcr.record_mode = "all"
vcr.record_mode = 'all'
changing_defaults(assert_record_mode_all)
current_defaults(assert_record_mode_once)
def test_cassette_library_dir_with_decoration_and_no_explicit_path():
library_dir = "/library_dir"
library_dir = '/libary_dir'
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
@vcr.use_cassette()
def function_name(cassette):
assert cassette._path == os.path.join(library_dir, "function_name")
assert cassette._path == os.path.join(library_dir, 'function_name')
function_name()
def test_cassette_library_dir_with_decoration_and_explicit_path():
library_dir = "/library_dir"
library_dir = '/libary_dir'
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
@vcr.use_cassette(path="custom_name")
@vcr.use_cassette(path='custom_name')
def function_name(cassette):
assert cassette._path == os.path.join(library_dir, "custom_name")
assert cassette._path == os.path.join(library_dir, 'custom_name')
function_name()
def test_cassette_library_dir_with_decoration_and_super_explicit_path():
library_dir = "/library_dir"
library_dir = '/libary_dir'
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir)
@vcr.use_cassette(path=os.path.join(library_dir, "custom_name"))
@vcr.use_cassette(path=os.path.join(library_dir, 'custom_name'))
def function_name(cassette):
assert cassette._path == os.path.join(library_dir, "custom_name")
assert cassette._path == os.path.join(library_dir, 'custom_name')
function_name()
def test_cassette_library_dir_with_path_transformer():
library_dir = "/library_dir"
vcr = VCR(
inject_cassette=True,
cassette_library_dir=library_dir,
path_transformer=lambda path: path + ".json",
)
library_dir = '/libary_dir'
vcr = VCR(inject_cassette=True, cassette_library_dir=library_dir,
path_transformer=lambda path: path + '.json')
@vcr.use_cassette()
def function_name(cassette):
assert cassette._path == os.path.join(library_dir, "function_name.json")
assert cassette._path == os.path.join(library_dir, 'function_name.json')
function_name()
def test_use_cassette_with_no_extra_invocation():
vcr = VCR(inject_cassette=True, cassette_library_dir="/")
vcr = VCR(inject_cassette=True, cassette_library_dir='/')
@vcr.use_cassette
def function_name(cassette):
assert cassette._path == os.path.join("/", "function_name")
assert cassette._path == os.path.join('/', 'function_name')
function_name()
def test_path_transformer():
vcr = VCR(inject_cassette=True, cassette_library_dir="/", path_transformer=lambda x: x + "_test")
vcr = VCR(inject_cassette=True, cassette_library_dir='/',
path_transformer=lambda x: x + '_test')
@vcr.use_cassette
def function_name(cassette):
assert cassette._path == os.path.join("/", "function_name_test")
assert cassette._path == os.path.join('/', 'function_name_test')
function_name()
@@ -300,31 +298,32 @@ def test_cassette_name_generator_defaults_to_using_module_function_defined_in():
@vcr.use_cassette
def function_name(cassette):
assert cassette._path == os.path.join(os.path.dirname(__file__), "function_name")
assert cassette._path == os.path.join(os.path.dirname(__file__),
'function_name')
function_name()
def test_ensure_suffix():
vcr = VCR(inject_cassette=True, path_transformer=VCR.ensure_suffix(".yaml"))
vcr = VCR(inject_cassette=True, path_transformer=VCR.ensure_suffix('.yaml'))
@vcr.use_cassette
def function_name(cassette):
assert cassette._path == os.path.join(os.path.dirname(__file__), "function_name.yaml")
assert cassette._path == os.path.join(os.path.dirname(__file__),
'function_name.yaml')
function_name()
def test_additional_matchers():
vcr = VCR(match_on=("uri",), inject_cassette=True)
vcr = VCR(match_on=('uri',), inject_cassette=True)
@vcr.use_cassette
def function_defaults(cassette):
assert set(cassette._match_on) == {vcr.matchers["uri"]}
assert set(cassette._match_on) == {vcr.matchers['uri']}
@vcr.use_cassette(additional_matchers=("body",))
@vcr.use_cassette(additional_matchers=('body',))
def function_additional(cassette):
assert set(cassette._match_on) == {vcr.matchers["uri"], vcr.matchers["body"]}
assert set(cassette._match_on) == {vcr.matchers['uri'], vcr.matchers['body']}
function_defaults()
function_additional()
@@ -332,7 +331,7 @@ def test_additional_matchers():
def test_decoration_should_respect_function_return_value():
vcr = VCR()
ret = "a-return-value"
ret = 'a-return-value'
@vcr.use_cassette
def function_with_return():
@@ -342,6 +341,7 @@ def test_decoration_should_respect_function_return_value():
class TestVCRClass(VCR().test_case()):
def no_decoration(self):
assert httplib.HTTPConnection == _HTTPConnection
self.test_dynamically_added()
@@ -364,27 +364,3 @@ def test_dynamically_added(self):
TestVCRClass.test_dynamically_added = test_dynamically_added
del test_dynamically_added
def test_path_class_as_cassette():
path = Path(__file__).parent.parent.joinpath(
"integration/cassettes/test_httpx_test_test_behind_proxy.yml",
)
with use_cassette(path):
pass
def test_use_cassette_generator_return():
ret_val = object()
vcr = VCR()
@vcr.use_cassette("test")
def gen():
return ret_val
yield
with pytest.raises(StopIteration) as exc_info:
next(gen())
assert exc_info.value.value is ret_val
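
Pulling the configuration options exercised above together, here is a minimal sketch; the cassette directory and function name are illustrative assumptions:

from vcr import VCR

my_vcr = VCR(
    cassette_library_dir="tests/cassettes",
    path_transformer=VCR.ensure_suffix(".yaml"),
    record_mode="once",
    match_on=("uri", "method"),
)

@my_vcr.use_cassette  # bare decoration, no extra invocation, as tested above
def function_name():
    ...  # cassette path defaults to tests/cassettes/function_name.yaml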


@@ -1,11 +0,0 @@
import sys
def test_vcr_import_deprecation(recwarn):
if "vcr" in sys.modules:
# Remove imported module entry if already loaded in another test
del sys.modules["vcr"]
import vcr # noqa: F401
assert len(recwarn) == 0

tox.ini (new file, 33 lines)

@@ -0,0 +1,33 @@
[tox]
envlist = {py27,py35,py36,pypy}-{flakes,requests27,httplib2,urllib3121,tornado4,boto3,aiohttp}
[testenv:flakes]
skipsdist = True
commands =
flake8 --version
flake8 --exclude=./docs/conf.py,./.tox/
pyflakes ./docs/conf.py
deps = flake8
[testenv]
commands =
./runtests.sh -n 4 {posargs}
deps =
Flask<1
mock
pytest
pytest-httpbin
pytest-xdist
PyYAML
requests27: requests==2.7.0
httplib2: httplib2
urllib3121: urllib3==1.21.1
{py27,py35,py36,pypy}-tornado4: tornado>=4,<5
{py27,py35,py36,pypy}-tornado4: pytest-tornado
{py27,py35,py36}-tornado4: pycurl
boto3: boto3
aiohttp: aiohttp<3
aiohttp: pytest-asyncio
[flake8]
max_line_length = 110

vcr.png (binary file changed, 114 KiB before / 240 KiB after; content not shown)


@@ -1,10 +1,14 @@
import logging
from logging import NullHandler
from .config import VCR
from .record_mode import RecordMode as mode # noqa: F401
__version__ = "8.0.0"
# Set default logging handler to avoid "No handler found" warnings.
try: # Python 2.7+
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
logging.getLogger(__name__).addHandler(NullHandler())
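
Since the package only installs a NullHandler, as shown above, callers who want to see vcrpy's log output must configure logging themselves; a minimal sketch using only the standard library:

import logging

logging.basicConfig()  # attach a root handler
logging.getLogger("vcr").setLevel(logging.DEBUG)  # show cassette/matcher debug messages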


@@ -1,3 +1,7 @@
async def handle_coroutine(vcr, fn):
import asyncio
@asyncio.coroutine
def handle_coroutine(vcr, fn):
with vcr as cassette:
return await fn(cassette)
return (yield from fn(cassette)) # noqa: E999


@@ -1,25 +1,33 @@
import collections
import contextlib
import copy
import sys
import inspect
import logging
from inspect import iscoroutinefunction
import wrapt
from ._handle_coroutine import handle_coroutine
from .compat import contextlib
from .errors import UnhandledHTTPRequestError
from .matchers import get_matchers_results, method, requests_match, uri
from .matchers import requests_match, uri, method
from .patch import CassettePatcherBuilder
from .persisters.filesystem import CassetteDecodeError, CassetteNotFoundError, FilesystemPersister
from .record_mode import RecordMode
from .serializers import yamlserializer
from .persisters.filesystem import FilesystemPersister
from .util import partition_dict
try:
from asyncio import iscoroutinefunction
from ._handle_coroutine import handle_coroutine
except ImportError:
def iscoroutinefunction(*args, **kwargs):
return False
def handle_coroutine(*args, **kwags):
raise NotImplementedError('Not implemented on Python 2')
log = logging.getLogger(__name__)
class CassetteContextDecorator:
class CassetteContextDecorator(object):
"""Context manager/decorator that handles installing the cassette and
removing cassettes.
@@ -37,11 +45,7 @@ class CassetteContextDecorator:
this class as a context manager in ``__exit__``.
"""
_non_cassette_arguments = (
"path_transformer",
"func_path_generator",
"record_on_exception",
)
_non_cassette_arguments = ('path_transformer', 'func_path_generator')
@classmethod
def from_args(cls, cassette_class, **kwargs):
@@ -51,16 +55,22 @@ class CassetteContextDecorator:
self.cls = cls
self._args_getter = args_getter
self.__finish = None
self.__cassette = None
def _patch_generator(self, cassette):
with contextlib.ExitStack() as exit_stack:
for patcher in CassettePatcherBuilder(cassette).build():
exit_stack.enter_context(patcher)
log_format = "{action} context for cassette at {path}."
log.debug(log_format.format(action="Entering", path=cassette._path))
log_format = '{action} context for cassette at {path}.'
log.debug(log_format.format(
action="Entering", path=cassette._path
))
yield cassette
log.debug(log_format.format(action="Exiting", path=cassette._path))
log.debug(log_format.format(
action="Exiting", path=cassette._path
))
# TODO(@IvanMalison): Hmmm. it kind of feels like this should be
# somewhere else.
cassette._save()
def __enter__(self):
# This assertion is here to prevent the dangerous behavior
@@ -74,27 +84,15 @@ class CassetteContextDecorator:
assert self.__finish is None, "Cassette already open."
other_kwargs, cassette_kwargs = partition_dict(
lambda key, _: key in self._non_cassette_arguments,
self._args_getter(),
self._args_getter()
)
if other_kwargs.get("path_transformer"):
transformer = other_kwargs["path_transformer"]
cassette_kwargs["path"] = transformer(cassette_kwargs["path"])
self.__cassette = self.cls.load(**cassette_kwargs)
self.__finish = self._patch_generator(self.__cassette)
if other_kwargs.get('path_transformer'):
transformer = other_kwargs['path_transformer']
cassette_kwargs['path'] = transformer(cassette_kwargs['path'])
self.__finish = self._patch_generator(self.cls.load(**cassette_kwargs))
return next(self.__finish)
def __exit__(self, *exc_info):
exception_was_raised = any(exc_info)
record_on_exception = self._args_getter().get("record_on_exception", True)
if record_on_exception or not exception_was_raised:
self.__cassette._save()
self.__cassette = None
        # Fellow programmer, don't remove this `next`: if `self.__finish` is
        # not consumed, the unpatcher functions accumulated in the `exit_stack`
        # object created in `_patch_generator` will not be called until
        # `exit_stack` is garbage collected.
        # This works in CPython but not in PyPy, where the unpatchers would
        # not be called until much later.
def __exit__(self, *args):
next(self.__finish, None)
self.__finish = None
@@ -104,7 +102,9 @@ class CassetteContextDecorator:
# functions are reentrant. This is required for thread
# safety and the correct operation of recursive functions.
args_getter = self._build_args_getter_for_decorator(function)
return type(self)(self.cls, args_getter)._execute_function(function, args, kwargs)
return type(self)(self.cls, args_getter)._execute_function(
function, args, kwargs
)
def _execute_function(self, function, args, kwargs):
def handle_function(cassette):
@@ -125,7 +125,17 @@ class CassetteContextDecorator:
duration of the generator.
"""
with self as cassette:
return (yield from fn(cassette))
coroutine = fn(cassette)
# We don't need to catch StopIteration. The caller (Tornado's
# gen.coroutine, for example) will handle that.
to_yield = next(coroutine)
while True:
try:
to_send = yield to_yield
except Exception:
to_yield = coroutine.throw(*sys.exc_info())
else:
to_yield = coroutine.send(to_send)
def _handle_function(self, fn):
with self as cassette:
@@ -138,16 +148,16 @@ class CassetteContextDecorator:
def _build_args_getter_for_decorator(self, function):
def new_args_getter():
kwargs = self._args_getter()
if "path" not in kwargs:
name_generator = kwargs.get("func_path_generator") or self.get_function_name
if 'path' not in kwargs:
name_generator = (kwargs.get('func_path_generator') or
self.get_function_name)
path = name_generator(function)
kwargs["path"] = path
kwargs['path'] = path
return kwargs
return new_args_getter
class Cassette:
class Cassette(object):
"""A container for recorded requests and responses"""
@classmethod
@@ -165,32 +175,19 @@ class Cassette:
def use(cls, **kwargs):
return CassetteContextDecorator.from_args(cls, **kwargs)
def __init__(
self,
path,
serializer=None,
persister=None,
record_mode=RecordMode.ONCE,
match_on=(uri, method),
before_record_request=None,
before_record_response=None,
custom_patches=(),
inject=False,
allow_playback_repeats=False,
drop_unused_requests=False,
):
def __init__(self, path, serializer=None, persister=None, record_mode='once',
match_on=(uri, method), before_record_request=None,
before_record_response=None, custom_patches=(),
inject=False):
self._persister = persister or FilesystemPersister
self._path = path
self._serializer = serializer or yamlserializer
self._match_on = match_on
self._before_record_request = before_record_request or (lambda x: x)
log.info(self._before_record_request)
self._before_record_response = before_record_response or (lambda x: x)
self.inject = inject
self.record_mode = record_mode
self.custom_patches = custom_patches
self.allow_playback_repeats = allow_playback_repeats
self.drop_unused_requests = drop_unused_requests
# self.data is the list of (req, resp) tuples
self.data = []
@@ -198,10 +195,6 @@ class Cassette:
self.dirty = False
self.rewound = False
# Subsets of self.data to store old and played interactions
self._old_interactions = []
self._played_interactions = []
@property
def play_count(self):
return sum(self.play_counts.values())
@@ -209,7 +202,7 @@ class Cassette:
@property
def all_played(self):
"""Returns True if all responses have been played, False otherwise."""
return len(self.play_counts.values()) == len(self)
return self.play_count == len(self)
@property
def requests(self):
@@ -221,17 +214,14 @@ class Cassette:
@property
def write_protected(self):
return (self.rewound and self.record_mode == RecordMode.ONCE) or self.record_mode == RecordMode.NONE
return self.rewound and self.record_mode == 'once' or \
self.record_mode == 'none'
def append(self, request, response):
"""Add a request, response pair to this cassette"""
request = self._before_record_request(request)
if not request:
return
log.info("Appending request %s and response %s", request, response)
# Deepcopy is here because mutation of `response` will corrupt the
# real response.
response = copy.deepcopy(response)
response = self._before_record_response(response)
if response is None:
return
@@ -253,7 +243,9 @@ class Cassette:
def can_play_response_for(self, request):
request = self._before_record_request(request)
return request and request in self and self.record_mode != RecordMode.ALL and self.rewound
return request and request in self and \
self.record_mode != 'all' and \
self.rewound
def play_response(self, request):
"""
@@ -261,13 +253,13 @@ class Cassette:
hasn't been played back before, and mark it as played
"""
for index, response in self._responses(request):
if self.play_counts[index] == 0 or self.allow_playback_repeats:
if self.play_counts[index] == 0:
self.play_counts[index] += 1
self._played_interactions.append((request, response))
return response
# The cassette doesn't contain the request asked for.
raise UnhandledHTTPRequestError(
f"The cassette ({self._path!r}) doesn't contain the request ({request!r}) asked for",
"The cassette (%r) doesn't contain the request (%r) asked for"
% (self._path, request)
)
def responses_of(self, request):
@@ -282,93 +274,39 @@ class Cassette:
return responses
# The cassette doesn't contain the request asked for.
raise UnhandledHTTPRequestError(
f"The cassette ({self._path!r}) doesn't contain the request ({request!r}) asked for",
"The cassette (%r) doesn't contain the request (%r) asked for"
% (self._path, request)
)
def rewind(self):
self.play_counts = collections.Counter()
def find_requests_with_most_matches(self, request):
"""
        Get the most similar request(s) stored in the cassette
        for a given request, as a list of tuples like this:
        - the request object
        - the successful matchers as strings
        - the failed matchers and the related assertion messages, with the difference details, as tuples of strings
        This is useful when a request fails to be found:
        we can inspect the most similar request(s) to see which parts of the request have changed.
"""
best_matches = []
request = self._before_record_request(request)
for _, (stored_request, _) in enumerate(self.data):
successes, fails = get_matchers_results(request, stored_request, self._match_on)
best_matches.append((len(successes), stored_request, successes, fails))
best_matches.sort(key=lambda t: t[0], reverse=True)
# Get the first best matches (multiple if equal matches)
final_best_matches = []
if not best_matches:
return final_best_matches
previous_nb_success = best_matches[0][0]
for best_match in best_matches:
nb_success = best_match[0]
            # Do not keep matches that have 0 successes;
            # that means the request is totally different from
            # the ones stored in the cassette
if nb_success < 1 or previous_nb_success != nb_success:
break
previous_nb_success = nb_success
final_best_matches.append(best_match[1:])
return final_best_matches
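As context for the hunk above: a minimal, hypothetical sketch (not part of the diff) of consuming find_requests_with_most_matches on the newer side, where vcr.use_cassette yields the Cassette object; the cassette path and probe request are made up for illustration.
import vcr
from vcr.request import Request

with vcr.use_cassette("fixtures/example.yaml") as cass:
    # Hypothetical request that failed to match anything in the cassette.
    probe = Request("GET", "https://api.example.com/items?page=2", None, {})
    for stored_request, succeeded, failed in cass.find_requests_with_most_matches(probe):
        print("closest stored request:", stored_request)
        print("  matchers that passed:", succeeded)
        for matcher_name, assertion_msg in failed:
            print(f"  {matcher_name} failed:\n{assertion_msg}")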
def _new_interactions(self):
"""List of new HTTP interactions (request/response tuples)"""
new_interactions = []
for request, response in self.data:
if all(
not requests_match(request, old_request, self._match_on)
for old_request, _ in self._old_interactions
):
new_interactions.append((request, response))
return new_interactions
def _as_dict(self):
return {"requests": self.requests, "responses": self.responses}
def _build_used_interactions_dict(self):
interactions = self._played_interactions + self._new_interactions()
cassete_dict = {
"requests": [request for request, _ in interactions],
"responses": [response for _, response in interactions],
}
return cassete_dict
def _save(self, force=False):
if self.drop_unused_requests and len(self._played_interactions) < len(self._old_interactions):
cassete_dict = self._build_used_interactions_dict()
force = True
else:
cassete_dict = self._as_dict()
if force or self.dirty:
self._persister.save_cassette(self._path, cassete_dict, serializer=self._serializer)
self._persister.save_cassette(
self._path,
self._as_dict(),
serializer=self._serializer,
)
self.dirty = False
def _load(self):
try:
requests, responses = self._persister.load_cassette(self._path, serializer=self._serializer)
for request, response in zip(requests, responses, strict=False):
requests, responses = self._persister.load_cassette(
self._path,
serializer=self._serializer,
)
for request, response in zip(requests, responses):
self.append(request, response)
self._old_interactions.append((request, response))
self.dirty = False
self.rewound = True
except (CassetteDecodeError, CassetteNotFoundError):
except ValueError:
pass
def __str__(self):
return f"<Cassette containing {len(self)} recorded response(s)>"
return "<Cassette containing {} recorded response(s)>".format(
len(self)
)
def __len__(self):
"""Return the number of request,response pairs stored in here"""
@@ -376,7 +314,7 @@ class Cassette:
def __contains__(self, request):
"""Return whether or not a request has been stored"""
for index, _ in self._responses(request):
if self.play_counts[index] == 0 or self.allow_playback_repeats:
for index, response in self._responses(request):
if self.play_counts[index] == 0:
return True
return False

vcr/compat.py Normal file

@@ -0,0 +1,14 @@
try:
from unittest import mock
except ImportError:
import mock
try:
import contextlib
except ImportError:
import contextlib2 as contextlib
else:
if not hasattr(contextlib, 'ExitStack'):
import contextlib2 as contextlib
__all__ = ['mock', 'contextlib']
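The shim above gives the rest of the code base a single import point for mock and an ExitStack-capable contextlib across Python 2 and 3. A minimal sketch of how it is meant to be consumed on the older tree that ships vcr/compat.py (the patch target is an arbitrary example, not something vcrpy itself patches):
from vcr.compat import contextlib, mock

with contextlib.ExitStack() as stack:
    fake_sleep = stack.enter_context(mock.patch("time.sleep", mock.Mock()))
    fake_sleep(10)                        # recorded on the mock, no real sleep
    assert fake_sleep.call_count == 1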


@@ -1,23 +1,26 @@
import copy
import collections
import functools
import inspect
import os
import types
from collections import abc as collections_abc
from pathlib import Path
from . import filters, matchers
import six
from .cassette import Cassette
from .serializers import yamlserializer, jsonserializer
from .persisters.filesystem import FilesystemPersister
from .record_mode import RecordMode
from .serializers import jsonserializer, yamlserializer
from .util import auto_decorate, compose
from .util import compose, auto_decorate
from . import matchers
from . import filters
class VCR:
class VCR(object):
@staticmethod
def is_test_method(method_name, function):
return method_name.startswith("test") and isinstance(function, types.FunctionType)
return method_name.startswith('test') and \
isinstance(function, types.FunctionType)
@staticmethod
def ensure_suffix(suffix):
@@ -25,47 +28,35 @@ class VCR:
if not path.endswith(suffix):
return path + suffix
return path
return ensure
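ensure_suffix returns a closure suitable for the path_transformer option; a short sketch assuming a tests/cassettes directory:
import vcr

my_vcr = vcr.VCR(
    cassette_library_dir="tests/cassettes",           # assumed location
    path_transformer=vcr.VCR.ensure_suffix(".yaml"),  # append ".yaml" when missing
)

with my_vcr.use_cassette("example"):   # persisted as tests/cassettes/example.yaml
    pass  # HTTP calls made here are recorded/replayed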
def __init__(
self,
path_transformer=None,
before_record_request=None,
custom_patches=(),
filter_query_parameters=(),
ignore_hosts=(),
record_mode=RecordMode.ONCE,
ignore_localhost=False,
filter_headers=(),
before_record_response=None,
filter_post_data_parameters=(),
match_on=("method", "scheme", "host", "port", "path", "query"),
before_record=None,
inject_cassette=False,
serializer="yaml",
cassette_library_dir=None,
func_path_generator=None,
decode_compressed_response=False,
record_on_exception=True,
drop_unused_requests=False,
):
def __init__(self, path_transformer=None, before_record_request=None,
custom_patches=(), filter_query_parameters=(), ignore_hosts=(),
record_mode="once", ignore_localhost=False, filter_headers=(),
before_record_response=None, filter_post_data_parameters=(),
match_on=('method', 'scheme', 'host', 'port', 'path', 'query'),
before_record=None, inject_cassette=False, serializer='yaml',
cassette_library_dir=None, func_path_generator=None,
decode_compressed_response=False):
self.serializer = serializer
self.match_on = match_on
self.cassette_library_dir = cassette_library_dir
self.serializers = {"yaml": yamlserializer, "json": jsonserializer}
self.serializers = {
'yaml': yamlserializer,
'json': jsonserializer,
}
self.matchers = {
"method": matchers.method,
"uri": matchers.uri,
"url": matchers.uri, # matcher for backwards compatibility
"scheme": matchers.scheme,
"host": matchers.host,
"port": matchers.port,
"path": matchers.path,
"query": matchers.query,
"headers": matchers.headers,
"raw_body": matchers.raw_body,
"body": matchers.body,
'method': matchers.method,
'uri': matchers.uri,
'url': matchers.uri, # matcher for backwards compatibility
'scheme': matchers.scheme,
'host': matchers.host,
'port': matchers.port,
'path': matchers.path,
'query': matchers.query,
'headers': matchers.headers,
'raw_body': matchers.raw_body,
'body': matchers.body,
}
self.persister = FilesystemPersister
self.record_mode = record_mode
@@ -80,15 +71,17 @@ class VCR:
self.path_transformer = path_transformer
self.func_path_generator = func_path_generator
self.decode_compressed_response = decode_compressed_response
self.record_on_exception = record_on_exception
self._custom_patches = tuple(custom_patches)
self.drop_unused_requests = drop_unused_requests
def _get_serializer(self, serializer_name):
try:
serializer = self.serializers[serializer_name]
except KeyError:
raise KeyError(f"Serializer {serializer_name} doesn't exist or isn't registered") from None
raise KeyError(
"Serializer {} doesn't exist or isn't registered".format(
serializer_name
)
)
return serializer
def _get_matchers(self, matcher_names):
@@ -97,11 +90,13 @@ class VCR:
for m in matcher_names:
matchers.append(self.matchers[m])
except KeyError:
raise KeyError(f"Matcher {m} doesn't exist or isn't registered") from None
raise KeyError(
"Matcher {} doesn't exist or isn't registered".format(m)
)
return matchers
def use_cassette(self, path=None, **kwargs):
if path is not None and not isinstance(path, (str, Path)):
if path is not None and not isinstance(path, six.string_types):
function = path
# Assume this is an attempt to decorate a function
return self._use_cassette(**kwargs)(function)
@@ -119,58 +114,68 @@ class VCR:
return Cassette.use_arg_getter(args_getter)
def get_merged_config(self, **kwargs):
serializer_name = kwargs.get("serializer", self.serializer)
matcher_names = kwargs.get("match_on", self.match_on)
path_transformer = kwargs.get("path_transformer", self.path_transformer)
func_path_generator = kwargs.get("func_path_generator", self.func_path_generator)
cassette_library_dir = kwargs.get("cassette_library_dir", self.cassette_library_dir)
additional_matchers = kwargs.get("additional_matchers", ())
record_on_exception = kwargs.get("record_on_exception", self.record_on_exception)
serializer_name = kwargs.get('serializer', self.serializer)
matcher_names = kwargs.get('match_on', self.match_on)
path_transformer = kwargs.get(
'path_transformer',
self.path_transformer
)
func_path_generator = kwargs.get(
'func_path_generator',
self.func_path_generator
)
cassette_library_dir = kwargs.get(
'cassette_library_dir',
self.cassette_library_dir
)
additional_matchers = kwargs.get('additional_matchers', ())
if cassette_library_dir:
def add_cassette_library_dir(path):
if not path.startswith(cassette_library_dir):
return os.path.join(cassette_library_dir, path)
return path
path_transformer = compose(add_cassette_library_dir, path_transformer)
path_transformer = compose(
add_cassette_library_dir, path_transformer
)
elif not func_path_generator:
# If we don't have a library dir, use the functions
# location to build a full path for cassettes.
func_path_generator = self._build_path_from_func_using_module
merged_config = {
"serializer": self._get_serializer(serializer_name),
"persister": self.persister,
"match_on": self._get_matchers(tuple(matcher_names) + tuple(additional_matchers)),
"record_mode": kwargs.get("record_mode", self.record_mode),
"before_record_request": self._build_before_record_request(kwargs),
"before_record_response": self._build_before_record_response(kwargs),
"custom_patches": self._custom_patches + kwargs.get("custom_patches", ()),
"inject": kwargs.get("inject_cassette", self.inject_cassette),
"path_transformer": path_transformer,
"func_path_generator": func_path_generator,
"allow_playback_repeats": kwargs.get("allow_playback_repeats", False),
"record_on_exception": record_on_exception,
"drop_unused_requests": kwargs.get("drop_unused_requests", self.drop_unused_requests),
'serializer': self._get_serializer(serializer_name),
'persister': self.persister,
'match_on': self._get_matchers(
tuple(matcher_names) + tuple(additional_matchers)
),
'record_mode': kwargs.get('record_mode', self.record_mode),
'before_record_request': self._build_before_record_request(kwargs),
'before_record_response': self._build_before_record_response(kwargs),
'custom_patches': self._custom_patches + kwargs.get(
'custom_patches', ()
),
'inject': kwargs.get('inject_cassette', self.inject_cassette),
'path_transformer': path_transformer,
'func_path_generator': func_path_generator
}
path = kwargs.get("path")
path = kwargs.get('path')
if path:
merged_config["path"] = path
merged_config['path'] = path
return merged_config
def _build_before_record_response(self, options):
before_record_response = options.get("before_record_response", self.before_record_response)
before_record_response = options.get(
'before_record_response', self.before_record_response
)
decode_compressed_response = options.get(
"decode_compressed_response",
self.decode_compressed_response,
'decode_compressed_response', self.decode_compressed_response
)
filter_functions = []
if decode_compressed_response:
filter_functions.append(filters.decode_response)
if before_record_response:
if not isinstance(before_record_response, collections_abc.Iterable):
if not isinstance(before_record_response, collections.Iterable):
before_record_response = (before_record_response,)
filter_functions.extend(before_record_response)
@@ -180,70 +185,87 @@ class VCR:
break
response = function(response)
return response
return before_record_response
def _build_before_record_request(self, options):
filter_functions = []
filter_headers = options.get("filter_headers", self.filter_headers)
filter_query_parameters = options.get("filter_query_parameters", self.filter_query_parameters)
filter_headers = options.get(
'filter_headers', self.filter_headers
)
filter_query_parameters = options.get(
'filter_query_parameters', self.filter_query_parameters
)
filter_post_data_parameters = options.get(
"filter_post_data_parameters",
self.filter_post_data_parameters,
'filter_post_data_parameters', self.filter_post_data_parameters
)
before_record_request = options.get(
"before_record_request",
options.get("before_record", self.before_record_request),
options.get("before_record", self.before_record_request)
)
ignore_hosts = options.get(
'ignore_hosts', self.ignore_hosts
)
ignore_localhost = options.get(
'ignore_localhost', self.ignore_localhost
)
ignore_hosts = options.get("ignore_hosts", self.ignore_hosts)
ignore_localhost = options.get("ignore_localhost", self.ignore_localhost)
if filter_headers:
replacements = [h if isinstance(h, tuple) else (h, None) for h in filter_headers]
filter_functions.append(functools.partial(filters.replace_headers, replacements=replacements))
if filter_query_parameters:
replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_query_parameters]
replacements = [h if isinstance(h, tuple) else (h, None)
for h in filter_headers]
filter_functions.append(
functools.partial(filters.replace_query_parameters, replacements=replacements),
functools.partial(
filters.replace_headers,
replacements=replacements,
)
)
if filter_query_parameters:
replacements = [p if isinstance(p, tuple) else (p, None)
for p in filter_query_parameters]
filter_functions.append(functools.partial(
filters.replace_query_parameters,
replacements=replacements,
))
if filter_post_data_parameters:
replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_post_data_parameters]
replacements = [p if isinstance(p, tuple) else (p, None)
for p in filter_post_data_parameters]
filter_functions.append(
functools.partial(filters.replace_post_data_parameters, replacements=replacements),
functools.partial(
filters.replace_post_data_parameters,
replacements=replacements,
)
)
hosts_to_ignore = set(ignore_hosts)
if ignore_localhost:
hosts_to_ignore.update(("localhost", "0.0.0.0", "127.0.0.1"))
hosts_to_ignore.update(('localhost', '0.0.0.0', '127.0.0.1'))
if hosts_to_ignore:
filter_functions.append(self._build_ignore_hosts(hosts_to_ignore))
if before_record_request:
if not isinstance(before_record_request, collections_abc.Iterable):
if not isinstance(before_record_request, collections.Iterable):
before_record_request = (before_record_request,)
filter_functions.extend(before_record_request)
def before_record_request(request):
request = copy.deepcopy(request)
request = copy.copy(request)
for function in filter_functions:
if request is None:
break
request = function(request)
return request
return before_record_request
@staticmethod
def _build_ignore_hosts(hosts_to_ignore):
def filter_ignored_hosts(request):
if hasattr(request, "host") and request.host in hosts_to_ignore:
if hasattr(request, 'host') and request.host in hosts_to_ignore:
return
return request
return filter_ignored_hosts
@staticmethod
def _build_path_from_func_using_module(function):
return os.path.join(os.path.dirname(inspect.getfile(function)), function.__name__)
return os.path.join(os.path.dirname(inspect.getfile(function)),
function.__name__)
def register_serializer(self, name, serializer):
self.serializers[name] = serializer
@@ -257,5 +279,4 @@ class VCR:
def test_case(self, predicate=None):
predicate = predicate or self.is_test_method
metaclass = auto_decorate(self.use_cassette, predicate)
return metaclass("temporary_class", (), {})
return six.with_metaclass(auto_decorate(self.use_cassette, predicate))
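register_serializer stores any object exposing serialize/deserialize under the given name; a hedged sketch (the "prettyjson" name and class are invented):
import json
import vcr

class PrettyJSONSerializer:
    """Hypothetical serializer following the same contract as vcr.serializers.jsonserializer."""

    @staticmethod
    def serialize(cassette_dict):
        return json.dumps(cassette_dict, indent=2) + "\n"

    @staticmethod
    def deserialize(cassette_string):
        return json.loads(cassette_string)

my_vcr = vcr.VCR()
my_vcr.register_serializer("prettyjson", PrettyJSONSerializer())
# now selectable per cassette: my_vcr.use_cassette("x.json", serializer="prettyjson")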


@@ -1,41 +1,7 @@
class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
self.cassette = kwargs["cassette"]
self.failed_request = kwargs["failed_request"]
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super().__init__(message)
@staticmethod
def _get_message(cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
if best_matches:
# Build a comprehensible message to put in the exception.
best_matches_msg = (
f"Found {len(best_matches)} similar requests "
f"with {len(best_matches[0][2])} different matcher(s) :\n"
)
for idx, best_match in enumerate(best_matches, start=1):
request, succeeded_matchers, failed_matchers_assertion_msgs = best_match
best_matches_msg += (
f"\n{idx} - ({request!r}).\n"
f"Matchers succeeded : {succeeded_matchers}\n"
"Matchers failed :\n"
)
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += f"{failed_matcher} - assertion failure :\n{assertion_msg}\n"
else:
best_matches_msg = "No similar requests, that have not been played, found."
return (
f"Can't overwrite existing cassette ({cassette._path!r}) in "
f"your current record mode ({cassette.record_mode!r}).\n"
f"No match for the request ({failed_request!r}) was found.\n"
f"{best_matches_msg}"
)
pass
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
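On the newer side of this diff the exception keeps references to the cassette and the failed request (and its message already lists the closest matches); a hedged sketch of catching it, with a made-up URL and cassette path:
import urllib.request
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException

try:
    with vcr.use_cassette("fixtures/example.yaml", record_mode="none"):
        urllib.request.urlopen("https://api.example.com/unrecorded")
except CannotOverwriteExistingCassetteException as exc:
    print(exc)                  # includes the "similar requests" report built above
    print(exc.failed_request)   # attribute present on the newer side only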


@@ -1,18 +1,20 @@
from six import BytesIO, text_type
from six.moves.urllib.parse import urlparse, urlencode, urlunparse
import copy
import json
import zlib
from io import BytesIO
from urllib.parse import urlencode, urlparse, urlunparse
from .util import CaseInsensitiveDict
def replace_headers(request, replacements):
"""Replace headers in request according to replacements.
The replacements should be a list of (key, value) pairs where the value can be any of:
1. A simple replacement string value.
2. None to remove the given header.
3. A callable which accepts (key, value, request) and returns a string value or None.
"""
Replace headers in request according to replacements. The replacements
should be a list of (key, value) pairs where the value can be any of:
1. A simple replacement string value.
2. None to remove the given header.
3. A callable which accepts (key, value, request) and returns a string
value or None.
"""
new_headers = request.headers.copy()
for k, rv in replacements:
@@ -35,9 +37,10 @@ def remove_headers(request, headers_to_remove):
def replace_query_parameters(request, replacements):
"""Replace query parameters in request according to replacements.
The replacements should be a list of (key, value) pairs where the value can be any of:
"""
Replace query parameters in request according to replacements. The
replacements should be a list of (key, value) pairs where the value can be
any of:
1. A simple replacement string value.
2. None to remove the given header.
3. A callable which accepts (key, value, request) and returns a string
@@ -70,32 +73,19 @@ def remove_query_parameters(request, query_parameters_to_remove):
def replace_post_data_parameters(request, replacements):
"""Replace post data in request--either form data or json--according to replacements.
The replacements should be a list of (key, value) pairs where the value can be any of:
"""
Replace post data in request--either form data or json--according to
replacements. The replacements should be a list of (key, value) pairs where
the value can be any of:
1. A simple replacement string value.
2. None to remove the given header.
3. A callable which accepts (key, value, request) and returns a string
value or None.
"""
if not request.body:
# Nothing to replace
return request
replacements = dict(replacements)
if request.method == "POST" and not isinstance(request.body, BytesIO):
if isinstance(request.body, dict):
new_body = request.body.copy()
for k, rv in replacements.items():
if k in new_body:
ov = new_body.pop(k)
if callable(rv):
rv = rv(key=k, value=ov, request=request)
if rv is not None:
new_body[k] = rv
request.body = new_body
elif request.headers.get("Content-Type") == "application/json":
json_data = json.loads(request.body)
if request.method == 'POST' and not isinstance(request.body, BytesIO):
if request.headers.get('Content-Type') == 'application/json':
json_data = json.loads(request.body.decode('utf-8'))
for k, rv in replacements.items():
if k in json_data:
ov = json_data.pop(k)
@@ -103,26 +93,28 @@ def replace_post_data_parameters(request, replacements):
rv = rv(key=k, value=ov, request=request)
if rv is not None:
json_data[k] = rv
request.body = json.dumps(json_data).encode("utf-8")
request.body = json.dumps(json_data).encode('utf-8')
else:
if isinstance(request.body, str):
request.body = request.body.encode("utf-8")
splits = [p.partition(b"=") for p in request.body.split(b"&")]
if isinstance(request.body, text_type):
request.body = request.body.encode('utf-8')
splits = [p.partition(b'=') for p in request.body.split(b'&')]
new_splits = []
for k, sep, ov in splits:
if sep is None:
new_splits.append((k, sep, ov))
else:
rk = k.decode("utf-8")
rk = k.decode('utf-8')
if rk not in replacements:
new_splits.append((k, sep, ov))
else:
rv = replacements[rk]
if callable(rv):
rv = rv(key=rk, value=ov.decode("utf-8"), request=request)
rv = rv(key=rk, value=ov.decode('utf-8'),
request=request)
if rv is not None:
new_splits.append((k, sep, rv.encode("utf-8")))
request.body = b"&".join(k if sep is None else b"".join([k, sep, v]) for k, sep, v in new_splits)
new_splits.append((k, sep, rv.encode('utf-8')))
request.body = b'&'.join(k if sep is None else b''.join([k, sep, v])
for k, sep, v in new_splits)
return request
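All three replace_* filters above share the same replacement format, a (key, value) pair where value is a string, None, or a callable receiving key, value and request; they are normally driven through the VCR options, as in this hedged sketch (header and parameter names are invented):
import vcr

def scrub_token(key, value, request):
    # Callable form: return a replacement string, or None to drop the header.
    return "REDACTED" if value else None

my_vcr = vcr.VCR(
    filter_headers=[("Authorization", scrub_token), "X-Debug"],   # bare name means: remove it
    filter_query_parameters=[("api_key", "DUMMY")],
    filter_post_data_parameters=[("password", None)],             # None removes the field
)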
@@ -141,40 +133,31 @@ def decode_response(response):
2. delete the content-encoding header
3. update content-length header to decompressed length
"""
def is_compressed(headers):
encoding = headers.get("content-encoding", [])
return encoding and encoding[0] in ("gzip", "deflate")
encoding = headers.get('content-encoding', [])
return encoding and encoding[0] in ('gzip', 'deflate')
def decompress_body(body, encoding):
"""Returns decompressed body according to encoding using zlib.
to (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16
"""
if not body:
return ""
if encoding == "gzip":
try:
return zlib.decompress(body, zlib.MAX_WBITS | 16)
except zlib.error:
return body # assumes that the data was already decompressed
if encoding == 'gzip':
return zlib.decompress(body, zlib.MAX_WBITS | 16)
else: # encoding == 'deflate'
try:
return zlib.decompress(body)
except zlib.error:
return body # assumes that the data was already decompressed
return zlib.decompress(body)
# Deepcopy here in case `headers` contain objects that could
# be mutated by a shallow copy and corrupt the real response.
response = copy.deepcopy(response)
headers = CaseInsensitiveDict(response["headers"])
headers = CaseInsensitiveDict(response['headers'])
if is_compressed(headers):
encoding = headers["content-encoding"][0]
headers["content-encoding"].remove(encoding)
if not headers["content-encoding"]:
del headers["content-encoding"]
encoding = headers['content-encoding'][0]
headers['content-encoding'].remove(encoding)
if not headers['content-encoding']:
del headers['content-encoding']
new_body = decompress_body(response["body"]["string"], encoding)
response["body"]["string"] = new_body
headers["content-length"] = [str(len(new_body))]
response["headers"] = dict(headers)
new_body = decompress_body(response['body']['string'], encoding)
response['body']['string'] = new_body
headers['content-length'] = [str(len(new_body))]
response['headers'] = dict(headers)
return response
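decode_response is what the decode_compressed_response option wires in as a before_record_response filter; a one-line sketch of enabling it (cassette path is illustrative):
import vcr

with vcr.use_cassette("fixtures/gzip.yaml", decode_compressed_response=True):
    pass  # recorded bodies are stored decompressed, with headers adjusted as above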


@@ -1,210 +1,101 @@
import json
import logging
import urllib
import xmlrpc.client
from string import hexdigits
from six.moves import urllib, xmlrpc_client
from .util import read_body
import logging
_HEXDIG_CODE_POINTS: set[int] = {ord(s.encode("ascii")) for s in hexdigits}
log = logging.getLogger(__name__)
def method(r1, r2):
if r1.method != r2.method:
raise AssertionError(f"{r1.method} != {r2.method}")
return r1.method == r2.method
def uri(r1, r2):
if r1.uri != r2.uri:
raise AssertionError(f"{r1.uri} != {r2.uri}")
return r1.uri == r2.uri
def host(r1, r2):
if r1.host != r2.host:
raise AssertionError(f"{r1.host} != {r2.host}")
return r1.host == r2.host
def scheme(r1, r2):
if r1.scheme != r2.scheme:
raise AssertionError(f"{r1.scheme} != {r2.scheme}")
return r1.scheme == r2.scheme
def port(r1, r2):
if r1.port != r2.port:
raise AssertionError(f"{r1.port} != {r2.port}")
return r1.port == r2.port
def path(r1, r2):
if r1.path != r2.path:
raise AssertionError(f"{r1.path} != {r2.path}")
return r1.path == r2.path
def query(r1, r2):
if r1.query != r2.query:
raise AssertionError(f"{r1.query} != {r2.query}")
return r1.query == r2.query
def raw_body(r1, r2):
if read_body(r1) != read_body(r2):
raise AssertionError
return read_body(r1) == read_body(r2)
def body(r1, r2):
transformers = list(_get_transformers(r1))
if transformers != list(_get_transformers(r2)):
transformers = []
b1 = read_body(r1)
b2 = read_body(r2)
for transform in transformers:
b1 = transform(b1)
b2 = transform(b2)
if b1 != b2:
raise AssertionError
def headers(r1, r2):
if r1.headers != r2.headers:
raise AssertionError(f"{r1.headers} != {r2.headers}")
def _header_checker(value, header="Content-Type"):
def _header_checker(value, header='Content-Type'):
def checker(headers):
_header = headers.get(header, "")
if isinstance(_header, bytes):
_header = _header.decode("utf-8")
return value in _header.lower()
return value in headers.get(header, '').lower()
return checker
def _dechunk(body):
if isinstance(body, str):
body = body.encode("utf-8")
elif isinstance(body, bytearray):
body = bytes(body)
elif hasattr(body, "__iter__"):
body = list(body)
if body:
if isinstance(body[0], str):
body = ("".join(body)).encode("utf-8")
elif isinstance(body[0], bytes):
body = b"".join(body)
elif isinstance(body[0], int):
body = bytes(body)
else:
raise ValueError(f"Body chunk type {type(body[0])} not supported")
else:
body = None
if not isinstance(body, bytes):
return body
# Now decode chunked data format (https://en.wikipedia.org/wiki/Chunked_transfer_encoding)
# Example input: b"45\r\n<69 bytes>\r\n0\r\n\r\n" where int(b"45", 16) == 69.
CHUNK_GAP = b"\r\n"
BODY_LEN: int = len(body)
chunks: list[bytes] = []
pos: int = 0
while True:
for i in range(pos, BODY_LEN):
if body[i] not in _HEXDIG_CODE_POINTS:
break
if i == 0 or body[i : i + len(CHUNK_GAP)] != CHUNK_GAP:
if pos == 0:
return body # i.e. assume non-chunk data
raise ValueError("Malformed chunked data")
size_bytes = int(body[pos:i], 16)
if size_bytes == 0: # i.e. well-formed ending
return b"".join(chunks)
chunk_data_first = i + len(CHUNK_GAP)
chunk_data_after_last = chunk_data_first + size_bytes
if body[chunk_data_after_last : chunk_data_after_last + len(CHUNK_GAP)] != CHUNK_GAP:
raise ValueError("Malformed chunked data")
chunk_data = body[chunk_data_first:chunk_data_after_last]
chunks.append(chunk_data)
pos = chunk_data_after_last + len(CHUNK_GAP)
def _transform_json(body):
# Request body is always a byte string, but json.loads() wants a text
# string. RFC 7159 says the default encoding is UTF-8 (although UTF-16
# and UTF-32 are also allowed: hmmmmm).
if body:
return json.loads(body)
return json.loads(body.decode('utf-8'))
_xml_header_checker = _header_checker("text/xml")
_xmlrpc_header_checker = _header_checker("xmlrpc", header="User-Agent")
_xml_header_checker = _header_checker('text/xml')
_xmlrpc_header_checker = _header_checker('xmlrpc', header='User-Agent')
_checker_transformer_pairs = (
(_header_checker("chunked", header="Transfer-Encoding"), _dechunk),
(
_header_checker("application/x-www-form-urlencoded"),
lambda body: urllib.parse.parse_qs(body.decode("ascii")),
),
(_header_checker("application/json"), _transform_json),
(lambda request: _xml_header_checker(request) and _xmlrpc_header_checker(request), xmlrpc.client.loads),
(_header_checker('application/x-www-form-urlencoded'), urllib.parse.parse_qs),
(_header_checker('application/json'), _transform_json),
(lambda request: _xml_header_checker(request) and _xmlrpc_header_checker(request), xmlrpc_client.loads),
)
def _get_transformers(request):
def _identity(x):
return x
def _get_transformer(request):
for checker, transformer in _checker_transformer_pairs:
if checker(request.headers):
yield transformer
return transformer
else:
return _identity
def body(r1, r2):
transformer = _get_transformer(r1)
r2_transformer = _get_transformer(r2)
if transformer != r2_transformer:
transformer = _identity
return transformer(read_body(r1)) == transformer(read_body(r2))
def headers(r1, r2):
return r1.headers == r2.headers
def _log_matches(r1, r2, matches):
differences = [m for m in matches if not m[0]]
if differences:
log.debug(
"Requests {} and {} differ according to "
"the following matchers: {}".format(r1, r2, differences)
)
def requests_match(r1, r2, matchers):
_, failures = get_matchers_results(r1, r2, matchers)
if failures:
log.debug(f"Requests {r1} and {r2} differ.\nFailure details:\n{failures}")
return len(failures) == 0
def _evaluate_matcher(matcher_function, *args):
"""
Evaluate the result of a given matcher as a boolean with an assertion error message if any.
It handles two types of matchers:
- a matcher returning a boolean value.
- a matcher that only asserts, returning None or raising an assertion error.
"""
assertion_message = None
try:
match = matcher_function(*args)
match = True if match is None else match
except AssertionError as e:
match = False
assertion_message = str(e)
return match, assertion_message
def get_matchers_results(r1, r2, matchers):
"""
Get the comparison results of two requests as two lists.
The first returned list contains the names of the matchers that passed.
The second list contains the matchers that failed, each paired with the failed assertion details if any.
"""
matches_success, matches_fails = [], []
for m in matchers:
matcher_name = m.__name__
match, assertion_message = _evaluate_matcher(m, r1, r2)
if match:
matches_success.append(matcher_name)
else:
assertion_message = get_assertion_message(assertion_message)
matches_fails.append((matcher_name, assertion_message))
return matches_success, matches_fails
def get_assertion_message(assertion_details):
"""
Get a detailed message about the failing matcher.
"""
return assertion_details
matches = [(m(r1, r2), m) for m in matchers]
_log_matches(r1, r2, matches)
return all(m[0] for m in matches)
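Custom matchers can be registered by name; on the newer side they may simply assert (returning None), while the older side expects a boolean. A hedged sketch with an invented matcher name:
import vcr

def query_keys(r1, r2):
    # Compare only the set of query parameter names, ignoring their values.
    keys1 = {k for k, _ in r1.query}
    keys2 = {k for k, _ in r2.query}
    assert keys1 == keys2, f"{keys1} != {keys2}"

my_vcr = vcr.VCR()
my_vcr.register_matcher("query_keys", query_keys)
# used via: my_vcr.use_cassette("x.yaml", match_on=["method", "host", "path", "query_keys"])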


@@ -7,7 +7,7 @@ It merges and deletes the request obsolete keys (protocol, host, port, path)
into new 'uri' key.
Usage::
python3 -m vcr.migration PATH
python -m vcr.migration PATH
The PATH can be path to the directory with cassettes or cassette itself
"""
@@ -17,12 +17,11 @@ import os
import shutil
import sys
import tempfile
import yaml
from . import request
from .serializers import yamlserializer, jsonserializer
from .serialize import serialize
from .serializers import jsonserializer, yamlserializer
from . import request
from .stubs.compat import get_httpmessage
# Use the libYAML versions if possible
@@ -39,46 +38,55 @@ def preprocess_yaml(cassette):
# versions. So this just strips the tags before deserializing.
STRINGS_TO_NUKE = [
"!!python/object:vcr.request.Request",
"!!python/object/apply:__builtin__.frozenset",
"!!python/object/apply:builtins.frozenset",
'!!python/object:vcr.request.Request',
'!!python/object/apply:__builtin__.frozenset',
'!!python/object/apply:builtins.frozenset',
]
for s in STRINGS_TO_NUKE:
cassette = cassette.replace(s, "")
cassette = cassette.replace(s, '')
return cassette
PARTS = ["protocol", "host", "port", "path"]
PARTS = [
'protocol',
'host',
'port',
'path',
]
def build_uri(**parts):
port = parts["port"]
scheme = parts["protocol"]
default_port = {"https": 443, "http": 80}[scheme]
parts["port"] = f":{port}" if port != default_port else ""
port = parts['port']
scheme = parts['protocol']
default_port = {'https': 443, 'http': 80}[scheme]
parts['port'] = ':{}'.format(port) if port != default_port else ''
return "{protocol}://{host}{port}{path}".format(**parts)
def _migrate(data):
interactions = []
for item in data:
req = item["request"]
res = item["response"]
uri = {k: req.pop(k) for k in PARTS}
req["uri"] = build_uri(**uri)
req = item['request']
res = item['response']
uri = dict((k, req.pop(k)) for k in PARTS)
req['uri'] = build_uri(**uri)
# convert headers to dict of lists
headers = req["headers"]
headers = req['headers']
for k in headers:
headers[k] = [headers[k]]
response_headers = {}
for k, v in get_httpmessage(b"".join(h.encode("utf-8") for h in res["headers"])).items():
for k, v in get_httpmessage(
b"".join(h.encode('utf-8') for h in res['headers'])
).items():
response_headers.setdefault(k, [])
response_headers[k].append(v)
res["headers"] = response_headers
interactions.append({"request": req, "response": res})
res['headers'] = response_headers
interactions.append({'request': req, 'response': res})
return {
"requests": [request.Request._from_dict(i["request"]) for i in interactions],
"responses": [i["response"] for i in interactions],
'requests': [
request.Request._from_dict(i['request']) for i in interactions
],
'responses': [i['response'] for i in interactions],
}
@@ -92,12 +100,12 @@ def migrate_json(in_fp, out_fp):
def _list_of_tuples_to_dict(fs):
return dict(fs[0])
return dict((k, v) for k, v in fs[0])
def _already_migrated(data):
try:
if data.get("version") == 1:
if data.get('version') == 1:
return True
except AttributeError:
return False
@@ -108,7 +116,9 @@ def migrate_yml(in_fp, out_fp):
if _already_migrated(data):
return False
for i in range(len(data)):
data[i]["request"]["headers"] = _list_of_tuples_to_dict(data[i]["request"]["headers"])
data[i]['request']['headers'] = _list_of_tuples_to_dict(
data[i]['request']['headers']
)
interactions = _migrate(data)
out_fp.write(serialize(interactions, yamlserializer))
return True
@@ -117,42 +127,43 @@ def migrate_yml(in_fp, out_fp):
def migrate(file_path, migration_fn):
# because we assume that original files can be reverted
# we will try to copy the content. (os.rename not needed)
with tempfile.TemporaryFile(mode="w+") as out_fp:
with open(file_path) as in_fp:
with tempfile.TemporaryFile(mode='w+') as out_fp:
with open(file_path, 'r') as in_fp:
if not migration_fn(in_fp, out_fp):
return False
with open(file_path, "w") as in_fp:
with open(file_path, 'w') as in_fp:
out_fp.seek(0)
shutil.copyfileobj(out_fp, in_fp)
return True
def try_migrate(path):
if path.endswith(".json"):
if path.endswith('.json'):
return migrate(path, migrate_json)
elif path.endswith((".yaml", ".yml")):
elif path.endswith('.yaml') or path.endswith('.yml'):
return migrate(path, migrate_yml)
return False
def main():
if len(sys.argv) != 2:
raise SystemExit(
"Please provide path to cassettes directory or file. Usage: python3 -m vcr.migration PATH",
)
raise SystemExit("Please provide path to cassettes directory or file. "
"Usage: python -m vcr.migration PATH")
path = sys.argv[1]
if not os.path.isabs(path):
path = os.path.abspath(path)
files = [path]
if os.path.isdir(path):
files = (os.path.join(root, name) for (root, dirs, files) in os.walk(path) for name in files)
files = (os.path.join(root, name)
for (root, dirs, files) in os.walk(path)
for name in files)
for file_path in files:
migrated = try_migrate(file_path)
status = "OK" if migrated else "FAIL"
sys.stderr.write(f"[{status}] {file_path}\n")
migrated = try_migrate(file_path)
status = 'OK' if migrated else 'FAIL'
sys.stderr.write("[{}] {}\n".format(status, file_path))
sys.stderr.write("Done.\n")
if __name__ == "__main__":
if __name__ == '__main__':
main()


@@ -1,61 +1,48 @@
"""Utilities for patching in cassettes"""
import contextlib
'''Utilities for patching in cassettes'''
import functools
import http.client as httplib
import itertools
import logging
from unittest import mock
from .compat import contextlib, mock
from .stubs import VCRHTTPConnection, VCRHTTPSConnection
from six.moves import http_client as httplib
log = logging.getLogger(__name__)
# Save some of the original types for the purposes of unpatching
_HTTPConnection = httplib.HTTPConnection
_HTTPSConnection = httplib.HTTPSConnection
# Try to save the original types for boto3
try:
from botocore.awsrequest import AWSHTTPConnection, AWSHTTPSConnection
except ImportError as e:
try:
import botocore.vendored.requests # noqa: F401
except ImportError: # pragma: no cover
pass
else:
raise RuntimeError(
"vcrpy >=4.2.2 and botocore <1.11.0 are not compatible"
"; please upgrade botocore (or downgrade vcrpy)",
) from e
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
_Boto3VerifiedHTTPSConnection = AWSHTTPSConnection
_cpoolBoto3HTTPConnection = AWSHTTPConnection
_cpoolBoto3HTTPSConnection = AWSHTTPSConnection
_Boto3VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
_cpoolBoto3HTTPConnection = cpool.HTTPConnection
_cpoolBoto3HTTPSConnection = cpool.HTTPSConnection
cpool = None
conn = None
# Try to save the original types for urllib3
try:
import urllib3.connection as conn
import urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
_VerifiedHTTPSConnection = conn.VerifiedHTTPSConnection
_connHTTPConnection = conn.HTTPConnection
_connHTTPSConnection = conn.HTTPSConnection
_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
_cpoolHTTPConnection = cpool.HTTPConnection
_cpoolHTTPSConnection = cpool.HTTPSConnection
# Try to save the original types for requests
try:
import requests
if not cpool:
import requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
if requests.__build__ < 0x021602:
raise RuntimeError(
"vcrpy >=4.2.2 and requests <2.16.2 are not compatible"
"; please upgrade requests (or downgrade vcrpy)",
)
_VerifiedHTTPSConnection = cpool.VerifiedHTTPSConnection
_cpoolHTTPConnection = cpool.HTTPConnection
_cpoolHTTPSConnection = cpool.HTTPSConnection
# Try to save the original types for httplib2
@@ -68,20 +55,33 @@ else:
_HTTPSConnectionWithTimeout = httplib2.HTTPSConnectionWithTimeout
_SCHEME_TO_CONNECTION = httplib2.SCHEME_TO_CONNECTION
# Try to save the original types for boto
try:
import boto.https_connection
except ImportError: # pragma: no cover
pass
else:
_CertValidatingHTTPSConnection = boto.https_connection.CertValidatingHTTPSConnection
# Try to save the original types for Tornado
try:
import tornado.simple_httpclient
except ImportError: # pragma: no cover
pass
else:
_SimpleAsyncHTTPClient_fetch_impl = tornado.simple_httpclient.SimpleAsyncHTTPClient.fetch_impl
_SimpleAsyncHTTPClient_fetch_impl = \
tornado.simple_httpclient.SimpleAsyncHTTPClient.fetch_impl
try:
import tornado.curl_httpclient
except ImportError: # pragma: no cover
pass
else:
_CurlAsyncHTTPClient_fetch_impl = tornado.curl_httpclient.CurlAsyncHTTPClient.fetch_impl
_CurlAsyncHTTPClient_fetch_impl = \
tornado.curl_httpclient.CurlAsyncHTTPClient.fetch_impl
try:
import aiohttp.client
@@ -91,21 +91,14 @@ else:
_AiohttpClientSessionRequest = aiohttp.client.ClientSession._request
try:
import httpcore
except ImportError: # pragma: no cover
pass
else:
_HttpcoreConnectionPool_handle_request = httpcore.ConnectionPool.handle_request
_HttpcoreAsyncConnectionPool_handle_async_request = httpcore.AsyncConnectionPool.handle_async_request
class CassettePatcherBuilder(object):
class CassettePatcherBuilder:
def _build_patchers_from_mock_triples_decorator(function):
@functools.wraps(function)
def wrapped(self, *args, **kwargs):
return self._build_patchers_from_mock_triples(function(self, *args, **kwargs))
return self._build_patchers_from_mock_triples(
function(self, *args, **kwargs)
)
return wrapped
def __init__(self, cassette):
@@ -114,15 +107,11 @@ class CassettePatcherBuilder:
def build(self):
return itertools.chain(
self._httplib(),
self._requests(),
self._boto3(),
self._urllib3(),
self._httplib2(),
self._tornado(),
self._aiohttp(),
self._httpcore(),
self._build_patchers_from_mock_triples(self._cassette.custom_patches),
self._httplib(), self._requests(), self._boto3(), self._urllib3(),
self._httplib2(), self._boto(), self._tornado(), self._aiohttp(),
self._build_patchers_from_mock_triples(
self._cassette.custom_patches
),
)
def _build_patchers_from_mock_triples(self, mock_triples):
@@ -135,11 +124,9 @@ class CassettePatcherBuilder:
if not hasattr(obj, patched_attribute):
return
return mock.patch.object(
obj,
patched_attribute,
self._recursively_apply_get_cassette_subclass(replacement_class),
)
return mock.patch.object(obj, patched_attribute,
self._recursively_apply_get_cassette_subclass(
replacement_class))
def _recursively_apply_get_cassette_subclass(self, replacement_dict_or_obj):
"""One of the subtleties of this class is that it does not directly
@@ -161,11 +148,13 @@ class CassettePatcherBuilder:
"""
if isinstance(replacement_dict_or_obj, dict):
for key, replacement_obj in replacement_dict_or_obj.items():
replacement_obj = self._recursively_apply_get_cassette_subclass(replacement_obj)
replacement_obj = self._recursively_apply_get_cassette_subclass(
replacement_obj)
replacement_dict_or_obj[key] = replacement_obj
return replacement_dict_or_obj
if hasattr(replacement_dict_or_obj, "cassette"):
replacement_dict_or_obj = self._get_cassette_subclass(replacement_dict_or_obj)
if hasattr(replacement_dict_or_obj, 'cassette'):
replacement_dict_or_obj = self._get_cassette_subclass(
replacement_dict_or_obj)
return replacement_dict_or_obj
def _get_cassette_subclass(self, klass):
@@ -180,33 +169,28 @@ class CassettePatcherBuilder:
bases = (base_class,)
if not issubclass(base_class, object): # Check for old style class
bases += (object,)
return type(f"{base_class.__name__}{self._cassette._path}", bases, {"cassette": self._cassette})
return type('{}{}'.format(base_class.__name__, self._cassette._path),
bases, dict(cassette=self._cassette))
@_build_patchers_from_mock_triples_decorator
def _httplib(self):
yield httplib, "HTTPConnection", VCRHTTPConnection
yield httplib, "HTTPSConnection", VCRHTTPSConnection
yield httplib, 'HTTPConnection', VCRHTTPConnection
yield httplib, 'HTTPSConnection', VCRHTTPSConnection
def _requests(self):
try:
from .stubs import requests_stubs
except ImportError: # pragma: no cover
return ()
return self._urllib3_patchers(cpool, conn, requests_stubs)
return self._urllib3_patchers(cpool, requests_stubs)
@_build_patchers_from_mock_triples_decorator
def _boto3(self):
try:
# botocore using awsrequest
import botocore.awsrequest as cpool
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
from .stubs import boto3_stubs
log.debug("Patching boto3 cpool with %s", cpool)
yield cpool.AWSHTTPConnectionPool, "ConnectionCls", boto3_stubs.VCRRequestsHTTPConnection
yield cpool.AWSHTTPSConnectionPool, "ConnectionCls", boto3_stubs.VCRRequestsHTTPSConnection
return ()
from .stubs import boto3_stubs
return self._urllib3_patchers(cpool, boto3_stubs)
def _patched_get_conn(self, connection_pool_class, connection_class_getter):
get_conn = connection_pool_class._get_conn
@@ -215,8 +199,8 @@ class CassettePatcherBuilder:
def patched_get_conn(pool, timeout=None):
connection = get_conn(pool, timeout)
connection_class = (
pool.ConnectionCls if hasattr(pool, "ConnectionCls") else connection_class_getter()
)
pool.ConnectionCls if hasattr(pool, 'ConnectionCls')
else connection_class_getter())
# We need to make sure that we are actually providing a
# patched version of the connection class. This might not
# always be the case because the pool keeps previously
@@ -242,13 +226,11 @@ class CassettePatcherBuilder:
def _urllib3(self):
try:
import urllib3.connection as conn
import urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
return ()
from .stubs import urllib3_stubs
return self._urllib3_patchers(cpool, conn, urllib3_stubs)
return self._urllib3_patchers(cpool, urllib3_stubs)
@_build_patchers_from_mock_triples_decorator
def _httplib2(self):
@@ -257,18 +239,23 @@ class CassettePatcherBuilder:
except ImportError: # pragma: no cover
pass
else:
from .stubs.httplib2_stubs import VCRHTTPConnectionWithTimeout, VCRHTTPSConnectionWithTimeout
from .stubs.httplib2_stubs import VCRHTTPConnectionWithTimeout
from .stubs.httplib2_stubs import VCRHTTPSConnectionWithTimeout
yield cpool, "HTTPConnectionWithTimeout", VCRHTTPConnectionWithTimeout
yield cpool, "HTTPSConnectionWithTimeout", VCRHTTPSConnectionWithTimeout
yield (
cpool,
"SCHEME_TO_CONNECTION",
{
"http": VCRHTTPConnectionWithTimeout,
"https": VCRHTTPSConnectionWithTimeout,
},
)
yield cpool, 'HTTPConnectionWithTimeout', VCRHTTPConnectionWithTimeout
yield cpool, 'HTTPSConnectionWithTimeout', VCRHTTPSConnectionWithTimeout
yield cpool, 'SCHEME_TO_CONNECTION', {'http': VCRHTTPConnectionWithTimeout,
'https': VCRHTTPSConnectionWithTimeout}
@_build_patchers_from_mock_triples_decorator
def _boto(self):
try:
import boto.https_connection as cpool
except ImportError: # pragma: no cover
pass
else:
from .stubs.boto_stubs import VCRCertValidatingHTTPSConnection
yield cpool, 'CertValidatingHTTPSConnection', VCRCertValidatingHTTPSConnection
@_build_patchers_from_mock_triples_decorator
def _tornado(self):
@@ -279,8 +266,10 @@ class CassettePatcherBuilder:
else:
from .stubs.tornado_stubs import vcr_fetch_impl
new_fetch_impl = vcr_fetch_impl(self._cassette, _SimpleAsyncHTTPClient_fetch_impl)
yield simple.SimpleAsyncHTTPClient, "fetch_impl", new_fetch_impl
new_fetch_impl = vcr_fetch_impl(
self._cassette, _SimpleAsyncHTTPClient_fetch_impl
)
yield simple.SimpleAsyncHTTPClient, 'fetch_impl', new_fetch_impl
try:
import tornado.curl_httpclient as curl
except ImportError: # pragma: no cover
@@ -288,8 +277,10 @@ class CassettePatcherBuilder:
else:
from .stubs.tornado_stubs import vcr_fetch_impl
new_fetch_impl = vcr_fetch_impl(self._cassette, _CurlAsyncHTTPClient_fetch_impl)
yield curl.CurlAsyncHTTPClient, "fetch_impl", new_fetch_impl
new_fetch_impl = vcr_fetch_impl(
self._cassette, _CurlAsyncHTTPClient_fetch_impl
)
yield curl.CurlAsyncHTTPClient, 'fetch_impl', new_fetch_impl
@_build_patchers_from_mock_triples_decorator
def _aiohttp(self):
@@ -299,75 +290,47 @@ class CassettePatcherBuilder:
pass
else:
from .stubs.aiohttp_stubs import vcr_request
new_request = vcr_request(self._cassette, _AiohttpClientSessionRequest)
yield client.ClientSession, "_request", new_request
@_build_patchers_from_mock_triples_decorator
def _httpcore(self):
try:
import httpcore
except ImportError: # pragma: no cover
return
else:
from .stubs.httpcore_stubs import vcr_handle_async_request, vcr_handle_request
new_handle_async_request = vcr_handle_async_request(
self._cassette,
_HttpcoreAsyncConnectionPool_handle_async_request,
new_request = vcr_request(
self._cassette, _AiohttpClientSessionRequest
)
yield httpcore.AsyncConnectionPool, "handle_async_request", new_handle_async_request
yield client.ClientSession, '_request', new_request
new_handle_request = vcr_handle_request(self._cassette, _HttpcoreConnectionPool_handle_request)
yield httpcore.ConnectionPool, "handle_request", new_handle_request
def _urllib3_patchers(self, cpool, conn, stubs):
def _urllib3_patchers(self, cpool, stubs):
http_connection_remover = ConnectionRemover(
self._get_cassette_subclass(stubs.VCRRequestsHTTPConnection),
self._get_cassette_subclass(stubs.VCRRequestsHTTPConnection)
)
https_connection_remover = ConnectionRemover(
self._get_cassette_subclass(stubs.VCRRequestsHTTPSConnection),
self._get_cassette_subclass(stubs.VCRRequestsHTTPSConnection)
)
mock_triples = (
(conn, "VerifiedHTTPSConnection", stubs.VCRRequestsHTTPSConnection),
(conn, "HTTPConnection", stubs.VCRRequestsHTTPConnection),
(conn, "HTTPSConnection", stubs.VCRRequestsHTTPSConnection),
(cpool, "is_connection_dropped", mock.Mock(return_value=False)), # Needed on Windows only
(cpool.HTTPConnectionPool, "ConnectionCls", stubs.VCRRequestsHTTPConnection),
(cpool.HTTPSConnectionPool, "ConnectionCls", stubs.VCRRequestsHTTPSConnection),
(cpool, 'VerifiedHTTPSConnection', stubs.VCRRequestsHTTPSConnection),
(cpool, 'HTTPConnection', stubs.VCRRequestsHTTPConnection),
(cpool, 'HTTPSConnection', stubs.VCRRequestsHTTPSConnection),
(cpool, 'is_connection_dropped', mock.Mock(return_value=False)), # Needed on Windows only
(cpool.HTTPConnectionPool, 'ConnectionCls', stubs.VCRRequestsHTTPConnection),
(cpool.HTTPSConnectionPool, 'ConnectionCls', stubs.VCRRequestsHTTPSConnection),
)
# These handle making sure that sessions only use the
# connections of the appropriate type.
mock_triples += (
(
cpool.HTTPConnectionPool,
"_get_conn",
self._patched_get_conn(cpool.HTTPConnectionPool, lambda: cpool.HTTPConnection),
),
(
cpool.HTTPSConnectionPool,
"_get_conn",
self._patched_get_conn(cpool.HTTPSConnectionPool, lambda: cpool.HTTPSConnection),
),
(
cpool.HTTPConnectionPool,
"_new_conn",
self._patched_new_conn(cpool.HTTPConnectionPool, http_connection_remover),
),
(
cpool.HTTPSConnectionPool,
"_new_conn",
self._patched_new_conn(cpool.HTTPSConnectionPool, https_connection_remover),
),
)
mock_triples += ((cpool.HTTPConnectionPool, '_get_conn',
self._patched_get_conn(cpool.HTTPConnectionPool,
lambda: cpool.HTTPConnection)),
(cpool.HTTPSConnectionPool, '_get_conn',
self._patched_get_conn(cpool.HTTPSConnectionPool,
lambda: cpool.HTTPSConnection)),
(cpool.HTTPConnectionPool, '_new_conn',
self._patched_new_conn(cpool.HTTPConnectionPool,
http_connection_remover)),
(cpool.HTTPSConnectionPool, '_new_conn',
self._patched_new_conn(cpool.HTTPSConnectionPool,
https_connection_remover)))
return itertools.chain(
self._build_patchers_from_mock_triples(mock_triples),
(http_connection_remover, https_connection_remover),
)
return itertools.chain(self._build_patchers_from_mock_triples(mock_triples),
(http_connection_remover, https_connection_remover))
class ConnectionRemover:
class ConnectionRemover(object):
def __init__(self, connection_class):
self._connection_class = connection_class
self._connection_pool_to_connections = {}
@@ -376,6 +339,10 @@ class ConnectionRemover:
if isinstance(connection, self._connection_class):
self._connection_pool_to_connections.setdefault(pool, set()).add(connection)
def remove_connection_to_pool_entry(self, pool, connection):
if isinstance(connection, self._connection_class):
self._connection_pool_to_connections[self._connection_class].remove(connection)
def __enter__(self):
return self
@@ -386,66 +353,114 @@ class ConnectionRemover:
connection = pool.pool.get()
if isinstance(connection, self._connection_class):
connections.remove(connection)
connection.close()
else:
readd_connections.append(connection)
for connection in readd_connections:
pool._put_conn(connection)
for connection in connections:
connection.close()
def reset_patchers():
yield mock.patch.object(httplib, "HTTPConnection", _HTTPConnection)
yield mock.patch.object(httplib, "HTTPSConnection", _HTTPSConnection)
yield mock.patch.object(httplib, 'HTTPConnection', _HTTPConnection)
yield mock.patch.object(httplib, 'HTTPSConnection', _HTTPSConnection)
try:
import requests
if requests.__build__ < 0x021603:
# Avoid double unmock if requests 2.16.3
# First, this is pointless, requests.packages.urllib3 *IS* urllib3 (see packages.py)
# Second, this is unmocking twice the same classes with different namespaces
# and is creating weird issues and bugs:
# > AssertionError: assert <class 'urllib3.connection.HTTPConnection'>
# > is <class 'requests.packages.urllib3.connection.HTTPConnection'>
# This assert should work!!!
# Note that this also means that now, requests.packages is never imported
# if requests 2.16.3 or greater is used with VCRPy.
import requests.packages.urllib3.connectionpool as cpool
else:
raise ImportError("Skip requests not vendored anymore")
except ImportError: # pragma: no cover
pass
else:
# unpatch requests v1.x
yield mock.patch.object(cpool, 'VerifiedHTTPSConnection', _VerifiedHTTPSConnection)
yield mock.patch.object(cpool, 'HTTPConnection', _cpoolHTTPConnection)
# unpatch requests v2.x
if hasattr(cpool.HTTPConnectionPool, 'ConnectionCls'):
yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls',
_cpoolHTTPConnection)
yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls',
_cpoolHTTPSConnection)
if hasattr(cpool, 'HTTPSConnection'):
yield mock.patch.object(cpool, 'HTTPSConnection', _cpoolHTTPSConnection)
try:
import urllib3.connection as conn
import urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(conn, "VerifiedHTTPSConnection", _VerifiedHTTPSConnection)
yield mock.patch.object(conn, "HTTPConnection", _connHTTPConnection)
yield mock.patch.object(conn, "HTTPSConnection", _connHTTPSConnection)
if hasattr(cpool.HTTPConnectionPool, "ConnectionCls"):
yield mock.patch.object(cpool.HTTPConnectionPool, "ConnectionCls", _connHTTPConnection)
yield mock.patch.object(cpool.HTTPSConnectionPool, "ConnectionCls", _connHTTPSConnection)
yield mock.patch.object(cpool, 'VerifiedHTTPSConnection', _VerifiedHTTPSConnection)
yield mock.patch.object(cpool, 'HTTPConnection', _cpoolHTTPConnection)
yield mock.patch.object(cpool, 'HTTPSConnection', _cpoolHTTPSConnection)
if hasattr(cpool.HTTPConnectionPool, 'ConnectionCls'):
yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls', _cpoolHTTPConnection)
yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls', _cpoolHTTPSConnection)
try:
# unpatch botocore with awsrequest
import botocore.awsrequest as cpool
import botocore.vendored.requests.packages.urllib3.connectionpool as cpool
except ImportError: # pragma: no cover
pass
else:
if hasattr(cpool.AWSHTTPConnectionPool, "ConnectionCls"):
yield mock.patch.object(cpool.AWSHTTPConnectionPool, "ConnectionCls", _cpoolBoto3HTTPConnection)
yield mock.patch.object(cpool.AWSHTTPSConnectionPool, "ConnectionCls", _cpoolBoto3HTTPSConnection)
# unpatch requests v1.x
yield mock.patch.object(cpool, 'VerifiedHTTPSConnection', _Boto3VerifiedHTTPSConnection)
yield mock.patch.object(cpool, 'HTTPConnection', _cpoolBoto3HTTPConnection)
# unpatch requests v2.x
if hasattr(cpool.HTTPConnectionPool, 'ConnectionCls'):
yield mock.patch.object(cpool.HTTPConnectionPool, 'ConnectionCls',
_cpoolBoto3HTTPConnection)
yield mock.patch.object(cpool.HTTPSConnectionPool, 'ConnectionCls',
_cpoolBoto3HTTPSConnection)
if hasattr(cpool, "AWSHTTPSConnection"):
yield mock.patch.object(cpool, "AWSHTTPSConnection", _cpoolBoto3HTTPSConnection)
if hasattr(cpool, 'HTTPSConnection'):
yield mock.patch.object(cpool, 'HTTPSConnection', _cpoolBoto3HTTPSConnection)
try:
import httplib2 as cpool
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(cpool, "HTTPConnectionWithTimeout", _HTTPConnectionWithTimeout)
yield mock.patch.object(cpool, "HTTPSConnectionWithTimeout", _HTTPSConnectionWithTimeout)
yield mock.patch.object(cpool, "SCHEME_TO_CONNECTION", _SCHEME_TO_CONNECTION)
yield mock.patch.object(cpool, 'HTTPConnectionWithTimeout', _HTTPConnectionWithTimeout)
yield mock.patch.object(cpool, 'HTTPSConnectionWithTimeout', _HTTPSConnectionWithTimeout)
yield mock.patch.object(cpool, 'SCHEME_TO_CONNECTION', _SCHEME_TO_CONNECTION)
try:
import boto.https_connection as cpool
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(cpool, 'CertValidatingHTTPSConnection',
_CertValidatingHTTPSConnection)
try:
import tornado.simple_httpclient as simple
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(simple.SimpleAsyncHTTPClient, "fetch_impl", _SimpleAsyncHTTPClient_fetch_impl)
yield mock.patch.object(
simple.SimpleAsyncHTTPClient,
'fetch_impl',
_SimpleAsyncHTTPClient_fetch_impl,
)
try:
import tornado.curl_httpclient as curl
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(curl.CurlAsyncHTTPClient, "fetch_impl", _CurlAsyncHTTPClient_fetch_impl)
yield mock.patch.object(
curl.CurlAsyncHTTPClient,
'fetch_impl',
_CurlAsyncHTTPClient_fetch_impl,
)
@contextlib.contextmanager


@@ -1,40 +1,26 @@
# .. _persister_example:
from pathlib import Path
from ..serialize import deserialize, serialize
import os
from ..serialize import serialize, deserialize
class CassetteNotFoundError(FileNotFoundError):
pass
class FilesystemPersister(object):
class CassetteDecodeError(ValueError):
pass
class FilesystemPersister:
@classmethod
def load_cassette(cls, cassette_path, serializer):
cassette_path = Path(cassette_path) # if cassette path is already Path this is no operation
if not cassette_path.is_file():
raise CassetteNotFoundError()
try:
with cassette_path.open() as f:
data = f.read()
except UnicodeDecodeError as err:
raise CassetteDecodeError("Can't read Cassette, Encoding is broken") from err
return deserialize(data, serializer)
with open(cassette_path) as f:
cassette_content = f.read()
except IOError:
raise ValueError('Cassette not found.')
cassette = deserialize(cassette_content, serializer)
return cassette
@staticmethod
def save_cassette(cassette_path, cassette_dict, serializer):
data = serialize(cassette_dict, serializer)
cassette_path = Path(cassette_path) # if cassette path is already Path this is no operation
cassette_folder = cassette_path.parent
if not cassette_folder.exists():
cassette_folder.mkdir(parents=True)
with cassette_path.open("w") as f:
dirname, filename = os.path.split(cassette_path)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
with open(cassette_path, 'w') as f:
f.write(data)
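FilesystemPersister is also the reference implementation for custom persisters; a hedged sketch of a drop-in replacement targeting the newer side, assuming the register_persister hook and the CassetteNotFoundError class shown above:
import vcr
from vcr.persisters.filesystem import CassetteNotFoundError
from vcr.serialize import deserialize, serialize

class InMemoryPersister:
    """Hypothetical persister that keeps cassettes in a process-local dict."""

    _store = {}

    @classmethod
    def load_cassette(cls, cassette_path, serializer):
        try:
            data = cls._store[cassette_path]
        except KeyError:
            raise CassetteNotFoundError(cassette_path) from None
        return deserialize(data, serializer)

    @classmethod
    def save_cassette(cls, cassette_path, cassette_dict, serializer):
        cls._store[cassette_path] = serialize(cassette_dict, serializer)

my_vcr = vcr.VCR()
my_vcr.register_persister(InMemoryPersister)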


@@ -1,23 +0,0 @@
from enum import Enum
class RecordMode(str, Enum):
"""
Configures when VCR will record to the cassette.
Can be declared by either using the enumerated value (`vcr.mode.ONCE`)
or by simply using the defined string (`once`).
`ALL`: Every request is recorded.
`ANY`: ?
`NEW_EPISODES`: Any request not found in the cassette is recorded.
`NONE`: No requests are recorded.
`ONCE`: First set of requests is recorded, all others are replayed.
Attempting to add a new episode fails.
"""
ALL = "all"
ANY = "any"
NEW_EPISODES = "new_episodes"
NONE = "none"
ONCE = "once"


@@ -1,15 +1,10 @@
import logging
import warnings
from contextlib import suppress
from io import BytesIO
from urllib.parse import parse_qsl, urlparse
from .util import CaseInsensitiveDict, _is_nonsequence_iterator
log = logging.getLogger(__name__)
from six import BytesIO, text_type
from six.moves.urllib.parse import urlparse, parse_qsl
from .util import CaseInsensitiveDict
class Request:
class Request(object):
"""
VCR's representation of a request.
"""
@@ -17,25 +12,12 @@ class Request:
def __init__(self, method, uri, body, headers):
self.method = method
self.uri = uri
self._was_file = hasattr(body, "read")
self._was_iter = _is_nonsequence_iterator(body)
self._was_file = hasattr(body, 'read')
if self._was_file:
self.body = body.read()
elif self._was_iter:
self.body = list(body)
else:
self.body = body
self.headers = headers
log.debug("Invoking Request %s", self.uri)
@property
def uri(self):
return self._uri
@uri.setter
def uri(self, uri):
self._uri = uri
self.parsed_uri = urlparse(uri)
@property
def headers(self):
@@ -49,50 +31,43 @@ class Request:
@property
def body(self):
if self._was_file:
return BytesIO(self._body)
if self._was_iter:
return iter(self._body)
return self._body
return BytesIO(self._body) if self._was_file else self._body
@body.setter
def body(self, value):
if isinstance(value, str):
value = value.encode("utf-8")
if isinstance(value, text_type):
value = value.encode('utf-8')
self._body = value
def add_header(self, key, value):
warnings.warn(
"Request.add_header is deprecated. Please assign to request.headers instead.",
DeprecationWarning,
stacklevel=2,
)
warnings.warn("Request.add_header is deprecated. "
"Please assign to request.headers instead.",
DeprecationWarning)
self.headers[key] = value
@property
def scheme(self):
return self.parsed_uri.scheme
return urlparse(self.uri).scheme
@property
def host(self):
return self.parsed_uri.hostname
return urlparse(self.uri).hostname
@property
def port(self):
port = self.parsed_uri.port
parse_uri = urlparse(self.uri)
port = parse_uri.port
if port is None:
with suppress(KeyError):
port = {"https": 443, "http": 80}[self.parsed_uri.scheme]
port = {'https': 443, 'http': 80}[parse_uri.scheme]
return port
@property
def path(self):
return self.parsed_uri.path
return urlparse(self.uri).path
@property
def query(self):
q = self.parsed_uri.query
q = urlparse(self.uri).query
return sorted(parse_qsl(q))
# alias for backwards compatibility
@@ -106,17 +81,17 @@ class Request:
return self.scheme
def __str__(self):
return f"<Request ({self.method}) {self.uri}>"
return "<Request ({}) {}>".format(self.method, self.uri)
def __repr__(self):
return self.__str__()
def _to_dict(self):
return {
"method": self.method,
"uri": self.uri,
"body": self.body,
"headers": {k: [v] for k, v in self.headers.items()},
'method': self.method,
'uri': self.uri,
'body': self.body,
'headers': dict(((k, [v]) for k, v in self.headers.items())),
}
@classmethod
@@ -137,7 +112,7 @@ class HeadersDict(CaseInsensitiveDict):
In addition, some servers sometimes send the same header more than once,
and httplib *can* deal with this situation.
Furthermore, I wanted to keep the request and response cassette format as
similar as possible.
For this reason, in cassettes I keep a dict with lists as keys, but once
@@ -153,4 +128,4 @@ class HeadersDict(CaseInsensitiveDict):
if old:
key = old[0]
super().__setitem__(key, value)
super(HeadersDict, self).__setitem__(key, value)
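The derived properties above (scheme, host, port, path, query) all come from parsing uri; a small sketch of constructing a Request directly and reading them back:

from vcr.request import Request

req = Request(
    method="GET",
    uri="https://example.com:8443/search?b=2&a=1",
    body="",
    headers={"User-Agent": "demo"},
)

print(req.scheme)  # 'https'
print(req.host)    # 'example.com'
print(req.port)    # 8443 (falls back to 443/80 when the URI has no port)
print(req.path)    # '/search'
print(req.query)   # [('a', '1'), ('b', '2')] -- parse_qsl output, sorted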

View File

@@ -1,7 +1,6 @@
import yaml
from vcr.request import Request
from vcr.serializers import compat
from vcr.request import Request
import yaml
# version 1 cassettes started with VCR 1.0.x.
# Before 1.0.x, there was no versioning.
@@ -21,14 +20,14 @@ Deserializing: string (yaml converts from utf-8) -> bytestring
def _looks_like_an_old_cassette(data):
return isinstance(data, list) and len(data) and "request" in data[0]
return isinstance(data, list) and len(data) and 'request' in data[0]
def _warn_about_old_cassette_format():
raise ValueError(
"Your cassette files were generated in an older version "
"of VCR. Delete your cassettes or run the migration script."
"See http://git.io/mHhLBg for more details.",
"See http://git.io/mHhLBg for more details."
)
@@ -42,18 +41,23 @@ def deserialize(cassette_string, serializer):
if _looks_like_an_old_cassette(data):
_warn_about_old_cassette_format()
requests = [Request._from_dict(r["request"]) for r in data["interactions"]]
responses = [compat.convert_to_bytes(r["response"]) for r in data["interactions"]]
requests = [Request._from_dict(r['request']) for r in data['interactions']]
responses = [
compat.convert_to_bytes(r['response']) for r in data['interactions']
]
return requests, responses
def serialize(cassette_dict, serializer):
interactions = [
{
"request": compat.convert_to_unicode(request._to_dict()),
"response": compat.convert_to_unicode(response),
}
for request, response in zip(cassette_dict["requests"], cassette_dict["responses"], strict=False)
]
data = {"version": CASSETTE_FORMAT_VERSION, "interactions": interactions}
interactions = ([{
'request': compat.convert_to_unicode(request._to_dict()),
'response': compat.convert_to_unicode(response),
} for request, response in zip(
cassette_dict['requests'],
cassette_dict['responses'],
)])
data = {
'version': CASSETTE_FORMAT_VERSION,
'interactions': interactions,
}
return serializer.serialize(data)
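Putting the two halves together, serialize wraps requests and responses into the versioned interaction list and hands it to the chosen serializer, while deserialize reverses the trip; a minimal round-trip sketch with a hand-built response dict:

from vcr.request import Request
from vcr.serialize import deserialize, serialize
from vcr.serializers import yamlserializer

request = Request("GET", "https://example.com/", None, {})
response = {
    "status": {"code": 200, "message": "OK"},
    "headers": {"Content-Type": ["text/plain"]},
    "body": {"string": b"hello"},
}

cassette_text = serialize(
    {"requests": [request], "responses": [response]},
    yamlserializer,
)
requests, responses = deserialize(cassette_text, yamlserializer)
assert responses[0]["body"]["string"] == b"hello"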

View File

@@ -1,3 +1,6 @@
import six
def convert_to_bytes(resp):
resp = convert_body_to_bytes(resp)
return resp
@@ -21,8 +24,8 @@ def convert_body_to_bytes(resp):
http://pyyaml.org/wiki/PyYAMLDocumentation#Python3support
"""
try:
if resp["body"]["string"] is not None and not isinstance(resp["body"]["string"], bytes):
resp["body"]["string"] = resp["body"]["string"].encode("utf-8")
if resp['body']['string'] is not None and not isinstance(resp['body']['string'], six.binary_type):
resp['body']['string'] = resp['body']['string'].encode('utf-8')
except (KeyError, TypeError, UnicodeEncodeError):
# The thing we were converting either wasn't a dictionary or didn't
# have the keys we were expecting. Some of the tests just serialize
@@ -41,8 +44,8 @@ def _convert_string_to_unicode(string):
result = string
try:
if string is not None and not isinstance(string, str):
result = string.decode("utf-8")
if string is not None and not isinstance(string, six.text_type):
result = string.decode('utf-8')
except (TypeError, UnicodeDecodeError, AttributeError):
# Sometimes the string actually is binary or StringIO object,
# so if you can't decode it, just give up.
@@ -56,19 +59,21 @@ def convert_body_to_unicode(resp):
If the request or responses body is bytes, decode it to a string
(for python3 support)
"""
if not isinstance(resp, dict):
if type(resp) is not dict:
# Some of the tests just serialize and deserialize a string.
return _convert_string_to_unicode(resp)
else:
body = resp.get("body")
body = resp.get('body')
if body is not None:
try:
body["string"] = _convert_string_to_unicode(body["string"])
body['string'] = _convert_string_to_unicode(
body['string']
)
except (KeyError, TypeError, AttributeError):
# The thing we were converting either wasn't a dictionary or
# didn't have the keys we were expecting.
# For example request object has no 'string' key.
resp["body"] = _convert_string_to_unicode(body)
resp['body'] = _convert_string_to_unicode(body)
return resp
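In practice these helpers are only called on (de)serialization, converting the recorded body between bytes on the wire and text in the cassette file; a tiny sketch of that round trip:

from vcr.serializers import compat

recorded = {"body": {"string": b"caf\xc3\xa9"}}

as_text = compat.convert_body_to_unicode(recorded)  # body string -> 'café' for YAML/JSON
back = compat.convert_body_to_bytes(as_text)        # body string -> bytes again on load
assert back["body"]["string"] == "café".encode("utf-8")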

View File

@@ -1,4 +1,7 @@
import json
try:
import simplejson as json
except ImportError:
import json
def deserialize(cassette_string):
@@ -13,6 +16,14 @@ def serialize(cassette_dict):
)
try:
return json.dumps(cassette_dict, indent=4) + "\n"
except TypeError:
raise TypeError(error_message) from None
return json.dumps(cassette_dict, indent=4)
except UnicodeDecodeError as original: # py2
raise UnicodeDecodeError(
original.encoding,
b"Error serializing cassette to JSON",
original.start,
original.end,
original.args[-1] + error_message
)
except TypeError as original: # py3
raise TypeError(error_message)
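Selecting this serializer from user code is just a matter of naming it (and typically giving the cassette a .json extension); binary response bodies will trigger the TypeError with the hint above. The cassette path is hypothetical:

import vcr

with vcr.use_cassette("fixtures/example.json", serializer="json"):
    pass  # interactions recorded here are stored as indented JSON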

View File

@@ -2,10 +2,9 @@ import yaml
# Use the libYAML versions if possible
try:
from yaml import CDumper as Dumper
from yaml import CLoader as Loader
from yaml import CLoader as Loader, CDumper as Dumper
except ImportError:
from yaml import Dumper, Loader
from yaml import Loader, Dumper
def deserialize(cassette_string):
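The Loader/Dumper aliases are then passed explicitly so the faster libYAML implementations are used whenever the C extension is installed; a sketch of how the aliases are typically consumed (hypothetical demo_ names, mirroring what deserialize/serialize do here):

import yaml

try:
    from yaml import CDumper as Dumper
    from yaml import CLoader as Loader
except ImportError:
    from yaml import Dumper, Loader


def demo_deserialize(cassette_string):
    return yaml.load(cassette_string, Loader=Loader)


def demo_serialize(cassette_dict):
    return yaml.dump(cassette_dict, Dumper=Dumper)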

View File

@@ -1,22 +1,24 @@
"""Stubs for patching HTTP and HTTPS requests"""
'''Stubs for patching HTTP and HTTPS requests'''
import logging
from contextlib import suppress
from http.client import HTTPConnection, HTTPResponse, HTTPSConnection
from io import BytesIO
from vcr.errors import CannotOverwriteExistingCassetteException
import six
from six.moves.http_client import (
HTTPConnection,
HTTPSConnection,
HTTPResponse,
)
from six import BytesIO
from vcr.request import Request
from vcr.errors import CannotOverwriteExistingCassetteException
from . import compat
log = logging.getLogger(__name__)
class VCRFakeSocket:
class VCRFakeSocket(object):
"""
A socket that doesn't do anything!
Used when playing back cassettes, when there
is no actual open socket.
"""
@@ -43,14 +45,14 @@ def parse_headers(header_list):
header_string = b""
for key, values in header_list.items():
for v in values:
header_string += key.encode("utf-8") + b":" + v.encode("utf-8") + b"\r\n"
header_string += \
key.encode('utf-8') + b":" + v.encode('utf-8') + b"\r\n"
return compat.get_httpmessage(header_string)
def serialize_headers(response):
headers = response.headers if response.msg is None else response.msg
out = {}
for key, values in compat.get_headers(headers):
for key, values in compat.get_headers(response.msg):
out.setdefault(key, [])
out[key].extend(values)
return out
@@ -58,70 +60,42 @@ def serialize_headers(response):
class VCRHTTPResponse(HTTPResponse):
"""
Stub response class that gets returned instead of a HTTPResponse
"""
def __init__(self, recorded_response):
self.fp = None
self.recorded_response = recorded_response
self.reason = recorded_response["status"]["message"]
self.status = self.code = recorded_response["status"]["code"]
self.reason = recorded_response['status']['message']
self.status = self.code = recorded_response['status']['code']
self.version = None
self.version_string = None
self._content = BytesIO(self.recorded_response["body"]["string"])
self._content = BytesIO(self.recorded_response['body']['string'])
self._closed = False
self._original_response = self # for requests.session.Session cookie extraction
headers = self.recorded_response["headers"]
headers = self.recorded_response['headers']
# Since we are loading a response that has already been serialized, our
# response is no longer chunked. That means we don't want any
# libraries trying to process a chunked response. By removing the
# transfer-encoding: chunked header, this should cause the downstream
# libraries to process this as a non-chunked response.
te_key = [h for h in headers if h.upper() == "TRANSFER-ENCODING"]
te_key = [h for h in headers.keys() if h.upper() == 'TRANSFER-ENCODING']
if te_key:
del headers[te_key[0]]
self.headers = self.msg = parse_headers(headers)
self.length = compat.get_header(self.msg, "content-length") or None
self.length = compat.get_header(self.msg, 'content-length') or None
@property
def closed(self):
# in python3, I can't change the value of self.closed. So I'm
# twiddling self._closed and using this property to shadow the real
# self.closed from the superclass
return self._closed
def read(self, *args, **kwargs):
return self._content.read(*args, **kwargs)
def read1(self, *args, **kwargs):
return self._content.read1(*args, **kwargs)
def readall(self):
return self._content.readall()
def readinto(self, *args, **kwargs):
return self._content.readinto(*args, **kwargs)
def readline(self, *args, **kwargs):
return self._content.readline(*args, **kwargs)
def readlines(self, *args, **kwargs):
return self._content.readlines(*args, **kwargs)
def seekable(self):
return self._content.seekable()
def tell(self):
return self._content.tell()
def isatty(self):
return self._content.isatty()
def seek(self, *args, **kwargs):
return self._content.seek(*args, **kwargs)
def close(self):
self._closed = True
return True
@@ -133,54 +107,22 @@ class VCRHTTPResponse(HTTPResponse):
return self.closed
def info(self):
return parse_headers(self.recorded_response["headers"])
return parse_headers(self.recorded_response['headers'])
def getheaders(self):
message = parse_headers(self.recorded_response["headers"])
message = parse_headers(self.recorded_response['headers'])
return list(compat.get_header_items(message))
def getheader(self, header, default=None):
values = [v for (k, v) in self.getheaders() if k.lower() == header.lower()]
if values:
return ", ".join(values)
return ', '.join(values)
else:
return default
def readable(self):
return self._content.readable()
@property
def length_remaining(self):
return self._content.getbuffer().nbytes - self._content.tell()
def get_redirect_location(self):
"""
Returns (a) redirect location string if we got a redirect
status code and valid location, (b) None if redirect status and
no location, (c) False if not a redirect status code.
See https://urllib3.readthedocs.io/en/stable/reference/urllib3.response.html .
"""
if not (300 <= self.status <= 399):
return False
return self.getheader("Location")
@property
def data(self):
return self._content.getbuffer().tobytes()
def drain_conn(self):
pass
def stream(self, amt=65536, decode_content=None):
while True:
b = self._content.read(amt)
yield b
if not b:
break
class VCRConnection:
class VCRConnection(object):
# A reference to the cassette that's currently being patched in
cassette = None
@@ -188,67 +130,69 @@ class VCRConnection:
"""
Returns empty string for the default port and ':port' otherwise
"""
port = (
self.real_connection.port
if not self.real_connection._tunnel_host
else self.real_connection._tunnel_port
)
default_port = {"https": 443, "http": 80}[self._protocol]
return f":{port}" if port != default_port else ""
def _real_host(self):
"""Returns the request host"""
if self.real_connection._tunnel_host:
# The real connection is to an HTTPS proxy
return self.real_connection._tunnel_host
else:
return self.real_connection.host
port = self.real_connection.port
default_port = {'https': 443, 'http': 80}[self._protocol]
return ':{}'.format(port) if port != default_port else ''
def _uri(self, url):
"""Returns request absolute URI"""
if url and not url.startswith("/"):
# Then this must be a proxy request.
return url
uri = f"{self._protocol}://{self._real_host()}{self._port_postfix()}{url}"
log.debug("Absolute URI: %s", uri)
uri = "{}://{}{}{}".format(
self._protocol,
self.real_connection.host,
self._port_postfix(),
url,
)
return uri
def _url(self, uri):
"""Returns request selector url from absolute URI"""
prefix = f"{self._protocol}://{self._real_host()}{self._port_postfix()}"
return uri.replace(prefix, "", 1)
prefix = "{}://{}{}".format(
self._protocol,
self.real_connection.host,
self._port_postfix(),
)
return uri.replace(prefix, '', 1)
def request(self, method, url, body=None, headers=None, *args, **kwargs):
"""Persist the request metadata in self._vcr_request"""
self._vcr_request = Request(method=method, uri=self._uri(url), body=body, headers=headers or {})
log.debug(f"Got {self._vcr_request}")
'''Persist the request metadata in self._vcr_request'''
self._vcr_request = Request(
method=method,
uri=self._uri(url),
body=body,
headers=headers or {}
)
log.debug('Got {}'.format(self._vcr_request))
# Note: The request may not actually be finished at this point, so
# I'm not sending the actual request until getresponse(). This
# allows me to compare the entire length of the response to see if it
# exists in the cassette.
self._sock = VCRFakeSocket()
def putrequest(self, method, url, *args, **kwargs):
"""
httplib gives you more than one way to do it. This is a way
to start building up a request. Usually followed by a bunch
of putheader() calls.
"""
self._vcr_request = Request(method=method, uri=self._uri(url), body="", headers={})
log.debug(f"Got {self._vcr_request}")
self._vcr_request = Request(
method=method,
uri=self._uri(url),
body="",
headers={}
)
log.debug('Got {}'.format(self._vcr_request))
def putheader(self, header, *values):
self._vcr_request.headers[header] = values
def send(self, data):
"""
'''
This method is called after request(), to add additional data to the
body of the request. So if that happens, let's just append the data
onto the most recent request in the cassette.
"""
self._vcr_request.body = self._vcr_request.body + data if self._vcr_request.body else data
'''
self._vcr_request.body = self._vcr_request.body + data \
if self._vcr_request.body else data
def close(self):
# Note: the real connection will only close if it's open, so
@@ -265,28 +209,40 @@ class VCRConnection:
self._vcr_request.body = message_body
def getresponse(self, _=False, **kwargs):
"""Retrieve the response"""
'''Retrieve the response'''
# Check to see if the cassette has a response for this request. If so,
# then return it
if self.cassette.can_play_response_for(self._vcr_request):
log.info(f"Playing response for {self._vcr_request} from cassette")
log.info(
"Playing response for {} from cassette".format(
self._vcr_request
)
)
response = self.cassette.play_response(self._vcr_request)
return VCRHTTPResponse(response)
else:
if self.cassette.write_protected and self.cassette.filter_request(self._vcr_request):
if self.cassette.write_protected and self.cassette.filter_request(
self._vcr_request
):
raise CannotOverwriteExistingCassetteException(
cassette=self.cassette,
failed_request=self._vcr_request,
"No match for the request (%r) was found. "
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r)."
% (self._vcr_request, self.cassette._path,
self.cassette.record_mode)
)
# Otherwise, we should send the request, then get the response
# and return it.
log.info(f"{self._vcr_request} not in cassette, sending to real server")
log.info(
"{} not in cassette, sending to real server".format(
self._vcr_request
)
)
# This is imported here to avoid circular import.
# TODO(@IvanMalison): Refactor to allow normal import.
from vcr.patch import force_reset
with force_reset():
self.real_connection.request(
method=self._vcr_request.method,
@@ -297,13 +253,15 @@ class VCRConnection:
# get the response
response = self.real_connection.getresponse()
response_data = response.data if hasattr(response, "data") else response.read()
# put the response into the cassette
response = {
"status": {"code": response.status, "message": response.reason},
"headers": serialize_headers(response),
"body": {"string": response_data},
'status': {
'code': response.status,
'message': response.reason
},
'headers': serialize_headers(response),
'body': {'string': response.read()},
}
self.cassette.append(self._vcr_request, response)
return VCRHTTPResponse(response)
@@ -319,7 +277,8 @@ class VCRConnection:
and are not write-protected.
"""
if hasattr(self, "_vcr_request") and self.cassette.can_play_response_for(self._vcr_request):
if hasattr(self, '_vcr_request') and \
self.cassette.can_play_response_for(self._vcr_request):
# We already have a response we are going to play, don't
# actually connect
return
@@ -329,17 +288,14 @@ class VCRConnection:
return
from vcr.patch import force_reset
with force_reset():
return self.real_connection.connect(*args, **kwargs)
self._sock = VCRFakeSocket()
@property
def sock(self):
if self.real_connection.sock:
return self.real_connection.sock
return self._sock
return VCRFakeSocket()
@sock.setter
def sock(self, value):
@@ -347,22 +303,20 @@ class VCRConnection:
self.real_connection.sock = value
def __init__(self, *args, **kwargs):
kwargs.pop("strict", None) # apparently this is gone in py3
if six.PY3:
kwargs.pop('strict', None) # apparently this is gone in py3
# need to temporarily reset here because the real connection
# inherits from the thing that we are mocking out. Take out
# the reset if you want to see what I mean :)
from vcr.patch import force_reset
with force_reset():
self.real_connection = self._baseclass(*args, **kwargs)
self._sock = None
def __setattr__(self, name, value):
"""
We need to define this because any attributes that are set on the
VCRConnection need to be propagated to the real connection.
For example, urllib3 will set certain attributes on the connection,
such as 'ssl_version'. These attributes need to get set on the real
@@ -371,22 +325,26 @@ class VCRConnection:
TODO: Separately setting the attribute on the two instances is not
ideal. We should switch to a proxying implementation.
"""
with suppress(AttributeError):
try:
setattr(self.real_connection, name, value)
except AttributeError:
# raised if real_connection has not been set yet, such as when
# we're setting the real_connection itself for the first time
pass
super().__setattr__(name, value)
super(VCRConnection, self).__setattr__(name, value)
def __getattr__(self, name):
"""
Send requests for weird attributes up to the real connection
(counterpart to __setattr above)
"""
if self.__dict__.get("real_connection"):
if self.__dict__.get('real_connection'):
# check in case real_connection has not been set yet, such as when
# we're setting the real_connection itself for the first time
return getattr(self.real_connection, name)
return super().__getattr__(name)
return super(VCRConnection, self).__getattr__(name)
for k, v in HTTPConnection.__dict__.items():
@@ -395,19 +353,13 @@ for k, v in HTTPConnection.__dict__.items():
class VCRHTTPConnection(VCRConnection):
"""A Mocked class for HTTP requests"""
'''A Mocked class for HTTP requests'''
_baseclass = HTTPConnection
_protocol = "http"
debuglevel = _baseclass.debuglevel
_http_vsn = _baseclass._http_vsn
_protocol = 'http'
class VCRHTTPSConnection(VCRConnection):
"""A Mocked class for HTTPS requests"""
'''A Mocked class for HTTPS requests'''
_baseclass = HTTPSConnection
_protocol = "https"
_protocol = 'https'
is_verified = True
debuglevel = _baseclass.debuglevel
_http_vsn = _baseclass._http_vsn
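With these classes patched in, ordinary http.client code transparently records to or replays from the active cassette: getresponse() returns a VCRHTTPResponse built from the recorded interaction instead of reading from a socket. A minimal sketch with a hypothetical cassette path (under the default record mode, the first run needs network access to record):

import http.client

import vcr

with vcr.use_cassette("fixtures/httplib_example.yaml"):
    conn = http.client.HTTPSConnection("httpbin.org")
    conn.request("GET", "/get")
    resp = conn.getresponse()  # VCRHTTPResponse on replay
    print(resp.status, resp.read()[:60])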

View File

@@ -1,280 +0,0 @@
"""Stubs for aiohttp HTTP clients"""
import asyncio
import functools
import json
import logging
from collections.abc import Mapping
from http.cookies import CookieError, Morsel, SimpleCookie
from aiohttp import ClientConnectionError, ClientResponse, CookieJar, RequestInfo, hdrs, streams
from aiohttp.helpers import strip_auth_from_url
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict
from yarl import URL
from vcr.errors import CannotOverwriteExistingCassetteException
from vcr.request import Request
log = logging.getLogger(__name__)
class MockStream(asyncio.StreamReader, streams.AsyncStreamReaderMixin):
pass
class MockClientResponse(ClientResponse):
def __init__(self, method, url, request_info=None):
super().__init__(
method=method,
url=url,
writer=None,
continue100=None,
timer=None,
request_info=request_info,
traces=None,
loop=asyncio.get_event_loop(),
session=None,
)
async def json(self, *, encoding="utf-8", loads=json.loads, **kwargs):
stripped = self._body.strip()
if not stripped:
return None
return loads(stripped.decode(encoding))
async def text(self, encoding="utf-8", errors="strict"):
return self._body.decode(encoding, errors=errors)
async def read(self):
return self._body
def release(self):
pass
@property
def content(self):
s = MockStream()
s.feed_data(self._body)
s.feed_eof()
return s
def build_response(vcr_request, vcr_response, history):
request_info = RequestInfo(
url=URL(vcr_request.url),
method=vcr_request.method,
headers=_deserialize_headers(vcr_request.headers),
real_url=URL(vcr_request.url),
)
response = MockClientResponse(vcr_request.method, URL(vcr_request.url), request_info=request_info)
response.status = vcr_response["status"]["code"]
response._body = vcr_response["body"].get("string", b"")
response.reason = vcr_response["status"]["message"]
response._headers = _deserialize_headers(vcr_response["headers"])
response._history = tuple(history)
# cookies
for hdr in response.headers.getall(hdrs.SET_COOKIE, ()):
try:
cookies = SimpleCookie(hdr)
for cookie_name, cookie in cookies.items():
expires = cookie.get("expires", "").strip()
if expires:
log.debug('Ignoring expiration date: %s="%s"', cookie_name, expires)
cookie["expires"] = ""
response.cookies.load(cookie.output(header="").strip())
except CookieError as exc:
log.warning("Can not load response cookies: %s", exc)
response.close()
return response
def _serialize_headers(headers):
"""Serialize CIMultiDictProxy to a pickle-able dict because proxy
objects forbid pickling:
https://github.com/aio-libs/multidict/issues/340
"""
# Mark strings as keys so 'istr' types don't show up in
# the cassettes as comments.
serialized_headers = {}
for k, v in headers.items():
serialized_headers.setdefault(str(k), []).append(v)
return serialized_headers
def _deserialize_headers(headers):
deserialized_headers = CIMultiDict()
for k, vs in headers.items():
if isinstance(vs, list):
for v in vs:
deserialized_headers.add(k, v)
else:
deserialized_headers.add(k, vs)
return CIMultiDictProxy(deserialized_headers)
def play_responses(cassette, vcr_request, kwargs):
history = []
allow_redirects = kwargs.get("allow_redirects", True)
vcr_response = cassette.play_response(vcr_request)
response = build_response(vcr_request, vcr_response, history)
# If we're following redirects, continue playing until we reach
# our final destination.
while allow_redirects and 300 <= response.status <= 399:
if "location" not in response.headers:
break
next_url = URL(response.url).join(URL(response.headers["location"]))
# Make a stub VCR request that we can then use to look up the recorded
# VCR request saved to the cassette. This feels a little hacky and
# may have edge cases based on the headers we're providing (e.g. if
# there's a matcher that is used to filter by headers).
vcr_request = Request("GET", str(next_url), None, _serialize_headers(response.request_info.headers))
vcr_requests = cassette.find_requests_with_most_matches(vcr_request)
for vcr_request, *_ in vcr_requests:
if cassette.can_play_response_for(vcr_request):
break
# Tack on the response we saw from the redirect into the history
# list that is added on to the final response.
history.append(response)
vcr_response = cassette.play_response(vcr_request)
response = build_response(vcr_request, vcr_response, history)
return response
async def record_response(cassette, vcr_request, response):
"""Record a VCR request-response chain to the cassette."""
try:
body = {"string": (await response.read())}
# aiohttp raises a ClientConnectionError on reads when
# there is no body. We can use this to know to not write one.
except ClientConnectionError:
body = {}
vcr_response = {
"status": {"code": response.status, "message": response.reason},
"headers": _serialize_headers(response.headers),
"body": body,
}
cassette.append(vcr_request, vcr_response)
async def record_responses(cassette, vcr_request, response):
"""Because aiohttp follows redirects by default, we must support
them by default. This method is used to write individual
request-response chains that were implicitly followed to get
to the final destination.
"""
for i, past_response in enumerate(response.history):
aiohttp_request = past_response.request_info
past_request = Request(
aiohttp_request.method,
str(aiohttp_request.url),
# Record body of first request, rest are following a redirect.
None if i else vcr_request.body,
_serialize_headers(aiohttp_request.headers),
)
await record_response(cassette, past_request, past_response)
# If we're following redirects, then the last request-response
# we record is the one attached to the `response`.
if response.history:
aiohttp_request = response.request_info
vcr_request = Request(
aiohttp_request.method,
str(aiohttp_request.url),
None,
_serialize_headers(aiohttp_request.headers),
)
await record_response(cassette, vcr_request, response)
def _build_cookie_header(session, cookies, cookie_header, url):
url, _ = strip_auth_from_url(url)
all_cookies = session._cookie_jar.filter_cookies(url)
if cookies is not None:
tmp_cookie_jar = CookieJar()
tmp_cookie_jar.update_cookies(cookies)
req_cookies = tmp_cookie_jar.filter_cookies(url)
if req_cookies:
all_cookies.load(req_cookies)
if not all_cookies and not cookie_header:
return None
c = SimpleCookie()
if cookie_header:
c.load(cookie_header)
for name, value in all_cookies.items():
if isinstance(value, Morsel):
mrsl_val = value.get(value.key, Morsel())
mrsl_val.set(value.key, value.value, value.coded_value)
c[name] = mrsl_val
else:
c[name] = value
return c.output(header="", sep=";").strip()
def _build_url_with_params(url_str: str, params: Mapping[str, str | int | float]) -> URL:
# This code is basically a copy&paste of aiohttp.
# https://github.com/aio-libs/aiohttp/blob/master/aiohttp/client_reqrep.py#L225
url = URL(url_str)
q = MultiDict(url.query)
url2 = url.with_query(params)
q.extend(url2.query)
return url.with_query(q)
def vcr_request(cassette, real_request):
@functools.wraps(real_request)
async def new_request(self, method, url, **kwargs):
headers = kwargs.get("headers")
auth = kwargs.get("auth")
headers = self._prepare_headers(headers)
data = kwargs.get("data", kwargs.get("json"))
params = kwargs.get("params")
cookies = kwargs.get("cookies")
if auth is not None:
headers["AUTHORIZATION"] = auth.encode()
request_url = URL(url) if not params else _build_url_with_params(url, params)
c_header = headers.pop(hdrs.COOKIE, None)
cookie_header = _build_cookie_header(self, cookies, c_header, request_url)
if cookie_header:
headers[hdrs.COOKIE] = cookie_header
vcr_request = Request(method, str(request_url), data, _serialize_headers(headers))
if cassette.can_play_response_for(vcr_request):
log.info(f"Playing response for {vcr_request} from cassette")
response = play_responses(cassette, vcr_request, kwargs)
for redirect in response.history:
self._cookie_jar.update_cookies(redirect.cookies, redirect.url)
self._cookie_jar.update_cookies(response.cookies, response.url)
return response
if cassette.write_protected and cassette.filter_request(vcr_request):
raise CannotOverwriteExistingCassetteException(cassette=cassette, failed_request=vcr_request)
log.info("%s not in cassette, sending to real server", vcr_request)
response = await real_request(self, method, url, **kwargs)
await record_responses(cassette, vcr_request, response)
return response
return new_request
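From the caller's side nothing changes: the wrapped request method records each request/response pair (including every hop of a redirect chain) on the first run and replays it afterwards. A minimal sketch, with a hypothetical cassette path:

import asyncio

import aiohttp
import vcr


async def main():
    with vcr.use_cassette("fixtures/aiohttp_example.yaml"):
        async with aiohttp.ClientSession() as session:
            async with session.get("https://httpbin.org/get") as resp:
                print(resp.status, await resp.text())


asyncio.run(main())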

View File

@@ -0,0 +1,85 @@
'''Stubs for aiohttp HTTP clients'''
from __future__ import absolute_import
import asyncio
import functools
import json
from aiohttp import ClientResponse
from yarl import URL
from vcr.request import Request
class MockClientResponse(ClientResponse):
# TODO: get encoding from header
@asyncio.coroutine
def json(self, *, encoding='utf-8', loads=json.loads): # NOQA: E999
return loads(self.content.decode(encoding))
@asyncio.coroutine
def text(self, encoding='utf-8'):
return self.content.decode(encoding)
@asyncio.coroutine
def read(self):
return self.content
@asyncio.coroutine
def release(self):
pass
def vcr_request(cassette, real_request):
@functools.wraps(real_request)
@asyncio.coroutine
def new_request(self, method, url, **kwargs):
headers = kwargs.get('headers')
headers = self._prepare_headers(headers)
data = kwargs.get('data')
params = kwargs.get('params')
if params:
for k, v in params.items():
params[k] = str(v)
request_url = URL(url).with_query(params)
vcr_request = Request(method, str(request_url), data, headers)
if cassette.can_play_response_for(vcr_request):
vcr_response = cassette.play_response(vcr_request)
response = MockClientResponse(method, URL(vcr_response.get('url')))
response.status = vcr_response['status']['code']
response.content = vcr_response['body']['string']
response.reason = vcr_response['status']['message']
response.headers = vcr_response['headers']
response.close()
return response
if cassette.write_protected and cassette.filter_request(vcr_request):
response = MockClientResponse(method, URL(url))
response.status = 599
msg = ("No match for the request {!r} was found. Can't overwrite "
"existing cassette {!r} in your current record mode {!r}.")
msg = msg.format(vcr_request, cassette._path, cassette.record_mode)
response.content = msg.encode()
response.close()
return response
response = yield from real_request(self, method, url, **kwargs) # NOQA: E999
vcr_response = {
'status': {
'code': response.status,
'message': response.reason,
},
'headers': dict(response.headers),
'body': {'string': (yield from response.read())}, # NOQA: E999
'url': response.url,
}
cassette.append(vcr_request, vcr_response)
return response
return new_request

View File

@@ -1,10 +1,11 @@
"""Stubs for boto3"""
from botocore.awsrequest import AWSHTTPConnection as HTTPConnection
from botocore.awsrequest import AWSHTTPSConnection as VerifiedHTTPSConnection
'''Stubs for boto3'''
from botocore.vendored.requests.packages.urllib3.connectionpool import HTTPConnection, VerifiedHTTPSConnection
from ..stubs import VCRHTTPConnection, VCRHTTPSConnection
# urllib3 defines its own HTTPConnection classes, which boto3 goes ahead and assumes
# you're using. It includes some polyfills for newer features missing in older pythons.
class VCRRequestsHTTPConnection(VCRHTTPConnection, HTTPConnection):
_baseclass = HTTPConnection
@@ -12,20 +13,3 @@ class VCRRequestsHTTPConnection(VCRHTTPConnection, HTTPConnection):
class VCRRequestsHTTPSConnection(VCRHTTPSConnection, VerifiedHTTPSConnection):
_baseclass = VerifiedHTTPSConnection
def __init__(self, *args, **kwargs):
kwargs.pop("strict", None)
# need to temporarily reset here because the real connection
# inherits from the thing that we are mocking out. Take out
# the reset if you want to see what I mean :)
from vcr.patch import force_reset
with force_reset():
self.real_connection = self._baseclass(*args, **kwargs)
# Make sure to set those attributes as it seems `AWSHTTPConnection` does not
# set them, making the connection to fail !
self.real_connection.assert_hostname = kwargs.get("assert_hostname", False)
self.real_connection.cert_reqs = kwargs.get("cert_reqs", "CERT_NONE")
self._sock = None
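In use, this means ordinary boto3 calls can be recorded and replayed like any other HTTP traffic. A minimal sketch; the bucket listing, region and cassette path are hypothetical, and real AWS credentials are only needed for the initial recording run:

import boto3

import vcr

with vcr.use_cassette("fixtures/boto3_list_buckets.yaml"):
    s3 = boto3.client("s3", region_name="us-east-1")
    print(s3.list_buckets()["Buckets"])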

vcr/stubs/boto_stubs.py Normal file
View File

@@ -0,0 +1,8 @@
'''Stubs for boto'''
from boto.https_connection import CertValidatingHTTPSConnection
from ..stubs import VCRHTTPSConnection
class VCRCertValidatingHTTPSConnection(VCRHTTPSConnection):
_baseclass = CertValidatingHTTPSConnection

Some files were not shown because too many files have changed in this diff.