mirror of https://github.com/kevin1024/vcrpy.git synced 2025-12-10 09:35:34 +00:00

Compare commits


15 Commits

Author SHA1 Message Date
Sebastian Pipping
a72d81cc9d [Revert me] Experiment with Python-Ubuntu combinations 2023-05-21 23:20:21 +02:00
Sebastian Pipping
470dd96c68 [Revert me] Debug Python SSL version 2023-05-21 23:20:21 +02:00
Sebastian Pipping
612f597aa9 [Revert me] Trigger on push to any branch 2023-05-21 23:12:23 +02:00
Sebastian Pipping
47be90add8 Stop installing libgnutls28-dev 2023-05-21 23:12:23 +02:00
Sebastian Pipping
ab3d8bf7c9 test_vcr.py: Clarify that test_vcr_before_record_request_params is an offline test 2023-05-20 16:21:01 +02:00
Sebastian Pipping
ec4fb9b0b3 tox.ini: Cover both urllib3 v1 and v2 2023-05-20 16:21:01 +02:00
Sebastian Pipping
f83f83a0c4 tox.ini: Drop needless "boto3: urllib3"
boto3 depends on botocore which in turn depends on urllib3.
2023-05-20 16:21:01 +02:00
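
As an illustration of the dependency chain mentioned above, the following hypothetical check (not part of this change) confirms that urllib3 is already pulled in transitively via boto3 and botocore:

```python
# Hypothetical verification snippet, not part of this commit: confirm that
# botocore already declares urllib3 as a dependency, so listing urllib3
# explicitly for the boto3 tox environment is redundant.
from importlib.metadata import requires

urllib3_specs = [spec for spec in requires("botocore") if spec.startswith("urllib3")]
print(urllib3_specs)  # non-empty when botocore declares its urllib3 requirement
```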
Sebastian Pipping
ef2e1d895a Fix VCRHTTPResponse for requests.cookies.extract_cookies_to_jar 2023-05-20 16:21:01 +02:00
Sebastian Pipping
384d47714e Make test "test_cookies" more mean and helpful 2023-05-20 16:21:01 +02:00
Sebastian Pipping
3547ed966f Make VCRHTTPResponse interface satisfy urllib3.response.HTTPResponse 2023-05-20 16:21:01 +02:00
Sebastian Pipping
f1b921c211 Respect urllib3.response.HTTPResponse.data 2023-05-20 16:21:01 +02:00
Sebastian Pipping
ea5e20edc7 Make test_headers robust with regard to order of headers 2023-05-20 16:21:01 +02:00
Sebastian Pipping
b09c271a76 Tolerate urllib3.response.HTTPResponse.msg being None 2023-05-20 16:21:01 +02:00
Sonny V
ef7cb8cf50 build(tox.ini): revert pinning urllib to <2
In #690 a quick fix was introduced to get a green CI; this change should no longer be required.
2023-05-20 16:21:01 +02:00
Sonny V
c78b0c81e9 fix: use urllib3.connection where needed.
Since urllib3 v2, the re-export of connection.HTTPConnection in
urllib3.connectionpool has been removed.

In this commit we use urllib3.connection where needed. Some references
to connectionpool.HTTPConnection are still there for backward
compatibility.

Closes #688
2023-05-20 16:21:01 +02:00
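
The import move described in that commit message can be sketched as follows (an illustrative example only, not vcrpy's actual patch):

```python
# Illustrative only: the import path change described in the commit message.
# urllib3 v2 dropped the re-export of HTTPConnection from urllib3.connectionpool,
# while urllib3.connection works for both v1 and v2.
from urllib3.connection import HTTPConnection

# Pre-v2 spelling that relied on the re-export:
#   from urllib3.connectionpool import HTTPConnection

conn = HTTPConnection(host="example.com", port=80)  # constructing does not open a socket
```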
88 changed files with 1089 additions and 2260 deletions

View File

@@ -1,14 +0,0 @@
root = true
[*]
indent_style = space
indent_size = 4
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[Makefile]
indent_style = tab
[*.{yml,yaml}]
indent_size = 2

View File

@@ -1,22 +0,0 @@
---
name: Codespell
on:
push:
branches: [master]
pull_request:
branches: [master]
permissions:
contents: read
jobs:
codespell:
name: Check for spelling errors
runs-on: ubuntu-24.04
steps:
- name: Checkout
uses: actions/checkout@v5
- name: Codespell
uses: codespell-project/actions-codespell@v2

View File

@@ -1,23 +0,0 @@
name: Validate docs
on:
push:
paths:
- 'docs/**'
jobs:
validate:
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/setup-python@v5
with:
python-version: "3.12"
- name: Install build dependencies
run: pip install -r docs/requirements.txt
- name: Rendering HTML documentation
run: sphinx-build -b html docs/ html
- name: Inspect html rendered
run: cat html/index.html

View File

@@ -2,74 +2,58 @@ name: Test
on: on:
push: push:
branches:
- master
pull_request: pull_request:
schedule:
- cron: '0 16 * * 5' # Every Friday 4pm
workflow_dispatch: workflow_dispatch:
jobs: jobs:
build: build:
runs-on: ubuntu-24.04 runs-on: ${{ matrix.runs-on }}
strategy: strategy:
fail-fast: false fail-fast: false
matrix: matrix:
python-version: include:
- "3.9" - python-version: "3.7"
- "3.10" runs-on: ubuntu-20.04
- "3.11" - python-version: "3.8"
- "3.12" runs-on: ubuntu-20.04
- "3.13"
- "pypy-3.9"
- "pypy-3.10"
urllib3-requirement:
- "urllib3>=2"
- "urllib3<2"
exclude:
- python-version: "3.9" - python-version: "3.9"
urllib3-requirement: "urllib3>=2" runs-on: ubuntu-20.04
- python-version: "3.10"
runs-on: ubuntu-22.04
- python-version: "3.11"
runs-on: ubuntu-22.04
- python-version: "pypy-3.7"
runs-on: ubuntu-20.04
- python-version: "pypy-3.8"
runs-on: ubuntu-20.04
- python-version: "pypy-3.9" - python-version: "pypy-3.9"
urllib3-requirement: "urllib3>=2" runs-on: ubuntu-20.04
- python-version: "pypy-3.10" - python-version: "pypy-3.10"
urllib3-requirement: "urllib3>=2" runs-on: ubuntu-22.04
- python-version: "pypy-3.11"
runs-on: ubuntu-22.04
steps: steps:
- uses: actions/checkout@v5 - uses: actions/checkout@v3.5.2
- name: Install uv
uses: astral-sh/setup-uv@v6
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5 uses: actions/setup-python@v4
with: with:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
cache: pip
allow-prereleases: true
- name: Install project dependencies - name: Install project dependencies
run: | run: |
uv pip install --system --upgrade pip setuptools pip install --upgrade pip
uv pip install --system codecov '.[tests]' '${{ matrix.urllib3-requirement }}' pip install codecov tox tox-gh-actions
uv pip check
- name: Allow creation of user namespaces (e.g. to the unshare command) - name: "Debug OpenSSL version used with Python ${{ matrix.python-version }}"
run: | run: |
# .. so that we don't get error: which python
# unshare: write failed /proc/self/uid_map: Operation not permitted python --version
# Idea from https://github.com/YoYoGames/GameMaker-Bugs/issues/6015#issuecomment-2135552784 . python -c 'import ssl; print(ssl.OPENSSL_VERSION_INFO)'
sudo sysctl kernel.apparmor_restrict_unprivileged_userns=0
- name: Run online tests - name: Run tests with tox
run: ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m online run: tox
- name: Run offline tests with no access to the Internet
run: |
# We're using unshare to take Internet access
# away so that we'll notice whenever some new test
# is missing @pytest.mark.online decoration in the future
unshare --map-root-user --net -- \
sh -c 'ip link set lo up; ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m "not online"'
- name: Run coverage - name: Run coverage
run: codecov run: codecov

View File

@@ -1,62 +0,0 @@
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
# Licensed under the MIT license
name: Detect outdated pre-commit hooks
on:
schedule:
- cron: '0 16 * * 5' # Every Friday 4pm
# NOTE: This will drop all permissions from GITHUB_TOKEN except metadata read,
# and then (re)add the ones listed below:
permissions:
contents: write
pull-requests: write
jobs:
pre_commit_detect_outdated:
name: Detect outdated pre-commit hooks
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- name: Set up Python 3.12
uses: actions/setup-python@v5
with:
python-version: 3.12
- name: Install pre-commit
run: |-
pip install \
--disable-pip-version-check \
--no-warn-script-location \
--user \
pre-commit
echo "PATH=${HOME}/.local/bin:${PATH}" >> "${GITHUB_ENV}"
- name: Check for outdated hooks
run: |-
pre-commit autoupdate
git diff -- .pre-commit-config.yaml
- name: Create pull request from changes (if any)
id: create-pull-request
uses: peter-evans/create-pull-request@v7
with:
author: 'pre-commit <pre-commit@tools.invalid>'
base: master
body: |-
For your consideration.
:warning: Please **CLOSE AND RE-OPEN** this pull request so that [further workflow runs get triggered](https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs) for this pull request.
branch: precommit-autoupdate
commit-message: "pre-commit: Autoupdate"
delete-branch: true
draft: true
labels: enhancement
title: "pre-commit: Autoupdate"
- name: Log pull request URL
if: "${{ steps.create-pull-request.outputs.pull-request-url }}"
run: |
echo "Pull request URL is: ${{ steps.create-pull-request.outputs.pull-request-url }}"

View File

@@ -1,20 +0,0 @@
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
# Licensed under the MIT license
name: Run pre-commit
on:
- pull_request
- push
- workflow_dispatch
jobs:
pre-commit:
name: Run pre-commit
runs-on: ubuntu-24.04
steps:
- uses: actions/checkout@v5
- uses: actions/setup-python@v5
with:
python-version: 3.12
- uses: pre-commit/action@v3.0.1

View File

@@ -1,17 +0,0 @@
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
# Licensed under the MIT license
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.12.7
hooks:
- id: ruff
args: ["--output-format=full"]
- id: ruff-format
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-merge-conflict
- id: end-of-file-fixer
- id: trailing-whitespace

View File

@@ -1,24 +0,0 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
# Required
version: 2
# Set the version of Python and other tools you might need
build:
os: ubuntu-24.04
tools:
python: "3.12"
# Build documentation in the docs/ directory with Sphinx
sphinx:
configuration: docs/conf.py
# We recommend specifying your dependencies to enable reproducible builds:
# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
install:
- requirements: docs/requirements.txt
- method: pip
path: .

View File

@@ -1,5 +1,6 @@
 include README.rst
 include LICENSE.txt
+include tox.ini
 recursive-include tests *
 recursive-exclude * __pycache__
 recursive-exclude * *.py[co]

View File

@@ -4,7 +4,7 @@ VCR.py 📼
 ###########

-|PyPI| |Python versions| |Build Status| |CodeCov| |Gitter|
+|PyPI| |Python versions| |Build Status| |CodeCov| |Gitter| |CodeStyleBlack|

 ----
@@ -70,3 +70,6 @@ more details
 .. |CodeCov| image:: https://codecov.io/gh/kevin1024/vcrpy/branch/master/graph/badge.svg
    :target: https://codecov.io/gh/kevin1024/vcrpy
    :alt: Code Coverage Status
+.. |CodeStyleBlack| image:: https://img.shields.io/badge/code%20style-black-000000.svg
+   :target: https://github.com/psf/black
+   :alt: Code Style: black

View File

@@ -71,7 +71,7 @@ Finally, register your class with VCR to use your new serializer.
     import vcr

-    class BogoSerializer:
+    class BogoSerializer(object):
         """
         Must implement serialize() and deserialize() methods
         """
@@ -136,8 +136,7 @@ Create your own persistence class, see the example below:
 Your custom persister must implement both ``load_cassette`` and ``save_cassette``
 methods. The ``load_cassette`` method must return a deserialized cassette or raise
-either ``CassetteNotFoundError`` if no cassette is found, or ``CassetteDecodeError``
-if the cassette cannot be successfully deserialized.
+``ValueError`` if no cassette is found.

 Once the persister class is defined, register with VCR like so...
@@ -189,7 +188,7 @@ of post data parameters to filter.
 .. code:: python

-    with my_vcr.use_cassette('test.yml', filter_post_data_parameters=['api_key']):
+    with my_vcr.use_cassette('test.yml', filter_post_data_parameters=['client_secret']):
         requests.post('http://api.com/postdata', data={'api_key': 'secretstring'})

 Advanced use of filter_headers, filter_query_parameters and filter_post_data_parameters
@@ -427,16 +426,3 @@ If you want to save the cassette only when the test succeeds, set the Cassette
     # Since there was an exception, the cassette file hasn't been created.
     assert not os.path.exists('fixtures/vcr_cassettes/synopsis.yaml')

-Drop unused requests
---------------------
-
-Even if any HTTP request is changed or removed from tests, previously recorded
-interactions remain in the cassette file. If set the ``drop_unused_requests``
-option to ``True``, VCR will not save old HTTP interactions if they are not used.
-
-.. code:: python
-
-    my_vcr = VCR(drop_unused_requests=True)
-    with my_vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
-        ... # your HTTP interactions here

View File

@@ -7,66 +7,6 @@ For a full list of triaged issues, bugs and PRs and what release they are target
All help in providing PRs to close out bug issues is appreciated. Even if that is providing a repo that fully replicates issues. We have very generous contributors that have added these to bug issues which meant another contributor picked up the bug and closed it out.
- 7.0.0
- Drop support for python 3.8 (major version bump) - thanks @jairhenrique
- Various linting and test fixes - thanks @jairhenrique
- Bugfix for urllib3>=2.3.0 - missing version_string (#888)
- Bugfix for asyncio.run - thanks @alekeik1
- 6.0.2
- Ensure body is consumed only once (#846) - thanks @sathieu
- Permit urllib3 2.x for non-PyPy Python >=3.10
- Fix typos in test commands - thanks @chuckwondo
- Several test and workflow improvements - thanks @hartwork and @graingert
- 6.0.1
- Bugfix with to Tornado cassette generator (thanks @graingert)
- 6.0.0
- BREAKING: Fix issue with httpx support (thanks @parkerhancock) in #784. NOTE: You may have to recreate some of your cassettes produced in previous releases due to the binary format being saved incorrectly in previous releases
- BREAKING: Drop support for `boto` (vcrpy still supports boto3, but is dropping the deprecated `boto` support in this release. (thanks @jairhenrique)
- Fix compatibility issue with Python 3.12 (thanks @hartwork)
- Drop simplejson (fixes some compatibility issues) (thanks @jairhenrique)
- Run CI on Python 3.12 and PyPy 3.9-3.10 (thanks @mgorny)
- Various linting and docs improvements (thanks @jairhenrique)
- Tornado fixes (thanks @graingert)
- 5.1.0
- Use ruff for linting (instead of current flake8/isort/pyflakes) - thanks @jairhenrique
- Enable rule B (flake8-bugbear) on ruff - thanks @jairhenrique
- Configure read the docs V2 - thanks @jairhenrique
- Fix typo in docs - thanks @quasimik
- Make json.loads of Python >=3.6 decode bytes by itself - thanks @hartwork
- Fix body matcher for chunked requests (fixes #734) - thanks @hartwork
- Fix query param filter for aiohttp (fixes #517) - thanks @hartwork and @salomvary
- Remove unnecessary dependency on six. - thanks @charettes
- build(deps): update sphinx requirement from <7 to <8 - thanks @jairhenrique
- Add action to validate docs - thanks @jairhenrique
- Add editorconfig file - thanks @jairhenrique
- Drop iscoroutinefunction fallback function for unsupported python thanks @jairhenrique
- 5.0.0
- BREAKING CHANGE: Drop support for Python 3.7. 3.7 is EOL as of 6/27/23 Thanks @jairhenrique
- BREAKING CHANGE: Custom Cassette persisters no longer catch ValueError. If you have implemented a custom persister (has anyone implemented a custom persister? Let us know!) then you will need to throw a CassetteNotFoundError when unable to find a cassette. See #681 for discussion and reason for this change. Thanks @amosjyng for the PR and the review from @hartwork
- 4.4.0
- HUGE thanks to @hartwork for all the work done on this release!
- Bring vcr/unittest in to vcrpy as a full feature of vcr instead of a separate library. Big thanks to @hartwork for doing this and to @agriffis for originally creating the library
- Make decompression robust towards already decompressed input (thanks @hartwork)
- Bugfix: Add read1 method (fixes compatibility with biopython), thanks @mghantous
- Bugfix: Prevent filters from corrupting request (thanks @abramclark)
- Bugfix: Add support for `response.raw.stream()` to fix urllib v2 compat
- Bugfix: Replace `assert` with `raise AssertionError`: fixes support for `PYTHONOPTIMIZE=1`
- Add pytest.mark.online to run test suite offline, thanks @jspricke
- use python3 and pip3 binaries to ease debian packaging (thanks @hartwork)
- Add codespell (thanks @mghantous)
- 4.3.1
- Support urllib3 v1 and v2. NOTE: there is an issue running urllib3 v2 on
Python older than 3.10, so this is currently blocked in the requirements.
Hopefully we can resolve this situation in the future. Thanks to @shifqu,
hartwork, jairhenrique, pquentin, and vEpiphyte for your work on this.
- 4.3.0
- Add support for Python 3.11 (Thanks @evgeni)
- Drop support for botocore <1.11.0 and requests <2.16.2 (thanks @hartwork)
- Bugfix: decode_compressed_response raises exception on empty responses. Thanks @CharString
- Don't save requests from decorated tests if decorated test fails (thanks @dan-passaro)
- Fix not calling all the exit stack when record_on_exception is False (thanks @Terseus)
- Various CI, documentation, testing, and formatting improvements (Thanks @jairhenrique, @dan-passaro, @hartwork, and Terseus)
- 4.2.1
- Fix a bug where the first request in a redirect chain was not being recorded with aiohttp
- Various typos and small fixes, thanks @jairhenrique, @timgates42
@@ -307,3 +247,4 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
- Add support for requests / urllib3
- 0.0.1
- Initial Release

View File

@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
 #
 # vcrpy documentation build configuration file, created by
 # sphinx-quickstart on Sun Sep 13 11:18:00 2015.
@@ -316,5 +317,5 @@ texinfo_documents = [
 # Example configuration for intersphinx: refer to the Python standard library.
-intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
+intersphinx_mapping = {"https://docs.python.org/": None}

 html_theme = "alabaster"

View File

@@ -74,7 +74,7 @@ The PR reviewer is a second set of eyes to see if:
**Release Manager:** **Release Manager:**
- Ensure CI is passing. - Ensure CI is passing.
- Create a release on github and tag it with the changelog release notes. - Create a release on github and tag it with the changelog release notes.
- ``python3 setup.py build sdist bdist_wheel`` - ``python setup.py build sdist bdist_wheel``
- ``twine upload dist/*`` - ``twine upload dist/*``
- Go to ReadTheDocs build page and trigger a build https://readthedocs.org/projects/vcrpy/builds/ - Go to ReadTheDocs build page and trigger a build https://readthedocs.org/projects/vcrpy/builds/
@@ -83,21 +83,39 @@ The PR reviewer is a second set of eyes to see if:
Running VCR's test suite Running VCR's test suite
------------------------ ------------------------
The tests are all run automatically on `Github Actions CI <https://github.com/kevin1024/vcrpy/actions>`__, The tests are all run automatically on `Travis
but you can also run them yourself using `pytest <http://pytest.org/>`__. CI <https://travis-ci.org/kevin1024/vcrpy>`__, but you can also run them
yourself using `pytest <http://pytest.org/>`__ and
`Tox <http://tox.testrun.org/>`__.
In order for the boto3 tests to run, you will need an AWS key. Tox will automatically run them in all environments VCR.py supports if they are available on your `PATH`. Alternatively you can use `tox-pyenv <https://pypi.org/project/tox-pyenv/>`_ with
Refer to the `boto3 `pyenv <https://github.com/pyenv/pyenv>`_.
documentation <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/index.html>`__ We recommend you read the documentation for each and see the section further below.
for how to set this up. I have marked the boto3 tests as optional in
The test suite is pretty big and slow, but you can tell tox to only run specific tests like this::
tox -e {pyNN}-{HTTP_LIBRARY} -- <pytest flags passed through>
tox -e py37-requests -- -v -k "'test_status_code or test_gzip'"
tox -e py37-requests -- -v --last-failed
This will run only tests that look like ``test_status_code`` or
``test_gzip`` in the test suite, and only in the python 3.7 environment
that has ``requests`` installed.
Also, in order for the boto tests to run, you will need an AWS key.
Refer to the `boto
documentation <https://boto.readthedocs.io/en/latest/getting_started.html>`__
for how to set this up. I have marked the boto tests as optional in
Travis so you don't have to worry about them failing if you submit a Travis so you don't have to worry about them failing if you submit a
pull request. pull request.
Using Pyenv with VCR's test suite Using PyEnv with VCR's test suite
--------------------------------- ---------------------------------
Pyenv is a tool for managing multiple installation of python on your system. PyEnv is a tool for managing multiple installation of python on your system.
See the full documentation at their `github <https://github.com/pyenv/pyenv>`_ See the full documentation at their `github <https://github.com/pyenv/pyenv>`_
but we are also going to use `tox-pyenv <https://pypi.org/project/tox-pyenv/>`_
in this example:: in this example::
git clone https://github.com/pyenv/pyenv ~/.pyenv git clone https://github.com/pyenv/pyenv ~/.pyenv
@@ -108,21 +126,27 @@ in this example::
# Setup shim paths # Setup shim paths
eval "$(pyenv init -)" eval "$(pyenv init -)"
# Setup your local system tox tooling
pip install tox tox-pyenv
# Install supported versions (at time of writing), this does not activate them # Install supported versions (at time of writing), this does not activate them
pyenv install 3.12.0 pypy3.10 pyenv install 3.7.5 3.8.0 pypy3.8
# This activates them # This activates them
pyenv local 3.12.0 pypy3.10 pyenv local 3.7.5 3.8.0 pypy3.8
# Run the whole test suite # Run the whole test suite
pip install .[tests] tox
./runtests.sh
# Run the whole test suite or just part of it
tox -e lint
tox -e py37-requests
Troubleshooting on MacOSX Troubleshooting on MacOSX
------------------------- -------------------------
If you have this kind of error when running tests : If you have this kind of error when running tox :
.. code:: python .. code:: python

View File

@@ -4,20 +4,21 @@ Installation
 VCR.py is a package on `PyPI <https://pypi.python.org>`__, so you can install
 with pip::

-    pip3 install vcrpy
+    pip install vcrpy

 Compatibility
 -------------

-VCR.py supports Python 3.9+, and `pypy <http://pypy.org>`__.
+VCR.py supports Python 3.7+, and `pypy <http://pypy.org>`__.

 The following HTTP libraries are supported:

 - ``aiohttp``
+- ``boto``
 - ``boto3``
 - ``http.client``
 - ``httplib2``
-- ``requests`` (>=2.16.2 versions)
+- ``requests`` (both 1.x and 2.x versions)
 - ``tornado.httpclient``
 - ``urllib2``
 - ``urllib3``
@@ -34,7 +35,7 @@ rebuilding pyyaml.
 1. Test if pyyaml is built with libyaml. This should work::

-       python3 -c 'from yaml import CLoader'
+       python -c 'from yaml import CLoader'

 2. Install libyaml according to your Linux distribution, or using `Homebrew
    <http://mxcl.github.com/homebrew/>`__ on Mac::
@@ -45,8 +46,8 @@ rebuilding pyyaml.
 3. Rebuild pyyaml with libyaml::

-       pip3 uninstall pyyaml
-       pip3 --no-cache-dir install pyyaml
+       pip uninstall pyyaml
+       pip --no-cache-dir install pyyaml

 Upgrade
 -------
@@ -60,7 +61,7 @@ is to simply delete your cassettes and re-record all of them. VCR.py
 also provides a migration script that attempts to upgrade your 0.x
 cassettes to the new 1.x format. To use it, run the following command::

-    python3 -m vcr.migration PATH
+    python -m vcr.migration PATH

 The PATH can be either a path to the directory with cassettes or the
 path to a single cassette.

View File

@@ -1,2 +0,0 @@
sphinx<9
sphinx_rtd_theme==3.0.2

View File

@@ -4,11 +4,11 @@ Usage
 .. code:: python

     import vcr
-    import urllib.request
+    import urllib

     with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
         response = urllib.request.urlopen('http://www.iana.org/domains/reserved').read()
-        assert b'Example domains' in response
+        assert 'Example domains' in response

 Run this test once, and VCR.py will record the HTTP request to
 ``fixtures/vcr_cassettes/synopsis.yaml``. Run it again, and VCR.py will
@@ -26,7 +26,7 @@ look like this:
     @vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml')
     def test_iana():
         response = urllib.request.urlopen('http://www.iana.org/domains/reserved').read()
-        assert b'Example domains' in response
+        assert 'Example domains' in response

 When using the decorator version of ``use_cassette``, it is possible to
 omit the path to the cassette file.
@@ -36,7 +36,7 @@ omit the path to the cassette file.
     @vcr.use_cassette()
     def test_iana():
         response = urllib.request.urlopen('http://www.iana.org/domains/reserved').read()
-        assert b'Example domains' in response
+        assert 'Example domains' in response

 In this case, the cassette file will be given the same name as the test
 function, and it will be placed in the same directory as the file in
@@ -92,73 +92,9 @@ all
Unittest Integration Unittest Integration
-------------------- --------------------
Inherit from ``VCRTestCase`` for automatic recording and playback of HTTP While it's possible to use the context manager or decorator forms with unittest,
interactions. there's also a ``VCRTestCase`` provided separately by `vcrpy-unittest
<https://github.com/agriffis/vcrpy-unittest>`__.
.. code:: python
from vcr.unittest import VCRTestCase
import requests
class MyTestCase(VCRTestCase):
def test_something(self):
response = requests.get('http://example.com')
Similar to how VCR.py returns the cassette from the context manager,
``VCRTestCase`` makes the cassette available as ``self.cassette``:
.. code:: python
self.assertEqual(len(self.cassette), 1)
self.assertEqual(self.cassette.requests[0].uri, 'http://example.com')
By default cassettes will be placed in the ``cassettes`` subdirectory next to the
test, named according to the test class and method. For example, the above test
would read from and write to ``cassettes/MyTestCase.test_something.yaml``
The configuration can be modified by overriding methods on your subclass:
``_get_vcr_kwargs``, ``_get_cassette_library_dir`` and ``_get_cassette_name``.
To modify the ``VCR`` object after instantiation, for example to add a matcher,
you can hook on ``_get_vcr``, for example:
.. code:: python
class MyTestCase(VCRTestCase):
def _get_vcr(self, **kwargs):
myvcr = super(MyTestCase, self)._get_vcr(**kwargs)
myvcr.register_matcher('mymatcher', mymatcher)
myvcr.match_on = ['mymatcher']
return myvcr
See
`the source
<https://github.com/kevin1024/vcrpy/blob/master/vcr/unittest.py>`__
for the default implementations of these methods.
If you implement a ``setUp`` method on your test class then make sure to call
the parent version ``super().setUp()`` in your own in order to continue getting
the cassettes produced.
VCRMixin
~~~~~~~~
In case inheriting from ``VCRTestCase`` is difficult because of an existing
class hierarchy containing tests in the base classes, inherit from ``VCRMixin``
instead.
.. code:: python
from vcr.unittest import VCRMixin
import requests
import unittest
class MyTestMixin(VCRMixin):
def test_something(self):
response = requests.get(self.url)
class MyTestCase(MyTestMixin, unittest.TestCase):
url = 'http://example.com'
Pytest Integration Pytest Integration
------------------ ------------------

View File

@@ -1,30 +1,9 @@
-[tool.codespell]
-skip = '.git,*.pdf,*.svg,.tox'
-ignore-regex = "\\\\[fnrstv]"
-
-[tool.pytest.ini_options]
-addopts = ["--strict-config", "--strict-markers"]
-asyncio_default_fixture_loop_scope = "function"
-markers = ["online"]
-
-[tool.ruff]
-line-length = 110
-target-version = "py39"
-
-[tool.ruff.lint]
-select = [
-    "B",  # flake8-bugbear
-    "C4",  # flake8-comprehensions
-    "COM",  # flake8-commas
-    "E",  # pycodestyle error
-    "F",  # pyflakes
-    "I",  # isort
-    "ISC",  # flake8-implicit-str-concat
-    "PIE",  # flake8-pie
-    "RUF",  # Ruff-specific rules
-    "UP",  # pyupgrade
-    "W",  # pycodestyle warning
-]
-
-[tool.ruff.lint.isort]
-known-first-party = ["vcr"]
+[tool.black]
+line-length=110
+
+[tool.isort]
+line_length = 110
+known_first_party = "vcrpy"
+multi_line_output = 3
+use_parentheses = true
+include_trailing_comma = true

View File

@@ -1,5 +1,7 @@
 #!/bin/bash
-# If you are getting an INVOCATION ERROR for this script then there is a good chance you are running on Windows.
-# You can and should use WSL for running tests on Windows when it calls bash scripts.
-REQUESTS_CA_BUNDLE=`python3 -m pytest_httpbin.certs` exec pytest "$@"
+# https://blog.ionelmc.ro/2015/04/14/tox-tricks-and-patterns/#when-it-inevitably-leads-to-shell-scripts
+# If you are getting an INVOCATION ERROR for this script then there is
+# a good chance you are running on Windows.
+# You can and should use WSL for running tox on Windows when it calls bash scripts.
+REQUESTS_CA_BUNDLE=`python -m pytest_httpbin.certs` pytest $*

View File

@@ -3,10 +3,12 @@
 import codecs
 import os
 import re
+import sys

 from setuptools import find_packages, setup
+from setuptools.command.test import test as TestCommand

-long_description = open("README.rst").read()
+long_description = open("README.rst", "r").read()

 here = os.path.abspath(os.path.dirname(__file__))
@@ -26,48 +28,27 @@ def find_version(*file_paths):
     raise RuntimeError("Unable to find version string.")


+class PyTest(TestCommand):
+    def finalize_options(self):
+        TestCommand.finalize_options(self)
+        self.test_args = []
+        self.test_suite = True
+
+    def run_tests(self):
+        # import here, cause outside the eggs aren't loaded
+        import pytest
+
+        errno = pytest.main(self.test_args)
+        sys.exit(errno)
+
+
 install_requires = [
     "PyYAML",
     "wrapt",
+    "six>=1.5",
     "yarl",
-    # Support for urllib3 >=2 needs CPython >=3.10
-    # so we need to block urllib3 >=2 for Python <3.10 and PyPy for now.
-    # Note that vcrpy would work fine without any urllib3 around,
-    # so this block and the dependency can be dropped at some point
-    # in the future. For more Details:
-    # https://github.com/kevin1024/vcrpy/pull/699#issuecomment-1551439663
-    "urllib3 <2; python_version <'3.10'",
-    # https://github.com/kevin1024/vcrpy/pull/775#issuecomment-1847849962
-    "urllib3 <2; platform_python_implementation =='PyPy'",
-    # Workaround for Poetry with CPython >= 3.10, problem description at:
-    # https://github.com/kevin1024/vcrpy/pull/826
-    "urllib3; platform_python_implementation !='PyPy' and python_version >='3.10'",
 ]

-extras_require = {
-    "tests": [
-        "aiohttp",
-        "boto3",
-        "httplib2",
-        "httpx",
-        "pytest-aiohttp",
-        "pytest-asyncio",
-        "pytest-cov",
-        "pytest-httpbin",
-        "pytest",
-        "requests>=2.22.0",
-        "tornado",
-        "urllib3",
-        # Needed to un-break httpbin 0.7.0. For httpbin >=0.7.1 and after,
-        # this pin and the dependency itself can be removed, provided
-        # that the related bug in httpbin has been fixed:
-        # https://github.com/kevin1024/vcrpy/issues/645#issuecomment-1562489489
-        # https://github.com/postmanlabs/httpbin/issues/673
-        # https://github.com/postmanlabs/httpbin/pull/674
-        "Werkzeug==2.0.3",
-    ],
-}
-
 setup(
     name="vcrpy",
     version=find_version("vcr", "__init__.py"),
@@ -78,22 +59,21 @@ setup(
     author_email="me@kevinmccarthy.org",
     url="https://github.com/kevin1024/vcrpy",
     packages=find_packages(exclude=["tests*"]),
-    python_requires=">=3.9",
+    python_requires=">=3.7",
     install_requires=install_requires,
     license="MIT",
-    extras_require=extras_require,
-    tests_require=extras_require["tests"],
+    tests_require=["pytest", "mock", "pytest-httpbin"],
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Environment :: Console",
         "Intended Audience :: Developers",
         "Programming Language :: Python",
         "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.7",
+        "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.11",
-        "Programming Language :: Python :: 3.12",
-        "Programming Language :: Python :: 3.13",
         "Programming Language :: Python :: 3 :: Only",
         "Programming Language :: Python :: Implementation :: CPython",
         "Programming Language :: Python :: Implementation :: PyPy",

View File

View File

@@ -11,12 +11,9 @@ def assert_cassette_has_one_response(cass):
     assert cass.play_count == 1


-def assert_is_json_bytes(b: bytes):
-    assert isinstance(b, bytes)
+def assert_is_json(a_string):
     try:
-        json.loads(b)
-    except Exception as error:
-        raise AssertionError() from error
+        json.loads(a_string.decode("utf-8"))
+    except Exception:
+        assert False

     assert True

View File

@@ -5,24 +5,24 @@ import aiohttp
 async def aiohttp_request(loop, method, url, output="text", encoding="utf-8", content_type=None, **kwargs):
-    async with aiohttp.ClientSession(loop=loop) as session:
+    session = aiohttp.ClientSession(loop=loop)
     response_ctx = session.request(method, url, **kwargs)

     response = await response_ctx.__aenter__()
     if output == "text":
         content = await response.text()
     elif output == "json":
         content_type = content_type or "application/json"
         content = await response.json(encoding=encoding, content_type=content_type)
     elif output == "raw":
         content = await response.read()
     elif output == "stream":
         content = await response.content.read()
     response_ctx._resp.close()
     await session.close()

     return response, content


 def aiohttp_app():

View File

@@ -1,41 +0,0 @@
interactions:
- request:
body: ''
headers:
accept:
- '*/*'
accept-encoding:
- gzip, deflate, br
connection:
- keep-alive
host:
- httpbin.org
user-agent:
- python-httpx/0.23.0
method: GET
uri: https://httpbin.org/gzip
response:
content: "{\n \"gzipped\": true, \n \"headers\": {\n \"Accept\": \"*/*\",
\n \"Accept-Encoding\": \"gzip, deflate, br\", \n \"Host\": \"httpbin.org\",
\n \"User-Agent\": \"python-httpx/0.23.0\", \n \"X-Amzn-Trace-Id\": \"Root=1-62a62a8d-5f39b5c50c744da821d6ea99\"\n
\ }, \n \"method\": \"GET\", \n \"origin\": \"146.200.25.115\"\n}\n"
headers:
Access-Control-Allow-Credentials:
- 'true'
Access-Control-Allow-Origin:
- '*'
Connection:
- keep-alive
Content-Encoding:
- gzip
Content-Length:
- '230'
Content-Type:
- application/json
Date:
- Sun, 12 Jun 2022 18:03:57 GMT
Server:
- gunicorn/19.9.0
http_version: HTTP/1.1
status_code: 200
version: 1

View File

@@ -1,42 +0,0 @@
interactions:
- request:
body: null
headers:
Accept:
- '*/*'
Accept-Encoding:
- gzip, deflate, br
Connection:
- keep-alive
User-Agent:
- python-requests/2.28.0
method: GET
uri: https://httpbin.org/gzip
response:
body:
string: !!binary |
H4sIAKwrpmIA/z2OSwrCMBCG956izLIkfQSxkl2RogfQA9R2bIM1iUkqaOndnYDIrGa+/zELDB9l
LfYgg5uRwYhtj86DXKDuOrQBJKR5Cuy38kZ3pld6oHu0sqTH29QGZMnVkepgtMYuKKNJcEe0vJ3U
C4mcjI9hpaiygqaUW7ETFYGLR8frAXXE9h1Go7nD54w++FxkYp8VsDJ4IBH6E47NmVzGqUHFkn8g
rJsvp2omYs8AAAA=
headers:
Access-Control-Allow-Credentials:
- 'true'
Access-Control-Allow-Origin:
- '*'
Connection:
- Close
Content-Encoding:
- gzip
Content-Length:
- '182'
Content-Type:
- application/json
Date:
- Sun, 12 Jun 2022 18:08:44 GMT
Server:
- Pytest-HTTPBIN/0.1.0
status:
code: 200
message: great
version: 1

View File

@@ -0,0 +1,17 @@
import pytest


@pytest.fixture(params=["https", "http"])
def scheme(request):
    """Fixture that returns both http and https."""
    return request.param


@pytest.fixture
def mockbin(scheme):
    return scheme + "://mockbin.org"


@pytest.fixture
def mockbin_request_url(mockbin):
    return mockbin + "/request"

View File

@@ -1,27 +1,22 @@
import contextlib
import logging import logging
import ssl
import urllib.parse import urllib.parse
import pytest import pytest
import pytest_httpbin.certs
import yarl
import vcr
asyncio = pytest.importorskip("asyncio") asyncio = pytest.importorskip("asyncio")
aiohttp = pytest.importorskip("aiohttp") aiohttp = pytest.importorskip("aiohttp")
import vcr # noqa: E402
from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402 from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402
HTTPBIN_SSL_CONTEXT = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
def run_in_loop(fn): def run_in_loop(fn):
async def wrapper(): with contextlib.closing(asyncio.new_event_loop()) as loop:
return await fn(asyncio.get_running_loop()) asyncio.set_event_loop(loop)
task = loop.create_task(fn(loop))
return asyncio.run(wrapper()) return loop.run_until_complete(task)
def request(method, url, output="text", **kwargs): def request(method, url, output="text", **kwargs):
@@ -39,9 +34,8 @@ def post(url, output="text", **kwargs):
return request("POST", url, output="text", **kwargs) return request("POST", url, output="text", **kwargs)
@pytest.mark.online def test_status(tmpdir, mockbin_request_url):
def test_status(tmpdir, httpbin): url = mockbin_request_url
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("status.yaml"))): with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
response, _ = get(url) response, _ = get(url)
@@ -52,10 +46,9 @@ def test_status(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online
@pytest.mark.parametrize("auth", [None, aiohttp.BasicAuth("vcrpy", "test")]) @pytest.mark.parametrize("auth", [None, aiohttp.BasicAuth("vcrpy", "test")])
def test_headers(tmpdir, auth, httpbin): def test_headers(tmpdir, auth, mockbin_request_url):
url = httpbin.url url = mockbin_request_url
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))): with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
response, _ = get(url, auth=auth) response, _ = get(url, auth=auth)
@@ -64,15 +57,14 @@ def test_headers(tmpdir, auth, httpbin):
request = cassette.requests[0] request = cassette.requests[0]
assert "AUTHORIZATION" in request.headers assert "AUTHORIZATION" in request.headers
cassette_response, _ = get(url, auth=auth) cassette_response, _ = get(url, auth=auth)
assert cassette_response.headers.items() == response.headers.items() assert dict(cassette_response.headers) == dict(response.headers)
assert cassette.play_count == 1 assert cassette.play_count == 1
assert "istr" not in cassette.data[0] assert "istr" not in cassette.data[0]
assert "yarl.URL" not in cassette.data[0] assert "yarl.URL" not in cassette.data[0]
@pytest.mark.online def test_case_insensitive_headers(tmpdir, mockbin_request_url):
def test_case_insensitive_headers(tmpdir, httpbin): url = mockbin_request_url
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))): with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
_, _ = get(url) _, _ = get(url)
@@ -84,9 +76,8 @@ def test_case_insensitive_headers(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_text(tmpdir, mockbin_request_url):
def test_text(tmpdir, httpbin): url = mockbin_request_url
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("text.yaml"))): with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
_, response_text = get(url) _, response_text = get(url)
@@ -97,9 +88,8 @@ def test_text(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_json(tmpdir, mockbin_request_url):
def test_json(tmpdir, httpbin): url = mockbin_request_url
url = httpbin.url + "/json"
headers = {"Content-Type": "application/json"} headers = {"Content-Type": "application/json"}
with vcr.use_cassette(str(tmpdir.join("json.yaml"))): with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
@@ -111,9 +101,8 @@ def test_json(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_binary(tmpdir, mockbin_request_url):
def test_binary(tmpdir, httpbin): url = mockbin_request_url + "/image/png"
url = httpbin.url + "/image/png"
with vcr.use_cassette(str(tmpdir.join("binary.yaml"))): with vcr.use_cassette(str(tmpdir.join("binary.yaml"))):
_, response_binary = get(url, output="raw") _, response_binary = get(url, output="raw")
@@ -123,9 +112,8 @@ def test_binary(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_stream(tmpdir, mockbin_request_url):
def test_stream(tmpdir, httpbin): url = mockbin_request_url
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("stream.yaml"))): with vcr.use_cassette(str(tmpdir.join("stream.yaml"))):
_, body = get(url, output="raw") # Do not use stream here, as the stream is exhausted by vcr _, body = get(url, output="raw") # Do not use stream here, as the stream is exhausted by vcr
@@ -136,12 +124,11 @@ def test_stream(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online
@pytest.mark.parametrize("body", ["data", "json"]) @pytest.mark.parametrize("body", ["data", "json"])
def test_post(tmpdir, body, caplog, httpbin): def test_post(tmpdir, body, caplog, mockbin_request_url):
caplog.set_level(logging.INFO) caplog.set_level(logging.INFO)
data = {"key1": "value1", "key2": "value2"} data = {"key1": "value1", "key2": "value2"}
url = httpbin.url url = mockbin_request_url
with vcr.use_cassette(str(tmpdir.join("post.yaml"))): with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
_, response_json = post(url, **{body: data}) _, response_json = post(url, **{body: data})
@@ -156,21 +143,20 @@ def test_post(tmpdir, body, caplog, httpbin):
( (
log log
for log in caplog.records for log in caplog.records
if log.getMessage() == f"<Request (POST) {url}> not in cassette, sending to real server" if log.getMessage() == "<Request (POST) {}> not in cassette, sending to real server".format(url)
), ),
None, None,
), "Log message not found." ), "Log message not found."
@pytest.mark.online def test_params(tmpdir, mockbin_request_url):
def test_params(tmpdir, httpbin): url = mockbin_request_url + "?d=d"
url = httpbin.url + "/get?d=d"
headers = {"Content-Type": "application/json"} headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": 2, "c": "c"} params = {"a": 1, "b": 2, "c": "c"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette: with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, response_json = get(url, output="json", params=params, headers=headers) _, response_json = get(url, output="json", params=params, headers=headers)
assert response_json["args"] == {"a": "1", "b": "2", "c": "c", "d": "d"} assert response_json["queryString"] == {"a": "1", "b": "2", "c": "c", "d": "d"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette: with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
_, cassette_response_json = get(url, output="json", params=params, headers=headers) _, cassette_response_json = get(url, output="json", params=params, headers=headers)
@@ -178,9 +164,8 @@ def test_params(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_params_same_url_distinct_params(tmpdir, mockbin_request_url):
def test_params_same_url_distinct_params(tmpdir, httpbin): url = mockbin_request_url
url = httpbin.url + "/json"
headers = {"Content-Type": "application/json"} headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": 2, "c": "c"} params = {"a": 1, "b": 2, "c": "c"}
@@ -198,9 +183,8 @@ def test_params_same_url_distinct_params(tmpdir, httpbin):
get(url, output="text", params=other_params) get(url, output="text", params=other_params)
@pytest.mark.online def test_params_on_url(tmpdir, mockbin_request_url):
def test_params_on_url(tmpdir, httpbin): url = mockbin_request_url + "?a=1&b=foo"
url = httpbin.url + "/get?a=1&b=foo"
headers = {"Content-Type": "application/json"} headers = {"Content-Type": "application/json"}
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette: with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
@@ -264,15 +248,8 @@ def test_aiohttp_test_client_json(aiohttp_client, tmpdir):
assert cassette.play_count == 1 assert cassette.play_count == 1
def test_cleanup_from_pytest_asyncio(): def test_redirect(tmpdir, mockbin):
# work around https://github.com/pytest-dev/pytest-asyncio/issues/724 url = mockbin + "/redirect/302/2"
asyncio.get_event_loop().close()
asyncio.set_event_loop(None)
@pytest.mark.online
def test_redirect(tmpdir, httpbin):
url = httpbin.url + "/redirect/2"
with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))): with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))):
response, _ = get(url) response, _ = get(url)
@@ -289,14 +266,15 @@ def test_redirect(tmpdir, httpbin):
# looking request_info. # looking request_info.
assert cassette_response.request_info.url == response.request_info.url assert cassette_response.request_info.url == response.request_info.url
assert cassette_response.request_info.method == response.request_info.method assert cassette_response.request_info.method == response.request_info.method
assert cassette_response.request_info.headers.items() == response.request_info.headers.items() assert {k: v for k, v in cassette_response.request_info.headers.items()} == {
k: v for k, v in response.request_info.headers.items()
}
assert cassette_response.request_info.real_url == response.request_info.real_url assert cassette_response.request_info.real_url == response.request_info.real_url
@pytest.mark.online def test_not_modified(tmpdir, mockbin):
def test_not_modified(tmpdir, httpbin):
"""It doesn't try to redirect on 304""" """It doesn't try to redirect on 304"""
url = httpbin.url + "/status/304" url = mockbin + "/status/304"
with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))): with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))):
response, _ = get(url) response, _ = get(url)
@@ -311,14 +289,13 @@ def test_not_modified(tmpdir, httpbin):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_double_requests(tmpdir, mockbin_request_url):
def test_double_requests(tmpdir, httpbin):
"""We should capture, record, and replay all requests and response chains, """We should capture, record, and replay all requests and response chains,
even if there are duplicate ones. even if there are duplicate ones.
We should replay in the order we saw them. We should replay in the order we saw them.
""" """
url = httpbin.url url = mockbin_request_url
with vcr.use_cassette(str(tmpdir.join("text.yaml"))): with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
_, response_text1 = get(url, output="text") _, response_text1 = get(url, output="text")
@@ -343,41 +320,31 @@ def test_double_requests(tmpdir, httpbin):
assert cassette.play_count == 2 assert cassette.play_count == 2
def test_cookies(httpbin_both, tmpdir): def test_cookies(scheme, tmpdir):
async def run(loop): async def run(loop):
cookies_url = httpbin_both.url + ( cookies_url = scheme + (
"/response-headers?" "://httpbin.org/response-headers?"
"set-cookie=" + urllib.parse.quote("cookie_1=val_1; Path=/") + "&" "set-cookie=" + urllib.parse.quote("cookie_1=val_1; Path=/") + "&"
"Set-Cookie=" + urllib.parse.quote("Cookie_2=Val_2; Path=/") "Set-Cookie=" + urllib.parse.quote("Cookie_2=Val_2; Path=/")
) )
home_url = httpbin_both.url + "/" home_url = scheme + "://httpbin.org/"
tmp = str(tmpdir.join("cookies.yaml")) tmp = str(tmpdir.join("cookies.yaml"))
req_cookies = {"Cookie_3": "Val_3"} req_cookies = {"Cookie_3": "Val_3"}
req_headers = {"Cookie": "Cookie_4=Val_4"} req_headers = {"Cookie": "Cookie_4=Val_4"}
# ------------------------- Record -------------------------- # # ------------------------- Record -------------------------- #
with vcr.use_cassette(tmp) as cassette: with vcr.use_cassette(tmp) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session: async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT) cookies_resp = await session.get(cookies_url)
home_resp = await session.get( home_resp = await session.get(home_url, cookies=req_cookies, headers=req_headers)
home_url,
cookies=req_cookies,
headers=req_headers,
ssl=HTTPBIN_SSL_CONTEXT,
)
assert cassette.play_count == 0 assert cassette.play_count == 0
assert_responses(cookies_resp, home_resp) assert_responses(cookies_resp, home_resp)
# -------------------------- Play --------------------------- # # -------------------------- Play --------------------------- #
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette: with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session: async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT) cookies_resp = await session.get(cookies_url)
home_resp = await session.get( home_resp = await session.get(home_url, cookies=req_cookies, headers=req_headers)
home_url,
cookies=req_cookies,
headers=req_headers,
ssl=HTTPBIN_SSL_CONTEXT,
)
assert cassette.play_count == 2 assert cassette.play_count == 2
assert_responses(cookies_resp, home_resp) assert_responses(cookies_resp, home_resp)
@@ -393,76 +360,57 @@ def test_cookies(httpbin_both, tmpdir):
run_in_loop(run) run_in_loop(run)
def test_cookies_redirect(httpbin_both, tmpdir): def test_cookies_redirect(scheme, tmpdir):
async def run(loop): async def run(loop):
# Sets cookie as provided by the query string and redirects # Sets cookie as provided by the query string and redirects
cookies_url = httpbin_both.url + "/cookies/set?Cookie_1=Val_1" cookies_url = scheme + "://httpbin.org/cookies/set?Cookie_1=Val_1"
tmp = str(tmpdir.join("cookies.yaml")) tmp = str(tmpdir.join("cookies.yaml"))
# ------------------------- Record -------------------------- # # ------------------------- Record -------------------------- #
with vcr.use_cassette(tmp) as cassette: with vcr.use_cassette(tmp) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
cookies_resp = await session.get(cookies_url)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
cookies = session.cookie_jar.filter_cookies(cookies_url)
assert cookies["Cookie_1"].value == "Val_1"
assert cassette.play_count == 0
cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
assert cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
# -------------------------- Play --------------------------- #
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
cookies_resp = await session.get(cookies_url)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
cookies = session.cookie_jar.filter_cookies(cookies_url)
assert cookies["Cookie_1"].value == "Val_1"
assert cassette.play_count == 2
cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
assert cassette.requests[1].headers["Cookie"] == "Cookie_1=Val_1"
# Assert that it's ignoring expiration date
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
cassette.responses[0]["headers"]["set-cookie"] = [
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT"
]
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
async with aiohttp.ClientSession(loop=loop) as session:
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
cookies_resp = await session.get(cookies_url)
assert not cookies_resp.cookies
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
cookies = session.cookie_jar.filter_cookies(cookies_url)
assert cookies["Cookie_1"].value == "Val_1"
run_in_loop(run)
@pytest.mark.online
def test_not_allow_redirects(tmpdir, mockbin):
def test_not_allow_redirects(tmpdir, httpbin):
url = mockbin + "/redirect/308/5"
url = httpbin + "/redirect-to?url=.%2F&status_code=308"
path = str(tmpdir.join("redirects.yaml"))
with vcr.use_cassette(path):
response, _ = get(url, allow_redirects=False)
assert response.url.path == "/redirect-to"
assert response.url.path == "/redirect/308/5"
assert response.status == 308
with vcr.use_cassette(path) as cassette:
response, _ = get(url, allow_redirects=False)
assert response.url.path == "/redirect-to"
assert response.url.path == "/redirect/308/5"
assert response.status == 308
assert cassette.play_count == 1
def test_filter_query_parameters(tmpdir, httpbin):
url = httpbin + "?password=secret"
path = str(tmpdir.join("query_param_filter.yaml"))
with vcr.use_cassette(path, filter_query_parameters=["password"]) as cassette:
get(url)
assert "password" not in cassette.requests[0].url
assert "secret" not in cassette.requests[0].url
with open(path) as f:
cassette_content = f.read()
assert "password" not in cassette_content
assert "secret" not in cassette_content


@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""Basic tests for cassettes""" """Basic tests for cassettes"""
# External imports # External imports
@@ -39,7 +40,7 @@ def test_basic_json_use(tmpdir, httpbin):
test_fixture = str(tmpdir.join("synopsis.json")) test_fixture = str(tmpdir.join("synopsis.json"))
with vcr.use_cassette(test_fixture, serializer="json"): with vcr.use_cassette(test_fixture, serializer="json"):
response = urlopen(httpbin.url).read() response = urlopen(httpbin.url).read()
assert b"HTTP Request &amp; Response Service" in response assert b"difficult sometimes" in response
def test_patched_content(tmpdir, httpbin): def test_patched_content(tmpdir, httpbin):


@@ -0,0 +1,82 @@
import pytest
boto = pytest.importorskip("boto")
from configparser import DuplicateSectionError # NOQA
import boto # NOQA
import boto.iam # NOQA
from boto.s3.connection import S3Connection # NOQA
from boto.s3.key import Key # NOQA
import vcr # NOQA
def test_boto_stubs(tmpdir):
with vcr.use_cassette(str(tmpdir.join("boto-stubs.yml"))):
# Perform the imports within the patched context so that
# CertValidatingHTTPSConnection refers to the patched version.
from boto.https_connection import CertValidatingHTTPSConnection
from vcr.stubs.boto_stubs import VCRCertValidatingHTTPSConnection
# Prove that the class was patched by the stub and that we can instantiate it.
assert issubclass(CertValidatingHTTPSConnection, VCRCertValidatingHTTPSConnection)
CertValidatingHTTPSConnection("hostname.does.not.matter")
def test_boto_without_vcr():
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
k = Key(s3_bucket)
k.key = "test.txt"
k.set_contents_from_string("hello world i am a string")
def test_boto_medium_difficulty(tmpdir):
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
with vcr.use_cassette(str(tmpdir.join("boto-medium.yml"))):
k = Key(s3_bucket)
k.key = "test.txt"
k.set_contents_from_string("hello world i am a string")
with vcr.use_cassette(str(tmpdir.join("boto-medium.yml"))):
k = Key(s3_bucket)
k.key = "test.txt"
k.set_contents_from_string("hello world i am a string")
def test_boto_hardcore_mode(tmpdir):
with vcr.use_cassette(str(tmpdir.join("boto-hardcore.yml"))):
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
k = Key(s3_bucket)
k.key = "test.txt"
k.set_contents_from_string("hello world i am a string")
with vcr.use_cassette(str(tmpdir.join("boto-hardcore.yml"))):
s3_conn = S3Connection()
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
k = Key(s3_bucket)
k.key = "test.txt"
k.set_contents_from_string("hello world i am a string")
def test_boto_iam(tmpdir):
try:
boto.config.add_section("Boto")
except DuplicateSectionError:
pass
# Ensure that boto uses HTTPS
boto.config.set("Boto", "is_secure", "true")
# Ensure that boto uses CertValidatingHTTPSConnection
boto.config.set("Boto", "https_validate_certificates", "true")
with vcr.use_cassette(str(tmpdir.join("boto-iam.yml"))):
iam_conn = boto.iam.connect_to_region("universal")
iam_conn.get_all_users()
with vcr.use_cassette(str(tmpdir.join("boto-iam.yml"))):
iam_conn = boto.iam.connect_to_region("universal")
iam_conn.get_all_users()
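All of the boto tests in this new file lean on the same two-pass record/replay cycle; as a rough standalone illustration with an arbitrary URL and cassette name (not taken from the diff):
from urllib.request import urlopen

import vcr

CASSETTE = "cassettes/example.yml"

# First pass: no cassette exists yet, so the request really goes out and
# the response is recorded to disk.
with vcr.use_cassette(CASSETTE):
    first = urlopen("https://httpbin.org/get").read()

# Second pass: the cassette exists, so the response is replayed from disk
# and play_count goes up instead of network traffic happening.
with vcr.use_cassette(CASSETTE) as cass:
    second = urlopen("https://httpbin.org/get").read()
    assert cass.play_count == 1

assert first == second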


@@ -2,14 +2,15 @@ import os
import pytest
import vcr
boto3 = pytest.importorskip("boto3")
import botocore # noqa
import boto3 # NOQA
import botocore # NOQA
import vcr # NOQA
try:
from botocore import awsrequest # noqa
from botocore import awsrequest # NOQA
botocore_awsrequest = True
except ImportError:
@@ -19,12 +20,12 @@ except ImportError:
# https://github.com/boto/botocore/pull/1495
boto3_skip_vendored_requests = pytest.mark.skipif(
botocore_awsrequest,
reason=f"botocore version {botocore.__version__} does not use vendored requests anymore.",
reason="botocore version {ver} does not use vendored requests anymore.".format(ver=botocore.__version__),
)
boto3_skip_awsrequest = pytest.mark.skipif(
not botocore_awsrequest,
reason=f"botocore version {botocore.__version__} still uses vendored requests.",
reason="botocore version {ver} still uses vendored requests.".format(ver=botocore.__version__),
)
IAM_USER_NAME = "vcrpy"


@@ -5,16 +5,14 @@ from urllib.request import urlopen
import pytest
import vcr
from vcr.cassette import Cassette
@pytest.mark.online
def test_set_serializer_default_config(tmpdir, mockbin_request_url):
def test_set_serializer_default_config(tmpdir, httpbin):
my_vcr = vcr.VCR(serializer="json")
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
assert my_vcr.serializer == "json"
urlopen(httpbin.url)
urlopen(mockbin_request_url)
with open(str(tmpdir.join("test.json"))) as f:
file_content = f.read()
@@ -22,38 +20,35 @@ def test_set_serializer_default_config(tmpdir, httpbin):
assert json.loads(file_content)
@pytest.mark.online
def test_default_set_cassette_library_dir(tmpdir, mockbin_request_url):
def test_default_set_cassette_library_dir(tmpdir, httpbin):
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
with my_vcr.use_cassette("test.json"):
urlopen(httpbin.url)
urlopen(mockbin_request_url)
assert os.path.exists(str(tmpdir.join("subdir").join("test.json")))
@pytest.mark.online
def test_override_set_cassette_library_dir(tmpdir, mockbin_request_url):
def test_override_set_cassette_library_dir(tmpdir, httpbin):
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
cld = str(tmpdir.join("subdir2"))
with my_vcr.use_cassette("test.json", cassette_library_dir=cld):
urlopen(httpbin.url)
urlopen(mockbin_request_url)
assert os.path.exists(str(tmpdir.join("subdir2").join("test.json")))
assert not os.path.exists(str(tmpdir.join("subdir").join("test.json")))
@pytest.mark.online
def test_override_match_on(tmpdir, mockbin_request_url):
def test_override_match_on(tmpdir, httpbin):
my_vcr = vcr.VCR(match_on=["method"])
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
urlopen(httpbin.url)
urlopen(mockbin_request_url)
with my_vcr.use_cassette(str(tmpdir.join("test.json"))) as cass:
urlopen(httpbin.url)
urlopen(mockbin_request_url)
assert len(cass) == 1
assert cass.play_count == 1
@@ -67,13 +62,12 @@ def test_missing_matcher():
pass
@pytest.mark.online
def test_dont_record_on_exception(tmpdir, mockbin_request_url):
def test_dont_record_on_exception(tmpdir, httpbin):
my_vcr = vcr.VCR(record_on_exception=False)
@my_vcr.use_cassette(str(tmpdir.join("dontsave.yml")))
def some_test():
assert b"Not in content" in urlopen(httpbin.url)
assert b"Not in content" in urlopen(mockbin_request_url)
with pytest.raises(AssertionError):
some_test()
@@ -83,24 +77,6 @@ def test_dont_record_on_exception(tmpdir, httpbin):
# Make sure context decorator has the same behavior
with pytest.raises(AssertionError):
with my_vcr.use_cassette(str(tmpdir.join("dontsave2.yml"))):
assert b"Not in content" in urlopen(httpbin.url).read()
assert b"Not in content" in urlopen(mockbin_request_url).read()
assert not os.path.exists(str(tmpdir.join("dontsave2.yml")))
def test_set_drop_unused_requests(tmpdir, httpbin):
my_vcr = vcr.VCR(drop_unused_requests=True)
file = str(tmpdir.join("test.yaml"))
with my_vcr.use_cassette(file):
urlopen(httpbin.url)
urlopen(httpbin.url + "/get")
cassette = Cassette.load(path=file)
assert len(cassette) == 2
with my_vcr.use_cassette(file):
urlopen(httpbin.url)
cassette = Cassette.load(path=file)
assert len(cassette) == 1
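test_set_drop_unused_requests above also shows the general configuration pattern used throughout this file: defaults set on a vcr.VCR instance apply to every cassette it opens, and an individual cassette can still override them. A compressed sketch (values purely illustrative):
import vcr

# Instance-wide defaults ...
my_vcr = vcr.VCR(serializer="json", match_on=["method"], record_on_exception=False)

# ... which an individual cassette may override.
with my_vcr.use_cassette("cassettes/example.json", match_on=["uri", "method"]):
    pass  # issue requests here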


@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""Basic tests about save behavior""" """Basic tests about save behavior"""
# External imports # External imports
@@ -5,26 +6,23 @@ import os
import time import time
from urllib.request import urlopen from urllib.request import urlopen
import pytest
# Internal imports # Internal imports
import vcr import vcr
@pytest.mark.online def test_disk_saver_nowrite(tmpdir, mockbin_request_url):
def test_disk_saver_nowrite(tmpdir, httpbin):
""" """
Ensure that when you close a cassette without changing it it doesn't Ensure that when you close a cassette without changing it it doesn't
rewrite the file rewrite the file
""" """
fname = str(tmpdir.join("synopsis.yaml")) fname = str(tmpdir.join("synopsis.yaml"))
with vcr.use_cassette(fname) as cass: with vcr.use_cassette(fname) as cass:
urlopen(httpbin.url).read() urlopen(mockbin_request_url).read()
assert cass.play_count == 0 assert cass.play_count == 0
last_mod = os.path.getmtime(fname) last_mod = os.path.getmtime(fname)
with vcr.use_cassette(fname) as cass: with vcr.use_cassette(fname) as cass:
urlopen(httpbin.url).read() urlopen(mockbin_request_url).read()
assert cass.play_count == 1 assert cass.play_count == 1
assert cass.dirty is False assert cass.dirty is False
last_mod2 = os.path.getmtime(fname) last_mod2 = os.path.getmtime(fname)
@@ -32,15 +30,14 @@ def test_disk_saver_nowrite(tmpdir, httpbin):
assert last_mod == last_mod2 assert last_mod == last_mod2
@pytest.mark.online def test_disk_saver_write(tmpdir, mockbin_request_url):
def test_disk_saver_write(tmpdir, httpbin):
""" """
Ensure that when you close a cassette after changing it it does Ensure that when you close a cassette after changing it it does
rewrite the file rewrite the file
""" """
fname = str(tmpdir.join("synopsis.yaml")) fname = str(tmpdir.join("synopsis.yaml"))
with vcr.use_cassette(fname) as cass: with vcr.use_cassette(fname) as cass:
urlopen(httpbin.url).read() urlopen(mockbin_request_url).read()
assert cass.play_count == 0 assert cass.play_count == 0
last_mod = os.path.getmtime(fname) last_mod = os.path.getmtime(fname)
@@ -49,8 +46,8 @@ def test_disk_saver_write(tmpdir, httpbin):
time.sleep(1) time.sleep(1)
with vcr.use_cassette(fname, record_mode=vcr.mode.ANY) as cass: with vcr.use_cassette(fname, record_mode=vcr.mode.ANY) as cass:
urlopen(httpbin.url).read() urlopen(mockbin_request_url).read()
urlopen(httpbin.url + "/get").read() urlopen(mockbin_request_url + "/get").read()
assert cass.play_count == 1 assert cass.play_count == 1
assert cass.dirty assert cass.dirty
last_mod2 = os.path.getmtime(fname) last_mod2 = os.path.getmtime(fname)


@@ -5,11 +5,10 @@ from urllib.parse import urlencode
from urllib.request import Request, urlopen from urllib.request import Request, urlopen
import pytest import pytest
from assertions import assert_cassette_has_one_response, assert_is_json
import vcr import vcr
from ..assertions import assert_cassette_has_one_response, assert_is_json_bytes
def _request_with_auth(url, username, password): def _request_with_auth(url, username, password):
request = Request(url) request = Request(url)
@@ -46,18 +45,13 @@ def test_filter_basic_auth(tmpdir, httpbin):
def test_filter_querystring(tmpdir, httpbin): def test_filter_querystring(tmpdir, httpbin):
url = httpbin.url + "/?password=secret" url = httpbin.url + "/?foo=bar"
cass_file = str(tmpdir.join("filter_qs.yaml")) cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, filter_query_parameters=["password"]): with vcr.use_cassette(cass_file, filter_query_parameters=["foo"]):
urlopen(url) urlopen(url)
with vcr.use_cassette(cass_file, filter_query_parameters=["password"]) as cass: with vcr.use_cassette(cass_file, filter_query_parameters=["foo"]) as cass:
urlopen(url) urlopen(url)
assert "password" not in cass.requests[0].url assert "foo" not in cass.requests[0].url
assert "secret" not in cass.requests[0].url
with open(cass_file) as f:
cassette_content = f.read()
assert "password" not in cassette_content
assert "secret" not in cassette_content
def test_filter_post_data(tmpdir, httpbin): def test_filter_post_data(tmpdir, httpbin):
@@ -111,7 +105,7 @@ def test_decompress_gzip(tmpdir, httpbin):
with vcr.use_cassette(cass_file) as cass: with vcr.use_cassette(cass_file) as cass:
decoded_response = urlopen(url).read() decoded_response = urlopen(url).read()
assert_cassette_has_one_response(cass) assert_cassette_has_one_response(cass)
assert_is_json_bytes(decoded_response) assert_is_json(decoded_response)
def test_decomptess_empty_body(tmpdir, httpbin): def test_decomptess_empty_body(tmpdir, httpbin):
@@ -135,7 +129,7 @@ def test_decompress_deflate(tmpdir, httpbin):
with vcr.use_cassette(cass_file) as cass: with vcr.use_cassette(cass_file) as cass:
decoded_response = urlopen(url).read() decoded_response = urlopen(url).read()
assert_cassette_has_one_response(cass) assert_cassette_has_one_response(cass)
assert_is_json_bytes(decoded_response) assert_is_json(decoded_response)
def test_decompress_regular(tmpdir, httpbin): def test_decompress_regular(tmpdir, httpbin):
@@ -147,25 +141,4 @@ def test_decompress_regular(tmpdir, httpbin):
with vcr.use_cassette(cass_file) as cass: with vcr.use_cassette(cass_file) as cass:
resp = urlopen(url).read() resp = urlopen(url).read()
assert_cassette_has_one_response(cass) assert_cassette_has_one_response(cass)
assert_is_json_bytes(resp) assert_is_json(resp)
def test_before_record_request_corruption(tmpdir, httpbin):
"""Modifying request in before_record_request should not affect outgoing request"""
def before_record(request):
request.headers.clear()
request.body = b""
return request
req = Request(
httpbin.url + "/post",
data=urlencode({"test": "exists"}).encode(),
headers={"X-Test": "exists"},
)
cass_file = str(tmpdir.join("modified_response.yaml"))
with vcr.use_cassette(cass_file, before_record_request=before_record):
resp = json.loads(urlopen(req).read())
assert resp["headers"]["X-Test"] == "exists"
assert resp["form"]["test"] == "exists"


@@ -1,14 +1,13 @@
# -*- coding: utf-8 -*-
"""Integration tests with httplib2""" """Integration tests with httplib2"""
from urllib.parse import urlencode from urllib.parse import urlencode
import pytest import pytest
import pytest_httpbin.certs import pytest_httpbin.certs
from assertions import assert_cassette_has_one_response
import vcr import vcr
from ..assertions import assert_cassette_has_one_response
httplib2 = pytest.importorskip("httplib2") httplib2 = pytest.importorskip("httplib2")
@@ -57,15 +56,14 @@ def test_response_headers(tmpdir, httpbin_both):
assert set(headers) == set(resp.items()) assert set(headers) == set(resp.items())
@pytest.mark.online def test_effective_url(tmpdir):
def test_effective_url(tmpdir, httpbin):
"""Ensure that the effective_url is captured""" """Ensure that the effective_url is captured"""
url = httpbin.url + "/redirect-to?url=.%2F&status_code=301" url = "http://mockbin.org/redirect/301"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))): with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
resp, _ = http().request(url) resp, _ = http().request(url)
effective_url = resp["content-location"] effective_url = resp["content-location"]
assert effective_url == httpbin.url + "/" assert effective_url == "http://mockbin.org/redirect/301/0"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))): with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
resp, _ = http().request(url) resp, _ = http().request(url)


@@ -2,18 +2,11 @@ import os
import pytest import pytest
import vcr
from ..assertions import assert_is_json_bytes
asyncio = pytest.importorskip("asyncio") asyncio = pytest.importorskip("asyncio")
httpx = pytest.importorskip("httpx") httpx = pytest.importorskip("httpx")
import vcr # noqa: E402
@pytest.fixture(params=["https", "http"]) from vcr.stubs.httpx_stubs import HTTPX_REDIRECT_PARAM # noqa: E402
def scheme(request):
"""Fixture that returns both http and https."""
return request.param
class BaseDoRequest: class BaseDoRequest:
@@ -22,7 +15,6 @@ class BaseDoRequest:
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
self._client_args = args self._client_args = args
self._client_kwargs = kwargs self._client_kwargs = kwargs
self._client_kwargs["follow_redirects"] = self._client_kwargs.get("follow_redirects", True)
def _make_client(self): def _make_client(self):
return self._client_class(*self._client_args, **self._client_kwargs) return self._client_class(*self._client_args, **self._client_kwargs)
@@ -32,37 +24,21 @@ class DoSyncRequest(BaseDoRequest):
_client_class = httpx.Client _client_class = httpx.Client
def __enter__(self): def __enter__(self):
self._client = self._make_client()
return self return self
def __exit__(self, *args): def __exit__(self, *args):
self._client.close() pass
del self._client
@property @property
def client(self): def client(self):
try: try:
return self._client return self._client
except AttributeError as e: except AttributeError:
raise ValueError('To access sync client, use "with do_request() as client"') from e self._client = self._make_client()
return self._client
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
if hasattr(self, "_client"): return self.client.request(*args, timeout=60, **kwargs)
return self.client.request(*args, timeout=60, **kwargs)
# Use one-time context and dispose of the client afterwards
with self:
return self.client.request(*args, timeout=60, **kwargs)
def stream(self, *args, **kwargs):
if hasattr(self, "_client"):
with self.client.stream(*args, **kwargs) as response:
return b"".join(response.iter_bytes())
# Use one-time context and dispose of the client afterwards
with self:
with self.client.stream(*args, **kwargs) as response:
return b"".join(response.iter_bytes())
class DoAsyncRequest(BaseDoRequest): class DoAsyncRequest(BaseDoRequest):
@@ -89,8 +65,8 @@ class DoAsyncRequest(BaseDoRequest):
def client(self): def client(self):
try: try:
return self._client return self._client
except AttributeError as e: except AttributeError:
raise ValueError('To access async client, use "with do_request() as client"') from e raise ValueError('To access async client, use "with do_request() as client"')
def __call__(self, *args, **kwargs): def __call__(self, *args, **kwargs):
if hasattr(self, "_loop"): if hasattr(self, "_loop"):
@@ -98,22 +74,7 @@ class DoAsyncRequest(BaseDoRequest):
# Use one-time context and dispose of the loop/client afterwards # Use one-time context and dispose of the loop/client afterwards
with self: with self:
return self._loop.run_until_complete(self.client.request(*args, **kwargs)) return self(*args, **kwargs)
async def _get_stream(self, *args, **kwargs):
async with self.client.stream(*args, **kwargs) as response:
content = b""
async for c in response.aiter_bytes():
content += c
return content
def stream(self, *args, **kwargs):
if hasattr(self, "_loop"):
return self._loop.run_until_complete(self._get_stream(*args, **kwargs))
# Use one-time context and dispose of the loop/client afterwards
with self:
return self._loop.run_until_complete(self._get_stream(*args, **kwargs))
def pytest_generate_tests(metafunc): def pytest_generate_tests(metafunc):
@@ -126,9 +87,8 @@ def yml(tmpdir, request):
return str(tmpdir.join(request.function.__name__ + ".yaml")) return str(tmpdir.join(request.function.__name__ + ".yaml"))
@pytest.mark.online def test_status(tmpdir, mockbin, do_request):
def test_status(tmpdir, httpbin, do_request): url = mockbin
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("status.yaml"))): with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
response = do_request()("GET", url) response = do_request()("GET", url)
@@ -139,9 +99,8 @@ def test_status(tmpdir, httpbin, do_request):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_case_insensitive_headers(tmpdir, mockbin, do_request):
def test_case_insensitive_headers(tmpdir, httpbin, do_request): url = mockbin
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))): with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
do_request()("GET", url) do_request()("GET", url)
@@ -153,9 +112,8 @@ def test_case_insensitive_headers(tmpdir, httpbin, do_request):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_content(tmpdir, mockbin, do_request):
def test_content(tmpdir, httpbin, do_request): url = mockbin
url = httpbin.url
with vcr.use_cassette(str(tmpdir.join("cointent.yaml"))): with vcr.use_cassette(str(tmpdir.join("cointent.yaml"))):
response = do_request()("GET", url) response = do_request()("GET", url)
@@ -166,22 +124,22 @@ def test_content(tmpdir, httpbin, do_request):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_json(tmpdir, mockbin, do_request):
def test_json(tmpdir, httpbin, do_request): url = mockbin + "/request"
url = httpbin.url + "/json"
headers = {"content-type": "application/json"}
with vcr.use_cassette(str(tmpdir.join("json.yaml"))): with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
response = do_request()("GET", url) response = do_request(headers=headers)("GET", url)
with vcr.use_cassette(str(tmpdir.join("json.yaml"))) as cassette: with vcr.use_cassette(str(tmpdir.join("json.yaml"))) as cassette:
cassette_response = do_request()("GET", url) cassette_response = do_request(headers=headers)("GET", url)
assert cassette_response.json() == response.json() assert cassette_response.json() == response.json()
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_params_same_url_distinct_params(tmpdir, mockbin, do_request):
def test_params_same_url_distinct_params(tmpdir, httpbin, do_request): url = mockbin + "/request"
url = httpbin.url + "/get"
headers = {"Content-Type": "application/json"} headers = {"Content-Type": "application/json"}
params = {"a": 1, "b": False, "c": "c"} params = {"a": 1, "b": False, "c": "c"}
@@ -200,30 +158,47 @@ def test_params_same_url_distinct_params(tmpdir, httpbin, do_request):
do_request()("GET", url, params=params, headers=headers) do_request()("GET", url, params=params, headers=headers)
@pytest.mark.online def test_redirect(mockbin, yml, do_request):
def test_redirect(httpbin, yml, do_request): url = mockbin + "/redirect/303/2"
url = httpbin.url + "/redirect-to"
response = do_request()("GET", url) redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: True}
response = do_request()("GET", url, **redirect_kwargs)
with vcr.use_cassette(yml): with vcr.use_cassette(yml):
response = do_request()("GET", url, params={"url": "./get", "status_code": 302}) response = do_request()("GET", url, **redirect_kwargs)
with vcr.use_cassette(yml) as cassette: with vcr.use_cassette(yml) as cassette:
cassette_response = do_request()("GET", url, params={"url": "./get", "status_code": 302}) cassette_response = do_request()("GET", url, **redirect_kwargs)
assert cassette_response.status_code == response.status_code assert cassette_response.status_code == response.status_code
assert len(cassette_response.history) == len(response.history) assert len(cassette_response.history) == len(response.history)
assert len(cassette) == 2 assert len(cassette) == 3
assert cassette.play_count == 2 assert cassette.play_count == 3
# Assert that the real response and the cassette response have a similar # Assert that the real response and the cassette response have a similar
# looking request_info. # looking request_info.
assert cassette_response.request.url == response.request.url assert cassette_response.request.url == response.request.url
assert cassette_response.request.method == response.request.method assert cassette_response.request.method == response.request.method
assert cassette_response.request.headers.items() == response.request.headers.items() assert {k: v for k, v in cassette_response.request.headers.items()} == {
k: v for k, v in response.request.headers.items()
}
def test_work_with_gzipped_data(mockbin, do_request, yml):
url = mockbin + "/gzip?foo=bar"
headers = {"accept-encoding": "deflate, gzip"}
with vcr.use_cassette(yml):
do_request(headers=headers)("GET", url)
with vcr.use_cassette(yml) as cassette:
cassette_response = do_request(headers=headers)("GET", url)
assert cassette_response.headers["content-encoding"] == "gzip"
assert cassette_response.read()
assert cassette.play_count == 1
@pytest.mark.online
@pytest.mark.parametrize("url", ["https://github.com/kevin1024/vcrpy/issues/" + str(i) for i in range(3, 6)]) @pytest.mark.parametrize("url", ["https://github.com/kevin1024/vcrpy/issues/" + str(i) for i in range(3, 6)])
def test_simple_fetching(do_request, yml, url): def test_simple_fetching(do_request, yml, url):
with vcr.use_cassette(yml): with vcr.use_cassette(yml):
@@ -235,32 +210,54 @@ def test_simple_fetching(do_request, yml, url):
assert cassette.play_count == 1 assert cassette.play_count == 1
@pytest.mark.online def test_behind_proxy(do_request):
def test_cookies(tmpdir, httpbin, do_request): # This is recorded because otherwise we should have a live proxy somewhere.
yml = (
os.path.dirname(os.path.realpath(__file__)) + "/cassettes/" + "test_httpx_test_test_behind_proxy.yml"
)
url = "https://mockbin.org/headers"
proxy = "http://localhost:8080"
proxies = {"http://": proxy, "https://": proxy}
with vcr.use_cassette(yml):
response = do_request(proxies=proxies, verify=False)("GET", url)
with vcr.use_cassette(yml) as cassette:
cassette_response = do_request(proxies=proxies, verify=False)("GET", url)
assert str(cassette_response.request.url) == url
assert cassette.play_count == 1
assert cassette_response.headers["Via"] == "my_own_proxy", str(cassette_response.headers)
assert cassette_response.request.url == response.request.url
def test_cookies(tmpdir, mockbin, do_request):
def client_cookies(client): def client_cookies(client):
return list(client.client.cookies) return [c for c in client.client.cookies]
def response_cookies(response): def response_cookies(response):
return list(response.cookies) return [c for c in response.cookies]
url = httpbin.url + "/cookies/set" url = mockbin + "/bin/26148652-fe25-4f21-aaf5-689b5b4bf65f"
params = {"k1": "v1", "k2": "v2"} headers = {"cookie": "k1=v1;k2=v2"}
with do_request(params=params, follow_redirects=False) as client: with do_request(headers=headers) as client:
assert client_cookies(client) == [] assert client_cookies(client) == []
redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: True}
testfile = str(tmpdir.join("cookies.yml")) testfile = str(tmpdir.join("cookies.yml"))
with vcr.use_cassette(testfile): with vcr.use_cassette(testfile):
r1 = client("GET", url) r1 = client("GET", url, **redirect_kwargs)
assert response_cookies(r1) == ["k1", "k2"] assert response_cookies(r1) == ["k1", "k2"]
r2 = client("GET", url) r2 = client("GET", url, **redirect_kwargs)
assert response_cookies(r2) == ["k1", "k2"] assert response_cookies(r2) == ["k1", "k2"]
assert client_cookies(client) == ["k1", "k2"] assert client_cookies(client) == ["k1", "k2"]
with do_request(params=params, follow_redirects=False) as new_client: with do_request(headers=headers) as new_client:
assert client_cookies(new_client) == [] assert client_cookies(new_client) == []
with vcr.use_cassette(testfile) as cassette: with vcr.use_cassette(testfile) as cassette:
@@ -271,91 +268,39 @@ def test_cookies(tmpdir, httpbin, do_request):
assert client_cookies(new_client) == ["k1", "k2"] assert client_cookies(new_client) == ["k1", "k2"]
@pytest.mark.online def test_relative_redirects(tmpdir, scheme, do_request, mockbin):
def test_stream(tmpdir, httpbin, do_request): redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: True}
url = httpbin.url + "/stream-bytes/512"
testfile = str(tmpdir.join("stream.yml"))
url = mockbin + "/redirect/301?to=/redirect/301?to=/request"
testfile = str(tmpdir.join("relative_redirects.yml"))
with vcr.use_cassette(testfile): with vcr.use_cassette(testfile):
response_content = do_request().stream("GET", url) response = do_request()("GET", url, **redirect_kwargs)
assert len(response_content) == 512 assert len(response.history) == 2, response
assert response.json()["url"].endswith("request")
with vcr.use_cassette(testfile) as cassette: with vcr.use_cassette(testfile) as cassette:
cassette_content = do_request().stream("GET", url) response = do_request()("GET", url, **redirect_kwargs)
assert cassette_content == response_content assert len(response.history) == 2
assert len(cassette_content) == 512 assert response.json()["url"].endswith("request")
assert cassette.play_count == 1
assert cassette.play_count == 3
# Regular cassette formats support the status reason, def test_redirect_wo_allow_redirects(do_request, mockbin, yml):
# but the old HTTPX cassette format does not. url = mockbin + "/redirect/308/5"
@pytest.mark.parametrize(
"cassette_name,reason", redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: False}
[
("requests", "great"), with vcr.use_cassette(yml):
("httpx_old_format", "OK"), response = do_request()("GET", url, **redirect_kwargs)
],
) assert str(response.url).endswith("308/5")
def test_load_cassette_format(do_request, cassette_name, reason): assert response.status_code == 308
mydir = os.path.dirname(os.path.realpath(__file__))
yml = f"{mydir}/cassettes/gzip_{cassette_name}.yaml"
url = "https://httpbin.org/gzip"
with vcr.use_cassette(yml) as cassette: with vcr.use_cassette(yml) as cassette:
cassette_response = do_request()("GET", url) response = do_request()("GET", url, **redirect_kwargs)
assert str(cassette_response.request.url) == url
assert str(response.url).endswith("308/5")
assert response.status_code == 308
assert cassette.play_count == 1 assert cassette.play_count == 1
# Should be able to load up the JSON inside,
# regardless whether the content is the gzipped
# in the cassette or not.
json = cassette_response.json()
assert json["method"] == "GET", json
assert cassette_response.status_code == 200
assert cassette_response.reason_phrase == reason
def test_gzip__decode_compressed_response_false(tmpdir, httpbin, do_request):
"""
Ensure that httpx is able to automatically decompress the response body.
"""
for _ in range(2): # one for recording, one for re-playing
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cassette:
response = do_request()("GET", httpbin + "/gzip")
assert response.headers["content-encoding"] == "gzip" # i.e. not removed
# The content stored in the cassette should be gzipped.
assert cassette.responses[0]["body"]["string"][:2] == b"\x1f\x8b"
assert_is_json_bytes(response.content) # i.e. uncompressed bytes
def test_gzip__decode_compressed_response_true(do_request, tmpdir, httpbin):
url = httpbin + "/gzip"
expected_response = do_request()("GET", url)
expected_content = expected_response.content
assert expected_response.headers["content-encoding"] == "gzip" # self-test
with vcr.use_cassette(
str(tmpdir.join("decode_compressed.yaml")),
decode_compressed_response=True,
) as cassette:
r = do_request()("GET", url)
assert r.headers["content-encoding"] == "gzip" # i.e. not removed
content_length = r.headers["content-length"]
assert r.content == expected_content
# Has the cassette body been decompressed?
cassette_response_body = cassette.responses[0]["body"]["string"]
assert isinstance(cassette_response_body, str)
# Content should be JSON.
assert cassette_response_body[0:1] == "{"
with vcr.use_cassette(str(tmpdir.join("decode_compressed.yaml")), decode_compressed_response=True):
r = httpx.get(url)
assert "content-encoding" not in r.headers # i.e. removed
assert r.content == expected_content
# As the content is uncompressed, it should have a bigger
# length than the compressed version.
assert r.headers["content-length"] > content_length


@@ -28,9 +28,9 @@ def test_ignore_localhost(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
urlopen(f"http://localhost:{httpbin.port}/")
urlopen("http://localhost:{}/".format(httpbin.port))
assert len(cass) == 0
urlopen(f"http://httpbin.org:{httpbin.port}/")
urlopen("http://httpbin.org:{}/".format(httpbin.port))
assert len(cass) == 1
@@ -38,9 +38,9 @@ def test_ignore_httpbin(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_hosts=["httpbin.org"]) as cass:
urlopen(f"http://httpbin.org:{httpbin.port}/")
urlopen("http://httpbin.org:{}/".format(httpbin.port))
assert len(cass) == 0
urlopen(f"http://localhost:{httpbin.port}/")
urlopen("http://localhost:{}/".format(httpbin.port))
assert len(cass) == 1
@@ -48,8 +48,8 @@ def test_ignore_localhost_and_httpbin(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_hosts=["httpbin.org"], ignore_localhost=True) as cass:
urlopen(f"http://httpbin.org:{httpbin.port}")
urlopen("http://httpbin.org:{}".format(httpbin.port))
urlopen(f"http://localhost:{httpbin.port}")
urlopen("http://localhost:{}".format(httpbin.port))
assert len(cass) == 0
@@ -57,12 +57,12 @@ def test_ignore_localhost_twice(tmpdir, httpbin):
with overridden_dns({"httpbin.org": "127.0.0.1"}):
cass_file = str(tmpdir.join("filter_qs.yaml"))
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
urlopen(f"http://localhost:{httpbin.port}")
urlopen("http://localhost:{}".format(httpbin.port))
assert len(cass) == 0
urlopen(f"http://httpbin.org:{httpbin.port}")
urlopen("http://httpbin.org:{}".format(httpbin.port))
assert len(cass) == 1
with vcr.use_cassette(cass_file, ignore_localhost=True) as cass:
assert len(cass) == 1
urlopen(f"http://localhost:{httpbin.port}")
urlopen("http://localhost:{}".format(httpbin.port))
urlopen(f"http://httpbin.org:{httpbin.port}")
urlopen("http://httpbin.org:{}".format(httpbin.port))
assert len(cass) == 1


@@ -72,12 +72,7 @@ def test_method_matcher(cassette, httpbin, httpbin_secure):
@pytest.mark.parametrize(
"uri",
"uri", [DEFAULT_URI, "http://httpbin.org/get?p2=q2&p1=q1", "http://httpbin.org/get?p2=q2&p1=q1"]
(
DEFAULT_URI,
"http://httpbin.org/get?p2=q2&p1=q1",
"http://httpbin.org/get?p2=q2&p1=q1",
),
)
def test_default_matcher_matches(cassette, uri, httpbin, httpbin_secure):
uri = _replace_httpbin(uri, httpbin, httpbin_secure)


@@ -3,7 +3,6 @@ from urllib.request import urlopen
import pytest
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException
def test_making_extra_request_raises_exception(tmpdir, httpbin):
@@ -19,5 +18,5 @@ def test_making_extra_request_raises_exception(tmpdir, httpbin):
with vcr.use_cassette(str(tmpdir.join("test.json")), match_on=["method"]):
assert urlopen(httpbin.url + "/status/200").getcode() == 200
assert urlopen(httpbin.url + "/status/201").getcode() == 201
with pytest.raises(CannotOverwriteExistingCassetteException):
with pytest.raises(Exception):
urlopen(httpbin.url + "/status/200")

View File

@@ -1,9 +1,9 @@
# -*- coding: utf-8 -*-
"""Test using a proxy.""" """Test using a proxy."""
import asyncio
import http.server import http.server
import multiprocessing
import socketserver import socketserver
import threading
from urllib.request import urlopen from urllib.request import urlopen
import pytest import pytest
@@ -30,51 +30,20 @@ class Proxy(http.server.SimpleHTTPRequestHandler):
# In Python 2 the response is an addinfourl instance. # In Python 2 the response is an addinfourl instance.
status = upstream_response.code status = upstream_response.code
headers = upstream_response.info().items() headers = upstream_response.info().items()
self.log_request(status) self.send_response(status, upstream_response.msg)
self.send_response_only(status, upstream_response.msg)
for header in headers: for header in headers:
self.send_header(*header) self.send_header(*header)
self.end_headers() self.end_headers()
self.copyfile(upstream_response, self.wfile) self.copyfile(upstream_response, self.wfile)
def do_CONNECT(self):
host, port = self.path.split(":")
asyncio.run(self._tunnel(host, port, self.connection))
async def _tunnel(self, host, port, client_sock):
target_r, target_w = await asyncio.open_connection(host=host, port=port)
self.send_response(http.HTTPStatus.OK)
self.end_headers()
source_r, source_w = await asyncio.open_connection(sock=client_sock)
async def channel(reader, writer):
while True:
data = await reader.read(1024)
if not data:
break
writer.write(data)
await writer.drain()
writer.close()
await writer.wait_closed()
await asyncio.gather(
channel(target_r, source_w),
channel(source_r, target_w),
)
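The new do_CONNECT handler is what lets this plain HTTP test proxy carry HTTPS: the client asks for a raw tunnel and the two channel() coroutines shuttle bytes in both directions until either side closes. What a client does against such a proxy, spelled out with the standard library (proxy host and port are illustrative):
import http.client

# Connect to the proxy itself, ask it to CONNECT to the real host, then
# speak TLS through the resulting tunnel as if talking to it directly.
conn = http.client.HTTPSConnection("127.0.0.1", 8080)
conn.set_tunnel("httpbin.org", 443)
conn.request("GET", "/get")
print(conn.getresponse().status)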
@pytest.fixture(scope="session") @pytest.fixture(scope="session")
def proxy_server(): def proxy_server():
with socketserver.ThreadingTCPServer(("", 0), Proxy) as httpd: httpd = socketserver.ThreadingTCPServer(("", 0), Proxy)
proxy_process = threading.Thread(target=httpd.serve_forever) proxy_process = multiprocessing.Process(target=httpd.serve_forever)
proxy_process.start() proxy_process.start()
yield "http://{}:{}".format(*httpd.server_address) yield "http://{}:{}".format(*httpd.server_address)
httpd.shutdown() proxy_process.terminate()
proxy_process.join()
def test_use_proxy(tmpdir, httpbin, proxy_server): def test_use_proxy(tmpdir, httpbin, proxy_server):
@@ -82,26 +51,8 @@ def test_use_proxy(tmpdir, httpbin, proxy_server):
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))): with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
response = requests.get(httpbin.url, proxies={"http": proxy_server}) response = requests.get(httpbin.url, proxies={"http": proxy_server})
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette: with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))) as cassette:
cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server}) cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})
assert cassette_response.headers == response.headers assert cassette_response.headers == response.headers
assert cassette.play_count == 1 assert cassette.play_count == 1
def test_use_https_proxy(tmpdir, httpbin_secure, proxy_server):
"""Ensure that it works with an HTTPS proxy."""
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
response = requests.get(httpbin_secure.url, proxies={"https": proxy_server})
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
cassette_response = requests.get(
httpbin_secure.url,
proxies={"https": proxy_server},
)
assert cassette_response.headers == response.headers
assert cassette.play_count == 1
# The cassette URL points to httpbin, not the proxy
assert cassette.requests[0].url == httpbin_secure.url + "/"


@@ -3,7 +3,6 @@ from urllib.request import urlopen
import pytest
import vcr
from vcr.errors import CannotOverwriteExistingCassetteException
def test_once_record_mode(tmpdir, httpbin):
@@ -19,7 +18,7 @@ def test_once_record_mode(tmpdir, httpbin):
# the first time, it's played from the cassette.
# but, try to access something else from the same cassette, and an
# exception is raised.
with pytest.raises(CannotOverwriteExistingCassetteException):
with pytest.raises(Exception):
urlopen(httpbin.url + "/get").read()
@@ -95,7 +94,7 @@ def test_new_episodes_record_mode_two_times(tmpdir, httpbin):
assert urlopen(url).read() == original_second_response
# now that we are back in once mode, this should raise
# an error.
with pytest.raises(CannotOverwriteExistingCassetteException):
with pytest.raises(Exception):
urlopen(url).read()
@@ -125,7 +124,7 @@ def test_none_record_mode(tmpdir, httpbin):
# raise hell.
testfile = str(tmpdir.join("recordmode.yml"))
with vcr.use_cassette(testfile, record_mode=vcr.mode.NONE):
with pytest.raises(CannotOverwriteExistingCassetteException):
with pytest.raises(Exception):
urlopen(httpbin.url).read()
@@ -141,5 +140,5 @@ def test_none_record_mode_with_existing_cassette(tmpdir, httpbin):
urlopen(httpbin.url).read()
assert cass.play_count == 1
# but if I try to hit the net, raise an exception.
with pytest.raises(CannotOverwriteExistingCassetteException):
with pytest.raises(Exception):
urlopen(httpbin.url + "/get").read()


@@ -1,7 +1,5 @@
from urllib.request import urlopen from urllib.request import urlopen
import pytest
import vcr import vcr
@@ -13,29 +11,27 @@ def false_matcher(r1, r2):
return False return False
@pytest.mark.online def test_registered_true_matcher(tmpdir, mockbin_request_url):
def test_registered_true_matcher(tmpdir, httpbin):
my_vcr = vcr.VCR() my_vcr = vcr.VCR()
my_vcr.register_matcher("true", true_matcher) my_vcr.register_matcher("true", true_matcher)
testfile = str(tmpdir.join("test.yml")) testfile = str(tmpdir.join("test.yml"))
with my_vcr.use_cassette(testfile, match_on=["true"]): with my_vcr.use_cassette(testfile, match_on=["true"]):
# These 2 different urls are stored as the same request # These 2 different urls are stored as the same request
urlopen(httpbin.url) urlopen(mockbin_request_url)
urlopen(httpbin.url + "/get") urlopen(mockbin_request_url + "/get")
with my_vcr.use_cassette(testfile, match_on=["true"]): with my_vcr.use_cassette(testfile, match_on=["true"]):
# I can get the response twice even though I only asked for it once # I can get the response twice even though I only asked for it once
urlopen(httpbin.url) urlopen(mockbin_request_url)
urlopen(httpbin.url) urlopen(mockbin_request_url)
@pytest.mark.online def test_registered_false_matcher(tmpdir, mockbin_request_url):
def test_registered_false_matcher(tmpdir, httpbin):
my_vcr = vcr.VCR() my_vcr = vcr.VCR()
my_vcr.register_matcher("false", false_matcher) my_vcr.register_matcher("false", false_matcher)
testfile = str(tmpdir.join("test.yml")) testfile = str(tmpdir.join("test.yml"))
with my_vcr.use_cassette(testfile, match_on=["false"]) as cass: with my_vcr.use_cassette(testfile, match_on=["false"]) as cass:
# These 2 different urls are stored as different requests # These 2 different urls are stored as different requests
urlopen(httpbin.url) urlopen(mockbin_request_url)
urlopen(httpbin.url + "/get") urlopen(mockbin_request_url + "/get")
assert len(cass) == 2 assert len(cass) == 2


@@ -1,17 +1,16 @@
# -*- coding: utf-8 -*-
"""Tests for cassettes with custom persistence""" """Tests for cassettes with custom persistence"""
# External imports # External imports
import os import os
from urllib.request import urlopen from urllib.request import urlopen
import pytest
# Internal imports # Internal imports
import vcr import vcr
from vcr.persisters.filesystem import CassetteDecodeError, CassetteNotFoundError, FilesystemPersister from vcr.persisters.filesystem import FilesystemPersister
class CustomFilesystemPersister: class CustomFilesystemPersister(object):
"""Behaves just like default FilesystemPersister but adds .test extension """Behaves just like default FilesystemPersister but adds .test extension
to the cassette file""" to the cassette file"""
@@ -26,19 +25,6 @@ class CustomFilesystemPersister:
FilesystemPersister.save_cassette(cassette_path, cassette_dict, serializer) FilesystemPersister.save_cassette(cassette_path, cassette_dict, serializer)
class BadPersister(FilesystemPersister):
"""A bad persister that raises different errors."""
@staticmethod
def load_cassette(cassette_path, serializer):
if "nonexistent" in cassette_path:
raise CassetteNotFoundError()
elif "encoding" in cassette_path:
raise CassetteDecodeError()
else:
raise ValueError("buggy persister")
def test_save_cassette_with_custom_persister(tmpdir, httpbin):
"""Ensure you can save a cassette using custom persister"""
my_vcr = vcr.VCR()
@@ -66,23 +52,4 @@ def test_load_cassette_with_custom_persister(tmpdir, httpbin):
with my_vcr.use_cassette(test_fixture, serializer="json"):
response = urlopen(httpbin.url).read()
assert b"HTTP Request &amp; Response Service" in response
assert b"difficult sometimes" in response
def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
"""
Ensure expected errors from persister are swallowed while unexpected ones
are passed up the call stack.
"""
my_vcr = vcr.VCR()
my_vcr.register_persister(BadPersister)
with my_vcr.use_cassette("bad/nonexistent") as cass:
assert len(cass) == 0
with my_vcr.use_cassette("bad/encoding") as cass:
assert len(cass) == 0
with pytest.raises(ValueError):
with my_vcr.use_cassette("bad/buggy") as cass:
pass
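For contrast with BadPersister above, registering a well-behaved custom persister is just a pair of static methods plus register_persister; a small sketch that reuses the stock FilesystemPersister under an extra directory prefix (the prefix is made up for the example):
import vcr
from vcr.persisters.filesystem import FilesystemPersister


class PrefixedPersister:
    """Store every cassette below a fixed directory (illustrative only)."""

    @staticmethod
    def load_cassette(cassette_path, serializer):
        return FilesystemPersister.load_cassette("cassettes/" + cassette_path, serializer)

    @staticmethod
    def save_cassette(cassette_path, cassette_dict, serializer):
        FilesystemPersister.save_cassette("cassettes/" + cassette_path, cassette_dict, serializer)


my_vcr = vcr.VCR()
my_vcr.register_persister(PrefixedPersister)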


@@ -1,12 +1,11 @@
"""Test requests' interaction with vcr""" """Test requests' interaction with vcr"""
import pytest import pytest
from assertions import assert_cassette_empty, assert_is_json
import vcr import vcr
from ..assertions import assert_cassette_empty, assert_is_json_bytes
requests = pytest.importorskip("requests") requests = pytest.importorskip("requests")
from requests.exceptions import ConnectionError # noqa E402
def test_status_code(httpbin_both, tmpdir): def test_status_code(httpbin_both, tmpdir):
@@ -115,6 +114,22 @@ def test_post_chunked_binary(tmpdir, httpbin):
assert req1 == req2 assert req1 == req2
@pytest.mark.skipif("sys.version_info >= (3, 6)", strict=True, raises=ConnectionError)
def test_post_chunked_binary_secure(tmpdir, httpbin_secure):
"""Ensure that we can send chunked binary without breaking while trying to concatenate bytes with str."""
data1 = iter([b"data", b"to", b"send"])
data2 = iter([b"data", b"to", b"send"])
url = httpbin_secure.url + "/post"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req1 = requests.post(url, data1).content
print(req1)
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
req2 = requests.post(url, data2).content
assert req1 == req2
def test_redirects(tmpdir, httpbin_both): def test_redirects(tmpdir, httpbin_both):
"""Ensure that we can handle redirects""" """Ensure that we can handle redirects"""
url = httpbin_both + "/redirect-to?url=bytes/1024" url = httpbin_both + "/redirect-to?url=bytes/1024"
@@ -129,17 +144,6 @@ def test_redirects(tmpdir, httpbin_both):
assert cass.play_count == 2 assert cass.play_count == 2
def test_raw_stream(tmpdir, httpbin):
expected_response = requests.get(httpbin.url, stream=True)
expected_content = b"".join(expected_response.raw.stream())
for _ in range(2): # one for recording, one for cassette reply
with vcr.use_cassette(str(tmpdir.join("raw_stream.yaml"))):
actual_response = requests.get(httpbin.url, stream=True)
actual_content = b"".join(actual_response.raw.stream())
assert actual_content == expected_content
def test_cross_scheme(tmpdir, httpbin_secure, httpbin): def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
"""Ensure that requests between schemes are treated separately""" """Ensure that requests between schemes are treated separately"""
# First fetch a url under http, and then again under https and then # First fetch a url under http, and then again under https and then
@@ -152,41 +156,20 @@ def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
assert len(cass) == 2 assert len(cass) == 2
def test_gzip__decode_compressed_response_false(tmpdir, httpbin_both): def test_gzip(tmpdir, httpbin_both):
""" """
Ensure that requests (actually urllib3) is able to automatically decompress Ensure that requests (actually urllib3) is able to automatically decompress
the response body the response body
""" """
for _ in range(2): # one for recording, one for re-playing
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = requests.get(httpbin_both + "/gzip")
assert response.headers["content-encoding"] == "gzip" # i.e. not removed
assert_is_json_bytes(response.content) # i.e. uncompressed bytes
def test_gzip__decode_compressed_response_true(tmpdir, httpbin_both):
url = httpbin_both + "/gzip" url = httpbin_both + "/gzip"
response = requests.get(url)
expected_response = requests.get(url) with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
expected_content = expected_response.content response = requests.get(url)
assert expected_response.headers["content-encoding"] == "gzip" # self-test assert_is_json(response.content)
with vcr.use_cassette( with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
str(tmpdir.join("decode_compressed.yaml")), assert_is_json(response.content)
decode_compressed_response=True,
) as cassette:
r = requests.get(url)
assert r.headers["content-encoding"] == "gzip" # i.e. not removed
assert r.content == expected_content
# Has the cassette body been decompressed?
cassette_response_body = cassette.responses[0]["body"]["string"]
assert isinstance(cassette_response_body, str)
with vcr.use_cassette(str(tmpdir.join("decode_compressed.yaml")), decode_compressed_response=True):
r = requests.get(url)
assert "content-encoding" not in r.headers # i.e. removed
assert r.content == expected_content
def test_session_and_connection_close(tmpdir, httpbin): def test_session_and_connection_close(tmpdir, httpbin):
@@ -266,7 +249,7 @@ def test_nested_cassettes_with_session_created_before_nesting(httpbin_both, tmpd
def test_post_file(tmpdir, httpbin_both): def test_post_file(tmpdir, httpbin_both):
"""Ensure that we handle posting a file.""" """Ensure that we handle posting a file."""
url = httpbin_both + "/post" url = httpbin_both + "/post"
with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open(".editorconfig", "rb") as f: with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open("tox.ini", "rb") as f:
original_response = requests.post(url, f).content original_response = requests.post(url, f).content
# This also tests that we do the right thing with matching the body when they are files. # This also tests that we do the right thing with matching the body when they are files.
@@ -274,10 +257,10 @@ def test_post_file(tmpdir, httpbin_both):
str(tmpdir.join("post_file.yaml")), str(tmpdir.join("post_file.yaml")),
match_on=("method", "scheme", "host", "port", "path", "query", "body"), match_on=("method", "scheme", "host", "port", "path", "query", "body"),
) as cass: ) as cass:
with open(".editorconfig", "rb") as f: with open("tox.ini", "rb") as f:
editorconfig = f.read() tox_content = f.read()
assert cass.requests[0].body.read() == editorconfig assert cass.requests[0].body.read() == tox_content
with open(".editorconfig", "rb") as f: with open("tox.ini", "rb") as f:
new_response = requests.post(url, f).content new_response = requests.post(url, f).content
assert original_response == new_response assert original_response == new_response


@@ -2,9 +2,9 @@ import http.client as httplib
import json
import zlib
import vcr
from assertions import assert_is_json
from ..assertions import assert_is_json_bytes
import vcr
def _headers_are_case_insensitive(host, port):
@@ -84,7 +84,7 @@ def test_original_decoded_response_is_not_modified(tmpdir, httpbin):
inside = conn.getresponse()
assert "content-encoding" not in inside.headers
assert_is_json_bytes(inside.read())
assert_is_json(inside.read())
def _make_before_record_response(fields, replacement="[REDACTED]"):
@@ -120,8 +120,8 @@ def test_original_response_is_not_modified_by_before_filter(tmpdir, httpbin):
# The scrubbed field should be the same, because no cassette existed.
# Furthermore, the responses should be identical.
inside_body = json.loads(inside.read())
inside_body = json.loads(inside.read().decode("utf-8"))
outside_body = json.loads(outside.read())
outside_body = json.loads(outside.read().decode("utf-8"))
assert not inside_body[field_to_scrub] == replacement
assert inside_body[field_to_scrub] == outside_body[field_to_scrub]
@@ -131,5 +131,5 @@ def test_original_response_is_not_modified_by_before_filter(tmpdir, httpbin):
conn.request("GET", "/get")
inside = conn.getresponse()
inside_body = json.loads(inside.read())
inside_body = json.loads(inside.read().decode("utf-8"))
assert inside_body[field_to_scrub] == replacement


@@ -1,45 +1,20 @@
# -*- coding: utf-8 -*-
"""Test requests' interaction with vcr""" """Test requests' interaction with vcr"""
import asyncio
import functools
import inspect
import json import json
import pytest import pytest
from assertions import assert_cassette_empty, assert_is_json
import vcr import vcr
from vcr.errors import CannotOverwriteExistingCassetteException from vcr.errors import CannotOverwriteExistingCassetteException
from ..assertions import assert_cassette_empty, assert_is_json_bytes
tornado = pytest.importorskip("tornado") tornado = pytest.importorskip("tornado")
gen = pytest.importorskip("tornado.gen")
http = pytest.importorskip("tornado.httpclient") http = pytest.importorskip("tornado.httpclient")
# whether the current version of Tornado supports the raise_error argument for # whether the current version of Tornado supports the raise_error argument for
# fetch(). # fetch().
supports_raise_error = tornado.version_info >= (4,) supports_raise_error = tornado.version_info >= (4,)
raise_error_for_response_code_only = tornado.version_info >= (6,)
def gen_test(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
async def coro():
return await gen.coroutine(func)(*args, **kwargs)
return asyncio.run(coro())
# Patch the signature so pytest can inject fixtures
# we can't use wrapt.decorator because it returns a generator function
wrapper.__signature__ = inspect.signature(func)
return wrapper
@pytest.fixture(params=["https", "http"])
def scheme(request):
"""Fixture that returns both http and https."""
return request.param
@pytest.fixture(params=["simple", "curl", "default"]) @pytest.fixture(params=["simple", "curl", "default"])
@@ -69,8 +44,7 @@ def post(client, url, data=None, **kwargs):
return client.fetch(http.HTTPRequest(url, method="POST", **kwargs)) return client.fetch(http.HTTPRequest(url, method="POST", **kwargs))
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_status_code(get_client, scheme, tmpdir): def test_status_code(get_client, scheme, tmpdir):
"""Ensure that we can read the status code""" """Ensure that we can read the status code"""
url = scheme + "://httpbin.org/" url = scheme + "://httpbin.org/"
@@ -82,8 +56,7 @@ def test_status_code(get_client, scheme, tmpdir):
assert 1 == cass.play_count assert 1 == cass.play_count
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_headers(get_client, scheme, tmpdir): def test_headers(get_client, scheme, tmpdir):
"""Ensure that we can read the headers back""" """Ensure that we can read the headers back"""
url = scheme + "://httpbin.org/" url = scheme + "://httpbin.org/"
@@ -95,8 +68,7 @@ def test_headers(get_client, scheme, tmpdir):
assert 1 == cass.play_count assert 1 == cass.play_count
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_body(get_client, tmpdir, scheme): def test_body(get_client, tmpdir, scheme):
"""Ensure the responses are all identical enough""" """Ensure the responses are all identical enough"""
@@ -109,21 +81,20 @@ def test_body(get_client, tmpdir, scheme):
assert 1 == cass.play_count assert 1 == cass.play_count
@gen_test @pytest.mark.gen_test
def test_effective_url(get_client, tmpdir, httpbin): def test_effective_url(get_client, scheme, tmpdir):
"""Ensure that the effective_url is captured""" """Ensure that the effective_url is captured"""
url = httpbin.url + "/redirect/1" url = scheme + "://mockbin.org/redirect/301?url=/html"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))): with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
effective_url = (yield get(get_client(), url)).effective_url effective_url = (yield get(get_client(), url)).effective_url
assert effective_url == httpbin.url + "/get" assert effective_url == scheme + "://mockbin.org/redirect/301/0"
with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass: with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass:
assert effective_url == (yield get(get_client(), url)).effective_url assert effective_url == (yield get(get_client(), url)).effective_url
assert 1 == cass.play_count assert 1 == cass.play_count
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_auth(get_client, tmpdir, scheme): def test_auth(get_client, tmpdir, scheme):
"""Ensure that we can handle basic auth""" """Ensure that we can handle basic auth"""
auth = ("user", "passwd") auth = ("user", "passwd")
@@ -138,8 +109,7 @@ def test_auth(get_client, tmpdir, scheme):
assert 1 == cass.play_count assert 1 == cass.play_count
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_auth_failed(get_client, tmpdir, scheme): def test_auth_failed(get_client, tmpdir, scheme):
"""Ensure that we can save failed auth statuses""" """Ensure that we can save failed auth statuses"""
auth = ("user", "wrongwrongwrong") auth = ("user", "wrongwrongwrong")
@@ -162,8 +132,7 @@ def test_auth_failed(get_client, tmpdir, scheme):
assert 1 == cass.play_count assert 1 == cass.play_count
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_post(get_client, tmpdir, scheme): def test_post(get_client, tmpdir, scheme):
"""Ensure that we can post and cache the results""" """Ensure that we can post and cache the results"""
data = {"key1": "value1", "key2": "value2"} data = {"key1": "value1", "key2": "value2"}
@@ -178,10 +147,10 @@ def test_post(get_client, tmpdir, scheme):
assert 1 == cass.play_count assert 1 == cass.play_count
@gen_test @pytest.mark.gen_test
def test_redirects(get_client, tmpdir, httpbin): def test_redirects(get_client, tmpdir, scheme):
"""Ensure that we can handle redirects""" """Ensure that we can handle redirects"""
url = httpbin + "/redirect-to?url=bytes/1024&status_code=301" url = scheme + "://mockbin.org/redirect/301?url=bytes/1024"
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))): with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
content = (yield get(get_client(), url)).body content = (yield get(get_client(), url)).body
@@ -190,8 +159,7 @@ def test_redirects(get_client, tmpdir, httpbin):
assert cass.play_count == 1 assert cass.play_count == 1
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_cross_scheme(get_client, tmpdir, scheme): def test_cross_scheme(get_client, tmpdir, scheme):
"""Ensure that requests between schemes are treated separately""" """Ensure that requests between schemes are treated separately"""
# First fetch a url under http, and then again under https and then # First fetch a url under http, and then again under https and then
@@ -210,8 +178,7 @@ def test_cross_scheme(get_client, tmpdir, scheme):
assert cass.play_count == 2 assert cass.play_count == 2
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_gzip(get_client, tmpdir, scheme): def test_gzip(get_client, tmpdir, scheme):
""" """
Ensure that httpclient is able to automatically decompress the response Ensure that httpclient is able to automatically decompress the response
@@ -228,16 +195,15 @@ def test_gzip(get_client, tmpdir, scheme):
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))): with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = yield get(get_client(), url, **kwargs) response = yield get(get_client(), url, **kwargs)
assert_is_json_bytes(response.body) assert_is_json(response.body)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass: with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass:
response = yield get(get_client(), url, **kwargs) response = yield get(get_client(), url, **kwargs)
assert_is_json_bytes(response.body) assert_is_json(response.body)
assert 1 == cass.play_count assert 1 == cass.play_count
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_https_with_cert_validation_disabled(get_client, tmpdir): def test_https_with_cert_validation_disabled(get_client, tmpdir):
cass_path = str(tmpdir.join("cert_validation_disabled.yaml")) cass_path = str(tmpdir.join("cert_validation_disabled.yaml"))
@@ -249,13 +215,13 @@ def test_https_with_cert_validation_disabled(get_client, tmpdir):
assert 1 == cass.play_count assert 1 == cass.play_count
@gen_test @pytest.mark.gen_test
def test_unsupported_features_raises_in_future(get_client, tmpdir): def test_unsupported_features_raises_in_future(get_client, tmpdir):
"""Ensure that the exception for an AsyncHTTPClient feature not being """Ensure that the exception for an AsyncHTTPClient feature not being
supported is raised inside the future.""" supported is raised inside the future."""
def callback(chunk): def callback(chunk):
raise AssertionError("Did not expect to be called.") assert False, "Did not expect to be called."
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))): with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
future = get(get_client(), "http://httpbin.org", streaming_callback=callback) future = get(get_client(), "http://httpbin.org", streaming_callback=callback)
@@ -267,31 +233,23 @@ def test_unsupported_features_raises_in_future(get_client, tmpdir):
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3") @pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
@pytest.mark.skipif( @pytest.mark.gen_test
raise_error_for_response_code_only,
reason="raise_error only ignores HTTPErrors due to response code",
)
@gen_test
def test_unsupported_features_raise_error_disabled(get_client, tmpdir): def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
"""Ensure that the exception for an AsyncHTTPClient feature not being """Ensure that the exception for an AsyncHTTPClient feature not being
supported is not raised if raise_error=False.""" supported is not raised if raise_error=False."""
def callback(chunk): def callback(chunk):
raise AssertionError("Did not expect to be called.") assert False, "Did not expect to be called."
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))): with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
response = yield get( response = yield get(
get_client(), get_client(), "http://httpbin.org", streaming_callback=callback, raise_error=False
"http://httpbin.org",
streaming_callback=callback,
raise_error=False,
) )
assert "not yet supported by VCR" in str(response.error) assert "not yet supported by VCR" in str(response.error)
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir): def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
"""Ensure that CannotOverwriteExistingCassetteException is raised inside """Ensure that CannotOverwriteExistingCassetteException is raised inside
the future.""" the future."""
@@ -307,11 +265,7 @@ def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3") @pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
@pytest.mark.skipif( @pytest.mark.gen_test
raise_error_for_response_code_only,
reason="raise_error only ignores HTTPErrors due to response code",
)
@gen_test
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir): def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
"""Ensure that CannotOverwriteExistingCassetteException is not raised if """Ensure that CannotOverwriteExistingCassetteException is not raised if
raise_error=False in the fetch() call.""" raise_error=False in the fetch() call."""
@@ -325,14 +279,14 @@ def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
assert isinstance(response.error, CannotOverwriteExistingCassetteException) assert isinstance(response.error, CannotOverwriteExistingCassetteException)
@gen_test @pytest.mark.gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml")) @vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
def test_tornado_with_decorator_use_cassette(get_client): def test_tornado_with_decorator_use_cassette(get_client):
response = yield get_client().fetch(http.HTTPRequest("http://www.google.com/", method="GET")) response = yield get_client().fetch(http.HTTPRequest("http://www.google.com/", method="GET"))
assert response.body.decode("utf-8") == "not actually google" assert response.body.decode("utf-8") == "not actually google"
@gen_test @pytest.mark.gen_test
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml")) @vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
def test_tornado_exception_can_be_caught(get_client): def test_tornado_exception_can_be_caught(get_client):
try: try:
@@ -346,8 +300,7 @@ def test_tornado_exception_can_be_caught(get_client):
assert e.code == 404 assert e.code == 404
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_existing_references_get_patched(tmpdir): def test_existing_references_get_patched(tmpdir):
from tornado.httpclient import AsyncHTTPClient from tornado.httpclient import AsyncHTTPClient
@@ -360,8 +313,7 @@ def test_existing_references_get_patched(tmpdir):
assert cass.play_count == 1 assert cass.play_count == 1
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_existing_instances_get_patched(get_client, tmpdir): def test_existing_instances_get_patched(get_client, tmpdir):
"""Ensure that existing instances of AsyncHTTPClient get patched upon """Ensure that existing instances of AsyncHTTPClient get patched upon
entering VCR context.""" entering VCR context."""
@@ -376,8 +328,7 @@ def test_existing_instances_get_patched(get_client, tmpdir):
assert cass.play_count == 1 assert cass.play_count == 1
@pytest.mark.online @pytest.mark.gen_test
@gen_test
def test_request_time_is_set(get_client, tmpdir): def test_request_time_is_set(get_client, tmpdir):
"""Ensures that the request_time on HTTPResponses is set.""" """Ensures that the request_time on HTTPResponses is set."""


@@ -1,3 +1,4 @@
# -*- coding: utf-8 -*-
"""Integration tests with urllib2""" """Integration tests with urllib2"""
import ssl import ssl
@@ -5,13 +6,11 @@ from urllib.parse import urlencode
from urllib.request import urlopen from urllib.request import urlopen
import pytest_httpbin.certs import pytest_httpbin.certs
from pytest import mark from assertions import assert_cassette_has_one_response
# Internal imports # Internal imports
import vcr import vcr
from ..assertions import assert_cassette_has_one_response
def urlopen_with_cafile(*args, **kwargs): def urlopen_with_cafile(*args, **kwargs):
context = ssl.create_default_context(cafile=pytest_httpbin.certs.where()) context = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
@@ -57,14 +56,13 @@ def test_response_headers(httpbin_both, tmpdir):
assert sorted(open1) == sorted(open2) assert sorted(open1) == sorted(open2)
@mark.online def test_effective_url(tmpdir):
def test_effective_url(tmpdir, httpbin):
"""Ensure that the effective_url is captured""" """Ensure that the effective_url is captured"""
url = httpbin.url + "/redirect-to?url=.%2F&status_code=301" url = "http://mockbin.org/redirect/301"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))): with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
effective_url = urlopen_with_cafile(url).geturl() effective_url = urlopen_with_cafile(url).geturl()
assert effective_url == httpbin.url + "/" assert effective_url == "http://mockbin.org/redirect/301/0"
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))): with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
assert effective_url == urlopen_with_cafile(url).geturl() assert effective_url == urlopen_with_cafile(url).geturl()


@@ -4,21 +4,19 @@
import pytest import pytest
import pytest_httpbin import pytest_httpbin
from assertions import assert_cassette_empty, assert_is_json
import vcr import vcr
from vcr.patch import force_reset from vcr.patch import force_reset
from vcr.stubs.compat import get_headers from vcr.stubs.compat import get_headers
from ..assertions import assert_cassette_empty, assert_is_json_bytes
urllib3 = pytest.importorskip("urllib3") urllib3 = pytest.importorskip("urllib3")
@pytest.fixture(scope="module") @pytest.fixture(scope="module")
def verify_pool_mgr(): def verify_pool_mgr():
return urllib3.PoolManager( return urllib3.PoolManager(
cert_reqs="CERT_REQUIRED", cert_reqs="CERT_REQUIRED", ca_certs=pytest_httpbin.certs.where() # Force certificate check.
ca_certs=pytest_httpbin.certs.where(), # Force certificate check.
) )
@@ -99,10 +97,9 @@ def test_post(tmpdir, httpbin_both, verify_pool_mgr):
assert req1 == req2 assert req1 == req2
@pytest.mark.online def test_redirects(tmpdir, verify_pool_mgr):
def test_redirects(tmpdir, verify_pool_mgr, httpbin):
"""Ensure that we can handle redirects""" """Ensure that we can handle redirects"""
url = httpbin.url + "/redirect/1" url = "http://mockbin.org/redirect/301"
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))): with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
content = verify_pool_mgr.request("GET", url).data content = verify_pool_mgr.request("GET", url).data
@@ -138,10 +135,10 @@ def test_gzip(tmpdir, httpbin_both, verify_pool_mgr):
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))): with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
response = verify_pool_mgr.request("GET", url) response = verify_pool_mgr.request("GET", url)
assert_is_json_bytes(response.data) assert_is_json(response.data)
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))): with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))):
assert_is_json_bytes(response.data) assert_is_json(response.data)
def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure, pool_mgr): def test_https_with_cert_validation_disabled(tmpdir, httpbin_secure, pool_mgr):


@@ -5,10 +5,10 @@ from xmlrpc.server import SimpleXMLRPCServer
import pytest import pytest
import vcr
requests = pytest.importorskip("requests") requests = pytest.importorskip("requests")
import vcr # NOQA
def test_domain_redirect(): def test_domain_redirect():
"""Ensure that redirects across domains are considered unique""" """Ensure that redirects across domains are considered unique"""
@@ -52,7 +52,6 @@ def test_flickr_multipart_upload(httpbin, tmpdir):
assert cass.play_count == 1 assert cass.play_count == 1
@pytest.mark.online
def test_flickr_should_respond_with_200(tmpdir): def test_flickr_should_respond_with_200(tmpdir):
testfile = str(tmpdir.join("flickr.yml")) testfile = str(tmpdir.join("flickr.yml"))
with vcr.use_cassette(testfile): with vcr.use_cassette(testfile):
@@ -63,15 +62,14 @@ def test_flickr_should_respond_with_200(tmpdir):
def test_cookies(tmpdir, httpbin): def test_cookies(tmpdir, httpbin):
testfile = str(tmpdir.join("cookies.yml")) testfile = str(tmpdir.join("cookies.yml"))
with vcr.use_cassette(testfile): with vcr.use_cassette(testfile):
with requests.Session() as s: s = requests.Session()
s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2") s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2")
assert s.cookies.keys() == ["k1", "k2"] assert s.cookies.keys() == ["k1", "k2"]
r2 = s.get(httpbin.url + "/cookies") r2 = s.get(httpbin.url + "/cookies")
assert sorted(r2.json()["cookies"].keys()) == ["k1", "k2"] assert sorted(r2.json()["cookies"].keys()) == ["k1", "k2"]
@pytest.mark.online
def test_amazon_doctype(tmpdir): def test_amazon_doctype(tmpdir):
# amazon gzips its homepage. For some reason, in requests 2.7, it's not # amazon gzips its homepage. For some reason, in requests 2.7, it's not
# getting gunzipped. # getting gunzipped.


@@ -11,7 +11,6 @@ import yaml
from vcr.cassette import Cassette from vcr.cassette import Cassette
from vcr.errors import UnhandledHTTPRequestError from vcr.errors import UnhandledHTTPRequestError
from vcr.patch import force_reset from vcr.patch import force_reset
from vcr.request import Request
from vcr.stubs import VCRHTTPSConnection from vcr.stubs import VCRHTTPSConnection
@@ -21,31 +20,15 @@ def test_cassette_load(tmpdir):
yaml.dump( yaml.dump(
{ {
"interactions": [ "interactions": [
{ {"request": {"body": "", "uri": "foo", "method": "GET", "headers": {}}, "response": "bar"}
"request": {"body": "", "uri": "foo", "method": "GET", "headers": {}}, ]
"response": "bar", }
}, )
],
},
),
) )
a_cassette = Cassette.load(path=str(a_file)) a_cassette = Cassette.load(path=str(a_file))
assert len(a_cassette) == 1 assert len(a_cassette) == 1
def test_cassette_load_nonexistent():
a_cassette = Cassette.load(path="something/nonexistent.yml")
assert len(a_cassette) == 0
def test_cassette_load_invalid_encoding(tmpdir):
a_file = tmpdir.join("invalid_encoding.yml")
with open(a_file, "wb") as fd:
fd.write(b"\xda")
a_cassette = Cassette.load(path=str(a_file))
assert len(a_cassette) == 0
def test_cassette_not_played(): def test_cassette_not_played():
a = Cassette("test") a = Cassette("test")
assert not a.play_count assert not a.play_count
@@ -114,7 +97,7 @@ def make_get_request():
@mock.patch("vcr.stubs.VCRHTTPResponse") @mock.patch("vcr.stubs.VCRHTTPResponse")
def test_function_decorated_with_use_cassette_can_be_invoked_multiple_times(*args): def test_function_decorated_with_use_cassette_can_be_invoked_multiple_times(*args):
decorated_function = Cassette.use(path="test")(make_get_request) decorated_function = Cassette.use(path="test")(make_get_request)
for _ in range(4): for i in range(4):
decorated_function() decorated_function()
@@ -160,7 +143,7 @@ def test_cassette_allow_playback_repeats():
a = Cassette("test", allow_playback_repeats=True) a = Cassette("test", allow_playback_repeats=True)
a.append("foo", "bar") a.append("foo", "bar")
a.append("other", "resp") a.append("other", "resp")
for _ in range(10): for x in range(10):
assert a.play_response("foo") == "bar" assert a.play_response("foo") == "bar"
assert a.play_count == 10 assert a.play_count == 10
assert a.all_played is False assert a.all_played is False
@@ -222,7 +205,7 @@ def test_nesting_cassette_context_managers(*args):
with contextlib.ExitStack() as exit_stack: with contextlib.ExitStack() as exit_stack:
first_cassette = exit_stack.enter_context(Cassette.use(path="test")) first_cassette = exit_stack.enter_context(Cassette.use(path="test"))
exit_stack.enter_context( exit_stack.enter_context(
mock.patch.object(first_cassette, "play_response", return_value=first_response), mock.patch.object(first_cassette, "play_response", return_value=first_response)
) )
assert_get_response_body_is("first_response") assert_get_response_body_is("first_response")
@@ -411,25 +394,3 @@ def test_find_requests_with_most_matches_many_similar_requests(mock_get_matchers
(1, ["method", "path"], [("query", "failed : query")]), (1, ["method", "path"], [("query", "failed : query")]),
(3, ["method", "path"], [("query", "failed : query")]), (3, ["method", "path"], [("query", "failed : query")]),
] ]
def test_used_interactions(tmpdir):
interactions = [
{"request": {"body": "", "uri": "foo1", "method": "GET", "headers": {}}, "response": "bar1"},
{"request": {"body": "", "uri": "foo2", "method": "GET", "headers": {}}, "response": "bar2"},
{"request": {"body": "", "uri": "foo3", "method": "GET", "headers": {}}, "response": "bar3"},
]
file = tmpdir.join("test_cassette.yml")
file.write(yaml.dump({"interactions": [interactions[0], interactions[1]]}))
cassette = Cassette.load(path=str(file))
request = Request._from_dict(interactions[1]["request"])
cassette.play_response(request)
assert len(cassette._played_interactions) < len(cassette._old_interactions)
request = Request._from_dict(interactions[2]["request"])
cassette.append(request, interactions[2]["response"])
assert len(cassette._new_interactions()) == 1
used_interactions = cassette._played_interactions + cassette._new_interactions()
assert len(used_interactions) == 2
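Illustrative aside (not part of the diff): test_used_interactions above exercises the drop_unused_requests option that the cassette.py and config.py hunks further down introduce. A minimal, hedged usage sketch (the cassette path is invented for illustration):

import vcr

# drop_unused_requests is the option added in the hunks below; when enabled,
# previously recorded interactions that were never played back are dropped
# from the cassette when it is saved again.
my_vcr = vcr.VCR(drop_unused_requests=True)

with my_vcr.use_cassette("example_cassette.yaml"):
    pass  # only interactions actually replayed or newly recorded here are kept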


@@ -55,18 +55,15 @@ from vcr.cassette import Cassette
], ],
) )
def test_CannotOverwriteExistingCassetteException_get_message( def test_CannotOverwriteExistingCassetteException_get_message(
mock_find_requests_with_most_matches, mock_find_requests_with_most_matches, most_matches, expected_message
most_matches,
expected_message,
): ):
mock_find_requests_with_most_matches.return_value = most_matches mock_find_requests_with_most_matches.return_value = most_matches
cassette = Cassette("path") cassette = Cassette("path")
failed_request = "request" failed_request = "request"
exception_message = errors.CannotOverwriteExistingCassetteException._get_message(cassette, "request") exception_message = errors.CannotOverwriteExistingCassetteException._get_message(cassette, "request")
expected = ( expected = (
f"Can't overwrite existing cassette ({cassette._path!r}) " "Can't overwrite existing cassette (%r) in your current record mode (%r).\n"
f"in your current record mode ({cassette.record_mode!r}).\n" "No match for the request (%r) was found.\n"
f"No match for the request ({failed_request!r}) was found.\n" "%s" % (cassette._path, cassette.record_mode, failed_request, expected_message)
f"{expected_message}"
) )
assert exception_message == expected assert exception_message == expected


@@ -197,7 +197,7 @@ def test_replace_json_post_data_parameters():
("six", "doesntexist"), ("six", "doesntexist"),
], ],
) )
request_data = json.loads(request.body) request_data = json.loads(request.body.decode("utf-8"))
expected_data = json.loads('{"one": "keep", "three": "tada", "four": "SHOUT"}') expected_data = json.loads('{"one": "keep", "three": "tada", "four": "SHOUT"}')
assert request_data == expected_data assert request_data == expected_data
@@ -208,8 +208,8 @@ def test_remove_json_post_data_parameters():
request = Request("POST", "http://google.com", body, {}) request = Request("POST", "http://google.com", body, {})
request.headers["Content-Type"] = "application/json" request.headers["Content-Type"] = "application/json"
remove_post_data_parameters(request, ["id"]) remove_post_data_parameters(request, ["id"])
request_body_json = json.loads(request.body) request_body_json = json.loads(request.body.decode("utf-8"))
expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}') expected_json = json.loads(b'{"foo": "bar", "baz": "qux"}'.decode("utf-8"))
assert request_body_json == expected_json assert request_body_json == expected_json
@@ -298,18 +298,6 @@ def test_decode_response_deflate():
assert decoded_response["headers"]["content-length"] == [str(len(body))] assert decoded_response["headers"]["content-length"] == [str(len(body))]
def test_decode_response_deflate_already_decompressed():
body = b"deflate message"
gzip_response = {
"body": {"string": body},
"headers": {
"content-encoding": ["deflate"],
},
}
decoded_response = decode_response(gzip_response)
assert decoded_response["body"]["string"] == body
def test_decode_response_gzip(): def test_decode_response_gzip():
body = b"gzip message" body = b"gzip message"
@@ -337,15 +325,3 @@ def test_decode_response_gzip():
decoded_response = decode_response(gzip_response) decoded_response = decode_response(gzip_response)
assert decoded_response["body"]["string"] == body assert decoded_response["body"]["string"] == body
assert decoded_response["headers"]["content-length"] == [str(len(body))] assert decoded_response["headers"]["content-length"] == [str(len(body))]
def test_decode_response_gzip_already_decompressed():
body = b"gzip message"
gzip_response = {
"body": {"string": body},
"headers": {
"content-encoding": ["gzip"],
},
}
decoded_response = decode_response(gzip_response)
assert decoded_response["body"]["string"] == body
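Illustrative aside (not part of the diff): the *_already_decompressed tests above suggest that decode_response tolerates a body that carries a content-encoding header but is not actually compressed. A hedged sketch of that behaviour using only the standard library:

import gzip

def decode_gzip_tolerant(body: bytes) -> bytes:
    # Assumption drawn from the tests above: try to decompress, but fall back
    # to the raw body when the payload is not really gzip data.
    try:
        return gzip.decompress(body)
    except (OSError, EOFError):
        return body

assert decode_gzip_tolerant(b"gzip message") == b"gzip message"
assert decode_gzip_tolerant(gzip.compress(b"gzip message")) == b"gzip message"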


@@ -63,9 +63,6 @@ boto3_bytes_headers = {
"Expect": b"100-continue", "Expect": b"100-continue",
"Content-Length": "21", "Content-Length": "21",
} }
chunked_headers = {
"Transfer-Encoding": "chunked",
}
@pytest.mark.parametrize( @pytest.mark.parametrize(
@@ -77,16 +74,10 @@ chunked_headers = {
), ),
( (
request.Request( request.Request(
"POST", "POST", "http://host.com/", "a=1&b=2", {"Content-Type": "application/x-www-form-urlencoded"}
"http://host.com/",
"a=1&b=2",
{"Content-Type": "application/x-www-form-urlencoded"},
), ),
request.Request( request.Request(
"POST", "POST", "http://host.com/", "b=2&a=1", {"Content-Type": "application/x-www-form-urlencoded"}
"http://host.com/",
"b=2&a=1",
{"Content-Type": "application/x-www-form-urlencoded"},
), ),
), ),
( (
@@ -95,38 +86,23 @@ chunked_headers = {
), ),
( (
request.Request( request.Request(
"POST", "POST", "http://host.com/", "a=1&b=2", {"Content-Type": "application/x-www-form-urlencoded"}
"http://host.com/",
"a=1&b=2",
{"Content-Type": "application/x-www-form-urlencoded"},
), ),
request.Request( request.Request(
"POST", "POST", "http://host.com/", "b=2&a=1", {"Content-Type": "application/x-www-form-urlencoded"}
"http://host.com/",
"b=2&a=1",
{"Content-Type": "application/x-www-form-urlencoded"},
), ),
), ),
( (
request.Request( request.Request(
"POST", "POST", "http://host.com/", '{"a": 1, "b": 2}', {"Content-Type": "application/json"}
"http://host.com/",
'{"a": 1, "b": 2}',
{"Content-Type": "application/json"},
), ),
request.Request( request.Request(
"POST", "POST", "http://host.com/", '{"b": 2, "a": 1}', {"content-type": "application/json"}
"http://host.com/",
'{"b": 2, "a": 1}',
{"content-type": "application/json"},
), ),
), ),
( (
request.Request( request.Request(
"POST", "POST", "http://host.com/", req1_body, {"User-Agent": "xmlrpclib", "Content-Type": "text/xml"}
"http://host.com/",
req1_body,
{"User-Agent": "xmlrpclib", "Content-Type": "text/xml"},
), ),
request.Request( request.Request(
"POST", "POST",
@@ -137,16 +113,10 @@ chunked_headers = {
), ),
( (
request.Request( request.Request(
"POST", "POST", "http://host.com/", '{"a": 1, "b": 2}', {"Content-Type": "application/json"}
"http://host.com/",
'{"a": 1, "b": 2}',
{"Content-Type": "application/json"},
), ),
request.Request( request.Request(
"POST", "POST", "http://host.com/", '{"b": 2, "a": 1}', {"content-type": "application/json"}
"http://host.com/",
'{"b": 2, "a": 1}',
{"content-type": "application/json"},
), ),
), ),
( (
@@ -154,36 +124,6 @@ chunked_headers = {
request.Request("POST", "http://aws.custom.com/", b"123", boto3_bytes_headers), request.Request("POST", "http://aws.custom.com/", b"123", boto3_bytes_headers),
request.Request("POST", "http://aws.custom.com/", b"123", boto3_bytes_headers), request.Request("POST", "http://aws.custom.com/", b"123", boto3_bytes_headers),
), ),
(
# chunked transfer encoding: decoded bytes versus encoded bytes
request.Request("POST", "scheme1://host1.test/", b"123456789_123456", chunked_headers),
request.Request(
"GET",
"scheme2://host2.test/",
b"10\r\n123456789_123456\r\n0\r\n\r\n",
chunked_headers,
),
),
(
# chunked transfer encoding: bytes iterator versus string iterator
request.Request(
"POST",
"scheme1://host1.test/",
iter([b"123456789_", b"123456"]),
chunked_headers,
),
request.Request("GET", "scheme2://host2.test/", iter(["123456789_", "123456"]), chunked_headers),
),
(
# chunked transfer encoding: bytes iterator versus single byte iterator
request.Request(
"POST",
"scheme1://host1.test/",
iter([b"123456789_", b"123456"]),
chunked_headers,
),
request.Request("GET", "scheme2://host2.test/", iter(b"123456789_123456"), chunked_headers),
),
], ],
) )
def test_body_matcher_does_match(r1, r2): def test_body_matcher_does_match(r1, r2):
@@ -199,16 +139,10 @@ def test_body_matcher_does_match(r1, r2):
), ),
( (
request.Request( request.Request(
"POST", "POST", "http://host.com/", '{"a": 1, "b": 3}', {"Content-Type": "application/json"}
"http://host.com/",
'{"a": 1, "b": 3}',
{"Content-Type": "application/json"},
), ),
request.Request( request.Request(
"POST", "POST", "http://host.com/", '{"b": 2, "a": 1}', {"content-type": "application/json"}
"http://host.com/",
'{"b": 2, "a": 1}',
{"content-type": "application/json"},
), ),
), ),
( (
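Illustrative aside (not part of the diff): in the chunked-transfer matcher cases above, b"10\r\n123456789_123456\r\n0\r\n\r\n" is simply the HTTP/1.1 chunked encoding of the 16-byte body b"123456789_123456" (0x10 == 16). A minimal sketch of that framing:

def chunk_encode(payload: bytes) -> bytes:
    # One data chunk (hex length, CRLF, data, CRLF) followed by the zero-length
    # terminating chunk, matching the literal used in the test data above.
    return b"%x\r\n" % len(payload) + payload + b"\r\n0\r\n\r\n"

assert chunk_encode(b"123456789_123456") == b"10\r\n123456789_123456\r\n0\r\n\r\n"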


@@ -17,9 +17,9 @@ def test_try_migrate_with_json(tmpdir):
cassette = tmpdir.join("cassette.json").strpath cassette = tmpdir.join("cassette.json").strpath
shutil.copy("tests/fixtures/migration/old_cassette.json", cassette) shutil.copy("tests/fixtures/migration/old_cassette.json", cassette)
assert vcr.migration.try_migrate(cassette) assert vcr.migration.try_migrate(cassette)
with open("tests/fixtures/migration/new_cassette.json") as f: with open("tests/fixtures/migration/new_cassette.json", "r") as f:
expected_json = json.load(f) expected_json = json.load(f)
with open(cassette) as f: with open(cassette, "r") as f:
actual_json = json.load(f) actual_json = json.load(f)
assert actual_json == expected_json assert actual_json == expected_json
@@ -28,9 +28,9 @@ def test_try_migrate_with_yaml(tmpdir):
cassette = tmpdir.join("cassette.yaml").strpath cassette = tmpdir.join("cassette.yaml").strpath
shutil.copy("tests/fixtures/migration/old_cassette.yaml", cassette) shutil.copy("tests/fixtures/migration/old_cassette.yaml", cassette)
assert vcr.migration.try_migrate(cassette) assert vcr.migration.try_migrate(cassette)
with open("tests/fixtures/migration/new_cassette.yaml") as f: with open("tests/fixtures/migration/new_cassette.yaml", "r") as f:
expected_yaml = yaml.load(f, Loader=Loader) expected_yaml = yaml.load(f, Loader=Loader)
with open(cassette) as f: with open(cassette, "r") as f:
actual_yaml = yaml.load(f, Loader=Loader) actual_yaml = yaml.load(f, Loader=Loader)
assert actual_yaml == expected_yaml assert actual_yaml == expected_yaml


@@ -1,3 +1,4 @@
# coding: UTF-8
import io import io
from vcr.stubs import VCRHTTPResponse from vcr.stubs import VCRHTTPResponse
@@ -88,11 +89,11 @@ def test_response_parses_correctly_and_fp_attribute_error_is_not_thrown():
b"different types of cancer cells. Recently, the first HDACi was\n " b"different types of cancer cells. Recently, the first HDACi was\n "
b"approved for the " b"approved for the "
b"treatment of cutaneous T cell lymphomas. Most HDACi currently in\n " b"treatment of cutaneous T cell lymphomas. Most HDACi currently in\n "
b"clinical ", b"clinical "
}, },
} }
vcr_response = VCRHTTPResponse(recorded_response) vcr_response = VCRHTTPResponse(recorded_response)
handle = io.TextIOWrapper(vcr_response, encoding="utf-8") handle = io.TextIOWrapper(io.BufferedReader(vcr_response), encoding="utf-8")
handle = iter(handle) handle = iter(handle)
articles = list(handle) articles = [line for line in handle]
assert len(articles) > 1 assert len(articles) > 1
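Illustrative aside (not part of the diff): the left-hand version wraps the response in io.TextIOWrapper directly, which requires the response object to behave like a raw binary stream (readable(), readinto(), and friends). A hedged sketch of that requirement with a stand-in class, not the real VCRHTTPResponse:

import io

class FakeRawResponse(io.RawIOBase):
    # Stand-in: io.RawIOBase supplies read() on top of readinto(), which is
    # enough for io.TextIOWrapper to wrap it without an io.BufferedReader shim.
    def __init__(self, data: bytes):
        self._buffer = io.BytesIO(data)

    def readable(self):
        return True

    def readinto(self, b):
        chunk = self._buffer.read(len(b))
        b[: len(chunk)] = chunk
        return len(chunk)

lines = list(io.TextIOWrapper(FakeRawResponse(b"line one\nline two\n"), encoding="utf-8"))
assert lines == ["line one\n", "line two\n"]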


@@ -1,3 +1,4 @@
# -*- encoding: utf-8 -*-
from unittest import mock from unittest import mock
import pytest import pytest
@@ -8,24 +9,24 @@ from vcr.serializers import compat, jsonserializer, yamlserializer
def test_deserialize_old_yaml_cassette(): def test_deserialize_old_yaml_cassette():
with open("tests/fixtures/migration/old_cassette.yaml") as f: with open("tests/fixtures/migration/old_cassette.yaml", "r") as f:
with pytest.raises(ValueError): with pytest.raises(ValueError):
deserialize(f.read(), yamlserializer) deserialize(f.read(), yamlserializer)
def test_deserialize_old_json_cassette(): def test_deserialize_old_json_cassette():
with open("tests/fixtures/migration/old_cassette.json") as f: with open("tests/fixtures/migration/old_cassette.json", "r") as f:
with pytest.raises(ValueError): with pytest.raises(ValueError):
deserialize(f.read(), jsonserializer) deserialize(f.read(), jsonserializer)
def test_deserialize_new_yaml_cassette(): def test_deserialize_new_yaml_cassette():
with open("tests/fixtures/migration/new_cassette.yaml") as f: with open("tests/fixtures/migration/new_cassette.yaml", "r") as f:
deserialize(f.read(), yamlserializer) deserialize(f.read(), yamlserializer)
def test_deserialize_new_json_cassette(): def test_deserialize_new_json_cassette():
with open("tests/fixtures/migration/new_cassette.json") as f: with open("tests/fixtures/migration/new_cassette.json", "r") as f:
deserialize(f.read(), jsonserializer) deserialize(f.read(), jsonserializer)


@@ -1,12 +1,6 @@
import contextlib
import http.client as httplib
from io import BytesIO
from tempfile import NamedTemporaryFile
from unittest import mock from unittest import mock
from pytest import mark from vcr import mode
from vcr import mode, use_cassette
from vcr.cassette import Cassette from vcr.cassette import Cassette
from vcr.stubs import VCRHTTPSConnection from vcr.stubs import VCRHTTPSConnection
@@ -17,59 +11,9 @@ class TestVCRConnection:
vcr_connection.ssl_version = "example_ssl_version" vcr_connection.ssl_version = "example_ssl_version"
assert vcr_connection.real_connection.ssl_version == "example_ssl_version" assert vcr_connection.real_connection.ssl_version == "example_ssl_version"
@mark.online
@mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=False) @mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=False)
def testing_connect(*args): def testing_connect(*args):
with contextlib.closing(VCRHTTPSConnection("www.google.com")) as vcr_connection: vcr_connection = VCRHTTPSConnection("www.google.com")
vcr_connection.cassette = Cassette("test", record_mode=mode.ALL) vcr_connection.cassette = Cassette("test", record_mode=mode.ALL)
vcr_connection.real_connection.connect() vcr_connection.real_connection.connect()
assert vcr_connection.real_connection.sock is not None assert vcr_connection.real_connection.sock is not None
def test_body_consumed_once_stream(self, tmpdir, httpbin):
self._test_body_consumed_once(
tmpdir,
httpbin,
BytesIO(b"1234567890"),
BytesIO(b"9876543210"),
BytesIO(b"9876543210"),
)
def test_body_consumed_once_iterator(self, tmpdir, httpbin):
self._test_body_consumed_once(
tmpdir,
httpbin,
iter([b"1234567890"]),
iter([b"9876543210"]),
iter([b"9876543210"]),
)
# data2 and data3 should serve the same data, potentially as iterators
def _test_body_consumed_once(
self,
tmpdir,
httpbin,
data1,
data2,
data3,
):
with NamedTemporaryFile(dir=tmpdir, suffix=".yml") as f:
testpath = f.name
# NOTE: ``use_cassette`` is not okay with the file existing
# already. So we using ``.close()`` to not only
# close but also delete the empty file, before we start.
f.close()
host, port = httpbin.host, httpbin.port
match_on = ["method", "uri", "body"]
with use_cassette(testpath, match_on=match_on):
conn1 = httplib.HTTPConnection(host, port)
conn1.request("POST", "/anything", body=data1)
conn1.getresponse()
conn2 = httplib.HTTPConnection(host, port)
conn2.request("POST", "/anything", body=data2)
conn2.getresponse()
with use_cassette(testpath, match_on=match_on) as cass:
conn3 = httplib.HTTPConnection(host, port)
conn3.request("POST", "/anything", body=data3)
conn3.getresponse()
assert cass.play_counts[0] == 0
assert cass.play_counts[1] == 1
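Illustrative aside (not part of the diff): the _test_body_consumed_once helper above posts stream and iterator bodies, which can only be consumed once; that is why the cassette must capture the body the first time it is read. A tiny illustration:

from io import BytesIO

body = BytesIO(b"1234567890")
assert body.read() == b"1234567890"
assert body.read() == b""  # a stream body is exhausted after the first read

chunks = iter([b"1234567890"])
assert b"".join(chunks) == b"1234567890"
assert b"".join(chunks) == b""  # an iterator body is likewise single-use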


@@ -1,199 +0,0 @@
import os
from unittest import TextTestRunner, defaultTestLoader
from unittest.mock import MagicMock
from urllib.request import urlopen
import pytest
from vcr.unittest import VCRTestCase
def test_defaults():
class MyTest(VCRTestCase):
def test_foo(self):
pass
test = run_testcase(MyTest)[0][0]
expected_path = os.path.join(os.path.dirname(__file__), "cassettes")
expected_name = "MyTest.test_foo.yaml"
assert os.path.dirname(test.cassette._path) == expected_path
assert os.path.basename(test.cassette._path) == expected_name
def test_disabled():
# Baseline vcr_enabled = True
class MyTest(VCRTestCase):
def test_foo(self):
pass
test = run_testcase(MyTest)[0][0]
assert hasattr(test, "cassette")
# Test vcr_enabled = False
class MyTest(VCRTestCase):
vcr_enabled = False
def test_foo(self):
pass
test = run_testcase(MyTest)[0][0]
assert not hasattr(test, "cassette")
def test_cassette_library_dir():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_cassette_library_dir(self):
return "/testing"
test = run_testcase(MyTest)[0][0]
assert test.cassette._path.startswith("/testing/")
def test_cassette_name():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_cassette_name(self):
return "my-custom-name"
test = run_testcase(MyTest)[0][0]
assert os.path.basename(test.cassette._path) == "my-custom-name"
def test_vcr_kwargs_overridden():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
kwargs = super()._get_vcr_kwargs()
kwargs["record_mode"] = "new_episodes"
return kwargs
test = run_testcase(MyTest)[0][0]
assert test.cassette.record_mode == "new_episodes"
def test_vcr_kwargs_passed():
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
return super()._get_vcr_kwargs(
record_mode="new_episodes",
)
test = run_testcase(MyTest)[0][0]
assert test.cassette.record_mode == "new_episodes"
def test_vcr_kwargs_cassette_dir():
# Test that _get_cassette_library_dir applies if cassette_library_dir
# is absent from vcr kwargs.
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
return {
"record_mode": "new_episodes",
}
_get_cassette_library_dir = MagicMock(return_value="/testing")
test = run_testcase(MyTest)[0][0]
assert test.cassette._path.startswith("/testing/")
assert test._get_cassette_library_dir.call_count == 1
# Test that _get_cassette_library_dir is ignored if cassette_library_dir
# is present in vcr kwargs.
class MyTest(VCRTestCase):
def test_foo(self):
pass
def _get_vcr_kwargs(self):
return {
"cassette_library_dir": "/testing",
}
_get_cassette_library_dir = MagicMock(return_value="/ignored")
test = run_testcase(MyTest)[0][0]
assert test.cassette._path.startswith("/testing/")
assert test._get_cassette_library_dir.call_count == 0
@pytest.mark.online
def test_get_vcr_with_matcher(tmpdir):
cassette_dir = tmpdir.mkdir("cassettes")
assert len(cassette_dir.listdir()) == 0
mock_matcher = MagicMock(return_value=True, __name__="MockMatcher")
class MyTest(VCRTestCase):
def test_foo(self):
self.response = urlopen("http://example.com").read()
def _get_vcr(self):
myvcr = super()._get_vcr()
myvcr.register_matcher("mymatcher", mock_matcher)
myvcr.match_on = ["mymatcher"]
return myvcr
def _get_cassette_library_dir(self):
return str(cassette_dir)
# First run to fill cassette.
test = run_testcase(MyTest)[0][0]
assert len(test.cassette.requests) == 1
assert not mock_matcher.called # nothing in cassette
# Second run to call matcher.
test = run_testcase(MyTest)[0][0]
assert len(test.cassette.requests) == 1
assert mock_matcher.called
assert (
repr(mock_matcher.mock_calls[0])
== "call(<Request (GET) http://example.com>, <Request (GET) http://example.com>)"
)
@pytest.mark.online
def test_testcase_playback(tmpdir):
cassette_dir = tmpdir.mkdir("cassettes")
assert len(cassette_dir.listdir()) == 0
# First test actually reads from the web.
class MyTest(VCRTestCase):
def test_foo(self):
self.response = urlopen("http://example.com").read()
def _get_cassette_library_dir(self):
return str(cassette_dir)
test = run_testcase(MyTest)[0][0]
assert b"illustrative examples" in test.response
assert len(test.cassette.requests) == 1
assert test.cassette.play_count == 0
# Second test reads from cassette.
test2 = run_testcase(MyTest)[0][0]
assert test.cassette is not test2.cassette
assert b"illustrative examples" in test.response
assert len(test2.cassette.requests) == 1
assert test2.cassette.play_count == 1
def run_testcase(testcase_class):
"""Run all the tests in a TestCase and return them."""
suite = defaultTestLoader.loadTestsFromTestCase(testcase_class)
tests = list(suite._tests)
result = TextTestRunner().run(suite)
return tests, result
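Illustrative aside (not part of the diff): the tests above exercise vcr.unittest.VCRTestCase. A minimal, hedged usage sketch (the class name, URL, and record mode are chosen only for illustration):

from urllib.request import urlopen

from vcr.unittest import VCRTestCase

class ExampleHTTPTest(VCRTestCase):
    # Each test method records into (or replays from) its own cassette,
    # which is exposed afterwards as self.cassette, as the tests above rely on.
    def _get_vcr_kwargs(self):
        return super()._get_vcr_kwargs(record_mode="once")

    def test_fetch(self):
        urlopen("http://example.com").read()
        self.assertEqual(len(self.cassette.requests), 1)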


@@ -1,33 +0,0 @@
from io import BytesIO, StringIO
import pytest
from vcr import request
from vcr.util import read_body
@pytest.mark.parametrize(
"input_, expected_output",
[
(BytesIO(b"Stream"), b"Stream"),
(StringIO("Stream"), b"Stream"),
(iter(["StringIter"]), b"StringIter"),
(iter(["String", "Iter"]), b"StringIter"),
(iter([b"BytesIter"]), b"BytesIter"),
(iter([b"Bytes", b"Iter"]), b"BytesIter"),
(iter([70, 111, 111]), b"Foo"),
(iter([]), b""),
("String", b"String"),
(b"Bytes", b"Bytes"),
],
)
def test_read_body(input_, expected_output):
r = request.Request("POST", "http://host.com/", input_, {})
assert read_body(r) == expected_output
def test_unsupported_read_body():
r = request.Request("POST", "http://host.com/", iter([[]]), {})
with pytest.raises(ValueError) as excinfo:
assert read_body(r)
assert excinfo.value.args == ("Body type <class 'list'> not supported",)


@@ -15,8 +15,7 @@ def test_vcr_use_cassette():
record_mode = mock.Mock() record_mode = mock.Mock()
test_vcr = VCR(record_mode=record_mode) test_vcr = VCR(record_mode=record_mode)
with mock.patch( with mock.patch(
"vcr.cassette.Cassette.load", "vcr.cassette.Cassette.load", return_value=mock.MagicMock(inject=False)
return_value=mock.MagicMock(inject=False),
) as mock_cassette_load: ) as mock_cassette_load:
@test_vcr.use_cassette("test") @test_vcr.use_cassette("test")
@@ -72,19 +71,16 @@ def test_vcr_before_record_request_params():
# Test filter_headers # Test filter_headers
request = Request( request = Request(
"GET", "GET", base_path + "?foo=bar", "", {"cookie": "test", "other": "fun", "bert": "nobody"}
base_path + "?foo=bar",
"",
{"cookie": "test", "other": "fun", "bert": "nobody"},
) )
assert cassette.filter_request(request).headers == {"other": "fun", "bert": "ernie"} assert cassette.filter_request(request).headers == {"other": "fun", "bert": "ernie"}
# Test ignore_hosts # Test ignore_hosts
request = Request("GET", "http://www.test.com?foo=bar", "", {"cookie": "test", "other": "fun"}) request = Request("GET", "http://www.test.com" + "?foo=bar", "", {"cookie": "test", "other": "fun"})
assert cassette.filter_request(request) is None assert cassette.filter_request(request) is None
# Test ignore_localhost # Test ignore_localhost
request = Request("GET", "http://localhost:8000?foo=bar", "", {"cookie": "test", "other": "fun"}) request = Request("GET", "http://localhost:8000" + "?foo=bar", "", {"cookie": "test", "other": "fun"})
assert cassette.filter_request(request) is None assert cassette.filter_request(request) is None
with test_vcr.use_cassette("test", before_record_request=None) as cassette: with test_vcr.use_cassette("test", before_record_request=None) as cassette:
@@ -263,9 +259,7 @@ def test_cassette_library_dir_with_decoration_and_super_explicit_path():
def test_cassette_library_dir_with_path_transformer(): def test_cassette_library_dir_with_path_transformer():
library_dir = "/library_dir" library_dir = "/library_dir"
vcr = VCR( vcr = VCR(
inject_cassette=True, inject_cassette=True, cassette_library_dir=library_dir, path_transformer=lambda path: path + ".json"
cassette_library_dir=library_dir,
path_transformer=lambda path: path + ".json",
) )
@vcr.use_cassette() @vcr.use_cassette()
@@ -368,23 +362,7 @@ del test_dynamically_added
def test_path_class_as_cassette(): def test_path_class_as_cassette():
path = Path(__file__).parent.parent.joinpath( path = Path(__file__).parent.parent.joinpath(
"integration/cassettes/test_httpx_test_test_behind_proxy.yml", "integration/cassettes/test_httpx_test_test_behind_proxy.yml"
) )
with use_cassette(path): with use_cassette(path):
pass pass
def test_use_cassette_generator_return():
ret_val = object()
vcr = VCR()
@vcr.use_cassette("test")
def gen():
return ret_val
yield
with pytest.raises(StopIteration) as exc_info:
next(gen())
assert exc_info.value.value is ret_val


@@ -8,4 +8,8 @@ def test_vcr_import_deprecation(recwarn):
import vcr # noqa: F401 import vcr # noqa: F401
assert len(recwarn) == 0 if sys.version_info[0] == 2:
assert len(recwarn) == 1
assert issubclass(recwarn[0].category, DeprecationWarning)
else:
assert len(recwarn) == 0

tox.ini (new file, 111 lines)

@@ -0,0 +1,111 @@
[tox]
skip_missing_interpreters=true
envlist =
cov-clean,
lint,
{py37,py38,py39,py310,py311}-{requests-urllib3-1,requests-urllib3-2,httplib2,urllib3-1,urllib3-2,tornado4,boto3,aiohttp,httpx},
{pypy3}-{requests-urllib3-1,requests-urllib3-2,httplib2,urllib3-1,urllib3-2,tornado4,boto3},
{py310}-httpx019,
cov-report
[gh-actions]
python =
3.7: py37
3.8: py38
3.9: py39
3.10: py310, lint
3.11: py311
pypy-3: pypy3
# Coverage environment tasks: cov-clean and cov-report
# https://pytest-cov.readthedocs.io/en/latest/tox.html
[testenv:cov-clean]
deps = coverage
skip_install=true
commands = coverage erase
[testenv:cov-report]
deps = coverage
skip_install=true
commands =
coverage html
coverage report --fail-under=90
[testenv:lint]
skipsdist = True
commands =
black --version
black --check --diff .
isort --version
isort . --check --diff
flake8 --version
flake8 --exclude=./docs/conf.py,./.tox/,./venv/
pyflakes ./docs/conf.py
deps =
flake8
black
isort
basepython = python3.10
[testenv:docs]
# Running sphinx from inside the "docs" directory
# ensures it will not pick up any stray files that might
# get into a virtual environment under the top-level directory
# or other artifacts under build/
changedir = docs
# The only dependency is sphinx
# If we were using extensions packaged separately,
# we would specify them here.
# A better practice is to specify a specific version of sphinx.
deps =
sphinx
sphinx_rtd_theme
# This is the sphinx command to generate HTML.
# In other circumstances, we might want to generate a PDF or an ebook
commands =
sphinx-build -W -b html -d {envtmpdir}/doctrees . {envtmpdir}/html
# We use Python 3.7. Tox sometimes tries to autodetect it based on the name of
# the testenv, but "docs" does not give useful clues so we have to be explicit.
basepython = python3.7
[testenv]
# Need to use develop install so that paths
# for aggregate code coverage combine
usedevelop=true
commands =
./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append {posargs}
allowlist_externals =
./runtests.sh
deps =
Werkzeug==2.0.3
pytest
pytest-httpbin>=1.0.1
pytest-cov
PyYAML
ipaddress
requests: requests>=2.22.0
httplib2: httplib2
urllib3-1: urllib3<2
urllib3-2: urllib3<3
boto3: boto3
aiohttp: aiohttp
aiohttp: pytest-asyncio
aiohttp: pytest-aiohttp
httpx: httpx
{py37,py38,py39,py310}-{httpx}: httpx
{py37,py38,py39,py310}-{httpx}: pytest-asyncio
httpx: httpx>0.19
# httpx==0.19 is the latest version that supports allow_redirects, newer versions use follow_redirects
httpx019: httpx==0.19
{py37,py38,py39,py310}-{httpx}: pytest-asyncio
depends =
lint,{py37,py38,py39,py310,py311,pypy3}-{requests-urllib3-1,requests-urllib3-2,httplib2,urllib3-1,urllib3-2,tornado4,boto3},{py37,py38,py39,py310,py311}-{aiohttp},{py37,py38,py39,py310,py311}-{httpx}: cov-clean
cov-report: lint,{py37,py38,py39,py310,py311,pypy3}-{requests-urllib3-1,requests-urllib3-2,httplib2,urllib3-1,urllib3-2,tornado4,boto3},{py37,py38,py39,py310,py311}-{aiohttp}
passenv =
AWS_ACCESS_KEY_ID
AWS_DEFAULT_REGION
AWS_SECRET_ACCESS_KEY
[flake8]
max_line_length = 110
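Illustrative aside (not part of the diff): the httpx019 comment above notes that httpx 0.19 still uses allow_redirects while newer releases renamed it to follow_redirects. A hedged sketch of code that copes with either, branching on the installed version:

import httpx

# Assumption: only the keyword name differs between the two httpx generations.
if httpx.__version__.startswith("0.19"):
    response = httpx.get("http://example.com", allow_redirects=True)
else:
    response = httpx.get("http://example.com", follow_redirects=True)
print(response.status_code)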


@@ -2,9 +2,9 @@ import logging
from logging import NullHandler from logging import NullHandler
from .config import VCR from .config import VCR
from .record_mode import RecordMode as mode # noqa: F401 from .record_mode import RecordMode as mode # noqa import is not used in this file
__version__ = "7.0.0" __version__ = "4.2.1"
logging.getLogger(__name__).addHandler(NullHandler()) logging.getLogger(__name__).addHandler(NullHandler())


@@ -1,3 +1,3 @@
async def handle_coroutine(vcr, fn): async def handle_coroutine(vcr, fn): # noqa: E999
with vcr as cassette: with vcr as cassette:
return await fn(cassette) return await fn(cassette) # noqa: E999


@@ -3,7 +3,7 @@ import contextlib
import copy import copy
import inspect import inspect
import logging import logging
from inspect import iscoroutinefunction import sys
import wrapt import wrapt
@@ -11,11 +11,19 @@ from ._handle_coroutine import handle_coroutine
from .errors import UnhandledHTTPRequestError from .errors import UnhandledHTTPRequestError
from .matchers import get_matchers_results, method, requests_match, uri from .matchers import get_matchers_results, method, requests_match, uri
from .patch import CassettePatcherBuilder from .patch import CassettePatcherBuilder
from .persisters.filesystem import CassetteDecodeError, CassetteNotFoundError, FilesystemPersister from .persisters.filesystem import FilesystemPersister
from .record_mode import RecordMode from .record_mode import RecordMode
from .serializers import yamlserializer from .serializers import yamlserializer
from .util import partition_dict from .util import partition_dict
try:
from asyncio import iscoroutinefunction
except ImportError:
def iscoroutinefunction(*args, **kwargs):
return False
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -73,8 +81,7 @@ class CassetteContextDecorator:
# pass # pass
assert self.__finish is None, "Cassette already open." assert self.__finish is None, "Cassette already open."
other_kwargs, cassette_kwargs = partition_dict( other_kwargs, cassette_kwargs = partition_dict(
lambda key, _: key in self._non_cassette_arguments, lambda key, _: key in self._non_cassette_arguments, self._args_getter()
self._args_getter(),
) )
if other_kwargs.get("path_transformer"): if other_kwargs.get("path_transformer"):
transformer = other_kwargs["path_transformer"] transformer = other_kwargs["path_transformer"]
@@ -125,7 +132,20 @@ class CassetteContextDecorator:
duration of the generator. duration of the generator.
""" """
with self as cassette: with self as cassette:
return (yield from fn(cassette)) coroutine = fn(cassette)
# We don't need to catch StopIteration. The caller (Tornado's
# gen.coroutine, for example) will handle that.
to_yield = next(coroutine)
while True:
try:
to_send = yield to_yield
except Exception:
to_yield = coroutine.throw(*sys.exc_info())
else:
try:
to_yield = coroutine.send(to_send)
except StopIteration:
break
def _handle_function(self, fn): def _handle_function(self, fn):
with self as cassette: with self as cassette:
@@ -177,7 +197,6 @@ class Cassette:
custom_patches=(), custom_patches=(),
inject=False, inject=False,
allow_playback_repeats=False, allow_playback_repeats=False,
drop_unused_requests=False,
): ):
self._persister = persister or FilesystemPersister self._persister = persister or FilesystemPersister
self._path = path self._path = path
@@ -190,7 +209,6 @@ class Cassette:
self.record_mode = record_mode self.record_mode = record_mode
self.custom_patches = custom_patches self.custom_patches = custom_patches
self.allow_playback_repeats = allow_playback_repeats self.allow_playback_repeats = allow_playback_repeats
self.drop_unused_requests = drop_unused_requests
# self.data is the list of (req, resp) tuples # self.data is the list of (req, resp) tuples
self.data = [] self.data = []
@@ -198,10 +216,6 @@ class Cassette:
self.dirty = False self.dirty = False
self.rewound = False self.rewound = False
# Subsets of self.data to store old and played interactions
self._old_interactions = []
self._played_interactions = []
@property @property
def play_count(self): def play_count(self):
return sum(self.play_counts.values()) return sum(self.play_counts.values())
@@ -221,7 +235,7 @@ class Cassette:
@property @property
def write_protected(self): def write_protected(self):
return (self.rewound and self.record_mode == RecordMode.ONCE) or self.record_mode == RecordMode.NONE return self.rewound and self.record_mode == RecordMode.ONCE or self.record_mode == RecordMode.NONE
def append(self, request, response): def append(self, request, response):
"""Add a request, response pair to this cassette""" """Add a request, response pair to this cassette"""
@@ -263,11 +277,10 @@ class Cassette:
for index, response in self._responses(request): for index, response in self._responses(request):
if self.play_counts[index] == 0 or self.allow_playback_repeats: if self.play_counts[index] == 0 or self.allow_playback_repeats:
self.play_counts[index] += 1 self.play_counts[index] += 1
self._played_interactions.append((request, response))
return response return response
# The cassette doesn't contain the request asked for. # The cassette doesn't contain the request asked for.
raise UnhandledHTTPRequestError( raise UnhandledHTTPRequestError(
f"The cassette ({self._path!r}) doesn't contain the request ({request!r}) asked for", "The cassette (%r) doesn't contain the request (%r) asked for" % (self._path, request)
) )
def responses_of(self, request): def responses_of(self, request):
@@ -282,7 +295,7 @@ class Cassette:
return responses return responses
# The cassette doesn't contain the request asked for. # The cassette doesn't contain the request asked for.
raise UnhandledHTTPRequestError( raise UnhandledHTTPRequestError(
f"The cassette ({self._path!r}) doesn't contain the request ({request!r}) asked for", "The cassette (%r) doesn't contain the request (%r) asked for" % (self._path, request)
) )
def rewind(self): def rewind(self):
@@ -301,7 +314,7 @@ class Cassette:
""" """
best_matches = [] best_matches = []
request = self._before_record_request(request) request = self._before_record_request(request)
for _, (stored_request, _) in enumerate(self.data): for index, (stored_request, response) in enumerate(self.data):
successes, fails = get_matchers_results(request, stored_request, self._match_on) successes, fails = get_matchers_results(request, stored_request, self._match_on)
best_matches.append((len(successes), stored_request, successes, fails)) best_matches.append((len(successes), stored_request, successes, fails))
best_matches.sort(key=lambda t: t[0], reverse=True) best_matches.sort(key=lambda t: t[0], reverse=True)
@@ -324,36 +337,12 @@ class Cassette:
return final_best_matches return final_best_matches
def _new_interactions(self):
"""List of new HTTP interactions (request/response tuples)"""
new_interactions = []
for request, response in self.data:
if all(
not requests_match(request, old_request, self._match_on)
for old_request, _ in self._old_interactions
):
new_interactions.append((request, response))
return new_interactions
def _as_dict(self): def _as_dict(self):
return {"requests": self.requests, "responses": self.responses} return {"requests": self.requests, "responses": self.responses}
def _build_used_interactions_dict(self):
interactions = self._played_interactions + self._new_interactions()
cassete_dict = {
"requests": [request for request, _ in interactions],
"responses": [response for _, response in interactions],
}
return cassete_dict
def _save(self, force=False): def _save(self, force=False):
if self.drop_unused_requests and len(self._played_interactions) < len(self._old_interactions):
cassete_dict = self._build_used_interactions_dict()
force = True
else:
cassete_dict = self._as_dict()
if force or self.dirty: if force or self.dirty:
self._persister.save_cassette(self._path, cassete_dict, serializer=self._serializer) self._persister.save_cassette(self._path, self._as_dict(), serializer=self._serializer)
self.dirty = False self.dirty = False
def _load(self): def _load(self):
@@ -361,14 +350,13 @@ class Cassette:
requests, responses = self._persister.load_cassette(self._path, serializer=self._serializer) requests, responses = self._persister.load_cassette(self._path, serializer=self._serializer)
for request, response in zip(requests, responses): for request, response in zip(requests, responses):
self.append(request, response) self.append(request, response)
self._old_interactions.append((request, response))
self.dirty = False self.dirty = False
self.rewound = True self.rewound = True
except (CassetteDecodeError, CassetteNotFoundError): except ValueError:
pass pass
def __str__(self): def __str__(self):
return f"<Cassette containing {len(self)} recorded response(s)>" return "<Cassette containing {} recorded response(s)>".format(len(self))
def __len__(self): def __len__(self):
"""Return the number of request,response pairs stored in here""" """Return the number of request,response pairs stored in here"""
@@ -376,7 +364,7 @@ class Cassette:
def __contains__(self, request): def __contains__(self, request):
"""Return whether or not a request has been stored""" """Return whether or not a request has been stored"""
for index, _ in self._responses(request): for index, response in self._responses(request):
if self.play_counts[index] == 0 or self.allow_playback_repeats: if self.play_counts[index] == 0 or self.allow_playback_repeats:
return True return True
return False return False
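
A minimal usage sketch, for orientation: the drop_unused_requests option visible on the left-hand side of the Cassette hunks above (and the VCR hunks below) prunes recorded interactions that were never replayed. The option name is taken from this diff; availability depends on the installed vcrpy version, so treat this as illustrative rather than definitive.

import urllib.request
import vcr

# Assumed: a vcrpy build that ships drop_unused_requests (the left-hand side of this compare).
my_vcr = vcr.VCR(record_mode="new_episodes", drop_unused_requests=True)

with my_vcr.use_cassette("cassettes/example.yaml"):
    urllib.request.urlopen("https://httpbin.org/get").read()

# On exit, Cassette._save() keeps only interactions that were played back or newly
# recorded during this session and rewrites the cassette file accordingly.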


@@ -6,6 +6,8 @@ import types
from collections import abc as collections_abc from collections import abc as collections_abc
from pathlib import Path from pathlib import Path
import six
from . import filters, matchers from . import filters, matchers
from .cassette import Cassette from .cassette import Cassette
from .persisters.filesystem import FilesystemPersister from .persisters.filesystem import FilesystemPersister
@@ -48,7 +50,6 @@ class VCR:
func_path_generator=None, func_path_generator=None,
decode_compressed_response=False, decode_compressed_response=False,
record_on_exception=True, record_on_exception=True,
drop_unused_requests=False,
): ):
self.serializer = serializer self.serializer = serializer
self.match_on = match_on self.match_on = match_on
@@ -82,13 +83,12 @@ class VCR:
self.decode_compressed_response = decode_compressed_response self.decode_compressed_response = decode_compressed_response
self.record_on_exception = record_on_exception self.record_on_exception = record_on_exception
self._custom_patches = tuple(custom_patches) self._custom_patches = tuple(custom_patches)
self.drop_unused_requests = drop_unused_requests
def _get_serializer(self, serializer_name): def _get_serializer(self, serializer_name):
try: try:
serializer = self.serializers[serializer_name] serializer = self.serializers[serializer_name]
except KeyError: except KeyError:
raise KeyError(f"Serializer {serializer_name} doesn't exist or isn't registered") from None raise KeyError("Serializer {} doesn't exist or isn't registered".format(serializer_name))
return serializer return serializer
def _get_matchers(self, matcher_names): def _get_matchers(self, matcher_names):
@@ -97,7 +97,7 @@ class VCR:
for m in matcher_names: for m in matcher_names:
matchers.append(self.matchers[m]) matchers.append(self.matchers[m])
except KeyError: except KeyError:
raise KeyError(f"Matcher {m} doesn't exist or isn't registered") from None raise KeyError("Matcher {} doesn't exist or isn't registered".format(m))
return matchers return matchers
def use_cassette(self, path=None, **kwargs): def use_cassette(self, path=None, **kwargs):
@@ -153,7 +153,6 @@ class VCR:
"func_path_generator": func_path_generator, "func_path_generator": func_path_generator,
"allow_playback_repeats": kwargs.get("allow_playback_repeats", False), "allow_playback_repeats": kwargs.get("allow_playback_repeats", False),
"record_on_exception": record_on_exception, "record_on_exception": record_on_exception,
"drop_unused_requests": kwargs.get("drop_unused_requests", self.drop_unused_requests),
} }
path = kwargs.get("path") path = kwargs.get("path")
if path: if path:
@@ -163,8 +162,7 @@ class VCR:
def _build_before_record_response(self, options): def _build_before_record_response(self, options):
before_record_response = options.get("before_record_response", self.before_record_response) before_record_response = options.get("before_record_response", self.before_record_response)
decode_compressed_response = options.get( decode_compressed_response = options.get(
"decode_compressed_response", "decode_compressed_response", self.decode_compressed_response
self.decode_compressed_response,
) )
filter_functions = [] filter_functions = []
if decode_compressed_response: if decode_compressed_response:
@@ -188,12 +186,10 @@ class VCR:
filter_headers = options.get("filter_headers", self.filter_headers) filter_headers = options.get("filter_headers", self.filter_headers)
filter_query_parameters = options.get("filter_query_parameters", self.filter_query_parameters) filter_query_parameters = options.get("filter_query_parameters", self.filter_query_parameters)
filter_post_data_parameters = options.get( filter_post_data_parameters = options.get(
"filter_post_data_parameters", "filter_post_data_parameters", self.filter_post_data_parameters
self.filter_post_data_parameters,
) )
before_record_request = options.get( before_record_request = options.get(
"before_record_request", "before_record_request", options.get("before_record", self.before_record_request)
options.get("before_record", self.before_record_request),
) )
ignore_hosts = options.get("ignore_hosts", self.ignore_hosts) ignore_hosts = options.get("ignore_hosts", self.ignore_hosts)
ignore_localhost = options.get("ignore_localhost", self.ignore_localhost) ignore_localhost = options.get("ignore_localhost", self.ignore_localhost)
@@ -203,12 +199,12 @@ class VCR:
if filter_query_parameters: if filter_query_parameters:
replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_query_parameters] replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_query_parameters]
filter_functions.append( filter_functions.append(
functools.partial(filters.replace_query_parameters, replacements=replacements), functools.partial(filters.replace_query_parameters, replacements=replacements)
) )
if filter_post_data_parameters: if filter_post_data_parameters:
replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_post_data_parameters] replacements = [p if isinstance(p, tuple) else (p, None) for p in filter_post_data_parameters]
filter_functions.append( filter_functions.append(
functools.partial(filters.replace_post_data_parameters, replacements=replacements), functools.partial(filters.replace_post_data_parameters, replacements=replacements)
) )
hosts_to_ignore = set(ignore_hosts) hosts_to_ignore = set(ignore_hosts)
@@ -223,7 +219,7 @@ class VCR:
filter_functions.extend(before_record_request) filter_functions.extend(before_record_request)
def before_record_request(request): def before_record_request(request):
request = copy.deepcopy(request) request = copy.copy(request)
for function in filter_functions: for function in filter_functions:
if request is None: if request is None:
break break
@@ -257,5 +253,5 @@ class VCR:
def test_case(self, predicate=None): def test_case(self, predicate=None):
predicate = predicate or self.is_test_method predicate = predicate or self.is_test_method
metaclass = auto_decorate(self.use_cassette, predicate) # TODO: Remove this reference to `six` in favor of the Python3 equivalent
return metaclass("temporary_class", (), {}) return six.with_metaclass(auto_decorate(self.use_cassette, predicate))

View File

@@ -13,29 +13,30 @@ class CannotOverwriteExistingCassetteException(Exception):
best_matches = cassette.find_requests_with_most_matches(failed_request) best_matches = cassette.find_requests_with_most_matches(failed_request)
if best_matches: if best_matches:
# Build a comprehensible message to put in the exception. # Build a comprehensible message to put in the exception.
best_matches_msg = ( best_matches_msg = "Found {} similar requests with {} different matcher(s) :\n".format(
f"Found {len(best_matches)} similar requests " len(best_matches), len(best_matches[0][2])
f"with {len(best_matches[0][2])} different matcher(s) :\n"
) )
for idx, best_match in enumerate(best_matches, start=1): for idx, best_match in enumerate(best_matches, start=1):
request, succeeded_matchers, failed_matchers_assertion_msgs = best_match request, succeeded_matchers, failed_matchers_assertion_msgs = best_match
best_matches_msg += ( best_matches_msg += (
f"\n{idx} - ({request!r}).\n" "\n%s - (%r).\n"
f"Matchers succeeded : {succeeded_matchers}\n" "Matchers succeeded : %s\n"
"Matchers failed :\n" "Matchers failed :\n" % (idx, request, succeeded_matchers)
) )
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs: for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += f"{failed_matcher} - assertion failure :\n{assertion_msg}\n" best_matches_msg += "%s - assertion failure :\n" "%s\n" % (failed_matcher, assertion_msg)
else: else:
best_matches_msg = "No similar requests, that have not been played, found." best_matches_msg = "No similar requests, that have not been played, found."
return ( return (
f"Can't overwrite existing cassette ({cassette._path!r}) in " "Can't overwrite existing cassette (%r) in "
f"your current record mode ({cassette.record_mode!r}).\n" "your current record mode (%r).\n"
f"No match for the request ({failed_request!r}) was found.\n" "No match for the request (%r) was found.\n"
f"{best_matches_msg}" "%s" % (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
) )
class UnhandledHTTPRequestError(KeyError): class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want.""" """Raised when a cassette does not contain the request we want."""
pass


@@ -95,7 +95,7 @@ def replace_post_data_parameters(request, replacements):
new_body[k] = rv new_body[k] = rv
request.body = new_body request.body = new_body
elif request.headers.get("Content-Type") == "application/json": elif request.headers.get("Content-Type") == "application/json":
json_data = json.loads(request.body) json_data = json.loads(request.body.decode("utf-8"))
for k, rv in replacements.items(): for k, rv in replacements.items():
if k in json_data: if k in json_data:
ov = json_data.pop(k) ov = json_data.pop(k)
@@ -153,15 +153,9 @@ def decode_response(response):
if not body: if not body:
return "" return ""
if encoding == "gzip": if encoding == "gzip":
try: return zlib.decompress(body, zlib.MAX_WBITS | 16)
return zlib.decompress(body, zlib.MAX_WBITS | 16)
except zlib.error:
return body # assumes that the data was already decompressed
else: # encoding == 'deflate' else: # encoding == 'deflate'
try: return zlib.decompress(body)
return zlib.decompress(body)
except zlib.error:
return body # assumes that the data was already decompressed
# Deepcopy here in case `headers` contain objects that could # Deepcopy here in case `headers` contain objects that could
# be mutated by a shallow copy and corrupt the real response. # be mutated by a shallow copy and corrupt the real response.


@@ -2,73 +2,54 @@ import json
import logging import logging
import urllib import urllib
import xmlrpc.client import xmlrpc.client
from string import hexdigits
from .util import read_body from .util import read_body
_HEXDIG_CODE_POINTS: set[int] = {ord(s.encode("ascii")) for s in hexdigits}
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
def method(r1, r2): def method(r1, r2):
if r1.method != r2.method: assert r1.method == r2.method, "{} != {}".format(r1.method, r2.method)
raise AssertionError(f"{r1.method} != {r2.method}")
def uri(r1, r2): def uri(r1, r2):
if r1.uri != r2.uri: assert r1.uri == r2.uri, "{} != {}".format(r1.uri, r2.uri)
raise AssertionError(f"{r1.uri} != {r2.uri}")
def host(r1, r2): def host(r1, r2):
if r1.host != r2.host: assert r1.host == r2.host, "{} != {}".format(r1.host, r2.host)
raise AssertionError(f"{r1.host} != {r2.host}")
def scheme(r1, r2): def scheme(r1, r2):
if r1.scheme != r2.scheme: assert r1.scheme == r2.scheme, "{} != {}".format(r1.scheme, r2.scheme)
raise AssertionError(f"{r1.scheme} != {r2.scheme}")
def port(r1, r2): def port(r1, r2):
if r1.port != r2.port: assert r1.port == r2.port, "{} != {}".format(r1.port, r2.port)
raise AssertionError(f"{r1.port} != {r2.port}")
def path(r1, r2): def path(r1, r2):
if r1.path != r2.path: assert r1.path == r2.path, "{} != {}".format(r1.path, r2.path)
raise AssertionError(f"{r1.path} != {r2.path}")
def query(r1, r2): def query(r1, r2):
if r1.query != r2.query: assert r1.query == r2.query, "{} != {}".format(r1.query, r2.query)
raise AssertionError(f"{r1.query} != {r2.query}")
def raw_body(r1, r2): def raw_body(r1, r2):
if read_body(r1) != read_body(r2): assert read_body(r1) == read_body(r2)
raise AssertionError
def body(r1, r2): def body(r1, r2):
transformers = list(_get_transformers(r1)) transformer = _get_transformer(r1)
if transformers != list(_get_transformers(r2)): r2_transformer = _get_transformer(r2)
transformers = [] if transformer != r2_transformer:
transformer = _identity
b1 = read_body(r1) assert transformer(read_body(r1)) == transformer(read_body(r2))
b2 = read_body(r2)
for transform in transformers:
b1 = transform(b1)
b2 = transform(b2)
if b1 != b2:
raise AssertionError
def headers(r1, r2): def headers(r1, r2):
if r1.headers != r2.headers: assert r1.headers == r2.headers, "{} != {}".format(r1.headers, r2.headers)
raise AssertionError(f"{r1.headers} != {r2.headers}")
def _header_checker(value, header="Content-Type"): def _header_checker(value, header="Content-Type"):
@@ -81,71 +62,17 @@ def _header_checker(value, header="Content-Type"):
return checker return checker
def _dechunk(body):
if isinstance(body, str):
body = body.encode("utf-8")
elif isinstance(body, bytearray):
body = bytes(body)
elif hasattr(body, "__iter__"):
body = list(body)
if body:
if isinstance(body[0], str):
body = ("".join(body)).encode("utf-8")
elif isinstance(body[0], bytes):
body = b"".join(body)
elif isinstance(body[0], int):
body = bytes(body)
else:
raise ValueError(f"Body chunk type {type(body[0])} not supported")
else:
body = None
if not isinstance(body, bytes):
return body
# Now decode chunked data format (https://en.wikipedia.org/wiki/Chunked_transfer_encoding)
# Example input: b"45\r\n<69 bytes>\r\n0\r\n\r\n" where int(b"45", 16) == 69.
CHUNK_GAP = b"\r\n"
BODY_LEN: int = len(body)
chunks: list[bytes] = []
pos: int = 0
while True:
for i in range(pos, BODY_LEN):
if body[i] not in _HEXDIG_CODE_POINTS:
break
if i == 0 or body[i : i + len(CHUNK_GAP)] != CHUNK_GAP:
if pos == 0:
return body # i.e. assume non-chunk data
raise ValueError("Malformed chunked data")
size_bytes = int(body[pos:i], 16)
if size_bytes == 0: # i.e. well-formed ending
return b"".join(chunks)
chunk_data_first = i + len(CHUNK_GAP)
chunk_data_after_last = chunk_data_first + size_bytes
if body[chunk_data_after_last : chunk_data_after_last + len(CHUNK_GAP)] != CHUNK_GAP:
raise ValueError("Malformed chunked data")
chunk_data = body[chunk_data_first:chunk_data_after_last]
chunks.append(chunk_data)
pos = chunk_data_after_last + len(CHUNK_GAP)
def _transform_json(body): def _transform_json(body):
# Request body is always a byte string, but json.loads() wants a text
# string. RFC 7159 says the default encoding is UTF-8 (although UTF-16
# and UTF-32 are also allowed: hmmmmm).
if body: if body:
return json.loads(body) return json.loads(body.decode("utf-8"))
_xml_header_checker = _header_checker("text/xml") _xml_header_checker = _header_checker("text/xml")
_xmlrpc_header_checker = _header_checker("xmlrpc", header="User-Agent") _xmlrpc_header_checker = _header_checker("xmlrpc", header="User-Agent")
_checker_transformer_pairs = ( _checker_transformer_pairs = (
(_header_checker("chunked", header="Transfer-Encoding"), _dechunk),
( (
_header_checker("application/x-www-form-urlencoded"), _header_checker("application/x-www-form-urlencoded"),
lambda body: urllib.parse.parse_qs(body.decode("ascii")), lambda body: urllib.parse.parse_qs(body.decode("ascii")),
@@ -155,16 +82,22 @@ _checker_transformer_pairs = (
) )
def _get_transformers(request): def _identity(x):
return x
def _get_transformer(request):
for checker, transformer in _checker_transformer_pairs: for checker, transformer in _checker_transformer_pairs:
if checker(request.headers): if checker(request.headers):
yield transformer return transformer
else:
return _identity
def requests_match(r1, r2, matchers): def requests_match(r1, r2, matchers):
successes, failures = get_matchers_results(r1, r2, matchers) successes, failures = get_matchers_results(r1, r2, matchers)
if failures: if failures:
log.debug(f"Requests {r1} and {r2} differ.\nFailure details:\n{failures}") log.debug("Requests {} and {} differ.\n" "Failure details:\n" "{}".format(r1, r2, failures))
return len(failures) == 0 return len(failures) == 0
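
As a side note, the matcher functions in this file plug into the matcher registry referenced by VCR.matchers and match_on in config.py above. Below is a small illustrative sketch of a custom matcher following the same raise-AssertionError-on-mismatch convention seen on the left-hand side; register_matcher and match_on are standard vcrpy API, but the matcher itself is hypothetical.

import vcr
from vcr.util import read_body

def body_length(r1, r2):
    # Hypothetical matcher: compare request body lengths only.
    b1, b2 = read_body(r1) or b"", read_body(r2) or b""
    if len(b1) != len(b2):
        raise AssertionError(f"body length {len(b1)} != {len(b2)}")

my_vcr = vcr.VCR()
my_vcr.register_matcher("body_length", body_length)

with my_vcr.use_cassette("cassettes/example.yaml", match_on=["method", "uri", "body_length"]):
    pass  # issue requests here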


@@ -7,7 +7,7 @@ It merges and deletes the request obsolete keys (protocol, host, port, path)
into new 'uri' key. into new 'uri' key.
Usage:: Usage::
python3 -m vcr.migration PATH python -m vcr.migration PATH
The PATH can be path to the directory with cassettes or cassette itself The PATH can be path to the directory with cassettes or cassette itself
""" """
@@ -55,7 +55,7 @@ def build_uri(**parts):
port = parts["port"] port = parts["port"]
scheme = parts["protocol"] scheme = parts["protocol"]
default_port = {"https": 443, "http": 80}[scheme] default_port = {"https": 443, "http": 80}[scheme]
parts["port"] = f":{port}" if port != default_port else "" parts["port"] = ":{}".format(port) if port != default_port else ""
return "{protocol}://{host}{port}{path}".format(**parts) return "{protocol}://{host}{port}{path}".format(**parts)
@@ -92,7 +92,7 @@ def migrate_json(in_fp, out_fp):
def _list_of_tuples_to_dict(fs): def _list_of_tuples_to_dict(fs):
return dict(fs[0]) return {k: v for k, v in fs[0]}
def _already_migrated(data): def _already_migrated(data):
@@ -118,7 +118,7 @@ def migrate(file_path, migration_fn):
# because we assume that original files can be reverted # because we assume that original files can be reverted
# we will try to copy the content. (os.rename not needed) # we will try to copy the content. (os.rename not needed)
with tempfile.TemporaryFile(mode="w+") as out_fp: with tempfile.TemporaryFile(mode="w+") as out_fp:
with open(file_path) as in_fp: with open(file_path, "r") as in_fp:
if not migration_fn(in_fp, out_fp): if not migration_fn(in_fp, out_fp):
return False return False
with open(file_path, "w") as in_fp: with open(file_path, "w") as in_fp:
@@ -130,7 +130,7 @@ def migrate(file_path, migration_fn):
def try_migrate(path): def try_migrate(path):
if path.endswith(".json"): if path.endswith(".json"):
return migrate(path, migrate_json) return migrate(path, migrate_json)
elif path.endswith((".yaml", ".yml")): elif path.endswith(".yaml") or path.endswith(".yml"):
return migrate(path, migrate_yml) return migrate(path, migrate_yml)
return False return False
@@ -138,7 +138,7 @@ def try_migrate(path):
def main(): def main():
if len(sys.argv) != 2: if len(sys.argv) != 2:
raise SystemExit( raise SystemExit(
"Please provide path to cassettes directory or file. Usage: python3 -m vcr.migration PATH", "Please provide path to cassettes directory or file. " "Usage: python -m vcr.migration PATH"
) )
path = sys.argv[1] path = sys.argv[1]
@@ -150,7 +150,7 @@ def main():
for file_path in files: for file_path in files:
migrated = try_migrate(file_path) migrated = try_migrate(file_path)
status = "OK" if migrated else "FAIL" status = "OK" if migrated else "FAIL"
sys.stderr.write(f"[{status}] {file_path}\n") sys.stderr.write("[{}] {}\n".format(status, file_path))
sys.stderr.write("Done.\n") sys.stderr.write("Done.\n")


@@ -1,5 +1,4 @@
"""Utilities for patching in cassettes""" """Utilities for patching in cassettes"""
import contextlib import contextlib
import functools import functools
import http.client as httplib import http.client as httplib
@@ -25,7 +24,7 @@ except ImportError as e:
else: else:
raise RuntimeError( raise RuntimeError(
"vcrpy >=4.2.2 and botocore <1.11.0 are not compatible" "vcrpy >=4.2.2 and botocore <1.11.0 are not compatible"
"; please upgrade botocore (or downgrade vcrpy)", "; please upgrade botocore (or downgrade vcrpy)"
) from e ) from e
else: else:
_Boto3VerifiedHTTPSConnection = AWSHTTPSConnection _Boto3VerifiedHTTPSConnection = AWSHTTPSConnection
@@ -54,7 +53,7 @@ else:
if requests.__build__ < 0x021602: if requests.__build__ < 0x021602:
raise RuntimeError( raise RuntimeError(
"vcrpy >=4.2.2 and requests <2.16.2 are not compatible" "vcrpy >=4.2.2 and requests <2.16.2 are not compatible"
"; please upgrade requests (or downgrade vcrpy)", "; please upgrade requests (or downgrade vcrpy)"
) )
@@ -68,6 +67,14 @@ else:
_HTTPSConnectionWithTimeout = httplib2.HTTPSConnectionWithTimeout _HTTPSConnectionWithTimeout = httplib2.HTTPSConnectionWithTimeout
_SCHEME_TO_CONNECTION = httplib2.SCHEME_TO_CONNECTION _SCHEME_TO_CONNECTION = httplib2.SCHEME_TO_CONNECTION
# Try to save the original types for boto
try:
import boto.https_connection
except ImportError: # pragma: no cover
pass
else:
_CertValidatingHTTPSConnection = boto.https_connection.CertValidatingHTTPSConnection
# Try to save the original types for Tornado # Try to save the original types for Tornado
try: try:
import tornado.simple_httpclient import tornado.simple_httpclient
@@ -96,8 +103,8 @@ try:
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
pass pass
else: else:
_HttpxSyncClient_send_single_request = httpx.Client._send_single_request _HttpxSyncClient_send = httpx.Client.send
_HttpxAsyncClient_send_single_request = httpx.AsyncClient._send_single_request _HttpxAsyncClient_send = httpx.AsyncClient.send
class CassettePatcherBuilder: class CassettePatcherBuilder:
@@ -119,6 +126,7 @@ class CassettePatcherBuilder:
self._boto3(), self._boto3(),
self._urllib3(), self._urllib3(),
self._httplib2(), self._httplib2(),
self._boto(),
self._tornado(), self._tornado(),
self._aiohttp(), self._aiohttp(),
self._httpx(), self._httpx(),
@@ -136,9 +144,7 @@ class CassettePatcherBuilder:
return return
return mock.patch.object( return mock.patch.object(
obj, obj, patched_attribute, self._recursively_apply_get_cassette_subclass(replacement_class)
patched_attribute,
self._recursively_apply_get_cassette_subclass(replacement_class),
) )
def _recursively_apply_get_cassette_subclass(self, replacement_dict_or_obj): def _recursively_apply_get_cassette_subclass(self, replacement_dict_or_obj):
@@ -180,7 +186,9 @@ class CassettePatcherBuilder:
bases = (base_class,) bases = (base_class,)
if not issubclass(base_class, object): # Check for old style class if not issubclass(base_class, object): # Check for old style class
bases += (object,) bases += (object,)
return type(f"{base_class.__name__}{self._cassette._path}", bases, {"cassette": self._cassette}) return type(
"{}{}".format(base_class.__name__, self._cassette._path), bases, dict(cassette=self._cassette)
)
@_build_patchers_from_mock_triples_decorator @_build_patchers_from_mock_triples_decorator
def _httplib(self): def _httplib(self):
@@ -261,14 +269,21 @@ class CassettePatcherBuilder:
yield cpool, "HTTPConnectionWithTimeout", VCRHTTPConnectionWithTimeout yield cpool, "HTTPConnectionWithTimeout", VCRHTTPConnectionWithTimeout
yield cpool, "HTTPSConnectionWithTimeout", VCRHTTPSConnectionWithTimeout yield cpool, "HTTPSConnectionWithTimeout", VCRHTTPSConnectionWithTimeout
yield ( yield cpool, "SCHEME_TO_CONNECTION", {
cpool, "http": VCRHTTPConnectionWithTimeout,
"SCHEME_TO_CONNECTION", "https": VCRHTTPSConnectionWithTimeout,
{ }
"http": VCRHTTPConnectionWithTimeout,
"https": VCRHTTPSConnectionWithTimeout, @_build_patchers_from_mock_triples_decorator
}, def _boto(self):
) try:
import boto.https_connection as cpool
except ImportError: # pragma: no cover
pass
else:
from .stubs.boto_stubs import VCRCertValidatingHTTPSConnection
yield cpool, "CertValidatingHTTPSConnection", VCRCertValidatingHTTPSConnection
@_build_patchers_from_mock_triples_decorator @_build_patchers_from_mock_triples_decorator
def _tornado(self): def _tornado(self):
@@ -312,18 +327,18 @@ class CassettePatcherBuilder:
else: else:
from .stubs.httpx_stubs import async_vcr_send, sync_vcr_send from .stubs.httpx_stubs import async_vcr_send, sync_vcr_send
new_async_client_send = async_vcr_send(self._cassette, _HttpxAsyncClient_send_single_request) new_async_client_send = async_vcr_send(self._cassette, _HttpxAsyncClient_send)
yield httpx.AsyncClient, "_send_single_request", new_async_client_send yield httpx.AsyncClient, "send", new_async_client_send
new_sync_client_send = sync_vcr_send(self._cassette, _HttpxSyncClient_send_single_request) new_sync_client_send = sync_vcr_send(self._cassette, _HttpxSyncClient_send)
yield httpx.Client, "_send_single_request", new_sync_client_send yield httpx.Client, "send", new_sync_client_send
def _urllib3_patchers(self, cpool, conn, stubs): def _urllib3_patchers(self, cpool, conn, stubs):
http_connection_remover = ConnectionRemover( http_connection_remover = ConnectionRemover(
self._get_cassette_subclass(stubs.VCRRequestsHTTPConnection), self._get_cassette_subclass(stubs.VCRRequestsHTTPConnection)
) )
https_connection_remover = ConnectionRemover( https_connection_remover = ConnectionRemover(
self._get_cassette_subclass(stubs.VCRRequestsHTTPSConnection), self._get_cassette_subclass(stubs.VCRRequestsHTTPSConnection)
) )
mock_triples = ( mock_triples = (
(conn, "VerifiedHTTPSConnection", stubs.VCRRequestsHTTPSConnection), (conn, "VerifiedHTTPSConnection", stubs.VCRRequestsHTTPSConnection),
@@ -373,6 +388,10 @@ class ConnectionRemover:
if isinstance(connection, self._connection_class): if isinstance(connection, self._connection_class):
self._connection_pool_to_connections.setdefault(pool, set()).add(connection) self._connection_pool_to_connections.setdefault(pool, set()).add(connection)
def remove_connection_to_pool_entry(self, pool, connection):
if isinstance(connection, self._connection_class):
self._connection_pool_to_connections[self._connection_class].remove(connection)
def __enter__(self): def __enter__(self):
return self return self
@@ -383,13 +402,10 @@ class ConnectionRemover:
connection = pool.pool.get() connection = pool.pool.get()
if isinstance(connection, self._connection_class): if isinstance(connection, self._connection_class):
connections.remove(connection) connections.remove(connection)
connection.close()
else: else:
readd_connections.append(connection) readd_connections.append(connection)
for connection in readd_connections: for connection in readd_connections:
pool._put_conn(connection) pool._put_conn(connection)
for connection in connections:
connection.close()
def reset_patchers(): def reset_patchers():
@@ -431,6 +447,13 @@ def reset_patchers():
yield mock.patch.object(cpool, "HTTPSConnectionWithTimeout", _HTTPSConnectionWithTimeout) yield mock.patch.object(cpool, "HTTPSConnectionWithTimeout", _HTTPSConnectionWithTimeout)
yield mock.patch.object(cpool, "SCHEME_TO_CONNECTION", _SCHEME_TO_CONNECTION) yield mock.patch.object(cpool, "SCHEME_TO_CONNECTION", _SCHEME_TO_CONNECTION)
try:
import boto.https_connection as cpool
except ImportError: # pragma: no cover
pass
else:
yield mock.patch.object(cpool, "CertValidatingHTTPSConnection", _CertValidatingHTTPSConnection)
try: try:
import tornado.simple_httpclient as simple import tornado.simple_httpclient as simple
except ImportError: # pragma: no cover except ImportError: # pragma: no cover


@@ -5,25 +5,17 @@ from pathlib import Path
from ..serialize import deserialize, serialize from ..serialize import deserialize, serialize
class CassetteNotFoundError(FileNotFoundError):
pass
class CassetteDecodeError(ValueError):
pass
class FilesystemPersister: class FilesystemPersister:
@classmethod @classmethod
def load_cassette(cls, cassette_path, serializer): def load_cassette(cls, cassette_path, serializer):
cassette_path = Path(cassette_path) # if cassette path is already Path this is no operation cassette_path = Path(cassette_path) # if cassette path is already Path this is no operation
if not cassette_path.is_file(): if not cassette_path.is_file():
raise CassetteNotFoundError() raise ValueError("Cassette not found.")
try: try:
with cassette_path.open() as f: with cassette_path.open() as f:
data = f.read() data = f.read()
except UnicodeDecodeError as err: except UnicodeEncodeError as err:
raise CassetteDecodeError("Can't read Cassette, Encoding is broken") from err raise ValueError("Can't read Cassette, Encoding is broken") from err
return deserialize(data, serializer) return deserialize(data, serializer)


@@ -3,7 +3,7 @@ import warnings
from io import BytesIO from io import BytesIO
from urllib.parse import parse_qsl, urlparse from urllib.parse import parse_qsl, urlparse
from .util import CaseInsensitiveDict, _is_nonsequence_iterator from .util import CaseInsensitiveDict
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -17,25 +17,13 @@ class Request:
self.method = method self.method = method
self.uri = uri self.uri = uri
self._was_file = hasattr(body, "read") self._was_file = hasattr(body, "read")
self._was_iter = _is_nonsequence_iterator(body)
if self._was_file: if self._was_file:
self.body = body.read() self.body = body.read()
elif self._was_iter:
self.body = list(body)
else: else:
self.body = body self.body = body
self.headers = headers self.headers = headers
log.debug("Invoking Request %s", self.uri) log.debug("Invoking Request %s", self.uri)
@property
def uri(self):
return self._uri
@uri.setter
def uri(self, uri):
self._uri = uri
self.parsed_uri = urlparse(uri)
@property @property
def headers(self): def headers(self):
return self._headers return self._headers
@@ -48,11 +36,7 @@ class Request:
@property @property
def body(self): def body(self):
if self._was_file: return BytesIO(self._body) if self._was_file else self._body
return BytesIO(self._body)
if self._was_iter:
return iter(self._body)
return self._body
@body.setter @body.setter
def body(self, value): def body(self, value):
@@ -62,37 +46,37 @@ class Request:
def add_header(self, key, value): def add_header(self, key, value):
warnings.warn( warnings.warn(
"Request.add_header is deprecated. Please assign to request.headers instead.", "Request.add_header is deprecated. " "Please assign to request.headers instead.",
DeprecationWarning, DeprecationWarning,
stacklevel=2,
) )
self.headers[key] = value self.headers[key] = value
@property @property
def scheme(self): def scheme(self):
return self.parsed_uri.scheme return urlparse(self.uri).scheme
@property @property
def host(self): def host(self):
return self.parsed_uri.hostname return urlparse(self.uri).hostname
@property @property
def port(self): def port(self):
port = self.parsed_uri.port parse_uri = urlparse(self.uri)
port = parse_uri.port
if port is None: if port is None:
try: try:
port = {"https": 443, "http": 80}[self.parsed_uri.scheme] port = {"https": 443, "http": 80}[parse_uri.scheme]
except KeyError: except KeyError:
pass pass
return port return port
@property @property
def path(self): def path(self):
return self.parsed_uri.path return urlparse(self.uri).path
@property @property
def query(self): def query(self):
q = self.parsed_uri.query q = urlparse(self.uri).query
return sorted(parse_qsl(q)) return sorted(parse_qsl(q))
# alias for backwards compatibility # alias for backwards compatibility
@@ -106,7 +90,7 @@ class Request:
return self.scheme return self.scheme
def __str__(self): def __str__(self):
return f"<Request ({self.method}) {self.uri}>" return "<Request ({}) {}>".format(self.method, self.uri)
def __repr__(self): def __repr__(self):
return self.__str__() return self.__str__()


@@ -28,7 +28,7 @@ def _warn_about_old_cassette_format():
raise ValueError( raise ValueError(
"Your cassette files were generated in an older version " "Your cassette files were generated in an older version "
"of VCR. Delete your cassettes or run the migration script." "of VCR. Delete your cassettes or run the migration script."
"See http://git.io/mHhLBg for more details.", "See http://git.io/mHhLBg for more details."
) )


@@ -56,7 +56,7 @@ def convert_body_to_unicode(resp):
If the request or responses body is bytes, decode it to a string If the request or responses body is bytes, decode it to a string
(for python3 support) (for python3 support)
""" """
if not isinstance(resp, dict): if type(resp) is not dict:
# Some of the tests just serialize and deserialize a string. # Some of the tests just serialize and deserialize a string.
return _convert_string_to_unicode(resp) return _convert_string_to_unicode(resp)
else: else:


@@ -1,4 +1,7 @@
import json try:
import simplejson as json
except ImportError:
import json
def deserialize(cassette_string): def deserialize(cassette_string):
@@ -14,5 +17,13 @@ def serialize(cassette_dict):
try: try:
return json.dumps(cassette_dict, indent=4) + "\n" return json.dumps(cassette_dict, indent=4) + "\n"
except TypeError: except UnicodeDecodeError as original: # py2
raise TypeError(error_message) from None raise UnicodeDecodeError(
original.encoding,
b"Error serializing cassette to JSON",
original.start,
original.end,
original.args[-1] + error_message,
)
except TypeError: # py3
raise TypeError(error_message)


@@ -66,7 +66,6 @@ class VCRHTTPResponse(HTTPResponse):
self.reason = recorded_response["status"]["message"] self.reason = recorded_response["status"]["message"]
self.status = self.code = recorded_response["status"]["code"] self.status = self.code = recorded_response["status"]["code"]
self.version = None self.version = None
self.version_string = None
self._content = BytesIO(self.recorded_response["body"]["string"]) self._content = BytesIO(self.recorded_response["body"]["string"])
self._closed = False self._closed = False
self._original_response = self # for requests.session.Session cookie extraction self._original_response = self # for requests.session.Session cookie extraction
@@ -94,9 +93,6 @@ class VCRHTTPResponse(HTTPResponse):
def read(self, *args, **kwargs): def read(self, *args, **kwargs):
return self._content.read(*args, **kwargs) return self._content.read(*args, **kwargs)
def read1(self, *args, **kwargs):
return self._content.read1(*args, **kwargs)
def readall(self): def readall(self):
return self._content.readall() return self._content.readall()
@@ -171,13 +167,6 @@ class VCRHTTPResponse(HTTPResponse):
def drain_conn(self): def drain_conn(self):
pass pass
def stream(self, amt=65536, decode_content=None):
while True:
b = self._content.read(amt)
yield b
if not b:
break
class VCRConnection: class VCRConnection:
# A reference to the cassette that's currently being patched in # A reference to the cassette that's currently being patched in
@@ -187,40 +176,28 @@ class VCRConnection:
""" """
Returns empty string for the default port and ':port' otherwise Returns empty string for the default port and ':port' otherwise
""" """
port = ( port = self.real_connection.port
self.real_connection.port
if not self.real_connection._tunnel_host
else self.real_connection._tunnel_port
)
default_port = {"https": 443, "http": 80}[self._protocol] default_port = {"https": 443, "http": 80}[self._protocol]
return f":{port}" if port != default_port else "" return ":{}".format(port) if port != default_port else ""
def _real_host(self):
"""Returns the request host"""
if self.real_connection._tunnel_host:
# The real connection is to an HTTPS proxy
return self.real_connection._tunnel_host
else:
return self.real_connection.host
def _uri(self, url): def _uri(self, url):
"""Returns request absolute URI""" """Returns request absolute URI"""
if url and not url.startswith("/"): if url and not url.startswith("/"):
# Then this must be a proxy request. # Then this must be a proxy request.
return url return url
uri = f"{self._protocol}://{self._real_host()}{self._port_postfix()}{url}" uri = "{}://{}{}{}".format(self._protocol, self.real_connection.host, self._port_postfix(), url)
log.debug("Absolute URI: %s", uri) log.debug("Absolute URI: %s", uri)
return uri return uri
def _url(self, uri): def _url(self, uri):
"""Returns request selector url from absolute URI""" """Returns request selector url from absolute URI"""
prefix = f"{self._protocol}://{self._real_host()}{self._port_postfix()}" prefix = "{}://{}{}".format(self._protocol, self.real_connection.host, self._port_postfix())
return uri.replace(prefix, "", 1) return uri.replace(prefix, "", 1)
def request(self, method, url, body=None, headers=None, *args, **kwargs): def request(self, method, url, body=None, headers=None, *args, **kwargs):
"""Persist the request metadata in self._vcr_request""" """Persist the request metadata in self._vcr_request"""
self._vcr_request = Request(method=method, uri=self._uri(url), body=body, headers=headers or {}) self._vcr_request = Request(method=method, uri=self._uri(url), body=body, headers=headers or {})
log.debug(f"Got {self._vcr_request}") log.debug("Got {}".format(self._vcr_request))
# Note: The request may not actually be finished at this point, so # Note: The request may not actually be finished at this point, so
# I'm not sending the actual request until getresponse(). This # I'm not sending the actual request until getresponse(). This
@@ -236,7 +213,7 @@ class VCRConnection:
of putheader() calls. of putheader() calls.
""" """
self._vcr_request = Request(method=method, uri=self._uri(url), body="", headers={}) self._vcr_request = Request(method=method, uri=self._uri(url), body="", headers={})
log.debug(f"Got {self._vcr_request}") log.debug("Got {}".format(self._vcr_request))
def putheader(self, header, *values): def putheader(self, header, *values):
self._vcr_request.headers[header] = values self._vcr_request.headers[header] = values
@@ -268,20 +245,19 @@ class VCRConnection:
# Check to see if the cassette has a response for this request. If so, # Check to see if the cassette has a response for this request. If so,
# then return it # then return it
if self.cassette.can_play_response_for(self._vcr_request): if self.cassette.can_play_response_for(self._vcr_request):
log.info(f"Playing response for {self._vcr_request} from cassette") log.info("Playing response for {} from cassette".format(self._vcr_request))
response = self.cassette.play_response(self._vcr_request) response = self.cassette.play_response(self._vcr_request)
return VCRHTTPResponse(response) return VCRHTTPResponse(response)
else: else:
if self.cassette.write_protected and self.cassette.filter_request(self._vcr_request): if self.cassette.write_protected and self.cassette.filter_request(self._vcr_request):
raise CannotOverwriteExistingCassetteException( raise CannotOverwriteExistingCassetteException(
cassette=self.cassette, cassette=self.cassette, failed_request=self._vcr_request
failed_request=self._vcr_request,
) )
# Otherwise, we should send the request, then get the response # Otherwise, we should send the request, then get the response
# and return it. # and return it.
log.info(f"{self._vcr_request} not in cassette, sending to real server") log.info("{} not in cassette, sending to real server".format(self._vcr_request))
# This is imported here to avoid circular import. # This is imported here to avoid circular import.
# TODO(@IvanMalison): Refactor to allow normal import. # TODO(@IvanMalison): Refactor to allow normal import.
from vcr.patch import force_reset from vcr.patch import force_reset
@@ -402,8 +378,6 @@ class VCRHTTPConnection(VCRConnection):
_baseclass = HTTPConnection _baseclass = HTTPConnection
_protocol = "http" _protocol = "http"
debuglevel = _baseclass.debuglevel
_http_vsn = _baseclass._http_vsn
class VCRHTTPSConnection(VCRConnection): class VCRHTTPSConnection(VCRConnection):
@@ -412,5 +386,3 @@ class VCRHTTPSConnection(VCRConnection):
_baseclass = HTTPSConnection _baseclass = HTTPSConnection
_protocol = "https" _protocol = "https"
is_verified = True is_verified = True
debuglevel = _baseclass.debuglevel
_http_vsn = _baseclass._http_vsn


@@ -1,12 +1,10 @@
"""Stubs for aiohttp HTTP clients""" """Stubs for aiohttp HTTP clients"""
import asyncio import asyncio
import functools import functools
import json import json
import logging import logging
from collections.abc import Mapping
from http.cookies import CookieError, Morsel, SimpleCookie from http.cookies import CookieError, Morsel, SimpleCookie
from typing import Union from typing import Mapping, Union
from aiohttp import ClientConnectionError, ClientResponse, CookieJar, RequestInfo, hdrs, streams from aiohttp import ClientConnectionError, ClientResponse, CookieJar, RequestInfo, hdrs, streams
from aiohttp.helpers import strip_auth_from_url from aiohttp.helpers import strip_auth_from_url
@@ -37,7 +35,7 @@ class MockClientResponse(ClientResponse):
session=None, session=None,
) )
async def json(self, *, encoding="utf-8", loads=json.loads, **kwargs): async def json(self, *, encoding="utf-8", loads=json.loads, **kwargs): # NOQA: E999
stripped = self._body.strip() stripped = self._body.strip()
if not stripped: if not stripped:
return None return None
@@ -68,7 +66,7 @@ def build_response(vcr_request, vcr_response, history):
headers=_deserialize_headers(vcr_request.headers), headers=_deserialize_headers(vcr_request.headers),
real_url=URL(vcr_request.url), real_url=URL(vcr_request.url),
) )
response = MockClientResponse(vcr_request.method, URL(vcr_request.url), request_info=request_info) response = MockClientResponse(vcr_request.method, URL(vcr_response.get("url")), request_info=request_info)
response.status = vcr_response["status"]["code"] response.status = vcr_response["status"]["code"]
response._body = vcr_response["body"].get("string", b"") response._body = vcr_response["body"].get("string", b"")
response.reason = vcr_response["status"]["message"] response.reason = vcr_response["status"]["message"]
@@ -164,7 +162,8 @@ async def record_response(cassette, vcr_request, response):
vcr_response = { vcr_response = {
"status": {"code": response.status, "message": response.reason}, "status": {"code": response.status, "message": response.reason},
"headers": _serialize_headers(response.headers), "headers": _serialize_headers(response.headers),
"body": body, "body": body, # NOQA: E999
"url": str(response.url),
} }
cassette.append(vcr_request, vcr_response) cassette.append(vcr_request, vcr_response)
@@ -262,7 +261,7 @@ def vcr_request(cassette, real_request):
vcr_request = Request(method, str(request_url), data, _serialize_headers(headers)) vcr_request = Request(method, str(request_url), data, _serialize_headers(headers))
if cassette.can_play_response_for(vcr_request): if cassette.can_play_response_for(vcr_request):
log.info(f"Playing response for {vcr_request} from cassette") log.info("Playing response for {} from cassette".format(vcr_request))
response = play_responses(cassette, vcr_request, kwargs) response = play_responses(cassette, vcr_request, kwargs)
for redirect in response.history: for redirect in response.history:
self._cookie_jar.update_cookies(redirect.cookies, redirect.url) self._cookie_jar.update_cookies(redirect.cookies, redirect.url)
@@ -274,7 +273,7 @@ def vcr_request(cassette, real_request):
log.info("%s not in cassette, sending to real server", vcr_request) log.info("%s not in cassette, sending to real server", vcr_request)
response = await real_request(self, method, url, **kwargs) response = await real_request(self, method, url, **kwargs) # NOQA: E999
await record_responses(cassette, vcr_request, response) await record_responses(cassette, vcr_request, response)
return response return response


@@ -1,5 +1,4 @@
"""Stubs for boto3""" """Stubs for boto3"""
from botocore.awsrequest import AWSHTTPConnection as HTTPConnection from botocore.awsrequest import AWSHTTPConnection as HTTPConnection
from botocore.awsrequest import AWSHTTPSConnection as VerifiedHTTPSConnection from botocore.awsrequest import AWSHTTPSConnection as VerifiedHTTPSConnection

vcr/stubs/boto_stubs.py Normal file

@@ -0,0 +1,9 @@
"""Stubs for boto"""
from boto.https_connection import CertValidatingHTTPSConnection
from ..stubs import VCRHTTPSConnection
class VCRCertValidatingHTTPSConnection(VCRHTTPSConnection):
_baseclass = CertValidatingHTTPSConnection


@@ -1,4 +1,3 @@
import asyncio
import functools import functools
import inspect import inspect
import logging import logging
@@ -7,9 +6,7 @@ from unittest.mock import MagicMock, patch
import httpx import httpx
from vcr.errors import CannotOverwriteExistingCassetteException from vcr.errors import CannotOverwriteExistingCassetteException
from vcr.filters import decode_response
from vcr.request import Request as VcrRequest from vcr.request import Request as VcrRequest
from vcr.serializers.compat import convert_body_to_bytes
_httpx_signature = inspect.signature(httpx.Client.request) _httpx_signature = inspect.signature(httpx.Client.request)
@@ -36,29 +33,14 @@ def _transform_headers(httpx_response):
return out return out
async def _to_serialized_response(resp, aread): def _to_serialized_response(httpx_response):
# The content shouldn't already have been read in by HTTPX. return {
assert not hasattr(resp, "_decoder") "status_code": httpx_response.status_code,
"http_version": httpx_response.http_version,
# Retrieve the content, but without decoding it. "headers": _transform_headers(httpx_response),
with patch.dict(resp.headers, {"Content-Encoding": ""}): "content": httpx_response.content.decode("utf-8", "ignore"),
if aread:
await resp.aread()
else:
resp.read()
result = {
"status": {"code": resp.status_code, "message": resp.reason_phrase},
"headers": _transform_headers(resp),
"body": {"string": resp.content},
} }
# As the content wasn't decoded, we restore the response to a state which
# will be capable of decoding the content for the consumer.
del resp._decoder
resp._content = resp._get_content_decoder().decode(resp.content)
return result
def _from_serialized_headers(headers): def _from_serialized_headers(headers):
""" """
@@ -75,32 +57,15 @@ def _from_serialized_headers(headers):
@patch("httpx.Response.close", MagicMock()) @patch("httpx.Response.close", MagicMock())
@patch("httpx.Response.read", MagicMock()) @patch("httpx.Response.read", MagicMock())
def _from_serialized_response(request, serialized_response, history=None): def _from_serialized_response(request, serialized_response, history=None):
# Cassette format generated for HTTPX requests by older versions of content = serialized_response.get("content").encode()
# vcrpy. We restructure the content to resemble what a regular
# cassette looks like.
if "status_code" in serialized_response:
serialized_response = decode_response(
convert_body_to_bytes(
{
"headers": serialized_response["headers"],
"body": {"string": serialized_response["content"]},
"status": {"code": serialized_response["status_code"]},
},
),
)
extensions = None
else:
extensions = {"reason_phrase": serialized_response["status"]["message"].encode()}
response = httpx.Response( response = httpx.Response(
status_code=serialized_response["status"]["code"], status_code=serialized_response.get("status_code"),
request=request, request=request,
headers=_from_serialized_headers(serialized_response["headers"]), headers=_from_serialized_headers(serialized_response.get("headers")),
content=serialized_response["body"]["string"], content=content,
history=history or [], history=history or [],
extensions=extensions,
) )
response._content = content
return response return response
@@ -126,23 +91,45 @@ def _shared_vcr_send(cassette, real_send, *args, **kwargs):
return vcr_request, None return vcr_request, None
async def _record_responses(cassette, vcr_request, real_response, aread): def _record_responses(cassette, vcr_request, real_response):
for past_real_response in real_response.history: for past_real_response in real_response.history:
past_vcr_request = _make_vcr_request(past_real_response.request) past_vcr_request = _make_vcr_request(past_real_response.request)
cassette.append(past_vcr_request, await _to_serialized_response(past_real_response, aread)) cassette.append(past_vcr_request, _to_serialized_response(past_real_response))
if real_response.history: if real_response.history:
# If there was a redirection keep we want the request which will hold the # If there was a redirection keep we want the request which will hold the
# final redirect value # final redirect value
vcr_request = _make_vcr_request(real_response.request) vcr_request = _make_vcr_request(real_response.request)
cassette.append(vcr_request, await _to_serialized_response(real_response, aread)) cassette.append(vcr_request, _to_serialized_response(real_response))
return real_response return real_response
def _play_responses(cassette, request, vcr_request, client, kwargs): def _play_responses(cassette, request, vcr_request, client, kwargs):
history = []
allow_redirects = kwargs.get(
HTTPX_REDIRECT_PARAM.name,
HTTPX_REDIRECT_PARAM.default,
)
vcr_response = cassette.play_response(vcr_request) vcr_response = cassette.play_response(vcr_request)
response = _from_serialized_response(request, vcr_response) response = _from_serialized_response(request, vcr_response)
while allow_redirects and 300 <= response.status_code <= 399:
next_url = response.headers.get("location")
if not next_url:
break
vcr_request = VcrRequest("GET", next_url, None, dict(response.headers))
vcr_request = cassette.find_requests_with_most_matches(vcr_request)[0][0]
history.append(response)
# add cookies from response to session cookie store
client.cookies.extract_cookies(response)
vcr_response = cassette.play_response(vcr_request)
response = _from_serialized_response(vcr_request, vcr_response, history)
return response return response
@@ -154,8 +141,7 @@ async def _async_vcr_send(cassette, real_send, *args, **kwargs):
return response return response
real_response = await real_send(*args, **kwargs) real_response = await real_send(*args, **kwargs)
await _record_responses(cassette, vcr_request, real_response, aread=True) return _record_responses(cassette, vcr_request, real_response)
return real_response
def async_vcr_send(cassette, real_send): def async_vcr_send(cassette, real_send):
@@ -166,22 +152,6 @@ def async_vcr_send(cassette, real_send):
return _inner_send return _inner_send
def _run_async_function(sync_func, *args, **kwargs):
"""
Safely run an asynchronous function from a synchronous context.
Handles both cases:
- An event loop is already running.
- No event loop exists yet.
"""
try:
asyncio.get_running_loop()
except RuntimeError:
return asyncio.run(sync_func(*args, **kwargs))
else:
# If inside a running loop, create a task and wait for it
return asyncio.ensure_future(sync_func(*args, **kwargs))
def _sync_vcr_send(cassette, real_send, *args, **kwargs): def _sync_vcr_send(cassette, real_send, *args, **kwargs):
vcr_request, response = _shared_vcr_send(cassette, real_send, *args, **kwargs) vcr_request, response = _shared_vcr_send(cassette, real_send, *args, **kwargs)
if response: if response:
@@ -190,8 +160,7 @@ def _sync_vcr_send(cassette, real_send, *args, **kwargs):
return response return response
real_response = real_send(*args, **kwargs) real_response = real_send(*args, **kwargs)
_run_async_function(_record_responses, cassette, vcr_request, real_response, aread=False) return _record_responses(cassette, vcr_request, real_response)
return real_response
def sync_vcr_send(cassette, real_send): def sync_vcr_send(cassette, real_send):


@@ -1,5 +1,4 @@
"""Stubs for tornado HTTP clients""" """Stubs for tornado HTTP clients"""
import functools import functools
from io import BytesIO from io import BytesIO
@@ -30,9 +29,9 @@ def vcr_fetch_impl(cassette, real_fetch_impl):
request, request,
599, 599,
error=Exception( error=Exception(
f"The request ({request!r}) uses AsyncHTTPClient functionality " "The request (%s) uses AsyncHTTPClient functionality "
"that is not yet supported by VCR.py. Please make the " "that is not yet supported by VCR.py. Please make the "
"request outside a VCR.py context.", "request outside a VCR.py context." % repr(request)
), ),
request_time=self.io_loop.time() - request.start_time, request_time=self.io_loop.time() - request.start_time,
) )
@@ -66,8 +65,7 @@ def vcr_fetch_impl(cassette, real_fetch_impl):
request, request,
599, 599,
error=CannotOverwriteExistingCassetteException( error=CannotOverwriteExistingCassetteException(
cassette=cassette, cassette=cassette, failed_request=vcr_request
failed_request=vcr_request,
), ),
request_time=self.io_loop.time() - request.start_time, request_time=self.io_loop.time() - request.start_time,
) )


@@ -1,39 +0,0 @@
import inspect
import os
import unittest
from .config import VCR
class VCRMixin:
"""A TestCase mixin that provides VCR integration."""
vcr_enabled = True
def setUp(self):
super().setUp()
if self.vcr_enabled:
kwargs = self._get_vcr_kwargs()
myvcr = self._get_vcr(**kwargs)
cm = myvcr.use_cassette(self._get_cassette_name())
self.cassette = cm.__enter__()
self.addCleanup(cm.__exit__, None, None, None)
def _get_vcr(self, **kwargs):
if "cassette_library_dir" not in kwargs:
kwargs["cassette_library_dir"] = self._get_cassette_library_dir()
return VCR(**kwargs)
def _get_vcr_kwargs(self, **kwargs):
return kwargs
def _get_cassette_library_dir(self):
testdir = os.path.dirname(inspect.getfile(self.__class__))
return os.path.join(testdir, "cassettes")
def _get_cassette_name(self):
return f"{self.__class__.__name__}.{self._testMethodName}.yaml"
class VCRTestCase(VCRMixin, unittest.TestCase):
pass


@@ -1,5 +1,9 @@
import types import types
from collections.abc import Mapping, MutableMapping
try:
from collections.abc import Mapping, MutableMapping
except ImportError:
from collections import Mapping, MutableMapping
# Shamelessly stolen from https://github.com/kennethreitz/requests/blob/master/requests/structures.py # Shamelessly stolen from https://github.com/kennethreitz/requests/blob/master/requests/structures.py
@@ -27,7 +31,7 @@ class CaseInsensitiveDict(MutableMapping):
""" """
def __init__(self, data=None, **kwargs): def __init__(self, data=None, **kwargs):
self._store = {} self._store = dict()
if data is None: if data is None:
data = {} data = {}
self.update(data, **kwargs) self.update(data, **kwargs)
@@ -89,28 +93,9 @@ def compose(*functions):
return composed return composed
def _is_nonsequence_iterator(obj):
return hasattr(obj, "__iter__") and not isinstance(
obj,
(bytearray, bytes, dict, list, str),
)
def read_body(request): def read_body(request):
if hasattr(request.body, "read"): if hasattr(request.body, "read"):
return request.body.read() return request.body.read()
if _is_nonsequence_iterator(request.body):
body = list(request.body)
if body:
if isinstance(body[0], str):
return "".join(body).encode("utf-8")
elif isinstance(body[0], (bytes, bytearray)):
return b"".join(body)
elif isinstance(body[0], int):
return bytes(body)
else:
raise ValueError(f"Body type {type(body[0])} not supported")
return b""
return request.body return request.body