mirror of
https://github.com/kevin1024/vcrpy.git
synced 2025-12-08 16:53:23 +00:00
Compare commits
102 Commits
v6.0.1
...
b28316ab10
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b28316ab10 | ||
|
|
3f78330c1e | ||
|
|
e8818e5c0b | ||
|
|
c7bb59ecd8 | ||
|
|
2183690da3 | ||
|
|
d5ba702a1b | ||
|
|
952994b365 | ||
|
|
e2f3240835 | ||
|
|
bb690833bc | ||
|
|
73eed94c47 | ||
|
|
a23fe0333a | ||
|
|
bb743861b6 | ||
|
|
ac70eaa17f | ||
|
|
d50f3385a6 | ||
|
|
14db4de224 | ||
|
|
2c4df79498 | ||
|
|
1456673cb4 | ||
|
|
19bd4e012c | ||
|
|
558c7fc625 | ||
|
|
8217a4c21b | ||
|
|
bd0aa59cd2 | ||
|
|
9a37817a3a | ||
|
|
b4c65bd677 | ||
|
|
93bc59508c | ||
|
|
e313a9cd52 | ||
|
|
5f1b20c4ca | ||
|
|
cd31d71901 | ||
|
|
4607ca1102 | ||
|
|
e3ced4385e | ||
|
|
80099ac6d7 | ||
|
|
440bc20faf | ||
|
|
3ddff27cda | ||
|
|
30b423e8c0 | ||
|
|
752ba0b749 | ||
|
|
c16e526d6a | ||
|
|
d64cdd337b | ||
|
|
ac230b76af | ||
|
|
965f3658d5 | ||
|
|
6465a5995b | ||
|
|
69ca261a88 | ||
|
|
3278619dcc | ||
|
|
3fb62e0f9b | ||
|
|
81978659f1 | ||
|
|
be651bd27c | ||
|
|
a6698ed060 | ||
|
|
48d0a2e453 | ||
|
|
5b858b132d | ||
|
|
c8d99a99ec | ||
|
|
ce27c63685 | ||
|
|
ab8944d3ca | ||
|
|
c6a7f4ae15 | ||
|
|
1d100dda25 | ||
|
|
7275e5d65d | ||
|
|
c6be705fb4 | ||
|
|
10b7f4efb3 | ||
|
|
7a6ef00f4d | ||
|
|
3bf6ac7184 | ||
|
|
983b2202ed | ||
|
|
15a6b71997 | ||
|
|
1ca708dcff | ||
|
|
f5597fa6c1 | ||
|
|
2b3247b3df | ||
|
|
d123a5e8d0 | ||
|
|
e2815fbc88 | ||
|
|
f9d4500c6e | ||
|
|
71eb624708 | ||
|
|
dc449715c1 | ||
|
|
275b9085f3 | ||
|
|
35650b141b | ||
|
|
9c8b679136 | ||
|
|
fab082eff5 | ||
|
|
ffc04f9128 | ||
|
|
4d84da1809 | ||
|
|
241b0bbd91 | ||
|
|
042e16c3e4 | ||
|
|
acef3f49bf | ||
|
|
9cfa6c5173 | ||
|
|
39a86ba3cf | ||
|
|
543c72ba51 | ||
|
|
86b114f2f5 | ||
|
|
4b06f3dba1 | ||
|
|
1c6503526b | ||
|
|
c9c05682cb | ||
|
|
39c8648aa7 | ||
|
|
dfff84d5bb | ||
|
|
40ac0de652 | ||
|
|
f3147f574b | ||
|
|
298a6933ff | ||
|
|
52da776b59 | ||
|
|
8842fb1c3a | ||
|
|
6c4ba172d8 | ||
|
|
c88f2c0dab | ||
|
|
3fd6b1c0b4 | ||
|
|
c6d87309f4 | ||
|
|
1fb9179cf9 | ||
|
|
a58e0d8830 | ||
|
|
acc101412d | ||
|
|
e60dafb8dc | ||
|
|
3ce5979acb | ||
|
|
36c7465cf7 | ||
|
|
010fa268d1 | ||
|
|
99c0384770 |
4
.github/workflows/codespell.yml
vendored
4
.github/workflows/codespell.yml
vendored
@@ -13,10 +13,10 @@ permissions:
|
|||||||
jobs:
|
jobs:
|
||||||
codespell:
|
codespell:
|
||||||
name: Check for spelling errors
|
name: Check for spelling errors
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v6
|
||||||
- name: Codespell
|
- name: Codespell
|
||||||
uses: codespell-project/actions-codespell@v2
|
uses: codespell-project/actions-codespell@v2
|
||||||
|
|||||||
6
.github/workflows/docs.yml
vendored
6
.github/workflows/docs.yml
vendored
@@ -7,11 +7,11 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
validate:
|
validate:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.12"
|
python-version: "3.12"
|
||||||
|
|
||||||
|
|||||||
47
.github/workflows/main.yml
vendored
47
.github/workflows/main.yml
vendored
@@ -5,53 +5,46 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
pull_request:
|
pull_request:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 16 * * 5" # Every Friday 4pm
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version:
|
python-version:
|
||||||
- "3.8"
|
|
||||||
- "3.9"
|
|
||||||
- "3.10"
|
- "3.10"
|
||||||
- "3.11"
|
- "3.11"
|
||||||
- "3.12"
|
- "3.12"
|
||||||
- "pypy-3.8"
|
- "3.13"
|
||||||
- "pypy-3.9"
|
- "pypy-3.11"
|
||||||
- "pypy-3.10"
|
|
||||||
urllib3-requirement:
|
|
||||||
- "urllib3>=2"
|
|
||||||
- "urllib3<2"
|
|
||||||
|
|
||||||
exclude:
|
|
||||||
- python-version: "3.8"
|
|
||||||
urllib3-requirement: "urllib3>=2"
|
|
||||||
- python-version: "pypy-3.8"
|
|
||||||
urllib3-requirement: "urllib3>=2"
|
|
||||||
- python-version: "3.9"
|
|
||||||
urllib3-requirement: "urllib3>=2"
|
|
||||||
- python-version: "pypy-3.9"
|
|
||||||
urllib3-requirement: "urllib3>=2"
|
|
||||||
- python-version: "pypy-3.10"
|
|
||||||
urllib3-requirement: "urllib3>=2"
|
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v7
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
cache: pip
|
allow-prereleases: true
|
||||||
|
|
||||||
- name: Install project dependencies
|
- name: Install project dependencies
|
||||||
run: |
|
run: |
|
||||||
pip install --upgrade pip
|
uv pip install --system --upgrade pip setuptools
|
||||||
pip install codecov '.[tests]' '${{ matrix.urllib3-requirement }}'
|
uv pip install --system codecov '.[tests]'
|
||||||
pip check
|
uv pip check
|
||||||
|
|
||||||
|
- name: Allow creation of user namespaces (e.g. to the unshare command)
|
||||||
|
run: |
|
||||||
|
# .. so that we don't get error:
|
||||||
|
# unshare: write failed /proc/self/uid_map: Operation not permitted
|
||||||
|
# Idea from https://github.com/YoYoGames/GameMaker-Bugs/issues/6015#issuecomment-2135552784 .
|
||||||
|
sudo sysctl kernel.apparmor_restrict_unprivileged_userns=0
|
||||||
|
|
||||||
- name: Run online tests
|
- name: Run online tests
|
||||||
run: ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m online
|
run: ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m online
|
||||||
|
|||||||
@@ -16,12 +16,12 @@ permissions:
|
|||||||
jobs:
|
jobs:
|
||||||
pre_commit_detect_outdated:
|
pre_commit_detect_outdated:
|
||||||
name: Detect outdated pre-commit hooks
|
name: Detect outdated pre-commit hooks
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
- name: Set up Python 3.12
|
- name: Set up Python 3.12
|
||||||
uses: actions/setup-python@v5
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: 3.12
|
python-version: 3.12
|
||||||
|
|
||||||
@@ -41,7 +41,7 @@ jobs:
|
|||||||
|
|
||||||
- name: Create pull request from changes (if any)
|
- name: Create pull request from changes (if any)
|
||||||
id: create-pull-request
|
id: create-pull-request
|
||||||
uses: peter-evans/create-pull-request@v5
|
uses: peter-evans/create-pull-request@v7
|
||||||
with:
|
with:
|
||||||
author: 'pre-commit <pre-commit@tools.invalid>'
|
author: 'pre-commit <pre-commit@tools.invalid>'
|
||||||
base: master
|
base: master
|
||||||
|
|||||||
8
.github/workflows/pre-commit.yml
vendored
8
.github/workflows/pre-commit.yml
vendored
@@ -11,10 +11,10 @@ on:
|
|||||||
jobs:
|
jobs:
|
||||||
pre-commit:
|
pre-commit:
|
||||||
name: Run pre-commit
|
name: Run pre-commit
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v4
|
- uses: actions/checkout@v6
|
||||||
- uses: actions/setup-python@v5
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: 3.12
|
python-version: 3.12
|
||||||
- uses: pre-commit/action@v3.0.0
|
- uses: pre-commit/action@v3.0.1
|
||||||
|
|||||||
@@ -3,14 +3,14 @@
|
|||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
rev: v0.1.13
|
rev: v0.14.6
|
||||||
hooks:
|
hooks:
|
||||||
- id: ruff
|
- id: ruff
|
||||||
args: ["--show-source"]
|
args: ["--output-format=full"]
|
||||||
- id: ruff-format
|
- id: ruff-format
|
||||||
|
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: v4.5.0
|
rev: v6.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: check-merge-conflict
|
- id: check-merge-conflict
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
|
|||||||
@@ -7,7 +7,7 @@ version: 2
|
|||||||
|
|
||||||
# Set the version of Python and other tools you might need
|
# Set the version of Python and other tools you might need
|
||||||
build:
|
build:
|
||||||
os: ubuntu-22.04
|
os: ubuntu-24.04
|
||||||
tools:
|
tools:
|
||||||
python: "3.12"
|
python: "3.12"
|
||||||
|
|
||||||
|
|||||||
@@ -427,3 +427,16 @@ If you want to save the cassette only when the test succeeds, set the Cassette
|
|||||||
|
|
||||||
# Since there was an exception, the cassette file hasn't been created.
|
# Since there was an exception, the cassette file hasn't been created.
|
||||||
assert not os.path.exists('fixtures/vcr_cassettes/synopsis.yaml')
|
assert not os.path.exists('fixtures/vcr_cassettes/synopsis.yaml')
|
||||||
|
|
||||||
|
Drop unused requests
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
Even if any HTTP request is changed or removed from tests, previously recorded
|
||||||
|
interactions remain in the cassette file. If set the ``drop_unused_requests``
|
||||||
|
option to ``True``, VCR will not save old HTTP interactions if they are not used.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
my_vcr = VCR(drop_unused_requests=True)
|
||||||
|
with my_vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
|
||||||
|
... # your HTTP interactions here
|
||||||
|
|||||||
@@ -7,6 +7,32 @@ For a full list of triaged issues, bugs and PRs and what release they are target
|
|||||||
|
|
||||||
All help in providing PRs to close out bug issues is appreciated. Even if that is providing a repo that fully replicates issues. We have very generous contributors that have added these to bug issues which meant another contributor picked up the bug and closed it out.
|
All help in providing PRs to close out bug issues is appreciated. Even if that is providing a repo that fully replicates issues. We have very generous contributors that have added these to bug issues which meant another contributor picked up the bug and closed it out.
|
||||||
|
|
||||||
|
- 8.0.0
|
||||||
|
- BREAKING: Drop support for Python 3.9 (major version bump) - thanks @jairhenrique
|
||||||
|
- BREAKING: Drop support for urllib3 < 2 - fixes CVE warnings from urllib3 1.x (#926, #880) - thanks @jairhenrique
|
||||||
|
- New feature: ``drop_unused_requests`` option to remove unused interactions from cassettes (#763) - thanks @danielnsilva
|
||||||
|
- Rewrite httpx support to patch httpcore instead of httpx (#943) - thanks @seowalex
|
||||||
|
- Fixes ``httpx.ResponseNotRead`` exceptions (#832, #834)
|
||||||
|
- Fixes ``KeyError: 'follow_redirects'`` (#945)
|
||||||
|
- Adds support for custom httpx transports
|
||||||
|
- Fix HTTPS proxy handling - proxy address no longer ends up in cassette URIs (#809, #914) - thanks @alga
|
||||||
|
- Fix ``iscoroutinefunction`` deprecation warning on Python 3.14 - thanks @kloczek
|
||||||
|
- Only log message if response is appended - thanks @talfus-laddus
|
||||||
|
- Optimize urllib.parse calls - thanks @Martin-Brunthaler
|
||||||
|
- Fix CI for Ubuntu 24.04 - thanks @hartwork
|
||||||
|
- Various CI improvements: migrate to uv, update GitHub Actions - thanks @jairhenrique
|
||||||
|
- Various linting and test improvements - thanks @jairhenrique and @hartwork
|
||||||
|
|
||||||
|
- 7.0.0
|
||||||
|
- Drop support for python 3.8 (major version bump) - thanks @jairhenrique
|
||||||
|
- Various linting and test fixes - thanks @jairhenrique
|
||||||
|
- Bugfix for urllib2>=2.3.0 - missing version_string (#888)
|
||||||
|
- Bugfix for asyncio.run - thanks @alekeik1
|
||||||
|
- 6.0.2
|
||||||
|
- Ensure body is consumed only once (#846) - thanks @sathieu
|
||||||
|
- Permit urllib3 2.x for non-PyPy Python >=3.10
|
||||||
|
- Fix typos in test commands - thanks @chuckwondo
|
||||||
|
- Several test and workflow improvements - thanks @hartwork and @graingert
|
||||||
- 6.0.1
|
- 6.0.1
|
||||||
- Bugfix with to Tornado cassette generator (thanks @graingert)
|
- Bugfix with to Tornado cassette generator (thanks @graingert)
|
||||||
- 6.0.0
|
- 6.0.0
|
||||||
|
|||||||
@@ -316,5 +316,5 @@ texinfo_documents = [
|
|||||||
|
|
||||||
|
|
||||||
# Example configuration for intersphinx: refer to the Python standard library.
|
# Example configuration for intersphinx: refer to the Python standard library.
|
||||||
intersphinx_mapping = {"https://docs.python.org/": None}
|
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
|
||||||
html_theme = "alabaster"
|
html_theme = "alabaster"
|
||||||
|
|||||||
@@ -115,8 +115,8 @@ in this example::
|
|||||||
pyenv local 3.12.0 pypy3.10
|
pyenv local 3.12.0 pypy3.10
|
||||||
|
|
||||||
# Run the whole test suite
|
# Run the whole test suite
|
||||||
pip install .[test]
|
pip install .[tests]
|
||||||
./run_tests.sh
|
./runtests.sh
|
||||||
|
|
||||||
|
|
||||||
Troubleshooting on MacOSX
|
Troubleshooting on MacOSX
|
||||||
|
|||||||
@@ -9,7 +9,7 @@ with pip::
|
|||||||
Compatibility
|
Compatibility
|
||||||
-------------
|
-------------
|
||||||
|
|
||||||
VCR.py supports Python 3.8+, and `pypy <http://pypy.org>`__.
|
VCR.py supports Python 3.9+, and `pypy <http://pypy.org>`__.
|
||||||
|
|
||||||
The following HTTP libraries are supported:
|
The following HTTP libraries are supported:
|
||||||
|
|
||||||
@@ -22,6 +22,7 @@ The following HTTP libraries are supported:
|
|||||||
- ``urllib2``
|
- ``urllib2``
|
||||||
- ``urllib3``
|
- ``urllib3``
|
||||||
- ``httpx``
|
- ``httpx``
|
||||||
|
- ``httpcore``
|
||||||
|
|
||||||
Speed
|
Speed
|
||||||
-----
|
-----
|
||||||
|
|||||||
@@ -1,2 +1,2 @@
|
|||||||
sphinx<8
|
sphinx<9
|
||||||
sphinx_rtd_theme==2.0.0
|
sphinx_rtd_theme==3.0.2
|
||||||
|
|||||||
@@ -1,21 +1,18 @@
|
|||||||
[tool.codespell]
|
[tool.codespell]
|
||||||
skip = '.git,*.pdf,*.svg,.tox'
|
skip = '.git,*.pdf,*.svg,.tox'
|
||||||
ignore-regex = "\\\\[fnrstv]"
|
ignore-regex = "\\\\[fnrstv]"
|
||||||
#
|
|
||||||
# ignore-words-list = ''
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
[tool.pytest]
|
||||||
addopts = [
|
addopts = ["--strict-config", "--strict-markers"]
|
||||||
"--strict-config",
|
asyncio_default_fixture_loop_scope = "session"
|
||||||
"--strict-markers",
|
asyncio_default_test_loop_scope = "session"
|
||||||
]
|
|
||||||
markers = ["online"]
|
markers = ["online"]
|
||||||
filterwarnings = [
|
|
||||||
"error",
|
|
||||||
'''ignore:datetime\.datetime\.utcfromtimestamp\(\) is deprecated and scheduled for removal in a future version.*:DeprecationWarning''',
|
|
||||||
]
|
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
|
line-length = 110
|
||||||
|
target-version = "py310"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
select = [
|
select = [
|
||||||
"B", # flake8-bugbear
|
"B", # flake8-bugbear
|
||||||
"C4", # flake8-comprehensions
|
"C4", # flake8-comprehensions
|
||||||
@@ -28,9 +25,8 @@ select = [
|
|||||||
"RUF", # Ruff-specific rules
|
"RUF", # Ruff-specific rules
|
||||||
"UP", # pyupgrade
|
"UP", # pyupgrade
|
||||||
"W", # pycodestyle warning
|
"W", # pycodestyle warning
|
||||||
|
"SIM",
|
||||||
]
|
]
|
||||||
line-length = 110
|
|
||||||
target-version = "py38"
|
|
||||||
|
|
||||||
[tool.ruff.isort]
|
[tool.ruff.lint.isort]
|
||||||
known-first-party = ["vcr"]
|
known-first-party = ["vcr"]
|
||||||
|
|||||||
48
setup.py
48
setup.py
@@ -3,12 +3,11 @@
|
|||||||
import codecs
|
import codecs
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import sys
|
from pathlib import Path
|
||||||
|
|
||||||
from setuptools import find_packages, setup
|
from setuptools import find_packages, setup
|
||||||
from setuptools.command.test import test as TestCommand
|
|
||||||
|
|
||||||
long_description = open("README.rst").read()
|
long_description = Path("README.rst").read_text()
|
||||||
here = os.path.abspath(os.path.dirname(__file__))
|
here = os.path.abspath(os.path.dirname(__file__))
|
||||||
|
|
||||||
|
|
||||||
@@ -28,56 +27,30 @@ def find_version(*file_paths):
|
|||||||
raise RuntimeError("Unable to find version string.")
|
raise RuntimeError("Unable to find version string.")
|
||||||
|
|
||||||
|
|
||||||
class PyTest(TestCommand):
|
|
||||||
def finalize_options(self):
|
|
||||||
TestCommand.finalize_options(self)
|
|
||||||
self.test_args = []
|
|
||||||
self.test_suite = True
|
|
||||||
|
|
||||||
def run_tests(self):
|
|
||||||
# import here, cause outside the eggs aren't loaded
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
errno = pytest.main(self.test_args)
|
|
||||||
sys.exit(errno)
|
|
||||||
|
|
||||||
|
|
||||||
install_requires = [
|
install_requires = [
|
||||||
"PyYAML",
|
"PyYAML",
|
||||||
"wrapt",
|
"wrapt",
|
||||||
"yarl",
|
|
||||||
# Support for urllib3 >=2 needs CPython >=3.10
|
|
||||||
# so we need to block urllib3 >=2 for Python <3.10 and PyPy for now.
|
|
||||||
# Note that vcrpy would work fine without any urllib3 around,
|
|
||||||
# so this block and the dependency can be dropped at some point
|
|
||||||
# in the future. For more Details:
|
|
||||||
# https://github.com/kevin1024/vcrpy/pull/699#issuecomment-1551439663
|
|
||||||
"urllib3 <2; python_version <'3.10'",
|
|
||||||
# https://github.com/kevin1024/vcrpy/pull/775#issuecomment-1847849962
|
|
||||||
"urllib3 <2; platform_python_implementation =='PyPy'",
|
|
||||||
]
|
]
|
||||||
|
|
||||||
extras_require = {
|
extras_require = {
|
||||||
"tests": [
|
"tests": [
|
||||||
"aiohttp",
|
"aiohttp",
|
||||||
"boto3",
|
"boto3",
|
||||||
|
"cryptography",
|
||||||
|
"httpbin",
|
||||||
|
"httpcore",
|
||||||
"httplib2",
|
"httplib2",
|
||||||
"httpx",
|
"httpx",
|
||||||
|
"pycurl; platform_python_implementation !='PyPy'",
|
||||||
|
"pytest",
|
||||||
"pytest-aiohttp",
|
"pytest-aiohttp",
|
||||||
"pytest-asyncio",
|
"pytest-asyncio",
|
||||||
"pytest-cov",
|
"pytest-cov",
|
||||||
"pytest-httpbin",
|
"pytest-httpbin",
|
||||||
"pytest",
|
|
||||||
"requests>=2.22.0",
|
"requests>=2.22.0",
|
||||||
"tornado",
|
"tornado",
|
||||||
"urllib3",
|
"urllib3",
|
||||||
# Needed to un-break httpbin 0.7.0. For httpbin >=0.7.1 and after,
|
"werkzeug==2.0.3",
|
||||||
# this pin and the dependency itself can be removed, provided
|
|
||||||
# that the related bug in httpbin has been fixed:
|
|
||||||
# https://github.com/kevin1024/vcrpy/issues/645#issuecomment-1562489489
|
|
||||||
# https://github.com/postmanlabs/httpbin/issues/673
|
|
||||||
# https://github.com/postmanlabs/httpbin/pull/674
|
|
||||||
"Werkzeug==2.0.3",
|
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -91,7 +64,7 @@ setup(
|
|||||||
author_email="me@kevinmccarthy.org",
|
author_email="me@kevinmccarthy.org",
|
||||||
url="https://github.com/kevin1024/vcrpy",
|
url="https://github.com/kevin1024/vcrpy",
|
||||||
packages=find_packages(exclude=["tests*"]),
|
packages=find_packages(exclude=["tests*"]),
|
||||||
python_requires=">=3.8",
|
python_requires=">=3.10",
|
||||||
install_requires=install_requires,
|
install_requires=install_requires,
|
||||||
license="MIT",
|
license="MIT",
|
||||||
extras_require=extras_require,
|
extras_require=extras_require,
|
||||||
@@ -102,11 +75,10 @@ setup(
|
|||||||
"Intended Audience :: Developers",
|
"Intended Audience :: Developers",
|
||||||
"Programming Language :: Python",
|
"Programming Language :: Python",
|
||||||
"Programming Language :: Python :: 3",
|
"Programming Language :: Python :: 3",
|
||||||
"Programming Language :: Python :: 3.8",
|
|
||||||
"Programming Language :: Python :: 3.9",
|
|
||||||
"Programming Language :: Python :: 3.10",
|
"Programming Language :: Python :: 3.10",
|
||||||
"Programming Language :: Python :: 3.11",
|
"Programming Language :: Python :: 3.11",
|
||||||
"Programming Language :: Python :: 3.12",
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
"Programming Language :: Python :: 3 :: Only",
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
"Programming Language :: Python :: Implementation :: PyPy",
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
|
|||||||
import os
|
|
||||||
import ssl
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def httpbin_ssl_context():
|
|
||||||
ssl_ca_location = os.environ["REQUESTS_CA_BUNDLE"]
|
|
||||||
ssl_cert_location = os.environ["REQUESTS_CA_BUNDLE"].replace("cacert.pem", "cert.pem")
|
|
||||||
ssl_key_location = os.environ["REQUESTS_CA_BUNDLE"].replace("cacert.pem", "key.pem")
|
|
||||||
|
|
||||||
ssl_context = ssl.create_default_context(cafile=ssl_ca_location)
|
|
||||||
ssl_context.load_cert_chain(ssl_cert_location, ssl_key_location)
|
|
||||||
|
|
||||||
return ssl_context
|
|
||||||
@@ -1,7 +1,11 @@
|
|||||||
|
import io
|
||||||
import logging
|
import logging
|
||||||
|
import ssl
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_httpbin.certs
|
||||||
|
import yarl
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
@@ -11,6 +15,8 @@ aiohttp = pytest.importorskip("aiohttp")
|
|||||||
|
|
||||||
from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402
|
from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402
|
||||||
|
|
||||||
|
HTTPBIN_SSL_CONTEXT = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
|
||||||
|
|
||||||
|
|
||||||
def run_in_loop(fn):
|
def run_in_loop(fn):
|
||||||
async def wrapper():
|
async def wrapper():
|
||||||
@@ -188,9 +194,11 @@ def test_params_same_url_distinct_params(tmpdir, httpbin):
|
|||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
other_params = {"other": "params"}
|
other_params = {"other": "params"}
|
||||||
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
with (
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette,
|
||||||
get(url, output="text", params=other_params)
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
|
):
|
||||||
|
get(url, output="text", params=other_params)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@@ -259,12 +267,6 @@ def test_aiohttp_test_client_json(aiohttp_client, tmpdir):
|
|||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
def test_cleanup_from_pytest_asyncio():
|
|
||||||
# work around https://github.com/pytest-dev/pytest-asyncio/issues/724
|
|
||||||
asyncio.get_event_loop().close()
|
|
||||||
asyncio.set_event_loop(None)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_redirect(tmpdir, httpbin):
|
def test_redirect(tmpdir, httpbin):
|
||||||
url = httpbin.url + "/redirect/2"
|
url = httpbin.url + "/redirect/2"
|
||||||
@@ -338,7 +340,7 @@ def test_double_requests(tmpdir, httpbin):
|
|||||||
assert cassette.play_count == 2
|
assert cassette.play_count == 2
|
||||||
|
|
||||||
|
|
||||||
def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
def test_cookies(httpbin_both, tmpdir):
|
||||||
async def run(loop):
|
async def run(loop):
|
||||||
cookies_url = httpbin_both.url + (
|
cookies_url = httpbin_both.url + (
|
||||||
"/response-headers?"
|
"/response-headers?"
|
||||||
@@ -353,12 +355,12 @@ def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# ------------------------- Record -------------------------- #
|
# ------------------------- Record -------------------------- #
|
||||||
with vcr.use_cassette(tmp) as cassette:
|
with vcr.use_cassette(tmp) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
home_resp = await session.get(
|
home_resp = await session.get(
|
||||||
home_url,
|
home_url,
|
||||||
cookies=req_cookies,
|
cookies=req_cookies,
|
||||||
headers=req_headers,
|
headers=req_headers,
|
||||||
ssl=httpbin_ssl_context,
|
ssl=HTTPBIN_SSL_CONTEXT,
|
||||||
)
|
)
|
||||||
assert cassette.play_count == 0
|
assert cassette.play_count == 0
|
||||||
assert_responses(cookies_resp, home_resp)
|
assert_responses(cookies_resp, home_resp)
|
||||||
@@ -366,12 +368,12 @@ def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# -------------------------- Play --------------------------- #
|
# -------------------------- Play --------------------------- #
|
||||||
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
home_resp = await session.get(
|
home_resp = await session.get(
|
||||||
home_url,
|
home_url,
|
||||||
cookies=req_cookies,
|
cookies=req_cookies,
|
||||||
headers=req_headers,
|
headers=req_headers,
|
||||||
ssl=httpbin_ssl_context,
|
ssl=HTTPBIN_SSL_CONTEXT,
|
||||||
)
|
)
|
||||||
assert cassette.play_count == 2
|
assert cassette.play_count == 2
|
||||||
assert_responses(cookies_resp, home_resp)
|
assert_responses(cookies_resp, home_resp)
|
||||||
@@ -388,7 +390,7 @@ def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
run_in_loop(run)
|
run_in_loop(run)
|
||||||
|
|
||||||
|
|
||||||
def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
def test_cookies_redirect(httpbin_both, tmpdir):
|
||||||
async def run(loop):
|
async def run(loop):
|
||||||
# Sets cookie as provided by the query string and redirects
|
# Sets cookie as provided by the query string and redirects
|
||||||
cookies_url = httpbin_both.url + "/cookies/set?Cookie_1=Val_1"
|
cookies_url = httpbin_both.url + "/cookies/set?Cookie_1=Val_1"
|
||||||
@@ -397,9 +399,9 @@ def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# ------------------------- Record -------------------------- #
|
# ------------------------- Record -------------------------- #
|
||||||
with vcr.use_cassette(tmp) as cassette:
|
with vcr.use_cassette(tmp) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
assert not cookies_resp.cookies
|
assert not cookies_resp.cookies
|
||||||
cookies = session.cookie_jar.filter_cookies(cookies_url)
|
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
|
||||||
assert cookies["Cookie_1"].value == "Val_1"
|
assert cookies["Cookie_1"].value == "Val_1"
|
||||||
assert cassette.play_count == 0
|
assert cassette.play_count == 0
|
||||||
|
|
||||||
@@ -408,9 +410,9 @@ def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# -------------------------- Play --------------------------- #
|
# -------------------------- Play --------------------------- #
|
||||||
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
assert not cookies_resp.cookies
|
assert not cookies_resp.cookies
|
||||||
cookies = session.cookie_jar.filter_cookies(cookies_url)
|
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
|
||||||
assert cookies["Cookie_1"].value == "Val_1"
|
assert cookies["Cookie_1"].value == "Val_1"
|
||||||
assert cassette.play_count == 2
|
assert cassette.play_count == 2
|
||||||
|
|
||||||
@@ -422,9 +424,9 @@ def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
|
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
|
||||||
]
|
]
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
assert not cookies_resp.cookies
|
assert not cookies_resp.cookies
|
||||||
cookies = session.cookie_jar.filter_cookies(cookies_url)
|
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
|
||||||
assert cookies["Cookie_1"].value == "Val_1"
|
assert cookies["Cookie_1"].value == "Val_1"
|
||||||
|
|
||||||
run_in_loop(run)
|
run_in_loop(run)
|
||||||
@@ -461,3 +463,19 @@ def test_filter_query_parameters(tmpdir, httpbin):
|
|||||||
cassette_content = f.read()
|
cassette_content = f.read()
|
||||||
assert "password" not in cassette_content
|
assert "password" not in cassette_content
|
||||||
assert "secret" not in cassette_content
|
assert "secret" not in cassette_content
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.online
|
||||||
|
def test_use_cassette_with_io(tmpdir, caplog, httpbin):
|
||||||
|
url = httpbin.url + "/post"
|
||||||
|
|
||||||
|
# test without cassettes
|
||||||
|
data = io.BytesIO(b"hello")
|
||||||
|
_, response_json = request("POST", url, output="json", data=data)
|
||||||
|
assert response_json["data"] == "hello"
|
||||||
|
|
||||||
|
# test with cassettes
|
||||||
|
data = io.BytesIO(b"hello")
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
|
||||||
|
_, response_json = request("POST", url, output="json", data=data)
|
||||||
|
assert response_json["data"] == "hello"
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ def test_basic_json_use(tmpdir, httpbin):
|
|||||||
test_fixture = str(tmpdir.join("synopsis.json"))
|
test_fixture = str(tmpdir.join("synopsis.json"))
|
||||||
with vcr.use_cassette(test_fixture, serializer="json"):
|
with vcr.use_cassette(test_fixture, serializer="json"):
|
||||||
response = urlopen(httpbin.url).read()
|
response = urlopen(httpbin.url).read()
|
||||||
assert b"A simple HTTP Request & Response Service." in response
|
assert b"HTTP Request & Response Service" in response
|
||||||
|
|
||||||
|
|
||||||
def test_patched_content(tmpdir, httpbin):
|
def test_patched_content(tmpdir, httpbin):
|
||||||
|
|||||||
@@ -5,6 +5,7 @@ from urllib.request import urlopen
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
from vcr.cassette import Cassette
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@@ -61,9 +62,8 @@ def test_override_match_on(tmpdir, httpbin):
|
|||||||
def test_missing_matcher():
|
def test_missing_matcher():
|
||||||
my_vcr = vcr.VCR()
|
my_vcr = vcr.VCR()
|
||||||
my_vcr.register_matcher("awesome", object)
|
my_vcr.register_matcher("awesome", object)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError), my_vcr.use_cassette("test.yaml", match_on=["notawesome"]):
|
||||||
with my_vcr.use_cassette("test.yaml", match_on=["notawesome"]):
|
pass
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@@ -80,8 +80,25 @@ def test_dont_record_on_exception(tmpdir, httpbin):
|
|||||||
assert not os.path.exists(str(tmpdir.join("dontsave.yml")))
|
assert not os.path.exists(str(tmpdir.join("dontsave.yml")))
|
||||||
|
|
||||||
# Make sure context decorator has the same behavior
|
# Make sure context decorator has the same behavior
|
||||||
with pytest.raises(AssertionError):
|
with pytest.raises(AssertionError), my_vcr.use_cassette(str(tmpdir.join("dontsave2.yml"))):
|
||||||
with my_vcr.use_cassette(str(tmpdir.join("dontsave2.yml"))):
|
assert b"Not in content" in urlopen(httpbin.url).read()
|
||||||
assert b"Not in content" in urlopen(httpbin.url).read()
|
|
||||||
|
|
||||||
assert not os.path.exists(str(tmpdir.join("dontsave2.yml")))
|
assert not os.path.exists(str(tmpdir.join("dontsave2.yml")))
|
||||||
|
|
||||||
|
|
||||||
|
def test_set_drop_unused_requests(tmpdir, httpbin):
|
||||||
|
my_vcr = vcr.VCR(drop_unused_requests=True)
|
||||||
|
file = str(tmpdir.join("test.yaml"))
|
||||||
|
|
||||||
|
with my_vcr.use_cassette(file):
|
||||||
|
urlopen(httpbin.url)
|
||||||
|
urlopen(httpbin.url + "/get")
|
||||||
|
|
||||||
|
cassette = Cassette.load(path=file)
|
||||||
|
assert len(cassette) == 2
|
||||||
|
|
||||||
|
with my_vcr.use_cassette(file):
|
||||||
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
|
cassette = Cassette.load(path=file)
|
||||||
|
assert len(cassette) == 1
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ from urllib.request import Request, urlopen
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
from vcr.filters import brotli
|
||||||
|
|
||||||
from ..assertions import assert_cassette_has_one_response, assert_is_json_bytes
|
from ..assertions import assert_cassette_has_one_response, assert_is_json_bytes
|
||||||
|
|
||||||
@@ -138,6 +139,22 @@ def test_decompress_deflate(tmpdir, httpbin):
|
|||||||
assert_is_json_bytes(decoded_response)
|
assert_is_json_bytes(decoded_response)
|
||||||
|
|
||||||
|
|
||||||
|
def test_decompress_brotli(tmpdir, httpbin):
|
||||||
|
if brotli is None:
|
||||||
|
# XXX: this is never true, because brotlipy is installed with "httpbin"
|
||||||
|
pytest.skip("Brotli is not installed")
|
||||||
|
|
||||||
|
url = httpbin.url + "/brotli"
|
||||||
|
request = Request(url, headers={"Accept-Encoding": ["gzip, deflate, br"]})
|
||||||
|
cass_file = str(tmpdir.join("brotli_response.yaml"))
|
||||||
|
with vcr.use_cassette(cass_file, decode_compressed_response=True):
|
||||||
|
urlopen(request)
|
||||||
|
with vcr.use_cassette(cass_file) as cass:
|
||||||
|
decoded_response = urlopen(url).read()
|
||||||
|
assert_cassette_has_one_response(cass)
|
||||||
|
assert_is_json_bytes(decoded_response)
|
||||||
|
|
||||||
|
|
||||||
def test_decompress_regular(tmpdir, httpbin):
|
def test_decompress_regular(tmpdir, httpbin):
|
||||||
"""Test that it doesn't try to decompress content that isn't compressed"""
|
"""Test that it doesn't try to decompress content that isn't compressed"""
|
||||||
url = httpbin.url + "/get"
|
url = httpbin.url + "/get"
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Integration tests with httplib2"""
|
"""Integration tests with httplib2"""
|
||||||
|
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|||||||
@@ -60,9 +60,8 @@ class DoSyncRequest(BaseDoRequest):
|
|||||||
return b"".join(response.iter_bytes())
|
return b"".join(response.iter_bytes())
|
||||||
|
|
||||||
# Use one-time context and dispose of the client afterwards
|
# Use one-time context and dispose of the client afterwards
|
||||||
with self:
|
with self, self.client.stream(*args, **kwargs) as response:
|
||||||
with self.client.stream(*args, **kwargs) as response:
|
return b"".join(response.iter_bytes())
|
||||||
return b"".join(response.iter_bytes())
|
|
||||||
|
|
||||||
|
|
||||||
class DoAsyncRequest(BaseDoRequest):
|
class DoAsyncRequest(BaseDoRequest):
|
||||||
@@ -195,9 +194,11 @@ def test_params_same_url_distinct_params(tmpdir, httpbin, do_request):
|
|||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
params = {"other": "params"}
|
params = {"other": "params"}
|
||||||
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
with (
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette,
|
||||||
do_request()("GET", url, params=params, headers=headers)
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
|
):
|
||||||
|
do_request()("GET", url, params=params, headers=headers)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
|
|||||||
@@ -51,9 +51,11 @@ def test_matchers(httpbin, httpbin_secure, cassette, matcher, matching_uri, not_
|
|||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
# play cassette with not matching on uri, it should fail
|
# play cassette with not matching on uri, it should fail
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with (
|
||||||
with vcr.use_cassette(cassette, match_on=[matcher]) as cass:
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
urlopen(not_matching_uri)
|
vcr.use_cassette(cassette, match_on=[matcher]) as cass,
|
||||||
|
):
|
||||||
|
urlopen(not_matching_uri)
|
||||||
|
|
||||||
|
|
||||||
def test_method_matcher(cassette, httpbin, httpbin_secure):
|
def test_method_matcher(cassette, httpbin, httpbin_secure):
|
||||||
@@ -65,10 +67,12 @@ def test_method_matcher(cassette, httpbin, httpbin_secure):
|
|||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
# should fail if method does not match
|
# should fail if method does not match
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with (
|
||||||
with vcr.use_cassette(cassette, match_on=["method"]) as cass:
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
# is a POST request
|
vcr.use_cassette(cassette, match_on=["method"]) as cass,
|
||||||
urlopen(default_uri, data=b"")
|
):
|
||||||
|
# is a POST request
|
||||||
|
urlopen(default_uri, data=b"")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@@ -98,14 +102,12 @@ def test_default_matcher_matches(cassette, uri, httpbin, httpbin_secure):
|
|||||||
)
|
)
|
||||||
def test_default_matcher_does_not_match(cassette, uri, httpbin, httpbin_secure):
|
def test_default_matcher_does_not_match(cassette, uri, httpbin, httpbin_secure):
|
||||||
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
|
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException), vcr.use_cassette(cassette):
|
||||||
with vcr.use_cassette(cassette):
|
urlopen(uri)
|
||||||
urlopen(uri)
|
|
||||||
|
|
||||||
|
|
||||||
def test_default_matcher_does_not_match_on_method(cassette, httpbin, httpbin_secure):
|
def test_default_matcher_does_not_match_on_method(cassette, httpbin, httpbin_secure):
|
||||||
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
|
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException), vcr.use_cassette(cassette):
|
||||||
with vcr.use_cassette(cassette):
|
# is a POST request
|
||||||
# is a POST request
|
urlopen(default_uri, data=b"")
|
||||||
urlopen(default_uri, data=b"")
|
|
||||||
|
|||||||
@@ -1,5 +1,6 @@
|
|||||||
"""Test using a proxy."""
|
"""Test using a proxy."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import http.server
|
import http.server
|
||||||
import socketserver
|
import socketserver
|
||||||
import threading
|
import threading
|
||||||
@@ -36,6 +37,35 @@ class Proxy(http.server.SimpleHTTPRequestHandler):
|
|||||||
self.end_headers()
|
self.end_headers()
|
||||||
self.copyfile(upstream_response, self.wfile)
|
self.copyfile(upstream_response, self.wfile)
|
||||||
|
|
||||||
|
def do_CONNECT(self):
|
||||||
|
host, port = self.path.split(":")
|
||||||
|
|
||||||
|
asyncio.run(self._tunnel(host, port, self.connection))
|
||||||
|
|
||||||
|
async def _tunnel(self, host, port, client_sock):
|
||||||
|
target_r, target_w = await asyncio.open_connection(host=host, port=port)
|
||||||
|
|
||||||
|
self.send_response(http.HTTPStatus.OK)
|
||||||
|
self.end_headers()
|
||||||
|
|
||||||
|
source_r, source_w = await asyncio.open_connection(sock=client_sock)
|
||||||
|
|
||||||
|
async def channel(reader, writer):
|
||||||
|
while True:
|
||||||
|
data = await reader.read(1024)
|
||||||
|
if not data:
|
||||||
|
break
|
||||||
|
writer.write(data)
|
||||||
|
await writer.drain()
|
||||||
|
|
||||||
|
writer.close()
|
||||||
|
await writer.wait_closed()
|
||||||
|
|
||||||
|
await asyncio.gather(
|
||||||
|
channel(target_r, source_w),
|
||||||
|
channel(source_r, target_w),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
def proxy_server():
|
def proxy_server():
|
||||||
@@ -52,10 +82,26 @@ def test_use_proxy(tmpdir, httpbin, proxy_server):
|
|||||||
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
|
||||||
response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="once") as cassette:
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
|
||||||
cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
||||||
|
|
||||||
for key in set(cassette_response.headers.keys()) & set(response.headers.keys()):
|
|
||||||
assert cassette_response.headers[key] == response.headers[key]
|
|
||||||
assert cassette_response.headers == response.headers
|
assert cassette_response.headers == response.headers
|
||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_use_https_proxy(tmpdir, httpbin_secure, proxy_server):
|
||||||
|
"""Ensure that it works with an HTTPS proxy."""
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
|
||||||
|
response = requests.get(httpbin_secure.url, proxies={"https": proxy_server})
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
|
||||||
|
cassette_response = requests.get(
|
||||||
|
httpbin_secure.url,
|
||||||
|
proxies={"https": proxy_server},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert cassette_response.headers == response.headers
|
||||||
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
# The cassette URL points to httpbin, not the proxy
|
||||||
|
assert cassette.requests[0].url == httpbin_secure.url + "/"
|
||||||
|
|||||||
@@ -124,9 +124,11 @@ def test_none_record_mode(tmpdir, httpbin):
|
|||||||
# Cassette file doesn't exist, yet we are trying to make a request.
|
# Cassette file doesn't exist, yet we are trying to make a request.
|
||||||
# raise hell.
|
# raise hell.
|
||||||
testfile = str(tmpdir.join("recordmode.yml"))
|
testfile = str(tmpdir.join("recordmode.yml"))
|
||||||
with vcr.use_cassette(testfile, record_mode=vcr.mode.NONE):
|
with (
|
||||||
with pytest.raises(CannotOverwriteExistingCassetteException):
|
vcr.use_cassette(testfile, record_mode=vcr.mode.NONE),
|
||||||
urlopen(httpbin.url).read()
|
pytest.raises(CannotOverwriteExistingCassetteException),
|
||||||
|
):
|
||||||
|
urlopen(httpbin.url).read()
|
||||||
|
|
||||||
|
|
||||||
def test_none_record_mode_with_existing_cassette(tmpdir, httpbin):
|
def test_none_record_mode_with_existing_cassette(tmpdir, httpbin):
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ def test_load_cassette_with_custom_persister(tmpdir, httpbin):
|
|||||||
|
|
||||||
with my_vcr.use_cassette(test_fixture, serializer="json"):
|
with my_vcr.use_cassette(test_fixture, serializer="json"):
|
||||||
response = urlopen(httpbin.url).read()
|
response = urlopen(httpbin.url).read()
|
||||||
assert b"A simple HTTP Request & Response Service." in response
|
assert b"HTTP Request & Response Service" in response
|
||||||
|
|
||||||
|
|
||||||
def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
|
def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
|
||||||
@@ -83,6 +83,5 @@ def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
|
|||||||
with my_vcr.use_cassette("bad/encoding") as cass:
|
with my_vcr.use_cassette("bad/encoding") as cass:
|
||||||
assert len(cass) == 0
|
assert len(cass) == 0
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError), my_vcr.use_cassette("bad/buggy") as cass:
|
||||||
with my_vcr.use_cassette("bad/buggy") as cass:
|
pass
|
||||||
pass
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Test requests' interaction with vcr"""
|
"""Test requests' interaction with vcr"""
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ def test_original_decoded_response_is_not_modified(tmpdir, httpbin):
|
|||||||
|
|
||||||
# Assert that we do not modify the original response while appending
|
# Assert that we do not modify the original response while appending
|
||||||
# to the cassette.
|
# to the cassette.
|
||||||
assert "gzip" == inside.headers["content-encoding"]
|
assert inside.headers["content-encoding"] == "gzip"
|
||||||
|
|
||||||
# They should effectively be the same response.
|
# They should effectively be the same response.
|
||||||
inside_headers = (h for h in inside.headers.items() if h[0].lower() != "date")
|
inside_headers = (h for h in inside.headers.items() if h[0].lower() != "date")
|
||||||
@@ -122,7 +122,7 @@ def test_original_response_is_not_modified_by_before_filter(tmpdir, httpbin):
|
|||||||
# Furthermore, the responses should be identical.
|
# Furthermore, the responses should be identical.
|
||||||
inside_body = json.loads(inside.read())
|
inside_body = json.loads(inside.read())
|
||||||
outside_body = json.loads(outside.read())
|
outside_body = json.loads(outside.read())
|
||||||
assert not inside_body[field_to_scrub] == replacement
|
assert inside_body[field_to_scrub] != replacement
|
||||||
assert inside_body[field_to_scrub] == outside_body[field_to_scrub]
|
assert inside_body[field_to_scrub] == outside_body[field_to_scrub]
|
||||||
|
|
||||||
# Ensure that when a cassette exists, the scrubbed response is returned.
|
# Ensure that when a cassette exists, the scrubbed response is returned.
|
||||||
|
|||||||
@@ -1,8 +1,11 @@
|
|||||||
"""Test requests' interaction with vcr"""
|
"""Test requests' interaction with vcr"""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import functools
|
import functools
|
||||||
import inspect
|
import inspect
|
||||||
import json
|
import json
|
||||||
|
import os
|
||||||
|
import ssl
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
@@ -35,23 +38,23 @@ def gen_test(func):
|
|||||||
return wrapper
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(params=["https", "http"])
|
|
||||||
def scheme(request):
|
|
||||||
"""Fixture that returns both http and https."""
|
|
||||||
return request.param
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(params=["simple", "curl", "default"])
|
@pytest.fixture(params=["simple", "curl", "default"])
|
||||||
def get_client(request):
|
def get_client(request):
|
||||||
|
ca_bundle_path = os.environ.get("REQUESTS_CA_BUNDLE")
|
||||||
|
ssl_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
|
||||||
|
ssl_ctx.load_verify_locations(cafile=ca_bundle_path)
|
||||||
|
ssl_ctx.verify_mode = ssl.CERT_REQUIRED
|
||||||
|
|
||||||
if request.param == "simple":
|
if request.param == "simple":
|
||||||
from tornado import simple_httpclient as simple
|
from tornado import simple_httpclient as simple
|
||||||
|
|
||||||
return lambda: simple.SimpleAsyncHTTPClient()
|
return lambda: simple.SimpleAsyncHTTPClient(defaults={"ssl_options": ssl_ctx})
|
||||||
elif request.param == "curl":
|
|
||||||
|
if request.param == "curl":
|
||||||
curl = pytest.importorskip("tornado.curl_httpclient")
|
curl = pytest.importorskip("tornado.curl_httpclient")
|
||||||
return lambda: curl.CurlAsyncHTTPClient()
|
return lambda: curl.CurlAsyncHTTPClient(defaults={"ca_certs": ca_bundle_path})
|
||||||
else:
|
|
||||||
return lambda: http.AsyncHTTPClient()
|
return lambda: http.AsyncHTTPClient(defaults={"ssl_options": ssl_ctx})
|
||||||
|
|
||||||
|
|
||||||
def get(client, url, **kwargs):
|
def get(client, url, **kwargs):
|
||||||
@@ -70,42 +73,42 @@ def post(client, url, data=None, **kwargs):
|
|||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_status_code(get_client, scheme, tmpdir):
|
def test_status_code(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can read the status code"""
|
"""Ensure that we can read the status code"""
|
||||||
url = scheme + "://httpbin.org/"
|
url = httpbin_both.url
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
||||||
status_code = (yield get(get_client(), url)).code
|
status_code = (yield get(get_client(), url)).code
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))) as cass:
|
||||||
assert status_code == (yield get(get_client(), url)).code
|
assert status_code == (yield get(get_client(), url)).code
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_headers(get_client, scheme, tmpdir):
|
def test_headers(get_client, httpbin_both, tmpdir):
|
||||||
"""Ensure that we can read the headers back"""
|
"""Ensure that we can read the headers back"""
|
||||||
url = scheme + "://httpbin.org/"
|
url = httpbin_both.url
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
||||||
headers = (yield get(get_client(), url)).headers
|
headers = (yield get(get_client(), url)).headers
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cass:
|
||||||
assert headers == (yield get(get_client(), url)).headers
|
assert headers == (yield get(get_client(), url)).headers
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_body(get_client, tmpdir, scheme):
|
def test_body(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure the responses are all identical enough"""
|
"""Ensure the responses are all identical enough"""
|
||||||
|
|
||||||
url = scheme + "://httpbin.org/bytes/1024"
|
url = httpbin_both.url + "/bytes/1024"
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
||||||
content = (yield get(get_client(), url)).body
|
content = (yield get(get_client(), url)).body
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("body.yaml"))) as cass:
|
||||||
assert content == (yield get(get_client(), url)).body
|
assert content == (yield get(get_client(), url)).body
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@gen_test
|
@gen_test
|
||||||
@@ -118,15 +121,15 @@ def test_effective_url(get_client, tmpdir, httpbin):
|
|||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass:
|
||||||
assert effective_url == (yield get(get_client(), url)).effective_url
|
assert effective_url == (yield get(get_client(), url)).effective_url
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_auth(get_client, tmpdir, scheme):
|
def test_auth(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can handle basic auth"""
|
"""Ensure that we can handle basic auth"""
|
||||||
auth = ("user", "passwd")
|
auth = ("user", "passwd")
|
||||||
url = scheme + "://httpbin.org/basic-auth/user/passwd"
|
url = httpbin_both.url + "/basic-auth/user/passwd"
|
||||||
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
|
||||||
one = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
one = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
||||||
|
|
||||||
@@ -134,15 +137,15 @@ def test_auth(get_client, tmpdir, scheme):
|
|||||||
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
||||||
assert one.body == two.body
|
assert one.body == two.body
|
||||||
assert one.code == two.code
|
assert one.code == two.code
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_auth_failed(get_client, tmpdir, scheme):
|
def test_auth_failed(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can save failed auth statuses"""
|
"""Ensure that we can save failed auth statuses"""
|
||||||
auth = ("user", "wrongwrongwrong")
|
auth = ("user", "wrongwrongwrong")
|
||||||
url = scheme + "://httpbin.org/basic-auth/user/passwd"
|
url = httpbin_both.url + "/basic-auth/user/passwd"
|
||||||
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
|
||||||
# Ensure that this is empty to begin with
|
# Ensure that this is empty to begin with
|
||||||
assert_cassette_empty(cass)
|
assert_cassette_empty(cass)
|
||||||
@@ -158,15 +161,15 @@ def test_auth_failed(get_client, tmpdir, scheme):
|
|||||||
assert exc_info.value.code == 401
|
assert exc_info.value.code == 401
|
||||||
assert one.body == two.body
|
assert one.body == two.body
|
||||||
assert one.code == two.code == 401
|
assert one.code == two.code == 401
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_post(get_client, tmpdir, scheme):
|
def test_post(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can post and cache the results"""
|
"""Ensure that we can post and cache the results"""
|
||||||
data = {"key1": "value1", "key2": "value2"}
|
data = {"key1": "value1", "key2": "value2"}
|
||||||
url = scheme + "://httpbin.org/post"
|
url = httpbin_both.url + "/post"
|
||||||
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
|
||||||
req1 = (yield post(get_client(), url, data)).body
|
req1 = (yield post(get_client(), url, data)).body
|
||||||
|
|
||||||
@@ -174,7 +177,7 @@ def test_post(get_client, tmpdir, scheme):
|
|||||||
req2 = (yield post(get_client(), url, data)).body
|
req2 = (yield post(get_client(), url, data)).body
|
||||||
|
|
||||||
assert req1 == req2
|
assert req1 == req2
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@gen_test
|
@gen_test
|
||||||
@@ -191,32 +194,36 @@ def test_redirects(get_client, tmpdir, httpbin):
|
|||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_cross_scheme(get_client, tmpdir, scheme):
|
def test_cross_scheme(get_client, tmpdir, httpbin, httpbin_secure):
|
||||||
"""Ensure that requests between schemes are treated separately"""
|
"""Ensure that requests between schemes are treated separately"""
|
||||||
# First fetch a url under http, and then again under https and then
|
# First fetch a url under http, and then again under https and then
|
||||||
# ensure that we haven't served anything out of cache, and we have two
|
# ensure that we haven't served anything out of cache, and we have two
|
||||||
# requests / response pairs in the cassette
|
# requests / response pairs in the cassette
|
||||||
|
|
||||||
|
url = httpbin.url
|
||||||
|
url_secure = httpbin_secure.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
||||||
yield get(get_client(), "https://httpbin.org/")
|
yield get(get_client(), url)
|
||||||
yield get(get_client(), "http://httpbin.org/")
|
yield get(get_client(), url_secure)
|
||||||
assert cass.play_count == 0
|
assert cass.play_count == 0
|
||||||
assert len(cass) == 2
|
assert len(cass) == 2
|
||||||
|
|
||||||
# Then repeat the same requests and ensure both were replayed.
|
# Then repeat the same requests and ensure both were replayed.
|
||||||
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
||||||
yield get(get_client(), "https://httpbin.org/")
|
yield get(get_client(), url)
|
||||||
yield get(get_client(), "http://httpbin.org/")
|
yield get(get_client(), url_secure)
|
||||||
assert cass.play_count == 2
|
assert cass.play_count == 2
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_gzip(get_client, tmpdir, scheme):
|
def test_gzip(get_client, tmpdir, httpbin_both):
|
||||||
"""
|
"""
|
||||||
Ensure that httpclient is able to automatically decompress the response
|
Ensure that httpclient is able to automatically decompress the response
|
||||||
body
|
body
|
||||||
"""
|
"""
|
||||||
url = scheme + "://httpbin.org/gzip"
|
url = httpbin_both + "/gzip"
|
||||||
|
|
||||||
# use_gzip was renamed to decompress_response in 4.0
|
# use_gzip was renamed to decompress_response in 4.0
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
@@ -232,24 +239,26 @@ def test_gzip(get_client, tmpdir, scheme):
|
|||||||
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass:
|
||||||
response = yield get(get_client(), url, **kwargs)
|
response = yield get(get_client(), url, **kwargs)
|
||||||
assert_is_json_bytes(response.body)
|
assert_is_json_bytes(response.body)
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_https_with_cert_validation_disabled(get_client, tmpdir):
|
def test_https_with_cert_validation_disabled(get_client, tmpdir, httpbin_secure):
|
||||||
cass_path = str(tmpdir.join("cert_validation_disabled.yaml"))
|
cass_path = str(tmpdir.join("cert_validation_disabled.yaml"))
|
||||||
|
|
||||||
|
url = httpbin_secure.url
|
||||||
|
|
||||||
with vcr.use_cassette(cass_path):
|
with vcr.use_cassette(cass_path):
|
||||||
yield get(get_client(), "https://httpbin.org", validate_cert=False)
|
yield get(get_client(), url, validate_cert=False)
|
||||||
|
|
||||||
with vcr.use_cassette(cass_path) as cass:
|
with vcr.use_cassette(cass_path) as cass:
|
||||||
yield get(get_client(), "https://httpbin.org", validate_cert=False)
|
yield get(get_client(), url, validate_cert=False)
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_unsupported_features_raises_in_future(get_client, tmpdir):
|
def test_unsupported_features_raises_in_future(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that the exception for an AsyncHTTPClient feature not being
|
"""Ensure that the exception for an AsyncHTTPClient feature not being
|
||||||
supported is raised inside the future."""
|
supported is raised inside the future."""
|
||||||
|
|
||||||
@@ -257,7 +266,7 @@ def test_unsupported_features_raises_in_future(get_client, tmpdir):
|
|||||||
raise AssertionError("Did not expect to be called.")
|
raise AssertionError("Did not expect to be called.")
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
|
||||||
future = get(get_client(), "http://httpbin.org", streaming_callback=callback)
|
future = get(get_client(), httpbin.url, streaming_callback=callback)
|
||||||
|
|
||||||
with pytest.raises(Exception) as excinfo:
|
with pytest.raises(Exception) as excinfo:
|
||||||
yield future
|
yield future
|
||||||
@@ -291,15 +300,17 @@ def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
|
|||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
|
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that CannotOverwriteExistingCassetteException is raised inside
|
"""Ensure that CannotOverwriteExistingCassetteException is raised inside
|
||||||
the future."""
|
the future."""
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
url = httpbin.url
|
||||||
yield get(get_client(), "http://httpbin.org/get")
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
future = get(get_client(), "http://httpbin.org/headers")
|
yield get(get_client(), url + "/get")
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
|
future = get(get_client(), url + "/headers")
|
||||||
|
|
||||||
with pytest.raises(CannotOverwriteExistingCassetteException):
|
with pytest.raises(CannotOverwriteExistingCassetteException):
|
||||||
yield future
|
yield future
|
||||||
@@ -311,15 +322,17 @@ def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
|
|||||||
reason="raise_error only ignores HTTPErrors due to response code",
|
reason="raise_error only ignores HTTPErrors due to response code",
|
||||||
)
|
)
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
|
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that CannotOverwriteExistingCassetteException is not raised if
|
"""Ensure that CannotOverwriteExistingCassetteException is not raised if
|
||||||
raise_error=False in the fetch() call."""
|
raise_error=False in the fetch() call."""
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
url = httpbin.url
|
||||||
yield get(get_client(), "http://httpbin.org/get", raise_error=False)
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
response = yield get(get_client(), "http://httpbin.org/headers", raise_error=False)
|
yield get(get_client(), url + "/get", raise_error=False)
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
|
response = yield get(get_client(), url + "/headers", raise_error=False)
|
||||||
|
|
||||||
assert isinstance(response.error, CannotOverwriteExistingCassetteException)
|
assert isinstance(response.error, CannotOverwriteExistingCassetteException)
|
||||||
|
|
||||||
@@ -347,46 +360,51 @@ def test_tornado_exception_can_be_caught(get_client):
|
|||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_existing_references_get_patched(tmpdir):
|
def test_existing_references_get_patched(tmpdir, httpbin):
|
||||||
from tornado.httpclient import AsyncHTTPClient
|
from tornado.httpclient import AsyncHTTPClient
|
||||||
|
|
||||||
|
url = httpbin.url + "/get"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
||||||
client = AsyncHTTPClient()
|
client = AsyncHTTPClient()
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_existing_instances_get_patched(get_client, tmpdir):
|
def test_existing_instances_get_patched(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that existing instances of AsyncHTTPClient get patched upon
|
"""Ensure that existing instances of AsyncHTTPClient get patched upon
|
||||||
entering VCR context."""
|
entering VCR context."""
|
||||||
|
|
||||||
|
url = httpbin.url + "/get"
|
||||||
client = get_client()
|
client = get_client()
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@gen_test
|
@gen_test
|
||||||
def test_request_time_is_set(get_client, tmpdir):
|
def test_request_time_is_set(get_client, tmpdir, httpbin):
|
||||||
"""Ensures that the request_time on HTTPResponses is set."""
|
"""Ensures that the request_time on HTTPResponses is set."""
|
||||||
|
|
||||||
|
url = httpbin.url + "/get"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
||||||
client = get_client()
|
client = get_client()
|
||||||
response = yield get(client, "http://httpbin.org/get")
|
response = yield get(client, url)
|
||||||
assert response.request_time is not None
|
assert response.request_time is not None
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
||||||
client = get_client()
|
client = get_client()
|
||||||
response = yield get(client, "http://httpbin.org/get")
|
response = yield get(client, url)
|
||||||
assert response.request_time is not None
|
assert response.request_time is not None
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|||||||
@@ -1,147 +0,0 @@
|
|||||||
"""Integration tests with urllib2"""
|
|
||||||
|
|
||||||
import ssl
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
from urllib.request import urlopen
|
|
||||||
|
|
||||||
import pytest_httpbin.certs
|
|
||||||
from pytest import mark
|
|
||||||
|
|
||||||
# Internal imports
|
|
||||||
import vcr
|
|
||||||
|
|
||||||
from ..assertions import assert_cassette_has_one_response
|
|
||||||
|
|
||||||
|
|
||||||
def urlopen_with_cafile(*args, **kwargs):
|
|
||||||
context = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
|
|
||||||
context.check_hostname = False
|
|
||||||
kwargs["context"] = context
|
|
||||||
try:
|
|
||||||
return urlopen(*args, **kwargs)
|
|
||||||
except TypeError:
|
|
||||||
# python2/pypi don't let us override this
|
|
||||||
del kwargs["cafile"]
|
|
||||||
return urlopen(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def test_response_code(httpbin_both, tmpdir):
|
|
||||||
"""Ensure we can read a response code from a fetch"""
|
|
||||||
url = httpbin_both.url
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
|
||||||
code = urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
|
||||||
assert code == urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
|
|
||||||
def test_random_body(httpbin_both, tmpdir):
|
|
||||||
"""Ensure we can read the content, and that it's served from cache"""
|
|
||||||
url = httpbin_both.url + "/bytes/1024"
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
|
||||||
body = urlopen_with_cafile(url).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
|
||||||
assert body == urlopen_with_cafile(url).read()
|
|
||||||
|
|
||||||
|
|
||||||
def test_response_headers(httpbin_both, tmpdir):
|
|
||||||
"""Ensure we can get information from the response"""
|
|
||||||
url = httpbin_both.url
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
open1 = urlopen_with_cafile(url).info().items()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
open2 = urlopen_with_cafile(url).info().items()
|
|
||||||
|
|
||||||
assert sorted(open1) == sorted(open2)
|
|
||||||
|
|
||||||
|
|
||||||
@mark.online
|
|
||||||
def test_effective_url(tmpdir, httpbin):
|
|
||||||
"""Ensure that the effective_url is captured"""
|
|
||||||
url = httpbin.url + "/redirect-to?url=.%2F&status_code=301"
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
effective_url = urlopen_with_cafile(url).geturl()
|
|
||||||
assert effective_url == httpbin.url + "/"
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
assert effective_url == urlopen_with_cafile(url).geturl()
|
|
||||||
|
|
||||||
|
|
||||||
def test_multiple_requests(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that we can cache multiple requests"""
|
|
||||||
urls = [httpbin_both.url, httpbin_both.url, httpbin_both.url + "/get", httpbin_both.url + "/bytes/1024"]
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("multiple.yaml"))) as cass:
|
|
||||||
[urlopen_with_cafile(url) for url in urls]
|
|
||||||
assert len(cass) == len(urls)
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_data(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that it works with query data"""
|
|
||||||
data = urlencode({"some": 1, "data": "here"})
|
|
||||||
url = httpbin_both.url + "/get?" + data
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
|
|
||||||
res1 = urlopen_with_cafile(url).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
|
|
||||||
res2 = urlopen_with_cafile(url).read()
|
|
||||||
assert res1 == res2
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_data(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that it works when posting data"""
|
|
||||||
data = urlencode({"some": 1, "data": "here"}).encode("utf-8")
|
|
||||||
url = httpbin_both.url + "/post"
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
|
|
||||||
res1 = urlopen_with_cafile(url, data).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
|
|
||||||
res2 = urlopen_with_cafile(url, data).read()
|
|
||||||
assert len(cass) == 1
|
|
||||||
|
|
||||||
assert res1 == res2
|
|
||||||
assert_cassette_has_one_response(cass)
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_unicode_data(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that it works when posting unicode data"""
|
|
||||||
data = urlencode({"snowman": "☃".encode()}).encode("utf-8")
|
|
||||||
url = httpbin_both.url + "/post"
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
|
|
||||||
res1 = urlopen_with_cafile(url, data).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
|
|
||||||
res2 = urlopen_with_cafile(url, data).read()
|
|
||||||
assert len(cass) == 1
|
|
||||||
|
|
||||||
assert res1 == res2
|
|
||||||
assert_cassette_has_one_response(cass)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
|
|
||||||
"""Ensure that requests between schemes are treated separately"""
|
|
||||||
# First fetch a url under https, and then again under https and then
|
|
||||||
# ensure that we haven't served anything out of cache, and we have two
|
|
||||||
# requests / response pairs in the cassette
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
|
||||||
urlopen_with_cafile(httpbin_secure.url)
|
|
||||||
urlopen_with_cafile(httpbin.url)
|
|
||||||
assert len(cass) == 2
|
|
||||||
assert cass.play_count == 0
|
|
||||||
|
|
||||||
|
|
||||||
def test_decorator(httpbin_both, tmpdir):
|
|
||||||
"""Test the decorator version of VCR.py"""
|
|
||||||
url = httpbin_both.url
|
|
||||||
|
|
||||||
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
|
|
||||||
def inner1():
|
|
||||||
return urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
|
|
||||||
def inner2():
|
|
||||||
return urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
assert inner1() == inner2()
|
|
||||||
@@ -62,13 +62,12 @@ def test_flickr_should_respond_with_200(tmpdir):
|
|||||||
|
|
||||||
def test_cookies(tmpdir, httpbin):
|
def test_cookies(tmpdir, httpbin):
|
||||||
testfile = str(tmpdir.join("cookies.yml"))
|
testfile = str(tmpdir.join("cookies.yml"))
|
||||||
with vcr.use_cassette(testfile):
|
with vcr.use_cassette(testfile), requests.Session() as s:
|
||||||
with requests.Session() as s:
|
s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2")
|
||||||
s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2")
|
assert s.cookies.keys() == ["k1", "k2"]
|
||||||
assert s.cookies.keys() == ["k1", "k2"]
|
|
||||||
|
|
||||||
r2 = s.get(httpbin.url + "/cookies")
|
r2 = s.get(httpbin.url + "/cookies")
|
||||||
assert sorted(r2.json()["cookies"].keys()) == ["k1", "k2"]
|
assert sorted(r2.json()["cookies"].keys()) == ["k1", "k2"]
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ import yaml
|
|||||||
from vcr.cassette import Cassette
|
from vcr.cassette import Cassette
|
||||||
from vcr.errors import UnhandledHTTPRequestError
|
from vcr.errors import UnhandledHTTPRequestError
|
||||||
from vcr.patch import force_reset
|
from vcr.patch import force_reset
|
||||||
|
from vcr.request import Request
|
||||||
from vcr.stubs import VCRHTTPSConnection
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
|
||||||
|
|
||||||
@@ -226,9 +227,11 @@ def test_nesting_cassette_context_managers(*args):
|
|||||||
assert_get_response_body_is("first_response")
|
assert_get_response_body_is("first_response")
|
||||||
|
|
||||||
# Make sure a second cassette can supersede the first
|
# Make sure a second cassette can supersede the first
|
||||||
with Cassette.use(path="test") as second_cassette:
|
with (
|
||||||
with mock.patch.object(second_cassette, "play_response", return_value=second_response):
|
Cassette.use(path="test") as second_cassette,
|
||||||
assert_get_response_body_is("second_response")
|
mock.patch.object(second_cassette, "play_response", return_value=second_response),
|
||||||
|
):
|
||||||
|
assert_get_response_body_is("second_response")
|
||||||
|
|
||||||
# Now the first cassette should be back in effect
|
# Now the first cassette should be back in effect
|
||||||
assert_get_response_body_is("first_response")
|
assert_get_response_body_is("first_response")
|
||||||
@@ -410,3 +413,25 @@ def test_find_requests_with_most_matches_many_similar_requests(mock_get_matchers
|
|||||||
(1, ["method", "path"], [("query", "failed : query")]),
|
(1, ["method", "path"], [("query", "failed : query")]),
|
||||||
(3, ["method", "path"], [("query", "failed : query")]),
|
(3, ["method", "path"], [("query", "failed : query")]),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_used_interactions(tmpdir):
|
||||||
|
interactions = [
|
||||||
|
{"request": {"body": "", "uri": "foo1", "method": "GET", "headers": {}}, "response": "bar1"},
|
||||||
|
{"request": {"body": "", "uri": "foo2", "method": "GET", "headers": {}}, "response": "bar2"},
|
||||||
|
{"request": {"body": "", "uri": "foo3", "method": "GET", "headers": {}}, "response": "bar3"},
|
||||||
|
]
|
||||||
|
file = tmpdir.join("test_cassette.yml")
|
||||||
|
file.write(yaml.dump({"interactions": [interactions[0], interactions[1]]}))
|
||||||
|
|
||||||
|
cassette = Cassette.load(path=str(file))
|
||||||
|
request = Request._from_dict(interactions[1]["request"])
|
||||||
|
cassette.play_response(request)
|
||||||
|
assert len(cassette._played_interactions) < len(cassette._old_interactions)
|
||||||
|
|
||||||
|
request = Request._from_dict(interactions[2]["request"])
|
||||||
|
cassette.append(request, interactions[2]["response"])
|
||||||
|
assert len(cassette._new_interactions()) == 1
|
||||||
|
|
||||||
|
used_interactions = cassette._played_interactions + cassette._new_interactions()
|
||||||
|
assert len(used_interactions) == 2
|
||||||
|
|||||||
@@ -8,15 +8,13 @@ from vcr.serializers import compat, jsonserializer, yamlserializer
|
|||||||
|
|
||||||
|
|
||||||
def test_deserialize_old_yaml_cassette():
|
def test_deserialize_old_yaml_cassette():
|
||||||
with open("tests/fixtures/migration/old_cassette.yaml") as f:
|
with open("tests/fixtures/migration/old_cassette.yaml") as f, pytest.raises(ValueError):
|
||||||
with pytest.raises(ValueError):
|
deserialize(f.read(), yamlserializer)
|
||||||
deserialize(f.read(), yamlserializer)
|
|
||||||
|
|
||||||
|
|
||||||
def test_deserialize_old_json_cassette():
|
def test_deserialize_old_json_cassette():
|
||||||
with open("tests/fixtures/migration/old_cassette.json") as f:
|
with open("tests/fixtures/migration/old_cassette.json") as f, pytest.raises(ValueError):
|
||||||
with pytest.raises(ValueError):
|
deserialize(f.read(), jsonserializer)
|
||||||
deserialize(f.read(), jsonserializer)
|
|
||||||
|
|
||||||
|
|
||||||
def test_deserialize_new_yaml_cassette():
|
def test_deserialize_new_yaml_cassette():
|
||||||
@@ -76,7 +74,7 @@ def test_deserialize_py2py3_yaml_cassette(tmpdir, req_body, expect):
|
|||||||
cfile = tmpdir.join("test_cassette.yaml")
|
cfile = tmpdir.join("test_cassette.yaml")
|
||||||
cfile.write(REQBODY_TEMPLATE.format(req_body=req_body))
|
cfile.write(REQBODY_TEMPLATE.format(req_body=req_body))
|
||||||
with open(str(cfile)) as f:
|
with open(str(cfile)) as f:
|
||||||
(requests, responses) = deserialize(f.read(), yamlserializer)
|
(requests, _) = deserialize(f.read(), yamlserializer)
|
||||||
assert requests[0].body == expect
|
assert requests[0].body == expect
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,9 +1,12 @@
|
|||||||
import contextlib
|
import contextlib
|
||||||
|
import http.client as httplib
|
||||||
|
from io import BytesIO
|
||||||
|
from tempfile import NamedTemporaryFile
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
from pytest import mark
|
from pytest import mark
|
||||||
|
|
||||||
from vcr import mode
|
from vcr import mode, use_cassette
|
||||||
from vcr.cassette import Cassette
|
from vcr.cassette import Cassette
|
||||||
from vcr.stubs import VCRHTTPSConnection
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
|
||||||
@@ -21,3 +24,52 @@ class TestVCRConnection:
|
|||||||
vcr_connection.cassette = Cassette("test", record_mode=mode.ALL)
|
vcr_connection.cassette = Cassette("test", record_mode=mode.ALL)
|
||||||
vcr_connection.real_connection.connect()
|
vcr_connection.real_connection.connect()
|
||||||
assert vcr_connection.real_connection.sock is not None
|
assert vcr_connection.real_connection.sock is not None
|
||||||
|
|
||||||
|
def test_body_consumed_once_stream(self, tmpdir, httpbin):
|
||||||
|
self._test_body_consumed_once(
|
||||||
|
tmpdir,
|
||||||
|
httpbin,
|
||||||
|
BytesIO(b"1234567890"),
|
||||||
|
BytesIO(b"9876543210"),
|
||||||
|
BytesIO(b"9876543210"),
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_body_consumed_once_iterator(self, tmpdir, httpbin):
|
||||||
|
self._test_body_consumed_once(
|
||||||
|
tmpdir,
|
||||||
|
httpbin,
|
||||||
|
iter([b"1234567890"]),
|
||||||
|
iter([b"9876543210"]),
|
||||||
|
iter([b"9876543210"]),
|
||||||
|
)
|
||||||
|
|
||||||
|
# data2 and data3 should serve the same data, potentially as iterators
|
||||||
|
def _test_body_consumed_once(
|
||||||
|
self,
|
||||||
|
tmpdir,
|
||||||
|
httpbin,
|
||||||
|
data1,
|
||||||
|
data2,
|
||||||
|
data3,
|
||||||
|
):
|
||||||
|
with NamedTemporaryFile(dir=tmpdir, suffix=".yml") as f:
|
||||||
|
testpath = f.name
|
||||||
|
# NOTE: ``use_cassette`` is not okay with the file existing
|
||||||
|
# already. So we using ``.close()`` to not only
|
||||||
|
# close but also delete the empty file, before we start.
|
||||||
|
f.close()
|
||||||
|
host, port = httpbin.host, httpbin.port
|
||||||
|
match_on = ["method", "uri", "body"]
|
||||||
|
with use_cassette(testpath, match_on=match_on):
|
||||||
|
conn1 = httplib.HTTPConnection(host, port)
|
||||||
|
conn1.request("POST", "/anything", body=data1)
|
||||||
|
conn1.getresponse()
|
||||||
|
conn2 = httplib.HTTPConnection(host, port)
|
||||||
|
conn2.request("POST", "/anything", body=data2)
|
||||||
|
conn2.getresponse()
|
||||||
|
with use_cassette(testpath, match_on=match_on) as cass:
|
||||||
|
conn3 = httplib.HTTPConnection(host, port)
|
||||||
|
conn3.request("POST", "/anything", body=data3)
|
||||||
|
conn3.getresponse()
|
||||||
|
assert cass.play_counts[0] == 0
|
||||||
|
assert cass.play_counts[1] == 1
|
||||||
|
|||||||
@@ -178,7 +178,7 @@ def test_testcase_playback(tmpdir):
|
|||||||
return str(cassette_dir)
|
return str(cassette_dir)
|
||||||
|
|
||||||
test = run_testcase(MyTest)[0][0]
|
test = run_testcase(MyTest)[0][0]
|
||||||
assert b"illustrative examples" in test.response
|
assert b"Example Domain" in test.response
|
||||||
assert len(test.cassette.requests) == 1
|
assert len(test.cassette.requests) == 1
|
||||||
assert test.cassette.play_count == 0
|
assert test.cassette.play_count == 0
|
||||||
|
|
||||||
@@ -186,7 +186,7 @@ def test_testcase_playback(tmpdir):
|
|||||||
|
|
||||||
test2 = run_testcase(MyTest)[0][0]
|
test2 = run_testcase(MyTest)[0][0]
|
||||||
assert test.cassette is not test2.cassette
|
assert test.cassette is not test2.cassette
|
||||||
assert b"illustrative examples" in test.response
|
assert b"Example Domain" in test.response
|
||||||
assert len(test2.cassette.requests) == 1
|
assert len(test2.cassette.requests) == 1
|
||||||
assert test2.cassette.play_count == 1
|
assert test2.cassette.play_count == 1
|
||||||
|
|
||||||
|
|||||||
33
tests/unit/test_util.py
Normal file
33
tests/unit/test_util.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from io import BytesIO, StringIO
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vcr import request
|
||||||
|
from vcr.util import read_body
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"input_, expected_output",
|
||||||
|
[
|
||||||
|
(BytesIO(b"Stream"), b"Stream"),
|
||||||
|
(StringIO("Stream"), b"Stream"),
|
||||||
|
(iter(["StringIter"]), b"StringIter"),
|
||||||
|
(iter(["String", "Iter"]), b"StringIter"),
|
||||||
|
(iter([b"BytesIter"]), b"BytesIter"),
|
||||||
|
(iter([b"Bytes", b"Iter"]), b"BytesIter"),
|
||||||
|
(iter([70, 111, 111]), b"Foo"),
|
||||||
|
(iter([]), b""),
|
||||||
|
("String", b"String"),
|
||||||
|
(b"Bytes", b"Bytes"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_read_body(input_, expected_output):
|
||||||
|
r = request.Request("POST", "http://host.com/", input_, {})
|
||||||
|
assert read_body(r) == expected_output
|
||||||
|
|
||||||
|
|
||||||
|
def test_unsupported_read_body():
|
||||||
|
r = request.Request("POST", "http://host.com/", iter([[]]), {})
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
assert read_body(r)
|
||||||
|
assert excinfo.value.args == ("Body type <class 'list'> not supported",)
|
||||||
@@ -4,7 +4,7 @@ from logging import NullHandler
|
|||||||
from .config import VCR
|
from .config import VCR
|
||||||
from .record_mode import RecordMode as mode # noqa: F401
|
from .record_mode import RecordMode as mode # noqa: F401
|
||||||
|
|
||||||
__version__ = "6.0.1"
|
__version__ = "8.0.0"
|
||||||
|
|
||||||
logging.getLogger(__name__).addHandler(NullHandler())
|
logging.getLogger(__name__).addHandler(NullHandler())
|
||||||
|
|
||||||
|
|||||||
@@ -3,7 +3,7 @@ import contextlib
|
|||||||
import copy
|
import copy
|
||||||
import inspect
|
import inspect
|
||||||
import logging
|
import logging
|
||||||
from asyncio import iscoroutinefunction
|
from inspect import iscoroutinefunction
|
||||||
|
|
||||||
import wrapt
|
import wrapt
|
||||||
|
|
||||||
@@ -177,6 +177,7 @@ class Cassette:
|
|||||||
custom_patches=(),
|
custom_patches=(),
|
||||||
inject=False,
|
inject=False,
|
||||||
allow_playback_repeats=False,
|
allow_playback_repeats=False,
|
||||||
|
drop_unused_requests=False,
|
||||||
):
|
):
|
||||||
self._persister = persister or FilesystemPersister
|
self._persister = persister or FilesystemPersister
|
||||||
self._path = path
|
self._path = path
|
||||||
@@ -189,6 +190,7 @@ class Cassette:
|
|||||||
self.record_mode = record_mode
|
self.record_mode = record_mode
|
||||||
self.custom_patches = custom_patches
|
self.custom_patches = custom_patches
|
||||||
self.allow_playback_repeats = allow_playback_repeats
|
self.allow_playback_repeats = allow_playback_repeats
|
||||||
|
self.drop_unused_requests = drop_unused_requests
|
||||||
|
|
||||||
# self.data is the list of (req, resp) tuples
|
# self.data is the list of (req, resp) tuples
|
||||||
self.data = []
|
self.data = []
|
||||||
@@ -196,6 +198,10 @@ class Cassette:
|
|||||||
self.dirty = False
|
self.dirty = False
|
||||||
self.rewound = False
|
self.rewound = False
|
||||||
|
|
||||||
|
# Subsets of self.data to store old and played interactions
|
||||||
|
self._old_interactions = []
|
||||||
|
self._played_interactions = []
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def play_count(self):
|
def play_count(self):
|
||||||
return sum(self.play_counts.values())
|
return sum(self.play_counts.values())
|
||||||
@@ -215,14 +221,14 @@ class Cassette:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def write_protected(self):
|
def write_protected(self):
|
||||||
return self.rewound and self.record_mode == RecordMode.ONCE or self.record_mode == RecordMode.NONE
|
return (self.rewound and self.record_mode == RecordMode.ONCE) or self.record_mode == RecordMode.NONE
|
||||||
|
|
||||||
def append(self, request, response):
|
def append(self, request, response):
|
||||||
"""Add a request, response pair to this cassette"""
|
"""Add a request, response pair to this cassette"""
|
||||||
log.info("Appending request %s and response %s", request, response)
|
|
||||||
request = self._before_record_request(request)
|
request = self._before_record_request(request)
|
||||||
if not request:
|
if not request:
|
||||||
return
|
return
|
||||||
|
log.info("Appending request %s and response %s", request, response)
|
||||||
# Deepcopy is here because mutation of `response` will corrupt the
|
# Deepcopy is here because mutation of `response` will corrupt the
|
||||||
# real response.
|
# real response.
|
||||||
response = copy.deepcopy(response)
|
response = copy.deepcopy(response)
|
||||||
@@ -257,6 +263,7 @@ class Cassette:
|
|||||||
for index, response in self._responses(request):
|
for index, response in self._responses(request):
|
||||||
if self.play_counts[index] == 0 or self.allow_playback_repeats:
|
if self.play_counts[index] == 0 or self.allow_playback_repeats:
|
||||||
self.play_counts[index] += 1
|
self.play_counts[index] += 1
|
||||||
|
self._played_interactions.append((request, response))
|
||||||
return response
|
return response
|
||||||
# The cassette doesn't contain the request asked for.
|
# The cassette doesn't contain the request asked for.
|
||||||
raise UnhandledHTTPRequestError(
|
raise UnhandledHTTPRequestError(
|
||||||
@@ -317,19 +324,44 @@ class Cassette:
|
|||||||
|
|
||||||
return final_best_matches
|
return final_best_matches
|
||||||
|
|
||||||
|
def _new_interactions(self):
|
||||||
|
"""List of new HTTP interactions (request/response tuples)"""
|
||||||
|
new_interactions = []
|
||||||
|
for request, response in self.data:
|
||||||
|
if all(
|
||||||
|
not requests_match(request, old_request, self._match_on)
|
||||||
|
for old_request, _ in self._old_interactions
|
||||||
|
):
|
||||||
|
new_interactions.append((request, response))
|
||||||
|
return new_interactions
|
||||||
|
|
||||||
def _as_dict(self):
|
def _as_dict(self):
|
||||||
return {"requests": self.requests, "responses": self.responses}
|
return {"requests": self.requests, "responses": self.responses}
|
||||||
|
|
||||||
|
def _build_used_interactions_dict(self):
|
||||||
|
interactions = self._played_interactions + self._new_interactions()
|
||||||
|
cassete_dict = {
|
||||||
|
"requests": [request for request, _ in interactions],
|
||||||
|
"responses": [response for _, response in interactions],
|
||||||
|
}
|
||||||
|
return cassete_dict
|
||||||
|
|
||||||
def _save(self, force=False):
|
def _save(self, force=False):
|
||||||
|
if self.drop_unused_requests and len(self._played_interactions) < len(self._old_interactions):
|
||||||
|
cassete_dict = self._build_used_interactions_dict()
|
||||||
|
force = True
|
||||||
|
else:
|
||||||
|
cassete_dict = self._as_dict()
|
||||||
if force or self.dirty:
|
if force or self.dirty:
|
||||||
self._persister.save_cassette(self._path, self._as_dict(), serializer=self._serializer)
|
self._persister.save_cassette(self._path, cassete_dict, serializer=self._serializer)
|
||||||
self.dirty = False
|
self.dirty = False
|
||||||
|
|
||||||
def _load(self):
|
def _load(self):
|
||||||
try:
|
try:
|
||||||
requests, responses = self._persister.load_cassette(self._path, serializer=self._serializer)
|
requests, responses = self._persister.load_cassette(self._path, serializer=self._serializer)
|
||||||
for request, response in zip(requests, responses):
|
for request, response in zip(requests, responses, strict=False):
|
||||||
self.append(request, response)
|
self.append(request, response)
|
||||||
|
self._old_interactions.append((request, response))
|
||||||
self.dirty = False
|
self.dirty = False
|
||||||
self.rewound = True
|
self.rewound = True
|
||||||
except (CassetteDecodeError, CassetteNotFoundError):
|
except (CassetteDecodeError, CassetteNotFoundError):
|
||||||
|
|||||||
@@ -48,6 +48,7 @@ class VCR:
|
|||||||
func_path_generator=None,
|
func_path_generator=None,
|
||||||
decode_compressed_response=False,
|
decode_compressed_response=False,
|
||||||
record_on_exception=True,
|
record_on_exception=True,
|
||||||
|
drop_unused_requests=False,
|
||||||
):
|
):
|
||||||
self.serializer = serializer
|
self.serializer = serializer
|
||||||
self.match_on = match_on
|
self.match_on = match_on
|
||||||
@@ -81,6 +82,7 @@ class VCR:
|
|||||||
self.decode_compressed_response = decode_compressed_response
|
self.decode_compressed_response = decode_compressed_response
|
||||||
self.record_on_exception = record_on_exception
|
self.record_on_exception = record_on_exception
|
||||||
self._custom_patches = tuple(custom_patches)
|
self._custom_patches = tuple(custom_patches)
|
||||||
|
self.drop_unused_requests = drop_unused_requests
|
||||||
|
|
||||||
def _get_serializer(self, serializer_name):
|
def _get_serializer(self, serializer_name):
|
||||||
try:
|
try:
|
||||||
@@ -151,6 +153,7 @@ class VCR:
|
|||||||
"func_path_generator": func_path_generator,
|
"func_path_generator": func_path_generator,
|
||||||
"allow_playback_repeats": kwargs.get("allow_playback_repeats", False),
|
"allow_playback_repeats": kwargs.get("allow_playback_repeats", False),
|
||||||
"record_on_exception": record_on_exception,
|
"record_on_exception": record_on_exception,
|
||||||
|
"drop_unused_requests": kwargs.get("drop_unused_requests", self.drop_unused_requests),
|
||||||
}
|
}
|
||||||
path = kwargs.get("path")
|
path = kwargs.get("path")
|
||||||
if path:
|
if path:
|
||||||
|
|||||||
@@ -6,6 +6,49 @@ from urllib.parse import urlencode, urlparse, urlunparse
|
|||||||
|
|
||||||
from .util import CaseInsensitiveDict
|
from .util import CaseInsensitiveDict
|
||||||
|
|
||||||
|
try:
|
||||||
|
# This supports both brotli & brotlipy packages
|
||||||
|
import brotli
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
import brotlicffi as brotli
|
||||||
|
except ImportError:
|
||||||
|
brotli = None
|
||||||
|
|
||||||
|
|
||||||
|
def decompress_deflate(body):
|
||||||
|
try:
|
||||||
|
return zlib.decompress(body)
|
||||||
|
except zlib.error:
|
||||||
|
# Assume the response was already decompressed
|
||||||
|
return body
|
||||||
|
|
||||||
|
|
||||||
|
def decompress_gzip(body):
|
||||||
|
# To (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16.
|
||||||
|
try:
|
||||||
|
return zlib.decompress(body, zlib.MAX_WBITS | 16)
|
||||||
|
except zlib.error:
|
||||||
|
# Assume the response was already decompressed
|
||||||
|
return body
|
||||||
|
|
||||||
|
|
||||||
|
AVAILABLE_DECOMPRESSORS = {
|
||||||
|
"deflate": decompress_deflate,
|
||||||
|
"gzip": decompress_gzip,
|
||||||
|
}
|
||||||
|
|
||||||
|
if brotli is not None:
|
||||||
|
|
||||||
|
def decompress_brotli(body):
|
||||||
|
try:
|
||||||
|
return brotli.decompress(body)
|
||||||
|
except brotli.error:
|
||||||
|
# Assume the response was already decompressed
|
||||||
|
return body
|
||||||
|
|
||||||
|
AVAILABLE_DECOMPRESSORS["br"] = decompress_brotli
|
||||||
|
|
||||||
|
|
||||||
def replace_headers(request, replacements):
|
def replace_headers(request, replacements):
|
||||||
"""Replace headers in request according to replacements.
|
"""Replace headers in request according to replacements.
|
||||||
@@ -136,45 +179,30 @@ def remove_post_data_parameters(request, post_data_parameters_to_remove):
|
|||||||
|
|
||||||
def decode_response(response):
|
def decode_response(response):
|
||||||
"""
|
"""
|
||||||
If the response is compressed with gzip or deflate:
|
If the response is compressed with any supported compression (gzip,
|
||||||
|
deflate, br if available):
|
||||||
1. decompress the response body
|
1. decompress the response body
|
||||||
2. delete the content-encoding header
|
2. delete the content-encoding header
|
||||||
3. update content-length header to decompressed length
|
3. update content-length header to decompressed length
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def is_compressed(headers):
|
|
||||||
encoding = headers.get("content-encoding", [])
|
|
||||||
return encoding and encoding[0] in ("gzip", "deflate")
|
|
||||||
|
|
||||||
def decompress_body(body, encoding):
|
|
||||||
"""Returns decompressed body according to encoding using zlib.
|
|
||||||
to (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16
|
|
||||||
"""
|
|
||||||
if not body:
|
|
||||||
return ""
|
|
||||||
if encoding == "gzip":
|
|
||||||
try:
|
|
||||||
return zlib.decompress(body, zlib.MAX_WBITS | 16)
|
|
||||||
except zlib.error:
|
|
||||||
return body # assumes that the data was already decompressed
|
|
||||||
else: # encoding == 'deflate'
|
|
||||||
try:
|
|
||||||
return zlib.decompress(body)
|
|
||||||
except zlib.error:
|
|
||||||
return body # assumes that the data was already decompressed
|
|
||||||
|
|
||||||
# Deepcopy here in case `headers` contain objects that could
|
# Deepcopy here in case `headers` contain objects that could
|
||||||
# be mutated by a shallow copy and corrupt the real response.
|
# be mutated by a shallow copy and corrupt the real response.
|
||||||
response = copy.deepcopy(response)
|
response = copy.deepcopy(response)
|
||||||
headers = CaseInsensitiveDict(response["headers"])
|
headers = CaseInsensitiveDict(response["headers"])
|
||||||
if is_compressed(headers):
|
content_encoding = headers.get("content-encoding")
|
||||||
encoding = headers["content-encoding"][0]
|
if not content_encoding:
|
||||||
headers["content-encoding"].remove(encoding)
|
return response
|
||||||
if not headers["content-encoding"]:
|
decompressor = AVAILABLE_DECOMPRESSORS.get(content_encoding[0])
|
||||||
del headers["content-encoding"]
|
if not decompressor:
|
||||||
|
return response
|
||||||
|
|
||||||
new_body = decompress_body(response["body"]["string"], encoding)
|
headers["content-encoding"].remove(content_encoding[0])
|
||||||
response["body"]["string"] = new_body
|
if not headers["content-encoding"]:
|
||||||
headers["content-length"] = [str(len(new_body))]
|
del headers["content-encoding"]
|
||||||
response["headers"] = dict(headers)
|
|
||||||
|
new_body = decompressor(response["body"]["string"])
|
||||||
|
response["body"]["string"] = new_body
|
||||||
|
headers["content-length"] = [str(len(new_body))]
|
||||||
|
response["headers"] = dict(headers)
|
||||||
return response
|
return response
|
||||||
|
|||||||
@@ -3,11 +3,10 @@ import logging
|
|||||||
import urllib
|
import urllib
|
||||||
import xmlrpc.client
|
import xmlrpc.client
|
||||||
from string import hexdigits
|
from string import hexdigits
|
||||||
from typing import List, Set
|
|
||||||
|
|
||||||
from .util import read_body
|
from .util import read_body
|
||||||
|
|
||||||
_HEXDIG_CODE_POINTS: Set[int] = {ord(s.encode("ascii")) for s in hexdigits}
|
_HEXDIG_CODE_POINTS: set[int] = {ord(s.encode("ascii")) for s in hexdigits}
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -109,7 +108,7 @@ def _dechunk(body):
|
|||||||
CHUNK_GAP = b"\r\n"
|
CHUNK_GAP = b"\r\n"
|
||||||
BODY_LEN: int = len(body)
|
BODY_LEN: int = len(body)
|
||||||
|
|
||||||
chunks: List[bytes] = []
|
chunks: list[bytes] = []
|
||||||
pos: int = 0
|
pos: int = 0
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
@@ -163,7 +162,7 @@ def _get_transformers(request):
|
|||||||
|
|
||||||
|
|
||||||
def requests_match(r1, r2, matchers):
|
def requests_match(r1, r2, matchers):
|
||||||
successes, failures = get_matchers_results(r1, r2, matchers)
|
_, failures = get_matchers_results(r1, r2, matchers)
|
||||||
if failures:
|
if failures:
|
||||||
log.debug(f"Requests {r1} and {r2} differ.\nFailure details:\n{failures}")
|
log.debug(f"Requests {r1} and {r2} differ.\nFailure details:\n{failures}")
|
||||||
return len(failures) == 0
|
return len(failures) == 0
|
||||||
|
|||||||
26
vcr/patch.py
26
vcr/patch.py
@@ -1,4 +1,5 @@
|
|||||||
"""Utilities for patching in cassettes"""
|
"""Utilities for patching in cassettes"""
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import functools
|
import functools
|
||||||
import http.client as httplib
|
import http.client as httplib
|
||||||
@@ -91,12 +92,12 @@ else:
|
|||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import httpx
|
import httpcore
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
_HttpxSyncClient_send_single_request = httpx.Client._send_single_request
|
_HttpcoreConnectionPool_handle_request = httpcore.ConnectionPool.handle_request
|
||||||
_HttpxAsyncClient_send_single_request = httpx.AsyncClient._send_single_request
|
_HttpcoreAsyncConnectionPool_handle_async_request = httpcore.AsyncConnectionPool.handle_async_request
|
||||||
|
|
||||||
|
|
||||||
class CassettePatcherBuilder:
|
class CassettePatcherBuilder:
|
||||||
@@ -120,7 +121,7 @@ class CassettePatcherBuilder:
|
|||||||
self._httplib2(),
|
self._httplib2(),
|
||||||
self._tornado(),
|
self._tornado(),
|
||||||
self._aiohttp(),
|
self._aiohttp(),
|
||||||
self._httpx(),
|
self._httpcore(),
|
||||||
self._build_patchers_from_mock_triples(self._cassette.custom_patches),
|
self._build_patchers_from_mock_triples(self._cassette.custom_patches),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -303,19 +304,22 @@ class CassettePatcherBuilder:
|
|||||||
yield client.ClientSession, "_request", new_request
|
yield client.ClientSession, "_request", new_request
|
||||||
|
|
||||||
@_build_patchers_from_mock_triples_decorator
|
@_build_patchers_from_mock_triples_decorator
|
||||||
def _httpx(self):
|
def _httpcore(self):
|
||||||
try:
|
try:
|
||||||
import httpx
|
import httpcore
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
from .stubs.httpx_stubs import async_vcr_send, sync_vcr_send
|
from .stubs.httpcore_stubs import vcr_handle_async_request, vcr_handle_request
|
||||||
|
|
||||||
new_async_client_send = async_vcr_send(self._cassette, _HttpxAsyncClient_send_single_request)
|
new_handle_async_request = vcr_handle_async_request(
|
||||||
yield httpx.AsyncClient, "_send_single_request", new_async_client_send
|
self._cassette,
|
||||||
|
_HttpcoreAsyncConnectionPool_handle_async_request,
|
||||||
|
)
|
||||||
|
yield httpcore.AsyncConnectionPool, "handle_async_request", new_handle_async_request
|
||||||
|
|
||||||
new_sync_client_send = sync_vcr_send(self._cassette, _HttpxSyncClient_send_single_request)
|
new_handle_request = vcr_handle_request(self._cassette, _HttpcoreConnectionPool_handle_request)
|
||||||
yield httpx.Client, "_send_single_request", new_sync_client_send
|
yield httpcore.ConnectionPool, "handle_request", new_handle_request
|
||||||
|
|
||||||
def _urllib3_patchers(self, cpool, conn, stubs):
|
def _urllib3_patchers(self, cpool, conn, stubs):
|
||||||
http_connection_remover = ConnectionRemover(
|
http_connection_remover = ConnectionRemover(
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
import logging
|
import logging
|
||||||
import warnings
|
import warnings
|
||||||
|
from contextlib import suppress
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from urllib.parse import parse_qsl, urlparse
|
from urllib.parse import parse_qsl, urlparse
|
||||||
|
|
||||||
from .util import CaseInsensitiveDict
|
from .util import CaseInsensitiveDict, _is_nonsequence_iterator
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -17,13 +18,30 @@ class Request:
|
|||||||
self.method = method
|
self.method = method
|
||||||
self.uri = uri
|
self.uri = uri
|
||||||
self._was_file = hasattr(body, "read")
|
self._was_file = hasattr(body, "read")
|
||||||
|
self._was_iter = _is_nonsequence_iterator(body)
|
||||||
if self._was_file:
|
if self._was_file:
|
||||||
self.body = body.read()
|
if hasattr(body, "tell"):
|
||||||
|
tell = body.tell()
|
||||||
|
self.body = body.read()
|
||||||
|
body.seek(tell)
|
||||||
|
else:
|
||||||
|
self.body = body.read()
|
||||||
|
elif self._was_iter:
|
||||||
|
self.body = list(body)
|
||||||
else:
|
else:
|
||||||
self.body = body
|
self.body = body
|
||||||
self.headers = headers
|
self.headers = headers
|
||||||
log.debug("Invoking Request %s", self.uri)
|
log.debug("Invoking Request %s", self.uri)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def uri(self):
|
||||||
|
return self._uri
|
||||||
|
|
||||||
|
@uri.setter
|
||||||
|
def uri(self, uri):
|
||||||
|
self._uri = uri
|
||||||
|
self.parsed_uri = urlparse(uri)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def headers(self):
|
def headers(self):
|
||||||
return self._headers
|
return self._headers
|
||||||
@@ -36,7 +54,11 @@ class Request:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def body(self):
|
def body(self):
|
||||||
return BytesIO(self._body) if self._was_file else self._body
|
if self._was_file:
|
||||||
|
return BytesIO(self._body)
|
||||||
|
if self._was_iter:
|
||||||
|
return iter(self._body)
|
||||||
|
return self._body
|
||||||
|
|
||||||
@body.setter
|
@body.setter
|
||||||
def body(self, value):
|
def body(self, value):
|
||||||
@@ -54,30 +76,28 @@ class Request:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def scheme(self):
|
def scheme(self):
|
||||||
return urlparse(self.uri).scheme
|
return self.parsed_uri.scheme
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def host(self):
|
def host(self):
|
||||||
return urlparse(self.uri).hostname
|
return self.parsed_uri.hostname
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def port(self):
|
def port(self):
|
||||||
parse_uri = urlparse(self.uri)
|
port = self.parsed_uri.port
|
||||||
port = parse_uri.port
|
|
||||||
if port is None:
|
if port is None:
|
||||||
try:
|
with suppress(KeyError):
|
||||||
port = {"https": 443, "http": 80}[parse_uri.scheme]
|
port = {"https": 443, "http": 80}[self.parsed_uri.scheme]
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
return port
|
return port
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path(self):
|
def path(self):
|
||||||
return urlparse(self.uri).path
|
return self.parsed_uri.path
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def query(self):
|
def query(self):
|
||||||
q = urlparse(self.uri).query
|
q = self.parsed_uri.query
|
||||||
return sorted(parse_qsl(q))
|
return sorted(parse_qsl(q))
|
||||||
|
|
||||||
# alias for backwards compatibility
|
# alias for backwards compatibility
|
||||||
|
|||||||
@@ -53,7 +53,7 @@ def serialize(cassette_dict, serializer):
|
|||||||
"request": compat.convert_to_unicode(request._to_dict()),
|
"request": compat.convert_to_unicode(request._to_dict()),
|
||||||
"response": compat.convert_to_unicode(response),
|
"response": compat.convert_to_unicode(response),
|
||||||
}
|
}
|
||||||
for request, response in zip(cassette_dict["requests"], cassette_dict["responses"])
|
for request, response in zip(cassette_dict["requests"], cassette_dict["responses"], strict=False)
|
||||||
]
|
]
|
||||||
data = {"version": CASSETTE_FORMAT_VERSION, "interactions": interactions}
|
data = {"version": CASSETTE_FORMAT_VERSION, "interactions": interactions}
|
||||||
return serializer.serialize(data)
|
return serializer.serialize(data)
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
"""Stubs for patching HTTP and HTTPS requests"""
|
"""Stubs for patching HTTP and HTTPS requests"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
from contextlib import suppress
|
||||||
from http.client import HTTPConnection, HTTPResponse, HTTPSConnection
|
from http.client import HTTPConnection, HTTPResponse, HTTPSConnection
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
@@ -66,6 +67,7 @@ class VCRHTTPResponse(HTTPResponse):
|
|||||||
self.reason = recorded_response["status"]["message"]
|
self.reason = recorded_response["status"]["message"]
|
||||||
self.status = self.code = recorded_response["status"]["code"]
|
self.status = self.code = recorded_response["status"]["code"]
|
||||||
self.version = None
|
self.version = None
|
||||||
|
self.version_string = None
|
||||||
self._content = BytesIO(self.recorded_response["body"]["string"])
|
self._content = BytesIO(self.recorded_response["body"]["string"])
|
||||||
self._closed = False
|
self._closed = False
|
||||||
self._original_response = self # for requests.session.Session cookie extraction
|
self._original_response = self # for requests.session.Session cookie extraction
|
||||||
@@ -76,7 +78,7 @@ class VCRHTTPResponse(HTTPResponse):
|
|||||||
# libraries trying to process a chunked response. By removing the
|
# libraries trying to process a chunked response. By removing the
|
||||||
# transfer-encoding: chunked header, this should cause the downstream
|
# transfer-encoding: chunked header, this should cause the downstream
|
||||||
# libraries to process this as a non-chunked response.
|
# libraries to process this as a non-chunked response.
|
||||||
te_key = [h for h in headers.keys() if h.upper() == "TRANSFER-ENCODING"]
|
te_key = [h for h in headers if h.upper() == "TRANSFER-ENCODING"]
|
||||||
if te_key:
|
if te_key:
|
||||||
del headers[te_key[0]]
|
del headers[te_key[0]]
|
||||||
self.headers = self.msg = parse_headers(headers)
|
self.headers = self.msg = parse_headers(headers)
|
||||||
@@ -186,22 +188,34 @@ class VCRConnection:
|
|||||||
"""
|
"""
|
||||||
Returns empty string for the default port and ':port' otherwise
|
Returns empty string for the default port and ':port' otherwise
|
||||||
"""
|
"""
|
||||||
port = self.real_connection.port
|
port = (
|
||||||
|
self.real_connection.port
|
||||||
|
if not self.real_connection._tunnel_host
|
||||||
|
else self.real_connection._tunnel_port
|
||||||
|
)
|
||||||
default_port = {"https": 443, "http": 80}[self._protocol]
|
default_port = {"https": 443, "http": 80}[self._protocol]
|
||||||
return f":{port}" if port != default_port else ""
|
return f":{port}" if port != default_port else ""
|
||||||
|
|
||||||
|
def _real_host(self):
|
||||||
|
"""Returns the request host"""
|
||||||
|
if self.real_connection._tunnel_host:
|
||||||
|
# The real connection is to an HTTPS proxy
|
||||||
|
return self.real_connection._tunnel_host
|
||||||
|
else:
|
||||||
|
return self.real_connection.host
|
||||||
|
|
||||||
def _uri(self, url):
|
def _uri(self, url):
|
||||||
"""Returns request absolute URI"""
|
"""Returns request absolute URI"""
|
||||||
if url and not url.startswith("/"):
|
if url and not url.startswith("/"):
|
||||||
# Then this must be a proxy request.
|
# Then this must be a proxy request.
|
||||||
return url
|
return url
|
||||||
uri = f"{self._protocol}://{self.real_connection.host}{self._port_postfix()}{url}"
|
uri = f"{self._protocol}://{self._real_host()}{self._port_postfix()}{url}"
|
||||||
log.debug("Absolute URI: %s", uri)
|
log.debug("Absolute URI: %s", uri)
|
||||||
return uri
|
return uri
|
||||||
|
|
||||||
def _url(self, uri):
|
def _url(self, uri):
|
||||||
"""Returns request selector url from absolute URI"""
|
"""Returns request selector url from absolute URI"""
|
||||||
prefix = f"{self._protocol}://{self.real_connection.host}{self._port_postfix()}"
|
prefix = f"{self._protocol}://{self._real_host()}{self._port_postfix()}"
|
||||||
return uri.replace(prefix, "", 1)
|
return uri.replace(prefix, "", 1)
|
||||||
|
|
||||||
def request(self, method, url, body=None, headers=None, *args, **kwargs):
|
def request(self, method, url, body=None, headers=None, *args, **kwargs):
|
||||||
@@ -357,12 +371,8 @@ class VCRConnection:
|
|||||||
TODO: Separately setting the attribute on the two instances is not
|
TODO: Separately setting the attribute on the two instances is not
|
||||||
ideal. We should switch to a proxying implementation.
|
ideal. We should switch to a proxying implementation.
|
||||||
"""
|
"""
|
||||||
try:
|
with suppress(AttributeError):
|
||||||
setattr(self.real_connection, name, value)
|
setattr(self.real_connection, name, value)
|
||||||
except AttributeError:
|
|
||||||
# raised if real_connection has not been set yet, such as when
|
|
||||||
# we're setting the real_connection itself for the first time
|
|
||||||
pass
|
|
||||||
|
|
||||||
super().__setattr__(name, value)
|
super().__setattr__(name, value)
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
"""Stubs for aiohttp HTTP clients"""
|
"""Stubs for aiohttp HTTP clients"""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import functools
|
import functools
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
from collections.abc import Mapping
|
||||||
from http.cookies import CookieError, Morsel, SimpleCookie
|
from http.cookies import CookieError, Morsel, SimpleCookie
|
||||||
from typing import Mapping, Union
|
|
||||||
|
|
||||||
from aiohttp import ClientConnectionError, ClientResponse, CookieJar, RequestInfo, hdrs, streams
|
from aiohttp import ClientConnectionError, ClientResponse, CookieJar, RequestInfo, hdrs, streams
|
||||||
from aiohttp.helpers import strip_auth_from_url
|
from aiohttp.helpers import strip_auth_from_url
|
||||||
@@ -228,7 +229,7 @@ def _build_cookie_header(session, cookies, cookie_header, url):
|
|||||||
return c.output(header="", sep=";").strip()
|
return c.output(header="", sep=";").strip()
|
||||||
|
|
||||||
|
|
||||||
def _build_url_with_params(url_str: str, params: Mapping[str, Union[str, int, float]]) -> URL:
|
def _build_url_with_params(url_str: str, params: Mapping[str, str | int | float]) -> URL:
|
||||||
# This code is basically a copy&paste of aiohttp.
|
# This code is basically a copy&paste of aiohttp.
|
||||||
# https://github.com/aio-libs/aiohttp/blob/master/aiohttp/client_reqrep.py#L225
|
# https://github.com/aio-libs/aiohttp/blob/master/aiohttp/client_reqrep.py#L225
|
||||||
url = URL(url_str)
|
url = URL(url_str)
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Stubs for boto3"""
|
"""Stubs for boto3"""
|
||||||
|
|
||||||
from botocore.awsrequest import AWSHTTPConnection as HTTPConnection
|
from botocore.awsrequest import AWSHTTPConnection as HTTPConnection
|
||||||
from botocore.awsrequest import AWSHTTPSConnection as VerifiedHTTPSConnection
|
from botocore.awsrequest import AWSHTTPSConnection as VerifiedHTTPSConnection
|
||||||
|
|
||||||
|
|||||||
215
vcr/stubs/httpcore_stubs.py
Normal file
215
vcr/stubs/httpcore_stubs.py
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
import asyncio
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
from collections import defaultdict
|
||||||
|
from collections.abc import AsyncIterable, Iterable
|
||||||
|
|
||||||
|
from httpcore import Response
|
||||||
|
from httpcore._models import ByteStream
|
||||||
|
|
||||||
|
from vcr.errors import CannotOverwriteExistingCassetteException
|
||||||
|
from vcr.filters import decode_response
|
||||||
|
from vcr.request import Request as VcrRequest
|
||||||
|
from vcr.serializers.compat import convert_body_to_bytes
|
||||||
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
async def _convert_byte_stream(stream):
|
||||||
|
if isinstance(stream, Iterable):
|
||||||
|
return list(stream)
|
||||||
|
|
||||||
|
if isinstance(stream, AsyncIterable):
|
||||||
|
return [part async for part in stream]
|
||||||
|
|
||||||
|
raise TypeError(
|
||||||
|
f"_convert_byte_stream: stream must be Iterable or AsyncIterable, got {type(stream).__name__}",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _serialize_headers(real_response):
|
||||||
|
"""
|
||||||
|
Some headers can appear multiple times, like "Set-Cookie".
|
||||||
|
Therefore serialize every header key to a list of values.
|
||||||
|
"""
|
||||||
|
|
||||||
|
headers = defaultdict(list)
|
||||||
|
|
||||||
|
for name, value in real_response.headers:
|
||||||
|
headers[name.decode("ascii")].append(value.decode("ascii"))
|
||||||
|
|
||||||
|
return dict(headers)
|
||||||
|
|
||||||
|
|
||||||
|
async def _serialize_response(real_response):
|
||||||
|
# The reason_phrase may not exist
|
||||||
|
try:
|
||||||
|
reason_phrase = real_response.extensions["reason_phrase"].decode("ascii")
|
||||||
|
except KeyError:
|
||||||
|
reason_phrase = None
|
||||||
|
|
||||||
|
# Reading the response stream consumes the iterator, so we need to restore it afterwards
|
||||||
|
content = b"".join(await _convert_byte_stream(real_response.stream))
|
||||||
|
real_response.stream = ByteStream(content)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": {"code": real_response.status, "message": reason_phrase},
|
||||||
|
"headers": _serialize_headers(real_response),
|
||||||
|
"body": {"string": content},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _deserialize_headers(headers):
|
||||||
|
"""
|
||||||
|
httpcore accepts headers as list of tuples of header key and value.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return [
|
||||||
|
(name.encode("ascii"), value.encode("ascii")) for name, values in headers.items() for value in values
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _deserialize_response(vcr_response):
|
||||||
|
# Cassette format generated for HTTPX requests by older versions of
|
||||||
|
# vcrpy. We restructure the content to resemble what a regular
|
||||||
|
# cassette looks like.
|
||||||
|
if "status_code" in vcr_response:
|
||||||
|
vcr_response = decode_response(
|
||||||
|
convert_body_to_bytes(
|
||||||
|
{
|
||||||
|
"headers": vcr_response["headers"],
|
||||||
|
"body": {"string": vcr_response["content"]},
|
||||||
|
"status": {"code": vcr_response["status_code"]},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
)
|
||||||
|
extensions = None
|
||||||
|
else:
|
||||||
|
extensions = (
|
||||||
|
{"reason_phrase": vcr_response["status"]["message"].encode("ascii")}
|
||||||
|
if vcr_response["status"]["message"]
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
vcr_response["status"]["code"],
|
||||||
|
headers=_deserialize_headers(vcr_response["headers"]),
|
||||||
|
content=vcr_response["body"]["string"],
|
||||||
|
extensions=extensions,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def _make_vcr_request(real_request):
|
||||||
|
# Reading the request stream consumes the iterator, so we need to restore it afterwards
|
||||||
|
body = b"".join(await _convert_byte_stream(real_request.stream))
|
||||||
|
real_request.stream = ByteStream(body)
|
||||||
|
|
||||||
|
uri = bytes(real_request.url).decode("ascii")
|
||||||
|
|
||||||
|
# As per HTTPX: If there are multiple headers with the same key, then we concatenate them with commas
|
||||||
|
headers = defaultdict(list)
|
||||||
|
|
||||||
|
for name, value in real_request.headers:
|
||||||
|
headers[name.decode("ascii")].append(value.decode("ascii"))
|
||||||
|
|
||||||
|
headers = {name: ", ".join(values) for name, values in headers.items()}
|
||||||
|
|
||||||
|
return VcrRequest(real_request.method.decode("ascii"), uri, body, headers)
|
||||||
|
|
||||||
|
|
||||||
|
async def _vcr_request(cassette, real_request):
|
||||||
|
vcr_request = await _make_vcr_request(real_request)
|
||||||
|
|
||||||
|
if cassette.can_play_response_for(vcr_request):
|
||||||
|
return vcr_request, _play_responses(cassette, vcr_request)
|
||||||
|
|
||||||
|
if cassette.write_protected and cassette.filter_request(vcr_request):
|
||||||
|
raise CannotOverwriteExistingCassetteException(
|
||||||
|
cassette=cassette,
|
||||||
|
failed_request=vcr_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
_logger.info("%s not in cassette, sending to real server", vcr_request)
|
||||||
|
|
||||||
|
return vcr_request, None
|
||||||
|
|
||||||
|
|
||||||
|
async def _record_responses(cassette, vcr_request, real_response):
|
||||||
|
cassette.append(vcr_request, await _serialize_response(real_response))
|
||||||
|
|
||||||
|
|
||||||
|
def _play_responses(cassette, vcr_request):
|
||||||
|
vcr_response = cassette.play_response(vcr_request)
|
||||||
|
real_response = _deserialize_response(vcr_response)
|
||||||
|
|
||||||
|
return real_response
|
||||||
|
|
||||||
|
|
||||||
|
async def _vcr_handle_async_request(
|
||||||
|
cassette,
|
||||||
|
real_handle_async_request,
|
||||||
|
self,
|
||||||
|
real_request,
|
||||||
|
):
|
||||||
|
vcr_request, vcr_response = await _vcr_request(cassette, real_request)
|
||||||
|
|
||||||
|
if vcr_response:
|
||||||
|
return vcr_response
|
||||||
|
|
||||||
|
real_response = await real_handle_async_request(self, real_request)
|
||||||
|
await _record_responses(cassette, vcr_request, real_response)
|
||||||
|
|
||||||
|
return real_response
|
||||||
|
|
||||||
|
|
||||||
|
def vcr_handle_async_request(cassette, real_handle_async_request):
|
||||||
|
@functools.wraps(real_handle_async_request)
|
||||||
|
def _inner_handle_async_request(self, real_request):
|
||||||
|
return _vcr_handle_async_request(
|
||||||
|
cassette,
|
||||||
|
real_handle_async_request,
|
||||||
|
self,
|
||||||
|
real_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
return _inner_handle_async_request
|
||||||
|
|
||||||
|
|
||||||
|
def _run_async_function(sync_func, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
Safely run an asynchronous function from a synchronous context.
|
||||||
|
Handles both cases:
|
||||||
|
- An event loop is already running.
|
||||||
|
- No event loop exists yet.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
return asyncio.run(sync_func(*args, **kwargs))
|
||||||
|
else:
|
||||||
|
# If inside a running loop, create a task and wait for it
|
||||||
|
return asyncio.ensure_future(sync_func(*args, **kwargs))
|
||||||
|
|
||||||
|
|
||||||
|
def _vcr_handle_request(cassette, real_handle_request, self, real_request):
|
||||||
|
vcr_request, vcr_response = _run_async_function(
|
||||||
|
_vcr_request,
|
||||||
|
cassette,
|
||||||
|
real_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
if vcr_response:
|
||||||
|
return vcr_response
|
||||||
|
|
||||||
|
real_response = real_handle_request(self, real_request)
|
||||||
|
_run_async_function(_record_responses, cassette, vcr_request, real_response)
|
||||||
|
|
||||||
|
return real_response
|
||||||
|
|
||||||
|
|
||||||
|
def vcr_handle_request(cassette, real_handle_request):
|
||||||
|
@functools.wraps(real_handle_request)
|
||||||
|
def _inner_handle_request(self, real_request):
|
||||||
|
return _vcr_handle_request(cassette, real_handle_request, self, real_request)
|
||||||
|
|
||||||
|
return _inner_handle_request
|
||||||
@@ -1,186 +0,0 @@
|
|||||||
import asyncio
|
|
||||||
import functools
|
|
||||||
import inspect
|
|
||||||
import logging
|
|
||||||
from unittest.mock import MagicMock, patch
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
from vcr.errors import CannotOverwriteExistingCassetteException
|
|
||||||
from vcr.filters import decode_response
|
|
||||||
from vcr.request import Request as VcrRequest
|
|
||||||
from vcr.serializers.compat import convert_body_to_bytes
|
|
||||||
|
|
||||||
_httpx_signature = inspect.signature(httpx.Client.request)
|
|
||||||
|
|
||||||
try:
|
|
||||||
HTTPX_REDIRECT_PARAM = _httpx_signature.parameters["follow_redirects"]
|
|
||||||
except KeyError:
|
|
||||||
HTTPX_REDIRECT_PARAM = _httpx_signature.parameters["allow_redirects"]
|
|
||||||
|
|
||||||
|
|
||||||
_logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def _transform_headers(httpx_response):
|
|
||||||
"""
|
|
||||||
Some headers can appear multiple times, like "Set-Cookie".
|
|
||||||
Therefore transform to every header key to list of values.
|
|
||||||
"""
|
|
||||||
|
|
||||||
out = {}
|
|
||||||
for key, var in httpx_response.headers.raw:
|
|
||||||
decoded_key = key.decode("utf-8")
|
|
||||||
out.setdefault(decoded_key, [])
|
|
||||||
out[decoded_key].append(var.decode("utf-8"))
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
async def _to_serialized_response(resp, aread):
|
|
||||||
# The content shouldn't already have been read in by HTTPX.
|
|
||||||
assert not hasattr(resp, "_decoder")
|
|
||||||
|
|
||||||
# Retrieve the content, but without decoding it.
|
|
||||||
with patch.dict(resp.headers, {"Content-Encoding": ""}):
|
|
||||||
if aread:
|
|
||||||
await resp.aread()
|
|
||||||
else:
|
|
||||||
resp.read()
|
|
||||||
|
|
||||||
result = {
|
|
||||||
"status": {"code": resp.status_code, "message": resp.reason_phrase},
|
|
||||||
"headers": _transform_headers(resp),
|
|
||||||
"body": {"string": resp.content},
|
|
||||||
}
|
|
||||||
|
|
||||||
# As the content wasn't decoded, we restore the response to a state which
|
|
||||||
# will be capable of decoding the content for the consumer.
|
|
||||||
del resp._decoder
|
|
||||||
resp._content = resp._get_content_decoder().decode(resp.content)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
def _from_serialized_headers(headers):
|
|
||||||
"""
|
|
||||||
httpx accepts headers as list of tuples of header key and value.
|
|
||||||
"""
|
|
||||||
|
|
||||||
header_list = []
|
|
||||||
for key, values in headers.items():
|
|
||||||
for v in values:
|
|
||||||
header_list.append((key, v))
|
|
||||||
return header_list
|
|
||||||
|
|
||||||
|
|
||||||
@patch("httpx.Response.close", MagicMock())
|
|
||||||
@patch("httpx.Response.read", MagicMock())
|
|
||||||
def _from_serialized_response(request, serialized_response, history=None):
|
|
||||||
# Cassette format generated for HTTPX requests by older versions of
|
|
||||||
# vcrpy. We restructure the content to resemble what a regular
|
|
||||||
# cassette looks like.
|
|
||||||
if "status_code" in serialized_response:
|
|
||||||
serialized_response = decode_response(
|
|
||||||
convert_body_to_bytes(
|
|
||||||
{
|
|
||||||
"headers": serialized_response["headers"],
|
|
||||||
"body": {"string": serialized_response["content"]},
|
|
||||||
"status": {"code": serialized_response["status_code"]},
|
|
||||||
},
|
|
||||||
),
|
|
||||||
)
|
|
||||||
extensions = None
|
|
||||||
else:
|
|
||||||
extensions = {"reason_phrase": serialized_response["status"]["message"].encode()}
|
|
||||||
|
|
||||||
response = httpx.Response(
|
|
||||||
status_code=serialized_response["status"]["code"],
|
|
||||||
request=request,
|
|
||||||
headers=_from_serialized_headers(serialized_response["headers"]),
|
|
||||||
content=serialized_response["body"]["string"],
|
|
||||||
history=history or [],
|
|
||||||
extensions=extensions,
|
|
||||||
)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
def _make_vcr_request(httpx_request, **kwargs):
|
|
||||||
body = httpx_request.read().decode("utf-8")
|
|
||||||
uri = str(httpx_request.url)
|
|
||||||
headers = dict(httpx_request.headers)
|
|
||||||
return VcrRequest(httpx_request.method, uri, body, headers)
|
|
||||||
|
|
||||||
|
|
||||||
def _shared_vcr_send(cassette, real_send, *args, **kwargs):
|
|
||||||
real_request = args[1]
|
|
||||||
|
|
||||||
vcr_request = _make_vcr_request(real_request, **kwargs)
|
|
||||||
|
|
||||||
if cassette.can_play_response_for(vcr_request):
|
|
||||||
return vcr_request, _play_responses(cassette, real_request, vcr_request, args[0], kwargs)
|
|
||||||
|
|
||||||
if cassette.write_protected and cassette.filter_request(vcr_request):
|
|
||||||
raise CannotOverwriteExistingCassetteException(cassette=cassette, failed_request=vcr_request)
|
|
||||||
|
|
||||||
_logger.info("%s not in cassette, sending to real server", vcr_request)
|
|
||||||
return vcr_request, None
|
|
||||||
|
|
||||||
|
|
||||||
async def _record_responses(cassette, vcr_request, real_response, aread):
|
|
||||||
for past_real_response in real_response.history:
|
|
||||||
past_vcr_request = _make_vcr_request(past_real_response.request)
|
|
||||||
cassette.append(past_vcr_request, await _to_serialized_response(past_real_response, aread))
|
|
||||||
|
|
||||||
if real_response.history:
|
|
||||||
# If there was a redirection keep we want the request which will hold the
|
|
||||||
# final redirect value
|
|
||||||
vcr_request = _make_vcr_request(real_response.request)
|
|
||||||
|
|
||||||
cassette.append(vcr_request, await _to_serialized_response(real_response, aread))
|
|
||||||
return real_response
|
|
||||||
|
|
||||||
|
|
||||||
def _play_responses(cassette, request, vcr_request, client, kwargs):
    """Replay the cassette's recorded response matching *vcr_request*."""
    recorded = cassette.play_response(vcr_request)
    return _from_serialized_response(request, recorded)
|
|
||||||
|
|
||||||
|
|
||||||
async def _async_vcr_send(cassette, real_send, *args, **kwargs):
    """Async send wrapper: replay from the cassette, or record a real call."""
    vcr_request, replayed = _shared_vcr_send(cassette, real_send, *args, **kwargs)
    if replayed:
        # Mirror httpx's own behaviour: feed replayed response cookies
        # into the client's cookie jar.
        client = args[0]
        client.cookies.extract_cookies(replayed)
        return replayed

    real_response = await real_send(*args, **kwargs)
    await _record_responses(cassette, vcr_request, real_response, aread=True)
    return real_response
|
|
||||||
|
|
||||||
|
|
||||||
def async_vcr_send(cassette, real_send):
    """Return a wrapper around an async ``send`` that consults *cassette* first."""

    @functools.wraps(real_send)
    def _wrapped(*args, **kwargs):
        # Returns the coroutine produced by _async_vcr_send; callers await
        # it exactly as they would the real send.
        return _async_vcr_send(cassette, real_send, *args, **kwargs)

    return _wrapped
|
|
||||||
|
|
||||||
|
|
||||||
def _sync_vcr_send(cassette, real_send, *args, **kwargs):
    """Sync send wrapper: replay from the cassette, or record a real call."""
    vcr_request, replayed = _shared_vcr_send(cassette, real_send, *args, **kwargs)
    if replayed:
        # Mirror httpx's cookie handling for replayed responses.
        args[0].cookies.extract_cookies(replayed)
        return replayed

    real_response = real_send(*args, **kwargs)
    # _record_responses is a coroutine shared with the async path; drive it
    # to completion here since we are in synchronous code.
    asyncio.run(_record_responses(cassette, vcr_request, real_response, aread=False))
    return real_response
|
|
||||||
|
|
||||||
|
|
||||||
def sync_vcr_send(cassette, real_send):
    """Return a wrapper around a sync ``send`` that consults *cassette* first."""

    @functools.wraps(real_send)
    def _wrapped(*args, **kwargs):
        return _sync_vcr_send(cassette, real_send, *args, **kwargs)

    return _wrapped
|
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Stubs for tornado HTTP clients"""
|
"""Stubs for tornado HTTP clients"""
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
@@ -73,7 +74,7 @@ def vcr_fetch_impl(cassette, real_fetch_impl):
|
|||||||
return callback(response)
|
return callback(response)
|
||||||
|
|
||||||
def new_callback(response):
|
def new_callback(response):
|
||||||
headers = [(k, response.headers.get_list(k)) for k in response.headers.keys()]
|
headers = [(k, response.headers.get_list(k)) for k in response.headers]
|
||||||
|
|
||||||
vcr_response = {
|
vcr_response = {
|
||||||
"status": {"code": response.code, "message": response.reason},
|
"status": {"code": response.code, "message": response.reason},
|
||||||
|
|||||||
19
vcr/util.py
19
vcr/util.py
@@ -89,9 +89,28 @@ def compose(*functions):
|
|||||||
return composed
|
return composed
|
||||||
|
|
||||||
|
|
||||||
|
def _is_nonsequence_iterator(obj):
|
||||||
|
return hasattr(obj, "__iter__") and not isinstance(
|
||||||
|
obj,
|
||||||
|
(bytearray, bytes, dict, list, str),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def read_body(request):
|
def read_body(request):
|
||||||
if hasattr(request.body, "read"):
|
if hasattr(request.body, "read"):
|
||||||
return request.body.read()
|
return request.body.read()
|
||||||
|
if _is_nonsequence_iterator(request.body):
|
||||||
|
body = list(request.body)
|
||||||
|
if body:
|
||||||
|
if isinstance(body[0], str):
|
||||||
|
return "".join(body).encode("utf-8")
|
||||||
|
elif isinstance(body[0], (bytes, bytearray)):
|
||||||
|
return b"".join(body)
|
||||||
|
elif isinstance(body[0], int):
|
||||||
|
return bytes(body)
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Body type {type(body[0])} not supported")
|
||||||
|
return b""
|
||||||
return request.body
|
return request.body
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user