Mirror of https://github.com/kevin1024/vcrpy.git (synced 2025-12-09 09:13:23 +00:00)

Compare commits
15 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | b122b5c701 |  |
|  | 4883e3eefa |  |
|  | 5678b13b47 |  |
|  | 48f5f84f86 |  |
|  | 31d8c3498b |  |
|  | b28316ab10 |  |
|  | 3f78330c1e |  |
|  | e8818e5c0b |  |
|  | c7bb59ecd8 |  |
|  | 2183690da3 |  |
|  | d5ba702a1b |  |
|  | 952994b365 |  |
|  | e2f3240835 |  |
|  | bb690833bc |  |
|  | 73eed94c47 |  |

.github/workflows/codespell.yml (vendored, 2 changes)

@@ -17,6 +17,6 @@ jobs:
 
     steps:
       - name: Checkout
-        uses: actions/checkout@v5
+        uses: actions/checkout@v6
       - name: Codespell
         uses: codespell-project/actions-codespell@v2

.github/workflows/docs.yml (vendored, 2 changes)

@@ -10,7 +10,7 @@ jobs:
     runs-on: ubuntu-24.04
 
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
      - uses: actions/setup-python@v6
        with:
          python-version: "3.12"

.github/workflows/main.yml (vendored, 2 changes)

@@ -23,7 +23,7 @@ jobs:
           - "pypy-3.11"
 
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
      - name: Install uv
        uses: astral-sh/setup-uv@v7
 

@@ -18,7 +18,7 @@ jobs:
     name: Detect outdated pre-commit hooks
     runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
 
      - name: Set up Python 3.12
        uses: actions/setup-python@v6

.github/workflows/pre-commit.yml (vendored, 2 changes)

@@ -13,7 +13,7 @@ jobs:
     name: Run pre-commit
     runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v5
+      - uses: actions/checkout@v6
      - uses: actions/setup-python@v6
        with:
          python-version: 3.12

@@ -3,14 +3,14 @@
 
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.14.5
+    rev: v0.14.6
     hooks:
       - id: ruff
         args: ["--output-format=full"]
       - id: ruff-format
 
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v6.0.0
     hooks:
       - id: check-merge-conflict
       - id: end-of-file-fixer

@@ -7,9 +7,29 @@ For a full list of triaged issues, bugs and PRs and what release they are target
 
 All help in providing PRs to close out bug issues is appreciated. Even if that is providing a repo that fully replicates issues. We have very generous contributors that have added these to bug issues which meant another contributor picked up the bug and closed it out.
 
-- Unreleased
-  - Drop support for Python 3.9
-  - Drop support for urllib3 < 2
+- 8.1.0
+  - Enable brotli decompression if available (via ``brotli``, ``brotlipy`` or ``brotlicffi``) (#620) - thanks @immerrr
+  - Fix aiohttp allowing both ``data`` and ``json`` arguments when one is None (#624) - thanks @leorochael
+  - Fix usage of io-like interface with VCR.py (#906) - thanks @tito and @kevdevg
+  - Migrate to declarative Python package config (#767) - thanks @deronnax
+  - Various linting fixes - thanks @jairhenrique
+  - CI: bump actions/checkout from 5 to 6 (#955)
+
+- 8.0.0
+  - BREAKING: Drop support for Python 3.9 (major version bump) - thanks @jairhenrique
+  - BREAKING: Drop support for urllib3 < 2 - fixes CVE warnings from urllib3 1.x (#926, #880) - thanks @jairhenrique
+  - New feature: ``drop_unused_requests`` option to remove unused interactions from cassettes (#763) - thanks @danielnsilva
+  - Rewrite httpx support to patch httpcore instead of httpx (#943) - thanks @seowalex
+  - Fixes ``httpx.ResponseNotRead`` exceptions (#832, #834)
+  - Fixes ``KeyError: 'follow_redirects'`` (#945)
+  - Adds support for custom httpx transports
+  - Fix HTTPS proxy handling - proxy address no longer ends up in cassette URIs (#809, #914) - thanks @alga
+  - Fix ``iscoroutinefunction`` deprecation warning on Python 3.14 - thanks @kloczek
+  - Only log message if response is appended - thanks @talfus-laddus
+  - Optimize urllib.parse calls - thanks @Martin-Brunthaler
+  - Fix CI for Ubuntu 24.04 - thanks @hartwork
+  - Various CI improvements: migrate to uv, update GitHub Actions - thanks @jairhenrique
+  - Various linting and test improvements - thanks @jairhenrique and @hartwork
 
 - 7.0.0
   - Drop support for python 3.8 (major version bump) - thanks @jairhenrique
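
The brotli entry above works through the existing ``decode_compressed_response`` cassette option. A minimal sketch of how it might be exercised, assuming vcrpy 8.1+ with one of the brotli packages installed and an endpoint that actually returns ``Content-Encoding: br`` (httpbin's /brotli is used purely as an illustration):

from urllib.request import Request, urlopen

import vcr

url = "https://httpbin.org/brotli"  # illustrative endpoint serving Content-Encoding: br
request = Request(url, headers={"Accept-Encoding": "gzip, deflate, br"})

# First pass records the interaction; decode_compressed_response=True stores the
# body already decompressed, so the cassette stays human-readable.
with vcr.use_cassette("brotli_response.yaml", decode_compressed_response=True):
    urlopen(request)

# Second pass replays from the cassette; no brotli library is needed to read it back.
with vcr.use_cassette("brotli_response.yaml") as cassette:
    body = urlopen(request).read()
    assert cassette.play_count == 1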

@@ -22,6 +22,7 @@ The following HTTP libraries are supported:
 - ``urllib2``
 - ``urllib3``
 - ``httpx``
+- ``httpcore``
 
 Speed
 -----

@@ -1,3 +1,72 @@
[project]
name = "vcrpy"
authors = [{name = "Kevin McCarthy", email = "me@kevinmccarthy.org"}]
license = {text = "MIT"}
description = "Automatically mock your HTTP interactions to simplify and speed up testing"
classifiers = [
    "Development Status :: 5 - Production/Stable",
    "Environment :: Console",
    "Intended Audience :: Developers",
    "Programming Language :: Python",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Programming Language :: Python :: 3.13",
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: Implementation :: CPython",
    "Programming Language :: Python :: Implementation :: PyPy",
    "Topic :: Software Development :: Testing",
    "Topic :: Internet :: WWW/HTTP",
    "License :: OSI Approved :: MIT License",
]
urls = {Homepage = "https://github.com/kevin1024/vcrpy"}
requires-python = ">=3.10"
dependencies = [
    "PyYAML",
    "wrapt",
]
dynamic = ["version"]

[project.readme]
file = "README.rst"
content-type = "text/x-rst"

[project.optional-dependencies]
tests = [
    "aiohttp",
    "boto3",
    "cryptography",
    "httpbin",
    "httpcore",
    "httplib2",
    "httpx",
    "pycurl; platform_python_implementation !='PyPy'",
    "pytest",
    "pytest-aiohttp",
    "pytest-asyncio",
    "pytest-cov",
    "pytest-httpbin",
    "requests>=2.22.0",
    "tornado",
    "urllib3",
    "werkzeug==2.0.3",
]

[tool.setuptools]
include-package-data = false

[tool.setuptools.packages.find]
exclude = ["tests*"]
namespaces = false

[tool.setuptools.dynamic]
version = {attr = "vcr.__version__"}

[build-system]
requires = ["setuptools>=61.2"]
build-backend = "setuptools.build_meta"

[tool.codespell]
skip = '.git,*.pdf,*.svg,.tox'
ignore-regex = "\\\\[fnrstv]"
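
For reference, the ``dynamic`` version declared above is resolved from ``vcr.__version__`` at build time, so the installed distribution and the module attribute should agree. A quick sanity check (a sketch, assuming the package is installed):

from importlib.metadata import version

import vcr

# [tool.setuptools.dynamic] points version at the vcr.__version__ attribute,
# so both ways of asking should return the same string (e.g. "8.1.0").
assert vcr.__version__ == version("vcrpy")
print(vcr.__version__)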

setup.cfg (60 changes)

@@ -1,2 +1,58 @@
-[bdist_wheel]
-universal=1
+[metadata]
+name = vcrpy
+version = attr: vcr.__version__
+author = Kevin McCarthy
+author_email = me@kevinmccarthy.org
+license = MIT
+description = Automatically mock your HTTP interactions to simplify and speed up testing
+url = https://github.com/kevin1024/vcrpy
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+classifiers =
+    Development Status :: 5 - Production/Stable
+    Environment :: Console
+    Intended Audience :: Developers
+    Programming Language :: Python
+    Programming Language :: Python :: 3
+    Programming Language :: Python :: 3.10
+    Programming Language :: Python :: 3.11
+    Programming Language :: Python :: 3.12
+    Programming Language :: Python :: 3.13
+    Programming Language :: Python :: 3 :: Only
+    Programming Language :: Python :: Implementation :: CPython
+    Programming Language :: Python :: Implementation :: PyPy
+    Topic :: Software Development :: Testing
+    Topic :: Internet :: WWW/HTTP
+    License :: OSI Approved :: MIT License
+
+[options]
+packages = find:
+python_requires = >=3.10
+install_requires =
+    PyYAML
+    wrapt
+tests_require =
+    vcrpy[tests]
+
+[options.packages.find]
+exclude = tests*
+
+[options.extras_require]
+tests =
+    aiohttp
+    boto3
+    cryptography
+    httpbin
+    httpcore
+    httplib2
+    httpx
+    pycurl; platform_python_implementation !='PyPy'
+    pytest
+    pytest-aiohttp
+    pytest-asyncio
+    pytest-cov
+    pytest-httpbin
+    requests>=2.22.0
+    tornado
+    urllib3
+    werkzeug==2.0.3

setup.py (88 lines removed)

@@ -1,88 +0,0 @@
#!/usr/bin/env python

import codecs
import os
import re
from pathlib import Path

from setuptools import find_packages, setup

long_description = Path("README.rst").read_text()
here = os.path.abspath(os.path.dirname(__file__))


def read(*parts):
    # intentionally *not* adding an encoding option to open, See:
    # https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
    with codecs.open(os.path.join(here, *parts), "r") as fp:
        return fp.read()


def find_version(*file_paths):
    version_file = read(*file_paths)
    version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
    if version_match:
        return version_match.group(1)

    raise RuntimeError("Unable to find version string.")


install_requires = [
    "PyYAML",
    "wrapt",
]

extras_require = {
    "tests": [
        "aiohttp",
        "boto3",
        "cryptography",
        "httpbin",
        "httplib2",
        "httpx",
        "pycurl; platform_python_implementation !='PyPy'",
        "pytest",
        "pytest-aiohttp",
        "pytest-asyncio",
        "pytest-cov",
        "pytest-httpbin",
        "requests>=2.22.0",
        "tornado",
        "urllib3",
        "werkzeug==2.0.3",
    ],
}

setup(
    name="vcrpy",
    version=find_version("vcr", "__init__.py"),
    description=("Automatically mock your HTTP interactions to simplify and speed up testing"),
    long_description=long_description,
    long_description_content_type="text/x-rst",
    author="Kevin McCarthy",
    author_email="me@kevinmccarthy.org",
    url="https://github.com/kevin1024/vcrpy",
    packages=find_packages(exclude=["tests*"]),
    python_requires=">=3.10",
    install_requires=install_requires,
    license="MIT",
    extras_require=extras_require,
    tests_require=extras_require["tests"],
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Environment :: Console",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: Python :: 3.11",
        "Programming Language :: Python :: 3.12",
        "Programming Language :: Python :: 3.13",
        "Programming Language :: Python :: 3 :: Only",
        "Programming Language :: Python :: Implementation :: CPython",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Topic :: Software Development :: Testing",
        "Topic :: Internet :: WWW/HTTP",
        "License :: OSI Approved :: MIT License",
    ],
)

@@ -1,3 +1,4 @@
+import io
 import logging
 import ssl
 import urllib.parse

@@ -136,19 +137,29 @@ def test_stream(tmpdir, httpbin):
         assert cassette.play_count == 1
 
 
+POST_DATA = {"key1": "value1", "key2": "value2"}
+
+
 @pytest.mark.online
-@pytest.mark.parametrize("body", ["data", "json"])
-def test_post(tmpdir, body, caplog, httpbin):
+@pytest.mark.parametrize(
+    "kwargs",
+    [
+        {"data": POST_DATA},
+        {"json": POST_DATA},
+        {"data": POST_DATA, "json": None},
+        {"data": None, "json": POST_DATA},
+    ],
+)
+def test_post(tmpdir, kwargs, caplog, httpbin):
     caplog.set_level(logging.INFO)
-    data = {"key1": "value1", "key2": "value2"}
-    url = httpbin.url
+    url = httpbin.url + "/post"
     with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
-        _, response_json = post(url, **{body: data})
+        _, response_json = post(url, **kwargs)
 
     with vcr.use_cassette(str(tmpdir.join("post.yaml"))) as cassette:
         request = cassette.requests[0]
-        assert request.body == data
-        _, cassette_response_json = post(url, **{body: data})
+        assert request.body == POST_DATA
+        _, cassette_response_json = post(url, **kwargs)
         assert cassette_response_json == response_json
         assert cassette.play_count == 1

@@ -162,6 +173,17 @@ def test_post(tmpdir, body, caplog, httpbin):
     ), "Log message not found."
 
 
+@pytest.mark.online
+def test_post_data_plus_json_error(tmpdir, httpbin):
+    url = httpbin.url + "/post"
+    with (
+        vcr.use_cassette(str(tmpdir.join("post.yaml"))) as cassette,
+        pytest.raises(ValueError, match="data and json parameters can not be used at the same time"),
+    ):
+        post(url, data=POST_DATA, json=POST_DATA)
+    assert cassette.requests == []
+
+
 @pytest.mark.online
 def test_params(tmpdir, httpbin):
     url = httpbin.url + "/get?d=d"

@@ -462,3 +484,19 @@ def test_filter_query_parameters(tmpdir, httpbin):
         cassette_content = f.read()
     assert "password" not in cassette_content
     assert "secret" not in cassette_content
+
+
+@pytest.mark.online
+def test_use_cassette_with_io(tmpdir, caplog, httpbin):
+    url = httpbin.url + "/post"
+
+    # test without cassettes
+    data = io.BytesIO(b"hello")
+    _, response_json = request("POST", url, output="json", data=data)
+    assert response_json["data"] == "hello"
+
+    # test with cassettes
+    data = io.BytesIO(b"hello")
+    with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
+        _, response_json = request("POST", url, output="json", data=data)
+        assert response_json["data"] == "hello"

@@ -7,6 +7,7 @@ from urllib.request import Request, urlopen
 import pytest
 
 import vcr
+from vcr.filters import brotli
 
 from ..assertions import assert_cassette_has_one_response, assert_is_json_bytes

@@ -138,6 +139,22 @@ def test_decompress_deflate(tmpdir, httpbin):
     assert_is_json_bytes(decoded_response)
 
 
+def test_decompress_brotli(tmpdir, httpbin):
+    if brotli is None:
+        # XXX: this is never true, because brotlipy is installed with "httpbin"
+        pytest.skip("Brotli is not installed")
+
+    url = httpbin.url + "/brotli"
+    request = Request(url, headers={"Accept-Encoding": ["gzip, deflate, br"]})
+    cass_file = str(tmpdir.join("brotli_response.yaml"))
+    with vcr.use_cassette(cass_file, decode_compressed_response=True):
+        urlopen(request)
+    with vcr.use_cassette(cass_file) as cass:
+        decoded_response = urlopen(url).read()
+    assert_cassette_has_one_response(cass)
+    assert_is_json_bytes(decoded_response)
+
+
 def test_decompress_regular(tmpdir, httpbin):
     """Test that it doesn't try to decompress content that isn't compressed"""
     url = httpbin.url + "/get"

@@ -4,7 +4,7 @@ from logging import NullHandler
 from .config import VCR
 from .record_mode import RecordMode as mode  # noqa: F401
 
-__version__ = "7.0.0"
+__version__ = "8.1.0"
 
 logging.getLogger(__name__).addHandler(NullHandler())

@@ -225,10 +225,10 @@ class Cassette:
 
     def append(self, request, response):
         """Add a request, response pair to this cassette"""
-        log.info("Appending request %s and response %s", request, response)
         request = self._before_record_request(request)
         if not request:
             return
+        log.info("Appending request %s and response %s", request, response)
         # Deepcopy is here because mutation of `response` will corrupt the
         # real response.
         response = copy.deepcopy(response)
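
The reordering matters when a ``before_record_request`` hook drops a request: ``append`` now returns before logging, so filtered-out interactions no longer show up as "Appending request ..." in the logs. A small sketch of such a hook (the filter function and path are illustrative, not part of this change):

import vcr

def drop_health_checks(request):
    # Returning None tells VCR.py to discard the interaction entirely;
    # with the change above, nothing is logged for dropped requests.
    if request.path == "/health":
        return None
    return request

my_vcr = vcr.VCR(before_record_request=drop_health_checks)

with my_vcr.use_cassette("filtered.yaml"):
    ...  # make HTTP calls here; /health requests never reach the cassette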

@@ -6,6 +6,49 @@ from urllib.parse import urlencode, urlparse, urlunparse
 from .util import CaseInsensitiveDict
 
+try:
+    # This supports both brotli & brotlipy packages
+    import brotli
+except ImportError:
+    try:
+        import brotlicffi as brotli
+    except ImportError:
+        brotli = None
+
+
+def decompress_deflate(body):
+    try:
+        return zlib.decompress(body)
+    except zlib.error:
+        # Assume the response was already decompressed
+        return body
+
+
+def decompress_gzip(body):
+    # To (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16.
+    try:
+        return zlib.decompress(body, zlib.MAX_WBITS | 16)
+    except zlib.error:
+        # Assume the response was already decompressed
+        return body
+
+
+AVAILABLE_DECOMPRESSORS = {
+    "deflate": decompress_deflate,
+    "gzip": decompress_gzip,
+}
+
+if brotli is not None:
+
+    def decompress_brotli(body):
+        try:
+            return brotli.decompress(body)
+        except brotli.error:
+            # Assume the response was already decompressed
+            return body
+
+    AVAILABLE_DECOMPRESSORS["br"] = decompress_brotli
+
+
 def replace_headers(request, replacements):
     """Replace headers in request according to replacements.

@@ -136,45 +179,30 @@ def remove_post_data_parameters(request, post_data_parameters_to_remove):
 
 def decode_response(response):
     """
-    If the response is compressed with gzip or deflate:
+    If the response is compressed with any supported compression (gzip,
+    deflate, br if available):
     1. decompress the response body
     2. delete the content-encoding header
     3. update content-length header to decompressed length
     """
 
-    def is_compressed(headers):
-        encoding = headers.get("content-encoding", [])
-        return encoding and encoding[0] in ("gzip", "deflate")
-
-    def decompress_body(body, encoding):
-        """Returns decompressed body according to encoding using zlib.
-        to (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16
-        """
-        if not body:
-            return ""
-        if encoding == "gzip":
-            try:
-                return zlib.decompress(body, zlib.MAX_WBITS | 16)
-            except zlib.error:
-                return body  # assumes that the data was already decompressed
-        else:  # encoding == 'deflate'
-            try:
-                return zlib.decompress(body)
-            except zlib.error:
-                return body  # assumes that the data was already decompressed
-
     # Deepcopy here in case `headers` contain objects that could
     # be mutated by a shallow copy and corrupt the real response.
     response = copy.deepcopy(response)
     headers = CaseInsensitiveDict(response["headers"])
-    if is_compressed(headers):
-        encoding = headers["content-encoding"][0]
-        headers["content-encoding"].remove(encoding)
-        if not headers["content-encoding"]:
-            del headers["content-encoding"]
-
-        new_body = decompress_body(response["body"]["string"], encoding)
-        response["body"]["string"] = new_body
-        headers["content-length"] = [str(len(new_body))]
-        response["headers"] = dict(headers)
+    content_encoding = headers.get("content-encoding")
+    if not content_encoding:
+        return response
+    decompressor = AVAILABLE_DECOMPRESSORS.get(content_encoding[0])
+    if not decompressor:
+        return response
+
+    headers["content-encoding"].remove(content_encoding[0])
+    if not headers["content-encoding"]:
+        del headers["content-encoding"]
+
+    new_body = decompressor(response["body"]["string"])
+    response["body"]["string"] = new_body
+    headers["content-length"] = [str(len(new_body))]
+    response["headers"] = dict(headers)
     return response
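
A short sketch of the decompressor table in action, assuming these helpers remain importable from ``vcr.filters``; the recorded-response dict below mimics the cassette format used elsewhere in this diff:

import gzip

from vcr.filters import decode_response

compressed = gzip.compress(b'{"ok": true}')
recorded = {
    "status": {"code": 200, "message": "OK"},
    "headers": {
        "content-encoding": ["gzip"],
        "content-length": [str(len(compressed))],
    },
    "body": {"string": compressed},
}

decoded = decode_response(recorded)
# The body is decompressed, content-encoding dropped, content-length updated.
assert decoded["body"]["string"] == b'{"ok": true}'
assert "content-encoding" not in decoded["headers"]
assert decoded["headers"]["content-length"] == ["12"]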

vcr/patch.py (25 changes)

@@ -92,12 +92,12 @@ else:
 
 
 try:
-    import httpx
+    import httpcore
 except ImportError:  # pragma: no cover
     pass
 else:
-    _HttpxSyncClient_send_single_request = httpx.Client._send_single_request
-    _HttpxAsyncClient_send_single_request = httpx.AsyncClient._send_single_request
+    _HttpcoreConnectionPool_handle_request = httpcore.ConnectionPool.handle_request
+    _HttpcoreAsyncConnectionPool_handle_async_request = httpcore.AsyncConnectionPool.handle_async_request
 
 
 class CassettePatcherBuilder:

@@ -121,7 +121,7 @@ class CassettePatcherBuilder:
             self._httplib2(),
             self._tornado(),
             self._aiohttp(),
-            self._httpx(),
+            self._httpcore(),
             self._build_patchers_from_mock_triples(self._cassette.custom_patches),
         )
 

@@ -304,19 +304,22 @@ class CassettePatcherBuilder:
         yield client.ClientSession, "_request", new_request
 
     @_build_patchers_from_mock_triples_decorator
-    def _httpx(self):
+    def _httpcore(self):
         try:
-            import httpx
+            import httpcore
         except ImportError:  # pragma: no cover
             return
         else:
-            from .stubs.httpx_stubs import async_vcr_send, sync_vcr_send
+            from .stubs.httpcore_stubs import vcr_handle_async_request, vcr_handle_request
 
-        new_async_client_send = async_vcr_send(self._cassette, _HttpxAsyncClient_send_single_request)
-        yield httpx.AsyncClient, "_send_single_request", new_async_client_send
+        new_handle_async_request = vcr_handle_async_request(
+            self._cassette,
+            _HttpcoreAsyncConnectionPool_handle_async_request,
+        )
+        yield httpcore.AsyncConnectionPool, "handle_async_request", new_handle_async_request
 
-        new_sync_client_send = sync_vcr_send(self._cassette, _HttpxSyncClient_send_single_request)
-        yield httpx.Client, "_send_single_request", new_sync_client_send
+        new_handle_request = vcr_handle_request(self._cassette, _HttpcoreConnectionPool_handle_request)
+        yield httpcore.ConnectionPool, "handle_request", new_handle_request
 
     def _urllib3_patchers(self, cpool, conn, stubs):
         http_connection_remover = ConnectionRemover(
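
Each entry the builder yields is a (owner, attribute, replacement) triple that stays applied while a cassette is active. A rough sketch of the same idea in plain ``unittest.mock`` terms (illustrative only, not vcrpy's actual patcher machinery):

from unittest import mock

import httpcore

# vcrpy yields triples such as (httpcore.ConnectionPool, "handle_request", wrapper);
# mock.patch.object performs the same kind of temporary attribute swap.
original_handle_request = httpcore.ConnectionPool.handle_request

def logging_handle_request(self, request):
    print("intercepted:", request.url)
    return original_handle_request(self, request)

with mock.patch.object(httpcore.ConnectionPool, "handle_request", logging_handle_request):
    ...  # any sync httpx/httpcore call made here goes through the wrapper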

@@ -20,7 +20,12 @@ class Request:
         self._was_file = hasattr(body, "read")
         self._was_iter = _is_nonsequence_iterator(body)
         if self._was_file:
-            self.body = body.read()
+            if hasattr(body, "tell"):
+                tell = body.tell()
+                self.body = body.read()
+                body.seek(tell)
+            else:
+                self.body = body.read()
         elif self._was_iter:
             self.body = list(body)
         else:
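
The ``tell``/``seek`` handling above means capturing a file-like body no longer exhausts the caller's stream. A small sketch against ``vcr.request.Request`` directly (constructed with the method/uri/body/headers signature used elsewhere in this diff; the URL is illustrative):

import io

from vcr.request import Request

stream = io.BytesIO(b"hello world")
stream.read(6)  # the caller has already consumed "hello "

captured = Request("POST", "http://example.test/upload", stream, {})

# The remaining bytes are recorded ...
assert captured.body == b"world"
# ... and the stream position is restored, so the real upload still sends them.
assert stream.tell() == 6
assert stream.read() == b"world"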

@@ -245,7 +245,11 @@ def vcr_request(cassette, real_request):
         headers = kwargs.get("headers")
         auth = kwargs.get("auth")
         headers = self._prepare_headers(headers)
-        data = kwargs.get("data", kwargs.get("json"))
+        data = kwargs.get("data")
+        if data is None:
+            data = kwargs.get("json")
+        elif kwargs.get("json") is not None:
+            raise ValueError("data and json parameters can not be used at the same time")
         params = kwargs.get("params")
         cookies = kwargs.get("cookies")
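
The stricter check mirrors aiohttp's own behaviour: passing both ``data`` and ``json`` as non-None now raises, while an explicit ``json=None`` (or ``data=None``) is treated as "not given". A sketch of the resulting behaviour (assumes aiohttp is installed; URL and cassette path are illustrative):

import asyncio

import aiohttp
import vcr

PAYLOAD = {"key": "value"}

async def main():
    with vcr.use_cassette("post.yaml"):
        async with aiohttp.ClientSession() as session:
            # Fine: json=None counts as "not given", so only data is sent.
            await session.post("https://httpbin.org/post", data=PAYLOAD, json=None)

            # Raises ValueError: data and json parameters can not be used at the same time
            try:
                await session.post("https://httpbin.org/post", data=PAYLOAD, json=PAYLOAD)
            except ValueError as exc:
                print(exc)

asyncio.run(main())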

vcr/stubs/httpcore_stubs.py (new file, 215 lines)

@@ -0,0 +1,215 @@
import asyncio
import functools
import logging
from collections import defaultdict
from collections.abc import AsyncIterable, Iterable

from httpcore import Response
from httpcore._models import ByteStream

from vcr.errors import CannotOverwriteExistingCassetteException
from vcr.filters import decode_response
from vcr.request import Request as VcrRequest
from vcr.serializers.compat import convert_body_to_bytes

_logger = logging.getLogger(__name__)


async def _convert_byte_stream(stream):
    if isinstance(stream, Iterable):
        return list(stream)

    if isinstance(stream, AsyncIterable):
        return [part async for part in stream]

    raise TypeError(
        f"_convert_byte_stream: stream must be Iterable or AsyncIterable, got {type(stream).__name__}",
    )


def _serialize_headers(real_response):
    """
    Some headers can appear multiple times, like "Set-Cookie".
    Therefore serialize every header key to a list of values.
    """

    headers = defaultdict(list)

    for name, value in real_response.headers:
        headers[name.decode("ascii")].append(value.decode("ascii"))

    return dict(headers)


async def _serialize_response(real_response):
    # The reason_phrase may not exist
    try:
        reason_phrase = real_response.extensions["reason_phrase"].decode("ascii")
    except KeyError:
        reason_phrase = None

    # Reading the response stream consumes the iterator, so we need to restore it afterwards
    content = b"".join(await _convert_byte_stream(real_response.stream))
    real_response.stream = ByteStream(content)

    return {
        "status": {"code": real_response.status, "message": reason_phrase},
        "headers": _serialize_headers(real_response),
        "body": {"string": content},
    }


def _deserialize_headers(headers):
    """
    httpcore accepts headers as list of tuples of header key and value.
    """

    return [
        (name.encode("ascii"), value.encode("ascii")) for name, values in headers.items() for value in values
    ]


def _deserialize_response(vcr_response):
    # Cassette format generated for HTTPX requests by older versions of
    # vcrpy. We restructure the content to resemble what a regular
    # cassette looks like.
    if "status_code" in vcr_response:
        vcr_response = decode_response(
            convert_body_to_bytes(
                {
                    "headers": vcr_response["headers"],
                    "body": {"string": vcr_response["content"]},
                    "status": {"code": vcr_response["status_code"]},
                },
            ),
        )
        extensions = None
    else:
        extensions = (
            {"reason_phrase": vcr_response["status"]["message"].encode("ascii")}
            if vcr_response["status"]["message"]
            else None
        )

    return Response(
        vcr_response["status"]["code"],
        headers=_deserialize_headers(vcr_response["headers"]),
        content=vcr_response["body"]["string"],
        extensions=extensions,
    )


async def _make_vcr_request(real_request):
    # Reading the request stream consumes the iterator, so we need to restore it afterwards
    body = b"".join(await _convert_byte_stream(real_request.stream))
    real_request.stream = ByteStream(body)

    uri = bytes(real_request.url).decode("ascii")

    # As per HTTPX: If there are multiple headers with the same key, then we concatenate them with commas
    headers = defaultdict(list)

    for name, value in real_request.headers:
        headers[name.decode("ascii")].append(value.decode("ascii"))

    headers = {name: ", ".join(values) for name, values in headers.items()}

    return VcrRequest(real_request.method.decode("ascii"), uri, body, headers)


async def _vcr_request(cassette, real_request):
    vcr_request = await _make_vcr_request(real_request)

    if cassette.can_play_response_for(vcr_request):
        return vcr_request, _play_responses(cassette, vcr_request)

    if cassette.write_protected and cassette.filter_request(vcr_request):
        raise CannotOverwriteExistingCassetteException(
            cassette=cassette,
            failed_request=vcr_request,
        )

    _logger.info("%s not in cassette, sending to real server", vcr_request)

    return vcr_request, None


async def _record_responses(cassette, vcr_request, real_response):
    cassette.append(vcr_request, await _serialize_response(real_response))


def _play_responses(cassette, vcr_request):
    vcr_response = cassette.play_response(vcr_request)
    real_response = _deserialize_response(vcr_response)

    return real_response


async def _vcr_handle_async_request(
    cassette,
    real_handle_async_request,
    self,
    real_request,
):
    vcr_request, vcr_response = await _vcr_request(cassette, real_request)

    if vcr_response:
        return vcr_response

    real_response = await real_handle_async_request(self, real_request)
    await _record_responses(cassette, vcr_request, real_response)

    return real_response


def vcr_handle_async_request(cassette, real_handle_async_request):
    @functools.wraps(real_handle_async_request)
    def _inner_handle_async_request(self, real_request):
        return _vcr_handle_async_request(
            cassette,
            real_handle_async_request,
            self,
            real_request,
        )

    return _inner_handle_async_request


def _run_async_function(sync_func, *args, **kwargs):
    """
    Safely run an asynchronous function from a synchronous context.
    Handles both cases:
    - An event loop is already running.
    - No event loop exists yet.
    """
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        return asyncio.run(sync_func(*args, **kwargs))
    else:
        # If inside a running loop, create a task and wait for it
        return asyncio.ensure_future(sync_func(*args, **kwargs))


def _vcr_handle_request(cassette, real_handle_request, self, real_request):
    vcr_request, vcr_response = _run_async_function(
        _vcr_request,
        cassette,
        real_request,
    )

    if vcr_response:
        return vcr_response

    real_response = real_handle_request(self, real_request)
    _run_async_function(_record_responses, cassette, vcr_request, real_response)

    return real_response


def vcr_handle_request(cassette, real_handle_request):
    @functools.wraps(real_handle_request)
    def _inner_handle_request(self, real_request):
        return _vcr_handle_request(cassette, real_handle_request, self, real_request)

    return _inner_handle_request
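
Because patching now happens at the httpcore transport layer, plain httpx usage keeps working unchanged, and custom transports built on httpcore are covered as well. A minimal sketch (assumes httpx is installed; URL and cassette path are illustrative):

import httpx
import vcr

with vcr.use_cassette("httpx_example.yaml"):
    first = httpx.get("https://httpbin.org/get")       # recorded via httpcore.ConnectionPool

with vcr.use_cassette("httpx_example.yaml") as cassette:
    replayed = httpx.get("https://httpbin.org/get")    # served back from the cassette
    assert cassette.play_count == 1
    assert replayed.status_code == first.status_code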

@@ -1,202 +0,0 @@
import asyncio
import functools
import inspect
import logging
from unittest.mock import MagicMock, patch

import httpx

from vcr.errors import CannotOverwriteExistingCassetteException
from vcr.filters import decode_response
from vcr.request import Request as VcrRequest
from vcr.serializers.compat import convert_body_to_bytes

_httpx_signature = inspect.signature(httpx.Client.request)

try:
    HTTPX_REDIRECT_PARAM = _httpx_signature.parameters["follow_redirects"]
except KeyError:
    HTTPX_REDIRECT_PARAM = _httpx_signature.parameters["allow_redirects"]


_logger = logging.getLogger(__name__)


def _transform_headers(httpx_response):
    """
    Some headers can appear multiple times, like "Set-Cookie".
    Therefore transform to every header key to list of values.
    """

    out = {}
    for key, var in httpx_response.headers.raw:
        decoded_key = key.decode("utf-8")
        out.setdefault(decoded_key, [])
        out[decoded_key].append(var.decode("utf-8"))
    return out


async def _to_serialized_response(resp, aread):
    # The content shouldn't already have been read in by HTTPX.
    assert not hasattr(resp, "_decoder")

    # Retrieve the content, but without decoding it.
    with patch.dict(resp.headers, {"Content-Encoding": ""}):
        if aread:
            await resp.aread()
        else:
            resp.read()

    result = {
        "status": {"code": resp.status_code, "message": resp.reason_phrase},
        "headers": _transform_headers(resp),
        "body": {"string": resp.content},
    }

    # As the content wasn't decoded, we restore the response to a state which
    # will be capable of decoding the content for the consumer.
    del resp._decoder
    resp._content = resp._get_content_decoder().decode(resp.content)
    return result


def _from_serialized_headers(headers):
    """
    httpx accepts headers as list of tuples of header key and value.
    """

    header_list = []
    for key, values in headers.items():
        for v in values:
            header_list.append((key, v))
    return header_list


@patch("httpx.Response.close", MagicMock())
@patch("httpx.Response.read", MagicMock())
def _from_serialized_response(request, serialized_response, history=None):
    # Cassette format generated for HTTPX requests by older versions of
    # vcrpy. We restructure the content to resemble what a regular
    # cassette looks like.
    if "status_code" in serialized_response:
        serialized_response = decode_response(
            convert_body_to_bytes(
                {
                    "headers": serialized_response["headers"],
                    "body": {"string": serialized_response["content"]},
                    "status": {"code": serialized_response["status_code"]},
                },
            ),
        )
        extensions = None
    else:
        extensions = {"reason_phrase": serialized_response["status"]["message"].encode()}

    response = httpx.Response(
        status_code=serialized_response["status"]["code"],
        request=request,
        headers=_from_serialized_headers(serialized_response["headers"]),
        content=serialized_response["body"]["string"],
        history=history or [],
        extensions=extensions,
    )

    return response


def _make_vcr_request(httpx_request, **kwargs):
    body = httpx_request.read().decode("utf-8")
    uri = str(httpx_request.url)
    headers = dict(httpx_request.headers)
    return VcrRequest(httpx_request.method, uri, body, headers)


def _shared_vcr_send(cassette, real_send, *args, **kwargs):
    real_request = args[1]

    vcr_request = _make_vcr_request(real_request, **kwargs)

    if cassette.can_play_response_for(vcr_request):
        return vcr_request, _play_responses(cassette, real_request, vcr_request, args[0], kwargs)

    if cassette.write_protected and cassette.filter_request(vcr_request):
        raise CannotOverwriteExistingCassetteException(cassette=cassette, failed_request=vcr_request)

    _logger.info("%s not in cassette, sending to real server", vcr_request)
    return vcr_request, None


async def _record_responses(cassette, vcr_request, real_response, aread):
    for past_real_response in real_response.history:
        past_vcr_request = _make_vcr_request(past_real_response.request)
        cassette.append(past_vcr_request, await _to_serialized_response(past_real_response, aread))

    if real_response.history:
        # If there was a redirection keep we want the request which will hold the
        # final redirect value
        vcr_request = _make_vcr_request(real_response.request)

    cassette.append(vcr_request, await _to_serialized_response(real_response, aread))
    return real_response


def _play_responses(cassette, request, vcr_request, client, kwargs):
    vcr_response = cassette.play_response(vcr_request)
    response = _from_serialized_response(request, vcr_response)
    return response


async def _async_vcr_send(cassette, real_send, *args, **kwargs):
    vcr_request, response = _shared_vcr_send(cassette, real_send, *args, **kwargs)
    if response:
        # add cookies from response to session cookie store
        args[0].cookies.extract_cookies(response)
        return response

    real_response = await real_send(*args, **kwargs)
    await _record_responses(cassette, vcr_request, real_response, aread=True)
    return real_response


def async_vcr_send(cassette, real_send):
    @functools.wraps(real_send)
    def _inner_send(*args, **kwargs):
        return _async_vcr_send(cassette, real_send, *args, **kwargs)

    return _inner_send


def _run_async_function(sync_func, *args, **kwargs):
    """
    Safely run an asynchronous function from a synchronous context.
    Handles both cases:
    - An event loop is already running.
    - No event loop exists yet.
    """
    try:
        asyncio.get_running_loop()
    except RuntimeError:
        return asyncio.run(sync_func(*args, **kwargs))
    else:
        # If inside a running loop, create a task and wait for it
        return asyncio.ensure_future(sync_func(*args, **kwargs))


def _sync_vcr_send(cassette, real_send, *args, **kwargs):
    vcr_request, response = _shared_vcr_send(cassette, real_send, *args, **kwargs)
    if response:
        # add cookies from response to session cookie store
        args[0].cookies.extract_cookies(response)
        return response

    real_response = real_send(*args, **kwargs)
    _run_async_function(_record_responses, cassette, vcr_request, real_response, aread=False)
    return real_response


def sync_vcr_send(cassette, real_send):
    @functools.wraps(real_send)
    def _inner_send(*args, **kwargs):
        return _sync_vcr_send(cassette, real_send, *args, **kwargs)

    return _inner_send