Mirror of https://github.com/kevin1024/vcrpy.git
Merge branch 'master' of github.com:kevin1024/vcrpy into fix-resource-warning-2
.github/workflows/main.yml | 41

@@ -13,7 +13,30 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "pypy-3.8", "pypy-3.9", "pypy-3.10"]
+        python-version:
+          - "3.8"
+          - "3.9"
+          - "3.10"
+          - "3.11"
+          - "3.12"
+          - "pypy-3.8"
+          - "pypy-3.9"
+          - "pypy-3.10"
+        urllib3-requirement:
+          - "urllib3>=2"
+          - "urllib3<2"
+
+        exclude:
+          - python-version: "3.8"
+            urllib3-requirement: "urllib3>=2"
+          - python-version: "pypy-3.8"
+            urllib3-requirement: "urllib3>=2"
+          - python-version: "3.9"
+            urllib3-requirement: "urllib3>=2"
+          - python-version: "pypy-3.9"
+            urllib3-requirement: "urllib3>=2"
+          - python-version: "pypy-3.10"
+            urllib3-requirement: "urllib3>=2"

     steps:
     - uses: actions/checkout@v4

@@ -22,22 +45,24 @@ jobs:
       uses: actions/setup-python@v5
       with:
         python-version: ${{ matrix.python-version }}
+        cache: pip

     - name: Install project dependencies
       run: |
-        pip3 install --upgrade pip
-        pip3 install codecov tox tox-gh-actions
+        pip install --upgrade pip
+        pip install codecov '.[tests]' '${{ matrix.urllib3-requirement }}'
+        pip check

-    - name: Run online tests with tox
-      run: tox -- -m online
+    - name: Run online tests
+      run: ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m online

-    - name: Run offline tests with tox with no access to the Internet
+    - name: Run offline tests with no access to the Internet
       run: |
         # We're using unshare to take Internet access
-        # away from tox so that we'll notice whenever some new test
+        # away so that we'll notice whenever some new test
         # is missing @pytest.mark.online decoration in the future
         unshare --map-root-user --net -- \
-          sh -c 'ip link set lo up; tox -- -m "not online"'
+          sh -c 'ip link set lo up; ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m "not online"'

     - name: Run coverage
       run: codecov
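The new offline step can also be reproduced outside CI; a minimal sketch, assuming a Linux host with util-linux's unshare available and the test dependencies already installed (the command itself is lifted from the workflow above):

    # Give the child shell its own empty network namespace, bring loopback up
    # for pytest-httpbin, then run only tests not marked @pytest.mark.online.
    unshare --map-root-user --net -- \
        sh -c 'ip link set lo up; ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m "not online"'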
@@ -21,7 +21,7 @@ jobs:
     - uses: actions/checkout@v4

     - name: Set up Python 3.12
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
         python-version: 3.12

.github/workflows/pre-commit.yml | 4

@@ -13,8 +13,8 @@ jobs:
     name: Run pre-commit
     runs-on: ubuntu-latest
     steps:
-    - uses: actions/checkout@v3
-    - uses: actions/setup-python@v4
+    - uses: actions/checkout@v4
+    - uses: actions/setup-python@v5
       with:
         python-version: 3.12
     - uses: pre-commit/action@v3.0.0
@@ -3,7 +3,7 @@

 repos:
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.1.8
+  rev: v0.1.13
   hooks:
   - id: ruff
     args: ["--show-source"]
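The bumped hooks can be run locally before pushing as well; a short sketch, assuming pre-commit is installed from PyPI (the ruff revision comes from the config above):

    pip install pre-commit
    pre-commit run --all-files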
@@ -1,6 +1,5 @@
 include README.rst
 include LICENSE.txt
-include tox.ini
 recursive-include tests *
 recursive-exclude * __pycache__
 recursive-exclude * *.py[co]
@@ -83,39 +83,21 @@ The PR reviewer is a second set of eyes to see if:
 Running VCR's test suite
 ------------------------

-The tests are all run automatically on `Travis
-CI <https://travis-ci.org/kevin1024/vcrpy>`__, but you can also run them
-yourself using `pytest <http://pytest.org/>`__ and
-`Tox <http://tox.testrun.org/>`__.
+The tests are all run automatically on `Github Actions CI <https://github.com/kevin1024/vcrpy/actions>`__,
+but you can also run them yourself using `pytest <http://pytest.org/>`__.

-Tox will automatically run them in all environments VCR.py supports if they are available on your `PATH`. Alternatively you can use `tox-pyenv <https://pypi.org/project/tox-pyenv/>`_ with
-`pyenv <https://github.com/pyenv/pyenv>`_.
-We recommend you read the documentation for each and see the section further below.
-
-The test suite is pretty big and slow, but you can tell tox to only run specific tests like this::
-
-    tox -e {pyNN}-{HTTP_LIBRARY} -- <pytest flags passed through>
-
-    tox -e py38-requests -- -v -k "'test_status_code or test_gzip'"
-    tox -e py38-requests -- -v --last-failed
-
-This will run only tests that look like ``test_status_code`` or
-``test_gzip`` in the test suite, and only in the python 3.8 environment
-that has ``requests`` installed.
-
-Also, in order for the boto3 tests to run, you will need an AWS key.
+In order for the boto3 tests to run, you will need an AWS key.
 Refer to the `boto3
 documentation <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/index.html>`__
 for how to set this up. I have marked the boto3 tests as optional in
 Travis so you don't have to worry about them failing if you submit a
 pull request.

-Using PyEnv with VCR's test suite
+Using Pyenv with VCR's test suite
 ---------------------------------

-PyEnv is a tool for managing multiple installation of python on your system.
+Pyenv is a tool for managing multiple installation of python on your system.
 See the full documentation at their `github <https://github.com/pyenv/pyenv>`_
-but we are also going to use `tox-pyenv <https://pypi.org/project/tox-pyenv/>`_
 in this example::

     git clone https://github.com/pyenv/pyenv ~/.pyenv

@@ -126,26 +108,21 @@ in this example::
     # Setup shim paths
     eval "$(pyenv init -)"

-    # Setup your local system tox tooling
-    pip3 install tox tox-pyenv
-
     # Install supported versions (at time of writing), this does not activate them
-    pyenv install 3.8.0 pypy3.8
+    pyenv install 3.12.0 pypy3.10

     # This activates them
-    pyenv local 3.8.0 pypy3.8
+    pyenv local 3.12.0 pypy3.10

     # Run the whole test suite
-    tox
-    # Run the whole test suite or just part of it
-    tox -e py38-requests
+    pip install .[test]
+    ./run_tests.sh


 Troubleshooting on MacOSX
 -------------------------

-If you have this kind of error when running tox :
+If you have this kind of error when running tests :

 .. code:: python

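Since the tox-based selection examples are dropped from the docs above, a hedged replacement sketch: the test runner script simply execs pytest with whatever arguments it is given, so the usual pytest selection flags still apply (the -k expression here is illustrative, not taken from the repo):

    ./runtests.sh -v -k "test_status_code or test_gzip"
    ./runtests.sh -v --last-failed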
@@ -1,7 +1,5 @@
 #!/bin/bash

-# https://blog.ionelmc.ro/2015/04/14/tox-tricks-and-patterns/#when-it-inevitably-leads-to-shell-scripts
-# If you are getting an INVOCATION ERROR for this script then there is
-# a good chance you are running on Windows.
-# You can and should use WSL for running tox on Windows when it calls bash scripts.
+# If you are getting an INVOCATION ERROR for this script then there is a good chance you are running on Windows.
+# You can and should use WSL for running tests on Windows when it calls bash scripts.
 REQUESTS_CA_BUNDLE=`python3 -m pytest_httpbin.certs` exec pytest "$@"
setup.py | 44

@@ -57,24 +57,29 @@ install_requires = [
     "urllib3 <2; platform_python_implementation =='PyPy'",
 ]

-tests_require = [
-    "aiohttp",
-    "boto3",
-    "httplib2",
-    "httpx",
-    "pytest",
-    "pytest-aiohttp",
-    "pytest-httpbin",
-    "requests>=2.16.2",
-    "tornado",
-    # Needed to un-break httpbin 0.7.0. For httpbin >=0.7.1 and after,
-    # this pin and the dependency itself can be removed, provided
-    # that the related bug in httpbin has been fixed:
-    # https://github.com/kevin1024/vcrpy/issues/645#issuecomment-1562489489
-    # https://github.com/postmanlabs/httpbin/issues/673
-    # https://github.com/postmanlabs/httpbin/pull/674
-    "Werkzeug==2.0.3",
-]
+extras_require = {
+    "tests": [
+        "aiohttp",
+        "boto3",
+        "httplib2",
+        "httpx",
+        "pytest-aiohttp",
+        "pytest-asyncio",
+        "pytest-cov",
+        "pytest-httpbin",
+        "pytest",
+        "requests>=2.22.0",
+        "tornado",
+        "urllib3",
+        # Needed to un-break httpbin 0.7.0. For httpbin >=0.7.1 and after,
+        # this pin and the dependency itself can be removed, provided
+        # that the related bug in httpbin has been fixed:
+        # https://github.com/kevin1024/vcrpy/issues/645#issuecomment-1562489489
+        # https://github.com/postmanlabs/httpbin/issues/673
+        # https://github.com/postmanlabs/httpbin/pull/674
+        "Werkzeug==2.0.3",
+    ],
+}

 setup(
     name="vcrpy",

@@ -89,7 +94,8 @@ setup(
     python_requires=">=3.8",
     install_requires=install_requires,
     license="MIT",
-    tests_require=tests_require,
+    extras_require=extras_require,
+    tests_require=extras_require["tests"],
     classifiers=[
         "Development Status :: 5 - Production/Stable",
         "Environment :: Console",
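With tests_require folded into an extras group, a local environment can be prepared the same way the workflow does; a minimal sketch (the editable install is an assumption, CI itself runs a plain pip install codecov '.[tests]'):

    pip install --upgrade pip
    pip install -e '.[tests]'
    pip check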
tests/integration/cassettes/gzip_httpx_old_format.yaml | 41 (new file)

@@ -0,0 +1,41 @@
+interactions:
+- request:
+    body: ''
+    headers:
+      accept:
+      - '*/*'
+      accept-encoding:
+      - gzip, deflate, br
+      connection:
+      - keep-alive
+      host:
+      - httpbin.org
+      user-agent:
+      - python-httpx/0.23.0
+    method: GET
+    uri: https://httpbin.org/gzip
+  response:
+    content: "{\n \"gzipped\": true, \n \"headers\": {\n \"Accept\": \"*/*\",
+      \n \"Accept-Encoding\": \"gzip, deflate, br\", \n \"Host\": \"httpbin.org\",
+      \n \"User-Agent\": \"python-httpx/0.23.0\", \n \"X-Amzn-Trace-Id\": \"Root=1-62a62a8d-5f39b5c50c744da821d6ea99\"\n
+      \ }, \n \"method\": \"GET\", \n \"origin\": \"146.200.25.115\"\n}\n"
+    headers:
+      Access-Control-Allow-Credentials:
+      - 'true'
+      Access-Control-Allow-Origin:
+      - '*'
+      Connection:
+      - keep-alive
+      Content-Encoding:
+      - gzip
+      Content-Length:
+      - '230'
+      Content-Type:
+      - application/json
+      Date:
+      - Sun, 12 Jun 2022 18:03:57 GMT
+      Server:
+      - gunicorn/19.9.0
+    http_version: HTTP/1.1
+    status_code: 200
+version: 1
tests/integration/cassettes/gzip_requests.yaml | 42 (new file)

@@ -0,0 +1,42 @@
+interactions:
+- request:
+    body: null
+    headers:
+      Accept:
+      - '*/*'
+      Accept-Encoding:
+      - gzip, deflate, br
+      Connection:
+      - keep-alive
+      User-Agent:
+      - python-requests/2.28.0
+    method: GET
+    uri: https://httpbin.org/gzip
+  response:
+    body:
+      string: !!binary |
+        H4sIAKwrpmIA/z2OSwrCMBCG956izLIkfQSxkl2RogfQA9R2bIM1iUkqaOndnYDIrGa+/zELDB9l
+        LfYgg5uRwYhtj86DXKDuOrQBJKR5Cuy38kZ3pld6oHu0sqTH29QGZMnVkepgtMYuKKNJcEe0vJ3U
+        C4mcjI9hpaiygqaUW7ETFYGLR8frAXXE9h1Go7nD54w++FxkYp8VsDJ4IBH6E47NmVzGqUHFkn8g
+        rJsvp2omYs8AAAA=
+    headers:
+      Access-Control-Allow-Credentials:
+      - 'true'
+      Access-Control-Allow-Origin:
+      - '*'
+      Connection:
+      - Close
+      Content-Encoding:
+      - gzip
+      Content-Length:
+      - '182'
+      Content-Type:
+      - application/json
+      Date:
+      - Sun, 12 Jun 2022 18:08:44 GMT
+      Server:
+      - Pytest-HTTPBIN/0.1.0
+    status:
+      code: 200
+      message: great
+version: 1
@@ -1,7 +1,11 @@
+import os
+
 import pytest

 import vcr

+from ..assertions import assert_is_json_bytes
+
 asyncio = pytest.importorskip("asyncio")
 httpx = pytest.importorskip("httpx")

@@ -219,22 +223,6 @@ def test_redirect(httpbin, yml, do_request):
     assert cassette_response.request.headers.items() == response.request.headers.items()


-@pytest.mark.online
-def test_work_with_gzipped_data(httpbin, do_request, yml):
-    url = httpbin.url + "/gzip?foo=bar"
-    headers = {"accept-encoding": "deflate, gzip"}
-
-    with vcr.use_cassette(yml):
-        do_request(headers=headers)("GET", url)
-
-    with vcr.use_cassette(yml) as cassette:
-        cassette_response = do_request(headers=headers)("GET", url)
-
-        assert cassette_response.headers["content-encoding"] == "gzip"
-        assert cassette_response.read()
-        assert cassette.play_count == 1
-
-
 @pytest.mark.online
 @pytest.mark.parametrize("url", ["https://github.com/kevin1024/vcrpy/issues/" + str(i) for i in range(3, 6)])
 def test_simple_fetching(do_request, yml, url):

@@ -299,29 +287,75 @@ def test_stream(tmpdir, httpbin, do_request):
         assert cassette.play_count == 1


-@pytest.mark.online
-def test_text_content_type(tmpdir, httpbin, do_request):
-    url = httpbin.url + "/json"
+# Regular cassette formats support the status reason,
+# but the old HTTPX cassette format does not.
+@pytest.mark.parametrize(
+    "cassette_name,reason",
+    [
+        ("requests", "great"),
+        ("httpx_old_format", "OK"),
+    ],
+)
+def test_load_cassette_format(do_request, cassette_name, reason):
+    mydir = os.path.dirname(os.path.realpath(__file__))
+    yml = f"{mydir}/cassettes/gzip_{cassette_name}.yaml"
+    url = "https://httpbin.org/gzip"

-    with vcr.use_cassette(str(tmpdir.join("json_type.yaml"))):
-        response = do_request()("GET", url)
-
-    with vcr.use_cassette(str(tmpdir.join("json_type.yaml"))) as cassette:
+    with vcr.use_cassette(yml) as cassette:
         cassette_response = do_request()("GET", url)
-        assert cassette_response.content == response.content
+        assert str(cassette_response.request.url) == url
         assert cassette.play_count == 1
-        assert isinstance(cassette.responses[0]["content"], str)
+
+        # Should be able to load up the JSON inside,
+        # regardless whether the content is the gzipped
+        # in the cassette or not.
+        json = cassette_response.json()
+        assert json["method"] == "GET", json
+        assert cassette_response.status_code == 200
+        assert cassette_response.reason_phrase == reason


-@pytest.mark.online
-def test_binary_content_type(tmpdir, httpbin, do_request):
-    url = httpbin.url + "/bytes/1024"
+def test_gzip__decode_compressed_response_false(tmpdir, httpbin, do_request):
+    """
+    Ensure that httpx is able to automatically decompress the response body.
+    """
+    for _ in range(2):  # one for recording, one for re-playing
+        with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cassette:
+            response = do_request()("GET", httpbin + "/gzip")
+            assert response.headers["content-encoding"] == "gzip"  # i.e. not removed
+            # The content stored in the cassette should be gzipped.
+            assert cassette.responses[0]["body"]["string"][:2] == b"\x1f\x8b"
+            assert_is_json_bytes(response.content)  # i.e. uncompressed bytes

-    with vcr.use_cassette(str(tmpdir.join("json_type.yaml"))):
-        response = do_request()("GET", url)
-
-    with vcr.use_cassette(str(tmpdir.join("json_type.yaml"))) as cassette:
-        cassette_response = do_request()("GET", url)
-        assert cassette_response.content == response.content
-        assert cassette.play_count == 1
-        assert isinstance(cassette.responses[0]["content"], bytes)
+
+def test_gzip__decode_compressed_response_true(do_request, tmpdir, httpbin):
+    url = httpbin + "/gzip"
+
+    expected_response = do_request()("GET", url)
+    expected_content = expected_response.content
+    assert expected_response.headers["content-encoding"] == "gzip"  # self-test
+
+    with vcr.use_cassette(
+        str(tmpdir.join("decode_compressed.yaml")),
+        decode_compressed_response=True,
+    ) as cassette:
+        r = do_request()("GET", url)
+        assert r.headers["content-encoding"] == "gzip"  # i.e. not removed
+        content_length = r.headers["content-length"]
+        assert r.content == expected_content
+
+    # Has the cassette body been decompressed?
+    cassette_response_body = cassette.responses[0]["body"]["string"]
+    assert isinstance(cassette_response_body, str)
+
+    # Content should be JSON.
+    assert cassette_response_body[0:1] == "{"
+
+    with vcr.use_cassette(str(tmpdir.join("decode_compressed.yaml")), decode_compressed_response=True):
+        r = httpx.get(url)
+        assert "content-encoding" not in r.headers  # i.e. removed
+        assert r.content == expected_content
+
+        # As the content is uncompressed, it should have a bigger
+        # length than the compressed version.
+        assert r.headers["content-length"] > content_length
@@ -265,7 +265,7 @@ def test_nested_cassettes_with_session_created_before_nesting(httpbin_both, tmpd
 def test_post_file(tmpdir, httpbin_both):
     """Ensure that we handle posting a file."""
     url = httpbin_both + "/post"
-    with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open("tox.ini", "rb") as f:
+    with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open(".editorconfig", "rb") as f:
         original_response = requests.post(url, f).content

     # This also tests that we do the right thing with matching the body when they are files.

@@ -273,10 +273,10 @@ def test_post_file(tmpdir, httpbin_both):
         str(tmpdir.join("post_file.yaml")),
         match_on=("method", "scheme", "host", "port", "path", "query", "body"),
     ) as cass:
-        with open("tox.ini", "rb") as f:
-            tox_content = f.read()
-            assert cass.requests[0].body.read() == tox_content
-        with open("tox.ini", "rb") as f:
+        with open(".editorconfig", "rb") as f:
+            editorconfig = f.read()
+            assert cass.requests[0].body.read() == editorconfig
+        with open(".editorconfig", "rb") as f:
             new_response = requests.post(url, f).content
     assert original_response == new_response

tox.ini | 74 (deleted)

@@ -1,74 +0,0 @@
-[tox]
-skip_missing_interpreters=true
-envlist =
-    cov-clean,
-    {py38,py39,py310,py311,py312}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3,aiohttp,httpx},
-    {py310,py311,py312}-{requests-urllib3-2,urllib3-2},
-    {pypy3}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3},
-    #{py310}-httpx019,
-    cov-report
-
-
-[gh-actions]
-python =
-    3.8: py38
-    3.9: py39
-    3.10: py310
-    3.11: py311
-    3.12: py312
-    pypy-3: pypy3
-
-# Coverage environment tasks: cov-clean and cov-report
-# https://pytest-cov.readthedocs.io/en/latest/tox.html
-[testenv:cov-clean]
-deps = coverage
-skip_install=true
-commands = coverage erase
-
-[testenv:cov-report]
-deps = coverage
-skip_install=true
-commands =
-    coverage html
-    coverage report --fail-under=90
-
-[testenv]
-# Need to use develop install so that paths
-# for aggregate code coverage combine
-usedevelop=true
-commands =
-    ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append {posargs}
-allowlist_externals =
-    ./runtests.sh
-deps =
-    Werkzeug==2.0.3
-    pytest
-    pytest-httpbin>=1.0.1
-    pytest-cov
-    PyYAML
-    ipaddress
-    requests: requests>=2.22.0
-    httplib2: httplib2
-    urllib3-1: urllib3<2
-    urllib3-2: urllib3<3
-    boto3: boto3
-    aiohttp: aiohttp
-    aiohttp: pytest-asyncio
-    aiohttp: pytest-aiohttp
-    httpx: httpx
-    {py38,py39,py310}-{httpx}: httpx
-    {py38,py39,py310}-{httpx}: pytest-asyncio
-    httpx: httpx>0.19
-    httpx019: httpx==0.19
-    {py38,py39,py310}-{httpx}: pytest-asyncio
-depends =
-    {py38,py39,py310,py311,py312,pypy3}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3},{py310,py311,py312}-{requests-urllib3-2,urllib3-2},{py38,py39,py310,py311,py312}-{aiohttp},{py38,py39,py310,py311,py312}-{httpx}: cov-clean
-    cov-report: {py38,py39,py310,py311,py312,pypy3}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3},{py310,py311,py312}-{requests-urllib3-2,urllib3-2},{py38,py39,py310,py311,py312}-{aiohttp}
-passenv =
-    AWS_ACCESS_KEY_ID
-    AWS_DEFAULT_REGION
-    AWS_SECRET_ACCESS_KEY
-setenv =
-    # workaround for broken C extension in aiohttp
-    # see: https://github.com/aio-libs/aiohttp/issues/7229
-    py312: AIOHTTP_NO_EXTENSIONS=1
@@ -1,3 +1,4 @@
+import asyncio
 import functools
 import inspect
 import logging

@@ -6,7 +7,9 @@ from unittest.mock import MagicMock, patch
 import httpx

 from vcr.errors import CannotOverwriteExistingCassetteException
+from vcr.filters import decode_response
 from vcr.request import Request as VcrRequest
+from vcr.serializers.compat import convert_body_to_bytes

 _httpx_signature = inspect.signature(httpx.Client.request)

@@ -33,19 +36,29 @@ def _transform_headers(httpx_response):
     return out


-def _to_serialized_response(httpx_response):
-    try:
-        content = httpx_response.content.decode("utf-8")
-    except UnicodeDecodeError:
-        content = httpx_response.content
+async def _to_serialized_response(resp, aread):
+    # The content shouldn't already have been read in by HTTPX.
+    assert not hasattr(resp, "_decoder")

-    return {
-        "status_code": httpx_response.status_code,
-        "http_version": httpx_response.http_version,
-        "headers": _transform_headers(httpx_response),
-        "content": content,
+    # Retrieve the content, but without decoding it.
+    with patch.dict(resp.headers, {"Content-Encoding": ""}):
+        if aread:
+            await resp.aread()
+        else:
+            resp.read()
+
+    result = {
+        "status": {"code": resp.status_code, "message": resp.reason_phrase},
+        "headers": _transform_headers(resp),
+        "body": {"string": resp.content},
     }

+    # As the content wasn't decoded, we restore the response to a state which
+    # will be capable of decoding the content for the consumer.
+    del resp._decoder
+    resp._content = resp._get_content_decoder().decode(resp.content)
+    return result
+

 def _from_serialized_headers(headers):
     """

@@ -62,17 +75,32 @@ def _from_serialized_headers(headers):
 @patch("httpx.Response.close", MagicMock())
 @patch("httpx.Response.read", MagicMock())
 def _from_serialized_response(request, serialized_response, history=None):
-    content = serialized_response.get("content")
-    if isinstance(content, str):
-        content = content.encode("utf-8")
+    # Cassette format generated for HTTPX requests by older versions of
+    # vcrpy. We restructure the content to resemble what a regular
+    # cassette looks like.
+    if "status_code" in serialized_response:
+        serialized_response = decode_response(
+            convert_body_to_bytes(
+                {
+                    "headers": serialized_response["headers"],
+                    "body": {"string": serialized_response["content"]},
+                    "status": {"code": serialized_response["status_code"]},
+                },
+            ),
+        )
+        extensions = None
+    else:
+        extensions = {"reason_phrase": serialized_response["status"]["message"].encode()}

     response = httpx.Response(
-        status_code=serialized_response.get("status_code"),
+        status_code=serialized_response["status"]["code"],
         request=request,
-        headers=_from_serialized_headers(serialized_response.get("headers")),
-        content=content,
+        headers=_from_serialized_headers(serialized_response["headers"]),
+        content=serialized_response["body"]["string"],
         history=history or [],
+        extensions=extensions,
     )
-    response._content = content
     return response


@@ -98,17 +126,17 @@ def _shared_vcr_send(cassette, real_send, *args, **kwargs):
     return vcr_request, None


-def _record_responses(cassette, vcr_request, real_response):
+async def _record_responses(cassette, vcr_request, real_response, aread):
     for past_real_response in real_response.history:
         past_vcr_request = _make_vcr_request(past_real_response.request)
-        cassette.append(past_vcr_request, _to_serialized_response(past_real_response))
+        cassette.append(past_vcr_request, await _to_serialized_response(past_real_response, aread))

     if real_response.history:
         # If there was a redirection keep we want the request which will hold the
         # final redirect value
         vcr_request = _make_vcr_request(real_response.request)

-    cassette.append(vcr_request, _to_serialized_response(real_response))
+    cassette.append(vcr_request, await _to_serialized_response(real_response, aread))
     return real_response


@@ -126,8 +154,8 @@ async def _async_vcr_send(cassette, real_send, *args, **kwargs):
         return response

     real_response = await real_send(*args, **kwargs)
-    await real_response.aread()
-    return _record_responses(cassette, vcr_request, real_response)
+    await _record_responses(cassette, vcr_request, real_response, aread=True)
+    return real_response


 def async_vcr_send(cassette, real_send):

@@ -146,8 +174,8 @@ def _sync_vcr_send(cassette, real_send, *args, **kwargs):
         return response

     real_response = real_send(*args, **kwargs)
-    real_response.read()
-    return _record_responses(cassette, vcr_request, real_response)
+    asyncio.run(_record_responses(cassette, vcr_request, real_response, aread=False))
+    return real_response


 def sync_vcr_send(cassette, real_send):