mirror of
https://github.com/kevin1024/vcrpy.git
synced 2025-12-08 16:53:23 +00:00
Compare commits
201 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b122b5c701 | ||
|
|
4883e3eefa | ||
|
|
5678b13b47 | ||
|
|
48f5f84f86 | ||
|
|
31d8c3498b | ||
|
|
b28316ab10 | ||
|
|
3f78330c1e | ||
|
|
e8818e5c0b | ||
|
|
c7bb59ecd8 | ||
|
|
2183690da3 | ||
|
|
d5ba702a1b | ||
|
|
952994b365 | ||
|
|
e2f3240835 | ||
|
|
bb690833bc | ||
|
|
73eed94c47 | ||
|
|
a23fe0333a | ||
|
|
bb743861b6 | ||
|
|
ac70eaa17f | ||
|
|
d50f3385a6 | ||
|
|
14db4de224 | ||
|
|
2c4df79498 | ||
|
|
1456673cb4 | ||
|
|
19bd4e012c | ||
|
|
558c7fc625 | ||
|
|
8217a4c21b | ||
|
|
bd0aa59cd2 | ||
|
|
9a37817a3a | ||
|
|
b4c65bd677 | ||
|
|
93bc59508c | ||
|
|
e313a9cd52 | ||
|
|
5f1b20c4ca | ||
|
|
cd31d71901 | ||
|
|
4607ca1102 | ||
|
|
e3ced4385e | ||
|
|
80099ac6d7 | ||
|
|
440bc20faf | ||
|
|
3ddff27cda | ||
|
|
30b423e8c0 | ||
|
|
752ba0b749 | ||
|
|
c16e526d6a | ||
|
|
d64cdd337b | ||
|
|
ac230b76af | ||
|
|
965f3658d5 | ||
|
|
6465a5995b | ||
|
|
69ca261a88 | ||
|
|
3278619dcc | ||
|
|
3fb62e0f9b | ||
|
|
81978659f1 | ||
|
|
be651bd27c | ||
|
|
a6698ed060 | ||
|
|
48d0a2e453 | ||
|
|
5b858b132d | ||
|
|
c8d99a99ec | ||
|
|
ce27c63685 | ||
|
|
ab8944d3ca | ||
|
|
c6a7f4ae15 | ||
|
|
1d100dda25 | ||
|
|
7275e5d65d | ||
|
|
c6be705fb4 | ||
|
|
10b7f4efb3 | ||
|
|
7a6ef00f4d | ||
|
|
3bf6ac7184 | ||
|
|
983b2202ed | ||
|
|
15a6b71997 | ||
|
|
1ca708dcff | ||
|
|
f5597fa6c1 | ||
|
|
2b3247b3df | ||
|
|
d123a5e8d0 | ||
|
|
e2815fbc88 | ||
|
|
f9d4500c6e | ||
|
|
71eb624708 | ||
|
|
dc449715c1 | ||
|
|
275b9085f3 | ||
|
|
35650b141b | ||
|
|
9c8b679136 | ||
|
|
fab082eff5 | ||
|
|
ffc04f9128 | ||
|
|
4d84da1809 | ||
|
|
241b0bbd91 | ||
|
|
042e16c3e4 | ||
|
|
acef3f49bf | ||
|
|
9cfa6c5173 | ||
|
|
39a86ba3cf | ||
|
|
543c72ba51 | ||
|
|
86b114f2f5 | ||
|
|
4b06f3dba1 | ||
|
|
1c6503526b | ||
|
|
c9c05682cb | ||
|
|
39c8648aa7 | ||
|
|
dfff84d5bb | ||
|
|
40ac0de652 | ||
|
|
f3147f574b | ||
|
|
298a6933ff | ||
|
|
52da776b59 | ||
|
|
8842fb1c3a | ||
|
|
6c4ba172d8 | ||
|
|
c88f2c0dab | ||
|
|
3fd6b1c0b4 | ||
|
|
c6d87309f4 | ||
|
|
1fb9179cf9 | ||
|
|
a58e0d8830 | ||
|
|
acc101412d | ||
|
|
e60dafb8dc | ||
|
|
3ce5979acb | ||
|
|
68038d0559 | ||
|
|
f76289aa78 | ||
|
|
6252b92f50 | ||
|
|
1e3a5ac753 | ||
|
|
b1c45cd249 | ||
|
|
3a5ff1c1ce | ||
|
|
bf80673454 | ||
|
|
8028420cbb | ||
|
|
784b2dcb29 | ||
|
|
42b4a5d2fa | ||
|
|
b7f6c2fce2 | ||
|
|
6d7a842a33 | ||
|
|
db1f5b0dee | ||
|
|
c6667ac56c | ||
|
|
a093fb177d | ||
|
|
666686b542 | ||
|
|
5104b1f462 | ||
|
|
62fe272a8e | ||
|
|
f9b69d8da7 | ||
|
|
cb77cb8f69 | ||
|
|
e37fc9ab6e | ||
|
|
abbb50135f | ||
|
|
0594de9b3e | ||
|
|
53f686aa5b | ||
|
|
1677154f04 | ||
|
|
54bc6467eb | ||
|
|
c5487384ee | ||
|
|
5cf23298ac | ||
|
|
5fa7010712 | ||
|
|
f1e0241673 | ||
|
|
a3a255d606 | ||
|
|
0782382982 | ||
|
|
395d2be295 | ||
|
|
ee6e7905e9 | ||
|
|
cc4d03c62e | ||
|
|
8e13af2ee9 | ||
|
|
b522d3f0a3 | ||
|
|
d39c26b358 | ||
|
|
d76c243513 | ||
|
|
5cff354ec8 | ||
|
|
80614dbd00 | ||
|
|
356ff4122c | ||
|
|
cf765928ac | ||
|
|
73d11e80eb | ||
|
|
97de8a0fce | ||
|
|
895ae205ca | ||
|
|
f075c8b0b4 | ||
|
|
3919cb2573 | ||
|
|
bddec2e62a | ||
|
|
fa789e975b | ||
|
|
556fd0166c | ||
|
|
17c78bff9e | ||
|
|
713cb36d35 | ||
|
|
b0cb8765d5 | ||
|
|
97ad51fe6c | ||
|
|
1dd9cbde8b | ||
|
|
962284072b | ||
|
|
e9102b2bb4 | ||
|
|
957c8bd7a3 | ||
|
|
2d5f8a499e | ||
|
|
e5555a5d5b | ||
|
|
a542567e4a | ||
|
|
3168e7813e | ||
|
|
88cf01aa14 | ||
|
|
85ae012d9c | ||
|
|
db1e9e7180 | ||
|
|
dbf7a3337b | ||
|
|
dd97b02b72 | ||
|
|
e8346ad30e | ||
|
|
6a31904333 | ||
|
|
796dc8de7e | ||
|
|
ecb5d84f0f | ||
|
|
cebdd45849 | ||
|
|
8a8d46f130 | ||
|
|
954a100dfd | ||
|
|
604c0be571 | ||
|
|
0e57182207 | ||
|
|
c062c9f54c | ||
|
|
2abf1188a9 | ||
|
|
2b2935a1e7 | ||
|
|
a8545c89a5 | ||
|
|
5532c0b4cf | ||
|
|
f4467a8d6c | ||
|
|
f5fc7aac22 | ||
|
|
e8e9a4af9f | ||
|
|
7bf8f65815 | ||
|
|
defad28771 | ||
|
|
69621c67fb | ||
|
|
469a10b980 | ||
|
|
d90cea0260 | ||
|
|
c9da7a102f | ||
|
|
f4144359f6 | ||
|
|
69de388649 | ||
|
|
6446d00e27 | ||
|
|
36c7465cf7 | ||
|
|
010fa268d1 | ||
|
|
99c0384770 |
4
.github/workflows/codespell.yml
vendored
4
.github/workflows/codespell.yml
vendored
@@ -13,10 +13,10 @@ permissions:
|
|||||||
jobs:
|
jobs:
|
||||||
codespell:
|
codespell:
|
||||||
name: Check for spelling errors
|
name: Check for spelling errors
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout
|
- name: Checkout
|
||||||
uses: actions/checkout@v3
|
uses: actions/checkout@v6
|
||||||
- name: Codespell
|
- name: Codespell
|
||||||
uses: codespell-project/actions-codespell@v2
|
uses: codespell-project/actions-codespell@v2
|
||||||
|
|||||||
10
.github/workflows/docs.yml
vendored
10
.github/workflows/docs.yml
vendored
@@ -7,14 +7,14 @@ on:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
validate:
|
validate:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3
|
- uses: actions/checkout@v6
|
||||||
- uses: actions/setup-python@v4
|
- uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: "3.11"
|
python-version: "3.12"
|
||||||
|
|
||||||
- name: Install build dependencies
|
- name: Install build dependencies
|
||||||
run: pip install -r docs/requirements.txt
|
run: pip install -r docs/requirements.txt
|
||||||
- name: Rendering HTML documentation
|
- name: Rendering HTML documentation
|
||||||
|
|||||||
40
.github/workflows/main.yml
vendored
40
.github/workflows/main.yml
vendored
@@ -5,39 +5,57 @@ on:
|
|||||||
branches:
|
branches:
|
||||||
- master
|
- master
|
||||||
pull_request:
|
pull_request:
|
||||||
|
schedule:
|
||||||
|
- cron: "0 16 * * 5" # Every Friday 4pm
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
runs-on: ubuntu-latest
|
runs-on: ubuntu-24.04
|
||||||
strategy:
|
strategy:
|
||||||
fail-fast: false
|
fail-fast: false
|
||||||
matrix:
|
matrix:
|
||||||
python-version: ["3.8", "3.9", "3.10", "3.11", "pypy-3.8"]
|
python-version:
|
||||||
|
- "3.10"
|
||||||
|
- "3.11"
|
||||||
|
- "3.12"
|
||||||
|
- "3.13"
|
||||||
|
- "pypy-3.11"
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- uses: actions/checkout@v3.5.2
|
- uses: actions/checkout@v6
|
||||||
|
- name: Install uv
|
||||||
|
uses: astral-sh/setup-uv@v7
|
||||||
|
|
||||||
- name: Set up Python ${{ matrix.python-version }}
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
uses: actions/setup-python@v4
|
uses: actions/setup-python@v6
|
||||||
with:
|
with:
|
||||||
python-version: ${{ matrix.python-version }}
|
python-version: ${{ matrix.python-version }}
|
||||||
|
allow-prereleases: true
|
||||||
|
|
||||||
- name: Install project dependencies
|
- name: Install project dependencies
|
||||||
run: |
|
run: |
|
||||||
pip3 install --upgrade pip
|
uv pip install --system --upgrade pip setuptools
|
||||||
pip3 install codecov tox tox-gh-actions
|
uv pip install --system codecov '.[tests]'
|
||||||
|
uv pip check
|
||||||
|
|
||||||
- name: Run online tests with tox
|
- name: Allow creation of user namespaces (e.g. to the unshare command)
|
||||||
run: tox -- -m online
|
run: |
|
||||||
|
# .. so that we don't get error:
|
||||||
|
# unshare: write failed /proc/self/uid_map: Operation not permitted
|
||||||
|
# Idea from https://github.com/YoYoGames/GameMaker-Bugs/issues/6015#issuecomment-2135552784 .
|
||||||
|
sudo sysctl kernel.apparmor_restrict_unprivileged_userns=0
|
||||||
|
|
||||||
- name: Run offline tests with tox with no access to the Internet
|
- name: Run online tests
|
||||||
|
run: ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m online
|
||||||
|
|
||||||
|
- name: Run offline tests with no access to the Internet
|
||||||
run: |
|
run: |
|
||||||
# We're using unshare to take Internet access
|
# We're using unshare to take Internet access
|
||||||
# away from tox so that we'll notice whenever some new test
|
# away so that we'll notice whenever some new test
|
||||||
# is missing @pytest.mark.online decoration in the future
|
# is missing @pytest.mark.online decoration in the future
|
||||||
unshare --map-root-user --net -- \
|
unshare --map-root-user --net -- \
|
||||||
sh -c 'ip link set lo up; tox -- -m "not online"'
|
sh -c 'ip link set lo up; ./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append -m "not online"'
|
||||||
|
|
||||||
- name: Run coverage
|
- name: Run coverage
|
||||||
run: codecov
|
run: codecov
|
||||||
|
|||||||
62
.github/workflows/pre-commit-detect-outdated.yml
vendored
Normal file
62
.github/workflows/pre-commit-detect-outdated.yml
vendored
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
|
||||||
|
# Licensed under the MIT license
|
||||||
|
|
||||||
|
name: Detect outdated pre-commit hooks
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: '0 16 * * 5' # Every Friday 4pm
|
||||||
|
|
||||||
|
# NOTE: This will drop all permissions from GITHUB_TOKEN except metadata read,
|
||||||
|
# and then (re)add the ones listed below:
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
pull-requests: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
pre_commit_detect_outdated:
|
||||||
|
name: Detect outdated pre-commit hooks
|
||||||
|
runs-on: ubuntu-24.04
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
|
||||||
|
- name: Set up Python 3.12
|
||||||
|
uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: 3.12
|
||||||
|
|
||||||
|
- name: Install pre-commit
|
||||||
|
run: |-
|
||||||
|
pip install \
|
||||||
|
--disable-pip-version-check \
|
||||||
|
--no-warn-script-location \
|
||||||
|
--user \
|
||||||
|
pre-commit
|
||||||
|
echo "PATH=${HOME}/.local/bin:${PATH}" >> "${GITHUB_ENV}"
|
||||||
|
|
||||||
|
- name: Check for outdated hooks
|
||||||
|
run: |-
|
||||||
|
pre-commit autoupdate
|
||||||
|
git diff -- .pre-commit-config.yaml
|
||||||
|
|
||||||
|
- name: Create pull request from changes (if any)
|
||||||
|
id: create-pull-request
|
||||||
|
uses: peter-evans/create-pull-request@v7
|
||||||
|
with:
|
||||||
|
author: 'pre-commit <pre-commit@tools.invalid>'
|
||||||
|
base: master
|
||||||
|
body: |-
|
||||||
|
For your consideration.
|
||||||
|
|
||||||
|
:warning: Please **CLOSE AND RE-OPEN** this pull request so that [further workflow runs get triggered](https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs) for this pull request.
|
||||||
|
branch: precommit-autoupdate
|
||||||
|
commit-message: "pre-commit: Autoupdate"
|
||||||
|
delete-branch: true
|
||||||
|
draft: true
|
||||||
|
labels: enhancement
|
||||||
|
title: "pre-commit: Autoupdate"
|
||||||
|
|
||||||
|
- name: Log pull request URL
|
||||||
|
if: "${{ steps.create-pull-request.outputs.pull-request-url }}"
|
||||||
|
run: |
|
||||||
|
echo "Pull request URL is: ${{ steps.create-pull-request.outputs.pull-request-url }}"
|
||||||
20
.github/workflows/pre-commit.yml
vendored
Normal file
20
.github/workflows/pre-commit.yml
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
|
||||||
|
# Licensed under the MIT license
|
||||||
|
|
||||||
|
name: Run pre-commit
|
||||||
|
|
||||||
|
on:
|
||||||
|
- pull_request
|
||||||
|
- push
|
||||||
|
- workflow_dispatch
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
pre-commit:
|
||||||
|
name: Run pre-commit
|
||||||
|
runs-on: ubuntu-24.04
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v6
|
||||||
|
- uses: actions/setup-python@v6
|
||||||
|
with:
|
||||||
|
python-version: 3.12
|
||||||
|
- uses: pre-commit/action@v3.0.1
|
||||||
17
.pre-commit-config.yaml
Normal file
17
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
# Copyright (c) 2023 Sebastian Pipping <sebastian@pipping.org>
|
||||||
|
# Licensed under the MIT license
|
||||||
|
|
||||||
|
repos:
|
||||||
|
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||||
|
rev: v0.14.6
|
||||||
|
hooks:
|
||||||
|
- id: ruff
|
||||||
|
args: ["--output-format=full"]
|
||||||
|
- id: ruff-format
|
||||||
|
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v6.0.0
|
||||||
|
hooks:
|
||||||
|
- id: check-merge-conflict
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
- id: trailing-whitespace
|
||||||
@@ -7,9 +7,9 @@ version: 2
|
|||||||
|
|
||||||
# Set the version of Python and other tools you might need
|
# Set the version of Python and other tools you might need
|
||||||
build:
|
build:
|
||||||
os: ubuntu-22.04
|
os: ubuntu-24.04
|
||||||
tools:
|
tools:
|
||||||
python: "3.11"
|
python: "3.12"
|
||||||
|
|
||||||
# Build documentation in the docs/ directory with Sphinx
|
# Build documentation in the docs/ directory with Sphinx
|
||||||
sphinx:
|
sphinx:
|
||||||
@@ -20,3 +20,5 @@ sphinx:
|
|||||||
python:
|
python:
|
||||||
install:
|
install:
|
||||||
- requirements: docs/requirements.txt
|
- requirements: docs/requirements.txt
|
||||||
|
- method: pip
|
||||||
|
path: .
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
include README.rst
|
include README.rst
|
||||||
include LICENSE.txt
|
include LICENSE.txt
|
||||||
include tox.ini
|
|
||||||
recursive-include tests *
|
recursive-include tests *
|
||||||
recursive-exclude * __pycache__
|
recursive-exclude * __pycache__
|
||||||
recursive-exclude * *.py[co]
|
recursive-exclude * *.py[co]
|
||||||
|
|||||||
@@ -4,7 +4,7 @@ VCR.py 📼
|
|||||||
###########
|
###########
|
||||||
|
|
||||||
|
|
||||||
|PyPI| |Python versions| |Build Status| |CodeCov| |Gitter| |CodeStyleBlack|
|
|PyPI| |Python versions| |Build Status| |CodeCov| |Gitter|
|
||||||
|
|
||||||
----
|
----
|
||||||
|
|
||||||
@@ -70,6 +70,3 @@ more details
|
|||||||
.. |CodeCov| image:: https://codecov.io/gh/kevin1024/vcrpy/branch/master/graph/badge.svg
|
.. |CodeCov| image:: https://codecov.io/gh/kevin1024/vcrpy/branch/master/graph/badge.svg
|
||||||
:target: https://codecov.io/gh/kevin1024/vcrpy
|
:target: https://codecov.io/gh/kevin1024/vcrpy
|
||||||
:alt: Code Coverage Status
|
:alt: Code Coverage Status
|
||||||
.. |CodeStyleBlack| image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
|
||||||
:target: https://github.com/psf/black
|
|
||||||
:alt: Code Style: black
|
|
||||||
|
|||||||
2
docs/_static/vcr.svg
vendored
2
docs/_static/vcr.svg
vendored
@@ -24,4 +24,4 @@
|
|||||||
<stop offset="1" stop-color="#27DDA6"/>
|
<stop offset="1" stop-color="#27DDA6"/>
|
||||||
</linearGradient>
|
</linearGradient>
|
||||||
</defs>
|
</defs>
|
||||||
</svg>
|
</svg>
|
||||||
|
|||||||
|
Before Width: | Height: | Size: 6.2 KiB After Width: | Height: | Size: 6.2 KiB |
@@ -16,7 +16,7 @@ a nice addition. Here's an example:
|
|||||||
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml') as cass:
|
with vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml') as cass:
|
||||||
response = urllib2.urlopen('http://www.zombo.com/').read()
|
response = urllib2.urlopen('http://www.zombo.com/').read()
|
||||||
# cass should have 1 request inside it
|
# cass should have 1 request inside it
|
||||||
assert len(cass) == 1
|
assert len(cass) == 1
|
||||||
# the request uri should have been http://www.zombo.com/
|
# the request uri should have been http://www.zombo.com/
|
||||||
assert cass.requests[0].uri == 'http://www.zombo.com/'
|
assert cass.requests[0].uri == 'http://www.zombo.com/'
|
||||||
|
|
||||||
@@ -208,7 +208,7 @@ So these two calls are the same:
|
|||||||
|
|
||||||
# original (still works)
|
# original (still works)
|
||||||
vcr = VCR(filter_headers=['authorization'])
|
vcr = VCR(filter_headers=['authorization'])
|
||||||
|
|
||||||
# new
|
# new
|
||||||
vcr = VCR(filter_headers=[('authorization', None)])
|
vcr = VCR(filter_headers=[('authorization', None)])
|
||||||
|
|
||||||
@@ -218,7 +218,7 @@ Here are two examples of the new functionality:
|
|||||||
|
|
||||||
# replace with a static value (most common)
|
# replace with a static value (most common)
|
||||||
vcr = VCR(filter_headers=[('authorization', 'XXXXXX')])
|
vcr = VCR(filter_headers=[('authorization', 'XXXXXX')])
|
||||||
|
|
||||||
# replace with a callable, for example when testing
|
# replace with a callable, for example when testing
|
||||||
# lots of different kinds of authorization.
|
# lots of different kinds of authorization.
|
||||||
def replace_auth(key, value, request):
|
def replace_auth(key, value, request):
|
||||||
@@ -286,7 +286,7 @@ sensitive data from the response body:
|
|||||||
before_record_response=scrub_string(settings.USERNAME, 'username'),
|
before_record_response=scrub_string(settings.USERNAME, 'username'),
|
||||||
)
|
)
|
||||||
with my_vcr.use_cassette('test.yml'):
|
with my_vcr.use_cassette('test.yml'):
|
||||||
# your http code here
|
# your http code here
|
||||||
|
|
||||||
|
|
||||||
Decode compressed response
|
Decode compressed response
|
||||||
@@ -427,3 +427,16 @@ If you want to save the cassette only when the test succeeds, set the Cassette
|
|||||||
|
|
||||||
# Since there was an exception, the cassette file hasn't been created.
|
# Since there was an exception, the cassette file hasn't been created.
|
||||||
assert not os.path.exists('fixtures/vcr_cassettes/synopsis.yaml')
|
assert not os.path.exists('fixtures/vcr_cassettes/synopsis.yaml')
|
||||||
|
|
||||||
|
Drop unused requests
|
||||||
|
--------------------
|
||||||
|
|
||||||
|
Even if any HTTP request is changed or removed from tests, previously recorded
|
||||||
|
interactions remain in the cassette file. If set the ``drop_unused_requests``
|
||||||
|
option to ``True``, VCR will not save old HTTP interactions if they are not used.
|
||||||
|
|
||||||
|
.. code:: python
|
||||||
|
|
||||||
|
my_vcr = VCR(drop_unused_requests=True)
|
||||||
|
with my_vcr.use_cassette('fixtures/vcr_cassettes/synopsis.yaml'):
|
||||||
|
... # your HTTP interactions here
|
||||||
|
|||||||
@@ -7,22 +7,67 @@ For a full list of triaged issues, bugs and PRs and what release they are target
|
|||||||
|
|
||||||
All help in providing PRs to close out bug issues is appreciated. Even if that is providing a repo that fully replicates issues. We have very generous contributors that have added these to bug issues which meant another contributor picked up the bug and closed it out.
|
All help in providing PRs to close out bug issues is appreciated. Even if that is providing a repo that fully replicates issues. We have very generous contributors that have added these to bug issues which meant another contributor picked up the bug and closed it out.
|
||||||
|
|
||||||
|
- 8.1.0
|
||||||
|
- Enable brotli decompression if available (via ``brotli``, ``brotlipy`` or ``brotlicffi``) (#620) - thanks @immerrr
|
||||||
|
- Fix aiohttp allowing both ``data`` and ``json`` arguments when one is None (#624) - thanks @leorochael
|
||||||
|
- Fix usage of io-like interface with VCR.py (#906) - thanks @tito and @kevdevg
|
||||||
|
- Migrate to declarative Python package config (#767) - thanks @deronnax
|
||||||
|
- Various linting fixes - thanks @jairhenrique
|
||||||
|
- CI: bump actions/checkout from 5 to 6 (#955)
|
||||||
|
|
||||||
|
- 8.0.0
|
||||||
|
- BREAKING: Drop support for Python 3.9 (major version bump) - thanks @jairhenrique
|
||||||
|
- BREAKING: Drop support for urllib3 < 2 - fixes CVE warnings from urllib3 1.x (#926, #880) - thanks @jairhenrique
|
||||||
|
- New feature: ``drop_unused_requests`` option to remove unused interactions from cassettes (#763) - thanks @danielnsilva
|
||||||
|
- Rewrite httpx support to patch httpcore instead of httpx (#943) - thanks @seowalex
|
||||||
|
- Fixes ``httpx.ResponseNotRead`` exceptions (#832, #834)
|
||||||
|
- Fixes ``KeyError: 'follow_redirects'`` (#945)
|
||||||
|
- Adds support for custom httpx transports
|
||||||
|
- Fix HTTPS proxy handling - proxy address no longer ends up in cassette URIs (#809, #914) - thanks @alga
|
||||||
|
- Fix ``iscoroutinefunction`` deprecation warning on Python 3.14 - thanks @kloczek
|
||||||
|
- Only log message if response is appended - thanks @talfus-laddus
|
||||||
|
- Optimize urllib.parse calls - thanks @Martin-Brunthaler
|
||||||
|
- Fix CI for Ubuntu 24.04 - thanks @hartwork
|
||||||
|
- Various CI improvements: migrate to uv, update GitHub Actions - thanks @jairhenrique
|
||||||
|
- Various linting and test improvements - thanks @jairhenrique and @hartwork
|
||||||
|
|
||||||
|
- 7.0.0
|
||||||
|
- Drop support for python 3.8 (major version bump) - thanks @jairhenrique
|
||||||
|
- Various linting and test fixes - thanks @jairhenrique
|
||||||
|
- Bugfix for urllib2>=2.3.0 - missing version_string (#888)
|
||||||
|
- Bugfix for asyncio.run - thanks @alekeik1
|
||||||
|
- 6.0.2
|
||||||
|
- Ensure body is consumed only once (#846) - thanks @sathieu
|
||||||
|
- Permit urllib3 2.x for non-PyPy Python >=3.10
|
||||||
|
- Fix typos in test commands - thanks @chuckwondo
|
||||||
|
- Several test and workflow improvements - thanks @hartwork and @graingert
|
||||||
|
- 6.0.1
|
||||||
|
- Bugfix with to Tornado cassette generator (thanks @graingert)
|
||||||
|
- 6.0.0
|
||||||
|
- BREAKING: Fix issue with httpx support (thanks @parkerhancock) in #784. NOTE: You may have to recreate some of your cassettes produced in previous releases due to the binary format being saved incorrectly in previous releases
|
||||||
|
- BREAKING: Drop support for `boto` (vcrpy still supports boto3, but is dropping the deprecated `boto` support in this release. (thanks @jairhenrique)
|
||||||
|
- Fix compatibility issue with Python 3.12 (thanks @hartwork)
|
||||||
|
- Drop simplejson (fixes some compatibility issues) (thanks @jairhenrique)
|
||||||
|
- Run CI on Python 3.12 and PyPy 3.9-3.10 (thanks @mgorny)
|
||||||
|
- Various linting and docs improvements (thanks @jairhenrique)
|
||||||
|
- Tornado fixes (thanks @graingert)
|
||||||
- 5.1.0
|
- 5.1.0
|
||||||
- Use ruff for linting (instead of current flake8/isort/pyflakes) - thanks @jairhenrique
|
- Use ruff for linting (instead of current flake8/isort/pyflakes) - thanks @jairhenrique
|
||||||
- Enable rule B (flake8-bugbear) on ruff - thanks @jairhenrique
|
- Enable rule B (flake8-bugbear) on ruff - thanks @jairhenrique
|
||||||
- Configure read the docs V2 - thanks @jairhenrique
|
- Configure read the docs V2 - thanks @jairhenrique
|
||||||
- Fix typo in docs - thanks @quasimik
|
- Fix typo in docs - thanks @quasimik
|
||||||
- Make json.loads of Python >=3.6 decode bytes by itself - thanks @hartwork
|
- Make json.loads of Python >=3.6 decode bytes by itself - thanks @hartwork
|
||||||
- Fix body matcher for chunked requests (fixes #734) - thanks @hartwork
|
- Fix body matcher for chunked requests (fixes #734) - thanks @hartwork
|
||||||
- Fix query param filter for aiohttp (fixes #517) - thanks @hartwork and @salomvary
|
- Fix query param filter for aiohttp (fixes #517) - thanks @hartwork and @salomvary
|
||||||
- Remove unnecessary dependency on six. - thanks @charettes
|
- Remove unnecessary dependency on six. - thanks @charettes
|
||||||
- build(deps): update sphinx requirement from <7 to <8 - thanks @jairhenrique
|
- build(deps): update sphinx requirement from <7 to <8 - thanks @jairhenrique
|
||||||
- Add action to validate docs - thanks @jairhenrique
|
- Add action to validate docs - thanks @jairhenrique
|
||||||
- Add editorconfig file - thanks @jairhenrique
|
- Add editorconfig file - thanks @jairhenrique
|
||||||
- Drop iscoroutinefunction fallback function for unsupported python thanks @jairhenrique
|
- Drop iscoroutinefunction fallback function for unsupported python thanks @jairhenrique
|
||||||
- 5.0.0
|
- 5.0.0
|
||||||
- BREAKING CHANGE: Drop support for Python 3.7. 3.7 is EOL as of 6/27/23 Thanks @jairhenrique
|
- BREAKING CHANGE: Drop support for Python 3.7. 3.7 is EOL as of 6/27/23 Thanks @jairhenrique
|
||||||
- BREAKING CHANGE: Custom Cassette persisters no longer catch ValueError. If you have implemented a custom persister (has anyone implemented a custom persister? Let us know!) then you will need to throw a CassetteNotFoundError when unable to find a cassette. See #681 for discussion and reason for this change. Thanks @amosjyng for the PR and the review from @hartwork
|
- BREAKING CHANGE: Custom Cassette persisters no longer catch ValueError. If you have implemented a custom persister (has anyone implemented a custom persister? Let us know!) then you will need to throw a CassetteNotFoundError when unable to find a cassette. See #681 for discussion and reason for this change. Thanks @amosjyng for the PR and the review from @hartwork
|
||||||
|
|
||||||
- 4.4.0
|
- 4.4.0
|
||||||
- HUGE thanks to @hartwork for all the work done on this release!
|
- HUGE thanks to @hartwork for all the work done on this release!
|
||||||
- Bring vcr/unittest in to vcrpy as a full feature of vcr instead of a separate library. Big thanks to @hartwork for doing this and to @agriffis for originally creating the library
|
- Bring vcr/unittest in to vcrpy as a full feature of vcr instead of a separate library. Big thanks to @hartwork for doing this and to @agriffis for originally creating the library
|
||||||
@@ -70,8 +115,8 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Bugfix: Fix test suite by switching to mockbin (thanks @jairhenrique)
|
- Bugfix: Fix test suite by switching to mockbin (thanks @jairhenrique)
|
||||||
- 4.0.2
|
- 4.0.2
|
||||||
- Fix mock imports as reported in #504 by @llybin. Thank you.
|
- Fix mock imports as reported in #504 by @llybin. Thank you.
|
||||||
- 4.0.1
|
- 4.0.1
|
||||||
- Fix logo alignment for PyPI
|
- Fix logo alignment for PyPI
|
||||||
- 4.0.0
|
- 4.0.0
|
||||||
- Remove Python2 support (@hugovk)
|
- Remove Python2 support (@hugovk)
|
||||||
- Add Python 3.8 TravisCI support (@neozenith)
|
- Add Python 3.8 TravisCI support (@neozenith)
|
||||||
@@ -83,7 +128,7 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Add support for `request_info` on mocked responses in aiohttp stub #495 (@nickdirienzo)
|
- Add support for `request_info` on mocked responses in aiohttp stub #495 (@nickdirienzo)
|
||||||
- doc: fixed variable name (a -> cass) in an example for rewind #492 (@yarikoptic)
|
- doc: fixed variable name (a -> cass) in an example for rewind #492 (@yarikoptic)
|
||||||
|
|
||||||
- 2.1.1
|
- 2.1.1
|
||||||
- Format code with black (@neozenith)
|
- Format code with black (@neozenith)
|
||||||
- Use latest pypy3 in Travis (@hugovk)
|
- Use latest pypy3 in Travis (@hugovk)
|
||||||
- Improve documentation about custom matchers (@gward)
|
- Improve documentation about custom matchers (@gward)
|
||||||
@@ -91,7 +136,7 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Add `pytest-recording` to the documentation as an alternative Pytest plugin (@Stranger6667)
|
- Add `pytest-recording` to the documentation as an alternative Pytest plugin (@Stranger6667)
|
||||||
- Fix yarl and python3.5 version issue (@neozenith)
|
- Fix yarl and python3.5 version issue (@neozenith)
|
||||||
- Fix header matcher for boto3 - fixes #474 (@simahawk)
|
- Fix header matcher for boto3 - fixes #474 (@simahawk)
|
||||||
- 2.1.0
|
- 2.1.0
|
||||||
- Add a `rewind` method to reset a cassette (thanks @khamidou)
|
- Add a `rewind` method to reset a cassette (thanks @khamidou)
|
||||||
- New error message with more details on why the cassette failed to play a request (thanks @arthurHamon2, @neozenith)
|
- New error message with more details on why the cassette failed to play a request (thanks @arthurHamon2, @neozenith)
|
||||||
- Handle connect tunnel URI (thanks @jeking3)
|
- Handle connect tunnel URI (thanks @jeking3)
|
||||||
@@ -103,9 +148,9 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Fix bugs on aiohttp integration (thanks @graingert, @steinnes, @stj, @lamenezes, @lmazuel)
|
- Fix bugs on aiohttp integration (thanks @graingert, @steinnes, @stj, @lamenezes, @lmazuel)
|
||||||
- Fix Biopython incompatibility (thanks @rishab121)
|
- Fix Biopython incompatibility (thanks @rishab121)
|
||||||
- Fix Boto3 integration (thanks @1oglop1, @arthurHamon2)
|
- Fix Boto3 integration (thanks @1oglop1, @arthurHamon2)
|
||||||
- 2.0.1
|
- 2.0.1
|
||||||
- Fix bug when using vcrpy with python 3.4
|
- Fix bug when using vcrpy with python 3.4
|
||||||
- 2.0.0
|
- 2.0.0
|
||||||
- Support python 3.7 (fix httplib2 and urllib2, thanks @felixonmars)
|
- Support python 3.7 (fix httplib2 and urllib2, thanks @felixonmars)
|
||||||
- [#356] Fixes `before_record_response` so the original response isn't changed (thanks @kgraves)
|
- [#356] Fixes `before_record_response` so the original response isn't changed (thanks @kgraves)
|
||||||
- Fix requests stub when using proxy (thanks @samuelfekete @daneoshiga)
|
- Fix requests stub when using proxy (thanks @samuelfekete @daneoshiga)
|
||||||
@@ -115,56 +160,56 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Improve docs (thanks @adamchainz)
|
- Improve docs (thanks @adamchainz)
|
||||||
|
|
||||||
|
|
||||||
- 1.13.0
|
- 1.13.0
|
||||||
- Fix support to latest aiohttp version (3.3.2). Fix content-type bug in aiohttp stub. Save URL with query params properly when using aiohttp.
|
- Fix support to latest aiohttp version (3.3.2). Fix content-type bug in aiohttp stub. Save URL with query params properly when using aiohttp.
|
||||||
- 1.12.0
|
- 1.12.0
|
||||||
- Fix support to latest aiohttp version (3.2.1), Adapted setup to PEP508, Support binary responses on aiohttp, Dropped support for EOL python versions (2.6 and 3.3)
|
- Fix support to latest aiohttp version (3.2.1), Adapted setup to PEP508, Support binary responses on aiohttp, Dropped support for EOL python versions (2.6 and 3.3)
|
||||||
- 1.11.1
|
- 1.11.1
|
||||||
- Fix compatibility with newest requests and urllib3 releases
|
- Fix compatibility with newest requests and urllib3 releases
|
||||||
- 1.11.0
|
- 1.11.0
|
||||||
- Allow injection of persistence methods + bugfixes (thanks @j-funk and @IvanMalison),
|
- Allow injection of persistence methods + bugfixes (thanks @j-funk and @IvanMalison),
|
||||||
- Support python 3.6 + CI tests (thanks @derekbekoe and @graingert),
|
- Support python 3.6 + CI tests (thanks @derekbekoe and @graingert),
|
||||||
- Support pytest-asyncio coroutines (thanks @graingert)
|
- Support pytest-asyncio coroutines (thanks @graingert)
|
||||||
- 1.10.5
|
- 1.10.5
|
||||||
- Added a fix to httplib2 (thanks @carlosds730), Fix an issue with
|
- Added a fix to httplib2 (thanks @carlosds730), Fix an issue with
|
||||||
- aiohttp (thanks @madninja), Add missing requirement yarl (thanks @lamenezes),
|
- aiohttp (thanks @madninja), Add missing requirement yarl (thanks @lamenezes),
|
||||||
- Remove duplicate mock triple (thanks @FooBarQuaxx)
|
- Remove duplicate mock triple (thanks @FooBarQuaxx)
|
||||||
- 1.10.4
|
- 1.10.4
|
||||||
- Fix an issue with asyncio aiohttp (thanks @madninja)
|
- Fix an issue with asyncio aiohttp (thanks @madninja)
|
||||||
- 1.10.3
|
- 1.10.3
|
||||||
- Fix some issues with asyncio and params (thanks @anovikov1984 and @lamenezes)
|
- Fix some issues with asyncio and params (thanks @anovikov1984 and @lamenezes)
|
||||||
- Fix some issues with cassette serialize / deserialize and empty response bodies (thanks @gRoussac and @dz0ny)
|
- Fix some issues with cassette serialize / deserialize and empty response bodies (thanks @gRoussac and @dz0ny)
|
||||||
- 1.10.2
|
- 1.10.2
|
||||||
- Fix 1.10.1 release - add aiohttp support back in
|
- Fix 1.10.1 release - add aiohttp support back in
|
||||||
- 1.10.1
|
- 1.10.1
|
||||||
- [bad release] Fix build for Fedora package + python2 (thanks @puiterwijk and @lamenezes)
|
- [bad release] Fix build for Fedora package + python2 (thanks @puiterwijk and @lamenezes)
|
||||||
- 1.10.0
|
- 1.10.0
|
||||||
- Add support for aiohttp (thanks @lamenezes)
|
- Add support for aiohttp (thanks @lamenezes)
|
||||||
- 1.9.0
|
- 1.9.0
|
||||||
- Add support for boto3 (thanks @desdm, @foorbarna).
|
- Add support for boto3 (thanks @desdm, @foorbarna).
|
||||||
- Fix deepcopy issue for response headers when `decode_compressed_response` is enabled (thanks @nickdirienzo)
|
- Fix deepcopy issue for response headers when `decode_compressed_response` is enabled (thanks @nickdirienzo)
|
||||||
- 1.8.0
|
- 1.8.0
|
||||||
- Fix for Serialization errors with JSON adapter (thanks @aliaksandrb).
|
- Fix for Serialization errors with JSON adapter (thanks @aliaksandrb).
|
||||||
- Avoid concatenating bytes with strings (thanks @jaysonsantos).
|
- Avoid concatenating bytes with strings (thanks @jaysonsantos).
|
||||||
- Exclude __pycache__ dirs & compiled files in sdist (thanks @koobs).
|
- Exclude __pycache__ dirs & compiled files in sdist (thanks @koobs).
|
||||||
- Fix Tornado support behavior for Tornado 3 (thanks @abhinav).
|
- Fix Tornado support behavior for Tornado 3 (thanks @abhinav).
|
||||||
- decode_compressed_response option and filter (thanks @jayvdb).
|
- decode_compressed_response option and filter (thanks @jayvdb).
|
||||||
- 1.7.4 [#217]
|
- 1.7.4 [#217]
|
||||||
- Make use_cassette decorated functions actually return a value (thanks @bcen).
|
- Make use_cassette decorated functions actually return a value (thanks @bcen).
|
||||||
- [#199] Fix path transformation defaults.
|
- [#199] Fix path transformation defaults.
|
||||||
- Better headers dictionary management.
|
- Better headers dictionary management.
|
||||||
- 1.7.3 [#188]
|
- 1.7.3 [#188]
|
||||||
- ``additional_matchers`` kwarg on ``use_cassette``.
|
- ``additional_matchers`` kwarg on ``use_cassette``.
|
||||||
- [#191] Actually support passing multiple before_record_request functions (thanks @agriffis).
|
- [#191] Actually support passing multiple before_record_request functions (thanks @agriffis).
|
||||||
- 1.7.2
|
- 1.7.2
|
||||||
- [#186] Get effective_url in tornado (thanks @mvschaik)
|
- [#186] Get effective_url in tornado (thanks @mvschaik)
|
||||||
- [#187] Set request_time on Response object in tornado (thanks @abhinav).
|
- [#187] Set request_time on Response object in tornado (thanks @abhinav).
|
||||||
- 1.7.1
|
- 1.7.1
|
||||||
- [#183] Patch ``fetch_impl`` instead of the entire HTTPClient class for Tornado (thanks @abhinav).
|
- [#183] Patch ``fetch_impl`` instead of the entire HTTPClient class for Tornado (thanks @abhinav).
|
||||||
- 1.7.0
|
- 1.7.0
|
||||||
- [#177] Properly support coroutine/generator decoration.
|
- [#177] Properly support coroutine/generator decoration.
|
||||||
- [#178] Support distribute (thanks @graingert). [#163] Make compatibility between python2 and python3 recorded cassettes more robust (thanks @gward).
|
- [#178] Support distribute (thanks @graingert). [#163] Make compatibility between python2 and python3 recorded cassettes more robust (thanks @gward).
|
||||||
- 1.6.1
|
- 1.6.1
|
||||||
- [#169] Support conditional requirements in old versions of pip
|
- [#169] Support conditional requirements in old versions of pip
|
||||||
- Fix RST parse errors generated by pandoc
|
- Fix RST parse errors generated by pandoc
|
||||||
- [Tornado] Fix unsupported features exception not being raised
|
- [Tornado] Fix unsupported features exception not being raised
|
||||||
@@ -178,17 +223,17 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Fix crash when cassette path contains cassette library directory (thanks @gazpachoking).
|
- Fix crash when cassette path contains cassette library directory (thanks @gazpachoking).
|
||||||
- 1.5.0
|
- 1.5.0
|
||||||
- Automatic cassette naming and 'application/json' post data filtering (thanks @marco-santamaria).
|
- Automatic cassette naming and 'application/json' post data filtering (thanks @marco-santamaria).
|
||||||
- 1.4.2
|
- 1.4.2
|
||||||
- Fix a bug caused by requests 2.7 and chunked transfer encoding
|
- Fix a bug caused by requests 2.7 and chunked transfer encoding
|
||||||
- 1.4.1
|
- 1.4.1
|
||||||
- Include README, tests, LICENSE in package. Thanks @ralphbean.
|
- Include README, tests, LICENSE in package. Thanks @ralphbean.
|
||||||
- 1.4.0
|
- 1.4.0
|
||||||
- Filter post data parameters (thanks @eadmundo)
|
- Filter post data parameters (thanks @eadmundo)
|
||||||
- Support for posting files through requests, inject\_cassette kwarg to access cassette from ``use_cassette`` decorated function, ``with_current_defaults`` actually works (thanks @samstav).
|
- Support for posting files through requests, inject\_cassette kwarg to access cassette from ``use_cassette`` decorated function, ``with_current_defaults`` actually works (thanks @samstav).
|
||||||
- 1.3.0
|
- 1.3.0
|
||||||
- Fix/add support for urllib3 (thanks @aisch)
|
- Fix/add support for urllib3 (thanks @aisch)
|
||||||
- Fix default port for https (thanks @abhinav).
|
- Fix default port for https (thanks @abhinav).
|
||||||
- 1.2.0
|
- 1.2.0
|
||||||
- Add custom\_patches argument to VCR/Cassette objects to allow users to stub custom classes when cassettes become active.
|
- Add custom\_patches argument to VCR/Cassette objects to allow users to stub custom classes when cassettes become active.
|
||||||
- 1.1.4
|
- 1.1.4
|
||||||
- Add force reset around calls to actual connection from stubs, to ensure compatibility with the version of httplib/urlib2 in python 2.7.9.
|
- Add force reset around calls to actual connection from stubs, to ensure compatibility with the version of httplib/urlib2 in python 2.7.9.
|
||||||
@@ -199,22 +244,22 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- fix Windows connectionpool stub bug (thanks @gazpachoking)
|
- fix Windows connectionpool stub bug (thanks @gazpachoking)
|
||||||
- add support for requests 2.5
|
- add support for requests 2.5
|
||||||
- 1.1.2
|
- 1.1.2
|
||||||
- Add urllib==1.7.1 support.
|
- Add urllib==1.7.1 support.
|
||||||
- Make json serialize error handling correct
|
- Make json serialize error handling correct
|
||||||
- Improve logging of match failures.
|
- Improve logging of match failures.
|
||||||
- 1.1.1
|
- 1.1.1
|
||||||
- Use function signature preserving ``wrapt.decorator`` to write the decorator version of use\_cassette in order to ensure compatibility with py.test fixtures and python 2.
|
- Use function signature preserving ``wrapt.decorator`` to write the decorator version of use\_cassette in order to ensure compatibility with py.test fixtures and python 2.
|
||||||
- Move all request filtering into the ``before_record_callable``.
|
- Move all request filtering into the ``before_record_callable``.
|
||||||
- 1.1.0
|
- 1.1.0
|
||||||
- Add ``before_record_response``. Fix several bugs related to the context management of cassettes.
|
- Add ``before_record_response``. Fix several bugs related to the context management of cassettes.
|
||||||
- 1.0.3
|
- 1.0.3
|
||||||
- Fix an issue with requests 2.4 and make sure case sensitivity is consistent across python versions
|
- Fix an issue with requests 2.4 and make sure case sensitivity is consistent across python versions
|
||||||
- 1.0.2
|
- 1.0.2
|
||||||
- Fix an issue with requests 2.3
|
- Fix an issue with requests 2.3
|
||||||
- 1.0.1
|
- 1.0.1
|
||||||
- Fix a bug with the new ignore requests feature and the once record mode
|
- Fix a bug with the new ignore requests feature and the once record mode
|
||||||
- 1.0.0
|
- 1.0.0
|
||||||
- *BACKWARDS INCOMPATIBLE*: Please see the 'upgrade' section in the README. Take a look at the matcher section as well, you might want to update your ``match_on`` settings.
|
- *BACKWARDS INCOMPATIBLE*: Please see the 'upgrade' section in the README. Take a look at the matcher section as well, you might want to update your ``match_on`` settings.
|
||||||
- Add support for filtering sensitive data from requests, matching query strings after the order changes and improving the built-in matchers, (thanks to @mshytikov)
|
- Add support for filtering sensitive data from requests, matching query strings after the order changes and improving the built-in matchers, (thanks to @mshytikov)
|
||||||
- Support for ignoring requests to certain hosts, bump supported Python3 version to 3.4, fix some bugs with Boto support (thanks @marusich)
|
- Support for ignoring requests to certain hosts, bump supported Python3 version to 3.4, fix some bugs with Boto support (thanks @marusich)
|
||||||
- Fix error with URL field capitalization in README (thanks @simon-weber)
|
- Fix error with URL field capitalization in README (thanks @simon-weber)
|
||||||
@@ -222,27 +267,27 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Added ``all_played`` property on cassette (thanks @mshytikov)
|
- Added ``all_played`` property on cassette (thanks @mshytikov)
|
||||||
|
|
||||||
- 0.7.0
|
- 0.7.0
|
||||||
- VCR.py now supports Python 3! (thanks @asundg)
|
- VCR.py now supports Python 3! (thanks @asundg)
|
||||||
- Also I refactored the stub connections quite a bit to add support for the putrequest and putheader calls.
|
- Also I refactored the stub connections quite a bit to add support for the putrequest and putheader calls.
|
||||||
- This version also adds support for httplib2 (thanks @nilp0inter).
|
- This version also adds support for httplib2 (thanks @nilp0inter).
|
||||||
- I have added a couple tests for boto since it is an http client in its own right.
|
- I have added a couple tests for boto since it is an http client in its own right.
|
||||||
- Finally, this version includes a fix for a bug where requests wasn't being patched properly (thanks @msabramo).
|
- Finally, this version includes a fix for a bug where requests wasn't being patched properly (thanks @msabramo).
|
||||||
- 0.6.0
|
- 0.6.0
|
||||||
- Store response headers as a list since a HTTP response can have the same header twice (happens with set-cookie sometimes).
|
- Store response headers as a list since a HTTP response can have the same header twice (happens with set-cookie sometimes).
|
||||||
- This has the added benefit of preserving the order of headers.
|
- This has the added benefit of preserving the order of headers.
|
||||||
- Thanks @smallcode for the bug report leading to this change.
|
- Thanks @smallcode for the bug report leading to this change.
|
||||||
- I have made an effort to ensure backwards compatibility with the old cassettes' header storage mechanism, but if you want to upgrade to the new header storage, you should delete your cassettes and re-record them.
|
- I have made an effort to ensure backwards compatibility with the old cassettes' header storage mechanism, but if you want to upgrade to the new header storage, you should delete your cassettes and re-record them.
|
||||||
- Also this release adds better error messages (thanks @msabramo)
|
- Also this release adds better error messages (thanks @msabramo)
|
||||||
- and adds support for using VCR as a decorator (thanks @smallcode for the motivation)
|
- and adds support for using VCR as a decorator (thanks @smallcode for the motivation)
|
||||||
- 0.5.0
|
- 0.5.0
|
||||||
- Change the ``response_of`` method to ``responses_of`` since cassettes can now contain more than one response for a request.
|
- Change the ``response_of`` method to ``responses_of`` since cassettes can now contain more than one response for a request.
|
||||||
- Since this changes the API, I'm bumping the version.
|
- Since this changes the API, I'm bumping the version.
|
||||||
- Also includes 2 bugfixes:
|
- Also includes 2 bugfixes:
|
||||||
- a better error message when attempting to overwrite a cassette file,
|
- a better error message when attempting to overwrite a cassette file,
|
||||||
- and a fix for a bug with requests sessions (thanks @msabramo)
|
- and a fix for a bug with requests sessions (thanks @msabramo)
|
||||||
- 0.4.0
|
- 0.4.0
|
||||||
- Change default request recording behavior for multiple requests.
|
- Change default request recording behavior for multiple requests.
|
||||||
- If you make the same request multiple times to the same URL, the response might be different each time (maybe the response has a timestamp in it or something), so this will make the same request multiple times and save them all.
|
- If you make the same request multiple times to the same URL, the response might be different each time (maybe the response has a timestamp in it or something), so this will make the same request multiple times and save them all.
|
||||||
- Then, when you are replaying the cassette, the responses will be played back in the same order in which they were received.
|
- Then, when you are replaying the cassette, the responses will be played back in the same order in which they were received.
|
||||||
- If you were making multiple requests to the same URL in a cassette before version 0.4.0, you might need to regenerate your cassette files.
|
- If you were making multiple requests to the same URL in a cassette before version 0.4.0, you might need to regenerate your cassette files.
|
||||||
- Also, removes support for the cassette.play\_count counter API, since individual requests aren't unique anymore.
|
- Also, removes support for the cassette.play\_count counter API, since individual requests aren't unique anymore.
|
||||||
@@ -262,7 +307,7 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- 0.3.0
|
- 0.3.0
|
||||||
- *Backwards incompatible release*
|
- *Backwards incompatible release*
|
||||||
- Added support for record modes, and changed the default recording behavior to the "once" record mode. Please see the documentation on record modes for more.
|
- Added support for record modes, and changed the default recording behavior to the "once" record mode. Please see the documentation on record modes for more.
|
||||||
- Added support for custom request matching, and changed the default request matching behavior to match only on the URL and method.
|
- Added support for custom request matching, and changed the default request matching behavior to match only on the URL and method.
|
||||||
- Also, improved the httplib mocking to add support for the ``HTTPConnection.send()`` method.
|
- Also, improved the httplib mocking to add support for the ``HTTPConnection.send()`` method.
|
||||||
- This means that requests won't actually be sent until the response is read, since I need to record the entire request in order to match up the appropriate response.
|
- This means that requests won't actually be sent until the response is read, since I need to record the entire request in order to match up the appropriate response.
|
||||||
- I don't think this should cause any issues unless you are sending requests without ever loading the response (which none of the standard httplib wrappers do, as far as I know).
|
- I don't think this should cause any issues unless you are sending requests without ever loading the response (which none of the standard httplib wrappers do, as far as I know).
|
||||||
@@ -270,13 +315,13 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- 0.2.1
|
- 0.2.1
|
||||||
- Fixed missing modules in setup.py
|
- Fixed missing modules in setup.py
|
||||||
- 0.2.0
|
- 0.2.0
|
||||||
- Added configuration API, which lets you configure some settings on VCR (see the README).
|
- Added configuration API, which lets you configure some settings on VCR (see the README).
|
||||||
- Also, VCR no longer saves cassettes if they haven't changed at all and supports JSON as well as YAML (thanks @sirpengi).
|
- Also, VCR no longer saves cassettes if they haven't changed at all and supports JSON as well as YAML (thanks @sirpengi).
|
||||||
- Added amazing new skeuomorphic logo, thanks @hairarrow.
|
- Added amazing new skeuomorphic logo, thanks @hairarrow.
|
||||||
- 0.1.0
|
- 0.1.0
|
||||||
- *backwards incompatible release - delete your old cassette files*
|
- *backwards incompatible release - delete your old cassette files*
|
||||||
- This release adds the ability to access the cassette to make assertions on it
|
- This release adds the ability to access the cassette to make assertions on it
|
||||||
- as well as a major code refactor thanks to @dlecocq.
|
- as well as a major code refactor thanks to @dlecocq.
|
||||||
- It also fixes a couple longstanding bugs with redirects and HTTPS. [#3 and #4]
|
- It also fixes a couple longstanding bugs with redirects and HTTPS. [#3 and #4]
|
||||||
- 0.0.4
|
- 0.0.4
|
||||||
- If you have libyaml installed, vcrpy will use the c bindings instead. Speed up your tests! Thanks @dlecocq
|
- If you have libyaml installed, vcrpy will use the c bindings instead. Speed up your tests! Thanks @dlecocq
|
||||||
@@ -286,4 +331,3 @@ All help in providing PRs to close out bug issues is appreciated. Even if that i
|
|||||||
- Add support for requests / urllib3
|
- Add support for requests / urllib3
|
||||||
- 0.0.1
|
- 0.0.1
|
||||||
- Initial Release
|
- Initial Release
|
||||||
|
|
||||||
|
|||||||
@@ -316,5 +316,5 @@ texinfo_documents = [
|
|||||||
|
|
||||||
|
|
||||||
# Example configuration for intersphinx: refer to the Python standard library.
|
# Example configuration for intersphinx: refer to the Python standard library.
|
||||||
intersphinx_mapping = {"https://docs.python.org/": None}
|
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}
|
||||||
html_theme = "alabaster"
|
html_theme = "alabaster"
|
||||||
|
|||||||
@@ -24,7 +24,7 @@ So whilst reporting issues are valuable, please consider:
|
|||||||
- contributing an issue with a toy repo that replicates the issue.
|
- contributing an issue with a toy repo that replicates the issue.
|
||||||
- contributing PRs is a more valuable donation of your time and effort.
|
- contributing PRs is a more valuable donation of your time and effort.
|
||||||
|
|
||||||
Thanks again for your interest and support in VCRpy.
|
Thanks again for your interest and support in VCRpy.
|
||||||
|
|
||||||
We really appreciate it.
|
We really appreciate it.
|
||||||
|
|
||||||
@@ -57,7 +57,7 @@ Simply adding these three labels for incoming issues means a lot for maintaining
|
|||||||
- Which library does it affect? ``core``, ``aiohttp``, ``requests``, ``urllib3``, ``tornado4``, ``httplib2``
|
- Which library does it affect? ``core``, ``aiohttp``, ``requests``, ``urllib3``, ``tornado4``, ``httplib2``
|
||||||
- If it is a bug, is it ``Verified Can Replicate`` or ``Requires Help Replicating``
|
- If it is a bug, is it ``Verified Can Replicate`` or ``Requires Help Replicating``
|
||||||
- Thanking people for raising issues. Feedback is always appreciated.
|
- Thanking people for raising issues. Feedback is always appreciated.
|
||||||
- Politely asking if they are able to link to an example repo that replicates the issue if they haven't already. Being able to *clone and go* helps the next person and we like that. 😃
|
- Politely asking if they are able to link to an example repo that replicates the issue if they haven't already. Being able to *clone and go* helps the next person and we like that. 😃
|
||||||
|
|
||||||
**Maintainer:**
|
**Maintainer:**
|
||||||
|
|
||||||
@@ -68,7 +68,7 @@ This involves creating PRs to address bugs and enhancement requests. It also mea
|
|||||||
The PR reviewer is a second set of eyes to see if:
|
The PR reviewer is a second set of eyes to see if:
|
||||||
- Are there tests covering the code paths added/modified?
|
- Are there tests covering the code paths added/modified?
|
||||||
- Do the tests and modifications make sense and seem appropriate?
|
- Do the tests and modifications make sense and seem appropriate?
|
||||||
- Add specific feedback, even on approvals, why it is accepted. eg "I like how you use a context manager there. 😄 "
|
- Add specific feedback, even on approvals, why it is accepted. eg "I like how you use a context manager there. 😄 "
|
||||||
- Also make sure they add a line to `docs/changelog.rst` to claim credit for their contribution.
|
- Also make sure they add a line to `docs/changelog.rst` to claim credit for their contribution.
|
||||||
|
|
||||||
**Release Manager:**
|
**Release Manager:**
|
||||||
@@ -83,39 +83,21 @@ The PR reviewer is a second set of eyes to see if:
|
|||||||
Running VCR's test suite
|
Running VCR's test suite
|
||||||
------------------------
|
------------------------
|
||||||
|
|
||||||
The tests are all run automatically on `Travis
|
The tests are all run automatically on `Github Actions CI <https://github.com/kevin1024/vcrpy/actions>`__,
|
||||||
CI <https://travis-ci.org/kevin1024/vcrpy>`__, but you can also run them
|
but you can also run them yourself using `pytest <http://pytest.org/>`__.
|
||||||
yourself using `pytest <http://pytest.org/>`__ and
|
|
||||||
`Tox <http://tox.testrun.org/>`__.
|
|
||||||
|
|
||||||
Tox will automatically run them in all environments VCR.py supports if they are available on your `PATH`. Alternatively you can use `tox-pyenv <https://pypi.org/project/tox-pyenv/>`_ with
|
In order for the boto3 tests to run, you will need an AWS key.
|
||||||
`pyenv <https://github.com/pyenv/pyenv>`_.
|
Refer to the `boto3
|
||||||
We recommend you read the documentation for each and see the section further below.
|
documentation <https://boto3.amazonaws.com/v1/documentation/api/latest/reference/services/index.html>`__
|
||||||
|
for how to set this up. I have marked the boto3 tests as optional in
|
||||||
The test suite is pretty big and slow, but you can tell tox to only run specific tests like this::
|
|
||||||
|
|
||||||
tox -e {pyNN}-{HTTP_LIBRARY} -- <pytest flags passed through>
|
|
||||||
|
|
||||||
tox -e py38-requests -- -v -k "'test_status_code or test_gzip'"
|
|
||||||
tox -e py38-requests -- -v --last-failed
|
|
||||||
|
|
||||||
This will run only tests that look like ``test_status_code`` or
|
|
||||||
``test_gzip`` in the test suite, and only in the python 3.8 environment
|
|
||||||
that has ``requests`` installed.
|
|
||||||
|
|
||||||
Also, in order for the boto tests to run, you will need an AWS key.
|
|
||||||
Refer to the `boto
|
|
||||||
documentation <https://boto.readthedocs.io/en/latest/getting_started.html>`__
|
|
||||||
for how to set this up. I have marked the boto tests as optional in
|
|
||||||
Travis so you don't have to worry about them failing if you submit a
|
Travis so you don't have to worry about them failing if you submit a
|
||||||
pull request.
|
pull request.
|
||||||
|
|
||||||
Using PyEnv with VCR's test suite
|
Using Pyenv with VCR's test suite
|
||||||
---------------------------------
|
---------------------------------
|
||||||
|
|
||||||
PyEnv is a tool for managing multiple installations of Python on your system.
|
Pyenv is a tool for managing multiple installations of Python on your system.
|
||||||
See the full documentation at their `github <https://github.com/pyenv/pyenv>`_
|
See the full documentation at their `github <https://github.com/pyenv/pyenv>`_
|
||||||
but we are also going to use `tox-pyenv <https://pypi.org/project/tox-pyenv/>`_
|
|
||||||
in this example::
|
in this example::
|
||||||
|
|
||||||
git clone https://github.com/pyenv/pyenv ~/.pyenv
|
git clone https://github.com/pyenv/pyenv ~/.pyenv
|
||||||
@@ -126,27 +108,21 @@ in this example::
|
|||||||
# Setup shim paths
|
# Setup shim paths
|
||||||
eval "$(pyenv init -)"
|
eval "$(pyenv init -)"
|
||||||
|
|
||||||
# Setup your local system tox tooling
|
|
||||||
pip3 install tox tox-pyenv
|
|
||||||
|
|
||||||
# Install supported versions (at time of writing), this does not activate them
|
# Install supported versions (at time of writing), this does not activate them
|
||||||
pyenv install 3.8.0 pypy3.8
|
pyenv install 3.12.0 pypy3.10
|
||||||
|
|
||||||
# This activates them
|
# This activates them
|
||||||
pyenv local 3.8.0 pypy3.8
|
pyenv local 3.12.0 pypy3.10
|
||||||
|
|
||||||
# Run the whole test suite
|
# Run the whole test suite
|
||||||
tox
|
pip install .[tests]
|
||||||
|
./runtests.sh
|
||||||
# Run the whole test suite or just part of it
|
|
||||||
tox -e lint
|
|
||||||
tox -e py38-requests
|
|
||||||
|
|
||||||
|
|
||||||
Troubleshooting on MacOSX
|
Troubleshooting on MacOSX
|
||||||
-------------------------
|
-------------------------
|
||||||
|
|
||||||
If you have this kind of error when running tox:
|
If you have this kind of error when running tests:
|
||||||
|
|
||||||
.. code:: python
|
.. code:: python
|
||||||
|
|
||||||
|
|||||||
@@ -9,12 +9,11 @@ with pip::
|
|||||||
Compatibility
|
Compatibility
|
||||||
-------------
|
-------------
|
||||||
|
|
||||||
VCR.py supports Python 3.8+, and `pypy <http://pypy.org>`__.
|
VCR.py supports Python 3.9+, and `pypy <http://pypy.org>`__.
|
||||||
|
|
||||||
The following HTTP libraries are supported:
|
The following HTTP libraries are supported:
|
||||||
|
|
||||||
- ``aiohttp``
|
- ``aiohttp``
|
||||||
- ``boto``
|
|
||||||
- ``boto3``
|
- ``boto3``
|
||||||
- ``http.client``
|
- ``http.client``
|
||||||
- ``httplib2``
|
- ``httplib2``
|
||||||
@@ -23,6 +22,7 @@ The following HTTP libraries are supported:
|
|||||||
- ``urllib2``
|
- ``urllib2``
|
||||||
- ``urllib3``
|
- ``urllib3``
|
||||||
- ``httpx``
|
- ``httpx``
|
||||||
|
- ``httpcore``
|
||||||
|
|
||||||
Speed
|
Speed
|
||||||
-----
|
-----
|
||||||
|
|||||||
@@ -1,2 +1,2 @@
|
|||||||
sphinx<8
|
sphinx<9
|
||||||
sphinx_rtd_theme==1.2.2
|
sphinx_rtd_theme==3.0.2
|
||||||
|
|||||||
@@ -1,18 +1,87 @@
|
|||||||
[tool.black]
|
[project]
|
||||||
line-length=110
|
name = "vcrpy"
|
||||||
|
authors = [{name = "Kevin McCarthy", email = "me@kevinmccarthy.org"}]
|
||||||
|
license = {text = "MIT"}
|
||||||
|
description = "Automatically mock your HTTP interactions to simplify and speed up testing"
|
||||||
|
classifiers = [
|
||||||
|
"Development Status :: 5 - Production/Stable",
|
||||||
|
"Environment :: Console",
|
||||||
|
"Intended Audience :: Developers",
|
||||||
|
"Programming Language :: Python",
|
||||||
|
"Programming Language :: Python :: 3",
|
||||||
|
"Programming Language :: Python :: 3.10",
|
||||||
|
"Programming Language :: Python :: 3.11",
|
||||||
|
"Programming Language :: Python :: 3.12",
|
||||||
|
"Programming Language :: Python :: 3.13",
|
||||||
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
|
"Programming Language :: Python :: Implementation :: PyPy",
|
||||||
|
"Topic :: Software Development :: Testing",
|
||||||
|
"Topic :: Internet :: WWW/HTTP",
|
||||||
|
"License :: OSI Approved :: MIT License",
|
||||||
|
]
|
||||||
|
urls = {Homepage = "https://github.com/kevin1024/vcrpy"}
|
||||||
|
requires-python = ">=3.10"
|
||||||
|
dependencies = [
|
||||||
|
"PyYAML",
|
||||||
|
"wrapt",
|
||||||
|
]
|
||||||
|
dynamic = ["version"]
|
||||||
|
|
||||||
|
[project.readme]
|
||||||
|
file = "README.rst"
|
||||||
|
content-type = "text/x-rst"
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
tests = [
|
||||||
|
"aiohttp",
|
||||||
|
"boto3",
|
||||||
|
"cryptography",
|
||||||
|
"httpbin",
|
||||||
|
"httpcore",
|
||||||
|
"httplib2",
|
||||||
|
"httpx",
|
||||||
|
"pycurl; platform_python_implementation !='PyPy'",
|
||||||
|
"pytest",
|
||||||
|
"pytest-aiohttp",
|
||||||
|
"pytest-asyncio",
|
||||||
|
"pytest-cov",
|
||||||
|
"pytest-httpbin",
|
||||||
|
"requests>=2.22.0",
|
||||||
|
"tornado",
|
||||||
|
"urllib3",
|
||||||
|
"werkzeug==2.0.3",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.setuptools]
|
||||||
|
include-package-data = false
|
||||||
|
|
||||||
|
[tool.setuptools.packages.find]
|
||||||
|
exclude = ["tests*"]
|
||||||
|
namespaces = false
|
||||||
|
|
||||||
|
[tool.setuptools.dynamic]
|
||||||
|
version = {attr = "vcr.__version__"}
|
||||||
|
|
||||||
|
[build-system]
|
||||||
|
requires = ["setuptools>=61.2"]
|
||||||
|
build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
[tool.codespell]
|
[tool.codespell]
|
||||||
skip = '.git,*.pdf,*.svg,.tox'
|
skip = '.git,*.pdf,*.svg,.tox'
|
||||||
ignore-regex = "\\\\[fnrstv]"
|
ignore-regex = "\\\\[fnrstv]"
|
||||||
#
|
|
||||||
# ignore-words-list = ''
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
[tool.pytest]
|
||||||
markers = [
|
addopts = ["--strict-config", "--strict-markers"]
|
||||||
"online",
|
asyncio_default_fixture_loop_scope = "session"
|
||||||
]
|
asyncio_default_test_loop_scope = "session"
|
||||||
|
markers = ["online"]
|
||||||
|
|
||||||
[tool.ruff]
|
[tool.ruff]
|
||||||
|
line-length = 110
|
||||||
|
target-version = "py310"
|
||||||
|
|
||||||
|
[tool.ruff.lint]
|
||||||
select = [
|
select = [
|
||||||
"B", # flake8-bugbear
|
"B", # flake8-bugbear
|
||||||
"C4", # flake8-comprehensions
|
"C4", # flake8-comprehensions
|
||||||
@@ -25,9 +94,8 @@ select = [
|
|||||||
"RUF", # Ruff-specific rules
|
"RUF", # Ruff-specific rules
|
||||||
"UP", # pyupgrade
|
"UP", # pyupgrade
|
||||||
"W", # pycodestyle warning
|
"W", # pycodestyle warning
|
||||||
|
"SIM",
|
||||||
]
|
]
|
||||||
line-length = 110
|
|
||||||
target-version = "py38"
|
|
||||||
|
|
||||||
[tool.ruff.isort]
|
[tool.ruff.lint.isort]
|
||||||
known-first-party = [ "vcr" ]
|
known-first-party = ["vcr"]
|
||||||
|
|||||||
@@ -1,7 +1,5 @@
|
|||||||
#!/bin/bash
|
#!/bin/bash
|
||||||
|
|
||||||
# https://blog.ionelmc.ro/2015/04/14/tox-tricks-and-patterns/#when-it-inevitably-leads-to-shell-scripts
|
# If you are getting an INVOCATION ERROR for this script then there is a good chance you are running on Windows.
|
||||||
# If you are getting an INVOCATION ERROR for this script then there is
|
# You can and should use WSL for running tests on Windows when it calls bash scripts.
|
||||||
# a good chance you are running on Windows.
|
|
||||||
# You can and should use WSL for running tox on Windows when it calls bash scripts.
|
|
||||||
REQUESTS_CA_BUNDLE=`python3 -m pytest_httpbin.certs` exec pytest "$@"
|
REQUESTS_CA_BUNDLE=`python3 -m pytest_httpbin.certs` exec pytest "$@"
|
||||||
|
|||||||
60
setup.cfg
60
setup.cfg
@@ -1,2 +1,58 @@
|
|||||||
[bdist_wheel]
|
[metadata]
|
||||||
universal=1
|
name = vcrpy
|
||||||
|
version = attr: vcr.__version__
|
||||||
|
author = Kevin McCarthy
|
||||||
|
author_email = me@kevinmccarthy.org
|
||||||
|
license = MIT
|
||||||
|
description = Automatically mock your HTTP interactions to simplify and speed up testing
|
||||||
|
url = https://github.com/kevin1024/vcrpy
|
||||||
|
long_description = file: README.rst
|
||||||
|
long_description_content_type = text/x-rst
|
||||||
|
classifiers =
|
||||||
|
Development Status :: 5 - Production/Stable
|
||||||
|
Environment :: Console
|
||||||
|
Intended Audience :: Developers
|
||||||
|
Programming Language :: Python
|
||||||
|
Programming Language :: Python :: 3
|
||||||
|
Programming Language :: Python :: 3.10
|
||||||
|
Programming Language :: Python :: 3.11
|
||||||
|
Programming Language :: Python :: 3.12
|
||||||
|
Programming Language :: Python :: 3.13
|
||||||
|
Programming Language :: Python :: 3 :: Only
|
||||||
|
Programming Language :: Python :: Implementation :: CPython
|
||||||
|
Programming Language :: Python :: Implementation :: PyPy
|
||||||
|
Topic :: Software Development :: Testing
|
||||||
|
Topic :: Internet :: WWW/HTTP
|
||||||
|
License :: OSI Approved :: MIT License
|
||||||
|
|
||||||
|
[options]
|
||||||
|
packages = find:
|
||||||
|
python_requires = >=3.10
|
||||||
|
install_requires =
|
||||||
|
PyYAML
|
||||||
|
wrapt
|
||||||
|
tests_require =
|
||||||
|
vcrpy[tests]
|
||||||
|
|
||||||
|
[options.packages.find]
|
||||||
|
exclude = tests*
|
||||||
|
|
||||||
|
[options.extras_require]
|
||||||
|
tests =
|
||||||
|
aiohttp
|
||||||
|
boto3
|
||||||
|
cryptography
|
||||||
|
httpbin
|
||||||
|
httpcore
|
||||||
|
httplib2
|
||||||
|
httpx
|
||||||
|
pycurl; platform_python_implementation !='PyPy'
|
||||||
|
pytest
|
||||||
|
pytest-aiohttp
|
||||||
|
pytest-asyncio
|
||||||
|
pytest-cov
|
||||||
|
pytest-httpbin
|
||||||
|
requests>=2.22.0
|
||||||
|
tornado
|
||||||
|
urllib3
|
||||||
|
werkzeug==2.0.3
|
||||||
|
|||||||
108
setup.py
108
setup.py
@@ -1,108 +0,0 @@
|
|||||||
#!/usr/bin/env python
|
|
||||||
|
|
||||||
import codecs
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from setuptools import find_packages, setup
|
|
||||||
from setuptools.command.test import test as TestCommand
|
|
||||||
|
|
||||||
long_description = open("README.rst").read()
|
|
||||||
here = os.path.abspath(os.path.dirname(__file__))
|
|
||||||
|
|
||||||
|
|
||||||
def read(*parts):
|
|
||||||
# intentionally *not* adding an encoding option to open, See:
|
|
||||||
# https://github.com/pypa/virtualenv/issues/201#issuecomment-3145690
|
|
||||||
with codecs.open(os.path.join(here, *parts), "r") as fp:
|
|
||||||
return fp.read()
|
|
||||||
|
|
||||||
|
|
||||||
def find_version(*file_paths):
|
|
||||||
version_file = read(*file_paths)
|
|
||||||
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", version_file, re.M)
|
|
||||||
if version_match:
|
|
||||||
return version_match.group(1)
|
|
||||||
|
|
||||||
raise RuntimeError("Unable to find version string.")
|
|
||||||
|
|
||||||
|
|
||||||
class PyTest(TestCommand):
|
|
||||||
def finalize_options(self):
|
|
||||||
TestCommand.finalize_options(self)
|
|
||||||
self.test_args = []
|
|
||||||
self.test_suite = True
|
|
||||||
|
|
||||||
def run_tests(self):
|
|
||||||
# import here, cause outside the eggs aren't loaded
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
errno = pytest.main(self.test_args)
|
|
||||||
sys.exit(errno)
|
|
||||||
|
|
||||||
|
|
||||||
install_requires = [
|
|
||||||
"PyYAML",
|
|
||||||
"wrapt",
|
|
||||||
"yarl",
|
|
||||||
# Support for urllib3 >=2 needs Python >=3.10
|
|
||||||
# so we need to block urllib3 >=2 for Python <3.10 for now.
|
|
||||||
# Note that vcrpy would work fine without any urllib3 around,
|
|
||||||
# so this block and the dependency can be dropped at some point
|
|
||||||
# in the future. For more Details:
|
|
||||||
# https://github.com/kevin1024/vcrpy/pull/699#issuecomment-1551439663
|
|
||||||
"urllib3 <2; python_version <'3.10'",
|
|
||||||
]
|
|
||||||
|
|
||||||
tests_require = [
|
|
||||||
"aiohttp",
|
|
||||||
"boto3",
|
|
||||||
"httplib2",
|
|
||||||
"httpx",
|
|
||||||
"pytest",
|
|
||||||
"pytest-aiohttp",
|
|
||||||
"pytest-httpbin",
|
|
||||||
"requests>=2.16.2",
|
|
||||||
"tornado",
|
|
||||||
# Needed to un-break httpbin 0.7.0. For httpbin >=0.7.1 and after,
|
|
||||||
# this pin and the dependency itself can be removed, provided
|
|
||||||
# that the related bug in httpbin has been fixed:
|
|
||||||
# https://github.com/kevin1024/vcrpy/issues/645#issuecomment-1562489489
|
|
||||||
# https://github.com/postmanlabs/httpbin/issues/673
|
|
||||||
# https://github.com/postmanlabs/httpbin/pull/674
|
|
||||||
"Werkzeug==2.0.3",
|
|
||||||
]
|
|
||||||
|
|
||||||
setup(
|
|
||||||
name="vcrpy",
|
|
||||||
version=find_version("vcr", "__init__.py"),
|
|
||||||
description=("Automatically mock your HTTP interactions to simplify and speed up testing"),
|
|
||||||
long_description=long_description,
|
|
||||||
long_description_content_type="text/x-rst",
|
|
||||||
author="Kevin McCarthy",
|
|
||||||
author_email="me@kevinmccarthy.org",
|
|
||||||
url="https://github.com/kevin1024/vcrpy",
|
|
||||||
packages=find_packages(exclude=["tests*"]),
|
|
||||||
python_requires=">=3.8",
|
|
||||||
install_requires=install_requires,
|
|
||||||
license="MIT",
|
|
||||||
tests_require=tests_require,
|
|
||||||
classifiers=[
|
|
||||||
"Development Status :: 5 - Production/Stable",
|
|
||||||
"Environment :: Console",
|
|
||||||
"Intended Audience :: Developers",
|
|
||||||
"Programming Language :: Python",
|
|
||||||
"Programming Language :: Python :: 3",
|
|
||||||
"Programming Language :: Python :: 3.8",
|
|
||||||
"Programming Language :: Python :: 3.9",
|
|
||||||
"Programming Language :: Python :: 3.10",
|
|
||||||
"Programming Language :: Python :: 3.11",
|
|
||||||
"Programming Language :: Python :: 3 :: Only",
|
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
|
||||||
"Programming Language :: Python :: Implementation :: PyPy",
|
|
||||||
"Topic :: Software Development :: Testing",
|
|
||||||
"Topic :: Internet :: WWW/HTTP",
|
|
||||||
"License :: OSI Approved :: MIT License",
|
|
||||||
],
|
|
||||||
)
|
|
||||||
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
6
tests/fixtures/migration/new_cassette.json
vendored
6
tests/fixtures/migration/new_cassette.json
vendored
@@ -15,9 +15,9 @@
|
|||||||
},
|
},
|
||||||
"response": {
|
"response": {
|
||||||
"status": {
|
"status": {
|
||||||
"message": "OK",
|
"message": "OK",
|
||||||
"code": 200
|
"code": 200
|
||||||
},
|
},
|
||||||
"headers": {
|
"headers": {
|
||||||
"access-control-allow-origin": ["*"],
|
"access-control-allow-origin": ["*"],
|
||||||
"content-type": ["application/json"],
|
"content-type": ["application/json"],
|
||||||
@@ -25,7 +25,7 @@
|
|||||||
"server": ["gunicorn/0.17.4"],
|
"server": ["gunicorn/0.17.4"],
|
||||||
"content-length": ["32"],
|
"content-length": ["32"],
|
||||||
"connection": ["keep-alive"]
|
"connection": ["keep-alive"]
|
||||||
},
|
},
|
||||||
"body": {
|
"body": {
|
||||||
"string": "{\n \"origin\": \"217.122.164.194\"\n}"
|
"string": "{\n \"origin\": \"217.122.164.194\"\n}"
|
||||||
}
|
}
|
||||||
|
|||||||
2
tests/fixtures/migration/new_cassette.yaml
vendored
2
tests/fixtures/migration/new_cassette.yaml
vendored
@@ -2,7 +2,7 @@ version: 1
|
|||||||
interactions:
|
interactions:
|
||||||
- request:
|
- request:
|
||||||
body: null
|
body: null
|
||||||
headers:
|
headers:
|
||||||
accept: ['*/*']
|
accept: ['*/*']
|
||||||
accept-encoding: ['gzip, deflate, compress']
|
accept-encoding: ['gzip, deflate, compress']
|
||||||
user-agent: ['python-requests/2.2.1 CPython/2.6.1 Darwin/10.8.0']
|
user-agent: ['python-requests/2.2.1 CPython/2.6.1 Darwin/10.8.0']
|
||||||
|
|||||||
34
tests/fixtures/migration/old_cassette.json
vendored
34
tests/fixtures/migration/old_cassette.json
vendored
@@ -1,31 +1,31 @@
|
|||||||
[
|
[
|
||||||
{
|
{
|
||||||
"request": {
|
"request": {
|
||||||
"body": null,
|
"body": null,
|
||||||
"protocol": "http",
|
"protocol": "http",
|
||||||
"method": "GET",
|
"method": "GET",
|
||||||
"headers": {
|
"headers": {
|
||||||
"accept-encoding": "gzip, deflate, compress",
|
"accept-encoding": "gzip, deflate, compress",
|
||||||
"accept": "*/*",
|
"accept": "*/*",
|
||||||
"user-agent": "python-requests/2.2.1 CPython/2.6.1 Darwin/10.8.0"
|
"user-agent": "python-requests/2.2.1 CPython/2.6.1 Darwin/10.8.0"
|
||||||
},
|
},
|
||||||
"host": "httpbin.org",
|
"host": "httpbin.org",
|
||||||
"path": "/ip",
|
"path": "/ip",
|
||||||
"port": 80
|
"port": 80
|
||||||
},
|
},
|
||||||
"response": {
|
"response": {
|
||||||
"status": {
|
"status": {
|
||||||
"message": "OK",
|
"message": "OK",
|
||||||
"code": 200
|
"code": 200
|
||||||
},
|
},
|
||||||
"headers": [
|
"headers": [
|
||||||
"access-control-allow-origin: *\r\n",
|
"access-control-allow-origin: *\r\n",
|
||||||
"content-type: application/json\r\n",
|
"content-type: application/json\r\n",
|
||||||
"date: Mon, 21 Apr 2014 23:13:40 GMT\r\n",
|
"date: Mon, 21 Apr 2014 23:13:40 GMT\r\n",
|
||||||
"server: gunicorn/0.17.4\r\n",
|
"server: gunicorn/0.17.4\r\n",
|
||||||
"content-length: 32\r\n",
|
"content-length: 32\r\n",
|
||||||
"connection: keep-alive\r\n"
|
"connection: keep-alive\r\n"
|
||||||
],
|
],
|
||||||
"body": {
|
"body": {
|
||||||
"string": "{\n \"origin\": \"217.122.164.194\"\n}"
|
"string": "{\n \"origin\": \"217.122.164.194\"\n}"
|
||||||
}
|
}
|
||||||
|
|||||||
2
tests/fixtures/wild/domain_redirect.yaml
vendored
2
tests/fixtures/wild/domain_redirect.yaml
vendored
@@ -10,7 +10,7 @@ interactions:
|
|||||||
uri: http://seomoz.org/
|
uri: http://seomoz.org/
|
||||||
response:
|
response:
|
||||||
body: {string: ''}
|
body: {string: ''}
|
||||||
headers:
|
headers:
|
||||||
Location: ['http://moz.com/']
|
Location: ['http://moz.com/']
|
||||||
Server: ['BigIP']
|
Server: ['BigIP']
|
||||||
Connection: ['Keep-Alive']
|
Connection: ['Keep-Alive']
|
||||||
|
|||||||
@@ -5,24 +5,24 @@ import aiohttp
|
|||||||
|
|
||||||
|
|
||||||
async def aiohttp_request(loop, method, url, output="text", encoding="utf-8", content_type=None, **kwargs):
|
async def aiohttp_request(loop, method, url, output="text", encoding="utf-8", content_type=None, **kwargs):
|
||||||
session = aiohttp.ClientSession(loop=loop)
|
async with aiohttp.ClientSession(loop=loop) as session:
|
||||||
response_ctx = session.request(method, url, **kwargs)
|
response_ctx = session.request(method, url, **kwargs)
|
||||||
|
|
||||||
response = await response_ctx.__aenter__()
|
response = await response_ctx.__aenter__()
|
||||||
if output == "text":
|
if output == "text":
|
||||||
content = await response.text()
|
content = await response.text()
|
||||||
elif output == "json":
|
elif output == "json":
|
||||||
content_type = content_type or "application/json"
|
content_type = content_type or "application/json"
|
||||||
content = await response.json(encoding=encoding, content_type=content_type)
|
content = await response.json(encoding=encoding, content_type=content_type)
|
||||||
elif output == "raw":
|
elif output == "raw":
|
||||||
content = await response.read()
|
content = await response.read()
|
||||||
elif output == "stream":
|
elif output == "stream":
|
||||||
content = await response.content.read()
|
content = await response.content.read()
|
||||||
|
|
||||||
response_ctx._resp.close()
|
response_ctx._resp.close()
|
||||||
await session.close()
|
await session.close()
|
||||||
|
|
||||||
return response, content
|
return response, content
|
||||||
|
|
||||||
|
|
||||||
def aiohttp_app():
|
def aiohttp_app():
|
||||||
|
|||||||
41
tests/integration/cassettes/gzip_httpx_old_format.yaml
Normal file
41
tests/integration/cassettes/gzip_httpx_old_format.yaml
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: ''
|
||||||
|
headers:
|
||||||
|
accept:
|
||||||
|
- '*/*'
|
||||||
|
accept-encoding:
|
||||||
|
- gzip, deflate, br
|
||||||
|
connection:
|
||||||
|
- keep-alive
|
||||||
|
host:
|
||||||
|
- httpbin.org
|
||||||
|
user-agent:
|
||||||
|
- python-httpx/0.23.0
|
||||||
|
method: GET
|
||||||
|
uri: https://httpbin.org/gzip
|
||||||
|
response:
|
||||||
|
content: "{\n \"gzipped\": true, \n \"headers\": {\n \"Accept\": \"*/*\",
|
||||||
|
\n \"Accept-Encoding\": \"gzip, deflate, br\", \n \"Host\": \"httpbin.org\",
|
||||||
|
\n \"User-Agent\": \"python-httpx/0.23.0\", \n \"X-Amzn-Trace-Id\": \"Root=1-62a62a8d-5f39b5c50c744da821d6ea99\"\n
|
||||||
|
\ }, \n \"method\": \"GET\", \n \"origin\": \"146.200.25.115\"\n}\n"
|
||||||
|
headers:
|
||||||
|
Access-Control-Allow-Credentials:
|
||||||
|
- 'true'
|
||||||
|
Access-Control-Allow-Origin:
|
||||||
|
- '*'
|
||||||
|
Connection:
|
||||||
|
- keep-alive
|
||||||
|
Content-Encoding:
|
||||||
|
- gzip
|
||||||
|
Content-Length:
|
||||||
|
- '230'
|
||||||
|
Content-Type:
|
||||||
|
- application/json
|
||||||
|
Date:
|
||||||
|
- Sun, 12 Jun 2022 18:03:57 GMT
|
||||||
|
Server:
|
||||||
|
- gunicorn/19.9.0
|
||||||
|
http_version: HTTP/1.1
|
||||||
|
status_code: 200
|
||||||
|
version: 1
|
||||||
42
tests/integration/cassettes/gzip_requests.yaml
Normal file
42
tests/integration/cassettes/gzip_requests.yaml
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
interactions:
|
||||||
|
- request:
|
||||||
|
body: null
|
||||||
|
headers:
|
||||||
|
Accept:
|
||||||
|
- '*/*'
|
||||||
|
Accept-Encoding:
|
||||||
|
- gzip, deflate, br
|
||||||
|
Connection:
|
||||||
|
- keep-alive
|
||||||
|
User-Agent:
|
||||||
|
- python-requests/2.28.0
|
||||||
|
method: GET
|
||||||
|
uri: https://httpbin.org/gzip
|
||||||
|
response:
|
||||||
|
body:
|
||||||
|
string: !!binary |
|
||||||
|
H4sIAKwrpmIA/z2OSwrCMBCG956izLIkfQSxkl2RogfQA9R2bIM1iUkqaOndnYDIrGa+/zELDB9l
|
||||||
|
LfYgg5uRwYhtj86DXKDuOrQBJKR5Cuy38kZ3pld6oHu0sqTH29QGZMnVkepgtMYuKKNJcEe0vJ3U
|
||||||
|
C4mcjI9hpaiygqaUW7ETFYGLR8frAXXE9h1Go7nD54w++FxkYp8VsDJ4IBH6E47NmVzGqUHFkn8g
|
||||||
|
rJsvp2omYs8AAAA=
|
||||||
|
headers:
|
||||||
|
Access-Control-Allow-Credentials:
|
||||||
|
- 'true'
|
||||||
|
Access-Control-Allow-Origin:
|
||||||
|
- '*'
|
||||||
|
Connection:
|
||||||
|
- Close
|
||||||
|
Content-Encoding:
|
||||||
|
- gzip
|
||||||
|
Content-Length:
|
||||||
|
- '182'
|
||||||
|
Content-Type:
|
||||||
|
- application/json
|
||||||
|
Date:
|
||||||
|
- Sun, 12 Jun 2022 18:08:44 GMT
|
||||||
|
Server:
|
||||||
|
- Pytest-HTTPBIN/0.1.0
|
||||||
|
status:
|
||||||
|
code: 200
|
||||||
|
message: great
|
||||||
|
version: 1
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
import os
|
|
||||||
import ssl
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(params=["https", "http"])
|
|
||||||
def scheme(request):
|
|
||||||
"""Fixture that returns both http and https."""
|
|
||||||
return request.param
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mockbin(scheme):
|
|
||||||
return scheme + "://mockbin.org"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mockbin_request_url(mockbin):
|
|
||||||
return mockbin + "/request"
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def httpbin_ssl_context():
|
|
||||||
ssl_ca_location = os.environ["REQUESTS_CA_BUNDLE"]
|
|
||||||
ssl_cert_location = os.environ["REQUESTS_CA_BUNDLE"].replace("cacert.pem", "cert.pem")
|
|
||||||
ssl_key_location = os.environ["REQUESTS_CA_BUNDLE"].replace("cacert.pem", "key.pem")
|
|
||||||
|
|
||||||
ssl_context = ssl.create_default_context(cafile=ssl_ca_location)
|
|
||||||
ssl_context.load_cert_chain(ssl_cert_location, ssl_key_location)
|
|
||||||
|
|
||||||
return ssl_context
|
|
||||||
@@ -1,8 +1,11 @@
|
|||||||
import contextlib
|
import io
|
||||||
import logging
|
import logging
|
||||||
|
import ssl
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
|
import pytest_httpbin.certs
|
||||||
|
import yarl
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
@@ -12,12 +15,14 @@ aiohttp = pytest.importorskip("aiohttp")
|
|||||||
|
|
||||||
from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402
|
from .aiohttp_utils import aiohttp_app, aiohttp_request # noqa: E402
|
||||||
|
|
||||||
|
HTTPBIN_SSL_CONTEXT = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
|
||||||
|
|
||||||
|
|
||||||
def run_in_loop(fn):
|
def run_in_loop(fn):
|
||||||
with contextlib.closing(asyncio.new_event_loop()) as loop:
|
async def wrapper():
|
||||||
asyncio.set_event_loop(loop)
|
return await fn(asyncio.get_running_loop())
|
||||||
task = loop.create_task(fn(loop))
|
|
||||||
return loop.run_until_complete(task)
|
return asyncio.run(wrapper())
|
||||||
|
|
||||||
|
|
||||||
def request(method, url, output="text", **kwargs):
|
def request(method, url, output="text", **kwargs):
|
||||||
@@ -36,8 +41,8 @@ def post(url, output="text", **kwargs):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_status(tmpdir, mockbin_request_url):
|
def test_status(tmpdir, httpbin):
|
||||||
url = mockbin_request_url
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
|
||||||
response, _ = get(url)
|
response, _ = get(url)
|
||||||
@@ -50,8 +55,8 @@ def test_status(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@pytest.mark.parametrize("auth", [None, aiohttp.BasicAuth("vcrpy", "test")])
|
@pytest.mark.parametrize("auth", [None, aiohttp.BasicAuth("vcrpy", "test")])
|
||||||
def test_headers(tmpdir, auth, mockbin_request_url):
|
def test_headers(tmpdir, auth, httpbin):
|
||||||
url = mockbin_request_url
|
url = httpbin.url
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
||||||
response, _ = get(url, auth=auth)
|
response, _ = get(url, auth=auth)
|
||||||
|
|
||||||
@@ -67,8 +72,8 @@ def test_headers(tmpdir, auth, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_case_insensitive_headers(tmpdir, mockbin_request_url):
|
def test_case_insensitive_headers(tmpdir, httpbin):
|
||||||
url = mockbin_request_url
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
|
||||||
_, _ = get(url)
|
_, _ = get(url)
|
||||||
@@ -81,8 +86,8 @@ def test_case_insensitive_headers(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_text(tmpdir, mockbin_request_url):
|
def test_text(tmpdir, httpbin):
|
||||||
url = mockbin_request_url
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
|
||||||
_, response_text = get(url)
|
_, response_text = get(url)
|
||||||
@@ -94,8 +99,8 @@ def test_text(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_json(tmpdir, mockbin_request_url):
|
def test_json(tmpdir, httpbin):
|
||||||
url = mockbin_request_url
|
url = httpbin.url + "/json"
|
||||||
headers = {"Content-Type": "application/json"}
|
headers = {"Content-Type": "application/json"}
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
|
||||||
@@ -108,8 +113,8 @@ def test_json(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_binary(tmpdir, mockbin_request_url):
|
def test_binary(tmpdir, httpbin):
|
||||||
url = mockbin_request_url + "/image/png"
|
url = httpbin.url + "/image/png"
|
||||||
with vcr.use_cassette(str(tmpdir.join("binary.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("binary.yaml"))):
|
||||||
_, response_binary = get(url, output="raw")
|
_, response_binary = get(url, output="raw")
|
||||||
|
|
||||||
@@ -120,8 +125,8 @@ def test_binary(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_stream(tmpdir, mockbin_request_url):
|
def test_stream(tmpdir, httpbin):
|
||||||
url = mockbin_request_url
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("stream.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("stream.yaml"))):
|
||||||
_, body = get(url, output="raw") # Do not use stream here, as the stream is exhausted by vcr
|
_, body = get(url, output="raw") # Do not use stream here, as the stream is exhausted by vcr
|
||||||
@@ -132,19 +137,29 @@ def test_stream(tmpdir, mockbin_request_url):
|
|||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
POST_DATA = {"key1": "value1", "key2": "value2"}
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@pytest.mark.parametrize("body", ["data", "json"])
|
@pytest.mark.parametrize(
|
||||||
def test_post(tmpdir, body, caplog, mockbin_request_url):
|
"kwargs",
|
||||||
|
[
|
||||||
|
{"data": POST_DATA},
|
||||||
|
{"json": POST_DATA},
|
||||||
|
{"data": POST_DATA, "json": None},
|
||||||
|
{"data": None, "json": POST_DATA},
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_post(tmpdir, kwargs, caplog, httpbin):
|
||||||
caplog.set_level(logging.INFO)
|
caplog.set_level(logging.INFO)
|
||||||
data = {"key1": "value1", "key2": "value2"}
|
url = httpbin.url + "/post"
|
||||||
url = mockbin_request_url
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
|
||||||
_, response_json = post(url, **{body: data})
|
_, response_json = post(url, **kwargs)
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post.yaml"))) as cassette:
|
with vcr.use_cassette(str(tmpdir.join("post.yaml"))) as cassette:
|
||||||
request = cassette.requests[0]
|
request = cassette.requests[0]
|
||||||
assert request.body == data
|
assert request.body == POST_DATA
|
||||||
_, cassette_response_json = post(url, **{body: data})
|
_, cassette_response_json = post(url, **kwargs)
|
||||||
assert cassette_response_json == response_json
|
assert cassette_response_json == response_json
|
||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
@@ -159,14 +174,25 @@ def test_post(tmpdir, body, caplog, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_params(tmpdir, mockbin_request_url):
|
def test_post_data_plus_json_error(tmpdir, httpbin):
|
||||||
url = mockbin_request_url + "?d=d"
|
url = httpbin.url + "/post"
|
||||||
|
with (
|
||||||
|
vcr.use_cassette(str(tmpdir.join("post.yaml"))) as cassette,
|
||||||
|
pytest.raises(ValueError, match="data and json parameters can not be used at the same time"),
|
||||||
|
):
|
||||||
|
post(url, data=POST_DATA, json=POST_DATA)
|
||||||
|
assert cassette.requests == []
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.online
|
||||||
|
def test_params(tmpdir, httpbin):
|
||||||
|
url = httpbin.url + "/get?d=d"
|
||||||
headers = {"Content-Type": "application/json"}
|
headers = {"Content-Type": "application/json"}
|
||||||
params = {"a": 1, "b": 2, "c": "c"}
|
params = {"a": 1, "b": 2, "c": "c"}
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
||||||
_, response_json = get(url, output="json", params=params, headers=headers)
|
_, response_json = get(url, output="json", params=params, headers=headers)
|
||||||
assert response_json["queryString"] == {"a": "1", "b": "2", "c": "c", "d": "d"}
|
assert response_json["args"] == {"a": "1", "b": "2", "c": "c", "d": "d"}
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
||||||
_, cassette_response_json = get(url, output="json", params=params, headers=headers)
|
_, cassette_response_json = get(url, output="json", params=params, headers=headers)
|
||||||
@@ -175,8 +201,8 @@ def test_params(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_params_same_url_distinct_params(tmpdir, mockbin_request_url):
|
def test_params_same_url_distinct_params(tmpdir, httpbin):
|
||||||
url = mockbin_request_url
|
url = httpbin.url + "/json"
|
||||||
headers = {"Content-Type": "application/json"}
|
headers = {"Content-Type": "application/json"}
|
||||||
params = {"a": 1, "b": 2, "c": "c"}
|
params = {"a": 1, "b": 2, "c": "c"}
|
||||||
|
|
||||||
@@ -189,14 +215,16 @@ def test_params_same_url_distinct_params(tmpdir, mockbin_request_url):
|
|||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
other_params = {"other": "params"}
|
other_params = {"other": "params"}
|
||||||
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
with (
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette,
|
||||||
get(url, output="text", params=other_params)
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
|
):
|
||||||
|
get(url, output="text", params=other_params)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_params_on_url(tmpdir, mockbin_request_url):
|
def test_params_on_url(tmpdir, httpbin):
|
||||||
url = mockbin_request_url + "?a=1&b=foo"
|
url = httpbin.url + "/get?a=1&b=foo"
|
||||||
headers = {"Content-Type": "application/json"}
|
headers = {"Content-Type": "application/json"}
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
||||||
@@ -261,8 +289,8 @@ def test_aiohttp_test_client_json(aiohttp_client, tmpdir):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_redirect(tmpdir, mockbin):
|
def test_redirect(tmpdir, httpbin):
|
||||||
url = mockbin + "/redirect/302/2"
|
url = httpbin.url + "/redirect/2"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("redirect.yaml"))):
|
||||||
response, _ = get(url)
|
response, _ = get(url)
|
||||||
@@ -284,9 +312,9 @@ def test_redirect(tmpdir, mockbin):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_not_modified(tmpdir, mockbin):
|
def test_not_modified(tmpdir, httpbin):
|
||||||
"""It doesn't try to redirect on 304"""
|
"""It doesn't try to redirect on 304"""
|
||||||
url = mockbin + "/status/304"
|
url = httpbin.url + "/status/304"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("not_modified.yaml"))):
|
||||||
response, _ = get(url)
|
response, _ = get(url)
|
||||||
@@ -302,13 +330,13 @@ def test_not_modified(tmpdir, mockbin):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_double_requests(tmpdir, mockbin_request_url):
|
def test_double_requests(tmpdir, httpbin):
|
||||||
"""We should capture, record, and replay all requests and response chains,
|
"""We should capture, record, and replay all requests and response chains,
|
||||||
even if there are duplicate ones.
|
even if there are duplicate ones.
|
||||||
|
|
||||||
We should replay in the order we saw them.
|
We should replay in the order we saw them.
|
||||||
"""
|
"""
|
||||||
url = mockbin_request_url
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("text.yaml"))):
|
||||||
_, response_text1 = get(url, output="text")
|
_, response_text1 = get(url, output="text")
|
||||||
@@ -333,7 +361,7 @@ def test_double_requests(tmpdir, mockbin_request_url):
|
|||||||
assert cassette.play_count == 2
|
assert cassette.play_count == 2
|
||||||
|
|
||||||
|
|
||||||
def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
def test_cookies(httpbin_both, tmpdir):
|
||||||
async def run(loop):
|
async def run(loop):
|
||||||
cookies_url = httpbin_both.url + (
|
cookies_url = httpbin_both.url + (
|
||||||
"/response-headers?"
|
"/response-headers?"
|
||||||
@@ -348,12 +376,12 @@ def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# ------------------------- Record -------------------------- #
|
# ------------------------- Record -------------------------- #
|
||||||
with vcr.use_cassette(tmp) as cassette:
|
with vcr.use_cassette(tmp) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
home_resp = await session.get(
|
home_resp = await session.get(
|
||||||
home_url,
|
home_url,
|
||||||
cookies=req_cookies,
|
cookies=req_cookies,
|
||||||
headers=req_headers,
|
headers=req_headers,
|
||||||
ssl=httpbin_ssl_context,
|
ssl=HTTPBIN_SSL_CONTEXT,
|
||||||
)
|
)
|
||||||
assert cassette.play_count == 0
|
assert cassette.play_count == 0
|
||||||
assert_responses(cookies_resp, home_resp)
|
assert_responses(cookies_resp, home_resp)
|
||||||
@@ -361,12 +389,12 @@ def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# -------------------------- Play --------------------------- #
|
# -------------------------- Play --------------------------- #
|
||||||
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
home_resp = await session.get(
|
home_resp = await session.get(
|
||||||
home_url,
|
home_url,
|
||||||
cookies=req_cookies,
|
cookies=req_cookies,
|
||||||
headers=req_headers,
|
headers=req_headers,
|
||||||
ssl=httpbin_ssl_context,
|
ssl=HTTPBIN_SSL_CONTEXT,
|
||||||
)
|
)
|
||||||
assert cassette.play_count == 2
|
assert cassette.play_count == 2
|
||||||
assert_responses(cookies_resp, home_resp)
|
assert_responses(cookies_resp, home_resp)
|
||||||
@@ -383,7 +411,7 @@ def test_cookies(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
run_in_loop(run)
|
run_in_loop(run)
|
||||||
|
|
||||||
|
|
||||||
def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
def test_cookies_redirect(httpbin_both, tmpdir):
|
||||||
async def run(loop):
|
async def run(loop):
|
||||||
# Sets cookie as provided by the query string and redirects
|
# Sets cookie as provided by the query string and redirects
|
||||||
cookies_url = httpbin_both.url + "/cookies/set?Cookie_1=Val_1"
|
cookies_url = httpbin_both.url + "/cookies/set?Cookie_1=Val_1"
|
||||||
@@ -392,9 +420,9 @@ def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# ------------------------- Record -------------------------- #
|
# ------------------------- Record -------------------------- #
|
||||||
with vcr.use_cassette(tmp) as cassette:
|
with vcr.use_cassette(tmp) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
assert not cookies_resp.cookies
|
assert not cookies_resp.cookies
|
||||||
cookies = session.cookie_jar.filter_cookies(cookies_url)
|
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
|
||||||
assert cookies["Cookie_1"].value == "Val_1"
|
assert cookies["Cookie_1"].value == "Val_1"
|
||||||
assert cassette.play_count == 0
|
assert cassette.play_count == 0
|
||||||
|
|
||||||
@@ -403,9 +431,9 @@ def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
# -------------------------- Play --------------------------- #
|
# -------------------------- Play --------------------------- #
|
||||||
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
with vcr.use_cassette(tmp, record_mode=vcr.mode.NONE) as cassette:
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
assert not cookies_resp.cookies
|
assert not cookies_resp.cookies
|
||||||
cookies = session.cookie_jar.filter_cookies(cookies_url)
|
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
|
||||||
assert cookies["Cookie_1"].value == "Val_1"
|
assert cookies["Cookie_1"].value == "Val_1"
|
||||||
assert cassette.play_count == 2
|
assert cassette.play_count == 2
|
||||||
|
|
||||||
@@ -417,27 +445,27 @@ def test_cookies_redirect(httpbin_both, httpbin_ssl_context, tmpdir):
|
|||||||
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
|
"Cookie_1=Val_1; Expires=Wed, 21 Oct 2015 07:28:00 GMT",
|
||||||
]
|
]
|
||||||
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
async with aiohttp.ClientSession(loop=loop, cookie_jar=aiohttp.CookieJar(unsafe=True)) as session:
|
||||||
cookies_resp = await session.get(cookies_url, ssl=httpbin_ssl_context)
|
cookies_resp = await session.get(cookies_url, ssl=HTTPBIN_SSL_CONTEXT)
|
||||||
assert not cookies_resp.cookies
|
assert not cookies_resp.cookies
|
||||||
cookies = session.cookie_jar.filter_cookies(cookies_url)
|
cookies = session.cookie_jar.filter_cookies(yarl.URL(cookies_url))
|
||||||
assert cookies["Cookie_1"].value == "Val_1"
|
assert cookies["Cookie_1"].value == "Val_1"
|
||||||
|
|
||||||
run_in_loop(run)
|
run_in_loop(run)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_not_allow_redirects(tmpdir, mockbin):
|
def test_not_allow_redirects(tmpdir, httpbin):
|
||||||
url = mockbin + "/redirect/308/5"
|
url = httpbin + "/redirect-to?url=.%2F&status_code=308"
|
||||||
path = str(tmpdir.join("redirects.yaml"))
|
path = str(tmpdir.join("redirects.yaml"))
|
||||||
|
|
||||||
with vcr.use_cassette(path):
|
with vcr.use_cassette(path):
|
||||||
response, _ = get(url, allow_redirects=False)
|
response, _ = get(url, allow_redirects=False)
|
||||||
assert response.url.path == "/redirect/308/5"
|
assert response.url.path == "/redirect-to"
|
||||||
assert response.status == 308
|
assert response.status == 308
|
||||||
|
|
||||||
with vcr.use_cassette(path) as cassette:
|
with vcr.use_cassette(path) as cassette:
|
||||||
response, _ = get(url, allow_redirects=False)
|
response, _ = get(url, allow_redirects=False)
|
||||||
assert response.url.path == "/redirect/308/5"
|
assert response.url.path == "/redirect-to"
|
||||||
assert response.status == 308
|
assert response.status == 308
|
||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
@@ -456,3 +484,19 @@ def test_filter_query_parameters(tmpdir, httpbin):
|
|||||||
cassette_content = f.read()
|
cassette_content = f.read()
|
||||||
assert "password" not in cassette_content
|
assert "password" not in cassette_content
|
||||||
assert "secret" not in cassette_content
|
assert "secret" not in cassette_content
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.online
|
||||||
|
def test_use_cassette_with_io(tmpdir, caplog, httpbin):
|
||||||
|
url = httpbin.url + "/post"
|
||||||
|
|
||||||
|
# test without cassettes
|
||||||
|
data = io.BytesIO(b"hello")
|
||||||
|
_, response_json = request("POST", url, output="json", data=data)
|
||||||
|
assert response_json["data"] == "hello"
|
||||||
|
|
||||||
|
# test with cassettes
|
||||||
|
data = io.BytesIO(b"hello")
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("post.yaml"))):
|
||||||
|
_, response_json = request("POST", url, output="json", data=data)
|
||||||
|
assert response_json["data"] == "hello"
|
||||||
|
|||||||
@@ -39,7 +39,7 @@ def test_basic_json_use(tmpdir, httpbin):
|
|||||||
test_fixture = str(tmpdir.join("synopsis.json"))
|
test_fixture = str(tmpdir.join("synopsis.json"))
|
||||||
with vcr.use_cassette(test_fixture, serializer="json"):
|
with vcr.use_cassette(test_fixture, serializer="json"):
|
||||||
response = urlopen(httpbin.url).read()
|
response = urlopen(httpbin.url).read()
|
||||||
assert b"difficult sometimes" in response
|
assert b"HTTP Request & Response Service" in response
|
||||||
|
|
||||||
|
|
||||||
def test_patched_content(tmpdir, httpbin):
|
def test_patched_content(tmpdir, httpbin):
|
||||||
|
|||||||
@@ -1,82 +0,0 @@
|
|||||||
from configparser import DuplicateSectionError
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
import vcr
|
|
||||||
|
|
||||||
boto = pytest.importorskip("boto")
|
|
||||||
|
|
||||||
import boto # noqa
|
|
||||||
import boto.iam # noqa
|
|
||||||
from boto.s3.connection import S3Connection # noqa
|
|
||||||
from boto.s3.key import Key # noqa
|
|
||||||
|
|
||||||
|
|
||||||
def test_boto_stubs(tmpdir):
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("boto-stubs.yml"))):
|
|
||||||
# Perform the imports within the patched context so that
|
|
||||||
# CertValidatingHTTPSConnection refers to the patched version.
|
|
||||||
from boto.https_connection import CertValidatingHTTPSConnection
|
|
||||||
|
|
||||||
from vcr.stubs.boto_stubs import VCRCertValidatingHTTPSConnection
|
|
||||||
|
|
||||||
# Prove that the class was patched by the stub and that we can instantiate it.
|
|
||||||
assert issubclass(CertValidatingHTTPSConnection, VCRCertValidatingHTTPSConnection)
|
|
||||||
CertValidatingHTTPSConnection("hostname.does.not.matter")
|
|
||||||
|
|
||||||
|
|
||||||
def test_boto_without_vcr():
|
|
||||||
s3_conn = S3Connection()
|
|
||||||
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
|
|
||||||
k = Key(s3_bucket)
|
|
||||||
k.key = "test.txt"
|
|
||||||
k.set_contents_from_string("hello world i am a string")
|
|
||||||
|
|
||||||
|
|
||||||
def test_boto_medium_difficulty(tmpdir):
|
|
||||||
s3_conn = S3Connection()
|
|
||||||
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("boto-medium.yml"))):
|
|
||||||
k = Key(s3_bucket)
|
|
||||||
k.key = "test.txt"
|
|
||||||
k.set_contents_from_string("hello world i am a string")
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("boto-medium.yml"))):
|
|
||||||
k = Key(s3_bucket)
|
|
||||||
k.key = "test.txt"
|
|
||||||
k.set_contents_from_string("hello world i am a string")
|
|
||||||
|
|
||||||
|
|
||||||
def test_boto_hardcore_mode(tmpdir):
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("boto-hardcore.yml"))):
|
|
||||||
s3_conn = S3Connection()
|
|
||||||
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
|
|
||||||
k = Key(s3_bucket)
|
|
||||||
k.key = "test.txt"
|
|
||||||
k.set_contents_from_string("hello world i am a string")
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("boto-hardcore.yml"))):
|
|
||||||
s3_conn = S3Connection()
|
|
||||||
s3_bucket = s3_conn.get_bucket("boto-demo-1394171994") # a bucket you can access
|
|
||||||
k = Key(s3_bucket)
|
|
||||||
k.key = "test.txt"
|
|
||||||
k.set_contents_from_string("hello world i am a string")
|
|
||||||
|
|
||||||
|
|
||||||
def test_boto_iam(tmpdir):
|
|
||||||
try:
|
|
||||||
boto.config.add_section("Boto")
|
|
||||||
except DuplicateSectionError:
|
|
||||||
pass
|
|
||||||
# Ensure that boto uses HTTPS
|
|
||||||
boto.config.set("Boto", "is_secure", "true")
|
|
||||||
# Ensure that boto uses CertValidatingHTTPSConnection
|
|
||||||
boto.config.set("Boto", "https_validate_certificates", "true")
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("boto-iam.yml"))):
|
|
||||||
iam_conn = boto.iam.connect_to_region("universal")
|
|
||||||
iam_conn.get_all_users()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("boto-iam.yml"))):
|
|
||||||
iam_conn = boto.iam.connect_to_region("universal")
|
|
||||||
iam_conn.get_all_users()
|
|
||||||
@@ -5,15 +5,16 @@ from urllib.request import urlopen
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
from vcr.cassette import Cassette
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_set_serializer_default_config(tmpdir, mockbin_request_url):
|
def test_set_serializer_default_config(tmpdir, httpbin):
|
||||||
my_vcr = vcr.VCR(serializer="json")
|
my_vcr = vcr.VCR(serializer="json")
|
||||||
|
|
||||||
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
|
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
|
||||||
assert my_vcr.serializer == "json"
|
assert my_vcr.serializer == "json"
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
with open(str(tmpdir.join("test.json"))) as f:
|
with open(str(tmpdir.join("test.json"))) as f:
|
||||||
file_content = f.read()
|
file_content = f.read()
|
||||||
@@ -22,37 +23,37 @@ def test_set_serializer_default_config(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_default_set_cassette_library_dir(tmpdir, mockbin_request_url):
|
def test_default_set_cassette_library_dir(tmpdir, httpbin):
|
||||||
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
|
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
|
||||||
|
|
||||||
with my_vcr.use_cassette("test.json"):
|
with my_vcr.use_cassette("test.json"):
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
assert os.path.exists(str(tmpdir.join("subdir").join("test.json")))
|
assert os.path.exists(str(tmpdir.join("subdir").join("test.json")))
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_override_set_cassette_library_dir(tmpdir, mockbin_request_url):
|
def test_override_set_cassette_library_dir(tmpdir, httpbin):
|
||||||
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
|
my_vcr = vcr.VCR(cassette_library_dir=str(tmpdir.join("subdir")))
|
||||||
|
|
||||||
cld = str(tmpdir.join("subdir2"))
|
cld = str(tmpdir.join("subdir2"))
|
||||||
|
|
||||||
with my_vcr.use_cassette("test.json", cassette_library_dir=cld):
|
with my_vcr.use_cassette("test.json", cassette_library_dir=cld):
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
assert os.path.exists(str(tmpdir.join("subdir2").join("test.json")))
|
assert os.path.exists(str(tmpdir.join("subdir2").join("test.json")))
|
||||||
assert not os.path.exists(str(tmpdir.join("subdir").join("test.json")))
|
assert not os.path.exists(str(tmpdir.join("subdir").join("test.json")))
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_override_match_on(tmpdir, mockbin_request_url):
|
def test_override_match_on(tmpdir, httpbin):
|
||||||
my_vcr = vcr.VCR(match_on=["method"])
|
my_vcr = vcr.VCR(match_on=["method"])
|
||||||
|
|
||||||
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
|
with my_vcr.use_cassette(str(tmpdir.join("test.json"))):
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
with my_vcr.use_cassette(str(tmpdir.join("test.json"))) as cass:
|
with my_vcr.use_cassette(str(tmpdir.join("test.json"))) as cass:
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
assert len(cass) == 1
|
assert len(cass) == 1
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
@@ -61,18 +62,17 @@ def test_override_match_on(tmpdir, mockbin_request_url):
|
|||||||
def test_missing_matcher():
|
def test_missing_matcher():
|
||||||
my_vcr = vcr.VCR()
|
my_vcr = vcr.VCR()
|
||||||
my_vcr.register_matcher("awesome", object)
|
my_vcr.register_matcher("awesome", object)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError), my_vcr.use_cassette("test.yaml", match_on=["notawesome"]):
|
||||||
with my_vcr.use_cassette("test.yaml", match_on=["notawesome"]):
|
pass
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_dont_record_on_exception(tmpdir, mockbin_request_url):
|
def test_dont_record_on_exception(tmpdir, httpbin):
|
||||||
my_vcr = vcr.VCR(record_on_exception=False)
|
my_vcr = vcr.VCR(record_on_exception=False)
|
||||||
|
|
||||||
@my_vcr.use_cassette(str(tmpdir.join("dontsave.yml")))
|
@my_vcr.use_cassette(str(tmpdir.join("dontsave.yml")))
|
||||||
def some_test():
|
def some_test():
|
||||||
assert b"Not in content" in urlopen(mockbin_request_url)
|
assert b"Not in content" in urlopen(httpbin.url)
|
||||||
|
|
||||||
with pytest.raises(AssertionError):
|
with pytest.raises(AssertionError):
|
||||||
some_test()
|
some_test()
|
||||||
@@ -80,8 +80,25 @@ def test_dont_record_on_exception(tmpdir, mockbin_request_url):
|
|||||||
assert not os.path.exists(str(tmpdir.join("dontsave.yml")))
|
assert not os.path.exists(str(tmpdir.join("dontsave.yml")))
|
||||||
|
|
||||||
# Make sure context decorator has the same behavior
|
# Make sure context decorator has the same behavior
|
||||||
with pytest.raises(AssertionError):
|
with pytest.raises(AssertionError), my_vcr.use_cassette(str(tmpdir.join("dontsave2.yml"))):
|
||||||
with my_vcr.use_cassette(str(tmpdir.join("dontsave2.yml"))):
|
assert b"Not in content" in urlopen(httpbin.url).read()
|
||||||
assert b"Not in content" in urlopen(mockbin_request_url).read()
|
|
||||||
|
|
||||||
assert not os.path.exists(str(tmpdir.join("dontsave2.yml")))
|
assert not os.path.exists(str(tmpdir.join("dontsave2.yml")))
|
||||||
|
|
||||||
|
|
||||||
|
def test_set_drop_unused_requests(tmpdir, httpbin):
|
||||||
|
my_vcr = vcr.VCR(drop_unused_requests=True)
|
||||||
|
file = str(tmpdir.join("test.yaml"))
|
||||||
|
|
||||||
|
with my_vcr.use_cassette(file):
|
||||||
|
urlopen(httpbin.url)
|
||||||
|
urlopen(httpbin.url + "/get")
|
||||||
|
|
||||||
|
cassette = Cassette.load(path=file)
|
||||||
|
assert len(cassette) == 2
|
||||||
|
|
||||||
|
with my_vcr.use_cassette(file):
|
||||||
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
|
cassette = Cassette.load(path=file)
|
||||||
|
assert len(cassette) == 1
|
||||||
|
|||||||
@@ -12,19 +12,19 @@ import vcr
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_disk_saver_nowrite(tmpdir, mockbin_request_url):
|
def test_disk_saver_nowrite(tmpdir, httpbin):
|
||||||
"""
|
"""
|
||||||
Ensure that when you close a cassette without changing it it doesn't
|
Ensure that when you close a cassette without changing it it doesn't
|
||||||
rewrite the file
|
rewrite the file
|
||||||
"""
|
"""
|
||||||
fname = str(tmpdir.join("synopsis.yaml"))
|
fname = str(tmpdir.join("synopsis.yaml"))
|
||||||
with vcr.use_cassette(fname) as cass:
|
with vcr.use_cassette(fname) as cass:
|
||||||
urlopen(mockbin_request_url).read()
|
urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 0
|
assert cass.play_count == 0
|
||||||
last_mod = os.path.getmtime(fname)
|
last_mod = os.path.getmtime(fname)
|
||||||
|
|
||||||
with vcr.use_cassette(fname) as cass:
|
with vcr.use_cassette(fname) as cass:
|
||||||
urlopen(mockbin_request_url).read()
|
urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
assert cass.dirty is False
|
assert cass.dirty is False
|
||||||
last_mod2 = os.path.getmtime(fname)
|
last_mod2 = os.path.getmtime(fname)
|
||||||
@@ -33,14 +33,14 @@ def test_disk_saver_nowrite(tmpdir, mockbin_request_url):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_disk_saver_write(tmpdir, mockbin_request_url):
|
def test_disk_saver_write(tmpdir, httpbin):
|
||||||
"""
|
"""
|
||||||
Ensure that when you close a cassette after changing it it does
|
Ensure that when you close a cassette after changing it it does
|
||||||
rewrite the file
|
rewrite the file
|
||||||
"""
|
"""
|
||||||
fname = str(tmpdir.join("synopsis.yaml"))
|
fname = str(tmpdir.join("synopsis.yaml"))
|
||||||
with vcr.use_cassette(fname) as cass:
|
with vcr.use_cassette(fname) as cass:
|
||||||
urlopen(mockbin_request_url).read()
|
urlopen(httpbin.url).read()
|
||||||
assert cass.play_count == 0
|
assert cass.play_count == 0
|
||||||
last_mod = os.path.getmtime(fname)
|
last_mod = os.path.getmtime(fname)
|
||||||
|
|
||||||
@@ -49,8 +49,8 @@ def test_disk_saver_write(tmpdir, mockbin_request_url):
|
|||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
|
|
||||||
with vcr.use_cassette(fname, record_mode=vcr.mode.ANY) as cass:
|
with vcr.use_cassette(fname, record_mode=vcr.mode.ANY) as cass:
|
||||||
urlopen(mockbin_request_url).read()
|
urlopen(httpbin.url).read()
|
||||||
urlopen(mockbin_request_url + "/get").read()
|
urlopen(httpbin.url + "/get").read()
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
assert cass.dirty
|
assert cass.dirty
|
||||||
last_mod2 = os.path.getmtime(fname)
|
last_mod2 = os.path.getmtime(fname)
|
||||||
|
|||||||
@@ -5,9 +5,11 @@ from urllib.parse import urlencode
|
|||||||
from urllib.request import Request, urlopen
|
from urllib.request import Request, urlopen
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from assertions import assert_cassette_has_one_response, assert_is_json_bytes
|
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
from vcr.filters import brotli
|
||||||
|
|
||||||
|
from ..assertions import assert_cassette_has_one_response, assert_is_json_bytes
|
||||||
|
|
||||||
|
|
||||||
def _request_with_auth(url, username, password):
|
def _request_with_auth(url, username, password):
|
||||||
@@ -137,6 +139,22 @@ def test_decompress_deflate(tmpdir, httpbin):
|
|||||||
assert_is_json_bytes(decoded_response)
|
assert_is_json_bytes(decoded_response)
|
||||||
|
|
||||||
|
|
||||||
|
def test_decompress_brotli(tmpdir, httpbin):
|
||||||
|
if brotli is None:
|
||||||
|
# XXX: this is never true, because brotlipy is installed with "httpbin"
|
||||||
|
pytest.skip("Brotli is not installed")
|
||||||
|
|
||||||
|
url = httpbin.url + "/brotli"
|
||||||
|
request = Request(url, headers={"Accept-Encoding": ["gzip, deflate, br"]})
|
||||||
|
cass_file = str(tmpdir.join("brotli_response.yaml"))
|
||||||
|
with vcr.use_cassette(cass_file, decode_compressed_response=True):
|
||||||
|
urlopen(request)
|
||||||
|
with vcr.use_cassette(cass_file) as cass:
|
||||||
|
decoded_response = urlopen(url).read()
|
||||||
|
assert_cassette_has_one_response(cass)
|
||||||
|
assert_is_json_bytes(decoded_response)
|
||||||
|
|
||||||
|
|
||||||
def test_decompress_regular(tmpdir, httpbin):
|
def test_decompress_regular(tmpdir, httpbin):
|
||||||
"""Test that it doesn't try to decompress content that isn't compressed"""
|
"""Test that it doesn't try to decompress content that isn't compressed"""
|
||||||
url = httpbin.url + "/get"
|
url = httpbin.url + "/get"
|
||||||
|
|||||||
@@ -1,12 +1,14 @@
|
|||||||
"""Integration tests with httplib2"""
|
"""Integration tests with httplib2"""
|
||||||
|
|
||||||
from urllib.parse import urlencode
|
from urllib.parse import urlencode
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_httpbin.certs
|
import pytest_httpbin.certs
|
||||||
from assertions import assert_cassette_has_one_response
|
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
|
from ..assertions import assert_cassette_has_one_response
|
||||||
|
|
||||||
httplib2 = pytest.importorskip("httplib2")
|
httplib2 = pytest.importorskip("httplib2")
|
||||||
|
|
||||||
|
|
||||||
@@ -56,14 +58,14 @@ def test_response_headers(tmpdir, httpbin_both):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_effective_url(tmpdir):
|
def test_effective_url(tmpdir, httpbin):
|
||||||
"""Ensure that the effective_url is captured"""
|
"""Ensure that the effective_url is captured"""
|
||||||
url = "http://mockbin.org/redirect/301"
|
url = httpbin.url + "/redirect-to?url=.%2F&status_code=301"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
||||||
resp, _ = http().request(url)
|
resp, _ = http().request(url)
|
||||||
effective_url = resp["content-location"]
|
effective_url = resp["content-location"]
|
||||||
assert effective_url == "http://mockbin.org/redirect/301/0"
|
assert effective_url == httpbin.url + "/"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
||||||
resp, _ = http().request(url)
|
resp, _ = http().request(url)
|
||||||
|
|||||||
@@ -4,10 +4,16 @@ import pytest
|
|||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
|
from ..assertions import assert_is_json_bytes
|
||||||
|
|
||||||
asyncio = pytest.importorskip("asyncio")
|
asyncio = pytest.importorskip("asyncio")
|
||||||
httpx = pytest.importorskip("httpx")
|
httpx = pytest.importorskip("httpx")
|
||||||
|
|
||||||
from vcr.stubs.httpx_stubs import HTTPX_REDIRECT_PARAM # noqa: E402
|
|
||||||
|
@pytest.fixture(params=["https", "http"])
|
||||||
|
def scheme(request):
|
||||||
|
"""Fixture that returns both http and https."""
|
||||||
|
return request.param
|
||||||
|
|
||||||
|
|
||||||
class BaseDoRequest:
|
class BaseDoRequest:
|
||||||
@@ -16,6 +22,7 @@ class BaseDoRequest:
|
|||||||
def __init__(self, *args, **kwargs):
|
def __init__(self, *args, **kwargs):
|
||||||
self._client_args = args
|
self._client_args = args
|
||||||
self._client_kwargs = kwargs
|
self._client_kwargs = kwargs
|
||||||
|
self._client_kwargs["follow_redirects"] = self._client_kwargs.get("follow_redirects", True)
|
||||||
|
|
||||||
def _make_client(self):
|
def _make_client(self):
|
||||||
return self._client_class(*self._client_args, **self._client_kwargs)
|
return self._client_class(*self._client_args, **self._client_kwargs)
|
||||||
@@ -25,21 +32,36 @@ class DoSyncRequest(BaseDoRequest):
|
|||||||
_client_class = httpx.Client
|
_client_class = httpx.Client
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
|
self._client = self._make_client()
|
||||||
return self
|
return self
|
||||||
|
|
||||||
def __exit__(self, *args):
|
def __exit__(self, *args):
|
||||||
pass
|
self._client.close()
|
||||||
|
del self._client
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def client(self):
|
def client(self):
|
||||||
try:
|
try:
|
||||||
return self._client
|
return self._client
|
||||||
except AttributeError:
|
except AttributeError as e:
|
||||||
self._client = self._make_client()
|
raise ValueError('To access sync client, use "with do_request() as client"') from e
|
||||||
return self._client
|
|
||||||
|
|
||||||
def __call__(self, *args, **kwargs):
|
def __call__(self, *args, **kwargs):
|
||||||
return self.client.request(*args, timeout=60, **kwargs)
|
if hasattr(self, "_client"):
|
||||||
|
return self.client.request(*args, timeout=60, **kwargs)
|
||||||
|
|
||||||
|
# Use one-time context and dispose of the client afterwards
|
||||||
|
with self:
|
||||||
|
return self.client.request(*args, timeout=60, **kwargs)
|
||||||
|
|
||||||
|
def stream(self, *args, **kwargs):
|
||||||
|
if hasattr(self, "_client"):
|
||||||
|
with self.client.stream(*args, **kwargs) as response:
|
||||||
|
return b"".join(response.iter_bytes())
|
||||||
|
|
||||||
|
# Use one-time context and dispose of the client afterwards
|
||||||
|
with self, self.client.stream(*args, **kwargs) as response:
|
||||||
|
return b"".join(response.iter_bytes())
|
||||||
|
|
||||||
|
|
||||||
class DoAsyncRequest(BaseDoRequest):
|
class DoAsyncRequest(BaseDoRequest):
|
||||||
@@ -75,7 +97,22 @@ class DoAsyncRequest(BaseDoRequest):
|
|||||||
|
|
||||||
# Use one-time context and dispose of the loop/client afterwards
|
# Use one-time context and dispose of the loop/client afterwards
|
||||||
with self:
|
with self:
|
||||||
return self(*args, **kwargs)
|
return self._loop.run_until_complete(self.client.request(*args, **kwargs))
|
||||||
|
|
||||||
|
async def _get_stream(self, *args, **kwargs):
|
||||||
|
async with self.client.stream(*args, **kwargs) as response:
|
||||||
|
content = b""
|
||||||
|
async for c in response.aiter_bytes():
|
||||||
|
content += c
|
||||||
|
return content
|
||||||
|
|
||||||
|
def stream(self, *args, **kwargs):
|
||||||
|
if hasattr(self, "_loop"):
|
||||||
|
return self._loop.run_until_complete(self._get_stream(*args, **kwargs))
|
||||||
|
|
||||||
|
# Use one-time context and dispose of the loop/client afterwards
|
||||||
|
with self:
|
||||||
|
return self._loop.run_until_complete(self._get_stream(*args, **kwargs))
|
||||||
|
|
||||||
|
|
||||||
def pytest_generate_tests(metafunc):
|
def pytest_generate_tests(metafunc):
|
||||||
@@ -89,8 +126,8 @@ def yml(tmpdir, request):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_status(tmpdir, mockbin, do_request):
|
def test_status(tmpdir, httpbin, do_request):
|
||||||
url = mockbin
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("status.yaml"))):
|
||||||
response = do_request()("GET", url)
|
response = do_request()("GET", url)
|
||||||
@@ -102,8 +139,8 @@ def test_status(tmpdir, mockbin, do_request):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_case_insensitive_headers(tmpdir, mockbin, do_request):
|
def test_case_insensitive_headers(tmpdir, httpbin, do_request):
|
||||||
url = mockbin
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("whatever.yaml"))):
|
||||||
do_request()("GET", url)
|
do_request()("GET", url)
|
||||||
@@ -116,8 +153,8 @@ def test_case_insensitive_headers(tmpdir, mockbin, do_request):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_content(tmpdir, mockbin, do_request):
|
def test_content(tmpdir, httpbin, do_request):
|
||||||
url = mockbin
|
url = httpbin.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("cointent.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("cointent.yaml"))):
|
||||||
response = do_request()("GET", url)
|
response = do_request()("GET", url)
|
||||||
@@ -129,23 +166,21 @@ def test_content(tmpdir, mockbin, do_request):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_json(tmpdir, mockbin, do_request):
|
def test_json(tmpdir, httpbin, do_request):
|
||||||
url = mockbin + "/request"
|
url = httpbin.url + "/json"
|
||||||
|
|
||||||
headers = {"content-type": "application/json"}
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("json.yaml"))):
|
||||||
response = do_request(headers=headers)("GET", url)
|
response = do_request()("GET", url)
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("json.yaml"))) as cassette:
|
with vcr.use_cassette(str(tmpdir.join("json.yaml"))) as cassette:
|
||||||
cassette_response = do_request(headers=headers)("GET", url)
|
cassette_response = do_request()("GET", url)
|
||||||
assert cassette_response.json() == response.json()
|
assert cassette_response.json() == response.json()
|
||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_params_same_url_distinct_params(tmpdir, mockbin, do_request):
|
def test_params_same_url_distinct_params(tmpdir, httpbin, do_request):
|
||||||
url = mockbin + "/request"
|
url = httpbin.url + "/get"
|
||||||
headers = {"Content-Type": "application/json"}
|
headers = {"Content-Type": "application/json"}
|
||||||
params = {"a": 1, "b": False, "c": "c"}
|
params = {"a": 1, "b": False, "c": "c"}
|
||||||
|
|
||||||
@@ -159,28 +194,28 @@ def test_params_same_url_distinct_params(tmpdir, mockbin, do_request):
|
|||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
params = {"other": "params"}
|
params = {"other": "params"}
|
||||||
with vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette:
|
with (
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
vcr.use_cassette(str(tmpdir.join("get.yaml"))) as cassette,
|
||||||
do_request()("GET", url, params=params, headers=headers)
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
|
):
|
||||||
|
do_request()("GET", url, params=params, headers=headers)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_redirect(mockbin, yml, do_request):
|
def test_redirect(httpbin, yml, do_request):
|
||||||
url = mockbin + "/redirect/303/2"
|
url = httpbin.url + "/redirect-to"
|
||||||
|
|
||||||
redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: True}
|
response = do_request()("GET", url)
|
||||||
|
|
||||||
response = do_request()("GET", url, **redirect_kwargs)
|
|
||||||
with vcr.use_cassette(yml):
|
with vcr.use_cassette(yml):
|
||||||
response = do_request()("GET", url, **redirect_kwargs)
|
response = do_request()("GET", url, params={"url": "./get", "status_code": 302})
|
||||||
|
|
||||||
with vcr.use_cassette(yml) as cassette:
|
with vcr.use_cassette(yml) as cassette:
|
||||||
cassette_response = do_request()("GET", url, **redirect_kwargs)
|
cassette_response = do_request()("GET", url, params={"url": "./get", "status_code": 302})
|
||||||
|
|
||||||
assert cassette_response.status_code == response.status_code
|
assert cassette_response.status_code == response.status_code
|
||||||
assert len(cassette_response.history) == len(response.history)
|
assert len(cassette_response.history) == len(response.history)
|
||||||
assert len(cassette) == 3
|
assert len(cassette) == 2
|
||||||
assert cassette.play_count == 3
|
assert cassette.play_count == 2
|
||||||
|
|
||||||
# Assert that the real response and the cassette response have a similar
|
# Assert that the real response and the cassette response have a similar
|
||||||
# looking request_info.
|
# looking request_info.
|
||||||
@@ -189,22 +224,6 @@ def test_redirect(mockbin, yml, do_request):
|
|||||||
assert cassette_response.request.headers.items() == response.request.headers.items()
|
assert cassette_response.request.headers.items() == response.request.headers.items()
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
|
||||||
def test_work_with_gzipped_data(mockbin, do_request, yml):
|
|
||||||
url = mockbin + "/gzip?foo=bar"
|
|
||||||
headers = {"accept-encoding": "deflate, gzip"}
|
|
||||||
|
|
||||||
with vcr.use_cassette(yml):
|
|
||||||
do_request(headers=headers)("GET", url)
|
|
||||||
|
|
||||||
with vcr.use_cassette(yml) as cassette:
|
|
||||||
cassette_response = do_request(headers=headers)("GET", url)
|
|
||||||
|
|
||||||
assert cassette_response.headers["content-encoding"] == "gzip"
|
|
||||||
assert cassette_response.read()
|
|
||||||
assert cassette.play_count == 1
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
@pytest.mark.parametrize("url", ["https://github.com/kevin1024/vcrpy/issues/" + str(i) for i in range(3, 6)])
|
@pytest.mark.parametrize("url", ["https://github.com/kevin1024/vcrpy/issues/" + str(i) for i in range(3, 6)])
|
||||||
def test_simple_fetching(do_request, yml, url):
|
def test_simple_fetching(do_request, yml, url):
|
||||||
@@ -217,55 +236,32 @@ def test_simple_fetching(do_request, yml, url):
|
|||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
def test_behind_proxy(do_request):
|
|
||||||
# This is recorded because otherwise we should have a live proxy somewhere.
|
|
||||||
yml = (
|
|
||||||
os.path.dirname(os.path.realpath(__file__)) + "/cassettes/" + "test_httpx_test_test_behind_proxy.yml"
|
|
||||||
)
|
|
||||||
url = "https://mockbin.org/headers"
|
|
||||||
proxy = "http://localhost:8080"
|
|
||||||
proxies = {"http://": proxy, "https://": proxy}
|
|
||||||
|
|
||||||
with vcr.use_cassette(yml):
|
|
||||||
response = do_request(proxies=proxies, verify=False)("GET", url)
|
|
||||||
|
|
||||||
with vcr.use_cassette(yml) as cassette:
|
|
||||||
cassette_response = do_request(proxies=proxies, verify=False)("GET", url)
|
|
||||||
assert str(cassette_response.request.url) == url
|
|
||||||
assert cassette.play_count == 1
|
|
||||||
|
|
||||||
assert cassette_response.headers["Via"] == "my_own_proxy", str(cassette_response.headers)
|
|
||||||
assert cassette_response.request.url == response.request.url
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_cookies(tmpdir, mockbin, do_request):
|
def test_cookies(tmpdir, httpbin, do_request):
|
||||||
def client_cookies(client):
|
def client_cookies(client):
|
||||||
return list(client.client.cookies)
|
return list(client.client.cookies)
|
||||||
|
|
||||||
def response_cookies(response):
|
def response_cookies(response):
|
||||||
return list(response.cookies)
|
return list(response.cookies)
|
||||||
|
|
||||||
url = mockbin + "/bin/26148652-fe25-4f21-aaf5-689b5b4bf65f"
|
url = httpbin.url + "/cookies/set"
|
||||||
headers = {"cookie": "k1=v1;k2=v2"}
|
params = {"k1": "v1", "k2": "v2"}
|
||||||
|
|
||||||
with do_request(headers=headers) as client:
|
with do_request(params=params, follow_redirects=False) as client:
|
||||||
assert client_cookies(client) == []
|
assert client_cookies(client) == []
|
||||||
|
|
||||||
redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: True}
|
|
||||||
|
|
||||||
testfile = str(tmpdir.join("cookies.yml"))
|
testfile = str(tmpdir.join("cookies.yml"))
|
||||||
with vcr.use_cassette(testfile):
|
with vcr.use_cassette(testfile):
|
||||||
r1 = client("GET", url, **redirect_kwargs)
|
r1 = client("GET", url)
|
||||||
|
|
||||||
assert response_cookies(r1) == ["k1", "k2"]
|
assert response_cookies(r1) == ["k1", "k2"]
|
||||||
|
|
||||||
r2 = client("GET", url, **redirect_kwargs)
|
r2 = client("GET", url)
|
||||||
|
|
||||||
assert response_cookies(r2) == ["k1", "k2"]
|
assert response_cookies(r2) == ["k1", "k2"]
|
||||||
assert client_cookies(client) == ["k1", "k2"]
|
assert client_cookies(client) == ["k1", "k2"]
|
||||||
|
|
||||||
with do_request(headers=headers) as new_client:
|
with do_request(params=params, follow_redirects=False) as new_client:
|
||||||
assert client_cookies(new_client) == []
|
assert client_cookies(new_client) == []
|
||||||
|
|
||||||
with vcr.use_cassette(testfile) as cassette:
|
with vcr.use_cassette(testfile) as cassette:
|
||||||
@@ -277,40 +273,90 @@ def test_cookies(tmpdir, mockbin, do_request):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_relative_redirects(tmpdir, scheme, do_request, mockbin):
|
def test_stream(tmpdir, httpbin, do_request):
|
||||||
redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: True}
|
url = httpbin.url + "/stream-bytes/512"
|
||||||
|
testfile = str(tmpdir.join("stream.yml"))
|
||||||
|
|
||||||
url = mockbin + "/redirect/301?to=/redirect/301?to=/request"
|
|
||||||
testfile = str(tmpdir.join("relative_redirects.yml"))
|
|
||||||
with vcr.use_cassette(testfile):
|
with vcr.use_cassette(testfile):
|
||||||
response = do_request()("GET", url, **redirect_kwargs)
|
response_content = do_request().stream("GET", url)
|
||||||
assert len(response.history) == 2, response
|
assert len(response_content) == 512
|
||||||
assert response.json()["url"].endswith("request")
|
|
||||||
|
|
||||||
with vcr.use_cassette(testfile) as cassette:
|
with vcr.use_cassette(testfile) as cassette:
|
||||||
response = do_request()("GET", url, **redirect_kwargs)
|
cassette_content = do_request().stream("GET", url)
|
||||||
assert len(response.history) == 2
|
assert cassette_content == response_content
|
||||||
assert response.json()["url"].endswith("request")
|
assert len(cassette_content) == 512
|
||||||
|
assert cassette.play_count == 1
|
||||||
assert cassette.play_count == 3
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
# Regular cassette formats support the status reason,
|
||||||
def test_redirect_wo_allow_redirects(do_request, mockbin, yml):
|
# but the old HTTPX cassette format does not.
|
||||||
url = mockbin + "/redirect/308/5"
|
@pytest.mark.parametrize(
|
||||||
|
"cassette_name,reason",
|
||||||
redirect_kwargs = {HTTPX_REDIRECT_PARAM.name: False}
|
[
|
||||||
|
("requests", "great"),
|
||||||
with vcr.use_cassette(yml):
|
("httpx_old_format", "OK"),
|
||||||
response = do_request()("GET", url, **redirect_kwargs)
|
],
|
||||||
|
)
|
||||||
assert str(response.url).endswith("308/5")
|
def test_load_cassette_format(do_request, cassette_name, reason):
|
||||||
assert response.status_code == 308
|
mydir = os.path.dirname(os.path.realpath(__file__))
|
||||||
|
yml = f"{mydir}/cassettes/gzip_{cassette_name}.yaml"
|
||||||
|
url = "https://httpbin.org/gzip"
|
||||||
|
|
||||||
with vcr.use_cassette(yml) as cassette:
|
with vcr.use_cassette(yml) as cassette:
|
||||||
response = do_request()("GET", url, **redirect_kwargs)
|
cassette_response = do_request()("GET", url)
|
||||||
|
assert str(cassette_response.request.url) == url
|
||||||
assert str(response.url).endswith("308/5")
|
|
||||||
assert response.status_code == 308
|
|
||||||
|
|
||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
# Should be able to load up the JSON inside,
|
||||||
|
# regardless whether the content is the gzipped
|
||||||
|
# in the cassette or not.
|
||||||
|
json = cassette_response.json()
|
||||||
|
assert json["method"] == "GET", json
|
||||||
|
assert cassette_response.status_code == 200
|
||||||
|
assert cassette_response.reason_phrase == reason
|
||||||
|
|
||||||
|
|
||||||
|
def test_gzip__decode_compressed_response_false(tmpdir, httpbin, do_request):
|
||||||
|
"""
|
||||||
|
Ensure that httpx is able to automatically decompress the response body.
|
||||||
|
"""
|
||||||
|
for _ in range(2): # one for recording, one for re-playing
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cassette:
|
||||||
|
response = do_request()("GET", httpbin + "/gzip")
|
||||||
|
assert response.headers["content-encoding"] == "gzip" # i.e. not removed
|
||||||
|
# The content stored in the cassette should be gzipped.
|
||||||
|
assert cassette.responses[0]["body"]["string"][:2] == b"\x1f\x8b"
|
||||||
|
assert_is_json_bytes(response.content) # i.e. uncompressed bytes
|
||||||
|
|
||||||
|
|
||||||
|
def test_gzip__decode_compressed_response_true(do_request, tmpdir, httpbin):
|
||||||
|
url = httpbin + "/gzip"
|
||||||
|
|
||||||
|
expected_response = do_request()("GET", url)
|
||||||
|
expected_content = expected_response.content
|
||||||
|
assert expected_response.headers["content-encoding"] == "gzip" # self-test
|
||||||
|
|
||||||
|
with vcr.use_cassette(
|
||||||
|
str(tmpdir.join("decode_compressed.yaml")),
|
||||||
|
decode_compressed_response=True,
|
||||||
|
) as cassette:
|
||||||
|
r = do_request()("GET", url)
|
||||||
|
assert r.headers["content-encoding"] == "gzip" # i.e. not removed
|
||||||
|
content_length = r.headers["content-length"]
|
||||||
|
assert r.content == expected_content
|
||||||
|
|
||||||
|
# Has the cassette body been decompressed?
|
||||||
|
cassette_response_body = cassette.responses[0]["body"]["string"]
|
||||||
|
assert isinstance(cassette_response_body, str)
|
||||||
|
|
||||||
|
# Content should be JSON.
|
||||||
|
assert cassette_response_body[0:1] == "{"
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("decode_compressed.yaml")), decode_compressed_response=True):
|
||||||
|
r = httpx.get(url)
|
||||||
|
assert "content-encoding" not in r.headers # i.e. removed
|
||||||
|
assert r.content == expected_content
|
||||||
|
|
||||||
|
# As the content is uncompressed, it should have a bigger
|
||||||
|
# length than the compressed version.
|
||||||
|
assert r.headers["content-length"] > content_length
|
||||||
|
|||||||
@@ -51,9 +51,11 @@ def test_matchers(httpbin, httpbin_secure, cassette, matcher, matching_uri, not_
|
|||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
# play cassette with not matching on uri, it should fail
|
# play cassette with not matching on uri, it should fail
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with (
|
||||||
with vcr.use_cassette(cassette, match_on=[matcher]) as cass:
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
urlopen(not_matching_uri)
|
vcr.use_cassette(cassette, match_on=[matcher]) as cass,
|
||||||
|
):
|
||||||
|
urlopen(not_matching_uri)
|
||||||
|
|
||||||
|
|
||||||
def test_method_matcher(cassette, httpbin, httpbin_secure):
|
def test_method_matcher(cassette, httpbin, httpbin_secure):
|
||||||
@@ -65,10 +67,12 @@ def test_method_matcher(cassette, httpbin, httpbin_secure):
|
|||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
# should fail if method does not match
|
# should fail if method does not match
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with (
|
||||||
with vcr.use_cassette(cassette, match_on=["method"]) as cass:
|
pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException),
|
||||||
# is a POST request
|
vcr.use_cassette(cassette, match_on=["method"]) as cass,
|
||||||
urlopen(default_uri, data=b"")
|
):
|
||||||
|
# is a POST request
|
||||||
|
urlopen(default_uri, data=b"")
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@@ -98,14 +102,12 @@ def test_default_matcher_matches(cassette, uri, httpbin, httpbin_secure):
|
|||||||
)
|
)
|
||||||
def test_default_matcher_does_not_match(cassette, uri, httpbin, httpbin_secure):
|
def test_default_matcher_does_not_match(cassette, uri, httpbin, httpbin_secure):
|
||||||
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
|
uri = _replace_httpbin(uri, httpbin, httpbin_secure)
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException), vcr.use_cassette(cassette):
|
||||||
with vcr.use_cassette(cassette):
|
urlopen(uri)
|
||||||
urlopen(uri)
|
|
||||||
|
|
||||||
|
|
||||||
def test_default_matcher_does_not_match_on_method(cassette, httpbin, httpbin_secure):
|
def test_default_matcher_does_not_match_on_method(cassette, httpbin, httpbin_secure):
|
||||||
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
|
default_uri = _replace_httpbin(DEFAULT_URI, httpbin, httpbin_secure)
|
||||||
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException):
|
with pytest.raises(vcr.errors.CannotOverwriteExistingCassetteException), vcr.use_cassette(cassette):
|
||||||
with vcr.use_cassette(cassette):
|
# is a POST request
|
||||||
# is a POST request
|
urlopen(default_uri, data=b"")
|
||||||
urlopen(default_uri, data=b"")
|
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
"""Test using a proxy."""
|
"""Test using a proxy."""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
import http.server
|
import http.server
|
||||||
import multiprocessing
|
|
||||||
import socketserver
|
import socketserver
|
||||||
|
import threading
|
||||||
from urllib.request import urlopen
|
from urllib.request import urlopen
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
@@ -29,20 +30,51 @@ class Proxy(http.server.SimpleHTTPRequestHandler):
|
|||||||
# In Python 2 the response is an addinfourl instance.
|
# In Python 2 the response is an addinfourl instance.
|
||||||
status = upstream_response.code
|
status = upstream_response.code
|
||||||
headers = upstream_response.info().items()
|
headers = upstream_response.info().items()
|
||||||
self.send_response(status, upstream_response.msg)
|
self.log_request(status)
|
||||||
|
self.send_response_only(status, upstream_response.msg)
|
||||||
for header in headers:
|
for header in headers:
|
||||||
self.send_header(*header)
|
self.send_header(*header)
|
||||||
self.end_headers()
|
self.end_headers()
|
||||||
self.copyfile(upstream_response, self.wfile)
|
self.copyfile(upstream_response, self.wfile)
|
||||||
|
|
||||||
|
def do_CONNECT(self):
|
||||||
|
host, port = self.path.split(":")
|
||||||
|
|
||||||
|
asyncio.run(self._tunnel(host, port, self.connection))
|
||||||
|
|
||||||
|
async def _tunnel(self, host, port, client_sock):
|
||||||
|
target_r, target_w = await asyncio.open_connection(host=host, port=port)
|
||||||
|
|
||||||
|
self.send_response(http.HTTPStatus.OK)
|
||||||
|
self.end_headers()
|
||||||
|
|
||||||
|
source_r, source_w = await asyncio.open_connection(sock=client_sock)
|
||||||
|
|
||||||
|
async def channel(reader, writer):
|
||||||
|
while True:
|
||||||
|
data = await reader.read(1024)
|
||||||
|
if not data:
|
||||||
|
break
|
||||||
|
writer.write(data)
|
||||||
|
await writer.drain()
|
||||||
|
|
||||||
|
writer.close()
|
||||||
|
await writer.wait_closed()
|
||||||
|
|
||||||
|
await asyncio.gather(
|
||||||
|
channel(target_r, source_w),
|
||||||
|
channel(source_r, target_w),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(scope="session")
|
@pytest.fixture(scope="session")
|
||||||
def proxy_server():
|
def proxy_server():
|
||||||
httpd = socketserver.ThreadingTCPServer(("", 0), Proxy)
|
with socketserver.ThreadingTCPServer(("", 0), Proxy) as httpd:
|
||||||
proxy_process = multiprocessing.Process(target=httpd.serve_forever)
|
proxy_process = threading.Thread(target=httpd.serve_forever)
|
||||||
proxy_process.start()
|
proxy_process.start()
|
||||||
yield "http://{}:{}".format(*httpd.server_address)
|
yield "http://{}:{}".format(*httpd.server_address)
|
||||||
proxy_process.terminate()
|
httpd.shutdown()
|
||||||
|
proxy_process.join()
|
||||||
|
|
||||||
|
|
||||||
def test_use_proxy(tmpdir, httpbin, proxy_server):
|
def test_use_proxy(tmpdir, httpbin, proxy_server):
|
||||||
@@ -50,8 +82,26 @@ def test_use_proxy(tmpdir, httpbin, proxy_server):
|
|||||||
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
|
||||||
response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))) as cassette:
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
|
||||||
cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
cassette_response = requests.get(httpbin.url, proxies={"http": proxy_server})
|
||||||
|
|
||||||
assert cassette_response.headers == response.headers
|
assert cassette_response.headers == response.headers
|
||||||
assert cassette.play_count == 1
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_use_https_proxy(tmpdir, httpbin_secure, proxy_server):
|
||||||
|
"""Ensure that it works with an HTTPS proxy."""
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml"))):
|
||||||
|
response = requests.get(httpbin_secure.url, proxies={"https": proxy_server})
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("proxy.yaml")), mode="none") as cassette:
|
||||||
|
cassette_response = requests.get(
|
||||||
|
httpbin_secure.url,
|
||||||
|
proxies={"https": proxy_server},
|
||||||
|
)
|
||||||
|
|
||||||
|
assert cassette_response.headers == response.headers
|
||||||
|
assert cassette.play_count == 1
|
||||||
|
|
||||||
|
# The cassette URL points to httpbin, not the proxy
|
||||||
|
assert cassette.requests[0].url == httpbin_secure.url + "/"
|
||||||
|
|||||||
@@ -124,9 +124,11 @@ def test_none_record_mode(tmpdir, httpbin):
|
|||||||
# Cassette file doesn't exist, yet we are trying to make a request.
|
# Cassette file doesn't exist, yet we are trying to make a request.
|
||||||
# raise hell.
|
# raise hell.
|
||||||
testfile = str(tmpdir.join("recordmode.yml"))
|
testfile = str(tmpdir.join("recordmode.yml"))
|
||||||
with vcr.use_cassette(testfile, record_mode=vcr.mode.NONE):
|
with (
|
||||||
with pytest.raises(CannotOverwriteExistingCassetteException):
|
vcr.use_cassette(testfile, record_mode=vcr.mode.NONE),
|
||||||
urlopen(httpbin.url).read()
|
pytest.raises(CannotOverwriteExistingCassetteException),
|
||||||
|
):
|
||||||
|
urlopen(httpbin.url).read()
|
||||||
|
|
||||||
|
|
||||||
def test_none_record_mode_with_existing_cassette(tmpdir, httpbin):
|
def test_none_record_mode_with_existing_cassette(tmpdir, httpbin):
|
||||||
|
|||||||
@@ -14,28 +14,28 @@ def false_matcher(r1, r2):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_registered_true_matcher(tmpdir, mockbin_request_url):
|
def test_registered_true_matcher(tmpdir, httpbin):
|
||||||
my_vcr = vcr.VCR()
|
my_vcr = vcr.VCR()
|
||||||
my_vcr.register_matcher("true", true_matcher)
|
my_vcr.register_matcher("true", true_matcher)
|
||||||
testfile = str(tmpdir.join("test.yml"))
|
testfile = str(tmpdir.join("test.yml"))
|
||||||
with my_vcr.use_cassette(testfile, match_on=["true"]):
|
with my_vcr.use_cassette(testfile, match_on=["true"]):
|
||||||
# These 2 different urls are stored as the same request
|
# These 2 different urls are stored as the same request
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
urlopen(mockbin_request_url + "/get")
|
urlopen(httpbin.url + "/get")
|
||||||
|
|
||||||
with my_vcr.use_cassette(testfile, match_on=["true"]):
|
with my_vcr.use_cassette(testfile, match_on=["true"]):
|
||||||
# I can get the response twice even though I only asked for it once
|
# I can get the response twice even though I only asked for it once
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_registered_false_matcher(tmpdir, mockbin_request_url):
|
def test_registered_false_matcher(tmpdir, httpbin):
|
||||||
my_vcr = vcr.VCR()
|
my_vcr = vcr.VCR()
|
||||||
my_vcr.register_matcher("false", false_matcher)
|
my_vcr.register_matcher("false", false_matcher)
|
||||||
testfile = str(tmpdir.join("test.yml"))
|
testfile = str(tmpdir.join("test.yml"))
|
||||||
with my_vcr.use_cassette(testfile, match_on=["false"]) as cass:
|
with my_vcr.use_cassette(testfile, match_on=["false"]) as cass:
|
||||||
# These 2 different urls are stored as different requests
|
# These 2 different urls are stored as different requests
|
||||||
urlopen(mockbin_request_url)
|
urlopen(httpbin.url)
|
||||||
urlopen(mockbin_request_url + "/get")
|
urlopen(httpbin.url + "/get")
|
||||||
assert len(cass) == 2
|
assert len(cass) == 2
|
||||||
|
|||||||
@@ -66,7 +66,7 @@ def test_load_cassette_with_custom_persister(tmpdir, httpbin):
|
|||||||
|
|
||||||
with my_vcr.use_cassette(test_fixture, serializer="json"):
|
with my_vcr.use_cassette(test_fixture, serializer="json"):
|
||||||
response = urlopen(httpbin.url).read()
|
response = urlopen(httpbin.url).read()
|
||||||
assert b"difficult sometimes" in response
|
assert b"HTTP Request & Response Service" in response
|
||||||
|
|
||||||
|
|
||||||
def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
|
def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
|
||||||
@@ -83,6 +83,5 @@ def test_load_cassette_persister_exception_handling(tmpdir, httpbin):
|
|||||||
with my_vcr.use_cassette("bad/encoding") as cass:
|
with my_vcr.use_cassette("bad/encoding") as cass:
|
||||||
assert len(cass) == 0
|
assert len(cass) == 0
|
||||||
|
|
||||||
with pytest.raises(ValueError):
|
with pytest.raises(ValueError), my_vcr.use_cassette("bad/buggy") as cass:
|
||||||
with my_vcr.use_cassette("bad/buggy") as cass:
|
pass
|
||||||
pass
|
|
||||||
|
|||||||
@@ -1,9 +1,11 @@
|
|||||||
"""Test requests' interaction with vcr"""
|
"""Test requests' interaction with vcr"""
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from assertions import assert_cassette_empty, assert_is_json_bytes
|
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
|
from ..assertions import assert_cassette_empty, assert_is_json_bytes
|
||||||
|
|
||||||
requests = pytest.importorskip("requests")
|
requests = pytest.importorskip("requests")
|
||||||
|
|
||||||
|
|
||||||
@@ -264,7 +266,7 @@ def test_nested_cassettes_with_session_created_before_nesting(httpbin_both, tmpd
|
|||||||
def test_post_file(tmpdir, httpbin_both):
|
def test_post_file(tmpdir, httpbin_both):
|
||||||
"""Ensure that we handle posting a file."""
|
"""Ensure that we handle posting a file."""
|
||||||
url = httpbin_both + "/post"
|
url = httpbin_both + "/post"
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open("tox.ini", "rb") as f:
|
with vcr.use_cassette(str(tmpdir.join("post_file.yaml"))) as cass, open(".editorconfig", "rb") as f:
|
||||||
original_response = requests.post(url, f).content
|
original_response = requests.post(url, f).content
|
||||||
|
|
||||||
# This also tests that we do the right thing with matching the body when they are files.
|
# This also tests that we do the right thing with matching the body when they are files.
|
||||||
@@ -272,10 +274,10 @@ def test_post_file(tmpdir, httpbin_both):
|
|||||||
str(tmpdir.join("post_file.yaml")),
|
str(tmpdir.join("post_file.yaml")),
|
||||||
match_on=("method", "scheme", "host", "port", "path", "query", "body"),
|
match_on=("method", "scheme", "host", "port", "path", "query", "body"),
|
||||||
) as cass:
|
) as cass:
|
||||||
with open("tox.ini", "rb") as f:
|
with open(".editorconfig", "rb") as f:
|
||||||
tox_content = f.read()
|
editorconfig = f.read()
|
||||||
assert cass.requests[0].body.read() == tox_content
|
assert cass.requests[0].body.read() == editorconfig
|
||||||
with open("tox.ini", "rb") as f:
|
with open(".editorconfig", "rb") as f:
|
||||||
new_response = requests.post(url, f).content
|
new_response = requests.post(url, f).content
|
||||||
assert original_response == new_response
|
assert original_response == new_response
|
||||||
|
|
||||||
|
|||||||
@@ -2,10 +2,10 @@ import http.client as httplib
|
|||||||
import json
|
import json
|
||||||
import zlib
|
import zlib
|
||||||
|
|
||||||
from assertions import assert_is_json_bytes
|
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
|
|
||||||
|
from ..assertions import assert_is_json_bytes
|
||||||
|
|
||||||
|
|
||||||
def _headers_are_case_insensitive(host, port):
|
def _headers_are_case_insensitive(host, port):
|
||||||
conn = httplib.HTTPConnection(host, port)
|
conn = httplib.HTTPConnection(host, port)
|
||||||
@@ -66,7 +66,7 @@ def test_original_decoded_response_is_not_modified(tmpdir, httpbin):
|
|||||||
|
|
||||||
# Assert that we do not modify the original response while appending
|
# Assert that we do not modify the original response while appending
|
||||||
# to the cassette.
|
# to the cassette.
|
||||||
assert "gzip" == inside.headers["content-encoding"]
|
assert inside.headers["content-encoding"] == "gzip"
|
||||||
|
|
||||||
# They should effectively be the same response.
|
# They should effectively be the same response.
|
||||||
inside_headers = (h for h in inside.headers.items() if h[0].lower() != "date")
|
inside_headers = (h for h in inside.headers.items() if h[0].lower() != "date")
|
||||||
@@ -122,7 +122,7 @@ def test_original_response_is_not_modified_by_before_filter(tmpdir, httpbin):
|
|||||||
# Furthermore, the responses should be identical.
|
# Furthermore, the responses should be identical.
|
||||||
inside_body = json.loads(inside.read())
|
inside_body = json.loads(inside.read())
|
||||||
outside_body = json.loads(outside.read())
|
outside_body = json.loads(outside.read())
|
||||||
assert not inside_body[field_to_scrub] == replacement
|
assert inside_body[field_to_scrub] != replacement
|
||||||
assert inside_body[field_to_scrub] == outside_body[field_to_scrub]
|
assert inside_body[field_to_scrub] == outside_body[field_to_scrub]
|
||||||
|
|
||||||
# Ensure that when a cassette exists, the scrubbed response is returned.
|
# Ensure that when a cassette exists, the scrubbed response is returned.
|
||||||
|
|||||||
@@ -1,32 +1,60 @@
|
|||||||
"""Test requests' interaction with vcr"""
|
"""Test requests' interaction with vcr"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import functools
|
||||||
|
import inspect
|
||||||
import json
|
import json
|
||||||
|
import os
|
||||||
|
import ssl
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from assertions import assert_cassette_empty, assert_is_json_bytes
|
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
from vcr.errors import CannotOverwriteExistingCassetteException
|
from vcr.errors import CannotOverwriteExistingCassetteException
|
||||||
|
|
||||||
|
from ..assertions import assert_cassette_empty, assert_is_json_bytes
|
||||||
|
|
||||||
tornado = pytest.importorskip("tornado")
|
tornado = pytest.importorskip("tornado")
|
||||||
|
gen = pytest.importorskip("tornado.gen")
|
||||||
http = pytest.importorskip("tornado.httpclient")
|
http = pytest.importorskip("tornado.httpclient")
|
||||||
|
|
||||||
# whether the current version of Tornado supports the raise_error argument for
|
# whether the current version of Tornado supports the raise_error argument for
|
||||||
# fetch().
|
# fetch().
|
||||||
supports_raise_error = tornado.version_info >= (4,)
|
supports_raise_error = tornado.version_info >= (4,)
|
||||||
|
raise_error_for_response_code_only = tornado.version_info >= (6,)
|
||||||
|
|
||||||
|
|
||||||
|
def gen_test(func):
|
||||||
|
@functools.wraps(func)
|
||||||
|
def wrapper(*args, **kwargs):
|
||||||
|
async def coro():
|
||||||
|
return await gen.coroutine(func)(*args, **kwargs)
|
||||||
|
|
||||||
|
return asyncio.run(coro())
|
||||||
|
|
||||||
|
# Patch the signature so pytest can inject fixtures
|
||||||
|
# we can't use wrapt.decorator because it returns a generator function
|
||||||
|
wrapper.__signature__ = inspect.signature(func)
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture(params=["simple", "curl", "default"])
|
@pytest.fixture(params=["simple", "curl", "default"])
|
||||||
def get_client(request):
|
def get_client(request):
|
||||||
|
ca_bundle_path = os.environ.get("REQUESTS_CA_BUNDLE")
|
||||||
|
ssl_ctx = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
|
||||||
|
ssl_ctx.load_verify_locations(cafile=ca_bundle_path)
|
||||||
|
ssl_ctx.verify_mode = ssl.CERT_REQUIRED
|
||||||
|
|
||||||
if request.param == "simple":
|
if request.param == "simple":
|
||||||
from tornado import simple_httpclient as simple
|
from tornado import simple_httpclient as simple
|
||||||
|
|
||||||
return lambda: simple.SimpleAsyncHTTPClient()
|
return lambda: simple.SimpleAsyncHTTPClient(defaults={"ssl_options": ssl_ctx})
|
||||||
elif request.param == "curl":
|
|
||||||
|
if request.param == "curl":
|
||||||
curl = pytest.importorskip("tornado.curl_httpclient")
|
curl = pytest.importorskip("tornado.curl_httpclient")
|
||||||
return lambda: curl.CurlAsyncHTTPClient()
|
return lambda: curl.CurlAsyncHTTPClient(defaults={"ca_certs": ca_bundle_path})
|
||||||
else:
|
|
||||||
return lambda: http.AsyncHTTPClient()
|
return lambda: http.AsyncHTTPClient(defaults={"ssl_options": ssl_ctx})
|
||||||
|
|
||||||
|
|
||||||
def get(client, url, **kwargs):
|
def get(client, url, **kwargs):
|
||||||
@@ -43,61 +71,65 @@ def post(client, url, data=None, **kwargs):
|
|||||||
return client.fetch(http.HTTPRequest(url, method="POST", **kwargs))
|
return client.fetch(http.HTTPRequest(url, method="POST", **kwargs))
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_status_code(get_client, scheme, tmpdir):
|
@gen_test
|
||||||
|
def test_status_code(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can read the status code"""
|
"""Ensure that we can read the status code"""
|
||||||
url = scheme + "://httpbin.org/"
|
url = httpbin_both.url
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
||||||
status_code = (yield get(get_client(), url)).code
|
status_code = (yield get(get_client(), url)).code
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))) as cass:
|
||||||
assert status_code == (yield get(get_client(), url)).code
|
assert status_code == (yield get(get_client(), url)).code
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_headers(get_client, scheme, tmpdir):
|
@gen_test
|
||||||
|
def test_headers(get_client, httpbin_both, tmpdir):
|
||||||
"""Ensure that we can read the headers back"""
|
"""Ensure that we can read the headers back"""
|
||||||
url = scheme + "://httpbin.org/"
|
url = httpbin_both.url
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
||||||
headers = (yield get(get_client(), url)).headers
|
headers = (yield get(get_client(), url)).headers
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))) as cass:
|
||||||
assert headers == (yield get(get_client(), url)).headers
|
assert headers == (yield get(get_client(), url)).headers
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_body(get_client, tmpdir, scheme):
|
@gen_test
|
||||||
|
def test_body(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure the responses are all identical enough"""
|
"""Ensure the responses are all identical enough"""
|
||||||
|
|
||||||
url = scheme + "://httpbin.org/bytes/1024"
|
url = httpbin_both.url + "/bytes/1024"
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
||||||
content = (yield get(get_client(), url)).body
|
content = (yield get(get_client(), url)).body
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("body.yaml"))) as cass:
|
||||||
assert content == (yield get(get_client(), url)).body
|
assert content == (yield get(get_client(), url)).body
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@gen_test
|
||||||
def test_effective_url(get_client, scheme, tmpdir):
|
def test_effective_url(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that the effective_url is captured"""
|
"""Ensure that the effective_url is captured"""
|
||||||
url = scheme + "://mockbin.org/redirect/301?url=/html"
|
url = httpbin.url + "/redirect/1"
|
||||||
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("url.yaml"))):
|
||||||
effective_url = (yield get(get_client(), url)).effective_url
|
effective_url = (yield get(get_client(), url)).effective_url
|
||||||
assert effective_url == scheme + "://mockbin.org/redirect/301/0"
|
assert effective_url == httpbin.url + "/get"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("url.yaml"))) as cass:
|
||||||
assert effective_url == (yield get(get_client(), url)).effective_url
|
assert effective_url == (yield get(get_client(), url)).effective_url
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_auth(get_client, tmpdir, scheme):
|
@gen_test
|
||||||
|
def test_auth(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can handle basic auth"""
|
"""Ensure that we can handle basic auth"""
|
||||||
auth = ("user", "passwd")
|
auth = ("user", "passwd")
|
||||||
url = scheme + "://httpbin.org/basic-auth/user/passwd"
|
url = httpbin_both.url + "/basic-auth/user/passwd"
|
||||||
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("auth.yaml"))):
|
||||||
one = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
one = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
||||||
|
|
||||||
@@ -105,14 +137,15 @@ def test_auth(get_client, tmpdir, scheme):
|
|||||||
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
two = yield get(get_client(), url, auth_username=auth[0], auth_password=auth[1])
|
||||||
assert one.body == two.body
|
assert one.body == two.body
|
||||||
assert one.code == two.code
|
assert one.code == two.code
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_auth_failed(get_client, tmpdir, scheme):
|
@gen_test
|
||||||
|
def test_auth_failed(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can save failed auth statuses"""
|
"""Ensure that we can save failed auth statuses"""
|
||||||
auth = ("user", "wrongwrongwrong")
|
auth = ("user", "wrongwrongwrong")
|
||||||
url = scheme + "://httpbin.org/basic-auth/user/passwd"
|
url = httpbin_both.url + "/basic-auth/user/passwd"
|
||||||
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("auth-failed.yaml"))) as cass:
|
||||||
# Ensure that this is empty to begin with
|
# Ensure that this is empty to begin with
|
||||||
assert_cassette_empty(cass)
|
assert_cassette_empty(cass)
|
||||||
@@ -128,14 +161,15 @@ def test_auth_failed(get_client, tmpdir, scheme):
|
|||||||
assert exc_info.value.code == 401
|
assert exc_info.value.code == 401
|
||||||
assert one.body == two.body
|
assert one.body == two.body
|
||||||
assert one.code == two.code == 401
|
assert one.code == two.code == 401
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_post(get_client, tmpdir, scheme):
|
@gen_test
|
||||||
|
def test_post(get_client, tmpdir, httpbin_both):
|
||||||
"""Ensure that we can post and cache the results"""
|
"""Ensure that we can post and cache the results"""
|
||||||
data = {"key1": "value1", "key2": "value2"}
|
data = {"key1": "value1", "key2": "value2"}
|
||||||
url = scheme + "://httpbin.org/post"
|
url = httpbin_both.url + "/post"
|
||||||
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
|
||||||
req1 = (yield post(get_client(), url, data)).body
|
req1 = (yield post(get_client(), url, data)).body
|
||||||
|
|
||||||
@@ -143,13 +177,13 @@ def test_post(get_client, tmpdir, scheme):
|
|||||||
req2 = (yield post(get_client(), url, data)).body
|
req2 = (yield post(get_client(), url, data)).body
|
||||||
|
|
||||||
assert req1 == req2
|
assert req1 == req2
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@gen_test
|
||||||
def test_redirects(get_client, tmpdir, scheme):
|
def test_redirects(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that we can handle redirects"""
|
"""Ensure that we can handle redirects"""
|
||||||
url = scheme + "://mockbin.org/redirect/301?url=bytes/1024"
|
url = httpbin + "/redirect-to?url=bytes/1024&status_code=301"
|
||||||
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("requests.yaml"))):
|
||||||
content = (yield get(get_client(), url)).body
|
content = (yield get(get_client(), url)).body
|
||||||
|
|
||||||
@@ -158,32 +192,38 @@ def test_redirects(get_client, tmpdir, scheme):
|
|||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_cross_scheme(get_client, tmpdir, scheme):
|
@gen_test
|
||||||
|
def test_cross_scheme(get_client, tmpdir, httpbin, httpbin_secure):
|
||||||
"""Ensure that requests between schemes are treated separately"""
|
"""Ensure that requests between schemes are treated separately"""
|
||||||
# First fetch a url under http, and then again under https and then
|
# First fetch a url under http, and then again under https and then
|
||||||
# ensure that we haven't served anything out of cache, and we have two
|
# ensure that we haven't served anything out of cache, and we have two
|
||||||
# requests / response pairs in the cassette
|
# requests / response pairs in the cassette
|
||||||
|
|
||||||
|
url = httpbin.url
|
||||||
|
url_secure = httpbin_secure.url
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
||||||
yield get(get_client(), "https://httpbin.org/")
|
yield get(get_client(), url)
|
||||||
yield get(get_client(), "http://httpbin.org/")
|
yield get(get_client(), url_secure)
|
||||||
assert cass.play_count == 0
|
assert cass.play_count == 0
|
||||||
assert len(cass) == 2
|
assert len(cass) == 2
|
||||||
|
|
||||||
# Then repeat the same requests and ensure both were replayed.
|
# Then repeat the same requests and ensure both were replayed.
|
||||||
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
||||||
yield get(get_client(), "https://httpbin.org/")
|
yield get(get_client(), url)
|
||||||
yield get(get_client(), "http://httpbin.org/")
|
yield get(get_client(), url_secure)
|
||||||
assert cass.play_count == 2
|
assert cass.play_count == 2
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_gzip(get_client, tmpdir, scheme):
|
@gen_test
|
||||||
|
def test_gzip(get_client, tmpdir, httpbin_both):
|
||||||
"""
|
"""
|
||||||
Ensure that httpclient is able to automatically decompress the response
|
Ensure that httpclient is able to automatically decompress the response
|
||||||
body
|
body
|
||||||
"""
|
"""
|
||||||
url = scheme + "://httpbin.org/gzip"
|
url = httpbin_both + "/gzip"
|
||||||
|
|
||||||
# use_gzip was renamed to decompress_response in 4.0
|
# use_gzip was renamed to decompress_response in 4.0
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
@@ -199,23 +239,26 @@ def test_gzip(get_client, tmpdir, scheme):
|
|||||||
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("gzip.yaml"))) as cass:
|
||||||
response = yield get(get_client(), url, **kwargs)
|
response = yield get(get_client(), url, **kwargs)
|
||||||
assert_is_json_bytes(response.body)
|
assert_is_json_bytes(response.body)
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_https_with_cert_validation_disabled(get_client, tmpdir):
|
@gen_test
|
||||||
|
def test_https_with_cert_validation_disabled(get_client, tmpdir, httpbin_secure):
|
||||||
cass_path = str(tmpdir.join("cert_validation_disabled.yaml"))
|
cass_path = str(tmpdir.join("cert_validation_disabled.yaml"))
|
||||||
|
|
||||||
|
url = httpbin_secure.url
|
||||||
|
|
||||||
with vcr.use_cassette(cass_path):
|
with vcr.use_cassette(cass_path):
|
||||||
yield get(get_client(), "https://httpbin.org", validate_cert=False)
|
yield get(get_client(), url, validate_cert=False)
|
||||||
|
|
||||||
with vcr.use_cassette(cass_path) as cass:
|
with vcr.use_cassette(cass_path) as cass:
|
||||||
yield get(get_client(), "https://httpbin.org", validate_cert=False)
|
yield get(get_client(), url, validate_cert=False)
|
||||||
assert 1 == cass.play_count
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@gen_test
|
||||||
def test_unsupported_features_raises_in_future(get_client, tmpdir):
|
def test_unsupported_features_raises_in_future(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that the exception for an AsyncHTTPClient feature not being
|
"""Ensure that the exception for an AsyncHTTPClient feature not being
|
||||||
supported is raised inside the future."""
|
supported is raised inside the future."""
|
||||||
|
|
||||||
@@ -223,7 +266,7 @@ def test_unsupported_features_raises_in_future(get_client, tmpdir):
|
|||||||
raise AssertionError("Did not expect to be called.")
|
raise AssertionError("Did not expect to be called.")
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("invalid.yaml"))):
|
||||||
future = get(get_client(), "http://httpbin.org", streaming_callback=callback)
|
future = get(get_client(), httpbin.url, streaming_callback=callback)
|
||||||
|
|
||||||
with pytest.raises(Exception) as excinfo:
|
with pytest.raises(Exception) as excinfo:
|
||||||
yield future
|
yield future
|
||||||
@@ -232,7 +275,11 @@ def test_unsupported_features_raises_in_future(get_client, tmpdir):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
|
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.skipif(
|
||||||
|
raise_error_for_response_code_only,
|
||||||
|
reason="raise_error only ignores HTTPErrors due to response code",
|
||||||
|
)
|
||||||
|
@gen_test
|
||||||
def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
|
def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
|
||||||
"""Ensure that the exception for an AsyncHTTPClient feature not being
|
"""Ensure that the exception for an AsyncHTTPClient feature not being
|
||||||
supported is not raised if raise_error=False."""
|
supported is not raised if raise_error=False."""
|
||||||
@@ -251,44 +298,53 @@ def test_unsupported_features_raise_error_disabled(get_client, tmpdir):
|
|||||||
assert "not yet supported by VCR" in str(response.error)
|
assert "not yet supported by VCR" in str(response.error)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir):
|
@gen_test
|
||||||
|
def test_cannot_overwrite_cassette_raises_in_future(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that CannotOverwriteExistingCassetteException is raised inside
|
"""Ensure that CannotOverwriteExistingCassetteException is raised inside
|
||||||
the future."""
|
the future."""
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
url = httpbin.url
|
||||||
yield get(get_client(), "http://httpbin.org/get")
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
future = get(get_client(), "http://httpbin.org/headers")
|
yield get(get_client(), url + "/get")
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
|
future = get(get_client(), url + "/headers")
|
||||||
|
|
||||||
with pytest.raises(CannotOverwriteExistingCassetteException):
|
with pytest.raises(CannotOverwriteExistingCassetteException):
|
||||||
yield future
|
yield future
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
|
@pytest.mark.skipif(not supports_raise_error, reason="raise_error unavailable in tornado <= 3")
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.skipif(
|
||||||
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir):
|
raise_error_for_response_code_only,
|
||||||
|
reason="raise_error only ignores HTTPErrors due to response code",
|
||||||
|
)
|
||||||
|
@gen_test
|
||||||
|
def test_cannot_overwrite_cassette_raise_error_disabled(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that CannotOverwriteExistingCassetteException is not raised if
|
"""Ensure that CannotOverwriteExistingCassetteException is not raised if
|
||||||
raise_error=False in the fetch() call."""
|
raise_error=False in the fetch() call."""
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
url = httpbin.url
|
||||||
yield get(get_client(), "http://httpbin.org/get", raise_error=False)
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
response = yield get(get_client(), "http://httpbin.org/headers", raise_error=False)
|
yield get(get_client(), url + "/get", raise_error=False)
|
||||||
|
|
||||||
|
with vcr.use_cassette(str(tmpdir.join("overwrite.yaml"))):
|
||||||
|
response = yield get(get_client(), url + "/headers", raise_error=False)
|
||||||
|
|
||||||
assert isinstance(response.error, CannotOverwriteExistingCassetteException)
|
assert isinstance(response.error, CannotOverwriteExistingCassetteException)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@gen_test
|
||||||
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
|
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
|
||||||
def test_tornado_with_decorator_use_cassette(get_client):
|
def test_tornado_with_decorator_use_cassette(get_client):
|
||||||
response = yield get_client().fetch(http.HTTPRequest("http://www.google.com/", method="GET"))
|
response = yield get_client().fetch(http.HTTPRequest("http://www.google.com/", method="GET"))
|
||||||
assert response.body.decode("utf-8") == "not actually google"
|
assert response.body.decode("utf-8") == "not actually google"
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@gen_test
|
||||||
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
|
@vcr.use_cassette(path_transformer=vcr.default_vcr.ensure_suffix(".yaml"))
|
||||||
def test_tornado_exception_can_be_caught(get_client):
|
def test_tornado_exception_can_be_caught(get_client):
|
||||||
try:
|
try:
|
||||||
@@ -302,45 +358,53 @@ def test_tornado_exception_can_be_caught(get_client):
|
|||||||
assert e.code == 404
|
assert e.code == 404
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_existing_references_get_patched(tmpdir):
|
@gen_test
|
||||||
|
def test_existing_references_get_patched(tmpdir, httpbin):
|
||||||
from tornado.httpclient import AsyncHTTPClient
|
from tornado.httpclient import AsyncHTTPClient
|
||||||
|
|
||||||
|
url = httpbin.url + "/get"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
||||||
client = AsyncHTTPClient()
|
client = AsyncHTTPClient()
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_existing_instances_get_patched(get_client, tmpdir):
|
@gen_test
|
||||||
|
def test_existing_instances_get_patched(get_client, tmpdir, httpbin):
|
||||||
"""Ensure that existing instances of AsyncHTTPClient get patched upon
|
"""Ensure that existing instances of AsyncHTTPClient get patched upon
|
||||||
entering VCR context."""
|
entering VCR context."""
|
||||||
|
|
||||||
|
url = httpbin.url + "/get"
|
||||||
client = get_client()
|
client = get_client()
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
||||||
yield get(client, "http://httpbin.org/get")
|
yield get(client, url)
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.gen_test
|
@pytest.mark.online
|
||||||
def test_request_time_is_set(get_client, tmpdir):
|
@gen_test
|
||||||
|
def test_request_time_is_set(get_client, tmpdir, httpbin):
|
||||||
"""Ensures that the request_time on HTTPResponses is set."""
|
"""Ensures that the request_time on HTTPResponses is set."""
|
||||||
|
|
||||||
|
url = httpbin.url + "/get"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))):
|
||||||
client = get_client()
|
client = get_client()
|
||||||
response = yield get(client, "http://httpbin.org/get")
|
response = yield get(client, url)
|
||||||
assert response.request_time is not None
|
assert response.request_time is not None
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
with vcr.use_cassette(str(tmpdir.join("data.yaml"))) as cass:
|
||||||
client = get_client()
|
client = get_client()
|
||||||
response = yield get(client, "http://httpbin.org/get")
|
response = yield get(client, url)
|
||||||
assert response.request_time is not None
|
assert response.request_time is not None
|
||||||
assert cass.play_count == 1
|
assert cass.play_count == 1
|
||||||
|
|||||||
@@ -1,146 +0,0 @@
|
|||||||
"""Integration tests with urllib2"""
|
|
||||||
|
|
||||||
import ssl
|
|
||||||
from urllib.parse import urlencode
|
|
||||||
from urllib.request import urlopen
|
|
||||||
|
|
||||||
import pytest_httpbin.certs
|
|
||||||
from assertions import assert_cassette_has_one_response
|
|
||||||
from pytest import mark
|
|
||||||
|
|
||||||
# Internal imports
|
|
||||||
import vcr
|
|
||||||
|
|
||||||
|
|
||||||
def urlopen_with_cafile(*args, **kwargs):
|
|
||||||
context = ssl.create_default_context(cafile=pytest_httpbin.certs.where())
|
|
||||||
context.check_hostname = False
|
|
||||||
kwargs["context"] = context
|
|
||||||
try:
|
|
||||||
return urlopen(*args, **kwargs)
|
|
||||||
except TypeError:
|
|
||||||
# python2/pypi don't let us override this
|
|
||||||
del kwargs["cafile"]
|
|
||||||
return urlopen(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def test_response_code(httpbin_both, tmpdir):
|
|
||||||
"""Ensure we can read a response code from a fetch"""
|
|
||||||
url = httpbin_both.url
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
|
||||||
code = urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("atts.yaml"))):
|
|
||||||
assert code == urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
|
|
||||||
def test_random_body(httpbin_both, tmpdir):
|
|
||||||
"""Ensure we can read the content, and that it's served from cache"""
|
|
||||||
url = httpbin_both.url + "/bytes/1024"
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
|
||||||
body = urlopen_with_cafile(url).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("body.yaml"))):
|
|
||||||
assert body == urlopen_with_cafile(url).read()
|
|
||||||
|
|
||||||
|
|
||||||
def test_response_headers(httpbin_both, tmpdir):
|
|
||||||
"""Ensure we can get information from the response"""
|
|
||||||
url = httpbin_both.url
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
open1 = urlopen_with_cafile(url).info().items()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
open2 = urlopen_with_cafile(url).info().items()
|
|
||||||
|
|
||||||
assert sorted(open1) == sorted(open2)
|
|
||||||
|
|
||||||
|
|
||||||
@mark.online
|
|
||||||
def test_effective_url(tmpdir):
|
|
||||||
"""Ensure that the effective_url is captured"""
|
|
||||||
url = "http://mockbin.org/redirect/301"
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
effective_url = urlopen_with_cafile(url).geturl()
|
|
||||||
assert effective_url == "http://mockbin.org/redirect/301/0"
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("headers.yaml"))):
|
|
||||||
assert effective_url == urlopen_with_cafile(url).geturl()
|
|
||||||
|
|
||||||
|
|
||||||
def test_multiple_requests(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that we can cache multiple requests"""
|
|
||||||
urls = [httpbin_both.url, httpbin_both.url, httpbin_both.url + "/get", httpbin_both.url + "/bytes/1024"]
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("multiple.yaml"))) as cass:
|
|
||||||
[urlopen_with_cafile(url) for url in urls]
|
|
||||||
assert len(cass) == len(urls)
|
|
||||||
|
|
||||||
|
|
||||||
def test_get_data(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that it works with query data"""
|
|
||||||
data = urlencode({"some": 1, "data": "here"})
|
|
||||||
url = httpbin_both.url + "/get?" + data
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
|
|
||||||
res1 = urlopen_with_cafile(url).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("get_data.yaml"))):
|
|
||||||
res2 = urlopen_with_cafile(url).read()
|
|
||||||
assert res1 == res2
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_data(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that it works when posting data"""
|
|
||||||
data = urlencode({"some": 1, "data": "here"}).encode("utf-8")
|
|
||||||
url = httpbin_both.url + "/post"
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
|
|
||||||
res1 = urlopen_with_cafile(url, data).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
|
|
||||||
res2 = urlopen_with_cafile(url, data).read()
|
|
||||||
assert len(cass) == 1
|
|
||||||
|
|
||||||
assert res1 == res2
|
|
||||||
assert_cassette_has_one_response(cass)
|
|
||||||
|
|
||||||
|
|
||||||
def test_post_unicode_data(httpbin_both, tmpdir):
|
|
||||||
"""Ensure that it works when posting unicode data"""
|
|
||||||
data = urlencode({"snowman": "☃".encode()}).encode("utf-8")
|
|
||||||
url = httpbin_both.url + "/post"
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))):
|
|
||||||
res1 = urlopen_with_cafile(url, data).read()
|
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("post_data.yaml"))) as cass:
|
|
||||||
res2 = urlopen_with_cafile(url, data).read()
|
|
||||||
assert len(cass) == 1
|
|
||||||
|
|
||||||
assert res1 == res2
|
|
||||||
assert_cassette_has_one_response(cass)
|
|
||||||
|
|
||||||
|
|
||||||
def test_cross_scheme(tmpdir, httpbin_secure, httpbin):
|
|
||||||
"""Ensure that requests between schemes are treated separately"""
|
|
||||||
# First fetch a url under https, and then again under https and then
|
|
||||||
# ensure that we haven't served anything out of cache, and we have two
|
|
||||||
# requests / response pairs in the cassette
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("cross_scheme.yaml"))) as cass:
|
|
||||||
urlopen_with_cafile(httpbin_secure.url)
|
|
||||||
urlopen_with_cafile(httpbin.url)
|
|
||||||
assert len(cass) == 2
|
|
||||||
assert cass.play_count == 0
|
|
||||||
|
|
||||||
|
|
||||||
def test_decorator(httpbin_both, tmpdir):
|
|
||||||
"""Test the decorator version of VCR.py"""
|
|
||||||
url = httpbin_both.url
|
|
||||||
|
|
||||||
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
|
|
||||||
def inner1():
|
|
||||||
return urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
@vcr.use_cassette(str(tmpdir.join("atts.yaml")))
|
|
||||||
def inner2():
|
|
||||||
return urlopen_with_cafile(url).getcode()
|
|
||||||
|
|
||||||
assert inner1() == inner2()
|
|
||||||
@@ -4,12 +4,13 @@
|
|||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import pytest_httpbin
|
import pytest_httpbin
|
||||||
from assertions import assert_cassette_empty, assert_is_json_bytes
|
|
||||||
|
|
||||||
import vcr
|
import vcr
|
||||||
from vcr.patch import force_reset
|
from vcr.patch import force_reset
|
||||||
from vcr.stubs.compat import get_headers
|
from vcr.stubs.compat import get_headers
|
||||||
|
|
||||||
|
from ..assertions import assert_cassette_empty, assert_is_json_bytes
|
||||||
|
|
||||||
urllib3 = pytest.importorskip("urllib3")
|
urllib3 = pytest.importorskip("urllib3")
|
||||||
|
|
||||||
|
|
||||||
@@ -99,9 +100,9 @@ def test_post(tmpdir, httpbin_both, verify_pool_mgr):
|
|||||||
|
|
||||||
|
|
||||||
@pytest.mark.online
|
@pytest.mark.online
|
||||||
def test_redirects(tmpdir, verify_pool_mgr):
|
def test_redirects(tmpdir, verify_pool_mgr, httpbin):
|
||||||
"""Ensure that we can handle redirects"""
|
"""Ensure that we can handle redirects"""
|
||||||
url = "http://mockbin.org/redirect/301"
|
url = httpbin.url + "/redirect/1"
|
||||||
|
|
||||||
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
|
with vcr.use_cassette(str(tmpdir.join("verify_pool_mgr.yaml"))):
|
||||||
content = verify_pool_mgr.request("GET", url).data
|
content = verify_pool_mgr.request("GET", url).data
|
||||||
|
|||||||
@@ -62,8 +62,7 @@ def test_flickr_should_respond_with_200(tmpdir):
|
|||||||
|
|
||||||
def test_cookies(tmpdir, httpbin):
|
def test_cookies(tmpdir, httpbin):
|
||||||
testfile = str(tmpdir.join("cookies.yml"))
|
testfile = str(tmpdir.join("cookies.yml"))
|
||||||
with vcr.use_cassette(testfile):
|
with vcr.use_cassette(testfile), requests.Session() as s:
|
||||||
s = requests.Session()
|
|
||||||
s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2")
|
s.get(httpbin.url + "/cookies/set?k1=v1&k2=v2")
|
||||||
assert s.cookies.keys() == ["k1", "k2"]
|
assert s.cookies.keys() == ["k1", "k2"]
|
||||||
|
|
||||||
|
|||||||
@@ -11,6 +11,7 @@ import yaml
|
|||||||
from vcr.cassette import Cassette
|
from vcr.cassette import Cassette
|
||||||
from vcr.errors import UnhandledHTTPRequestError
|
from vcr.errors import UnhandledHTTPRequestError
|
||||||
from vcr.patch import force_reset
|
from vcr.patch import force_reset
|
||||||
|
from vcr.request import Request
|
||||||
from vcr.stubs import VCRHTTPSConnection
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
|
||||||
|
|
||||||
@@ -226,9 +227,11 @@ def test_nesting_cassette_context_managers(*args):
|
|||||||
assert_get_response_body_is("first_response")
|
assert_get_response_body_is("first_response")
|
||||||
|
|
||||||
# Make sure a second cassette can supersede the first
|
# Make sure a second cassette can supersede the first
|
||||||
with Cassette.use(path="test") as second_cassette:
|
with (
|
||||||
with mock.patch.object(second_cassette, "play_response", return_value=second_response):
|
Cassette.use(path="test") as second_cassette,
|
||||||
assert_get_response_body_is("second_response")
|
mock.patch.object(second_cassette, "play_response", return_value=second_response),
|
||||||
|
):
|
||||||
|
assert_get_response_body_is("second_response")
|
||||||
|
|
||||||
# Now the first cassette should be back in effect
|
# Now the first cassette should be back in effect
|
||||||
assert_get_response_body_is("first_response")
|
assert_get_response_body_is("first_response")
|
||||||
@@ -410,3 +413,25 @@ def test_find_requests_with_most_matches_many_similar_requests(mock_get_matchers
|
|||||||
(1, ["method", "path"], [("query", "failed : query")]),
|
(1, ["method", "path"], [("query", "failed : query")]),
|
||||||
(3, ["method", "path"], [("query", "failed : query")]),
|
(3, ["method", "path"], [("query", "failed : query")]),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_used_interactions(tmpdir):
|
||||||
|
interactions = [
|
||||||
|
{"request": {"body": "", "uri": "foo1", "method": "GET", "headers": {}}, "response": "bar1"},
|
||||||
|
{"request": {"body": "", "uri": "foo2", "method": "GET", "headers": {}}, "response": "bar2"},
|
||||||
|
{"request": {"body": "", "uri": "foo3", "method": "GET", "headers": {}}, "response": "bar3"},
|
||||||
|
]
|
||||||
|
file = tmpdir.join("test_cassette.yml")
|
||||||
|
file.write(yaml.dump({"interactions": [interactions[0], interactions[1]]}))
|
||||||
|
|
||||||
|
cassette = Cassette.load(path=str(file))
|
||||||
|
request = Request._from_dict(interactions[1]["request"])
|
||||||
|
cassette.play_response(request)
|
||||||
|
assert len(cassette._played_interactions) < len(cassette._old_interactions)
|
||||||
|
|
||||||
|
request = Request._from_dict(interactions[2]["request"])
|
||||||
|
cassette.append(request, interactions[2]["response"])
|
||||||
|
assert len(cassette._new_interactions()) == 1
|
||||||
|
|
||||||
|
used_interactions = cassette._played_interactions + cassette._new_interactions()
|
||||||
|
assert len(used_interactions) == 2
|
||||||
|
|||||||
@@ -8,15 +8,13 @@ from vcr.serializers import compat, jsonserializer, yamlserializer
|
|||||||
|
|
||||||
|
|
||||||
def test_deserialize_old_yaml_cassette():
|
def test_deserialize_old_yaml_cassette():
|
||||||
with open("tests/fixtures/migration/old_cassette.yaml") as f:
|
with open("tests/fixtures/migration/old_cassette.yaml") as f, pytest.raises(ValueError):
|
||||||
with pytest.raises(ValueError):
|
deserialize(f.read(), yamlserializer)
|
||||||
deserialize(f.read(), yamlserializer)
|
|
||||||
|
|
||||||
|
|
||||||
def test_deserialize_old_json_cassette():
|
def test_deserialize_old_json_cassette():
|
||||||
with open("tests/fixtures/migration/old_cassette.json") as f:
|
with open("tests/fixtures/migration/old_cassette.json") as f, pytest.raises(ValueError):
|
||||||
with pytest.raises(ValueError):
|
deserialize(f.read(), jsonserializer)
|
||||||
deserialize(f.read(), jsonserializer)
|
|
||||||
|
|
||||||
|
|
||||||
def test_deserialize_new_yaml_cassette():
|
def test_deserialize_new_yaml_cassette():
|
||||||
@@ -76,7 +74,7 @@ def test_deserialize_py2py3_yaml_cassette(tmpdir, req_body, expect):
|
|||||||
cfile = tmpdir.join("test_cassette.yaml")
|
cfile = tmpdir.join("test_cassette.yaml")
|
||||||
cfile.write(REQBODY_TEMPLATE.format(req_body=req_body))
|
cfile.write(REQBODY_TEMPLATE.format(req_body=req_body))
|
||||||
with open(str(cfile)) as f:
|
with open(str(cfile)) as f:
|
||||||
(requests, responses) = deserialize(f.read(), yamlserializer)
|
(requests, _) = deserialize(f.read(), yamlserializer)
|
||||||
assert requests[0].body == expect
|
assert requests[0].body == expect
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,8 +1,12 @@
|
|||||||
|
import contextlib
|
||||||
|
import http.client as httplib
|
||||||
|
from io import BytesIO
|
||||||
|
from tempfile import NamedTemporaryFile
|
||||||
from unittest import mock
|
from unittest import mock
|
||||||
|
|
||||||
from pytest import mark
|
from pytest import mark
|
||||||
|
|
||||||
from vcr import mode
|
from vcr import mode, use_cassette
|
||||||
from vcr.cassette import Cassette
|
from vcr.cassette import Cassette
|
||||||
from vcr.stubs import VCRHTTPSConnection
|
from vcr.stubs import VCRHTTPSConnection
|
||||||
|
|
||||||
@@ -16,7 +20,56 @@ class TestVCRConnection:
|
|||||||
@mark.online
|
@mark.online
|
||||||
@mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=False)
|
@mock.patch("vcr.cassette.Cassette.can_play_response_for", return_value=False)
|
||||||
def testing_connect(*args):
|
def testing_connect(*args):
|
||||||
vcr_connection = VCRHTTPSConnection("www.google.com")
|
with contextlib.closing(VCRHTTPSConnection("www.google.com")) as vcr_connection:
|
||||||
vcr_connection.cassette = Cassette("test", record_mode=mode.ALL)
|
vcr_connection.cassette = Cassette("test", record_mode=mode.ALL)
|
||||||
vcr_connection.real_connection.connect()
|
vcr_connection.real_connection.connect()
|
||||||
assert vcr_connection.real_connection.sock is not None
|
assert vcr_connection.real_connection.sock is not None
|
||||||
|
|
||||||
|
def test_body_consumed_once_stream(self, tmpdir, httpbin):
|
||||||
|
self._test_body_consumed_once(
|
||||||
|
tmpdir,
|
||||||
|
httpbin,
|
||||||
|
BytesIO(b"1234567890"),
|
||||||
|
BytesIO(b"9876543210"),
|
||||||
|
BytesIO(b"9876543210"),
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_body_consumed_once_iterator(self, tmpdir, httpbin):
|
||||||
|
self._test_body_consumed_once(
|
||||||
|
tmpdir,
|
||||||
|
httpbin,
|
||||||
|
iter([b"1234567890"]),
|
||||||
|
iter([b"9876543210"]),
|
||||||
|
iter([b"9876543210"]),
|
||||||
|
)
|
||||||
|
|
||||||
|
# data2 and data3 should serve the same data, potentially as iterators
|
||||||
|
def _test_body_consumed_once(
|
||||||
|
self,
|
||||||
|
tmpdir,
|
||||||
|
httpbin,
|
||||||
|
data1,
|
||||||
|
data2,
|
||||||
|
data3,
|
||||||
|
):
|
||||||
|
with NamedTemporaryFile(dir=tmpdir, suffix=".yml") as f:
|
||||||
|
testpath = f.name
|
||||||
|
# NOTE: ``use_cassette`` is not okay with the file existing
|
||||||
|
# already. So we using ``.close()`` to not only
|
||||||
|
# close but also delete the empty file, before we start.
|
||||||
|
f.close()
|
||||||
|
host, port = httpbin.host, httpbin.port
|
||||||
|
match_on = ["method", "uri", "body"]
|
||||||
|
with use_cassette(testpath, match_on=match_on):
|
||||||
|
conn1 = httplib.HTTPConnection(host, port)
|
||||||
|
conn1.request("POST", "/anything", body=data1)
|
||||||
|
conn1.getresponse()
|
||||||
|
conn2 = httplib.HTTPConnection(host, port)
|
||||||
|
conn2.request("POST", "/anything", body=data2)
|
||||||
|
conn2.getresponse()
|
||||||
|
with use_cassette(testpath, match_on=match_on) as cass:
|
||||||
|
conn3 = httplib.HTTPConnection(host, port)
|
||||||
|
conn3.request("POST", "/anything", body=data3)
|
||||||
|
conn3.getresponse()
|
||||||
|
assert cass.play_counts[0] == 0
|
||||||
|
assert cass.play_counts[1] == 1
|
||||||
|
|||||||
@@ -178,7 +178,7 @@ def test_testcase_playback(tmpdir):
|
|||||||
return str(cassette_dir)
|
return str(cassette_dir)
|
||||||
|
|
||||||
test = run_testcase(MyTest)[0][0]
|
test = run_testcase(MyTest)[0][0]
|
||||||
assert b"illustrative examples" in test.response
|
assert b"Example Domain" in test.response
|
||||||
assert len(test.cassette.requests) == 1
|
assert len(test.cassette.requests) == 1
|
||||||
assert test.cassette.play_count == 0
|
assert test.cassette.play_count == 0
|
||||||
|
|
||||||
@@ -186,7 +186,7 @@ def test_testcase_playback(tmpdir):
|
|||||||
|
|
||||||
test2 = run_testcase(MyTest)[0][0]
|
test2 = run_testcase(MyTest)[0][0]
|
||||||
assert test.cassette is not test2.cassette
|
assert test.cassette is not test2.cassette
|
||||||
assert b"illustrative examples" in test.response
|
assert b"Example Domain" in test.response
|
||||||
assert len(test2.cassette.requests) == 1
|
assert len(test2.cassette.requests) == 1
|
||||||
assert test2.cassette.play_count == 1
|
assert test2.cassette.play_count == 1
|
||||||
|
|
||||||
|
|||||||
33
tests/unit/test_util.py
Normal file
33
tests/unit/test_util.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from io import BytesIO, StringIO
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from vcr import request
|
||||||
|
from vcr.util import read_body
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
"input_, expected_output",
|
||||||
|
[
|
||||||
|
(BytesIO(b"Stream"), b"Stream"),
|
||||||
|
(StringIO("Stream"), b"Stream"),
|
||||||
|
(iter(["StringIter"]), b"StringIter"),
|
||||||
|
(iter(["String", "Iter"]), b"StringIter"),
|
||||||
|
(iter([b"BytesIter"]), b"BytesIter"),
|
||||||
|
(iter([b"Bytes", b"Iter"]), b"BytesIter"),
|
||||||
|
(iter([70, 111, 111]), b"Foo"),
|
||||||
|
(iter([]), b""),
|
||||||
|
("String", b"String"),
|
||||||
|
(b"Bytes", b"Bytes"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_read_body(input_, expected_output):
|
||||||
|
r = request.Request("POST", "http://host.com/", input_, {})
|
||||||
|
assert read_body(r) == expected_output
|
||||||
|
|
||||||
|
|
||||||
|
def test_unsupported_read_body():
|
||||||
|
r = request.Request("POST", "http://host.com/", iter([[]]), {})
|
||||||
|
with pytest.raises(ValueError) as excinfo:
|
||||||
|
assert read_body(r)
|
||||||
|
assert excinfo.value.args == ("Body type <class 'list'> not supported",)
|
||||||
@@ -372,3 +372,19 @@ def test_path_class_as_cassette():
|
|||||||
)
|
)
|
||||||
with use_cassette(path):
|
with use_cassette(path):
|
||||||
pass
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def test_use_cassette_generator_return():
|
||||||
|
ret_val = object()
|
||||||
|
|
||||||
|
vcr = VCR()
|
||||||
|
|
||||||
|
@vcr.use_cassette("test")
|
||||||
|
def gen():
|
||||||
|
return ret_val
|
||||||
|
yield
|
||||||
|
|
||||||
|
with pytest.raises(StopIteration) as exc_info:
|
||||||
|
next(gen())
|
||||||
|
|
||||||
|
assert exc_info.value.value is ret_val
|
||||||
|
|||||||
82
tox.ini
82
tox.ini
@@ -1,82 +0,0 @@
|
|||||||
[tox]
|
|
||||||
skip_missing_interpreters=true
|
|
||||||
envlist =
|
|
||||||
cov-clean,
|
|
||||||
lint,
|
|
||||||
{py38,py39,py310,py311}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3,aiohttp,httpx},
|
|
||||||
{py310,py311}-{requests-urllib3-2,urllib3-2},
|
|
||||||
{pypy3}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3},
|
|
||||||
{py310}-httpx019,
|
|
||||||
cov-report
|
|
||||||
|
|
||||||
|
|
||||||
[gh-actions]
|
|
||||||
python =
|
|
||||||
3.8: py38
|
|
||||||
3.9: py39
|
|
||||||
3.10: py310, lint
|
|
||||||
3.11: py311
|
|
||||||
pypy-3: pypy3
|
|
||||||
|
|
||||||
# Coverage environment tasks: cov-clean and cov-report
|
|
||||||
# https://pytest-cov.readthedocs.io/en/latest/tox.html
|
|
||||||
[testenv:cov-clean]
|
|
||||||
deps = coverage
|
|
||||||
skip_install=true
|
|
||||||
commands = coverage erase
|
|
||||||
|
|
||||||
[testenv:cov-report]
|
|
||||||
deps = coverage
|
|
||||||
skip_install=true
|
|
||||||
commands =
|
|
||||||
coverage html
|
|
||||||
coverage report --fail-under=90
|
|
||||||
|
|
||||||
[testenv:lint]
|
|
||||||
skipsdist = True
|
|
||||||
commands =
|
|
||||||
black --version
|
|
||||||
black --check --diff .
|
|
||||||
ruff --version
|
|
||||||
ruff check .
|
|
||||||
deps =
|
|
||||||
black
|
|
||||||
ruff
|
|
||||||
basepython = python3.10
|
|
||||||
|
|
||||||
[testenv]
|
|
||||||
# Need to use develop install so that paths
|
|
||||||
# for aggregate code coverage combine
|
|
||||||
usedevelop=true
|
|
||||||
commands =
|
|
||||||
./runtests.sh --cov=./vcr --cov-branch --cov-report=xml --cov-append {posargs}
|
|
||||||
allowlist_externals =
|
|
||||||
./runtests.sh
|
|
||||||
deps =
|
|
||||||
Werkzeug==2.0.3
|
|
||||||
pytest
|
|
||||||
pytest-httpbin>=1.0.1
|
|
||||||
pytest-cov
|
|
||||||
PyYAML
|
|
||||||
ipaddress
|
|
||||||
requests: requests>=2.22.0
|
|
||||||
httplib2: httplib2
|
|
||||||
urllib3-1: urllib3<2
|
|
||||||
urllib3-2: urllib3<3
|
|
||||||
boto3: boto3
|
|
||||||
aiohttp: aiohttp
|
|
||||||
aiohttp: pytest-asyncio
|
|
||||||
aiohttp: pytest-aiohttp
|
|
||||||
httpx: httpx
|
|
||||||
{py38,py39,py310}-{httpx}: httpx
|
|
||||||
{py38,py39,py310}-{httpx}: pytest-asyncio
|
|
||||||
httpx: httpx>0.19
|
|
||||||
httpx019: httpx==0.19
|
|
||||||
{py38,py39,py310}-{httpx}: pytest-asyncio
|
|
||||||
depends =
|
|
||||||
lint,{py38,py39,py310,py311,pypy3}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3},{py310,py311}-{requests-urllib3-2,urllib3-2},{py38,py39,py310,py311}-{aiohttp},{py38,py39,py310,py311}-{httpx}: cov-clean
|
|
||||||
cov-report: lint,{py38,py39,py310,py311,pypy3}-{requests-urllib3-1,httplib2,urllib3-1,tornado4,boto3},{py310,py311}-{requests-urllib3-2,urllib3-2},{py38,py39,py310,py311}-{aiohttp}
|
|
||||||
passenv =
|
|
||||||
AWS_ACCESS_KEY_ID
|
|
||||||
AWS_DEFAULT_REGION
|
|
||||||
AWS_SECRET_ACCESS_KEY
|
|
||||||
@@ -4,7 +4,7 @@ from logging import NullHandler
|
|||||||
from .config import VCR
|
from .config import VCR
|
||||||
from .record_mode import RecordMode as mode # noqa: F401
|
from .record_mode import RecordMode as mode # noqa: F401
|
||||||
|
|
||||||
__version__ = "5.1.0"
|
__version__ = "8.1.0"
|
||||||
|
|
||||||
logging.getLogger(__name__).addHandler(NullHandler())
|
logging.getLogger(__name__).addHandler(NullHandler())
|
||||||
|
|
||||||
|
|||||||
@@ -3,8 +3,7 @@ import contextlib
|
|||||||
import copy
|
import copy
|
||||||
import inspect
|
import inspect
|
||||||
import logging
|
import logging
|
||||||
import sys
|
from inspect import iscoroutinefunction
|
||||||
from asyncio import iscoroutinefunction
|
|
||||||
|
|
||||||
import wrapt
|
import wrapt
|
||||||
|
|
||||||
@@ -126,20 +125,7 @@ class CassetteContextDecorator:
|
|||||||
duration of the generator.
|
duration of the generator.
|
||||||
"""
|
"""
|
||||||
with self as cassette:
|
with self as cassette:
|
||||||
coroutine = fn(cassette)
|
return (yield from fn(cassette))
|
||||||
# We don't need to catch StopIteration. The caller (Tornado's
|
|
||||||
# gen.coroutine, for example) will handle that.
|
|
||||||
to_yield = next(coroutine)
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
to_send = yield to_yield
|
|
||||||
except Exception:
|
|
||||||
to_yield = coroutine.throw(*sys.exc_info())
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
to_yield = coroutine.send(to_send)
|
|
||||||
except StopIteration:
|
|
||||||
break
|
|
||||||
|
|
||||||
def _handle_function(self, fn):
|
def _handle_function(self, fn):
|
||||||
with self as cassette:
|
with self as cassette:
|
||||||
@@ -191,6 +177,7 @@ class Cassette:
|
|||||||
custom_patches=(),
|
custom_patches=(),
|
||||||
inject=False,
|
inject=False,
|
||||||
allow_playback_repeats=False,
|
allow_playback_repeats=False,
|
||||||
|
drop_unused_requests=False,
|
||||||
):
|
):
|
||||||
self._persister = persister or FilesystemPersister
|
self._persister = persister or FilesystemPersister
|
||||||
self._path = path
|
self._path = path
|
||||||
@@ -203,6 +190,7 @@ class Cassette:
|
|||||||
self.record_mode = record_mode
|
self.record_mode = record_mode
|
||||||
self.custom_patches = custom_patches
|
self.custom_patches = custom_patches
|
||||||
self.allow_playback_repeats = allow_playback_repeats
|
self.allow_playback_repeats = allow_playback_repeats
|
||||||
|
self.drop_unused_requests = drop_unused_requests
|
||||||
|
|
||||||
# self.data is the list of (req, resp) tuples
|
# self.data is the list of (req, resp) tuples
|
||||||
self.data = []
|
self.data = []
|
||||||
@@ -210,6 +198,10 @@ class Cassette:
|
|||||||
self.dirty = False
|
self.dirty = False
|
||||||
self.rewound = False
|
self.rewound = False
|
||||||
|
|
||||||
|
# Subsets of self.data to store old and played interactions
|
||||||
|
self._old_interactions = []
|
||||||
|
self._played_interactions = []
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def play_count(self):
|
def play_count(self):
|
||||||
return sum(self.play_counts.values())
|
return sum(self.play_counts.values())
|
||||||
@@ -229,14 +221,14 @@ class Cassette:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def write_protected(self):
|
def write_protected(self):
|
||||||
return self.rewound and self.record_mode == RecordMode.ONCE or self.record_mode == RecordMode.NONE
|
return (self.rewound and self.record_mode == RecordMode.ONCE) or self.record_mode == RecordMode.NONE
|
||||||
|
|
||||||
def append(self, request, response):
|
def append(self, request, response):
|
||||||
"""Add a request, response pair to this cassette"""
|
"""Add a request, response pair to this cassette"""
|
||||||
log.info("Appending request %s and response %s", request, response)
|
|
||||||
request = self._before_record_request(request)
|
request = self._before_record_request(request)
|
||||||
if not request:
|
if not request:
|
||||||
return
|
return
|
||||||
|
log.info("Appending request %s and response %s", request, response)
|
||||||
# Deepcopy is here because mutation of `response` will corrupt the
|
# Deepcopy is here because mutation of `response` will corrupt the
|
||||||
# real response.
|
# real response.
|
||||||
response = copy.deepcopy(response)
|
response = copy.deepcopy(response)
|
||||||
@@ -271,6 +263,7 @@ class Cassette:
|
|||||||
for index, response in self._responses(request):
|
for index, response in self._responses(request):
|
||||||
if self.play_counts[index] == 0 or self.allow_playback_repeats:
|
if self.play_counts[index] == 0 or self.allow_playback_repeats:
|
||||||
self.play_counts[index] += 1
|
self.play_counts[index] += 1
|
||||||
|
self._played_interactions.append((request, response))
|
||||||
return response
|
return response
|
||||||
# The cassette doesn't contain the request asked for.
|
# The cassette doesn't contain the request asked for.
|
||||||
raise UnhandledHTTPRequestError(
|
raise UnhandledHTTPRequestError(
|
||||||
@@ -331,19 +324,44 @@ class Cassette:
|
|||||||
|
|
||||||
return final_best_matches
|
return final_best_matches
|
||||||
|
|
||||||
|
def _new_interactions(self):
|
||||||
|
"""List of new HTTP interactions (request/response tuples)"""
|
||||||
|
new_interactions = []
|
||||||
|
for request, response in self.data:
|
||||||
|
if all(
|
||||||
|
not requests_match(request, old_request, self._match_on)
|
||||||
|
for old_request, _ in self._old_interactions
|
||||||
|
):
|
||||||
|
new_interactions.append((request, response))
|
||||||
|
return new_interactions
|
||||||
|
|
||||||
def _as_dict(self):
|
def _as_dict(self):
|
||||||
return {"requests": self.requests, "responses": self.responses}
|
return {"requests": self.requests, "responses": self.responses}
|
||||||
|
|
||||||
|
def _build_used_interactions_dict(self):
|
||||||
|
interactions = self._played_interactions + self._new_interactions()
|
||||||
|
cassete_dict = {
|
||||||
|
"requests": [request for request, _ in interactions],
|
||||||
|
"responses": [response for _, response in interactions],
|
||||||
|
}
|
||||||
|
return cassete_dict
|
||||||
|
|
||||||
def _save(self, force=False):
|
def _save(self, force=False):
|
||||||
|
if self.drop_unused_requests and len(self._played_interactions) < len(self._old_interactions):
|
||||||
|
cassete_dict = self._build_used_interactions_dict()
|
||||||
|
force = True
|
||||||
|
else:
|
||||||
|
cassete_dict = self._as_dict()
|
||||||
if force or self.dirty:
|
if force or self.dirty:
|
||||||
self._persister.save_cassette(self._path, self._as_dict(), serializer=self._serializer)
|
self._persister.save_cassette(self._path, cassete_dict, serializer=self._serializer)
|
||||||
self.dirty = False
|
self.dirty = False
|
||||||
|
|
||||||
def _load(self):
|
def _load(self):
|
||||||
try:
|
try:
|
||||||
requests, responses = self._persister.load_cassette(self._path, serializer=self._serializer)
|
requests, responses = self._persister.load_cassette(self._path, serializer=self._serializer)
|
||||||
for request, response in zip(requests, responses):
|
for request, response in zip(requests, responses, strict=False):
|
||||||
self.append(request, response)
|
self.append(request, response)
|
||||||
|
self._old_interactions.append((request, response))
|
||||||
self.dirty = False
|
self.dirty = False
|
||||||
self.rewound = True
|
self.rewound = True
|
||||||
except (CassetteDecodeError, CassetteNotFoundError):
|
except (CassetteDecodeError, CassetteNotFoundError):
|
||||||
|
|||||||
@@ -48,6 +48,7 @@ class VCR:
|
|||||||
func_path_generator=None,
|
func_path_generator=None,
|
||||||
decode_compressed_response=False,
|
decode_compressed_response=False,
|
||||||
record_on_exception=True,
|
record_on_exception=True,
|
||||||
|
drop_unused_requests=False,
|
||||||
):
|
):
|
||||||
self.serializer = serializer
|
self.serializer = serializer
|
||||||
self.match_on = match_on
|
self.match_on = match_on
|
||||||
@@ -81,6 +82,7 @@ class VCR:
|
|||||||
self.decode_compressed_response = decode_compressed_response
|
self.decode_compressed_response = decode_compressed_response
|
||||||
self.record_on_exception = record_on_exception
|
self.record_on_exception = record_on_exception
|
||||||
self._custom_patches = tuple(custom_patches)
|
self._custom_patches = tuple(custom_patches)
|
||||||
|
self.drop_unused_requests = drop_unused_requests
|
||||||
|
|
||||||
def _get_serializer(self, serializer_name):
|
def _get_serializer(self, serializer_name):
|
||||||
try:
|
try:
|
||||||
@@ -151,6 +153,7 @@ class VCR:
|
|||||||
"func_path_generator": func_path_generator,
|
"func_path_generator": func_path_generator,
|
||||||
"allow_playback_repeats": kwargs.get("allow_playback_repeats", False),
|
"allow_playback_repeats": kwargs.get("allow_playback_repeats", False),
|
||||||
"record_on_exception": record_on_exception,
|
"record_on_exception": record_on_exception,
|
||||||
|
"drop_unused_requests": kwargs.get("drop_unused_requests", self.drop_unused_requests),
|
||||||
}
|
}
|
||||||
path = kwargs.get("path")
|
path = kwargs.get("path")
|
||||||
if path:
|
if path:
|
||||||
|
|||||||
@@ -6,6 +6,49 @@ from urllib.parse import urlencode, urlparse, urlunparse
|
|||||||
|
|
||||||
from .util import CaseInsensitiveDict
|
from .util import CaseInsensitiveDict
|
||||||
|
|
||||||
|
try:
|
||||||
|
# This supports both brotli & brotlipy packages
|
||||||
|
import brotli
|
||||||
|
except ImportError:
|
||||||
|
try:
|
||||||
|
import brotlicffi as brotli
|
||||||
|
except ImportError:
|
||||||
|
brotli = None
|
||||||
|
|
||||||
|
|
||||||
|
def decompress_deflate(body):
|
||||||
|
try:
|
||||||
|
return zlib.decompress(body)
|
||||||
|
except zlib.error:
|
||||||
|
# Assume the response was already decompressed
|
||||||
|
return body
|
||||||
|
|
||||||
|
|
||||||
|
def decompress_gzip(body):
|
||||||
|
# To (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16.
|
||||||
|
try:
|
||||||
|
return zlib.decompress(body, zlib.MAX_WBITS | 16)
|
||||||
|
except zlib.error:
|
||||||
|
# Assume the response was already decompressed
|
||||||
|
return body
|
||||||
|
|
||||||
|
|
||||||
|
AVAILABLE_DECOMPRESSORS = {
|
||||||
|
"deflate": decompress_deflate,
|
||||||
|
"gzip": decompress_gzip,
|
||||||
|
}
|
||||||
|
|
||||||
|
if brotli is not None:
|
||||||
|
|
||||||
|
def decompress_brotli(body):
|
||||||
|
try:
|
||||||
|
return brotli.decompress(body)
|
||||||
|
except brotli.error:
|
||||||
|
# Assume the response was already decompressed
|
||||||
|
return body
|
||||||
|
|
||||||
|
AVAILABLE_DECOMPRESSORS["br"] = decompress_brotli
|
||||||
|
|
||||||
|
|
||||||
def replace_headers(request, replacements):
|
def replace_headers(request, replacements):
|
||||||
"""Replace headers in request according to replacements.
|
"""Replace headers in request according to replacements.
|
||||||
@@ -136,45 +179,30 @@ def remove_post_data_parameters(request, post_data_parameters_to_remove):
|
|||||||
|
|
||||||
def decode_response(response):
|
def decode_response(response):
|
||||||
"""
|
"""
|
||||||
If the response is compressed with gzip or deflate:
|
If the response is compressed with any supported compression (gzip,
|
||||||
|
deflate, br if available):
|
||||||
1. decompress the response body
|
1. decompress the response body
|
||||||
2. delete the content-encoding header
|
2. delete the content-encoding header
|
||||||
3. update content-length header to decompressed length
|
3. update content-length header to decompressed length
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def is_compressed(headers):
|
|
||||||
encoding = headers.get("content-encoding", [])
|
|
||||||
return encoding and encoding[0] in ("gzip", "deflate")
|
|
||||||
|
|
||||||
def decompress_body(body, encoding):
|
|
||||||
"""Returns decompressed body according to encoding using zlib.
|
|
||||||
to (de-)compress gzip format, use wbits = zlib.MAX_WBITS | 16
|
|
||||||
"""
|
|
||||||
if not body:
|
|
||||||
return ""
|
|
||||||
if encoding == "gzip":
|
|
||||||
try:
|
|
||||||
return zlib.decompress(body, zlib.MAX_WBITS | 16)
|
|
||||||
except zlib.error:
|
|
||||||
return body # assumes that the data was already decompressed
|
|
||||||
else: # encoding == 'deflate'
|
|
||||||
try:
|
|
||||||
return zlib.decompress(body)
|
|
||||||
except zlib.error:
|
|
||||||
return body # assumes that the data was already decompressed
|
|
||||||
|
|
||||||
# Deepcopy here in case `headers` contain objects that could
|
# Deepcopy here in case `headers` contain objects that could
|
||||||
# be mutated by a shallow copy and corrupt the real response.
|
# be mutated by a shallow copy and corrupt the real response.
|
||||||
response = copy.deepcopy(response)
|
response = copy.deepcopy(response)
|
||||||
headers = CaseInsensitiveDict(response["headers"])
|
headers = CaseInsensitiveDict(response["headers"])
|
||||||
if is_compressed(headers):
|
content_encoding = headers.get("content-encoding")
|
||||||
encoding = headers["content-encoding"][0]
|
if not content_encoding:
|
||||||
headers["content-encoding"].remove(encoding)
|
return response
|
||||||
if not headers["content-encoding"]:
|
decompressor = AVAILABLE_DECOMPRESSORS.get(content_encoding[0])
|
||||||
del headers["content-encoding"]
|
if not decompressor:
|
||||||
|
return response
|
||||||
|
|
||||||
new_body = decompress_body(response["body"]["string"], encoding)
|
headers["content-encoding"].remove(content_encoding[0])
|
||||||
response["body"]["string"] = new_body
|
if not headers["content-encoding"]:
|
||||||
headers["content-length"] = [str(len(new_body))]
|
del headers["content-encoding"]
|
||||||
response["headers"] = dict(headers)
|
|
||||||
|
new_body = decompressor(response["body"]["string"])
|
||||||
|
response["body"]["string"] = new_body
|
||||||
|
headers["content-length"] = [str(len(new_body))]
|
||||||
|
response["headers"] = dict(headers)
|
||||||
return response
|
return response
|
||||||
|
|||||||
@@ -3,11 +3,10 @@ import logging
|
|||||||
import urllib
|
import urllib
|
||||||
import xmlrpc.client
|
import xmlrpc.client
|
||||||
from string import hexdigits
|
from string import hexdigits
|
||||||
from typing import List, Set
|
|
||||||
|
|
||||||
from .util import read_body
|
from .util import read_body
|
||||||
|
|
||||||
_HEXDIG_CODE_POINTS: Set[int] = {ord(s.encode("ascii")) for s in hexdigits}
|
_HEXDIG_CODE_POINTS: set[int] = {ord(s.encode("ascii")) for s in hexdigits}
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -109,7 +108,7 @@ def _dechunk(body):
|
|||||||
CHUNK_GAP = b"\r\n"
|
CHUNK_GAP = b"\r\n"
|
||||||
BODY_LEN: int = len(body)
|
BODY_LEN: int = len(body)
|
||||||
|
|
||||||
chunks: List[bytes] = []
|
chunks: list[bytes] = []
|
||||||
pos: int = 0
|
pos: int = 0
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
@@ -163,7 +162,7 @@ def _get_transformers(request):
|
|||||||
|
|
||||||
|
|
||||||
def requests_match(r1, r2, matchers):
|
def requests_match(r1, r2, matchers):
|
||||||
successes, failures = get_matchers_results(r1, r2, matchers)
|
_, failures = get_matchers_results(r1, r2, matchers)
|
||||||
if failures:
|
if failures:
|
||||||
log.debug(f"Requests {r1} and {r2} differ.\nFailure details:\n{failures}")
|
log.debug(f"Requests {r1} and {r2} differ.\nFailure details:\n{failures}")
|
||||||
return len(failures) == 0
|
return len(failures) == 0
|
||||||
|
|||||||
72
vcr/patch.py
72
vcr/patch.py
@@ -1,4 +1,5 @@
|
|||||||
"""Utilities for patching in cassettes"""
|
"""Utilities for patching in cassettes"""
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import functools
|
import functools
|
||||||
import http.client as httplib
|
import http.client as httplib
|
||||||
@@ -67,14 +68,6 @@ else:
|
|||||||
_HTTPSConnectionWithTimeout = httplib2.HTTPSConnectionWithTimeout
|
_HTTPSConnectionWithTimeout = httplib2.HTTPSConnectionWithTimeout
|
||||||
_SCHEME_TO_CONNECTION = httplib2.SCHEME_TO_CONNECTION
|
_SCHEME_TO_CONNECTION = httplib2.SCHEME_TO_CONNECTION
|
||||||
|
|
||||||
# Try to save the original types for boto
|
|
||||||
try:
|
|
||||||
import boto.https_connection
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
_CertValidatingHTTPSConnection = boto.https_connection.CertValidatingHTTPSConnection
|
|
||||||
|
|
||||||
# Try to save the original types for Tornado
|
# Try to save the original types for Tornado
|
||||||
try:
|
try:
|
||||||
import tornado.simple_httpclient
|
import tornado.simple_httpclient
|
||||||
@@ -99,12 +92,12 @@ else:
|
|||||||
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import httpx
|
import httpcore
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
pass
|
pass
|
||||||
else:
|
else:
|
||||||
_HttpxSyncClient_send = httpx.Client.send
|
_HttpcoreConnectionPool_handle_request = httpcore.ConnectionPool.handle_request
|
||||||
_HttpxAsyncClient_send = httpx.AsyncClient.send
|
_HttpcoreAsyncConnectionPool_handle_async_request = httpcore.AsyncConnectionPool.handle_async_request
|
||||||
|
|
||||||
|
|
||||||
class CassettePatcherBuilder:
|
class CassettePatcherBuilder:
|
||||||
@@ -126,10 +119,9 @@ class CassettePatcherBuilder:
|
|||||||
self._boto3(),
|
self._boto3(),
|
||||||
self._urllib3(),
|
self._urllib3(),
|
||||||
self._httplib2(),
|
self._httplib2(),
|
||||||
self._boto(),
|
|
||||||
self._tornado(),
|
self._tornado(),
|
||||||
self._aiohttp(),
|
self._aiohttp(),
|
||||||
self._httpx(),
|
self._httpcore(),
|
||||||
self._build_patchers_from_mock_triples(self._cassette.custom_patches),
|
self._build_patchers_from_mock_triples(self._cassette.custom_patches),
|
||||||
)
|
)
|
||||||
|
|
||||||
@@ -269,21 +261,14 @@ class CassettePatcherBuilder:
|
|||||||
|
|
||||||
yield cpool, "HTTPConnectionWithTimeout", VCRHTTPConnectionWithTimeout
|
yield cpool, "HTTPConnectionWithTimeout", VCRHTTPConnectionWithTimeout
|
||||||
yield cpool, "HTTPSConnectionWithTimeout", VCRHTTPSConnectionWithTimeout
|
yield cpool, "HTTPSConnectionWithTimeout", VCRHTTPSConnectionWithTimeout
|
||||||
yield cpool, "SCHEME_TO_CONNECTION", {
|
yield (
|
||||||
"http": VCRHTTPConnectionWithTimeout,
|
cpool,
|
||||||
"https": VCRHTTPSConnectionWithTimeout,
|
"SCHEME_TO_CONNECTION",
|
||||||
}
|
{
|
||||||
|
"http": VCRHTTPConnectionWithTimeout,
|
||||||
@_build_patchers_from_mock_triples_decorator
|
"https": VCRHTTPSConnectionWithTimeout,
|
||||||
def _boto(self):
|
},
|
||||||
try:
|
)
|
||||||
import boto.https_connection as cpool
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
from .stubs.boto_stubs import VCRCertValidatingHTTPSConnection
|
|
||||||
|
|
||||||
yield cpool, "CertValidatingHTTPSConnection", VCRCertValidatingHTTPSConnection
|
|
||||||
|
|
||||||
@_build_patchers_from_mock_triples_decorator
|
@_build_patchers_from_mock_triples_decorator
|
||||||
def _tornado(self):
|
def _tornado(self):
|
||||||
@@ -319,19 +304,22 @@ class CassettePatcherBuilder:
|
|||||||
yield client.ClientSession, "_request", new_request
|
yield client.ClientSession, "_request", new_request
|
||||||
|
|
||||||
@_build_patchers_from_mock_triples_decorator
|
@_build_patchers_from_mock_triples_decorator
|
||||||
def _httpx(self):
|
def _httpcore(self):
|
||||||
try:
|
try:
|
||||||
import httpx
|
import httpcore
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
return
|
return
|
||||||
else:
|
else:
|
||||||
from .stubs.httpx_stubs import async_vcr_send, sync_vcr_send
|
from .stubs.httpcore_stubs import vcr_handle_async_request, vcr_handle_request
|
||||||
|
|
||||||
new_async_client_send = async_vcr_send(self._cassette, _HttpxAsyncClient_send)
|
new_handle_async_request = vcr_handle_async_request(
|
||||||
yield httpx.AsyncClient, "send", new_async_client_send
|
self._cassette,
|
||||||
|
_HttpcoreAsyncConnectionPool_handle_async_request,
|
||||||
|
)
|
||||||
|
yield httpcore.AsyncConnectionPool, "handle_async_request", new_handle_async_request
|
||||||
|
|
||||||
new_sync_client_send = sync_vcr_send(self._cassette, _HttpxSyncClient_send)
|
new_handle_request = vcr_handle_request(self._cassette, _HttpcoreConnectionPool_handle_request)
|
||||||
yield httpx.Client, "send", new_sync_client_send
|
yield httpcore.ConnectionPool, "handle_request", new_handle_request
|
||||||
|
|
||||||
def _urllib3_patchers(self, cpool, conn, stubs):
|
def _urllib3_patchers(self, cpool, conn, stubs):
|
||||||
http_connection_remover = ConnectionRemover(
|
http_connection_remover = ConnectionRemover(
|
||||||
@@ -388,10 +376,6 @@ class ConnectionRemover:
|
|||||||
if isinstance(connection, self._connection_class):
|
if isinstance(connection, self._connection_class):
|
||||||
self._connection_pool_to_connections.setdefault(pool, set()).add(connection)
|
self._connection_pool_to_connections.setdefault(pool, set()).add(connection)
|
||||||
|
|
||||||
def remove_connection_to_pool_entry(self, pool, connection):
|
|
||||||
if isinstance(connection, self._connection_class):
|
|
||||||
self._connection_pool_to_connections[self._connection_class].remove(connection)
|
|
||||||
|
|
||||||
def __enter__(self):
|
def __enter__(self):
|
||||||
return self
|
return self
|
||||||
|
|
||||||
@@ -402,10 +386,13 @@ class ConnectionRemover:
|
|||||||
connection = pool.pool.get()
|
connection = pool.pool.get()
|
||||||
if isinstance(connection, self._connection_class):
|
if isinstance(connection, self._connection_class):
|
||||||
connections.remove(connection)
|
connections.remove(connection)
|
||||||
|
connection.close()
|
||||||
else:
|
else:
|
||||||
readd_connections.append(connection)
|
readd_connections.append(connection)
|
||||||
for connection in readd_connections:
|
for connection in readd_connections:
|
||||||
pool._put_conn(connection)
|
pool._put_conn(connection)
|
||||||
|
for connection in connections:
|
||||||
|
connection.close()
|
||||||
|
|
||||||
|
|
||||||
def reset_patchers():
|
def reset_patchers():
|
||||||
@@ -447,13 +434,6 @@ def reset_patchers():
|
|||||||
yield mock.patch.object(cpool, "HTTPSConnectionWithTimeout", _HTTPSConnectionWithTimeout)
|
yield mock.patch.object(cpool, "HTTPSConnectionWithTimeout", _HTTPSConnectionWithTimeout)
|
||||||
yield mock.patch.object(cpool, "SCHEME_TO_CONNECTION", _SCHEME_TO_CONNECTION)
|
yield mock.patch.object(cpool, "SCHEME_TO_CONNECTION", _SCHEME_TO_CONNECTION)
|
||||||
|
|
||||||
try:
|
|
||||||
import boto.https_connection as cpool
|
|
||||||
except ImportError: # pragma: no cover
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
yield mock.patch.object(cpool, "CertValidatingHTTPSConnection", _CertValidatingHTTPSConnection)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import tornado.simple_httpclient as simple
|
import tornado.simple_httpclient as simple
|
||||||
except ImportError: # pragma: no cover
|
except ImportError: # pragma: no cover
|
||||||
|
|||||||
@@ -1,9 +1,10 @@
|
|||||||
import logging
|
import logging
|
||||||
import warnings
|
import warnings
|
||||||
|
from contextlib import suppress
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from urllib.parse import parse_qsl, urlparse
|
from urllib.parse import parse_qsl, urlparse
|
||||||
|
|
||||||
from .util import CaseInsensitiveDict
|
from .util import CaseInsensitiveDict, _is_nonsequence_iterator
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
log = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -17,13 +18,30 @@ class Request:
|
|||||||
self.method = method
|
self.method = method
|
||||||
self.uri = uri
|
self.uri = uri
|
||||||
self._was_file = hasattr(body, "read")
|
self._was_file = hasattr(body, "read")
|
||||||
|
self._was_iter = _is_nonsequence_iterator(body)
|
||||||
if self._was_file:
|
if self._was_file:
|
||||||
self.body = body.read()
|
if hasattr(body, "tell"):
|
||||||
|
tell = body.tell()
|
||||||
|
self.body = body.read()
|
||||||
|
body.seek(tell)
|
||||||
|
else:
|
||||||
|
self.body = body.read()
|
||||||
|
elif self._was_iter:
|
||||||
|
self.body = list(body)
|
||||||
else:
|
else:
|
||||||
self.body = body
|
self.body = body
|
||||||
self.headers = headers
|
self.headers = headers
|
||||||
log.debug("Invoking Request %s", self.uri)
|
log.debug("Invoking Request %s", self.uri)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def uri(self):
|
||||||
|
return self._uri
|
||||||
|
|
||||||
|
@uri.setter
|
||||||
|
def uri(self, uri):
|
||||||
|
self._uri = uri
|
||||||
|
self.parsed_uri = urlparse(uri)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def headers(self):
|
def headers(self):
|
||||||
return self._headers
|
return self._headers
|
||||||
@@ -36,7 +54,11 @@ class Request:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def body(self):
|
def body(self):
|
||||||
return BytesIO(self._body) if self._was_file else self._body
|
if self._was_file:
|
||||||
|
return BytesIO(self._body)
|
||||||
|
if self._was_iter:
|
||||||
|
return iter(self._body)
|
||||||
|
return self._body
|
||||||
|
|
||||||
@body.setter
|
@body.setter
|
||||||
def body(self, value):
|
def body(self, value):
|
||||||
@@ -54,30 +76,28 @@ class Request:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def scheme(self):
|
def scheme(self):
|
||||||
return urlparse(self.uri).scheme
|
return self.parsed_uri.scheme
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def host(self):
|
def host(self):
|
||||||
return urlparse(self.uri).hostname
|
return self.parsed_uri.hostname
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def port(self):
|
def port(self):
|
||||||
parse_uri = urlparse(self.uri)
|
port = self.parsed_uri.port
|
||||||
port = parse_uri.port
|
|
||||||
if port is None:
|
if port is None:
|
||||||
try:
|
with suppress(KeyError):
|
||||||
port = {"https": 443, "http": 80}[parse_uri.scheme]
|
port = {"https": 443, "http": 80}[self.parsed_uri.scheme]
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
return port
|
return port
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path(self):
|
def path(self):
|
||||||
return urlparse(self.uri).path
|
return self.parsed_uri.path
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def query(self):
|
def query(self):
|
||||||
q = urlparse(self.uri).query
|
q = self.parsed_uri.query
|
||||||
return sorted(parse_qsl(q))
|
return sorted(parse_qsl(q))
|
||||||
|
|
||||||
# alias for backwards compatibility
|
# alias for backwards compatibility
|
||||||
|
|||||||
@@ -53,7 +53,7 @@ def serialize(cassette_dict, serializer):
|
|||||||
"request": compat.convert_to_unicode(request._to_dict()),
|
"request": compat.convert_to_unicode(request._to_dict()),
|
||||||
"response": compat.convert_to_unicode(response),
|
"response": compat.convert_to_unicode(response),
|
||||||
}
|
}
|
||||||
for request, response in zip(cassette_dict["requests"], cassette_dict["responses"])
|
for request, response in zip(cassette_dict["requests"], cassette_dict["responses"], strict=False)
|
||||||
]
|
]
|
||||||
data = {"version": CASSETTE_FORMAT_VERSION, "interactions": interactions}
|
data = {"version": CASSETTE_FORMAT_VERSION, "interactions": interactions}
|
||||||
return serializer.serialize(data)
|
return serializer.serialize(data)
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ def convert_body_to_unicode(resp):
|
|||||||
If the request or responses body is bytes, decode it to a string
|
If the request or responses body is bytes, decode it to a string
|
||||||
(for python3 support)
|
(for python3 support)
|
||||||
"""
|
"""
|
||||||
if type(resp) is not dict:
|
if not isinstance(resp, dict):
|
||||||
# Some of the tests just serialize and deserialize a string.
|
# Some of the tests just serialize and deserialize a string.
|
||||||
return _convert_string_to_unicode(resp)
|
return _convert_string_to_unicode(resp)
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -1,7 +1,4 @@
|
|||||||
try:
|
import json
|
||||||
import simplejson as json
|
|
||||||
except ImportError:
|
|
||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
def deserialize(cassette_string):
|
def deserialize(cassette_string):
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
"""Stubs for patching HTTP and HTTPS requests"""
|
"""Stubs for patching HTTP and HTTPS requests"""
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
|
from contextlib import suppress
|
||||||
from http.client import HTTPConnection, HTTPResponse, HTTPSConnection
|
from http.client import HTTPConnection, HTTPResponse, HTTPSConnection
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
@@ -66,6 +67,7 @@ class VCRHTTPResponse(HTTPResponse):
|
|||||||
self.reason = recorded_response["status"]["message"]
|
self.reason = recorded_response["status"]["message"]
|
||||||
self.status = self.code = recorded_response["status"]["code"]
|
self.status = self.code = recorded_response["status"]["code"]
|
||||||
self.version = None
|
self.version = None
|
||||||
|
self.version_string = None
|
||||||
self._content = BytesIO(self.recorded_response["body"]["string"])
|
self._content = BytesIO(self.recorded_response["body"]["string"])
|
||||||
self._closed = False
|
self._closed = False
|
||||||
self._original_response = self # for requests.session.Session cookie extraction
|
self._original_response = self # for requests.session.Session cookie extraction
|
||||||
@@ -76,7 +78,7 @@ class VCRHTTPResponse(HTTPResponse):
|
|||||||
# libraries trying to process a chunked response. By removing the
|
# libraries trying to process a chunked response. By removing the
|
||||||
# transfer-encoding: chunked header, this should cause the downstream
|
# transfer-encoding: chunked header, this should cause the downstream
|
||||||
# libraries to process this as a non-chunked response.
|
# libraries to process this as a non-chunked response.
|
||||||
te_key = [h for h in headers.keys() if h.upper() == "TRANSFER-ENCODING"]
|
te_key = [h for h in headers if h.upper() == "TRANSFER-ENCODING"]
|
||||||
if te_key:
|
if te_key:
|
||||||
del headers[te_key[0]]
|
del headers[te_key[0]]
|
||||||
self.headers = self.msg = parse_headers(headers)
|
self.headers = self.msg = parse_headers(headers)
|
||||||
@@ -186,22 +188,34 @@ class VCRConnection:
|
|||||||
"""
|
"""
|
||||||
Returns empty string for the default port and ':port' otherwise
|
Returns empty string for the default port and ':port' otherwise
|
||||||
"""
|
"""
|
||||||
port = self.real_connection.port
|
port = (
|
||||||
|
self.real_connection.port
|
||||||
|
if not self.real_connection._tunnel_host
|
||||||
|
else self.real_connection._tunnel_port
|
||||||
|
)
|
||||||
default_port = {"https": 443, "http": 80}[self._protocol]
|
default_port = {"https": 443, "http": 80}[self._protocol]
|
||||||
return f":{port}" if port != default_port else ""
|
return f":{port}" if port != default_port else ""
|
||||||
|
|
||||||
|
def _real_host(self):
|
||||||
|
"""Returns the request host"""
|
||||||
|
if self.real_connection._tunnel_host:
|
||||||
|
# The real connection is to an HTTPS proxy
|
||||||
|
return self.real_connection._tunnel_host
|
||||||
|
else:
|
||||||
|
return self.real_connection.host
|
||||||
|
|
||||||
def _uri(self, url):
|
def _uri(self, url):
|
||||||
"""Returns request absolute URI"""
|
"""Returns request absolute URI"""
|
||||||
if url and not url.startswith("/"):
|
if url and not url.startswith("/"):
|
||||||
# Then this must be a proxy request.
|
# Then this must be a proxy request.
|
||||||
return url
|
return url
|
||||||
uri = f"{self._protocol}://{self.real_connection.host}{self._port_postfix()}{url}"
|
uri = f"{self._protocol}://{self._real_host()}{self._port_postfix()}{url}"
|
||||||
log.debug("Absolute URI: %s", uri)
|
log.debug("Absolute URI: %s", uri)
|
||||||
return uri
|
return uri
|
||||||
|
|
||||||
def _url(self, uri):
|
def _url(self, uri):
|
||||||
"""Returns request selector url from absolute URI"""
|
"""Returns request selector url from absolute URI"""
|
||||||
prefix = f"{self._protocol}://{self.real_connection.host}{self._port_postfix()}"
|
prefix = f"{self._protocol}://{self._real_host()}{self._port_postfix()}"
|
||||||
return uri.replace(prefix, "", 1)
|
return uri.replace(prefix, "", 1)
|
||||||
|
|
||||||
def request(self, method, url, body=None, headers=None, *args, **kwargs):
|
def request(self, method, url, body=None, headers=None, *args, **kwargs):
|
||||||
@@ -357,12 +371,8 @@ class VCRConnection:
|
|||||||
TODO: Separately setting the attribute on the two instances is not
|
TODO: Separately setting the attribute on the two instances is not
|
||||||
ideal. We should switch to a proxying implementation.
|
ideal. We should switch to a proxying implementation.
|
||||||
"""
|
"""
|
||||||
try:
|
with suppress(AttributeError):
|
||||||
setattr(self.real_connection, name, value)
|
setattr(self.real_connection, name, value)
|
||||||
except AttributeError:
|
|
||||||
# raised if real_connection has not been set yet, such as when
|
|
||||||
# we're setting the real_connection itself for the first time
|
|
||||||
pass
|
|
||||||
|
|
||||||
super().__setattr__(name, value)
|
super().__setattr__(name, value)
|
||||||
|
|
||||||
@@ -389,6 +399,8 @@ class VCRHTTPConnection(VCRConnection):
|
|||||||
|
|
||||||
_baseclass = HTTPConnection
|
_baseclass = HTTPConnection
|
||||||
_protocol = "http"
|
_protocol = "http"
|
||||||
|
debuglevel = _baseclass.debuglevel
|
||||||
|
_http_vsn = _baseclass._http_vsn
|
||||||
|
|
||||||
|
|
||||||
class VCRHTTPSConnection(VCRConnection):
|
class VCRHTTPSConnection(VCRConnection):
|
||||||
@@ -397,3 +409,5 @@ class VCRHTTPSConnection(VCRConnection):
|
|||||||
_baseclass = HTTPSConnection
|
_baseclass = HTTPSConnection
|
||||||
_protocol = "https"
|
_protocol = "https"
|
||||||
is_verified = True
|
is_verified = True
|
||||||
|
debuglevel = _baseclass.debuglevel
|
||||||
|
_http_vsn = _baseclass._http_vsn
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
"""Stubs for aiohttp HTTP clients"""
|
"""Stubs for aiohttp HTTP clients"""
|
||||||
|
|
||||||
import asyncio
|
import asyncio
|
||||||
import functools
|
import functools
|
||||||
import json
|
import json
|
||||||
import logging
|
import logging
|
||||||
|
from collections.abc import Mapping
|
||||||
from http.cookies import CookieError, Morsel, SimpleCookie
|
from http.cookies import CookieError, Morsel, SimpleCookie
|
||||||
from typing import Mapping, Union
|
|
||||||
|
|
||||||
from aiohttp import ClientConnectionError, ClientResponse, CookieJar, RequestInfo, hdrs, streams
|
from aiohttp import ClientConnectionError, ClientResponse, CookieJar, RequestInfo, hdrs, streams
|
||||||
from aiohttp.helpers import strip_auth_from_url
|
from aiohttp.helpers import strip_auth_from_url
|
||||||
@@ -228,7 +229,7 @@ def _build_cookie_header(session, cookies, cookie_header, url):
|
|||||||
return c.output(header="", sep=";").strip()
|
return c.output(header="", sep=";").strip()
|
||||||
|
|
||||||
|
|
||||||
def _build_url_with_params(url_str: str, params: Mapping[str, Union[str, int, float]]) -> URL:
|
def _build_url_with_params(url_str: str, params: Mapping[str, str | int | float]) -> URL:
|
||||||
# This code is basically a copy&paste of aiohttp.
|
# This code is basically a copy&paste of aiohttp.
|
||||||
# https://github.com/aio-libs/aiohttp/blob/master/aiohttp/client_reqrep.py#L225
|
# https://github.com/aio-libs/aiohttp/blob/master/aiohttp/client_reqrep.py#L225
|
||||||
url = URL(url_str)
|
url = URL(url_str)
|
||||||
@@ -244,7 +245,11 @@ def vcr_request(cassette, real_request):
|
|||||||
headers = kwargs.get("headers")
|
headers = kwargs.get("headers")
|
||||||
auth = kwargs.get("auth")
|
auth = kwargs.get("auth")
|
||||||
headers = self._prepare_headers(headers)
|
headers = self._prepare_headers(headers)
|
||||||
data = kwargs.get("data", kwargs.get("json"))
|
data = kwargs.get("data")
|
||||||
|
if data is None:
|
||||||
|
data = kwargs.get("json")
|
||||||
|
elif kwargs.get("json") is not None:
|
||||||
|
raise ValueError("data and json parameters can not be used at the same time")
|
||||||
params = kwargs.get("params")
|
params = kwargs.get("params")
|
||||||
cookies = kwargs.get("cookies")
|
cookies = kwargs.get("cookies")
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Stubs for boto3"""
|
"""Stubs for boto3"""
|
||||||
|
|
||||||
from botocore.awsrequest import AWSHTTPConnection as HTTPConnection
|
from botocore.awsrequest import AWSHTTPConnection as HTTPConnection
|
||||||
from botocore.awsrequest import AWSHTTPSConnection as VerifiedHTTPSConnection
|
from botocore.awsrequest import AWSHTTPSConnection as VerifiedHTTPSConnection
|
||||||
|
|
||||||
|
|||||||
@@ -1,9 +0,0 @@
|
|||||||
"""Stubs for boto"""
|
|
||||||
|
|
||||||
from boto.https_connection import CertValidatingHTTPSConnection
|
|
||||||
|
|
||||||
from ..stubs import VCRHTTPSConnection
|
|
||||||
|
|
||||||
|
|
||||||
class VCRCertValidatingHTTPSConnection(VCRHTTPSConnection):
|
|
||||||
_baseclass = CertValidatingHTTPSConnection
|
|
||||||
215
vcr/stubs/httpcore_stubs.py
Normal file
215
vcr/stubs/httpcore_stubs.py
Normal file
@@ -0,0 +1,215 @@
|
|||||||
|
import asyncio
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
from collections import defaultdict
|
||||||
|
from collections.abc import AsyncIterable, Iterable
|
||||||
|
|
||||||
|
from httpcore import Response
|
||||||
|
from httpcore._models import ByteStream
|
||||||
|
|
||||||
|
from vcr.errors import CannotOverwriteExistingCassetteException
|
||||||
|
from vcr.filters import decode_response
|
||||||
|
from vcr.request import Request as VcrRequest
|
||||||
|
from vcr.serializers.compat import convert_body_to_bytes
|
||||||
|
|
||||||
|
_logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
async def _convert_byte_stream(stream):
|
||||||
|
if isinstance(stream, Iterable):
|
||||||
|
return list(stream)
|
||||||
|
|
||||||
|
if isinstance(stream, AsyncIterable):
|
||||||
|
return [part async for part in stream]
|
||||||
|
|
||||||
|
raise TypeError(
|
||||||
|
f"_convert_byte_stream: stream must be Iterable or AsyncIterable, got {type(stream).__name__}",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _serialize_headers(real_response):
|
||||||
|
"""
|
||||||
|
Some headers can appear multiple times, like "Set-Cookie".
|
||||||
|
Therefore serialize every header key to a list of values.
|
||||||
|
"""
|
||||||
|
|
||||||
|
headers = defaultdict(list)
|
||||||
|
|
||||||
|
for name, value in real_response.headers:
|
||||||
|
headers[name.decode("ascii")].append(value.decode("ascii"))
|
||||||
|
|
||||||
|
return dict(headers)
|
||||||
|
|
||||||
|
|
||||||
|
async def _serialize_response(real_response):
|
||||||
|
# The reason_phrase may not exist
|
||||||
|
try:
|
||||||
|
reason_phrase = real_response.extensions["reason_phrase"].decode("ascii")
|
||||||
|
except KeyError:
|
||||||
|
reason_phrase = None
|
||||||
|
|
||||||
|
# Reading the response stream consumes the iterator, so we need to restore it afterwards
|
||||||
|
content = b"".join(await _convert_byte_stream(real_response.stream))
|
||||||
|
real_response.stream = ByteStream(content)
|
||||||
|
|
||||||
|
return {
|
||||||
|
"status": {"code": real_response.status, "message": reason_phrase},
|
||||||
|
"headers": _serialize_headers(real_response),
|
||||||
|
"body": {"string": content},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def _deserialize_headers(headers):
|
||||||
|
"""
|
||||||
|
httpcore accepts headers as list of tuples of header key and value.
|
||||||
|
"""
|
||||||
|
|
||||||
|
return [
|
||||||
|
(name.encode("ascii"), value.encode("ascii")) for name, values in headers.items() for value in values
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def _deserialize_response(vcr_response):
|
||||||
|
# Cassette format generated for HTTPX requests by older versions of
|
||||||
|
# vcrpy. We restructure the content to resemble what a regular
|
||||||
|
# cassette looks like.
|
||||||
|
if "status_code" in vcr_response:
|
||||||
|
vcr_response = decode_response(
|
||||||
|
convert_body_to_bytes(
|
||||||
|
{
|
||||||
|
"headers": vcr_response["headers"],
|
||||||
|
"body": {"string": vcr_response["content"]},
|
||||||
|
"status": {"code": vcr_response["status_code"]},
|
||||||
|
},
|
||||||
|
),
|
||||||
|
)
|
||||||
|
extensions = None
|
||||||
|
else:
|
||||||
|
extensions = (
|
||||||
|
{"reason_phrase": vcr_response["status"]["message"].encode("ascii")}
|
||||||
|
if vcr_response["status"]["message"]
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
|
return Response(
|
||||||
|
vcr_response["status"]["code"],
|
||||||
|
headers=_deserialize_headers(vcr_response["headers"]),
|
||||||
|
content=vcr_response["body"]["string"],
|
||||||
|
extensions=extensions,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def _make_vcr_request(real_request):
|
||||||
|
# Reading the request stream consumes the iterator, so we need to restore it afterwards
|
||||||
|
body = b"".join(await _convert_byte_stream(real_request.stream))
|
||||||
|
real_request.stream = ByteStream(body)
|
||||||
|
|
||||||
|
uri = bytes(real_request.url).decode("ascii")
|
||||||
|
|
||||||
|
# As per HTTPX: If there are multiple headers with the same key, then we concatenate them with commas
|
||||||
|
headers = defaultdict(list)
|
||||||
|
|
||||||
|
for name, value in real_request.headers:
|
||||||
|
headers[name.decode("ascii")].append(value.decode("ascii"))
|
||||||
|
|
||||||
|
headers = {name: ", ".join(values) for name, values in headers.items()}
|
||||||
|
|
||||||
|
return VcrRequest(real_request.method.decode("ascii"), uri, body, headers)
|
||||||
|
|
||||||
|
|
||||||
|
async def _vcr_request(cassette, real_request):
|
||||||
|
vcr_request = await _make_vcr_request(real_request)
|
||||||
|
|
||||||
|
if cassette.can_play_response_for(vcr_request):
|
||||||
|
return vcr_request, _play_responses(cassette, vcr_request)
|
||||||
|
|
||||||
|
if cassette.write_protected and cassette.filter_request(vcr_request):
|
||||||
|
raise CannotOverwriteExistingCassetteException(
|
||||||
|
cassette=cassette,
|
||||||
|
failed_request=vcr_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
_logger.info("%s not in cassette, sending to real server", vcr_request)
|
||||||
|
|
||||||
|
return vcr_request, None
|
||||||
|
|
||||||
|
|
||||||
|
async def _record_responses(cassette, vcr_request, real_response):
|
||||||
|
cassette.append(vcr_request, await _serialize_response(real_response))
|
||||||
|
|
||||||
|
|
||||||
|
def _play_responses(cassette, vcr_request):
|
||||||
|
vcr_response = cassette.play_response(vcr_request)
|
||||||
|
real_response = _deserialize_response(vcr_response)
|
||||||
|
|
||||||
|
return real_response
|
||||||
|
|
||||||
|
|
||||||
|
async def _vcr_handle_async_request(
|
||||||
|
cassette,
|
||||||
|
real_handle_async_request,
|
||||||
|
self,
|
||||||
|
real_request,
|
||||||
|
):
|
||||||
|
vcr_request, vcr_response = await _vcr_request(cassette, real_request)
|
||||||
|
|
||||||
|
if vcr_response:
|
||||||
|
return vcr_response
|
||||||
|
|
||||||
|
real_response = await real_handle_async_request(self, real_request)
|
||||||
|
await _record_responses(cassette, vcr_request, real_response)
|
||||||
|
|
||||||
|
return real_response
|
||||||
|
|
||||||
|
|
||||||
|
def vcr_handle_async_request(cassette, real_handle_async_request):
|
||||||
|
@functools.wraps(real_handle_async_request)
|
||||||
|
def _inner_handle_async_request(self, real_request):
|
||||||
|
return _vcr_handle_async_request(
|
||||||
|
cassette,
|
||||||
|
real_handle_async_request,
|
||||||
|
self,
|
||||||
|
real_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
return _inner_handle_async_request
|
||||||
|
|
||||||
|
|
||||||
|
def _run_async_function(sync_func, *args, **kwargs):
|
||||||
|
"""
|
||||||
|
Safely run an asynchronous function from a synchronous context.
|
||||||
|
Handles both cases:
|
||||||
|
- An event loop is already running.
|
||||||
|
- No event loop exists yet.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
asyncio.get_running_loop()
|
||||||
|
except RuntimeError:
|
||||||
|
return asyncio.run(sync_func(*args, **kwargs))
|
||||||
|
else:
|
||||||
|
# If inside a running loop, create a task and wait for it
|
||||||
|
return asyncio.ensure_future(sync_func(*args, **kwargs))
|
||||||
|
|
||||||
|
|
||||||
|
def _vcr_handle_request(cassette, real_handle_request, self, real_request):
|
||||||
|
vcr_request, vcr_response = _run_async_function(
|
||||||
|
_vcr_request,
|
||||||
|
cassette,
|
||||||
|
real_request,
|
||||||
|
)
|
||||||
|
|
||||||
|
if vcr_response:
|
||||||
|
return vcr_response
|
||||||
|
|
||||||
|
real_response = real_handle_request(self, real_request)
|
||||||
|
_run_async_function(_record_responses, cassette, vcr_request, real_response)
|
||||||
|
|
||||||
|
return real_response
|
||||||
|
|
||||||
|
|
||||||
|
def vcr_handle_request(cassette, real_handle_request):
|
||||||
|
@functools.wraps(real_handle_request)
|
||||||
|
def _inner_handle_request(self, real_request):
|
||||||
|
return _vcr_handle_request(cassette, real_handle_request, self, real_request)
|
||||||
|
|
||||||
|
return _inner_handle_request
|
||||||
@@ -1,171 +0,0 @@
|
|||||||
import functools
|
|
||||||
import inspect
|
|
||||||
import logging
|
|
||||||
from unittest.mock import MagicMock, patch
|
|
||||||
|
|
||||||
import httpx
|
|
||||||
|
|
||||||
from vcr.errors import CannotOverwriteExistingCassetteException
|
|
||||||
from vcr.request import Request as VcrRequest
|
|
||||||
|
|
||||||
_httpx_signature = inspect.signature(httpx.Client.request)
|
|
||||||
|
|
||||||
try:
|
|
||||||
HTTPX_REDIRECT_PARAM = _httpx_signature.parameters["follow_redirects"]
|
|
||||||
except KeyError:
|
|
||||||
HTTPX_REDIRECT_PARAM = _httpx_signature.parameters["allow_redirects"]
|
|
||||||
|
|
||||||
|
|
||||||
_logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def _transform_headers(httpx_response):
|
|
||||||
"""
|
|
||||||
Some headers can appear multiple times, like "Set-Cookie".
|
|
||||||
Therefore transform to every header key to list of values.
|
|
||||||
"""
|
|
||||||
|
|
||||||
out = {}
|
|
||||||
for key, var in httpx_response.headers.raw:
|
|
||||||
decoded_key = key.decode("utf-8")
|
|
||||||
out.setdefault(decoded_key, [])
|
|
||||||
out[decoded_key].append(var.decode("utf-8"))
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
def _to_serialized_response(httpx_response):
|
|
||||||
return {
|
|
||||||
"status_code": httpx_response.status_code,
|
|
||||||
"http_version": httpx_response.http_version,
|
|
||||||
"headers": _transform_headers(httpx_response),
|
|
||||||
"content": httpx_response.content.decode("utf-8", "ignore"),
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _from_serialized_headers(headers):
|
|
||||||
"""
|
|
||||||
httpx accepts headers as list of tuples of header key and value.
|
|
||||||
"""
|
|
||||||
|
|
||||||
header_list = []
|
|
||||||
for key, values in headers.items():
|
|
||||||
for v in values:
|
|
||||||
header_list.append((key, v))
|
|
||||||
return header_list
|
|
||||||
|
|
||||||
|
|
||||||
@patch("httpx.Response.close", MagicMock())
|
|
||||||
@patch("httpx.Response.read", MagicMock())
|
|
||||||
def _from_serialized_response(request, serialized_response, history=None):
|
|
||||||
content = serialized_response.get("content").encode()
|
|
||||||
response = httpx.Response(
|
|
||||||
status_code=serialized_response.get("status_code"),
|
|
||||||
request=request,
|
|
||||||
headers=_from_serialized_headers(serialized_response.get("headers")),
|
|
||||||
content=content,
|
|
||||||
history=history or [],
|
|
||||||
)
|
|
||||||
response._content = content
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
def _make_vcr_request(httpx_request, **kwargs):
|
|
||||||
body = httpx_request.read().decode("utf-8")
|
|
||||||
uri = str(httpx_request.url)
|
|
||||||
headers = dict(httpx_request.headers)
|
|
||||||
return VcrRequest(httpx_request.method, uri, body, headers)
|
|
||||||
|
|
||||||
|
|
||||||
def _shared_vcr_send(cassette, real_send, *args, **kwargs):
|
|
||||||
real_request = args[1]
|
|
||||||
|
|
||||||
vcr_request = _make_vcr_request(real_request, **kwargs)
|
|
||||||
|
|
||||||
if cassette.can_play_response_for(vcr_request):
|
|
||||||
return vcr_request, _play_responses(cassette, real_request, vcr_request, args[0], kwargs)
|
|
||||||
|
|
||||||
if cassette.write_protected and cassette.filter_request(vcr_request):
|
|
||||||
raise CannotOverwriteExistingCassetteException(cassette=cassette, failed_request=vcr_request)
|
|
||||||
|
|
||||||
_logger.info("%s not in cassette, sending to real server", vcr_request)
|
|
||||||
return vcr_request, None
|
|
||||||
|
|
||||||
|
|
||||||
def _record_responses(cassette, vcr_request, real_response):
|
|
||||||
for past_real_response in real_response.history:
|
|
||||||
past_vcr_request = _make_vcr_request(past_real_response.request)
|
|
||||||
cassette.append(past_vcr_request, _to_serialized_response(past_real_response))
|
|
||||||
|
|
||||||
if real_response.history:
|
|
||||||
# If there was a redirection keep we want the request which will hold the
|
|
||||||
# final redirect value
|
|
||||||
vcr_request = _make_vcr_request(real_response.request)
|
|
||||||
|
|
||||||
cassette.append(vcr_request, _to_serialized_response(real_response))
|
|
||||||
return real_response
|
|
||||||
|
|
||||||
|
|
||||||
def _play_responses(cassette, request, vcr_request, client, kwargs):
|
|
||||||
history = []
|
|
||||||
|
|
||||||
allow_redirects = kwargs.get(
|
|
||||||
HTTPX_REDIRECT_PARAM.name,
|
|
||||||
HTTPX_REDIRECT_PARAM.default,
|
|
||||||
)
|
|
||||||
vcr_response = cassette.play_response(vcr_request)
|
|
||||||
response = _from_serialized_response(request, vcr_response)
|
|
||||||
|
|
||||||
while allow_redirects and 300 <= response.status_code <= 399:
|
|
||||||
next_url = response.headers.get("location")
|
|
||||||
if not next_url:
|
|
||||||
break
|
|
||||||
|
|
||||||
vcr_request = VcrRequest("GET", next_url, None, dict(response.headers))
|
|
||||||
vcr_request = cassette.find_requests_with_most_matches(vcr_request)[0][0]
|
|
||||||
|
|
||||||
history.append(response)
|
|
||||||
# add cookies from response to session cookie store
|
|
||||||
client.cookies.extract_cookies(response)
|
|
||||||
|
|
||||||
vcr_response = cassette.play_response(vcr_request)
|
|
||||||
response = _from_serialized_response(vcr_request, vcr_response, history)
|
|
||||||
|
|
||||||
return response
|
|
||||||
|
|
||||||
|
|
||||||
async def _async_vcr_send(cassette, real_send, *args, **kwargs):
|
|
||||||
vcr_request, response = _shared_vcr_send(cassette, real_send, *args, **kwargs)
|
|
||||||
if response:
|
|
||||||
# add cookies from response to session cookie store
|
|
||||||
args[0].cookies.extract_cookies(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
real_response = await real_send(*args, **kwargs)
|
|
||||||
return _record_responses(cassette, vcr_request, real_response)
|
|
||||||
|
|
||||||
|
|
||||||
def async_vcr_send(cassette, real_send):
|
|
||||||
@functools.wraps(real_send)
|
|
||||||
def _inner_send(*args, **kwargs):
|
|
||||||
return _async_vcr_send(cassette, real_send, *args, **kwargs)
|
|
||||||
|
|
||||||
return _inner_send
|
|
||||||
|
|
||||||
|
|
||||||
def _sync_vcr_send(cassette, real_send, *args, **kwargs):
|
|
||||||
vcr_request, response = _shared_vcr_send(cassette, real_send, *args, **kwargs)
|
|
||||||
if response:
|
|
||||||
# add cookies from response to session cookie store
|
|
||||||
args[0].cookies.extract_cookies(response)
|
|
||||||
return response
|
|
||||||
|
|
||||||
real_response = real_send(*args, **kwargs)
|
|
||||||
return _record_responses(cassette, vcr_request, real_response)
|
|
||||||
|
|
||||||
|
|
||||||
def sync_vcr_send(cassette, real_send):
|
|
||||||
@functools.wraps(real_send)
|
|
||||||
def _inner_send(*args, **kwargs):
|
|
||||||
return _sync_vcr_send(cassette, real_send, *args, **kwargs)
|
|
||||||
|
|
||||||
return _inner_send
|
|
||||||
@@ -1,4 +1,5 @@
|
|||||||
"""Stubs for tornado HTTP clients"""
|
"""Stubs for tornado HTTP clients"""
|
||||||
|
|
||||||
import functools
|
import functools
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
|
||||||
@@ -73,7 +74,7 @@ def vcr_fetch_impl(cassette, real_fetch_impl):
|
|||||||
return callback(response)
|
return callback(response)
|
||||||
|
|
||||||
def new_callback(response):
|
def new_callback(response):
|
||||||
headers = [(k, response.headers.get_list(k)) for k in response.headers.keys()]
|
headers = [(k, response.headers.get_list(k)) for k in response.headers]
|
||||||
|
|
||||||
vcr_response = {
|
vcr_response = {
|
||||||
"status": {"code": response.code, "message": response.reason},
|
"status": {"code": response.code, "message": response.reason},
|
||||||
|
|||||||
19
vcr/util.py
19
vcr/util.py
@@ -89,9 +89,28 @@ def compose(*functions):
|
|||||||
return composed
|
return composed
|
||||||
|
|
||||||
|
|
||||||
|
def _is_nonsequence_iterator(obj):
|
||||||
|
return hasattr(obj, "__iter__") and not isinstance(
|
||||||
|
obj,
|
||||||
|
(bytearray, bytes, dict, list, str),
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def read_body(request):
|
def read_body(request):
|
||||||
if hasattr(request.body, "read"):
|
if hasattr(request.body, "read"):
|
||||||
return request.body.read()
|
return request.body.read()
|
||||||
|
if _is_nonsequence_iterator(request.body):
|
||||||
|
body = list(request.body)
|
||||||
|
if body:
|
||||||
|
if isinstance(body[0], str):
|
||||||
|
return "".join(body).encode("utf-8")
|
||||||
|
elif isinstance(body[0], (bytes, bytearray)):
|
||||||
|
return b"".join(body)
|
||||||
|
elif isinstance(body[0], int):
|
||||||
|
return bytes(body)
|
||||||
|
else:
|
||||||
|
raise ValueError(f"Body type {type(body[0])} not supported")
|
||||||
|
return b""
|
||||||
return request.body
|
return request.body
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user