Mirror of https://github.com/kevin1024/vcrpy.git (1 star, 0 forks), synced 2025-12-08 16:53:23 +00:00.

Format project with black (#467)

Format with line length 110 to match flake8

make black part of linting check

Update travis spec for updated black requirements

Add diff output for black on failure

update changelog
This commit is contained in:
Josh Peak
2019-08-24 11:36:35 +10:00
committed by GitHub
parent 75969de601
commit 7caf29735a
70 changed files with 2000 additions and 2217 deletions

View File

@@ -8,18 +8,18 @@ from assertions import assert_is_json
def _headers_are_case_insensitive(host, port):
    """Return True if the server's cookie header reads the same under two casings.

    Makes two identical requests and fetches the cookie header once as
    ``set-cookie`` and once as ``Set-Cookie``; equal values mean header
    lookup is case-insensitive outside of vcrpy.
    """
    conn = httplib.HTTPConnection(host, port)
    conn.request("GET", "/cookies/set?k1=v1")
    r1 = conn.getresponse()
    cookie_data1 = r1.getheader("set-cookie")
    # Fresh connection for the second, identically-cased request.
    conn = httplib.HTTPConnection(host, port)
    conn.request("GET", "/cookies/set?k1=v1")
    r2 = conn.getresponse()
    cookie_data2 = r2.getheader("Set-Cookie")
    return cookie_data1 == cookie_data2
def test_case_insensitivity(tmpdir, httpbin):
testfile = str(tmpdir.join('case_insensitivity.yml'))
testfile = str(tmpdir.join("case_insensitivity.yml"))
# check if headers are case insensitive outside of vcrpy
host, port = httpbin.host, httpbin.port
outside = _headers_are_case_insensitive(host, port)
@@ -35,13 +35,13 @@ def test_case_insensitivity(tmpdir, httpbin):
def _multiple_header_value(httpbin):
    """Return the value of the repeated ``foo`` response header.

    Requests an endpoint that sets ``foo`` twice (bar and baz) so callers can
    compare how a multi-valued header is reported inside vs. outside a cassette.
    """
    conn = httplib.HTTPConnection(httpbin.host, httpbin.port)
    conn.request("GET", "/response-headers?foo=bar&foo=baz")
    r = conn.getresponse()
    return r.getheader("foo")
def test_multiple_headers(tmpdir, httpbin):
testfile = str(tmpdir.join('multiple_headers.yaml'))
testfile = str(tmpdir.join("multiple_headers.yaml"))
outside = _multiple_header_value(httpbin)
with vcr.use_cassette(testfile):
@@ -51,83 +51,84 @@ def test_multiple_headers(tmpdir, httpbin):
def test_original_decoded_response_is_not_modified(tmpdir, httpbin):
    """Recording with decode_compressed_response=True must not alter the live response.

    The response served while recording should still be gzip-compressed and
    byte-identical to one fetched without vcrpy; only the cassette copy is decoded.
    """
    testfile = str(tmpdir.join("decoded_response.yml"))
    host, port = httpbin.host, httpbin.port

    # Baseline response fetched with no cassette in play.
    conn = httplib.HTTPConnection(host, port)
    conn.request("GET", "/gzip")
    outside = conn.getresponse()

    with vcr.use_cassette(testfile, decode_compressed_response=True):
        conn = httplib.HTTPConnection(host, port)
        conn.request("GET", "/gzip")
        inside = conn.getresponse()

        # Assert that we do not modify the original response while appending
        # to the cassette.
        assert "gzip" == inside.headers["content-encoding"]

        # They should effectively be the same response (Date varies per request).
        inside_headers = (h for h in inside.headers.items() if h[0].lower() != "date")
        outside_headers = (h for h in outside.getheaders() if h[0].lower() != "date")
        assert set(inside_headers) == set(outside_headers)

        # wbits of 16 + MAX_WBITS tells zlib to expect a gzip wrapper.
        inside = zlib.decompress(inside.read(), 16 + zlib.MAX_WBITS)
        outside = zlib.decompress(outside.read(), 16 + zlib.MAX_WBITS)
        assert inside == outside

    # Even though the above are raw bytes, the JSON data should have been
    # decoded and saved to the cassette.
    with vcr.use_cassette(testfile):
        conn = httplib.HTTPConnection(host, port)
        conn.request("GET", "/gzip")
        inside = conn.getresponse()
        assert "content-encoding" not in inside.headers
        assert_is_json(inside.read())
def _make_before_record_response(fields, replacement='[REDACTED]'):
def _make_before_record_response(fields, replacement="[REDACTED]"):
def before_record_response(response):
string_body = response['body']['string'].decode('utf8')
string_body = response["body"]["string"].decode("utf8")
body = json.loads(string_body)
for field in fields:
if field in body:
body[field] = replacement
response['body']['string'] = json.dumps(body).encode()
response["body"]["string"] = json.dumps(body).encode()
return response
return before_record_response
def test_original_response_is_not_modified_by_before_filter(tmpdir, httpbin):
    """A before_record_response filter must scrub the cassette, not the live response.

    While recording, the response handed to the caller keeps its real field
    values; only on playback from the cassette is the scrubbed value returned.
    """
    testfile = str(tmpdir.join("sensitive_data_scrubbed_response.yml"))
    host, port = httpbin.host, httpbin.port
    field_to_scrub = "url"
    replacement = "[YOU_CANT_HAVE_THE_MANGO]"

    # Baseline response fetched with no cassette in play.
    conn = httplib.HTTPConnection(host, port)
    conn.request("GET", "/get")
    outside = conn.getresponse()

    callback = _make_before_record_response([field_to_scrub], replacement)
    with vcr.use_cassette(testfile, before_record_response=callback):
        conn = httplib.HTTPConnection(host, port)
        conn.request("GET", "/get")
        inside = conn.getresponse()

        # The scrubbed field should be the same, because no cassette existed.
        # Furthermore, the responses should be identical.
        inside_body = json.loads(inside.read().decode("utf-8"))
        outside_body = json.loads(outside.read().decode("utf-8"))
        assert not inside_body[field_to_scrub] == replacement
        assert inside_body[field_to_scrub] == outside_body[field_to_scrub]

    # Ensure that when a cassette exists, the scrubbed response is returned.
    with vcr.use_cassette(testfile, before_record_response=callback):
        conn = httplib.HTTPConnection(host, port)
        conn.request("GET", "/get")
        inside = conn.getresponse()
        inside_body = json.loads(inside.read().decode("utf-8"))
        assert inside_body[field_to_scrub] == replacement