
Fix flake8 issues
nateprewitt committed Apr 28, 2022
1 parent 6ae0f59 commit c5abdad
Showing 15 changed files with 125 additions and 82 deletions.
18 changes: 8 additions & 10 deletions .pre-commit-config.yaml
@@ -15,16 +15,14 @@ repos:
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
exclude: tests/test_lowlevel.py
- id: black
exclude: tests/test_lowlevel.py
- repo: https://github.com/asottile/pyupgrade
rev: v2.31.1
hooks:
- id: pyupgrade
args: [--py37-plus]
# TODO: Add flake8 changes after we're happy
# with above formatting changes.
#- repo: https://gitlab.com/pycqa/flake8
# rev: 4.0.1
# hooks:
# - id: flake8
- id: pyupgrade
args: [--py37-plus]
- repo: https://gitlab.com/pycqa/flake8
rev: 4.0.1
hooks:
- id: flake8
13 changes: 13 additions & 0 deletions pyproject.toml
@@ -0,0 +1,13 @@
[tool.isort]
profile = "black"
src_paths = ["requests", "test"]
honor_noqa = true

[tool.pytest.ini_options]
addopts = "-p no:warnings --doctest-modules"
doctest_optionflags = "NORMALIZE_WHITESPACE ELLIPSIS"
minversion = "6.2"
testpaths = [
"requests",
"tests",
]
3 changes: 0 additions & 3 deletions pytest.ini

This file was deleted.

6 changes: 3 additions & 3 deletions requests/adapters.py
@@ -7,7 +7,7 @@
"""

import os.path
import socket
import socket # noqa: F401

from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError
from urllib3.exceptions import HTTPError as _HTTPError
@@ -537,9 +537,9 @@ def send(
preload_content=False,
decode_content=False,
)
except:
except Exception:
# If we hit any problems here, clean up the connection.
# Then, reraise so that we can handle the actual exception.
# Then, raise so that we can handle the actual exception.
low_conn.close()
raise

2 changes: 1 addition & 1 deletion requests/auth.py
@@ -173,7 +173,7 @@ def sha512_utf8(x):

hash_utf8 = sha512_utf8

KD = lambda s, d: hash_utf8(f"{s}:{d}")
KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731

if hash_utf8 is None:
return None
11 changes: 6 additions & 5 deletions requests/models.py
@@ -10,7 +10,7 @@
# Import encoding now, to avoid implicit import later.
# Implicit import within threads may cause LookupError when standard library is in a ZIP,
# such as in Embedded Python. See https://github.com/psf/requests/issues/3578.
import encodings.idna
import encodings.idna # noqa: F401
from io import UnsupportedOperation

from urllib3.exceptions import (
@@ -965,6 +965,8 @@ def json(self, **kwargs):
# and the server didn't bother to tell us what codec *was*
# used.
pass
except JSONDecodeError as e:
raise RequestsJSONDecodeError(e.msg, e.doc, e.pos)

try:
return complexjson.loads(self.text, **kwargs)
@@ -979,17 +981,16 @@ def links(self):

header = self.headers.get("link")

# l = MultiDict()
l = {}
resolved_links = {}

if header:
links = parse_header_links(header)

for link in links:
key = link.get("rel") or link.get("url")
l[key] = link
resolved_links[key] = link

return l
return resolved_links

def raise_for_status(self):
"""Raises :class:`HTTPError`, if one occurred."""
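
The json() change above means a response body that cannot be decoded now raises requests' own JSONDecodeError, a RequestsException that also subclasses the underlying JSONDecodeError (the new test at the bottom of this diff asserts exactly that). A minimal caller-side sketch of the effect; the URL is hypothetical and assumed to return a non-JSON body:

import requests

resp = requests.get("http://localhost:8000/not-json")  # hypothetical endpoint
try:
    data = resp.json()
except requests.exceptions.JSONDecodeError as exc:
    # Catching json.JSONDecodeError or requests.exceptions.RequestException
    # here would also work, since the raised error subclasses both.
    print(f"Body was not valid JSON: {exc.msg} at position {exc.pos}")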
9 changes: 7 additions & 2 deletions requests/sessions.py
@@ -30,10 +30,15 @@
from .hooks import default_hooks, dispatch_hook

# formerly defined here, reexposed here for backward compatibility
from .models import DEFAULT_REDIRECT_LIMIT, REDIRECT_STATI, PreparedRequest, Request
from .models import ( # noqa: F401
DEFAULT_REDIRECT_LIMIT,
REDIRECT_STATI,
PreparedRequest,
Request,
)
from .status_codes import codes
from .structures import CaseInsensitiveDict
from .utils import (
from .utils import ( # noqa: F401
DEFAULT_PORTS,
default_headers,
get_auth_from_url,
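
Several of the import hunks in this commit (adapters.py, models.py, sessions.py, utils.py) keep names the module itself no longer uses so that old call sites such as from requests.sessions import Request continue to work; the noqa: F401 comments tell flake8 that the otherwise-unused import is an intentional re-export rather than dead code. A small standalone sketch of the same pattern, with an illustrative module name:

# compat_shim.py -- illustrative module, not part of requests.
# JSONDecodeError is imported only so callers can keep importing it from
# here; without the noqa comment flake8 would flag it as F401 (unused).
from json import JSONDecodeError  # noqa: F401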
5 changes: 3 additions & 2 deletions requests/utils.py
@@ -25,7 +25,7 @@
from .__version__ import __version__

# to_native_string is unused here, but imported here for backwards compatibility
from ._internal_utils import to_native_string
from ._internal_utils import to_native_string # noqa: F401
from .compat import (
Mapping,
basestring,
@@ -764,7 +764,8 @@ def should_bypass_proxies(url, no_proxy):
"""
# Prioritize lowercase environment variables over uppercase
# to keep a consistent behaviour with other http projects (curl, wget).
get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper())
def get_proxy(key):
return os.environ.get(key) or os.environ.get(key.upper())

# First check whether no_proxy is defined. If it is, check that the URL
# we're getting isn't in the no_proxy list.
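
The two E731 fixes in this commit take different routes: auth.py keeps its KD lambda and silences the warning with a noqa comment, while utils.py rewrites its get_proxy lambda as a def. A compact illustration of both options, using a made-up helper name:

# Option 1: keep the lambda and mark the assignment as intentional for flake8.
double = lambda x: x * 2  # noqa: E731

# Option 2: the def form that E731 prefers; it gives the function a real
# __name__, which shows up in tracebacks and repr().
def double_fn(x):
    return x * 2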
9 changes: 6 additions & 3 deletions setup.cfg
@@ -9,6 +9,9 @@ requires-dist =
idna>=2.5,<4
urllib3>=1.21.1,<1.27

[isort]
profile = black
honor_noqa = true
[flake8]
ignore = E203, E501, W503
per-file-ignores =
requests/__init__.py:E402, F401
requests/compat.py:E402, F401
tests/compat.py:F401
1 change: 0 additions & 1 deletion tests/__init__.py
@@ -2,7 +2,6 @@

import warnings

import urllib3
from urllib3.exceptions import SNIMissingWarning

# urllib3 sets SNIMissingWarning to only go off once,
1 change: 0 additions & 1 deletion tests/conftest.py
@@ -5,7 +5,6 @@
from SimpleHTTPServer import SimpleHTTPRequestHandler

import ssl
import tempfile
import threading

import pytest
4 changes: 0 additions & 4 deletions tests/test_help.py
@@ -1,7 +1,3 @@
import sys

import pytest

from requests.help import info


82 changes: 54 additions & 28 deletions tests/test_lowlevel.py
@@ -1,9 +1,10 @@
import threading

import pytest
from tests.testserver.server import Server, consume_socket_content

import requests
from tests.testserver.server import Server, consume_socket_content
from requests.compat import JSONDecodeError

from .utils import override_environ

@@ -12,10 +13,11 @@ def echo_response_handler(sock):
"""Simple handler that will take request and echo it back to requester."""
request_content = consume_socket_content(sock, timeout=0.5)

text_200 = (b"HTTP/1.1 200 OK\r\n" b"Content-Length: %d\r\n\r\n" b"%s") % (
len(request_content),
request_content,
)
text_200 = (
b"HTTP/1.1 200 OK\r\n"
b"Content-Length: %d\r\n\r\n"
b"%s"
) % (len(request_content), request_content)
sock.send(text_200)


@@ -41,7 +43,10 @@ def incomplete_chunked_response_handler(sock):
request_content = consume_socket_content(sock, timeout=0.5)

# The server never ends the request and doesn't provide any valid chunks
sock.send(b"HTTP/1.1 200 OK\r\n" + b"Transfer-Encoding: chunked\r\n")
sock.send(
b"HTTP/1.1 200 OK\r\n"
b"Transfer-Encoding: chunked\r\n"
)

return request_content

@@ -51,7 +56,7 @@ def incomplete_chunked_response_handler(sock):
with server as (host, port):
url = f"http://{host}:{port}/"
with pytest.raises(requests.exceptions.ChunkedEncodingError):
r = requests.get(url)
requests.get(url)
close_server.set() # release server block


@@ -98,12 +103,14 @@ def test_conflicting_content_lengths():

def multiple_content_length_response_handler(sock):
request_content = consume_socket_content(sock, timeout=0.5)

sock.send(b"HTTP/1.1 200 OK\r\n" +
b"Content-Type: text/plain\r\n" +
b"Content-Length: 16\r\n" +
b"Content-Length: 32\r\n\r\n" +
b"-- Bad Actor -- Original Content\r\n")
response = (
b"HTTP/1.1 200 OK\r\n"
b"Content-Type: text/plain\r\n"
b"Content-Length: 16\r\n"
b"Content-Length: 32\r\n\r\n"
b"-- Bad Actor -- Original Content\r\n"
)
sock.send(response)

return request_content

@@ -113,7 +120,7 @@ def multiple_content_length_response_handler(sock):
with server as (host, port):
url = f"http://{host}:{port}/"
with pytest.raises(requests.exceptions.InvalidHeader):
r = requests.get(url)
requests.get(url)
close_server.set()


@@ -307,10 +314,12 @@ def redirect_resp_handler(sock):
consume_socket_content(sock, timeout=0.5)
location = f'//{host}:{port}/{path}'
sock.send(
b'HTTP/1.1 301 Moved Permanently\r\n'
b'Content-Length: 0\r\n'
b'Location: ' + location.encode('utf8') + b'\r\n'
b'\r\n'
(
b'HTTP/1.1 301 Moved Permanently\r\n'
b'Content-Length: 0\r\n'
b'Location: %s\r\n'
b'\r\n'
) % location.encode('utf8')
)
redirect_request.append(consume_socket_content(sock, timeout=0.5))
sock.send(b'HTTP/1.1 200 OK\r\n\r\n')
@@ -329,18 +338,11 @@ def redirect_resp_handler(sock):

close_server.set()


def test_fragment_not_sent_with_request():
"""Verify that the fragment portion of a URI isn't sent to the server."""
def response_handler(sock):
req = consume_socket_content(sock, timeout=0.5)
sock.send(
b'HTTP/1.1 200 OK\r\n'
b'Content-Length: '+bytes(len(req))+b'\r\n'
b'\r\n'+req
)

close_server = threading.Event()
server = Server(response_handler, wait_to_close_event=close_server)
server = Server(echo_response_handler, wait_to_close_event=close_server)

with server as (host, port):
url = f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2'
@@ -358,6 +360,7 @@ def response_handler(sock):

close_server.set()


def test_fragment_update_on_redirect():
"""Verify we only append previous fragment if one doesn't exist on new
location. If a new fragment is encountered in a Location header, it should
@@ -388,7 +391,6 @@ def response_handler(sock):
with server as (host, port):
url = f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2'
r = requests.get(url)
raw_request = r.content

assert r.status_code == 200
assert len(r.history) == 2
@@ -400,3 +402,27 @@ def response_handler(sock):
assert r.url == f'http://{host}:{port}/final-url/#relevant-section'

close_server.set()


def test_json_decode_compatibility_for_alt_utf_encodings():

def response_handler(sock):
consume_socket_content(sock, timeout=0.5)
sock.send(
b'HTTP/1.1 200 OK\r\n'
b'Content-Length: 18\r\n\r\n'
b'\xff\xfe{\x00"\x00K0"\x00=\x00"\x00\xab0"\x00\r\n'
)

close_server = threading.Event()
server = Server(response_handler, wait_to_close_event=close_server)

with server as (host, port):
url = f'http://{host}:{port}/'
r = requests.get(url)
r.encoding = None
with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo:
r.json()
assert isinstance(excinfo.value, requests.exceptions.RequestException)
assert isinstance(excinfo.value, JSONDecodeError)
assert r.text not in str(excinfo.value)
