diff --git a/.coveragerc b/.coveragerc index e9b51ab174..b5008b2b2f 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,2 +1,2 @@ [run] -omit = requests/packages/* \ No newline at end of file +omit = requests/packages/* diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md index e23f4c70ce..060d9262a5 100644 --- a/.github/ISSUE_TEMPLATE.md +++ b/.github/ISSUE_TEMPLATE.md @@ -25,4 +25,4 @@ import requests This command is only available on Requests v2.16.4 and greater. Otherwise, please provide some basic information about your system (Python version, -operating system, &c). \ No newline at end of file +operating system, &c). diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index cc5fd29dd3..617f2df3ef 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -42,7 +42,7 @@ jobs: with: languages: "python" # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. + # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # queries: ./path/to/local/query, your-org/your-repo/queries@main diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000000..cac5ddccb4 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,28 @@ +exclude: 'docs/|ext/' + +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.0.1 + hooks: + - id: check-yaml + - id: debug-statements + - id: end-of-file-fixer + - id: trailing-whitespace +- repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort +- repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black + exclude: tests/test_lowlevel.py +- repo: https://github.com/asottile/pyupgrade + rev: v2.31.1 + hooks: + - id: pyupgrade + args: [--py37-plus] +- repo: https://gitlab.com/pycqa/flake8 + rev: 4.0.1 + hooks: + - id: flake8 diff --git a/docs/dev/contributing.rst b/docs/dev/contributing.rst index 63bfdfdbf2..961f7c3aba 100644 --- a/docs/dev/contributing.rst +++ b/docs/dev/contributing.rst @@ -93,6 +93,21 @@ event that you object to the code review feedback, you should make your case clearly and calmly. If, after doing so, the feedback is judged to still apply, you must either apply the feedback or withdraw your contribution. +Code Style +~~~~~~~~~~ + +Requests uses a collection of tools to ensure the code base has a consistent +style as it grows. We have these orchestrated using a tool called +`pre-commit`_. This can be installed locally and run over your changes prior +to opening a PR, and will also be run as part of the CI approval process +before a change is merged. + +You can find the full list of formatting requirements specified in the +`.pre-commit-config.yaml`_ at the top level directory of Requests. + +.. _pre-commit: https://pre-commit.com/ +.. _.pre-commit-config.yaml: https://github.com/psf/requests/blob/main/.pre-commit-config.yaml + New Contributors ~~~~~~~~~~~~~~~~ @@ -103,62 +118,6 @@ asking for help. Please also check the :ref:`early-feedback` section. -Kenneth Reitz's Code Style™ -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The Requests codebase uses the `PEP 8`_ code style. - -In addition to the standards outlined in PEP 8, we have a few guidelines: - -- Line-length can exceed 79 characters, to 100, when convenient. 
-- Line-length can exceed 100 characters, when doing otherwise would be *terribly* inconvenient. -- Always use single-quoted strings (e.g. ``'#flatearth'``), unless a single-quote occurs within the string. - -Additionally, one of the styles that PEP8 recommends for `line continuations`_ -completely lacks all sense of taste, and is not to be permitted within -the Requests codebase:: - - # Aligned with opening delimiter. - foo = long_function_name(var_one, var_two, - var_three, var_four) - -No. Just don't. Please. This is much better:: - - foo = long_function_name( - var_one, - var_two, - var_three, - var_four, - ) - -Docstrings are to follow the following syntaxes:: - - def the_earth_is_flat(): - """NASA divided up the seas into thirty-three degrees.""" - pass - -:: - - def fibonacci_spiral_tool(): - """With my feet upon the ground I lose myself / between the sounds - and open wide to suck it in. / I feel it move across my skin. / I'm - reaching up and reaching out. / I'm reaching for the random or - whatever will bewilder me. / Whatever will bewilder me. / And - following our will and wind we may just go where no one's been. / - We'll ride the spiral to the end and may just go where no one's - been. - - Spiral out. Keep going... - """ - pass - -All functions, methods, and classes are to contain docstrings. Object data -model methods (e.g. ``__repr__``) are typically the exception to this rule. - -Thanks for helping to make the world a better place! - -.. _PEP 8: https://pep8.org/ -.. _line continuations: https://www.python.org/dev/peps/pep-0008/#indentation Documentation Contributions --------------------------- diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000000..996bf14c83 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,13 @@ +[tool.isort] +profile = "black" +src_paths = ["requests", "test"] +honor_noqa = true + +[tool.pytest.ini_options] +addopts = "-p no:warnings --doctest-modules" +doctest_optionflags = "NORMALIZE_WHITESPACE ELLIPSIS" +minversion = "6.2" +testpaths = [ + "requests", + "tests", +] diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 13fa000028..0000000000 --- a/pytest.ini +++ /dev/null @@ -1,3 +0,0 @@ -[pytest] -addopts = -p no:warnings --doctest-modules -doctest_optionflags= NORMALIZE_WHITESPACE ELLIPSIS \ No newline at end of file diff --git a/requests/__init__.py b/requests/__init__.py index 53a5b42af6..343e5082d3 100644 --- a/requests/__init__.py +++ b/requests/__init__.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - # __ # /__) _ _ _ _ _/ _ # / ( (- (/ (/ (- _) / _) @@ -40,8 +38,10 @@ :license: Apache 2.0, see LICENSE for more details. """ -import urllib3 import warnings + +import urllib3 + from .exceptions import RequestsDependencyWarning try: @@ -54,13 +54,14 @@ except ImportError: chardet_version = None + def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version): - urllib3_version = urllib3_version.split('.') - assert urllib3_version != ['dev'] # Verify urllib3 isn't installed from git. + urllib3_version = urllib3_version.split(".") + assert urllib3_version != ["dev"] # Verify urllib3 isn't installed from git. # Sometimes, urllib3 only reports its version as 16.1. if len(urllib3_version) == 2: - urllib3_version.append('0') + urllib3_version.append("0") # Check urllib3 for compatibility. major, minor, patch = urllib3_version # noqa: F811 @@ -72,36 +73,46 @@ def check_compatibility(urllib3_version, chardet_version, charset_normalizer_ver # Check charset_normalizer for compatibility. 
if chardet_version: - major, minor, patch = chardet_version.split('.')[:3] + major, minor, patch = chardet_version.split(".")[:3] major, minor, patch = int(major), int(minor), int(patch) # chardet_version >= 3.0.2, < 5.0.0 assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0) elif charset_normalizer_version: - major, minor, patch = charset_normalizer_version.split('.')[:3] + major, minor, patch = charset_normalizer_version.split(".")[:3] major, minor, patch = int(major), int(minor), int(patch) # charset_normalizer >= 2.0.0 < 3.0.0 assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0) else: raise Exception("You need either charset_normalizer or chardet installed") + def _check_cryptography(cryptography_version): # cryptography < 1.3.4 try: - cryptography_version = list(map(int, cryptography_version.split('.'))) + cryptography_version = list(map(int, cryptography_version.split("."))) except ValueError: return if cryptography_version < [1, 3, 4]: - warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version) + warning = "Old version of cryptography ({}) may cause slowdown.".format( + cryptography_version + ) warnings.warn(warning, RequestsDependencyWarning) + # Check imported dependencies for compatibility. try: - check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version) + check_compatibility( + urllib3.__version__, chardet_version, charset_normalizer_version + ) except (AssertionError, ValueError): - warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " - "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version), - RequestsDependencyWarning) + warnings.warn( + "urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported " + "version!".format( + urllib3.__version__, chardet_version, charset_normalizer_version + ), + RequestsDependencyWarning, + ) # Attempt to enable urllib3's fallback for SNI support # if the standard library doesn't support SNI or the @@ -114,39 +125,56 @@ def _check_cryptography(cryptography_version): if not getattr(ssl, "HAS_SNI", False): from urllib3.contrib import pyopenssl + pyopenssl.inject_into_urllib3() # Check cryptography version from cryptography import __version__ as cryptography_version + _check_cryptography(cryptography_version) except ImportError: pass # urllib3's DependencyWarnings should be silenced. from urllib3.exceptions import DependencyWarning -warnings.simplefilter('ignore', DependencyWarning) -from .__version__ import __title__, __description__, __url__, __version__ -from .__version__ import __build__, __author__, __author_email__, __license__ -from .__version__ import __copyright__, __cake__ - -from . import utils -from . import packages -from .models import Request, Response, PreparedRequest -from .api import request, get, head, post, patch, put, delete, options -from .sessions import session, Session -from .status_codes import codes -from .exceptions import ( - RequestException, Timeout, URLRequired, - TooManyRedirects, HTTPError, ConnectionError, - FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError -) +warnings.simplefilter("ignore", DependencyWarning) # Set default logging handler to avoid "No handler found" warnings. import logging from logging import NullHandler +from . 
import packages, utils +from .__version__ import ( + __author__, + __author_email__, + __build__, + __cake__, + __copyright__, + __description__, + __license__, + __title__, + __url__, + __version__, +) +from .api import delete, get, head, options, patch, post, put, request +from .exceptions import ( + ConnectionError, + ConnectTimeout, + FileModeWarning, + HTTPError, + JSONDecodeError, + ReadTimeout, + RequestException, + Timeout, + TooManyRedirects, + URLRequired, +) +from .models import PreparedRequest, Request, Response +from .sessions import Session, session +from .status_codes import codes + logging.getLogger(__name__).addHandler(NullHandler()) # FileModeWarnings go off per the default. -warnings.simplefilter('default', FileModeWarning, append=True) +warnings.simplefilter("default", FileModeWarning, append=True) diff --git a/requests/__version__.py b/requests/__version__.py index e973b03b5f..799bb17f02 100644 --- a/requests/__version__.py +++ b/requests/__version__.py @@ -2,13 +2,13 @@ # |( |- |.| | | |- `-. | `-. # ' ' `-' `-`.`-' `-' `-' ' `-' -__title__ = 'requests' -__description__ = 'Python HTTP for Humans.' -__url__ = 'https://requests.readthedocs.io' -__version__ = '2.27.1' +__title__ = "requests" +__description__ = "Python HTTP for Humans." +__url__ = "https://requests.readthedocs.io" +__version__ = "2.27.1" __build__ = 0x022701 -__author__ = 'Kenneth Reitz' -__author_email__ = 'me@kennethreitz.org' -__license__ = 'Apache 2.0' -__copyright__ = 'Copyright 2022 Kenneth Reitz' -__cake__ = u'\u2728 \U0001f370 \u2728' +__author__ = "Kenneth Reitz" +__author_email__ = "me@kennethreitz.org" +__license__ = "Apache 2.0" +__copyright__ = "Copyright 2022 Kenneth Reitz" +__cake__ = "\u2728 \U0001f370 \u2728" diff --git a/requests/_internal_utils.py b/requests/_internal_utils.py index ebab39ff1c..a82e36c8cb 100644 --- a/requests/_internal_utils.py +++ b/requests/_internal_utils.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests._internal_utils ~~~~~~~~~~~~~~ @@ -11,7 +9,7 @@ from .compat import builtin_str -def to_native_string(string, encoding='ascii'): +def to_native_string(string, encoding="ascii"): """Given a string object, regardless of type, returns a representation of that string in the native string type, encoding and decoding where necessary. This assumes ASCII unless told otherwise. 
@@ -33,7 +31,7 @@ def unicode_is_ascii(u_string): """ assert isinstance(u_string, str) try: - u_string.encode('ascii') + u_string.encode("ascii") return True except UnicodeEncodeError: return False diff --git a/requests/adapters.py b/requests/adapters.py index dc532999df..d3b2d5bb1e 100644 --- a/requests/adapters.py +++ b/requests/adapters.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.adapters ~~~~~~~~~~~~~~~~~ @@ -9,58 +7,76 @@ """ import os.path -import socket +import socket # noqa: F401 -from urllib3.poolmanager import PoolManager, proxy_from_url -from urllib3.response import HTTPResponse -from urllib3.util import parse_url -from urllib3.util import Timeout as TimeoutSauce -from urllib3.util.retry import Retry -from urllib3.exceptions import ClosedPoolError -from urllib3.exceptions import ConnectTimeoutError +from urllib3.exceptions import ClosedPoolError, ConnectTimeoutError from urllib3.exceptions import HTTPError as _HTTPError from urllib3.exceptions import InvalidHeader as _InvalidHeader -from urllib3.exceptions import MaxRetryError -from urllib3.exceptions import NewConnectionError +from urllib3.exceptions import ( + LocationValueError, + MaxRetryError, + NewConnectionError, + ProtocolError, +) from urllib3.exceptions import ProxyError as _ProxyError -from urllib3.exceptions import ProtocolError -from urllib3.exceptions import ReadTimeoutError +from urllib3.exceptions import ReadTimeoutError, ResponseError from urllib3.exceptions import SSLError as _SSLError -from urllib3.exceptions import ResponseError -from urllib3.exceptions import LocationValueError +from urllib3.poolmanager import PoolManager, proxy_from_url +from urllib3.response import HTTPResponse +from urllib3.util import Timeout as TimeoutSauce +from urllib3.util import parse_url +from urllib3.util.retry import Retry +from .auth import _basic_auth_str +from .compat import basestring, urlparse +from .cookies import extract_cookies_to_jar +from .exceptions import ( + ConnectionError, + ConnectTimeout, + InvalidHeader, + InvalidProxyURL, + InvalidSchema, + InvalidURL, + ProxyError, + ReadTimeout, + RetryError, + SSLError, +) from .models import Response -from .compat import urlparse, basestring -from .utils import (DEFAULT_CA_BUNDLE_PATH, extract_zipped_paths, - get_encoding_from_headers, prepend_scheme_if_needed, - get_auth_from_url, urldefragauth, select_proxy) from .structures import CaseInsensitiveDict -from .cookies import extract_cookies_to_jar -from .exceptions import (ConnectionError, ConnectTimeout, ReadTimeout, SSLError, - ProxyError, RetryError, InvalidSchema, InvalidProxyURL, - InvalidURL, InvalidHeader) -from .auth import _basic_auth_str +from .utils import ( + DEFAULT_CA_BUNDLE_PATH, + extract_zipped_paths, + get_auth_from_url, + get_encoding_from_headers, + prepend_scheme_if_needed, + select_proxy, + urldefragauth, +) try: from urllib3.contrib.socks import SOCKSProxyManager except ImportError: + def SOCKSProxyManager(*args, **kwargs): raise InvalidSchema("Missing dependencies for SOCKS support.") + DEFAULT_POOLBLOCK = False DEFAULT_POOLSIZE = 10 DEFAULT_RETRIES = 0 DEFAULT_POOL_TIMEOUT = None -class BaseAdapter(object): +class BaseAdapter: """The Base Transport Adapter""" def __init__(self): - super(BaseAdapter, self).__init__() + super().__init__() - def send(self, request, stream=False, timeout=None, verify=True, - cert=None, proxies=None): + def send( + self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None + ): """Sends PreparedRequest object. Returns Response object. 
         :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
@@ -108,12 +124,22 @@ class HTTPAdapter(BaseAdapter):
     >>> a = requests.adapters.HTTPAdapter(max_retries=3)
     >>> s.mount('http://', a)
     """
-    __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
-                 '_pool_block']
 
-    def __init__(self, pool_connections=DEFAULT_POOLSIZE,
-                 pool_maxsize=DEFAULT_POOLSIZE, max_retries=DEFAULT_RETRIES,
-                 pool_block=DEFAULT_POOLBLOCK):
+    __attrs__ = [
+        "max_retries",
+        "config",
+        "_pool_connections",
+        "_pool_maxsize",
+        "_pool_block",
+    ]
+
+    def __init__(
+        self,
+        pool_connections=DEFAULT_POOLSIZE,
+        pool_maxsize=DEFAULT_POOLSIZE,
+        max_retries=DEFAULT_RETRIES,
+        pool_block=DEFAULT_POOLBLOCK,
+    ):
         if max_retries == DEFAULT_RETRIES:
             self.max_retries = Retry(0, read=False)
         else:
@@ -121,7 +147,7 @@ def __init__(self, pool_connections=DEFAULT_POOLSIZE,
         self.config = {}
         self.proxy_manager = {}
 
-        super(HTTPAdapter, self).__init__()
+        super().__init__()
 
         self._pool_connections = pool_connections
         self._pool_maxsize = pool_maxsize
@@ -141,10 +167,13 @@ def __setstate__(self, state):
         for attr, value in state.items():
             setattr(self, attr, value)
 
-        self.init_poolmanager(self._pool_connections, self._pool_maxsize,
-                              block=self._pool_block)
+        self.init_poolmanager(
+            self._pool_connections, self._pool_maxsize, block=self._pool_block
+        )
 
-    def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs):
+    def init_poolmanager(
+        self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs
+    ):
         """Initializes a urllib3 PoolManager.
 
         This method should not be called from user code, and is only
@@ -161,8 +190,13 @@ def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool
         self._pool_maxsize = maxsize
         self._pool_block = block
 
-        self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize,
-                                       block=block, strict=True, **pool_kwargs)
+        self.poolmanager = PoolManager(
+            num_pools=connections,
+            maxsize=maxsize,
+            block=block,
+            strict=True,
+            **pool_kwargs,
+        )
 
     def proxy_manager_for(self, proxy, **proxy_kwargs):
         """Return urllib3 ProxyManager for the given proxy.
@@ -178,7 +212,7 @@ def proxy_manager_for(self, proxy, **proxy_kwargs):
         """
         if proxy in self.proxy_manager:
             manager = self.proxy_manager[proxy]
-        elif proxy.lower().startswith('socks'):
+        elif proxy.lower().startswith("socks"):
             username, password = get_auth_from_url(proxy)
             manager = self.proxy_manager[proxy] = SOCKSProxyManager(
                 proxy,
@@ -187,7 +221,7 @@ def proxy_manager_for(self, proxy, **proxy_kwargs):
                 num_pools=self._pool_connections,
                 maxsize=self._pool_maxsize,
                 block=self._pool_block,
-                **proxy_kwargs
+                **proxy_kwargs,
             )
         else:
             proxy_headers = self.proxy_headers(proxy)
@@ -197,7 +231,8 @@ def proxy_manager_for(self, proxy, **proxy_kwargs):
                 num_pools=self._pool_connections,
                 maxsize=self._pool_maxsize,
                 block=self._pool_block,
-                **proxy_kwargs)
+                **proxy_kwargs,
+            )
 
         return manager
 
@@ -213,7 +248,7 @@ def cert_verify(self, conn, url, verify, cert):
            to a CA bundle to use
         :param cert: The SSL certificate to verify.
         """
-        if url.lower().startswith('https') and verify:
+        if url.lower().startswith("https") and verify:
 
             cert_loc = None
 
@@ -225,17 +260,19 @@ def cert_verify(self, conn, url, verify, cert):
                 cert_loc = extract_zipped_paths(DEFAULT_CA_BUNDLE_PATH)
 
             if not cert_loc or not os.path.exists(cert_loc):
-                raise IOError("Could not find a suitable TLS CA certificate bundle, "
-                              "invalid path: {}".format(cert_loc))
+                raise OSError(
+                    f"Could not find a suitable TLS CA certificate bundle, "
+                    f"invalid path: {cert_loc}"
+                )
 
-            conn.cert_reqs = 'CERT_REQUIRED'
+            conn.cert_reqs = "CERT_REQUIRED"
 
             if not os.path.isdir(cert_loc):
                 conn.ca_certs = cert_loc
             else:
                 conn.ca_cert_dir = cert_loc
         else:
-            conn.cert_reqs = 'CERT_NONE'
+            conn.cert_reqs = "CERT_NONE"
             conn.ca_certs = None
             conn.ca_cert_dir = None
 
@@ -247,11 +284,14 @@ def cert_verify(self, conn, url, verify, cert):
                 conn.cert_file = cert
                 conn.key_file = None
             if conn.cert_file and not os.path.exists(conn.cert_file):
-                raise IOError("Could not find the TLS certificate file, "
-                              "invalid path: {}".format(conn.cert_file))
+                raise OSError(
+                    f"Could not find the TLS certificate file, "
+                    f"invalid path: {conn.cert_file}"
+                )
             if conn.key_file and not os.path.exists(conn.key_file):
-                raise IOError("Could not find the TLS key file, "
-                              "invalid path: {}".format(conn.key_file))
+                raise OSError(
+                    f"Could not find the TLS key file, invalid path: {conn.key_file}"
+                )
 
     def build_response(self, req, resp):
         """Builds a :class:`Response <requests.Response>` object from a urllib3
@@ -266,10 +306,10 @@ def build_response(self, req, resp):
         response = Response()
 
         # Fallback to None if there's no status_code, for whatever reason.
-        response.status_code = getattr(resp, 'status', None)
+        response.status_code = getattr(resp, "status", None)
 
         # Make headers case-insensitive.
-        response.headers = CaseInsensitiveDict(getattr(resp, 'headers', {}))
+        response.headers = CaseInsensitiveDict(getattr(resp, "headers", {}))
 
         # Set encoding.
         response.encoding = get_encoding_from_headers(response.headers)
@@ -277,7 +317,7 @@ def build_response(self, req, resp):
         response.reason = response.raw.reason
 
         if isinstance(req.url, bytes):
-            response.url = req.url.decode('utf-8')
+            response.url = req.url.decode("utf-8")
         else:
             response.url = req.url
 
@@ -302,11 +342,13 @@ def get_connection(self, url, proxies=None):
         proxy = select_proxy(url, proxies)
 
         if proxy:
-            proxy = prepend_scheme_if_needed(proxy, 'http')
+            proxy = prepend_scheme_if_needed(proxy, "http")
             proxy_url = parse_url(proxy)
             if not proxy_url.host:
-                raise InvalidProxyURL("Please check proxy URL. It is malformed"
-                                      " and could be missing the host.")
+                raise InvalidProxyURL(
+                    "Please check proxy URL. It is malformed "
+                    "and could be missing the host."
+                )
             proxy_manager = self.proxy_manager_for(proxy)
             conn = proxy_manager.connection_from_url(url)
         else:
@@ -344,11 +386,11 @@ def request_url(self, request, proxies):
         proxy = select_proxy(request.url, proxies)
         scheme = urlparse(request.url).scheme
 
-        is_proxied_http_request = (proxy and scheme != 'https')
+        is_proxied_http_request = proxy and scheme != "https"
         using_socks_proxy = False
         if proxy:
             proxy_scheme = urlparse(proxy).scheme.lower()
-            using_socks_proxy = proxy_scheme.startswith('socks')
+            using_socks_proxy = proxy_scheme.startswith("socks")
 
         url = request.path_url
         if is_proxied_http_request and not using_socks_proxy:
@@ -387,12 +429,13 @@ def proxy_headers(self, proxy):
         username, password = get_auth_from_url(proxy)
 
         if username:
-            headers['Proxy-Authorization'] = _basic_auth_str(username,
-                                                             password)
+            headers["Proxy-Authorization"] = _basic_auth_str(username, password)
 
         return headers
 
-    def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
+    def send(
+        self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None
+    ):
         """Sends PreparedRequest object. Returns Response object.
 
         :param request: The :class:`PreparedRequest <PreparedRequest>` being sent.
@@ -416,20 +459,26 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
         self.cert_verify(conn, request.url, verify, cert)
         url = self.request_url(request, proxies)
-        self.add_headers(request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
+        self.add_headers(
+            request,
+            stream=stream,
+            timeout=timeout,
+            verify=verify,
+            cert=cert,
+            proxies=proxies,
+        )
 
-        chunked = not (request.body is None or 'Content-Length' in request.headers)
+        chunked = not (request.body is None or "Content-Length" in request.headers)
 
         if isinstance(timeout, tuple):
             try:
                 connect, read = timeout
                 timeout = TimeoutSauce(connect=connect, read=read)
-            except ValueError as e:
-                # this may raise a string formatting error.
-                err = ("Invalid timeout {}. Pass a (connect, read) "
-                       "timeout tuple, or a single float to set "
-                       "both timeouts to the same value".format(timeout))
-                raise ValueError(err)
+            except ValueError:
+                raise ValueError(
+                    f"Invalid timeout {timeout}. Pass a (connect, read) timeout tuple, "
+                    f"or a single float to set both timeouts to the same value."
+                )
         elif isinstance(timeout, TimeoutSauce):
             pass
         else:
@@ -447,22 +496,24 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox
                     preload_content=False,
                     decode_content=False,
                     retries=self.max_retries,
-                    timeout=timeout
+                    timeout=timeout,
                 )
 
             # Send the request.
else: - if hasattr(conn, 'proxy_pool'): + if hasattr(conn, "proxy_pool"): conn = conn.proxy_pool low_conn = conn._get_conn(timeout=DEFAULT_POOL_TIMEOUT) try: - skip_host = 'Host' in request.headers - low_conn.putrequest(request.method, - url, - skip_accept_encoding=True, - skip_host=skip_host) + skip_host = "Host" in request.headers + low_conn.putrequest( + request.method, + url, + skip_accept_encoding=True, + skip_host=skip_host, + ) for header, value in request.headers.items(): low_conn.putheader(header, value) @@ -470,11 +521,11 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox low_conn.endheaders() for i in request.body: - low_conn.send(hex(len(i))[2:].encode('utf-8')) - low_conn.send(b'\r\n') + low_conn.send(hex(len(i))[2:].encode("utf-8")) + low_conn.send(b"\r\n") low_conn.send(i) - low_conn.send(b'\r\n') - low_conn.send(b'0\r\n\r\n') + low_conn.send(b"\r\n") + low_conn.send(b"0\r\n\r\n") # Receive the response from the server r = low_conn.getresponse() @@ -484,15 +535,15 @@ def send(self, request, stream=False, timeout=None, verify=True, cert=None, prox pool=conn, connection=low_conn, preload_content=False, - decode_content=False + decode_content=False, ) - except: + except Exception: # If we hit any problems here, clean up the connection. - # Then, reraise so that we can handle the actual exception. + # Then, raise so that we can handle the actual exception. low_conn.close() raise - except (ProtocolError, socket.error) as err: + except (ProtocolError, OSError) as err: raise ConnectionError(err, request=request) except MaxRetryError as e: diff --git a/requests/api.py b/requests/api.py index 4cba90eefe..2f71aaed1a 100644 --- a/requests/api.py +++ b/requests/api.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.api ~~~~~~~~~~~~ @@ -72,7 +70,7 @@ def get(url, params=None, **kwargs): :rtype: requests.Response """ - return request('get', url, params=params, **kwargs) + return request("get", url, params=params, **kwargs) def options(url, **kwargs): @@ -84,7 +82,7 @@ def options(url, **kwargs): :rtype: requests.Response """ - return request('options', url, **kwargs) + return request("options", url, **kwargs) def head(url, **kwargs): @@ -98,8 +96,8 @@ def head(url, **kwargs): :rtype: requests.Response """ - kwargs.setdefault('allow_redirects', False) - return request('head', url, **kwargs) + kwargs.setdefault("allow_redirects", False) + return request("head", url, **kwargs) def post(url, data=None, json=None, **kwargs): @@ -114,7 +112,7 @@ def post(url, data=None, json=None, **kwargs): :rtype: requests.Response """ - return request('post', url, data=data, json=json, **kwargs) + return request("post", url, data=data, json=json, **kwargs) def put(url, data=None, **kwargs): @@ -129,7 +127,7 @@ def put(url, data=None, **kwargs): :rtype: requests.Response """ - return request('put', url, data=data, **kwargs) + return request("put", url, data=data, **kwargs) def patch(url, data=None, **kwargs): @@ -144,7 +142,7 @@ def patch(url, data=None, **kwargs): :rtype: requests.Response """ - return request('patch', url, data=data, **kwargs) + return request("patch", url, data=data, **kwargs) def delete(url, **kwargs): @@ -156,4 +154,4 @@ def delete(url, **kwargs): :rtype: requests.Response """ - return request('delete', url, **kwargs) + return request("delete", url, **kwargs) diff --git a/requests/auth.py b/requests/auth.py index eeface39ae..9733686ddb 100644 --- a/requests/auth.py +++ b/requests/auth.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.auth 
~~~~~~~~~~~~~ @@ -7,22 +5,21 @@ This module contains the authentication handlers for Requests. """ +import hashlib import os import re -import time -import hashlib import threading +import time import warnings - from base64 import b64encode -from .compat import urlparse, str, basestring -from .cookies import extract_cookies_to_jar from ._internal_utils import to_native_string +from .compat import basestring, str, urlparse +from .cookies import extract_cookies_to_jar from .utils import parse_dict_header -CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded' -CONTENT_TYPE_MULTI_PART = 'multipart/form-data' +CONTENT_TYPE_FORM_URLENCODED = "application/x-www-form-urlencoded" +CONTENT_TYPE_MULTI_PART = "multipart/form-data" def _basic_auth_str(username, password): @@ -57,23 +54,23 @@ def _basic_auth_str(username, password): # -- End Removal -- if isinstance(username, str): - username = username.encode('latin1') + username = username.encode("latin1") if isinstance(password, str): - password = password.encode('latin1') + password = password.encode("latin1") - authstr = 'Basic ' + to_native_string( - b64encode(b':'.join((username, password))).strip() + authstr = "Basic " + to_native_string( + b64encode(b":".join((username, password))).strip() ) return authstr -class AuthBase(object): +class AuthBase: """Base class that all auth implementations derive from""" def __call__(self, r): - raise NotImplementedError('Auth hooks must be callable.') + raise NotImplementedError("Auth hooks must be callable.") class HTTPBasicAuth(AuthBase): @@ -84,16 +81,18 @@ def __init__(self, username, password): self.password = password def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) def __ne__(self, other): return not self == other def __call__(self, r): - r.headers['Authorization'] = _basic_auth_str(self.username, self.password) + r.headers["Authorization"] = _basic_auth_str(self.username, self.password) return r @@ -101,7 +100,7 @@ class HTTPProxyAuth(HTTPBasicAuth): """Attaches HTTP Proxy Authentication to a given Request object.""" def __call__(self, r): - r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password) + r.headers["Proxy-Authorization"] = _basic_auth_str(self.username, self.password) return r @@ -116,9 +115,9 @@ def __init__(self, username, password): def init_per_thread_state(self): # Ensure state is initialized just once per-thread - if not hasattr(self._thread_local, 'init'): + if not hasattr(self._thread_local, "init"): self._thread_local.init = True - self._thread_local.last_nonce = '' + self._thread_local.last_nonce = "" self._thread_local.nonce_count = 0 self._thread_local.chal = {} self._thread_local.pos = None @@ -129,44 +128,52 @@ def build_digest_header(self, method, url): :rtype: str """ - realm = self._thread_local.chal['realm'] - nonce = self._thread_local.chal['nonce'] - qop = self._thread_local.chal.get('qop') - algorithm = self._thread_local.chal.get('algorithm') - opaque = self._thread_local.chal.get('opaque') + realm = self._thread_local.chal["realm"] + nonce = self._thread_local.chal["nonce"] + qop = self._thread_local.chal.get("qop") + algorithm = self._thread_local.chal.get("algorithm") + opaque = self._thread_local.chal.get("opaque") hash_utf8 = None if algorithm is None: - _algorithm = 'MD5' + _algorithm = "MD5" else: 
_algorithm = algorithm.upper() # lambdas assume digest modules are imported at the top level - if _algorithm == 'MD5' or _algorithm == 'MD5-SESS': + if _algorithm == "MD5" or _algorithm == "MD5-SESS": + def md5_utf8(x): if isinstance(x, str): - x = x.encode('utf-8') + x = x.encode("utf-8") return hashlib.md5(x).hexdigest() + hash_utf8 = md5_utf8 - elif _algorithm == 'SHA': + elif _algorithm == "SHA": + def sha_utf8(x): if isinstance(x, str): - x = x.encode('utf-8') + x = x.encode("utf-8") return hashlib.sha1(x).hexdigest() + hash_utf8 = sha_utf8 - elif _algorithm == 'SHA-256': + elif _algorithm == "SHA-256": + def sha256_utf8(x): if isinstance(x, str): - x = x.encode('utf-8') + x = x.encode("utf-8") return hashlib.sha256(x).hexdigest() + hash_utf8 = sha256_utf8 - elif _algorithm == 'SHA-512': + elif _algorithm == "SHA-512": + def sha512_utf8(x): if isinstance(x, str): - x = x.encode('utf-8') + x = x.encode("utf-8") return hashlib.sha512(x).hexdigest() + hash_utf8 = sha512_utf8 - KD = lambda s, d: hash_utf8("%s:%s" % (s, d)) + KD = lambda s, d: hash_utf8(f"{s}:{d}") # noqa:E731 if hash_utf8 is None: return None @@ -177,10 +184,10 @@ def sha512_utf8(x): #: path is request-uri defined in RFC 2616 which should not be empty path = p_parsed.path or "/" if p_parsed.query: - path += '?' + p_parsed.query + path += f"?{p_parsed.query}" - A1 = '%s:%s:%s' % (self.username, realm, self.password) - A2 = '%s:%s' % (method, path) + A1 = f"{self.username}:{realm}:{self.password}" + A2 = f"{method}:{path}" HA1 = hash_utf8(A1) HA2 = hash_utf8(A2) @@ -189,22 +196,20 @@ def sha512_utf8(x): self._thread_local.nonce_count += 1 else: self._thread_local.nonce_count = 1 - ncvalue = '%08x' % self._thread_local.nonce_count - s = str(self._thread_local.nonce_count).encode('utf-8') - s += nonce.encode('utf-8') - s += time.ctime().encode('utf-8') + ncvalue = f"{self._thread_local.nonce_count:08x}" + s = str(self._thread_local.nonce_count).encode("utf-8") + s += nonce.encode("utf-8") + s += time.ctime().encode("utf-8") s += os.urandom(8) - cnonce = (hashlib.sha1(s).hexdigest()[:16]) - if _algorithm == 'MD5-SESS': - HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) + cnonce = hashlib.sha1(s).hexdigest()[:16] + if _algorithm == "MD5-SESS": + HA1 = hash_utf8(f"{HA1}:{nonce}:{cnonce}") if not qop: - respdig = KD(HA1, "%s:%s" % (nonce, HA2)) - elif qop == 'auth' or 'auth' in qop.split(','): - noncebit = "%s:%s:%s:%s:%s" % ( - nonce, ncvalue, cnonce, 'auth', HA2 - ) + respdig = KD(HA1, f"{nonce}:{HA2}") + elif qop == "auth" or "auth" in qop.split(","): + noncebit = f"{nonce}:{ncvalue}:{cnonce}:auth:{HA2}" respdig = KD(HA1, noncebit) else: # XXX handle auth-int. @@ -213,18 +218,20 @@ def sha512_utf8(x): self._thread_local.last_nonce = nonce # XXX should the partial digests be encoded too? 
- base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \ - 'response="%s"' % (self.username, realm, nonce, path, respdig) + base = ( + f'username="{self.username}", realm="{realm}", nonce="{nonce}", ' + f'uri="{path}", response="{respdig}"' + ) if opaque: - base += ', opaque="%s"' % opaque + base += f', opaque="{opaque}"' if algorithm: - base += ', algorithm="%s"' % algorithm + base += f', algorithm="{algorithm}"' if entdig: - base += ', digest="%s"' % entdig + base += f', digest="{entdig}"' if qop: - base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce) + base += f', qop="auth", nc={ncvalue}, cnonce="{cnonce}"' - return 'Digest %s' % (base) + return f"Digest {base}" def handle_redirect(self, r, **kwargs): """Reset num_401_calls counter on redirects.""" @@ -248,13 +255,13 @@ def handle_401(self, r, **kwargs): # Rewind the file position indicator of the body to where # it was to resend the request. r.request.body.seek(self._thread_local.pos) - s_auth = r.headers.get('www-authenticate', '') + s_auth = r.headers.get("www-authenticate", "") - if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2: + if "digest" in s_auth.lower() and self._thread_local.num_401_calls < 2: self._thread_local.num_401_calls += 1 - pat = re.compile(r'digest ', flags=re.IGNORECASE) - self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1)) + pat = re.compile(r"digest ", flags=re.IGNORECASE) + self._thread_local.chal = parse_dict_header(pat.sub("", s_auth, count=1)) # Consume content and release the original connection # to allow our new request to reuse the same one. @@ -264,8 +271,9 @@ def handle_401(self, r, **kwargs): extract_cookies_to_jar(prep._cookies, r.request, r.raw) prep.prepare_cookies(prep._cookies) - prep.headers['Authorization'] = self.build_digest_header( - prep.method, prep.url) + prep.headers["Authorization"] = self.build_digest_header( + prep.method, prep.url + ) _r = r.connection.send(prep, **kwargs) _r.history.append(r) _r.request = prep @@ -280,7 +288,7 @@ def __call__(self, r): self.init_per_thread_state() # If we have a saved nonce, skip the 401 if self._thread_local.last_nonce: - r.headers['Authorization'] = self.build_digest_header(r.method, r.url) + r.headers["Authorization"] = self.build_digest_header(r.method, r.url) try: self._thread_local.pos = r.body.tell() except AttributeError: @@ -289,17 +297,19 @@ def __call__(self, r): # file position of the previous body. Ensure it's set to # None. 
self._thread_local.pos = None - r.register_hook('response', self.handle_401) - r.register_hook('response', self.handle_redirect) + r.register_hook("response", self.handle_401) + r.register_hook("response", self.handle_redirect) self._thread_local.num_401_calls = 1 return r def __eq__(self, other): - return all([ - self.username == getattr(other, 'username', None), - self.password == getattr(other, 'password', None) - ]) + return all( + [ + self.username == getattr(other, "username", None), + self.password == getattr(other, "password", None), + ] + ) def __ne__(self, other): return not self == other diff --git a/requests/certs.py b/requests/certs.py index d1a378d787..be422c3e91 100644 --- a/requests/certs.py +++ b/requests/certs.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- """ requests.certs @@ -14,5 +13,5 @@ """ from certifi import where -if __name__ == '__main__': +if __name__ == "__main__": print(where()) diff --git a/requests/compat.py b/requests/compat.py index aca7d58409..6776163c94 100644 --- a/requests/compat.py +++ b/requests/compat.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.compat ~~~~~~~~~~~~~~~ @@ -24,15 +22,16 @@ _ver = sys.version_info #: Python 2.x? -is_py2 = (_ver[0] == 2) +is_py2 = _ver[0] == 2 #: Python 3.x? -is_py3 = (_ver[0] == 3) +is_py3 = _ver[0] == 3 # json/simplejson module import resolution has_simplejson = False try: import simplejson as json + has_simplejson = True except ImportError: import json @@ -42,18 +41,35 @@ else: from json import JSONDecodeError -# --------- -# Legacy Imports -# --------- -from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag -from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment +# Keep OrderedDict for backwards compatibility. +from collections import OrderedDict +from collections.abc import Callable, Mapping, MutableMapping from http import cookiejar as cookielib from http.cookies import Morsel from io import StringIO -# Keep OrderedDict for backwards compatibility. -from collections import OrderedDict -from collections.abc import Callable, Mapping, MutableMapping +# -------------- +# Legacy Imports +# -------------- +from urllib.parse import ( + quote, + quote_plus, + unquote, + unquote_plus, + urldefrag, + urlencode, + urljoin, + urlparse, + urlsplit, + urlunparse, +) +from urllib.request import ( + getproxies, + getproxies_environment, + parse_http_list, + proxy_bypass, + proxy_bypass_environment, +) builtin_str = str str = str diff --git a/requests/cookies.py b/requests/cookies.py index 56fccd9c25..bf54ab237e 100644 --- a/requests/cookies.py +++ b/requests/cookies.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.cookies ~~~~~~~~~~~~~~~~ @@ -9,12 +7,12 @@ requests.utils imports from here, so be careful with imports. """ +import calendar import copy import time -import calendar from ._internal_utils import to_native_string -from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping +from .compat import Morsel, MutableMapping, cookielib, urlparse, urlunparse try: import threading @@ -22,7 +20,7 @@ import dummy_threading as threading -class MockRequest(object): +class MockRequest: """Wraps a `requests.Request` to mimic a `urllib2.Request`. 
The code in `cookielib.CookieJar` expects this interface in order to correctly @@ -51,16 +49,22 @@ def get_origin_req_host(self): def get_full_url(self): # Only return the response's URL if the user hadn't set the Host # header - if not self._r.headers.get('Host'): + if not self._r.headers.get("Host"): return self._r.url # If they did set it, retrieve it and reconstruct the expected domain - host = to_native_string(self._r.headers['Host'], encoding='utf-8') + host = to_native_string(self._r.headers["Host"], encoding="utf-8") parsed = urlparse(self._r.url) # Reconstruct the URL as we expect it - return urlunparse([ - parsed.scheme, host, parsed.path, parsed.params, parsed.query, - parsed.fragment - ]) + return urlunparse( + [ + parsed.scheme, + host, + parsed.path, + parsed.params, + parsed.query, + parsed.fragment, + ] + ) def is_unverifiable(self): return True @@ -73,7 +77,9 @@ def get_header(self, name, default=None): def add_header(self, key, val): """cookielib has no legitimate use for this method; add it back if you find one.""" - raise NotImplementedError("Cookie headers should be added with add_unredirected_header()") + raise NotImplementedError( + "Cookie headers should be added with add_unredirected_header()" + ) def add_unredirected_header(self, name, value): self._new_headers[name] = value @@ -94,7 +100,7 @@ def host(self): return self.get_host() -class MockResponse(object): +class MockResponse: """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`. ...what? Basically, expose the parsed HTTP headers from the server response @@ -122,8 +128,7 @@ def extract_cookies_to_jar(jar, request, response): :param request: our own requests.Request object :param response: urllib3.HTTPResponse object """ - if not (hasattr(response, '_original_response') and - response._original_response): + if not (hasattr(response, "_original_response") and response._original_response): return # the _original_response field is the wrapped httplib.HTTPResponse object, req = MockRequest(request) @@ -140,7 +145,7 @@ def get_cookie_header(jar, request): """ r = MockRequest(request) jar.add_cookie_header(r) - return r.get_new_headers().get('Cookie') + return r.get_new_headers().get("Cookie") def remove_cookie_by_name(cookiejar, name, domain=None, path=None): @@ -205,7 +210,9 @@ def set(self, name, value, **kwargs): """ # support client code that unsets cookies by assignment of a None value: if value is None: - remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path')) + remove_cookie_by_name( + self, name, domain=kwargs.get("domain"), path=kwargs.get("path") + ) return if isinstance(value, Morsel): @@ -305,16 +312,15 @@ def get_dict(self, domain=None, path=None): """ dictionary = {} for cookie in iter(self): - if ( - (domain is None or cookie.domain == domain) and - (path is None or cookie.path == path) + if (domain is None or cookie.domain == domain) and ( + path is None or cookie.path == path ): dictionary[cookie.name] = cookie.value return dictionary def __contains__(self, name): try: - return super(RequestsCookieJar, self).__contains__(name) + return super().__contains__(name) except CookieConflictError: return True @@ -341,9 +347,13 @@ def __delitem__(self, name): remove_cookie_by_name(self, name) def set_cookie(self, cookie, *args, **kwargs): - if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'): - cookie.value = cookie.value.replace('\\"', '') - return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs) 
+ if ( + hasattr(cookie.value, "startswith") + and cookie.value.startswith('"') + and cookie.value.endswith('"') + ): + cookie.value = cookie.value.replace('\\"', "") + return super().set_cookie(cookie, *args, **kwargs) def update(self, other): """Updates this jar with cookies from another CookieJar or dict-like""" @@ -351,7 +361,7 @@ def update(self, other): for cookie in other: self.set_cookie(copy.copy(cookie)) else: - super(RequestsCookieJar, self).update(other) + super().update(other) def _find(self, name, domain=None, path=None): """Requests uses this method internally to get cookie values. @@ -371,7 +381,7 @@ def _find(self, name, domain=None, path=None): if path is None or cookie.path == path: return cookie.value - raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") def _find_no_duplicates(self, name, domain=None, path=None): """Both ``__get_item__`` and ``get`` call this function: it's never @@ -390,25 +400,29 @@ def _find_no_duplicates(self, name, domain=None, path=None): if cookie.name == name: if domain is None or cookie.domain == domain: if path is None or cookie.path == path: - if toReturn is not None: # if there are multiple cookies that meet passed in criteria - raise CookieConflictError('There are multiple cookies with name, %r' % (name)) - toReturn = cookie.value # we will eventually return this as long as no cookie conflict + if toReturn is not None: + # if there are multiple cookies that meet passed in criteria + raise CookieConflictError( + f"There are multiple cookies with name, {name!r}" + ) + # we will eventually return this as long as no cookie conflict + toReturn = cookie.value if toReturn: return toReturn - raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) + raise KeyError(f"name={name!r}, domain={domain!r}, path={path!r}") def __getstate__(self): """Unlike a normal CookieJar, this class is pickleable.""" state = self.__dict__.copy() # remove the unpickleable RLock object - state.pop('_cookies_lock') + state.pop("_cookies_lock") return state def __setstate__(self, state): """Unlike a normal CookieJar, this class is pickleable.""" self.__dict__.update(state) - if '_cookies_lock' not in self.__dict__: + if "_cookies_lock" not in self.__dict__: self._cookies_lock = threading.RLock() def copy(self): @@ -427,7 +441,7 @@ def _copy_cookie_jar(jar): if jar is None: return None - if hasattr(jar, 'copy'): + if hasattr(jar, "copy"): # We're dealing with an instance of RequestsCookieJar return jar.copy() # We're dealing with a generic CookieJar instance @@ -445,31 +459,32 @@ def create_cookie(name, value, **kwargs): and sent on every request (this is sometimes called a "supercookie"). 
""" result = { - 'version': 0, - 'name': name, - 'value': value, - 'port': None, - 'domain': '', - 'path': '/', - 'secure': False, - 'expires': None, - 'discard': True, - 'comment': None, - 'comment_url': None, - 'rest': {'HttpOnly': None}, - 'rfc2109': False, + "version": 0, + "name": name, + "value": value, + "port": None, + "domain": "", + "path": "/", + "secure": False, + "expires": None, + "discard": True, + "comment": None, + "comment_url": None, + "rest": {"HttpOnly": None}, + "rfc2109": False, } badargs = set(kwargs) - set(result) if badargs: - err = 'create_cookie() got unexpected keyword arguments: %s' - raise TypeError(err % list(badargs)) + raise TypeError( + f"create_cookie() got unexpected keyword arguments: {list(badargs)}" + ) result.update(kwargs) - result['port_specified'] = bool(result['port']) - result['domain_specified'] = bool(result['domain']) - result['domain_initial_dot'] = result['domain'].startswith('.') - result['path_specified'] = bool(result['path']) + result["port_specified"] = bool(result["port"]) + result["domain_specified"] = bool(result["domain"]) + result["domain_initial_dot"] = result["domain"].startswith(".") + result["path_specified"] = bool(result["path"]) return cookielib.Cookie(**result) @@ -478,30 +493,28 @@ def morsel_to_cookie(morsel): """Convert a Morsel object into a Cookie containing the one k/v pair.""" expires = None - if morsel['max-age']: + if morsel["max-age"]: try: - expires = int(time.time() + int(morsel['max-age'])) + expires = int(time.time() + int(morsel["max-age"])) except ValueError: - raise TypeError('max-age: %s must be integer' % morsel['max-age']) - elif morsel['expires']: - time_template = '%a, %d-%b-%Y %H:%M:%S GMT' - expires = calendar.timegm( - time.strptime(morsel['expires'], time_template) - ) + raise TypeError(f"max-age: {morsel['max-age']} must be integer") + elif morsel["expires"]: + time_template = "%a, %d-%b-%Y %H:%M:%S GMT" + expires = calendar.timegm(time.strptime(morsel["expires"], time_template)) return create_cookie( - comment=morsel['comment'], - comment_url=bool(morsel['comment']), + comment=morsel["comment"], + comment_url=bool(morsel["comment"]), discard=False, - domain=morsel['domain'], + domain=morsel["domain"], expires=expires, name=morsel.key, - path=morsel['path'], + path=morsel["path"], port=None, - rest={'HttpOnly': morsel['httponly']}, + rest={"HttpOnly": morsel["httponly"]}, rfc2109=False, - secure=bool(morsel['secure']), + secure=bool(morsel["secure"]), value=morsel.value, - version=morsel['version'] or 0, + version=morsel["version"] or 0, ) @@ -534,11 +547,10 @@ def merge_cookies(cookiejar, cookies): :rtype: CookieJar """ if not isinstance(cookiejar, cookielib.CookieJar): - raise ValueError('You can only merge into CookieJar') + raise ValueError("You can only merge into CookieJar") if isinstance(cookies, dict): - cookiejar = cookiejar_from_dict( - cookies, cookiejar=cookiejar, overwrite=False) + cookiejar = cookiejar_from_dict(cookies, cookiejar=cookiejar, overwrite=False) elif isinstance(cookies, cookielib.CookieJar): try: cookiejar.update(cookies) diff --git a/requests/exceptions.py b/requests/exceptions.py index 06b9ebe4b0..e1cedf883d 100644 --- a/requests/exceptions.py +++ b/requests/exceptions.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.exceptions ~~~~~~~~~~~~~~~~~~~ @@ -18,13 +16,12 @@ class RequestException(IOError): def __init__(self, *args, **kwargs): """Initialize RequestException with `request` and `response` objects.""" - response = kwargs.pop('response', None) + 
response = kwargs.pop("response", None) self.response = response - self.request = kwargs.pop('request', None) - if (response is not None and not self.request and - hasattr(response, 'request')): + self.request = kwargs.pop("request", None) + if response is not None and not self.request and hasattr(response, "request"): self.request = self.response.request - super(RequestException, self).__init__(*args, **kwargs) + super().__init__(*args, **kwargs) class InvalidJSONError(RequestException): @@ -128,6 +125,7 @@ class RetryError(RequestException): class UnrewindableBodyError(RequestException): """Requests encountered an error when trying to rewind a body.""" + # Warnings diff --git a/requests/help.py b/requests/help.py index 753633f944..8fbcd6560a 100644 --- a/requests/help.py +++ b/requests/help.py @@ -1,10 +1,9 @@ """Module containing bug report helper(s).""" -from __future__ import print_function import json import platform -import sys import ssl +import sys import idna import urllib3 @@ -28,8 +27,8 @@ OpenSSL = None cryptography = None else: - import OpenSSL import cryptography + import OpenSSL def _implementation(): @@ -45,83 +44,83 @@ def _implementation(): """ implementation = platform.python_implementation() - if implementation == 'CPython': + if implementation == "CPython": implementation_version = platform.python_version() - elif implementation == 'PyPy': - implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, - sys.pypy_version_info.minor, - sys.pypy_version_info.micro) - if sys.pypy_version_info.releaselevel != 'final': - implementation_version = ''.join([ - implementation_version, sys.pypy_version_info.releaselevel - ]) - elif implementation == 'Jython': + elif implementation == "PyPy": + implementation_version = "{}.{}.{}".format( + sys.pypy_version_info.major, + sys.pypy_version_info.minor, + sys.pypy_version_info.micro, + ) + if sys.pypy_version_info.releaselevel != "final": + implementation_version = "".join( + [implementation_version, sys.pypy_version_info.releaselevel] + ) + elif implementation == "Jython": implementation_version = platform.python_version() # Complete Guess - elif implementation == 'IronPython': + elif implementation == "IronPython": implementation_version = platform.python_version() # Complete Guess else: - implementation_version = 'Unknown' + implementation_version = "Unknown" - return {'name': implementation, 'version': implementation_version} + return {"name": implementation, "version": implementation_version} def info(): """Generate information for a bug report.""" try: platform_info = { - 'system': platform.system(), - 'release': platform.release(), + "system": platform.system(), + "release": platform.release(), } - except IOError: + except OSError: platform_info = { - 'system': 'Unknown', - 'release': 'Unknown', + "system": "Unknown", + "release": "Unknown", } implementation_info = _implementation() - urllib3_info = {'version': urllib3.__version__} - charset_normalizer_info = {'version': None} - chardet_info = {'version': None} + urllib3_info = {"version": urllib3.__version__} + charset_normalizer_info = {"version": None} + chardet_info = {"version": None} if charset_normalizer: - charset_normalizer_info = {'version': charset_normalizer.__version__} + charset_normalizer_info = {"version": charset_normalizer.__version__} if chardet: - chardet_info = {'version': chardet.__version__} + chardet_info = {"version": chardet.__version__} pyopenssl_info = { - 'version': None, - 'openssl_version': '', + "version": None, + "openssl_version": "", } 
if OpenSSL: pyopenssl_info = { - 'version': OpenSSL.__version__, - 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, + "version": OpenSSL.__version__, + "openssl_version": f"{OpenSSL.SSL.OPENSSL_VERSION_NUMBER:x}", } cryptography_info = { - 'version': getattr(cryptography, '__version__', ''), + "version": getattr(cryptography, "__version__", ""), } idna_info = { - 'version': getattr(idna, '__version__', ''), + "version": getattr(idna, "__version__", ""), } system_ssl = ssl.OPENSSL_VERSION_NUMBER - system_ssl_info = { - 'version': '%x' % system_ssl if system_ssl is not None else '' - } + system_ssl_info = {"version": f"{system_ssl:x}" if system_ssl is not None else ""} return { - 'platform': platform_info, - 'implementation': implementation_info, - 'system_ssl': system_ssl_info, - 'using_pyopenssl': pyopenssl is not None, - 'using_charset_normalizer': chardet is None, - 'pyOpenSSL': pyopenssl_info, - 'urllib3': urllib3_info, - 'chardet': chardet_info, - 'charset_normalizer': charset_normalizer_info, - 'cryptography': cryptography_info, - 'idna': idna_info, - 'requests': { - 'version': requests_version, + "platform": platform_info, + "implementation": implementation_info, + "system_ssl": system_ssl_info, + "using_pyopenssl": pyopenssl is not None, + "using_charset_normalizer": chardet is None, + "pyOpenSSL": pyopenssl_info, + "urllib3": urllib3_info, + "chardet": chardet_info, + "charset_normalizer": charset_normalizer_info, + "cryptography": cryptography_info, + "idna": idna_info, + "requests": { + "version": requests_version, }, } @@ -131,5 +130,5 @@ def main(): print(json.dumps(info(), sort_keys=True, indent=2)) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/requests/hooks.py b/requests/hooks.py index 7a51f212c8..d181ba2ec2 100644 --- a/requests/hooks.py +++ b/requests/hooks.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.hooks ~~~~~~~~~~~~~~ @@ -11,12 +9,13 @@ ``response``: The response generated from a Request. """ -HOOKS = ['response'] +HOOKS = ["response"] def default_hooks(): return {event: [] for event in HOOKS} + # TODO: response is the only one @@ -25,7 +24,7 @@ def dispatch_hook(key, hooks, hook_data, **kwargs): hooks = hooks or {} hooks = hooks.get(key) if hooks: - if hasattr(hooks, '__call__'): + if hasattr(hooks, "__call__"): hooks = [hooks] for hook in hooks: _hook_data = hook(hook_data, **kwargs) diff --git a/requests/models.py b/requests/models.py index df0f8b3e13..7e1522837f 100644 --- a/requests/models.py +++ b/requests/models.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.models ~~~~~~~~~~~~~~~ @@ -12,11 +10,9 @@ # Import encoding now, to avoid implicit import later. # Implicit import within threads may cause LookupError when standard library is in a ZIP, # such as in Embedded Python. See https://github.com/psf/requests/issues/3578. 
-import encodings.idna +import encodings.idna # noqa: F401 +from io import UnsupportedOperation -from urllib3.fields import RequestField -from urllib3.filepost import encode_multipart_formdata -from urllib3.util import parse_url from urllib3.exceptions import ( DecodeError, LocationParseError, @@ -24,37 +20,58 @@ ReadTimeoutError, SSLError, ) +from urllib3.fields import RequestField +from urllib3.filepost import encode_multipart_formdata +from urllib3.util import parse_url -from io import UnsupportedOperation -from .hooks import default_hooks -from .structures import CaseInsensitiveDict - +from ._internal_utils import to_native_string, unicode_is_ascii from .auth import HTTPBasicAuth -from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar +from .compat import ( + Callable, + JSONDecodeError, + Mapping, + basestring, + builtin_str, + chardet, + cookielib, +) +from .compat import json as complexjson +from .compat import urlencode, urlsplit, urlunparse +from .cookies import _copy_cookie_jar, cookiejar_from_dict, get_cookie_header from .exceptions import ( - HTTPError, MissingSchema, InvalidURL, ChunkedEncodingError, - ContentDecodingError, ConnectionError, StreamConsumedError, - InvalidJSONError) + ChunkedEncodingError, + ConnectionError, + ContentDecodingError, + HTTPError, + InvalidJSONError, + InvalidURL, +) from .exceptions import JSONDecodeError as RequestsJSONDecodeError +from .exceptions import MissingSchema from .exceptions import SSLError as RequestsSSLError -from ._internal_utils import to_native_string, unicode_is_ascii -from .utils import ( - guess_filename, get_auth_from_url, requote_uri, - stream_decode_response_unicode, to_key_val_list, parse_header_links, - iter_slices, guess_json_utf, super_len, check_header_validity) -from .compat import ( - Callable, Mapping, - cookielib, urlunparse, urlsplit, urlencode, - chardet, builtin_str, basestring, JSONDecodeError) -from .compat import json as complexjson +from .exceptions import StreamConsumedError +from .hooks import default_hooks from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( + check_header_validity, + get_auth_from_url, + guess_filename, + guess_json_utf, + iter_slices, + parse_header_links, + requote_uri, + stream_decode_response_unicode, + super_len, + to_key_val_list, +) #: The set of HTTP status codes that indicate an automatically #: processable redirect. 
REDIRECT_STATI = ( - codes.moved, # 301 - codes.found, # 302 - codes.other, # 303 + codes.moved, # 301 + codes.found, # 302 + codes.other, # 303 codes.temporary_redirect, # 307 codes.permanent_redirect, # 308 ) @@ -64,7 +81,7 @@ ITER_CHUNK_SIZE = 512 -class RequestEncodingMixin(object): +class RequestEncodingMixin: @property def path_url(self): """Build the path URL to use.""" @@ -75,16 +92,16 @@ def path_url(self): path = p.path if not path: - path = '/' + path = "/" url.append(path) query = p.query if query: - url.append('?') + url.append("?") url.append(query) - return ''.join(url) + return "".join(url) @staticmethod def _encode_params(data): @@ -97,18 +114,21 @@ def _encode_params(data): if isinstance(data, (str, bytes)): return data - elif hasattr(data, 'read'): + elif hasattr(data, "read"): return data - elif hasattr(data, '__iter__'): + elif hasattr(data, "__iter__"): result = [] for k, vs in to_key_val_list(data): - if isinstance(vs, basestring) or not hasattr(vs, '__iter__'): + if isinstance(vs, basestring) or not hasattr(vs, "__iter__"): vs = [vs] for v in vs: if v is not None: result.append( - (k.encode('utf-8') if isinstance(k, str) else k, - v.encode('utf-8') if isinstance(v, str) else v)) + ( + k.encode("utf-8") if isinstance(k, str) else k, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) return urlencode(result, doseq=True) else: return data @@ -123,7 +143,7 @@ def _encode_files(files, data): The tuples may be 2-tuples (filename, fileobj), 3-tuples (filename, fileobj, contentype) or 4-tuples (filename, fileobj, contentype, custom_headers). """ - if (not files): + if not files: raise ValueError("Files must be provided.") elif isinstance(data, basestring): raise ValueError("Data must not be a string.") @@ -133,7 +153,7 @@ def _encode_files(files, data): files = to_key_val_list(files or {}) for field, val in fields: - if isinstance(val, basestring) or not hasattr(val, '__iter__'): + if isinstance(val, basestring) or not hasattr(val, "__iter__"): val = [val] for v in val: if v is not None: @@ -142,8 +162,13 @@ def _encode_files(files, data): v = str(v) new_fields.append( - (field.decode('utf-8') if isinstance(field, bytes) else field, - v.encode('utf-8') if isinstance(v, str) else v)) + ( + field.decode("utf-8") + if isinstance(field, bytes) + else field, + v.encode("utf-8") if isinstance(v, str) else v, + ) + ) for (k, v) in files: # support for explicit filename @@ -162,7 +187,7 @@ def _encode_files(files, data): if isinstance(fp, (str, bytes, bytearray)): fdata = fp - elif hasattr(fp, 'read'): + elif hasattr(fp, "read"): fdata = fp.read() elif fp is None: continue @@ -178,16 +203,16 @@ def _encode_files(files, data): return body, content_type -class RequestHooksMixin(object): +class RequestHooksMixin: def register_hook(self, event, hook): """Properly register a hook.""" if event not in self.hooks: - raise ValueError('Unsupported event specified, with event name "%s"' % (event)) + raise ValueError(f'Unsupported event specified, with event name "{event}"') if isinstance(hook, Callable): self.hooks[event].append(hook) - elif hasattr(hook, '__iter__'): + elif hasattr(hook, "__iter__"): self.hooks[event].extend(h for h in hook if isinstance(h, Callable)) def deregister_hook(self, event, hook): @@ -230,9 +255,19 @@ class Request(RequestHooksMixin): """ - def __init__(self, - method=None, url=None, headers=None, files=None, data=None, - params=None, auth=None, cookies=None, hooks=None, json=None): + def __init__( + self, + method=None, + url=None, + headers=None, + 
files=None,
+        data=None,
+        params=None,
+        auth=None,
+        cookies=None,
+        hooks=None,
+        json=None,
+    ):
 
         # Default empty dicts for dict params.
         data = [] if data is None else data
@@ -256,7 +291,7 @@ def __init__(self,
         self.cookies = cookies
 
     def __repr__(self):
-        return '<Request [%s]>' % (self.method)
+        return f"<Request [{self.method}]>"
 
     def prepare(self):
         """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
@@ -314,9 +349,19 @@ def __init__(self):
         #: integer denoting starting position of a readable file-like body.
         self._body_position = None
 
-    def prepare(self,
-            method=None, url=None, headers=None, files=None, data=None,
-            params=None, auth=None, cookies=None, hooks=None, json=None):
+    def prepare(
+        self,
+        method=None,
+        url=None,
+        headers=None,
+        files=None,
+        data=None,
+        params=None,
+        auth=None,
+        cookies=None,
+        hooks=None,
+        json=None,
+    ):
         """Prepares the entire request with the given parameters."""
 
         self.prepare_method(method)
@@ -333,7 +378,7 @@ def prepare(self,
         self.prepare_hooks(hooks)
 
     def __repr__(self):
-        return '<PreparedRequest [%s]>' % (self.method)
+        return f"<PreparedRequest [{self.method}]>"
 
     def copy(self):
         p = PreparedRequest()
@@ -357,7 +402,7 @@ def _get_idna_encoded_host(host):
         import idna
 
         try:
-            host = idna.encode(host, uts46=True).decode('utf-8')
+            host = idna.encode(host, uts46=True).decode("utf-8")
         except idna.IDNAError:
             raise UnicodeError
         return host
@@ -370,7 +415,7 @@ def prepare_url(self, url, params):
         #: on python 3.x.
         #: https://github.com/psf/requests/pull/2238
         if isinstance(url, bytes):
-            url = url.decode('utf8')
+            url = url.decode("utf8")
         else:
             url = str(url)
 
@@ -380,7 +425,7 @@ def prepare_url(self, url, params):
         # Don't do any URL preparation for non-HTTP schemes like `mailto`,
         # `data` etc to work around exceptions from `url_parse`, which
         # handles RFC 3986 only.
-        if ':' in url and not url.lower().startswith('http'):
+        if ":" in url and not url.lower().startswith("http"):
             self.url = url
             return
 
@@ -391,13 +436,13 @@ def prepare_url(self, url, params):
             raise InvalidURL(*e.args)
 
         if not scheme:
-            error = ("Invalid URL {0!r}: No scheme supplied. Perhaps you meant http://{0}?")
-            error = error.format(to_native_string(url, 'utf8'))
-
-            raise MissingSchema(error)
+            raise MissingSchema(
+                f"Invalid URL {url!r}: No scheme supplied. "
+                f"Perhaps you meant http://{url}?"
+            )
 
         if not host:
-            raise InvalidURL("Invalid URL %r: No host supplied" % url)
+            raise InvalidURL(f"Invalid URL {url!r}: No host supplied")
 
         # In general, we want to try IDNA encoding the hostname if the string contains
         # non-ASCII characters. This allows users to automatically get the correct IDNA
@@ -407,21 +452,21 @@ def prepare_url(self, url, params):
             try:
                 host = self._get_idna_encoded_host(host)
             except UnicodeError:
-                raise InvalidURL('URL has an invalid label.')
-        elif host.startswith((u'*', u'.')):
-            raise InvalidURL('URL has an invalid label.')
+                raise InvalidURL("URL has an invalid label.")
+        elif host.startswith(("*", ".")):
+            raise InvalidURL("URL has an invalid label.")
 
         # Carefully reconstruct the network location
-        netloc = auth or ''
+        netloc = auth or ""
         if netloc:
-            netloc += '@'
+            netloc += "@"
         netloc += host
 
         if port:
-            netloc += ':' + str(port)
+            netloc += f":{port}"
 
         # Bare domains aren't valid URLs.
if not path: - path = '/' + path = "/" if isinstance(params, (str, bytes)): params = to_native_string(params) @@ -429,7 +474,7 @@ def prepare_url(self, url, params): enc_params = self._encode_params(params) if enc_params: if query: - query = '%s&%s' % (query, enc_params) + query = f"{query}&{enc_params}" else: query = enc_params @@ -460,7 +505,7 @@ def prepare_body(self, data, files, json=None): if not data and json is not None: # urllib3 requires a bytes-like body. Python 2's json.dumps # provides this natively, but Python 3 gives a Unicode string. - content_type = 'application/json' + content_type = "application/json" try: body = complexjson.dumps(json, allow_nan=False) @@ -468,12 +513,14 @@ def prepare_body(self, data, files, json=None): raise InvalidJSONError(ve, request=self) if not isinstance(body, bytes): - body = body.encode('utf-8') + body = body.encode("utf-8") - is_stream = all([ - hasattr(data, '__iter__'), - not isinstance(data, (basestring, list, tuple, Mapping)) - ]) + is_stream = all( + [ + hasattr(data, "__iter__"), + not isinstance(data, (basestring, list, tuple, Mapping)), + ] + ) if is_stream: try: @@ -483,24 +530,26 @@ def prepare_body(self, data, files, json=None): body = data - if getattr(body, 'tell', None) is not None: + if getattr(body, "tell", None) is not None: # Record the current file position before reading. # This will allow us to rewind a file in the event # of a redirect. try: self._body_position = body.tell() - except (IOError, OSError): + except OSError: # This differentiates from None, allowing us to catch # a failed `tell()` later when trying to rewind the body self._body_position = object() if files: - raise NotImplementedError('Streamed bodies and files are mutually exclusive.') + raise NotImplementedError( + "Streamed bodies and files are mutually exclusive." + ) if length: - self.headers['Content-Length'] = builtin_str(length) + self.headers["Content-Length"] = builtin_str(length) else: - self.headers['Transfer-Encoding'] = 'chunked' + self.headers["Transfer-Encoding"] = "chunked" else: # Multi-part file uploads. if files: @@ -508,16 +557,16 @@ def prepare_body(self, data, files, json=None): else: if data: body = self._encode_params(data) - if isinstance(data, basestring) or hasattr(data, 'read'): + if isinstance(data, basestring) or hasattr(data, "read"): content_type = None else: - content_type = 'application/x-www-form-urlencoded' + content_type = "application/x-www-form-urlencoded" self.prepare_content_length(body) # Add content-type if it wasn't explicitly provided. - if content_type and ('content-type' not in self.headers): - self.headers['Content-Type'] = content_type + if content_type and ("content-type" not in self.headers): + self.headers["Content-Type"] = content_type self.body = body @@ -528,13 +577,16 @@ def prepare_content_length(self, body): if length: # If length exists, set it. Otherwise, we fallback # to Transfer-Encoding: chunked. - self.headers['Content-Length'] = builtin_str(length) - elif self.method not in ('GET', 'HEAD') and self.headers.get('Content-Length') is None: + self.headers["Content-Length"] = builtin_str(length) + elif ( + self.method not in ("GET", "HEAD") + and self.headers.get("Content-Length") is None + ): # Set Content-Length to 0 for methods that can have a body # but don't provide one. (i.e. 
not GET or HEAD)
-            self.headers['Content-Length'] = '0'
+            self.headers["Content-Length"] = "0"
 
-    def prepare_auth(self, auth, url=''):
+    def prepare_auth(self, auth, url=""):
         """Prepares the given HTTP auth data."""
 
         # If no Auth is explicitly provided, extract it from the URL first.
@@ -574,7 +626,7 @@ def prepare_cookies(self, cookies):
 
         cookie_header = get_cookie_header(self._cookies, self)
         if cookie_header is not None:
-            self.headers['Cookie'] = cookie_header
+            self.headers["Cookie"] = cookie_header
 
     def prepare_hooks(self, hooks):
         """Prepares the given hooks."""
@@ -586,14 +638,22 @@ def prepare_hooks(self, hooks):
             self.register_hook(event, hooks[event])
 
 
-class Response(object):
+class Response:
     """The :class:`Response <Response>` object, which contains a server's
    response to an HTTP request.
    """
 
     __attrs__ = [
-        '_content', 'status_code', 'headers', 'url', 'history',
-        'encoding', 'reason', 'cookies', 'elapsed', 'request'
+        "_content",
+        "status_code",
+        "headers",
+        "url",
+        "history",
+        "encoding",
+        "reason",
+        "cookies",
+        "elapsed",
+        "request",
     ]
 
     def __init__(self):
@@ -662,11 +722,11 @@ def __setstate__(self, state):
             setattr(self, name, value)
 
         # pickled objects do not have .raw
-        setattr(self, '_content_consumed', True)
-        setattr(self, 'raw', None)
+        setattr(self, "_content_consumed", True)
+        setattr(self, "raw", None)
 
     def __repr__(self):
-        return '<Response [%s]>' % (self.status_code)
+        return f"<Response [{self.status_code}]>"
 
     def __bool__(self):
         """Returns True if :attr:`status_code` is less than 400.
@@ -712,12 +772,15 @@ def is_redirect(self):
         """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
-        return ('location' in self.headers and self.status_code in REDIRECT_STATI)
+        return "location" in self.headers and self.status_code in REDIRECT_STATI
 
     @property
     def is_permanent_redirect(self):
         """True if this Response one of the permanent versions of redirect."""
-        return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
+        return "location" in self.headers and self.status_code in (
+            codes.moved_permanently,
+            codes.permanent_redirect,
+        )
 
     @property
     def next(self):
@@ -727,7 +790,7 @@
     @property
     def apparent_encoding(self):
         """The apparent encoding, provided by the charset_normalizer or chardet libraries."""
-        return chardet.detect(self.content)['encoding']
+        return chardet.detect(self.content)["encoding"]
 
     def iter_content(self, chunk_size=1, decode_unicode=False):
         """Iterates over the response data. When stream=True is set on the
@@ -748,7 +811,7 @@
 
         def generate():
             # Special case for urllib3.
-            if hasattr(self.raw, 'stream'):
+            if hasattr(self.raw, "stream"):
                 try:
                     for chunk in self.raw.stream(chunk_size, decode_content=True):
                         yield chunk
@@ -773,7 +836,9 @@ def generate():
         if self._content_consumed and isinstance(self._content, bool):
             raise StreamConsumedError()
         elif chunk_size is not None and not isinstance(chunk_size, int):
-            raise TypeError("chunk_size must be an int, it is instead a %s." % type(chunk_size))
+            raise TypeError(
+                f"chunk_size must be an int, it is instead a {type(chunk_size)}."
+ ) # simulate reading small chunks of the content reused_chunks = iter_slices(self._content, chunk_size) @@ -786,7 +851,9 @@ def generate(): return chunks - def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None): + def iter_lines( + self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter=None + ): """Iterates over the response data, one line at a time. When stream=True is set on the request, this avoids reading the content at once into memory for large responses. @@ -796,7 +863,9 @@ def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter pending = None - for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode): + for chunk in self.iter_content( + chunk_size=chunk_size, decode_unicode=decode_unicode + ): if pending is not None: chunk = pending + chunk @@ -811,8 +880,7 @@ def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=False, delimiter else: pending = None - for line in lines: - yield line + yield from lines if pending is not None: yield pending @@ -824,13 +892,12 @@ def content(self): if self._content is False: # Read the contents. if self._content_consumed: - raise RuntimeError( - 'The content for this response was already consumed') + raise RuntimeError("The content for this response was already consumed") if self.status_code == 0 or self.raw is None: self._content = None else: - self._content = b''.join(self.iter_content(CONTENT_CHUNK_SIZE)) or b'' + self._content = b"".join(self.iter_content(CONTENT_CHUNK_SIZE)) or b"" self._content_consumed = True # don't need to release the connection; that's been handled by urllib3 @@ -855,7 +922,7 @@ def text(self): encoding = self.encoding if not self.content: - return str('') + return "" # Fallback to auto-detected encoding. if self.encoding is None: @@ -863,7 +930,7 @@ def text(self): # Decode unicode from given encoding. try: - content = str(self.content, encoding, errors='replace') + content = str(self.content, encoding, errors="replace") except (LookupError, TypeError): # A LookupError is raised if the encoding was not found which could # indicate a misspelling or similar mistake. @@ -871,7 +938,7 @@ def text(self): # A TypeError can be raised if encoding is None # # So we try blindly encoding. - content = str(self.content, errors='replace') + content = str(self.content, errors="replace") return content @@ -891,9 +958,7 @@ def json(self, **kwargs): encoding = guess_json_utf(self.content) if encoding is not None: try: - return complexjson.loads( - self.content.decode(encoding), **kwargs - ) + return complexjson.loads(self.content.decode(encoding), **kwargs) except UnicodeDecodeError: # Wrong UTF codec detected; usually because it's not UTF-8 # but some other 8-bit codec. This is an RFC violation, @@ -914,41 +979,44 @@ def json(self, **kwargs): def links(self): """Returns the parsed header links of the response, if any.""" - header = self.headers.get('link') + header = self.headers.get("link") - # l = MultiDict() - l = {} + resolved_links = {} if header: links = parse_header_links(header) for link in links: - key = link.get('rel') or link.get('url') - l[key] = link + key = link.get("rel") or link.get("url") + resolved_links[key] = link - return l + return resolved_links def raise_for_status(self): """Raises :class:`HTTPError`, if one occurred.""" - http_error_msg = '' + http_error_msg = "" if isinstance(self.reason, bytes): # We attempt to decode utf-8 first because some servers # choose to localize their reason strings. 
If the string # isn't utf-8, we fall back to iso-8859-1 for all other # encodings. (See PR #3538) try: - reason = self.reason.decode('utf-8') + reason = self.reason.decode("utf-8") except UnicodeDecodeError: - reason = self.reason.decode('iso-8859-1') + reason = self.reason.decode("iso-8859-1") else: reason = self.reason if 400 <= self.status_code < 500: - http_error_msg = u'%s Client Error: %s for url: %s' % (self.status_code, reason, self.url) + http_error_msg = ( + f"{self.status_code} Client Error: {reason} for url: {self.url}" + ) elif 500 <= self.status_code < 600: - http_error_msg = u'%s Server Error: %s for url: %s' % (self.status_code, reason, self.url) + http_error_msg = ( + f"{self.status_code} Server Error: {reason} for url: {self.url}" + ) if http_error_msg: raise HTTPError(http_error_msg, response=self) @@ -962,6 +1030,6 @@ def close(self): if not self._content_consumed: self.raw.close() - release_conn = getattr(self.raw, 'release_conn', None) + release_conn = getattr(self.raw, "release_conn", None) if release_conn is not None: release_conn() diff --git a/requests/packages.py b/requests/packages.py index 00196bff25..77c45c9e90 100644 --- a/requests/packages.py +++ b/requests/packages.py @@ -3,24 +3,26 @@ try: import chardet except ImportError: - import charset_normalizer as chardet import warnings - warnings.filterwarnings('ignore', 'Trying to detect', module='charset_normalizer') + import charset_normalizer as chardet + + warnings.filterwarnings("ignore", "Trying to detect", module="charset_normalizer") # This code exists for backwards compatibility reasons. # I don't like it either. Just look the other way. :) -for package in ('urllib3', 'idna'): +for package in ("urllib3", "idna"): locals()[package] = __import__(package) # This traversal is apparently necessary such that the identities are # preserved (requests.packages.urllib3.* is urllib3.*) for mod in list(sys.modules): - if mod == package or mod.startswith(package + '.'): - sys.modules['requests.packages.' + mod] = sys.modules[mod] + if mod == package or mod.startswith(f"{package}."): + sys.modules[f"requests.packages.{mod}"] = sys.modules[mod] target = chardet.__name__ for mod in list(sys.modules): - if mod == target or mod.startswith(target + '.'): - sys.modules['requests.packages.' + target.replace(target, 'chardet')] = sys.modules[mod] + if mod == target or mod.startswith(f"{target}."): + target = target.replace(target, "chardet") + sys.modules[f"requests.packages.{target}"] = sys.modules[mod] # Kinda cool, though, right? 
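Note on the requests/packages.py change above: the aliasing loops re-register urllib3 (and idna, and a chardet-compatible module) in sys.modules under the legacy requests.packages.* names. A minimal sanity check of that guarantee, which is not part of the diff itself and assumes a checkout with this change plus urllib3 installed and either chardet or charset_normalizer available:

    import urllib3

    import requests.packages

    # The shim registers the real urllib3 module object under the legacy
    # namespace, so both names resolve to the very same module, not a copy.
    assert requests.packages.urllib3 is urllib3

    # chardet is aliased too; on installs without chardet the name is
    # backed by charset_normalizer, mirroring the try/except import above.
    print(requests.packages.chardet.__name__)

Identity, rather than mere equality, is the point of the traversal: code that catches, say, requests.packages.urllib3.exceptions.HTTPError keeps working only because that class is the same object urllib3 actually raises.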
diff --git a/requests/sessions.py b/requests/sessions.py index f2d7d3e36f..6cb3b4dae3 100644 --- a/requests/sessions.py +++ b/requests/sessions.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - """ requests.sessions ~~~~~~~~~~~~~~~~~ @@ -10,35 +8,51 @@ import os import sys import time -from datetime import timedelta from collections import OrderedDict +from datetime import timedelta +from ._internal_utils import to_native_string +from .adapters import HTTPAdapter from .auth import _basic_auth_str -from .compat import cookielib, urljoin, urlparse, Mapping +from .compat import Mapping, cookielib, urljoin, urlparse from .cookies import ( - cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) -from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT -from .hooks import default_hooks, dispatch_hook -from ._internal_utils import to_native_string -from .utils import to_key_val_list, default_headers, DEFAULT_PORTS + RequestsCookieJar, + cookiejar_from_dict, + extract_cookies_to_jar, + merge_cookies, +) from .exceptions import ( - TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) - -from .structures import CaseInsensitiveDict -from .adapters import HTTPAdapter - -from .utils import ( - requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, - get_auth_from_url, rewind_body, resolve_proxies + ChunkedEncodingError, + ContentDecodingError, + InvalidSchema, + TooManyRedirects, ) - -from .status_codes import codes +from .hooks import default_hooks, dispatch_hook # formerly defined here, reexposed here for backward compatibility -from .models import REDIRECT_STATI +from .models import ( # noqa: F401 + DEFAULT_REDIRECT_LIMIT, + REDIRECT_STATI, + PreparedRequest, + Request, +) +from .status_codes import codes +from .structures import CaseInsensitiveDict +from .utils import ( # noqa: F401 + DEFAULT_PORTS, + default_headers, + get_auth_from_url, + get_environ_proxies, + get_netrc_auth, + requote_uri, + resolve_proxies, + rewind_body, + should_bypass_proxies, + to_key_val_list, +) # Preferred clock, based on which one is more accurate on a given system. -if sys.platform == 'win32': +if sys.platform == "win32": preferred_clock = time.perf_counter else: preferred_clock = time.time @@ -58,8 +72,7 @@ def merge_setting(request_setting, session_setting, dict_class=OrderedDict): # Bypass if not a dictionary (e.g. verify) if not ( - isinstance(session_setting, Mapping) and - isinstance(request_setting, Mapping) + isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) ): return request_setting @@ -81,17 +94,16 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. """ - if session_hooks is None or session_hooks.get('response') == []: + if session_hooks is None or session_hooks.get("response") == []: return request_hooks - if request_hooks is None or request_hooks.get('response') == []: + if request_hooks is None or request_hooks.get("response") == []: return session_hooks return merge_setting(request_hooks, session_hooks, dict_class) -class SessionRedirectMixin(object): - +class SessionRedirectMixin: def get_redirect_target(self, resp): """Receives a Response. 
Returns a redirect URI or ``None``""" # Due to the nature of how requests processes redirects this method will @@ -101,15 +113,15 @@ def get_redirect_target(self, resp): # to cache the redirect location onto the response object as a private # attribute. if resp.is_redirect: - location = resp.headers['location'] + location = resp.headers["location"] # Currently the underlying http module on py3 decode headers # in latin1, but empirical evidence suggests that latin1 is very # rarely used with non-ASCII characters in HTTP headers. # It is more likely to get UTF8 header rather than latin1. # This causes incorrect handling of UTF8 encoded location headers. # To solve this, we re-encode the location in latin1. - location = location.encode('latin1') - return to_native_string(location, 'utf8') + location = location.encode("latin1") + return to_native_string(location, "utf8") return None def should_strip_auth(self, old_url, new_url): @@ -122,23 +134,40 @@ def should_strip_auth(self, old_url, new_url): # ports. This isn't specified by RFC 7235, but is kept to avoid # breaking backwards compatibility with older versions of requests # that allowed any redirects on the same host. - if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) - and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): + if ( + old_parsed.scheme == "http" + and old_parsed.port in (80, None) + and new_parsed.scheme == "https" + and new_parsed.port in (443, None) + ): return False # Handle default port usage corresponding to scheme. changed_port = old_parsed.port != new_parsed.port changed_scheme = old_parsed.scheme != new_parsed.scheme default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) - if (not changed_scheme and old_parsed.port in default_port - and new_parsed.port in default_port): + if ( + not changed_scheme + and old_parsed.port in default_port + and new_parsed.port in default_port + ): return False # Standard case: root URI must match return changed_port or changed_scheme - def resolve_redirects(self, resp, req, stream=False, timeout=None, - verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): + def resolve_redirects( + self, + resp, + req, + stream=False, + timeout=None, + verify=True, + cert=None, + proxies=None, + yield_requests=False, + **adapter_kwargs, + ): """Receives a Response. Returns a generator of Responses or Requests.""" hist = [] # keep track of history @@ -159,19 +188,21 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, resp.raw.read(decode_content=False) if len(resp.history) >= self.max_redirects: - raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) + raise TooManyRedirects( + f"Exceeded {self.max_redirects} redirects.", response=resp + ) # Release the connection back into the pool. 
resp.close() # Handle redirection without scheme (see: RFC 1808 Section 4) - if url.startswith('//'): + if url.startswith("//"): parsed_rurl = urlparse(resp.url) - url = ':'.join([to_native_string(parsed_rurl.scheme), url]) + url = ":".join([to_native_string(parsed_rurl.scheme), url]) # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) parsed = urlparse(url) - if parsed.fragment == '' and previous_fragment: + if parsed.fragment == "" and previous_fragment: parsed = parsed._replace(fragment=previous_fragment) elif parsed.fragment: previous_fragment = parsed.fragment @@ -190,15 +221,18 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, self.rebuild_method(prepared_request, resp) # https://github.com/psf/requests/issues/1084 - if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): + if resp.status_code not in ( + codes.temporary_redirect, + codes.permanent_redirect, + ): # https://github.com/psf/requests/issues/3490 - purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') + purged_headers = ("Content-Length", "Content-Type", "Transfer-Encoding") for header in purged_headers: prepared_request.headers.pop(header, None) prepared_request.body = None headers = prepared_request.headers - headers.pop('Cookie', None) + headers.pop("Cookie", None) # Extract any cookies sent on the response to the cookiejar # in the new request. Because we've mutated our copied prepared @@ -214,9 +248,8 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, # A failed tell() sets `_body_position` to `object()`. This non-None # value ensures `rewindable` will be True, allowing us to raise an # UnrewindableBodyError, instead of hanging the connection. - rewindable = ( - prepared_request._body_position is not None and - ('Content-Length' in headers or 'Transfer-Encoding' in headers) + rewindable = prepared_request._body_position is not None and ( + "Content-Length" in headers or "Transfer-Encoding" in headers ) # Attempt to rewind consumed file-like object. @@ -238,7 +271,7 @@ def resolve_redirects(self, resp, req, stream=False, timeout=None, cert=cert, proxies=proxies, allow_redirects=False, - **adapter_kwargs + **adapter_kwargs, ) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) @@ -255,10 +288,12 @@ def rebuild_auth(self, prepared_request, response): headers = prepared_request.headers url = prepared_request.url - if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): + if "Authorization" in headers and self.should_strip_auth( + response.request.url, url + ): # If we get redirected to a new host, we should strip out any # authentication headers. - del headers['Authorization'] + del headers["Authorization"] # .netrc might have more auth for us on our new host. 
new_auth = get_netrc_auth(url) if self.trust_env else None @@ -281,8 +316,8 @@ def rebuild_proxies(self, prepared_request, proxies): scheme = urlparse(prepared_request.url).scheme new_proxies = resolve_proxies(prepared_request, proxies, self.trust_env) - if 'Proxy-Authorization' in headers: - del headers['Proxy-Authorization'] + if "Proxy-Authorization" in headers: + del headers["Proxy-Authorization"] try: username, password = get_auth_from_url(new_proxies[scheme]) @@ -290,7 +325,7 @@ def rebuild_proxies(self, prepared_request, proxies): username, password = None, None if username and password: - headers['Proxy-Authorization'] = _basic_auth_str(username, password) + headers["Proxy-Authorization"] = _basic_auth_str(username, password) return new_proxies @@ -301,18 +336,18 @@ def rebuild_method(self, prepared_request, response): method = prepared_request.method # https://tools.ietf.org/html/rfc7231#section-6.4.4 - if response.status_code == codes.see_other and method != 'HEAD': - method = 'GET' + if response.status_code == codes.see_other and method != "HEAD": + method = "GET" # Do what the browsers do, despite standards... # First, turn 302s into GETs. - if response.status_code == codes.found and method != 'HEAD': - method = 'GET' + if response.status_code == codes.found and method != "HEAD": + method = "GET" # Second, if a POST is responded to with a 301, turn it into a GET. # This bizarre behaviour is explained in Issue 1704. - if response.status_code == codes.moved and method == 'POST': - method = 'GET' + if response.status_code == codes.moved and method == "POST": + method = "GET" prepared_request.method = method @@ -337,9 +372,18 @@ class Session(SessionRedirectMixin): """ __attrs__ = [ - 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', - 'cert', 'adapters', 'stream', 'trust_env', - 'max_redirects', + "headers", + "cookies", + "auth", + "proxies", + "hooks", + "params", + "verify", + "cert", + "adapters", + "stream", + "trust_env", + "max_redirects", ] def __init__(self): @@ -401,8 +445,8 @@ def __init__(self): # Default connection adapters. self.adapters = OrderedDict() - self.mount('https://', HTTPAdapter()) - self.mount('http://', HTTPAdapter()) + self.mount("https://", HTTPAdapter()) + self.mount("http://", HTTPAdapter()) def __enter__(self): return self @@ -428,7 +472,8 @@ def prepare_request(self, request): # Merge with session cookies merged_cookies = merge_cookies( - merge_cookies(RequestsCookieJar(), self.cookies), cookies) + merge_cookies(RequestsCookieJar(), self.cookies), cookies + ) # Set environment's basic authentication if not explicitly set. 
auth = request.auth
@@ -442,7 +487,9 @@ def prepare_request(self, request):
             files=request.files,
             data=request.data,
             json=request.json,
-            headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict),
+            headers=merge_setting(
+                request.headers, self.headers, dict_class=CaseInsensitiveDict
+            ),
             params=merge_setting(request.params, self.params),
             auth=merge_setting(auth, self.auth),
             cookies=merged_cookies,
@@ -450,10 +497,25 @@
         )
         return p
 
-    def request(self, method, url,
-            params=None, data=None, headers=None, cookies=None, files=None,
-            auth=None, timeout=None, allow_redirects=True, proxies=None,
-            hooks=None, stream=None, verify=None, cert=None, json=None):
+    def request(
+        self,
+        method,
+        url,
+        params=None,
+        data=None,
+        headers=None,
+        cookies=None,
+        files=None,
+        auth=None,
+        timeout=None,
+        allow_redirects=True,
+        proxies=None,
+        hooks=None,
+        stream=None,
+        verify=None,
+        cert=None,
+        json=None,
+    ):
         """Constructs a :class:`Request <Request>`, prepares it and sends it.
        Returns :class:`Response <Response>` object.
@@ -489,7 +551,7 @@ def request(self, method, url,
             ``False``, requests will accept any TLS certificate presented by
             the server, and will ignore hostname mismatches and/or expired
             certificates, which will make your application vulnerable to
-            man-in-the-middle (MitM) attacks. Setting verify to ``False`` 
+            man-in-the-middle (MitM) attacks. Setting verify to ``False``
             may be useful during local development or testing.
         :param cert: (optional) if String, path to ssl client cert file (.pem).
             If Tuple, ('cert', 'key') pair.
@@ -518,8 +580,8 @@ def request(self, method, url,
 
         # Send the request.
         send_kwargs = {
-            'timeout': timeout,
-            'allow_redirects': allow_redirects,
+            "timeout": timeout,
+            "allow_redirects": allow_redirects,
         }
         send_kwargs.update(settings)
         resp = self.send(prep, **send_kwargs)
@@ -534,8 +596,8 @@ def get(self, url, **kwargs):
 
         :rtype: requests.Response
         """
 
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('GET', url, **kwargs)
+        kwargs.setdefault("allow_redirects", True)
+        return self.request("GET", url, **kwargs)
 
     def options(self, url, **kwargs):
         r"""Sends a OPTIONS request. Returns :class:`Response` object.
@@ -545,8 +607,8 @@ def options(self, url, **kwargs):
 
         :rtype: requests.Response
         """
 
-        kwargs.setdefault('allow_redirects', True)
-        return self.request('OPTIONS', url, **kwargs)
+        kwargs.setdefault("allow_redirects", True)
+        return self.request("OPTIONS", url, **kwargs)
 
     def head(self, url, **kwargs):
         r"""Sends a HEAD request. Returns :class:`Response` object.
@@ -556,8 +618,8 @@ def head(self, url, **kwargs):
 
         :rtype: requests.Response
         """
 
-        kwargs.setdefault('allow_redirects', False)
-        return self.request('HEAD', url, **kwargs)
+        kwargs.setdefault("allow_redirects", False)
+        return self.request("HEAD", url, **kwargs)
 
     def post(self, url, data=None, json=None, **kwargs):
         r"""Sends a POST request. Returns :class:`Response` object.
@@ -570,7 +632,7 @@ def post(self, url, data=None, json=None, **kwargs):
 
         :rtype: requests.Response
         """
 
-        return self.request('POST', url, data=data, json=json, **kwargs)
+        return self.request("POST", url, data=data, json=json, **kwargs)
 
     def put(self, url, data=None, **kwargs):
         r"""Sends a PUT request. Returns :class:`Response` object.
@@ -582,7 +644,7 @@ def put(self, url, data=None, **kwargs): :rtype: requests.Response """ - return self.request('PUT', url, data=data, **kwargs) + return self.request("PUT", url, data=data, **kwargs) def patch(self, url, data=None, **kwargs): r"""Sends a PATCH request. Returns :class:`Response` object. @@ -594,7 +656,7 @@ def patch(self, url, data=None, **kwargs): :rtype: requests.Response """ - return self.request('PATCH', url, data=data, **kwargs) + return self.request("PATCH", url, data=data, **kwargs) def delete(self, url, **kwargs): r"""Sends a DELETE request. Returns :class:`Response` object. @@ -604,7 +666,7 @@ def delete(self, url, **kwargs): :rtype: requests.Response """ - return self.request('DELETE', url, **kwargs) + return self.request("DELETE", url, **kwargs) def send(self, request, **kwargs): """Send a given PreparedRequest. @@ -613,22 +675,20 @@ def send(self, request, **kwargs): """ # Set defaults that the hooks can utilize to ensure they always have # the correct parameters to reproduce the previous request. - kwargs.setdefault('stream', self.stream) - kwargs.setdefault('verify', self.verify) - kwargs.setdefault('cert', self.cert) - if 'proxies' not in kwargs: - kwargs['proxies'] = resolve_proxies( - request, self.proxies, self.trust_env - ) + kwargs.setdefault("stream", self.stream) + kwargs.setdefault("verify", self.verify) + kwargs.setdefault("cert", self.cert) + if "proxies" not in kwargs: + kwargs["proxies"] = resolve_proxies(request, self.proxies, self.trust_env) # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. if isinstance(request, Request): - raise ValueError('You can only send PreparedRequests.') + raise ValueError("You can only send PreparedRequests.") # Set up variables needed for resolve_redirects and dispatching of hooks - allow_redirects = kwargs.pop('allow_redirects', True) - stream = kwargs.get('stream') + allow_redirects = kwargs.pop("allow_redirects", True) + stream = kwargs.get("stream") hooks = request.hooks # Get the appropriate adapter to use @@ -645,7 +705,7 @@ def send(self, request, **kwargs): r.elapsed = timedelta(seconds=elapsed) # Response manipulation hooks - r = dispatch_hook('response', hooks, r, **kwargs) + r = dispatch_hook("response", hooks, r, **kwargs) # Persist cookies if r.history: @@ -675,7 +735,9 @@ def send(self, request, **kwargs): # If redirects aren't being followed, store the response on the Request for Response.next(). if not allow_redirects: try: - r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) + r._next = next( + self.resolve_redirects(r, request, yield_requests=True, **kwargs) + ) except StopIteration: pass @@ -693,7 +755,7 @@ def merge_environment_settings(self, url, proxies, stream, verify, cert): # Gather clues from the surrounding environment. if self.trust_env: # Set environment's proxies. - no_proxy = proxies.get('no_proxy') if proxies is not None else None + no_proxy = proxies.get("no_proxy") if proxies is not None else None env_proxies = get_environ_proxies(url, no_proxy=no_proxy) for (k, v) in env_proxies.items(): proxies.setdefault(k, v) @@ -702,8 +764,8 @@ def merge_environment_settings(self, url, proxies, stream, verify, cert): # and be compatible with cURL. 
if verify is True or verify is None: verify = ( - os.environ.get('REQUESTS_CA_BUNDLE') - or os.environ.get('CURL_CA_BUNDLE') + os.environ.get("REQUESTS_CA_BUNDLE") + or os.environ.get("CURL_CA_BUNDLE") or verify ) @@ -713,12 +775,7 @@ def merge_environment_settings(self, url, proxies, stream, verify, cert): verify = merge_setting(verify, self.verify) cert = merge_setting(cert, self.cert) - return { - 'proxies': proxies, - 'stream': stream, - 'verify': verify, - 'cert': cert - } + return {"proxies": proxies, "stream": stream, "verify": verify, "cert": cert} def get_adapter(self, url): """ @@ -732,7 +789,7 @@ def get_adapter(self, url): return adapter # Nothing matches :-/ - raise InvalidSchema("No connection adapters were found for {!r}".format(url)) + raise InvalidSchema(f"No connection adapters were found for {url!r}") def close(self): """Closes all adapters and as such the session""" diff --git a/requests/status_codes.py b/requests/status_codes.py index d80a7cd4dd..4bd072be97 100644 --- a/requests/status_codes.py +++ b/requests/status_codes.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - r""" The ``codes`` object defines a mapping from common names for HTTP statuses to their numerical codes, accessible either as attributes or as dictionary @@ -23,101 +21,108 @@ from .structures import LookupDict _codes = { - # Informational. - 100: ('continue',), - 101: ('switching_protocols',), - 102: ('processing',), - 103: ('checkpoint',), - 122: ('uri_too_long', 'request_uri_too_long'), - 200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'), - 201: ('created',), - 202: ('accepted',), - 203: ('non_authoritative_info', 'non_authoritative_information'), - 204: ('no_content',), - 205: ('reset_content', 'reset'), - 206: ('partial_content', 'partial'), - 207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'), - 208: ('already_reported',), - 226: ('im_used',), - + 100: ("continue",), + 101: ("switching_protocols",), + 102: ("processing",), + 103: ("checkpoint",), + 122: ("uri_too_long", "request_uri_too_long"), + 200: ("ok", "okay", "all_ok", "all_okay", "all_good", "\\o/", "✓"), + 201: ("created",), + 202: ("accepted",), + 203: ("non_authoritative_info", "non_authoritative_information"), + 204: ("no_content",), + 205: ("reset_content", "reset"), + 206: ("partial_content", "partial"), + 207: ("multi_status", "multiple_status", "multi_stati", "multiple_stati"), + 208: ("already_reported",), + 226: ("im_used",), # Redirection. - 300: ('multiple_choices',), - 301: ('moved_permanently', 'moved', '\\o-'), - 302: ('found',), - 303: ('see_other', 'other'), - 304: ('not_modified',), - 305: ('use_proxy',), - 306: ('switch_proxy',), - 307: ('temporary_redirect', 'temporary_moved', 'temporary'), - 308: ('permanent_redirect', - 'resume_incomplete', 'resume',), # These 2 to be removed in 3.0 - + 300: ("multiple_choices",), + 301: ("moved_permanently", "moved", "\\o-"), + 302: ("found",), + 303: ("see_other", "other"), + 304: ("not_modified",), + 305: ("use_proxy",), + 306: ("switch_proxy",), + 307: ("temporary_redirect", "temporary_moved", "temporary"), + 308: ( + "permanent_redirect", + "resume_incomplete", + "resume", + ), # "resume" and "resume_incomplete" to be removed in 3.0 # Client Error. 
- 400: ('bad_request', 'bad'), - 401: ('unauthorized',), - 402: ('payment_required', 'payment'), - 403: ('forbidden',), - 404: ('not_found', '-o-'), - 405: ('method_not_allowed', 'not_allowed'), - 406: ('not_acceptable',), - 407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'), - 408: ('request_timeout', 'timeout'), - 409: ('conflict',), - 410: ('gone',), - 411: ('length_required',), - 412: ('precondition_failed', 'precondition'), - 413: ('request_entity_too_large',), - 414: ('request_uri_too_large',), - 415: ('unsupported_media_type', 'unsupported_media', 'media_type'), - 416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'), - 417: ('expectation_failed',), - 418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'), - 421: ('misdirected_request',), - 422: ('unprocessable_entity', 'unprocessable'), - 423: ('locked',), - 424: ('failed_dependency', 'dependency'), - 425: ('unordered_collection', 'unordered'), - 426: ('upgrade_required', 'upgrade'), - 428: ('precondition_required', 'precondition'), - 429: ('too_many_requests', 'too_many'), - 431: ('header_fields_too_large', 'fields_too_large'), - 444: ('no_response', 'none'), - 449: ('retry_with', 'retry'), - 450: ('blocked_by_windows_parental_controls', 'parental_controls'), - 451: ('unavailable_for_legal_reasons', 'legal_reasons'), - 499: ('client_closed_request',), - + 400: ("bad_request", "bad"), + 401: ("unauthorized",), + 402: ("payment_required", "payment"), + 403: ("forbidden",), + 404: ("not_found", "-o-"), + 405: ("method_not_allowed", "not_allowed"), + 406: ("not_acceptable",), + 407: ("proxy_authentication_required", "proxy_auth", "proxy_authentication"), + 408: ("request_timeout", "timeout"), + 409: ("conflict",), + 410: ("gone",), + 411: ("length_required",), + 412: ("precondition_failed", "precondition"), + 413: ("request_entity_too_large",), + 414: ("request_uri_too_large",), + 415: ("unsupported_media_type", "unsupported_media", "media_type"), + 416: ( + "requested_range_not_satisfiable", + "requested_range", + "range_not_satisfiable", + ), + 417: ("expectation_failed",), + 418: ("im_a_teapot", "teapot", "i_am_a_teapot"), + 421: ("misdirected_request",), + 422: ("unprocessable_entity", "unprocessable"), + 423: ("locked",), + 424: ("failed_dependency", "dependency"), + 425: ("unordered_collection", "unordered"), + 426: ("upgrade_required", "upgrade"), + 428: ("precondition_required", "precondition"), + 429: ("too_many_requests", "too_many"), + 431: ("header_fields_too_large", "fields_too_large"), + 444: ("no_response", "none"), + 449: ("retry_with", "retry"), + 450: ("blocked_by_windows_parental_controls", "parental_controls"), + 451: ("unavailable_for_legal_reasons", "legal_reasons"), + 499: ("client_closed_request",), # Server Error. 
-    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
-    501: ('not_implemented',),
-    502: ('bad_gateway',),
-    503: ('service_unavailable', 'unavailable'),
-    504: ('gateway_timeout',),
-    505: ('http_version_not_supported', 'http_version'),
-    506: ('variant_also_negotiates',),
-    507: ('insufficient_storage',),
-    509: ('bandwidth_limit_exceeded', 'bandwidth'),
-    510: ('not_extended',),
-    511: ('network_authentication_required', 'network_auth', 'network_authentication'),
+    500: ("internal_server_error", "server_error", "/o\\", "✗"),
+    501: ("not_implemented",),
+    502: ("bad_gateway",),
+    503: ("service_unavailable", "unavailable"),
+    504: ("gateway_timeout",),
+    505: ("http_version_not_supported", "http_version"),
+    506: ("variant_also_negotiates",),
+    507: ("insufficient_storage",),
+    509: ("bandwidth_limit_exceeded", "bandwidth"),
+    510: ("not_extended",),
+    511: ("network_authentication_required", "network_auth", "network_authentication"),
 }
 
-codes = LookupDict(name='status_codes')
+codes = LookupDict(name="status_codes")
+
 
 def _init():
     for code, titles in _codes.items():
         for title in titles:
             setattr(codes, title, code)
-            if not title.startswith(('\\', '/')):
+            if not title.startswith(("\\", "/")):
                 setattr(codes, title.upper(), code)
 
     def doc(code):
-        names = ', '.join('``%s``' % n for n in _codes[code])
-        return '* %d: %s' % (code, names)
+        names = ", ".join(f"``{n}``" for n in _codes[code])
+        return "* %d: %s" % (code, names)
 
     global __doc__
-    __doc__ = (__doc__ + '\n' +
-               '\n'.join(doc(code) for code in sorted(_codes))
-               if __doc__ is not None else None)
+    __doc__ = (
+        __doc__ + "\n" + "\n".join(doc(code) for code in sorted(_codes))
+        if __doc__ is not None
+        else None
+    )
+
 
 _init()
diff --git a/requests/structures.py b/requests/structures.py
index 8ee0ba7a08..188e13e482 100644
--- a/requests/structures.py
+++ b/requests/structures.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.structures
 ~~~~~~~~~~~~~~~~~~~
@@ -64,11 +62,7 @@ def __len__(self):
 
     def lower_items(self):
         """Like iteritems(), but with all lowercase keys."""
-        return (
-            (lowerkey, keyval[1])
-            for (lowerkey, keyval)
-            in self._store.items()
-        )
+        return ((lowerkey, keyval[1]) for (lowerkey, keyval) in self._store.items())
 
     def __eq__(self, other):
         if isinstance(other, Mapping):
@@ -91,10 +85,10 @@ class LookupDict(dict):
 
     def __init__(self, name=None):
         self.name = name
-        super(LookupDict, self).__init__()
+        super().__init__()
 
     def __repr__(self):
-        return '<lookup \'%s\'>' % (self.name)
+        return f"<lookup '{self.name}'>"
 
     def __getitem__(self, key):
         # We allow fall-through here, so values default to None
diff --git a/requests/utils.py b/requests/utils.py
index a58b26a0c2..7a881b6444 100644
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 """
 requests.utils
 ~~~~~~~~~~~~~~
@@ -20,28 +18,46 @@
 import warnings
 import zipfile
 from collections import OrderedDict
-from urllib3.util import make_headers
-from urllib3.util import parse_url
-from .__version__ import __version__
+from urllib3.util import make_headers, parse_url
+
 from . import certs
+from .__version__ import __version__
+
 # to_native_string is unused here, but imported here for backwards compatibility
-from ._internal_utils import to_native_string
+from ._internal_utils import to_native_string  # noqa: F401
+from .compat import (
+    Mapping,
+    basestring,
+    bytes,
+    getproxies,
+    getproxies_environment,
+    integer_types,
+)
 from .compat import parse_http_list as _parse_list_header
 from .compat import (
-    quote, urlparse, bytes, str, unquote, getproxies,
-    proxy_bypass, urlunparse, basestring, integer_types,
-    proxy_bypass_environment, getproxies_environment, Mapping)
+    proxy_bypass,
+    proxy_bypass_environment,
+    quote,
+    str,
+    unquote,
+    urlparse,
+    urlunparse,
+)
 from .cookies import cookiejar_from_dict
-from .structures import CaseInsensitiveDict
 from .exceptions import (
-    InvalidURL, InvalidHeader, FileModeWarning, UnrewindableBodyError)
+    FileModeWarning,
+    InvalidHeader,
+    InvalidURL,
+    UnrewindableBodyError,
+)
+from .structures import CaseInsensitiveDict
 
-NETRC_FILES = ('.netrc', '_netrc')
+NETRC_FILES = (".netrc", "_netrc")
 
 DEFAULT_CA_BUNDLE_PATH = certs.where()
 
-DEFAULT_PORTS = {'http': 80, 'https': 443}
+DEFAULT_PORTS = {"http": 80, "https": 443}
 
 # Ensure that ', ' is used to preserve previous delimiter behavior.
 DEFAULT_ACCEPT_ENCODING = ", ".join(
@@ -49,7 +65,7 @@
 )
 
-if sys.platform == 'win32':
+if sys.platform == "win32":
     # provide a proxy_bypass version on Windows without DNS lookups
 
     def proxy_bypass_registry(host):
@@ -59,14 +75,14 @@ def proxy_bypass_registry(host):
             return False
 
         try:
-            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
-                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
+            internetSettings = winreg.OpenKey(
+                winreg.HKEY_CURRENT_USER,
+                r"Software\Microsoft\Windows\CurrentVersion\Internet Settings",
+            )
             # ProxyEnable could be REG_SZ or REG_DWORD, normalizing it
-            proxyEnable = int(winreg.QueryValueEx(internetSettings,
-                'ProxyEnable')[0])
+            proxyEnable = int(winreg.QueryValueEx(internetSettings, "ProxyEnable")[0])
             # ProxyOverride is almost always a string
-            proxyOverride = winreg.QueryValueEx(internetSettings,
-                'ProxyOverride')[0]
+            proxyOverride = winreg.QueryValueEx(internetSettings, "ProxyOverride")[0]
         except OSError:
             return False
         if not proxyEnable or not proxyOverride:
@@ -75,15 +91,15 @@ def proxy_bypass_registry(host):
         # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
-        proxyOverride = proxyOverride.split(';')
+        proxyOverride = proxyOverride.split(";")
        # now check if we match one of the registry values.
        for test in proxyOverride:
-            if test == '<local>':
-                if '.' 
not in host: return True - test = test.replace(".", r"\.") # mask dots - test = test.replace("*", r".*") # change glob sequence - test = test.replace("?", r".") # change glob char + test = test.replace(".", r"\.") # mask dots + test = test.replace("*", r".*") # change glob sequence + test = test.replace("?", r".") # change glob char if re.match(test, host, re.I): return True return False @@ -103,7 +119,7 @@ def proxy_bypass(host): # noqa def dict_to_sequence(d): """Returns an internal sequence dictionary update.""" - if hasattr(d, 'items'): + if hasattr(d, "items"): d = d.items() return d @@ -113,13 +129,13 @@ def super_len(o): total_length = None current_position = 0 - if hasattr(o, '__len__'): + if hasattr(o, "__len__"): total_length = len(o) - elif hasattr(o, 'len'): + elif hasattr(o, "len"): total_length = o.len - elif hasattr(o, 'fileno'): + elif hasattr(o, "fileno"): try: fileno = o.fileno() except (io.UnsupportedOperation, AttributeError): @@ -132,21 +148,23 @@ def super_len(o): # Having used fstat to determine the file length, we need to # confirm that this file was opened up in binary mode. - if 'b' not in o.mode: - warnings.warn(( - "Requests has determined the content-length for this " - "request using the binary size of the file: however, the " - "file has been opened in text mode (i.e. without the 'b' " - "flag in the mode). This may lead to an incorrect " - "content-length. In Requests 3.0, support will be removed " - "for files in text mode."), - FileModeWarning + if "b" not in o.mode: + warnings.warn( + ( + "Requests has determined the content-length for this " + "request using the binary size of the file: however, the " + "file has been opened in text mode (i.e. without the 'b' " + "flag in the mode). This may lead to an incorrect " + "content-length. In Requests 3.0, support will be removed " + "for files in text mode." + ), + FileModeWarning, ) - if hasattr(o, 'tell'): + if hasattr(o, "tell"): try: current_position = o.tell() - except (OSError, IOError): + except OSError: # This can happen in some weird situations, such as when the file # is actually a special file descriptor like stdin. In this # instance, we don't know what the length is, so set it to zero and @@ -154,7 +172,7 @@ def super_len(o): if total_length is not None: current_position = total_length else: - if hasattr(o, 'seek') and total_length is None: + if hasattr(o, "seek") and total_length is None: # StringIO and BytesIO have seek but no usable fileno try: # seek to end of file @@ -164,7 +182,7 @@ def super_len(o): # seek back to current position to support # partially read file-like objects o.seek(current_position or 0) - except (OSError, IOError): + except OSError: total_length = 0 if total_length is None: @@ -176,14 +194,14 @@ def super_len(o): def get_netrc_auth(url, raise_errors=False): """Returns the Requests tuple auth for a given url from netrc.""" - netrc_file = os.environ.get('NETRC') + netrc_file = os.environ.get("NETRC") if netrc_file is not None: netrc_locations = (netrc_file,) else: - netrc_locations = ('~/{}'.format(f) for f in NETRC_FILES) + netrc_locations = (f"~/{f}" for f in NETRC_FILES) try: - from netrc import netrc, NetrcParseError + from netrc import NetrcParseError, netrc netrc_path = None @@ -208,18 +226,18 @@ def get_netrc_auth(url, raise_errors=False): # Strip port numbers from netloc. This weird `if...encode`` dance is # used for Python 3.2, which doesn't support unicode literals. 
- splitstr = b':' + splitstr = b":" if isinstance(url, str): - splitstr = splitstr.decode('ascii') + splitstr = splitstr.decode("ascii") host = ri.netloc.split(splitstr)[0] try: _netrc = netrc(netrc_path).authenticators(host) if _netrc: # Return with login / password - login_i = (0 if _netrc[0] else 1) + login_i = 0 if _netrc[0] else 1 return (_netrc[login_i], _netrc[2]) - except (NetrcParseError, IOError): + except (NetrcParseError, OSError): # If there was a parsing error or a permissions issue reading the file, # we'll just skip netrc auth unless explicitly asked to raise errors. if raise_errors: @@ -232,9 +250,8 @@ def get_netrc_auth(url, raise_errors=False): def guess_filename(obj): """Tries to guess the filename of the given object.""" - name = getattr(obj, 'name', None) - if (name and isinstance(name, basestring) and name[0] != '<' and - name[-1] != '>'): + name = getattr(obj, "name", None) + if name and isinstance(name, basestring) and name[0] != "<" and name[-1] != ">": return os.path.basename(name) @@ -256,7 +273,7 @@ def extract_zipped_paths(path): # If we don't check for an empty prefix after the split (in other words, archive remains unchanged after the split), # we _can_ end up in an infinite loop on a rare corner case affecting a small number of users break - member = '/'.join([prefix, member]) + member = "/".join([prefix, member]) if not zipfile.is_zipfile(archive): return path @@ -267,7 +284,7 @@ def extract_zipped_paths(path): # we have a valid zip archive and a valid member of that archive tmp = tempfile.gettempdir() - extracted_path = os.path.join(tmp, member.split('/')[-1]) + extracted_path = os.path.join(tmp, member.split("/")[-1]) if not os.path.exists(extracted_path): # use read + write to avoid the creating nested folders, we only want the file, avoids mkdir racing condition with atomic_open(extracted_path) as file_handler: @@ -280,7 +297,7 @@ def atomic_open(filename): """Write a file to the disk in an atomic fashion""" tmp_descriptor, tmp_name = tempfile.mkstemp(dir=os.path.dirname(filename)) try: - with os.fdopen(tmp_descriptor, 'wb') as tmp_handler: + with os.fdopen(tmp_descriptor, "wb") as tmp_handler: yield tmp_handler os.replace(tmp_name, filename) except BaseException: @@ -310,7 +327,7 @@ def from_key_val_list(value): return None if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') + raise ValueError("cannot encode objects that are not 2-tuples") return OrderedDict(value) @@ -336,7 +353,7 @@ def to_key_val_list(value): return None if isinstance(value, (str, bytes, bool, int)): - raise ValueError('cannot encode objects that are not 2-tuples') + raise ValueError("cannot encode objects that are not 2-tuples") if isinstance(value, Mapping): value = value.items() @@ -401,10 +418,10 @@ def parse_dict_header(value): """ result = {} for item in _parse_list_header(value): - if '=' not in item: + if "=" not in item: result[item] = None continue - name, value = item.split('=', 1) + name, value = item.split("=", 1) if value[:1] == value[-1:] == '"': value = unquote_header_value(value[1:-1]) result[name] = value @@ -432,8 +449,8 @@ def unquote_header_value(value, is_filename=False): # replace sequence below on a UNC path has the effect of turning # the leading double slash into a single slash and then # _fix_ie_filename() doesn't work correctly. See #458. 
-    if not is_filename or value[:2] != '\\\\':
-        return value.replace('\\\\', '\\').replace('\\"', '"')
+    if not is_filename or value[:2] != "\\\\":
+        return value.replace("\\\\", "\\").replace('\\"', '"')
     return value
 
 
@@ -468,19 +485,24 @@ def get_encodings_from_content(content):
 
     :param content: bytestring to extract encodings from.
     """
-    warnings.warn((
-        'In requests 3.0, get_encodings_from_content will be removed. For '
-        'more information, please see the discussion on issue #2266. (This'
-        ' warning should only appear once.)'),
-        DeprecationWarning)
+    warnings.warn(
+        (
+            "In requests 3.0, get_encodings_from_content will be removed. For "
+            "more information, please see the discussion on issue #2266. (This"
+            " warning should only appear once.)"
+        ),
+        DeprecationWarning,
+    )
 
     charset_re = re.compile(r'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
     pragma_re = re.compile(r'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
     xml_re = re.compile(r'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
 
-    return (charset_re.findall(content) +
-            pragma_re.findall(content) +
-            xml_re.findall(content))
+    return (
+        charset_re.findall(content)
+        + pragma_re.findall(content)
+        + xml_re.findall(content)
+    )
 
 
 def _parse_content_type_header(header):
@@ -491,7 +513,7 @@
     parameters
     """
 
-    tokens = header.split(';')
+    tokens = header.split(";")
     content_type, params = tokens[0].strip(), tokens[1:]
     params_dict = {}
     items_to_strip = "\"' "
@@ -503,7 +525,7 @@ def _parse_content_type_header(header):
         index_of_equals = param.find("=")
         if index_of_equals != -1:
             key = param[:index_of_equals].strip(items_to_strip)
-            value = param[index_of_equals + 1:].strip(items_to_strip)
+            value = param[index_of_equals + 1 :].strip(items_to_strip)
         params_dict[key.lower()] = value
     return content_type, params_dict
@@ -515,38 +537,37 @@ def get_encoding_from_headers(headers):
 
     :rtype: str
     """
 
-    content_type = headers.get('content-type')
+    content_type = headers.get("content-type")
 
     if not content_type:
         return None
 
     content_type, params = _parse_content_type_header(content_type)
 
-    if 'charset' in params:
-        return params['charset'].strip("'\"")
+    if "charset" in params:
+        return params["charset"].strip("'\"")
 
-    if 'text' in content_type:
-        return 'ISO-8859-1'
+    if "text" in content_type:
+        return "ISO-8859-1"
 
-    if 'application/json' in content_type:
+    if "application/json" in content_type:
         # Assume UTF-8 based on RFC 4627: https://www.ietf.org/rfc/rfc4627.txt since the charset was unset
-        return 'utf-8'
+        return "utf-8"
 
 
 def stream_decode_response_unicode(iterator, r):
     """Stream decodes a iterator."""
 
     if r.encoding is None:
-        for item in iterator:
-            yield item
+        yield from iterator
         return
 
-    decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
+    decoder = codecs.getincrementaldecoder(r.encoding)(errors="replace")
     for chunk in iterator:
         rv = decoder.decode(chunk)
         if rv:
             yield rv
-    rv = decoder.decode(b'', final=True)
+    rv = decoder.decode(b"", final=True)
     if rv:
         yield rv
@@ -557,7 +578,7 @@
     if slice_length is None or slice_length <= 0:
         slice_length = len(string)
     while pos < len(string):
-        yield string[pos:pos + slice_length]
+        yield string[pos : pos + slice_length]
         pos += slice_length
 
 
@@ -573,11 +594,14 @@ def get_unicode_from_response(r):
 
     :rtype: str
     """
-    warnings.warn((
-        'In requests 3.0, get_unicode_from_response will be removed. For '
-        'more information, please see the discussion on issue #2266. 
(This' - ' warning should only appear once.)'), - DeprecationWarning) + warnings.warn( + ( + "In requests 3.0, get_unicode_from_response will be removed. For " + "more information, please see the discussion on issue #2266. (This" + " warning should only appear once.)" + ), + DeprecationWarning, + ) tried_encodings = [] @@ -592,14 +616,15 @@ def get_unicode_from_response(r): # Fall back: try: - return str(r.content, encoding, errors='replace') + return str(r.content, encoding, errors="replace") except TypeError: return r.content # The unreserved URI characters (RFC 3986) UNRESERVED_SET = frozenset( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~") + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" + "0123456789-._~" +) def unquote_unreserved(uri): @@ -608,22 +633,22 @@ def unquote_unreserved(uri): :rtype: str """ - parts = uri.split('%') + parts = uri.split("%") for i in range(1, len(parts)): h = parts[i][0:2] if len(h) == 2 and h.isalnum(): try: c = chr(int(h, 16)) except ValueError: - raise InvalidURL("Invalid percent-escape sequence: '%s'" % h) + raise InvalidURL(f"Invalid percent-escape sequence: '{h}'") if c in UNRESERVED_SET: parts[i] = c + parts[i][2:] else: - parts[i] = '%' + parts[i] + parts[i] = f"%{parts[i]}" else: - parts[i] = '%' + parts[i] - return ''.join(parts) + parts[i] = f"%{parts[i]}" + return "".join(parts) def requote_uri(uri): @@ -656,10 +681,10 @@ def address_in_network(ip, net): :rtype: bool """ - ipaddr = struct.unpack('=L', socket.inet_aton(ip))[0] - netaddr, bits = net.split('/') - netmask = struct.unpack('=L', socket.inet_aton(dotted_netmask(int(bits))))[0] - network = struct.unpack('=L', socket.inet_aton(netaddr))[0] & netmask + ipaddr = struct.unpack("=L", socket.inet_aton(ip))[0] + netaddr, bits = net.split("/") + netmask = struct.unpack("=L", socket.inet_aton(dotted_netmask(int(bits))))[0] + network = struct.unpack("=L", socket.inet_aton(netaddr))[0] & netmask return (ipaddr & netmask) == (network & netmask) @@ -670,8 +695,8 @@ def dotted_netmask(mask): :rtype: str """ - bits = 0xffffffff ^ (1 << 32 - mask) - 1 - return socket.inet_ntoa(struct.pack('>I', bits)) + bits = 0xFFFFFFFF ^ (1 << 32 - mask) - 1 + return socket.inet_ntoa(struct.pack(">I", bits)) def is_ipv4_address(string_ip): @@ -680,7 +705,7 @@ def is_ipv4_address(string_ip): """ try: socket.inet_aton(string_ip) - except socket.error: + except OSError: return False return True @@ -691,9 +716,9 @@ def is_valid_cidr(string_network): :rtype: bool """ - if string_network.count('/') == 1: + if string_network.count("/") == 1: try: - mask = int(string_network.split('/')[1]) + mask = int(string_network.split("/")[1]) except ValueError: return False @@ -701,8 +726,8 @@ def is_valid_cidr(string_network): return False try: - socket.inet_aton(string_network.split('/')[0]) - except socket.error: + socket.inet_aton(string_network.split("/")[0]) + except OSError: return False else: return False @@ -739,13 +764,14 @@ def should_bypass_proxies(url, no_proxy): """ # Prioritize lowercase environment variables over uppercase # to keep a consistent behaviour with other http projects (curl, wget). - get_proxy = lambda k: os.environ.get(k) or os.environ.get(k.upper()) + def get_proxy(key): + return os.environ.get(key) or os.environ.get(key.upper()) # First check whether no_proxy is defined. If it is, check that the URL # we're getting isn't in the no_proxy list. 
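# Editor's note -- illustrative only, not part of the patch: with
# no_proxy="localhost,.internal.example.com", a request to
# http://api.internal.example.com:8080/ bypasses the proxy because the
# hostname, checked both with and without the ":8080" suffix, ends with a
# listed entry; an IPv4 hostname is instead compared against any CIDR
# entries via is_valid_cidr() and address_in_network() below.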
no_proxy_arg = no_proxy if no_proxy is None: - no_proxy = get_proxy('no_proxy') + no_proxy = get_proxy("no_proxy") parsed = urlparse(url) if parsed.hostname is None: @@ -755,9 +781,7 @@ def should_bypass_proxies(url, no_proxy): if no_proxy: # We need to check whether we match here. We need to see if we match # the end of the hostname, both with and without the port. - no_proxy = ( - host for host in no_proxy.replace(' ', '').split(',') if host - ) + no_proxy = (host for host in no_proxy.replace(" ", "").split(",") if host) if is_ipv4_address(parsed.hostname): for proxy_ip in no_proxy: @@ -771,7 +795,7 @@ def should_bypass_proxies(url, no_proxy): else: host_with_port = parsed.hostname if parsed.port: - host_with_port += ':{}'.format(parsed.port) + host_with_port += f":{parsed.port}" for host in no_proxy: if parsed.hostname.endswith(host) or host_with_port.endswith(host): @@ -779,7 +803,7 @@ def should_bypass_proxies(url, no_proxy): # to apply the proxies on this URL. return True - with set_environ('no_proxy', no_proxy_arg): + with set_environ("no_proxy", no_proxy_arg): # parsed.hostname can be `None` in cases such as a file URI. try: bypass = proxy_bypass(parsed.hostname) @@ -813,13 +837,13 @@ def select_proxy(url, proxies): proxies = proxies or {} urlparts = urlparse(url) if urlparts.hostname is None: - return proxies.get(urlparts.scheme, proxies.get('all')) + return proxies.get(urlparts.scheme, proxies.get("all")) proxy_keys = [ - urlparts.scheme + '://' + urlparts.hostname, + urlparts.scheme + "://" + urlparts.hostname, urlparts.scheme, - 'all://' + urlparts.hostname, - 'all', + "all://" + urlparts.hostname, + "all", ] proxy = None for proxy_key in proxy_keys: @@ -844,13 +868,13 @@ def resolve_proxies(request, proxies, trust_env=True): proxies = proxies if proxies is not None else {} url = request.url scheme = urlparse(url).scheme - no_proxy = proxies.get('no_proxy') + no_proxy = proxies.get("no_proxy") new_proxies = proxies.copy() if trust_env and not should_bypass_proxies(url, no_proxy=no_proxy): environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) - proxy = environ_proxies.get(scheme, environ_proxies.get('all')) + proxy = environ_proxies.get(scheme, environ_proxies.get("all")) if proxy: new_proxies.setdefault(scheme, proxy) @@ -863,19 +887,21 @@ def default_user_agent(name="python-requests"): :rtype: str """ - return '%s/%s' % (name, __version__) + return f"{name}/{__version__}" def default_headers(): """ :rtype: requests.structures.CaseInsensitiveDict """ - return CaseInsensitiveDict({ - 'User-Agent': default_user_agent(), - 'Accept-Encoding': DEFAULT_ACCEPT_ENCODING, - 'Accept': '*/*', - 'Connection': 'keep-alive', - }) + return CaseInsensitiveDict( + { + "User-Agent": default_user_agent(), + "Accept-Encoding": DEFAULT_ACCEPT_ENCODING, + "Accept": "*/*", + "Connection": "keep-alive", + } + ) def parse_header_links(value): @@ -888,23 +914,23 @@ def parse_header_links(value): links = [] - replace_chars = ' \'"' + replace_chars = " '\"" value = value.strip(replace_chars) if not value: return links - for val in re.split(', *<', value): + for val in re.split(", *<", value): try: - url, params = val.split(';', 1) + url, params = val.split(";", 1) except ValueError: - url, params = val, '' + url, params = val, "" - link = {'url': url.strip('<> \'"')} + link = {"url": url.strip("<> '\"")} - for param in params.split(';'): + for param in params.split(";"): try: - key, value = param.split('=') + key, value = param.split("=") except ValueError: break @@ -916,7 +942,7 @@ def 
parse_header_links(value): # Null bytes; no need to recreate these on each call to guess_json_utf -_null = '\x00'.encode('ascii') # encoding to ASCII for Python 3 +_null = "\x00".encode("ascii") # encoding to ASCII for Python 3 _null2 = _null * 2 _null3 = _null * 3 @@ -930,25 +956,25 @@ def guess_json_utf(data): # determine the encoding. Also detect a BOM, if present. sample = data[:4] if sample in (codecs.BOM_UTF32_LE, codecs.BOM_UTF32_BE): - return 'utf-32' # BOM included + return "utf-32" # BOM included if sample[:3] == codecs.BOM_UTF8: - return 'utf-8-sig' # BOM included, MS style (discouraged) + return "utf-8-sig" # BOM included, MS style (discouraged) if sample[:2] in (codecs.BOM_UTF16_LE, codecs.BOM_UTF16_BE): - return 'utf-16' # BOM included + return "utf-16" # BOM included nullcount = sample.count(_null) if nullcount == 0: - return 'utf-8' + return "utf-8" if nullcount == 2: - if sample[::2] == _null2: # 1st and 3rd are null - return 'utf-16-be' + if sample[::2] == _null2: # 1st and 3rd are null + return "utf-16-be" if sample[1::2] == _null2: # 2nd and 4th are null - return 'utf-16-le' + return "utf-16-le" # Did not detect 2 valid UTF-16 ascii-range characters if nullcount == 3: if sample[:3] == _null3: - return 'utf-32-be' + return "utf-32-be" if sample[1:] == _null3: - return 'utf-32-le' + return "utf-32-le" # Did not detect a valid UTF-32 ascii-range character return None @@ -973,13 +999,13 @@ def prepend_scheme_if_needed(url, new_scheme): if auth: # parse_url doesn't provide the netloc with auth # so we'll add it ourselves. - netloc = '@'.join([auth, netloc]) + netloc = "@".join([auth, netloc]) if scheme is None: scheme = new_scheme if path is None: - path = '' + path = "" - return urlunparse((scheme, netloc, path, '', query, fragment)) + return urlunparse((scheme, netloc, path, "", query, fragment)) def get_auth_from_url(url): @@ -993,14 +1019,14 @@ def get_auth_from_url(url): try: auth = (unquote(parsed.username), unquote(parsed.password)) except (AttributeError, TypeError): - auth = ('', '') + auth = ("", "") return auth # Moved outside of function to avoid recompile every call -_CLEAN_HEADER_REGEX_BYTE = re.compile(b'^\\S[^\\r\\n]*$|^$') -_CLEAN_HEADER_REGEX_STR = re.compile(r'^\S[^\r\n]*$|^$') +_CLEAN_HEADER_REGEX_BYTE = re.compile(b"^\\S[^\\r\\n]*$|^$") +_CLEAN_HEADER_REGEX_STR = re.compile(r"^\S[^\r\n]*$|^$") def check_header_validity(header): @@ -1018,10 +1044,14 @@ def check_header_validity(header): pat = _CLEAN_HEADER_REGEX_STR try: if not pat.match(value): - raise InvalidHeader("Invalid return character or leading space in header: %s" % name) + raise InvalidHeader( + f"Invalid return character or leading space in header: {name}" + ) except TypeError: - raise InvalidHeader("Value for header {%s: %s} must be of type str or " - "bytes, not %s" % (name, value, type(value))) + raise InvalidHeader( + f"Value for header {{{name}: {value}}} must be of type " + f"str or bytes, not {type(value)}" + ) def urldefragauth(url): @@ -1036,21 +1066,24 @@ def urldefragauth(url): if not netloc: netloc, path = path, netloc - netloc = netloc.rsplit('@', 1)[-1] + netloc = netloc.rsplit("@", 1)[-1] - return urlunparse((scheme, netloc, path, params, query, '')) + return urlunparse((scheme, netloc, path, params, query, "")) def rewind_body(prepared_request): """Move file pointer back to its recorded starting position so it can be read again on redirect. 
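Illustration (editorial note, not from the patch): a body supplied as
io.BytesIO(b"payload") has its position recorded in _body_position when
the request is prepared, so a 307/308 redirect can seek back and resend
the same bytes.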
""" - body_seek = getattr(prepared_request.body, 'seek', None) - if body_seek is not None and isinstance(prepared_request._body_position, integer_types): + body_seek = getattr(prepared_request.body, "seek", None) + if body_seek is not None and isinstance( + prepared_request._body_position, integer_types + ): try: body_seek(prepared_request._body_position) - except (IOError, OSError): - raise UnrewindableBodyError("An error occurred when rewinding request " - "body for redirect.") + except OSError: + raise UnrewindableBodyError( + "An error occurred when rewinding request body for redirect." + ) else: raise UnrewindableBodyError("Unable to rewind request body for redirect.") diff --git a/setup.cfg b/setup.cfg index fa7fc96a30..5ff5d4274e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -8,3 +8,10 @@ requires-dist = charset_normalizer~=2.0.0 idna>=2.5,<4 urllib3>=1.21.1,<1.27 + +[flake8] +ignore = E203, E501, W503 +per-file-ignores = + requests/__init__.py:E402, F401 + requests/compat.py:E402, F401 + tests/compat.py:F401 diff --git a/setup.py b/setup.py index ae2f2bc37e..ca7bc3a0cd 100755 --- a/setup.py +++ b/setup.py @@ -1,13 +1,11 @@ #!/usr/bin/env python import os import sys - from codecs import open from setuptools import setup from setuptools.command.test import test as TestCommand - CURRENT_PYTHON = sys.version_info[:2] REQUIRED_PYTHON = (3, 7) @@ -29,16 +27,18 @@ ) sys.exit(1) + class PyTest(TestCommand): - user_options = [('pytest-args=', 'a', "Arguments to pass into py.test")] + user_options = [("pytest-args=", "a", "Arguments to pass into py.test")] def initialize_options(self): TestCommand.initialize_options(self) try: from multiprocessing import cpu_count - self.pytest_args = ['-n', str(cpu_count()), '--boxed'] + + self.pytest_args = ["-n", str(cpu_count()), "--boxed"] except (ImportError, NotImplementedError): - self.pytest_args = ['-n', '1', '--boxed'] + self.pytest_args = ["-n", "1", "--boxed"] def finalize_options(self): TestCommand.finalize_options(self) @@ -51,81 +51,82 @@ def run_tests(self): errno = pytest.main(self.pytest_args) sys.exit(errno) + # 'setup.py publish' shortcut. 
-if sys.argv[-1] == 'publish': - os.system('python setup.py sdist bdist_wheel') - os.system('twine upload dist/*') +if sys.argv[-1] == "publish": + os.system("python setup.py sdist bdist_wheel") + os.system("twine upload dist/*") sys.exit() requires = [ - 'charset_normalizer~=2.0.0', - 'idna>=2.5,<4', - 'urllib3>=1.21.1,<1.27', - 'certifi>=2017.4.17', + "charset_normalizer~=2.0.0", + "idna>=2.5,<4", + "urllib3>=1.21.1,<1.27", + "certifi>=2017.4.17", ] test_requirements = [ - 'pytest-httpbin==0.0.7', - 'pytest-cov', - 'pytest-mock', - 'pytest-xdist', - 'PySocks>=1.5.6, !=1.5.7', - 'pytest>=3', + "pytest-httpbin==0.0.7", + "pytest-cov", + "pytest-mock", + "pytest-xdist", + "PySocks>=1.5.6, !=1.5.7", + "pytest>=3", ] about = {} here = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(here, 'requests', '__version__.py'), 'r', 'utf-8') as f: +with open(os.path.join(here, "requests", "__version__.py"), "r", "utf-8") as f: exec(f.read(), about) -with open('README.md', 'r', 'utf-8') as f: +with open("README.md", "r", "utf-8") as f: readme = f.read() setup( - name=about['__title__'], - version=about['__version__'], - description=about['__description__'], + name=about["__title__"], + version=about["__version__"], + description=about["__description__"], long_description=readme, - long_description_content_type='text/markdown', - author=about['__author__'], - author_email=about['__author_email__'], - url=about['__url__'], - packages=['requests'], - package_data={'': ['LICENSE', 'NOTICE']}, - package_dir={'requests': 'requests'}, + long_description_content_type="text/markdown", + author=about["__author__"], + author_email=about["__author_email__"], + url=about["__url__"], + packages=["requests"], + package_data={"": ["LICENSE", "NOTICE"]}, + package_dir={"requests": "requests"}, include_package_data=True, python_requires=">=3.7, <4", install_requires=requires, - license=about['__license__'], + license=about["__license__"], zip_safe=False, classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Web Environment', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: Apache Software License', - 'Natural Language :: English', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: Implementation :: CPython', - 'Programming Language :: Python :: Implementation :: PyPy', - 'Topic :: Internet :: WWW/HTTP', - 'Topic :: Software Development :: Libraries', + "Development Status :: 5 - Production/Stable", + "Environment :: Web Environment", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", + "Topic :: Internet :: WWW/HTTP", + 
"Topic :: Software Development :: Libraries", ], - cmdclass={'test': PyTest}, + cmdclass={"test": PyTest}, tests_require=test_requirements, extras_require={ - 'security': [], - 'socks': ['PySocks>=1.5.6, !=1.5.7'], - 'use_chardet_on_py3': ['chardet>=3.0.2,<5'] + "security": [], + "socks": ["PySocks>=1.5.6, !=1.5.7"], + "use_chardet_on_py3": ["chardet>=3.0.2,<5"], }, project_urls={ - 'Documentation': 'https://requests.readthedocs.io', - 'Source': 'https://github.com/psf/requests', + "Documentation": "https://requests.readthedocs.io", + "Source": "https://github.com/psf/requests", }, ) diff --git a/tests/__init__.py b/tests/__init__.py index 9be94bcc06..04385be18a 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,13 +1,10 @@ -# -*- coding: utf-8 -*- - """Requests test package initialisation.""" import warnings -import urllib3 from urllib3.exceptions import SNIMissingWarning # urllib3 sets SNIMissingWarning to only go off once, # while this test suite requires it to always fire # so that it occurs during test_requests.test_https_warnings -warnings.simplefilter('always', SNIMissingWarning) +warnings.simplefilter("always", SNIMissingWarning) diff --git a/tests/compat.py b/tests/compat.py index 62abb25dca..7618aa157b 100644 --- a/tests/compat.py +++ b/tests/compat.py @@ -1,4 +1,3 @@ -# -*- coding: utf-8 -*- import warnings try: diff --git a/tests/conftest.py b/tests/conftest.py index 4f9b2641b4..530a4c2a5f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,26 +1,23 @@ -# -*- coding: utf-8 -*- - try: - from http.server import HTTPServer - from http.server import SimpleHTTPRequestHandler + from http.server import HTTPServer, SimpleHTTPRequestHandler except ImportError: from BaseHTTPServer import HTTPServer - from SimpleHTTPServer import SimpleHTTPRequestHandler + from SimpleHTTPServer import SimpleHTTPRequestHandler import ssl -import tempfile import threading import pytest + from requests.compat import urljoin def prepare_url(value): # Issue #1483: Make sure the URL always has a trailing slash - httpbin_url = value.url.rstrip('/') + '/' + httpbin_url = value.url.rstrip("/") + "/" def inner(*suffix): - return urljoin(httpbin_url, '/'.join(suffix)) + return urljoin(httpbin_url, "/".join(suffix)) return inner @@ -44,7 +41,7 @@ def nosan_server(tmp_path_factory): tmpdir = tmp_path_factory.mktemp("certs") ca = trustme.CA() # only commonName, no subjectAltName - server_cert = ca.issue_cert(common_name=u"localhost") + server_cert = ca.issue_cert(common_name="localhost") ca_bundle = str(tmpdir / "ca.pem") ca.cert_pem.write_to_path(ca_bundle) diff --git a/tests/test_help.py b/tests/test_help.py index 3beb65f30a..fb4e967c53 100644 --- a/tests/test_help.py +++ b/tests/test_help.py @@ -1,18 +1,12 @@ -# -*- encoding: utf-8 - -import sys - -import pytest - from requests.help import info def test_system_ssl(): """Verify we're actually setting system_ssl when it should be available.""" - assert info()['system_ssl']['version'] != '' + assert info()["system_ssl"]["version"] != "" -class VersionedPackage(object): +class VersionedPackage: def __init__(self, version): self.__version__ = version @@ -21,11 +15,11 @@ def test_idna_without_version_attribute(mocker): """Older versions of IDNA don't provide a __version__ attribute, verify that if we have such a package, we don't blow up. 
""" - mocker.patch('requests.help.idna', new=None) - assert info()['idna'] == {'version': ''} + mocker.patch("requests.help.idna", new=None) + assert info()["idna"] == {"version": ""} def test_idna_with_version_attribute(mocker): """Verify we're actually setting idna version when it should be available.""" - mocker.patch('requests.help.idna', new=VersionedPackage('2.6')) - assert info()['idna'] == {'version': '2.6'} + mocker.patch("requests.help.idna", new=VersionedPackage("2.6")) + assert info()["idna"] == {"version": "2.6"} diff --git a/tests/test_hooks.py b/tests/test_hooks.py index 014b439182..7445525ec8 100644 --- a/tests/test_hooks.py +++ b/tests/test_hooks.py @@ -1,5 +1,3 @@ -# -*- coding: utf-8 -*- - import pytest from requests import hooks @@ -10,14 +8,15 @@ def hook(value): @pytest.mark.parametrize( - 'hooks_list, result', ( - (hook, 'ata'), - ([hook, lambda x: None, hook], 'ta'), - ) + "hooks_list, result", + ( + (hook, "ata"), + ([hook, lambda x: None, hook], "ta"), + ), ) def test_hooks(hooks_list, result): - assert hooks.dispatch_hook('response', {'response': hooks_list}, 'Data') == result + assert hooks.dispatch_hook("response", {"response": hooks_list}, "Data") == result def test_default_hooks(): - assert hooks.default_hooks() == {'response': []} + assert hooks.default_hooks() == {"response": []} diff --git a/tests/test_lowlevel.py b/tests/test_lowlevel.py index 1d8cbde64a..859d07e8a5 100644 --- a/tests/test_lowlevel.py +++ b/tests/test_lowlevel.py @@ -1,12 +1,11 @@ -# -*- coding: utf-8 -*- - -import pytest import threading -import requests +import pytest from tests.testserver.server import Server, consume_socket_content +import requests from requests.compat import JSONDecodeError + from .utils import override_environ @@ -15,9 +14,9 @@ def echo_response_handler(sock): request_content = consume_socket_content(sock, timeout=0.5) text_200 = ( - b'HTTP/1.1 200 OK\r\n' - b'Content-Length: %d\r\n\r\n' - b'%s' + b"HTTP/1.1 200 OK\r\n" + b"Content-Length: %d\r\n\r\n" + b"%s" ) % (len(request_content), request_content) sock.send(text_200) @@ -26,15 +25,15 @@ def test_chunked_upload(): """can safely send generators""" close_server = threading.Event() server = Server.basic_response_server(wait_to_close_event=close_server) - data = iter([b'a', b'b', b'c']) + data = iter([b"a", b"b", b"c"]) with server as (host, port): - url = 'http://{}:{}/'.format(host, port) + url = f"http://{host}:{port}/" r = requests.post(url, data=data, stream=True) close_server.set() # release server block assert r.status_code == 200 - assert r.request.headers['Transfer-Encoding'] == 'chunked' + assert r.request.headers["Transfer-Encoding"] == "chunked" def test_chunked_encoding_error(): @@ -44,8 +43,10 @@ def incomplete_chunked_response_handler(sock): request_content = consume_socket_content(sock, timeout=0.5) # The server never ends the request and doesn't provide any valid chunks - sock.send(b"HTTP/1.1 200 OK\r\n" + - b"Transfer-Encoding: chunked\r\n") + sock.send( + b"HTTP/1.1 200 OK\r\n" + b"Transfer-Encoding: chunked\r\n" + ) return request_content @@ -53,9 +54,9 @@ def incomplete_chunked_response_handler(sock): server = Server(incomplete_chunked_response_handler) with server as (host, port): - url = 'http://{}:{}/'.format(host, port) + url = f"http://{host}:{port}/" with pytest.raises(requests.exceptions.ChunkedEncodingError): - r = requests.get(url) + requests.get(url) close_server.set() # release server block @@ -64,17 +65,17 @@ def test_chunked_upload_uses_only_specified_host_header(): close_server = 
threading.Event() server = Server(echo_response_handler, wait_to_close_event=close_server) - data = iter([b'a', b'b', b'c']) - custom_host = 'sample-host' + data = iter([b"a", b"b", b"c"]) + custom_host = "sample-host" with server as (host, port): - url = 'http://{}:{}/'.format(host, port) - r = requests.post(url, data=data, headers={'Host': custom_host}, stream=True) + url = f"http://{host}:{port}/" + r = requests.post(url, data=data, headers={"Host": custom_host}, stream=True) close_server.set() # release server block - expected_header = b'Host: %s\r\n' % custom_host.encode('utf-8') + expected_header = b"Host: %s\r\n" % custom_host.encode("utf-8") assert expected_header in r.content - assert r.content.count(b'Host: ') == 1 + assert r.content.count(b"Host: ") == 1 def test_chunked_upload_doesnt_skip_host_header(): @@ -82,17 +83,17 @@ def test_chunked_upload_doesnt_skip_host_header(): close_server = threading.Event() server = Server(echo_response_handler, wait_to_close_event=close_server) - data = iter([b'a', b'b', b'c']) + data = iter([b"a", b"b", b"c"]) with server as (host, port): - expected_host = '{}:{}'.format(host, port) - url = 'http://{}:{}/'.format(host, port) + expected_host = f"{host}:{port}" + url = f"http://{host}:{port}/" r = requests.post(url, data=data, stream=True) close_server.set() # release server block - expected_header = b'Host: %s\r\n' % expected_host.encode('utf-8') + expected_header = b"Host: %s\r\n" % expected_host.encode("utf-8") assert expected_header in r.content - assert r.content.count(b'Host: ') == 1 + assert r.content.count(b"Host: ") == 1 def test_conflicting_content_lengths(): @@ -102,12 +103,14 @@ def test_conflicting_content_lengths(): def multiple_content_length_response_handler(sock): request_content = consume_socket_content(sock, timeout=0.5) - - sock.send(b"HTTP/1.1 200 OK\r\n" + - b"Content-Type: text/plain\r\n" + - b"Content-Length: 16\r\n" + - b"Content-Length: 32\r\n\r\n" + - b"-- Bad Actor -- Original Content\r\n") + response = ( + b"HTTP/1.1 200 OK\r\n" + b"Content-Type: text/plain\r\n" + b"Content-Length: 16\r\n" + b"Content-Length: 32\r\n\r\n" + b"-- Bad Actor -- Original Content\r\n" + ) + sock.send(response) return request_content @@ -115,9 +118,9 @@ def multiple_content_length_response_handler(sock): server = Server(multiple_content_length_response_handler) with server as (host, port): - url = 'http://{}:{}/'.format(host, port) + url = f"http://{host}:{port}/" with pytest.raises(requests.exceptions.InvalidHeader): - r = requests.get(url) + requests.get(url) close_server.set() @@ -174,7 +177,7 @@ def digest_response_handler(sock): server = Server(digest_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = 'http://{}:{}/'.format(host, port) + url = f'http://{host}:{port}/' r = requests.get(url, auth=auth) # Verify server succeeded in authenticating. assert r.status_code == 200 @@ -224,7 +227,7 @@ def digest_failed_response_handler(sock): server = Server(digest_failed_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = 'http://{}:{}/'.format(host, port) + url = f'http://{host}:{port}/' r = requests.get(url, auth=auth) # Verify server didn't authenticate us. 
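# Editor's note -- illustrative only, not part of the patch: these
# low-level tests share one pattern: start tests.testserver.server.Server
# with a raw socket handler, issue a real request against
# f"http://{host}:{port}/", assert on what came back, and finally set the
# close_server event so the handler thread can shut down.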
assert r.status_code == 401 @@ -261,7 +264,7 @@ def digest_response_handler(sock): server = Server(digest_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = 'http://{}:{}/'.format(host, port) + url = f'http://{host}:{port}/' r = requests.get(url, auth=auth) # Verify server didn't receive auth from us. assert r.status_code == 200 @@ -278,17 +281,17 @@ def digest_response_handler(sock): _proxy_combos = [] for prefix, schemes in _schemes_by_var_prefix: for scheme in schemes: - _proxy_combos.append(("{}_proxy".format(prefix), scheme)) + _proxy_combos.append((f"{prefix}_proxy", scheme)) _proxy_combos += [(var.upper(), scheme) for var, scheme in _proxy_combos] @pytest.mark.parametrize("var,scheme", _proxy_combos) def test_use_proxy_from_environment(httpbin, var, scheme): - url = "{}://httpbin.org".format(scheme) + url = f"{scheme}://httpbin.org" fake_proxy = Server() # do nothing with the requests; just close the socket with fake_proxy as (host, port): - proxy_url = "socks5://{}:{}".format(host, port) + proxy_url = f"socks5://{host}:{port}" kwargs = {var: proxy_url} with override_environ(**kwargs): # fake proxy's lack of response will cause a ConnectionError @@ -303,18 +306,20 @@ def test_use_proxy_from_environment(httpbin, var, scheme): def test_redirect_rfc1808_to_non_ascii_location(): - path = u'š' + path = 'š' expected_path = b'%C5%A1' redirect_request = [] # stores the second request to the server def redirect_resp_handler(sock): consume_socket_content(sock, timeout=0.5) - location = u'//{}:{}/{}'.format(host, port, path) + location = f'//{host}:{port}/{path}' sock.send( - b'HTTP/1.1 301 Moved Permanently\r\n' - b'Content-Length: 0\r\n' - b'Location: ' + location.encode('utf8') + b'\r\n' - b'\r\n' + ( + b'HTTP/1.1 301 Moved Permanently\r\n' + b'Content-Length: 0\r\n' + b'Location: %s\r\n' + b'\r\n' + ) % location.encode('utf8') ) redirect_request.append(consume_socket_content(sock, timeout=0.5)) sock.send(b'HTTP/1.1 200 OK\r\n\r\n') @@ -323,31 +328,24 @@ def redirect_resp_handler(sock): server = Server(redirect_resp_handler, wait_to_close_event=close_server) with server as (host, port): - url = u'http://{}:{}'.format(host, port) + url = f'http://{host}:{port}' r = requests.get(url=url, allow_redirects=True) assert r.status_code == 200 assert len(r.history) == 1 assert r.history[0].status_code == 301 assert redirect_request[0].startswith(b'GET /' + expected_path + b' HTTP/1.1') - assert r.url == u'{}/{}'.format(url, expected_path.decode('ascii')) + assert r.url == '{}/{}'.format(url, expected_path.decode('ascii')) close_server.set() + def test_fragment_not_sent_with_request(): """Verify that the fragment portion of a URI isn't sent to the server.""" - def response_handler(sock): - req = consume_socket_content(sock, timeout=0.5) - sock.send( - b'HTTP/1.1 200 OK\r\n' - b'Content-Length: '+bytes(len(req))+b'\r\n' - b'\r\n'+req - ) - close_server = threading.Event() - server = Server(response_handler, wait_to_close_event=close_server) + server = Server(echo_response_handler, wait_to_close_event=close_server) with server as (host, port): - url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port) + url = f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2' r = requests.get(url) raw_request = r.content @@ -362,6 +360,7 @@ def response_handler(sock): close_server.set() + def test_fragment_update_on_redirect(): """Verify we only append previous fragment if one doesn't exist on new location. 
If a new fragment is encountered in a Location header, it should @@ -390,21 +389,21 @@ def response_handler(sock): server = Server(response_handler, wait_to_close_event=close_server) with server as (host, port): - url = 'http://{}:{}/path/to/thing/#view=edit&token=hunter2'.format(host, port) + url = f'http://{host}:{port}/path/to/thing/#view=edit&token=hunter2' r = requests.get(url) - raw_request = r.content assert r.status_code == 200 assert len(r.history) == 2 assert r.history[0].request.url == url # Verify we haven't overwritten the location with our previous fragment. - assert r.history[1].request.url == 'http://{}:{}/get#relevant-section'.format(host, port) + assert r.history[1].request.url == f'http://{host}:{port}/get#relevant-section' # Verify previous fragment is used and not the original. - assert r.url == 'http://{}:{}/final-url/#relevant-section'.format(host, port) + assert r.url == f'http://{host}:{port}/final-url/#relevant-section' close_server.set() + def test_json_decode_compatibility_for_alt_utf_encodings(): def response_handler(sock): @@ -419,7 +418,7 @@ def response_handler(sock): server = Server(response_handler, wait_to_close_event=close_server) with server as (host, port): - url = 'http://{}:{}/'.format(host, port) + url = f'http://{host}:{port}/' r = requests.get(url) r.encoding = None with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo: diff --git a/tests/test_requests.py b/tests/test_requests.py index 7c6af29984..ccd8636bad 100644 --- a/tests/test_requests.py +++ b/tests/test_requests.py @@ -1,33 +1,37 @@ -# -*- coding: utf-8 -*- - """Tests for Requests.""" +import collections +import contextlib +import io import json import os import pickle -import collections -import contextlib -import warnings import re +import warnings -import io -import requests import pytest import urllib3 +from urllib3.util import Timeout as Urllib3Timeout + +import requests from requests.adapters import HTTPAdapter from requests.auth import HTTPDigestAuth, _basic_auth_str from requests.compat import ( - Morsel, cookielib, getproxies, urlparse, - builtin_str) -from requests.cookies import ( - cookiejar_from_dict, morsel_to_cookie) + JSONDecodeError, + Morsel, + MutableMapping, + builtin_str, + cookielib, + getproxies, + urlparse, +) +from requests.cookies import cookiejar_from_dict, morsel_to_cookie from requests.exceptions import ( ChunkedEncodingError, ConnectionError, ConnectTimeout, ContentDecodingError, InvalidHeader, - InvalidJSONError, InvalidProxyURL, InvalidSchema, InvalidURL, @@ -36,31 +40,27 @@ ReadTimeout, RequestException, RetryError, - Timeout, - TooManyRedirects, - UnrewindableBodyError, ) from requests.exceptions import SSLError as RequestsSSLError -from requests.models import PreparedRequest -from requests.structures import CaseInsensitiveDict -from requests.sessions import SessionRedirectMixin -from requests.models import urlencode +from requests.exceptions import Timeout, TooManyRedirects, UnrewindableBodyError from requests.hooks import default_hooks -from requests.compat import JSONDecodeError, MutableMapping +from requests.models import PreparedRequest, urlencode +from requests.sessions import SessionRedirectMixin +from requests.structures import CaseInsensitiveDict from .compat import StringIO from .utils import override_environ -from urllib3.util import Timeout as Urllib3Timeout # Requests to this URL should always fail with a connection timeout (nothing # listening on that port) -TARPIT = 'http://10.255.255.1' +TARPIT = "http://10.255.255.1" # This is 
to avoid waiting the timeout of using TARPIT -INVALID_PROXY='http://localhost:1' +INVALID_PROXY = "http://localhost:1" try: from ssl import SSLContext + del SSLContext HAS_MODERN_SSL = True except ImportError: @@ -75,7 +75,7 @@ class TestRequests: - digest_auth_algo = ('MD5', 'SHA-256', 'SHA-512') + digest_auth_algo = ("MD5", "SHA-256", "SHA-512") def test_entry_points(self): @@ -88,102 +88,116 @@ def test_entry_points(self): requests.patch requests.post # Not really an entry point, but people rely on it. - from requests.packages.urllib3.poolmanager import PoolManager + from requests.packages.urllib3.poolmanager import PoolManager # noqa:F401 @pytest.mark.parametrize( - 'exception, url', ( - (MissingSchema, 'hiwpefhipowhefopw'), - (InvalidSchema, 'localhost:3128'), - (InvalidSchema, 'localhost.localdomain:3128/'), - (InvalidSchema, '10.122.1.1:3128/'), - (InvalidURL, 'http://'), - (InvalidURL, 'http://*example.com'), - (InvalidURL, 'http://.example.com'), - )) + "exception, url", + ( + (MissingSchema, "hiwpefhipowhefopw"), + (InvalidSchema, "localhost:3128"), + (InvalidSchema, "localhost.localdomain:3128/"), + (InvalidSchema, "10.122.1.1:3128/"), + (InvalidURL, "http://"), + (InvalidURL, "http://*example.com"), + (InvalidURL, "http://.example.com"), + ), + ) def test_invalid_url(self, exception, url): with pytest.raises(exception): requests.get(url) def test_basic_building(self): req = requests.Request() - req.url = 'http://kennethreitz.org/' - req.data = {'life': '42'} + req.url = "http://kennethreitz.org/" + req.data = {"life": "42"} pr = req.prepare() assert pr.url == req.url - assert pr.body == 'life=42' + assert pr.body == "life=42" - @pytest.mark.parametrize('method', ('GET', 'HEAD')) + @pytest.mark.parametrize("method", ("GET", "HEAD")) def test_no_content_length(self, httpbin, method): req = requests.Request(method, httpbin(method.lower())).prepare() - assert 'Content-Length' not in req.headers + assert "Content-Length" not in req.headers - @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS')) + @pytest.mark.parametrize("method", ("POST", "PUT", "PATCH", "OPTIONS")) def test_no_body_content_length(self, httpbin, method): req = requests.Request(method, httpbin(method.lower())).prepare() - assert req.headers['Content-Length'] == '0' + assert req.headers["Content-Length"] == "0" - @pytest.mark.parametrize('method', ('POST', 'PUT', 'PATCH', 'OPTIONS')) + @pytest.mark.parametrize("method", ("POST", "PUT", "PATCH", "OPTIONS")) def test_empty_content_length(self, httpbin, method): - req = requests.Request(method, httpbin(method.lower()), data='').prepare() - assert req.headers['Content-Length'] == '0' + req = requests.Request(method, httpbin(method.lower()), data="").prepare() + assert req.headers["Content-Length"] == "0" def test_override_content_length(self, httpbin): - headers = { - 'Content-Length': 'not zero' - } - r = requests.Request('POST', httpbin('post'), headers=headers).prepare() - assert 'Content-Length' in r.headers - assert r.headers['Content-Length'] == 'not zero' + headers = {"Content-Length": "not zero"} + r = requests.Request("POST", httpbin("post"), headers=headers).prepare() + assert "Content-Length" in r.headers + assert r.headers["Content-Length"] == "not zero" def test_path_is_not_double_encoded(self): - request = requests.Request('GET', "http://0.0.0.0/get/test case").prepare() + request = requests.Request("GET", "http://0.0.0.0/get/test case").prepare() - assert request.path_url == '/get/test%20case' + assert request.path_url == 
"/get/test%20case" @pytest.mark.parametrize( - 'url, expected', ( - ('http://example.com/path#fragment', 'http://example.com/path?a=b#fragment'), - ('http://example.com/path?key=value#fragment', 'http://example.com/path?key=value&a=b#fragment') - )) + "url, expected", + ( + ( + "http://example.com/path#fragment", + "http://example.com/path?a=b#fragment", + ), + ( + "http://example.com/path?key=value#fragment", + "http://example.com/path?key=value&a=b#fragment", + ), + ), + ) def test_params_are_added_before_fragment(self, url, expected): - request = requests.Request('GET', url, params={"a": "b"}).prepare() + request = requests.Request("GET", url, params={"a": "b"}).prepare() assert request.url == expected def test_params_original_order_is_preserved_by_default(self): - param_ordered_dict = collections.OrderedDict((('z', 1), ('a', 1), ('k', 1), ('d', 1))) + param_ordered_dict = collections.OrderedDict( + (("z", 1), ("a", 1), ("k", 1), ("d", 1)) + ) session = requests.Session() - request = requests.Request('GET', 'http://example.com/', params=param_ordered_dict) + request = requests.Request( + "GET", "http://example.com/", params=param_ordered_dict + ) prep = session.prepare_request(request) - assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1' + assert prep.url == "http://example.com/?z=1&a=1&k=1&d=1" def test_params_bytes_are_encoded(self): - request = requests.Request('GET', 'http://example.com', - params=b'test=foo').prepare() - assert request.url == 'http://example.com/?test=foo' + request = requests.Request( + "GET", "http://example.com", params=b"test=foo" + ).prepare() + assert request.url == "http://example.com/?test=foo" def test_binary_put(self): - request = requests.Request('PUT', 'http://example.com', - data=u"ööö".encode("utf-8")).prepare() + request = requests.Request( + "PUT", "http://example.com", data="ööö".encode() + ).prepare() assert isinstance(request.body, bytes) def test_whitespaces_are_removed_from_url(self): # Test for issue #3696 - request = requests.Request('GET', ' http://example.com').prepare() - assert request.url == 'http://example.com/' + request = requests.Request("GET", " http://example.com").prepare() + assert request.url == "http://example.com/" - @pytest.mark.parametrize('scheme', ('http://', 'HTTP://', 'hTTp://', 'HttP://')) + @pytest.mark.parametrize("scheme", ("http://", "HTTP://", "hTTp://", "HttP://")) def test_mixed_case_scheme_acceptable(self, httpbin, scheme): s = requests.Session() s.proxies = getproxies() - parts = urlparse(httpbin('get')) + parts = urlparse(httpbin("get")) url = scheme + parts.netloc + parts.path - r = requests.Request('GET', url) + r = requests.Request("GET", url) r = s.send(r.prepare()) - assert r.status_code == 200, 'failed for scheme {}'.format(scheme) + assert r.status_code == 200, f"failed for scheme {scheme}" def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin): - r = requests.Request('GET', httpbin('get')) + r = requests.Request("GET", httpbin("get")) s = requests.Session() s.proxies = getproxies() @@ -192,103 +206,113 @@ def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin): assert r.status_code == 200 def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin): - r = requests.get(httpbin('redirect', '1')) + r = requests.get(httpbin("redirect", "1")) assert r.status_code == 200 assert r.history[0].status_code == 302 assert r.history[0].is_redirect def test_HTTP_307_ALLOW_REDIRECT_POST(self, httpbin): - r = requests.post(httpbin('redirect-to'), data='test', params={'url': 'post', 'status_code': 307}) + r = requests.post( + 
httpbin("redirect-to"), + data="test", + params={"url": "post", "status_code": 307}, + ) assert r.status_code == 200 assert r.history[0].status_code == 307 assert r.history[0].is_redirect - assert r.json()['data'] == 'test' + assert r.json()["data"] == "test" def test_HTTP_307_ALLOW_REDIRECT_POST_WITH_SEEKABLE(self, httpbin): - byte_str = b'test' - r = requests.post(httpbin('redirect-to'), data=io.BytesIO(byte_str), params={'url': 'post', 'status_code': 307}) + byte_str = b"test" + r = requests.post( + httpbin("redirect-to"), + data=io.BytesIO(byte_str), + params={"url": "post", "status_code": 307}, + ) assert r.status_code == 200 assert r.history[0].status_code == 307 assert r.history[0].is_redirect - assert r.json()['data'] == byte_str.decode('utf-8') + assert r.json()["data"] == byte_str.decode("utf-8") def test_HTTP_302_TOO_MANY_REDIRECTS(self, httpbin): try: - requests.get(httpbin('relative-redirect', '50')) + requests.get(httpbin("relative-redirect", "50")) except TooManyRedirects as e: - url = httpbin('relative-redirect', '20') + url = httpbin("relative-redirect", "20") assert e.request.url == url assert e.response.url == url assert len(e.response.history) == 30 else: - pytest.fail('Expected redirect to raise TooManyRedirects but it did not') + pytest.fail("Expected redirect to raise TooManyRedirects but it did not") def test_HTTP_302_TOO_MANY_REDIRECTS_WITH_PARAMS(self, httpbin): s = requests.session() s.max_redirects = 5 try: - s.get(httpbin('relative-redirect', '50')) + s.get(httpbin("relative-redirect", "50")) except TooManyRedirects as e: - url = httpbin('relative-redirect', '45') + url = httpbin("relative-redirect", "45") assert e.request.url == url assert e.response.url == url assert len(e.response.history) == 5 else: - pytest.fail('Expected custom max number of redirects to be respected but was not') + pytest.fail( + "Expected custom max number of redirects to be respected but was not" + ) def test_http_301_changes_post_to_get(self, httpbin): - r = requests.post(httpbin('status', '301')) + r = requests.post(httpbin("status", "301")) assert r.status_code == 200 - assert r.request.method == 'GET' + assert r.request.method == "GET" assert r.history[0].status_code == 301 assert r.history[0].is_redirect def test_http_301_doesnt_change_head_to_get(self, httpbin): - r = requests.head(httpbin('status', '301'), allow_redirects=True) + r = requests.head(httpbin("status", "301"), allow_redirects=True) print(r.content) assert r.status_code == 200 - assert r.request.method == 'HEAD' + assert r.request.method == "HEAD" assert r.history[0].status_code == 301 assert r.history[0].is_redirect def test_http_302_changes_post_to_get(self, httpbin): - r = requests.post(httpbin('status', '302')) + r = requests.post(httpbin("status", "302")) assert r.status_code == 200 - assert r.request.method == 'GET' + assert r.request.method == "GET" assert r.history[0].status_code == 302 assert r.history[0].is_redirect def test_http_302_doesnt_change_head_to_get(self, httpbin): - r = requests.head(httpbin('status', '302'), allow_redirects=True) + r = requests.head(httpbin("status", "302"), allow_redirects=True) assert r.status_code == 200 - assert r.request.method == 'HEAD' + assert r.request.method == "HEAD" assert r.history[0].status_code == 302 assert r.history[0].is_redirect def test_http_303_changes_post_to_get(self, httpbin): - r = requests.post(httpbin('status', '303')) + r = requests.post(httpbin("status", "303")) assert r.status_code == 200 - assert r.request.method == 'GET' + assert r.request.method 
== "GET" assert r.history[0].status_code == 303 assert r.history[0].is_redirect def test_http_303_doesnt_change_head_to_get(self, httpbin): - r = requests.head(httpbin('status', '303'), allow_redirects=True) + r = requests.head(httpbin("status", "303"), allow_redirects=True) assert r.status_code == 200 - assert r.request.method == 'HEAD' + assert r.request.method == "HEAD" assert r.history[0].status_code == 303 assert r.history[0].is_redirect def test_header_and_body_removal_on_redirect(self, httpbin): - purged_headers = ('Content-Length', 'Content-Type') + purged_headers = ("Content-Length", "Content-Type") ses = requests.Session() - req = requests.Request('POST', httpbin('post'), data={'test': 'data'}) + req = requests.Request("POST", httpbin("post"), data={"test": "data"}) prep = ses.prepare_request(req) resp = ses.send(prep) # Mimic a redirect response resp.status_code = 302 - resp.headers['location'] = 'get' + resp.headers["location"] = "get" # Run request through resolve_redirects next_resp = next(ses.resolve_redirects(resp, prep)) @@ -297,21 +321,21 @@ def test_header_and_body_removal_on_redirect(self, httpbin): assert header not in next_resp.request.headers def test_transfer_enc_removal_on_redirect(self, httpbin): - purged_headers = ('Transfer-Encoding', 'Content-Type') + purged_headers = ("Transfer-Encoding", "Content-Type") ses = requests.Session() - req = requests.Request('POST', httpbin('post'), data=(b'x' for x in range(1))) + req = requests.Request("POST", httpbin("post"), data=(b"x" for x in range(1))) prep = ses.prepare_request(req) - assert 'Transfer-Encoding' in prep.headers + assert "Transfer-Encoding" in prep.headers # Create Response to avoid https://github.com/kevin1024/pytest-httpbin/issues/33 resp = requests.Response() - resp.raw = io.BytesIO(b'the content') + resp.raw = io.BytesIO(b"the content") resp.request = prep - setattr(resp.raw, 'release_conn', lambda *args: args) + setattr(resp.raw, "release_conn", lambda *args: args) # Mimic a redirect response resp.status_code = 302 - resp.headers['location'] = httpbin('get') + resp.headers["location"] = httpbin("get") # Run request through resolve_redirect next_resp = next(ses.resolve_redirects(resp, prep)) @@ -321,94 +345,93 @@ def test_transfer_enc_removal_on_redirect(self, httpbin): def test_fragment_maintained_on_redirect(self, httpbin): fragment = "#view=edit&token=hunter2" - r = requests.get(httpbin('redirect-to?url=get')+fragment) + r = requests.get(httpbin("redirect-to?url=get") + fragment) assert len(r.history) > 0 - assert r.history[0].request.url == httpbin('redirect-to?url=get')+fragment - assert r.url == httpbin('get')+fragment + assert r.history[0].request.url == httpbin("redirect-to?url=get") + fragment + assert r.url == httpbin("get") + fragment def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin): - heads = {'User-agent': 'Mozilla/5.0'} + heads = {"User-agent": "Mozilla/5.0"} - r = requests.get(httpbin('user-agent'), headers=heads) + r = requests.get(httpbin("user-agent"), headers=heads) - assert heads['User-agent'] in r.text + assert heads["User-agent"] in r.text assert r.status_code == 200 def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin): - heads = {'User-agent': 'Mozilla/5.0'} + heads = {"User-agent": "Mozilla/5.0"} - r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads) + r = requests.get( + httpbin("get") + "?test=true", params={"q": "test"}, headers=heads + ) assert r.status_code == 200 def test_set_cookie_on_301(self, httpbin): s = requests.session() - url = 
httpbin('cookies/set?foo=bar') + url = httpbin("cookies/set?foo=bar") s.get(url) - assert s.cookies['foo'] == 'bar' + assert s.cookies["foo"] == "bar" def test_cookie_sent_on_redirect(self, httpbin): s = requests.session() - s.get(httpbin('cookies/set?foo=bar')) - r = s.get(httpbin('redirect/1')) # redirects to httpbin('get') - assert 'Cookie' in r.json()['headers'] + s.get(httpbin("cookies/set?foo=bar")) + r = s.get(httpbin("redirect/1")) # redirects to httpbin('get') + assert "Cookie" in r.json()["headers"] def test_cookie_removed_on_expire(self, httpbin): s = requests.session() - s.get(httpbin('cookies/set?foo=bar')) - assert s.cookies['foo'] == 'bar' + s.get(httpbin("cookies/set?foo=bar")) + assert s.cookies["foo"] == "bar" s.get( - httpbin('response-headers'), - params={ - 'Set-Cookie': - 'foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT' - } + httpbin("response-headers"), + params={"Set-Cookie": "foo=deleted; expires=Thu, 01-Jan-1970 00:00:01 GMT"}, ) - assert 'foo' not in s.cookies + assert "foo" not in s.cookies def test_cookie_quote_wrapped(self, httpbin): s = requests.session() s.get(httpbin('cookies/set?foo="bar:baz"')) - assert s.cookies['foo'] == '"bar:baz"' + assert s.cookies["foo"] == '"bar:baz"' def test_cookie_persists_via_api(self, httpbin): s = requests.session() - r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'}) - assert 'foo' in r.request.headers['Cookie'] - assert 'foo' in r.history[0].request.headers['Cookie'] + r = s.get(httpbin("redirect/1"), cookies={"foo": "bar"}) + assert "foo" in r.request.headers["Cookie"] + assert "foo" in r.history[0].request.headers["Cookie"] def test_request_cookie_overrides_session_cookie(self, httpbin): s = requests.session() - s.cookies['foo'] = 'bar' - r = s.get(httpbin('cookies'), cookies={'foo': 'baz'}) - assert r.json()['cookies']['foo'] == 'baz' + s.cookies["foo"] = "bar" + r = s.get(httpbin("cookies"), cookies={"foo": "baz"}) + assert r.json()["cookies"]["foo"] == "baz" # Session cookie should not be modified - assert s.cookies['foo'] == 'bar' + assert s.cookies["foo"] == "bar" def test_request_cookies_not_persisted(self, httpbin): s = requests.session() - s.get(httpbin('cookies'), cookies={'foo': 'baz'}) + s.get(httpbin("cookies"), cookies={"foo": "baz"}) # Sending a request with cookies should not add cookies to the session assert not s.cookies def test_generic_cookiejar_works(self, httpbin): cj = cookielib.CookieJar() - cookiejar_from_dict({'foo': 'bar'}, cj) + cookiejar_from_dict({"foo": "bar"}, cj) s = requests.session() s.cookies = cj - r = s.get(httpbin('cookies')) + r = s.get(httpbin("cookies")) # Make sure the cookie was sent - assert r.json()['cookies']['foo'] == 'bar' + assert r.json()["cookies"]["foo"] == "bar" # Make sure the session cj is still the custom one assert s.cookies is cj def test_param_cookiejar_works(self, httpbin): cj = cookielib.CookieJar() - cookiejar_from_dict({'foo': 'bar'}, cj) + cookiejar_from_dict({"foo": "bar"}, cj) s = requests.session() - r = s.get(httpbin('cookies'), cookies=cj) + r = s.get(httpbin("cookies"), cookies=cj) # Make sure the cookie was sent - assert r.json()['cookies']['foo'] == 'bar' + assert r.json()["cookies"]["foo"] == "bar" def test_cookielib_cookiejar_on_redirect(self, httpbin): """Tests resolve_redirect doesn't fail when merging cookies @@ -416,18 +439,18 @@ def test_cookielib_cookiejar_on_redirect(self, httpbin): See GH #3579 """ - cj = cookiejar_from_dict({'foo': 'bar'}, cookielib.CookieJar()) + cj = cookiejar_from_dict({"foo": "bar"}, cookielib.CookieJar()) s = 
requests.Session() - s.cookies = cookiejar_from_dict({'cookie': 'tasty'}) + s.cookies = cookiejar_from_dict({"cookie": "tasty"}) # Prepare request without using Session - req = requests.Request('GET', httpbin('headers'), cookies=cj) + req = requests.Request("GET", httpbin("headers"), cookies=cj) prep_req = req.prepare() # Send request and simulate redirect resp = s.send(prep_req) resp.status_code = 302 - resp.headers['location'] = httpbin('get') + resp.headers["location"] = httpbin("get") redirects = s.resolve_redirects(resp, prep_req) resp = next(redirects) @@ -439,70 +462,70 @@ def test_cookielib_cookiejar_on_redirect(self, httpbin): cookies = {} for c in resp.request._cookies: cookies[c.name] = c.value - assert cookies['foo'] == 'bar' - assert cookies['cookie'] == 'tasty' + assert cookies["foo"] == "bar" + assert cookies["cookie"] == "tasty" def test_requests_in_history_are_not_overridden(self, httpbin): - resp = requests.get(httpbin('redirect/3')) + resp = requests.get(httpbin("redirect/3")) urls = [r.url for r in resp.history] req_urls = [r.request.url for r in resp.history] assert urls == req_urls def test_history_is_always_a_list(self, httpbin): """Show that even with redirects, Response.history is always a list.""" - resp = requests.get(httpbin('get')) + resp = requests.get(httpbin("get")) assert isinstance(resp.history, list) - resp = requests.get(httpbin('redirect/1')) + resp = requests.get(httpbin("redirect/1")) assert isinstance(resp.history, list) assert not isinstance(resp.history, tuple) def test_headers_on_session_with_None_are_not_sent(self, httpbin): """Do not send headers in Session.headers with None values.""" ses = requests.Session() - ses.headers['Accept-Encoding'] = None - req = requests.Request('GET', httpbin('get')) + ses.headers["Accept-Encoding"] = None + req = requests.Request("GET", httpbin("get")) prep = ses.prepare_request(req) - assert 'Accept-Encoding' not in prep.headers + assert "Accept-Encoding" not in prep.headers def test_headers_preserve_order(self, httpbin): """Preserve order when headers provided as OrderedDict.""" ses = requests.Session() ses.headers = collections.OrderedDict() - ses.headers['Accept-Encoding'] = 'identity' - ses.headers['First'] = '1' - ses.headers['Second'] = '2' - headers = collections.OrderedDict([('Third', '3'), ('Fourth', '4')]) - headers['Fifth'] = '5' - headers['Second'] = '222' - req = requests.Request('GET', httpbin('get'), headers=headers) + ses.headers["Accept-Encoding"] = "identity" + ses.headers["First"] = "1" + ses.headers["Second"] = "2" + headers = collections.OrderedDict([("Third", "3"), ("Fourth", "4")]) + headers["Fifth"] = "5" + headers["Second"] = "222" + req = requests.Request("GET", httpbin("get"), headers=headers) prep = ses.prepare_request(req) items = list(prep.headers.items()) - assert items[0] == ('Accept-Encoding', 'identity') - assert items[1] == ('First', '1') - assert items[2] == ('Second', '222') - assert items[3] == ('Third', '3') - assert items[4] == ('Fourth', '4') - assert items[5] == ('Fifth', '5') - - @pytest.mark.parametrize('key', ('User-agent', 'user-agent')) + assert items[0] == ("Accept-Encoding", "identity") + assert items[1] == ("First", "1") + assert items[2] == ("Second", "222") + assert items[3] == ("Third", "3") + assert items[4] == ("Fourth", "4") + assert items[5] == ("Fifth", "5") + + @pytest.mark.parametrize("key", ("User-agent", "user-agent")) def test_user_agent_transfers(self, httpbin, key): - heads = {key: 'Mozilla/5.0 (github.com/psf/requests)'} + heads = {key: "Mozilla/5.0 
(github.com/psf/requests)"} - r = requests.get(httpbin('user-agent'), headers=heads) + r = requests.get(httpbin("user-agent"), headers=heads) assert heads[key] in r.text def test_HTTP_200_OK_HEAD(self, httpbin): - r = requests.head(httpbin('get')) + r = requests.head(httpbin("get")) assert r.status_code == 200 def test_HTTP_200_OK_PUT(self, httpbin): - r = requests.put(httpbin('put')) + r = requests.put(httpbin("put")) assert r.status_code == 200 def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin): - auth = ('user', 'pass') - url = httpbin('basic-auth', 'user', 'pass') + auth = ("user", "pass") + url = httpbin("basic-auth", "user", "pass") r = requests.get(url, auth=auth) assert r.status_code == 200 @@ -516,40 +539,44 @@ def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin): assert r.status_code == 200 @pytest.mark.parametrize( - 'username, password', ( - ('user', 'pass'), - ('имя'.encode('utf-8'), 'пароль'.encode('utf-8')), + "username, password", + ( + ("user", "pass"), + ("имя".encode(), "пароль".encode()), (42, 42), (None, None), - )) + ), + ) def test_set_basicauth(self, httpbin, username, password): auth = (username, password) - url = httpbin('get') + url = httpbin("get") - r = requests.Request('GET', url, auth=auth) + r = requests.Request("GET", url, auth=auth) p = r.prepare() - assert p.headers['Authorization'] == _basic_auth_str(username, password) + assert p.headers["Authorization"] == _basic_auth_str(username, password) def test_basicauth_encodes_byte_strings(self): """Ensure b'test' formats as the byte string "test" rather than the unicode string "b'test'" in Python 3. """ - auth = (b'\xc5\xafsername', b'test\xc6\xb6') - r = requests.Request('GET', 'http://localhost', auth=auth) + auth = (b"\xc5\xafsername", b"test\xc6\xb6") + r = requests.Request("GET", "http://localhost", auth=auth) p = r.prepare() - assert p.headers['Authorization'] == 'Basic xa9zZXJuYW1lOnRlc3TGtg==' + assert p.headers["Authorization"] == "Basic xa9zZXJuYW1lOnRlc3TGtg==" @pytest.mark.parametrize( - 'url, exception', ( + "url, exception", + ( # Connecting to an unknown domain should raise a ConnectionError - ('http://doesnotexist.google.com', ConnectionError), + ("http://doesnotexist.google.com", ConnectionError), # Connecting to an invalid port should raise a ConnectionError - ('http://localhost:1', ConnectionError), + ("http://localhost:1", ConnectionError), # Inputing a URL that cannot be parsed should raise an InvalidURL error - ('http://fe80::5054:ff:fe5a:fc0', InvalidURL) - )) + ("http://fe80::5054:ff:fe5a:fc0", InvalidURL), + ), + ) def test_errors(self, url, exception): with pytest.raises(exception): requests.get(url, timeout=1) @@ -557,33 +584,35 @@ def test_errors(self, url, exception): def test_proxy_error(self): # any proxy related error (address resolution, no route to host, etc) should result in a ProxyError with pytest.raises(ProxyError): - requests.get('http://localhost:1', proxies={'http': 'non-resolvable-address'}) + requests.get( + "http://localhost:1", proxies={"http": "non-resolvable-address"} + ) def test_proxy_error_on_bad_url(self, httpbin, httpbin_secure): with pytest.raises(InvalidProxyURL): - requests.get(httpbin_secure(), proxies={'https': 'http:/badproxyurl:3128'}) + requests.get(httpbin_secure(), proxies={"https": "http:/badproxyurl:3128"}) with pytest.raises(InvalidProxyURL): - requests.get(httpbin(), proxies={'http': 'http://:8080'}) + requests.get(httpbin(), proxies={"http": "http://:8080"}) with pytest.raises(InvalidProxyURL): - requests.get(httpbin_secure(), 
proxies={'https': 'https://'}) + requests.get(httpbin_secure(), proxies={"https": "https://"}) with pytest.raises(InvalidProxyURL): - requests.get(httpbin(), proxies={'http': 'http:///example.com:8080'}) + requests.get(httpbin(), proxies={"http": "http:///example.com:8080"}) def test_respect_proxy_env_on_send_self_prepared_request(self, httpbin): with override_environ(http_proxy=INVALID_PROXY): with pytest.raises(ProxyError): session = requests.Session() - request = requests.Request('GET', httpbin()) + request = requests.Request("GET", httpbin()) session.send(request.prepare()) def test_respect_proxy_env_on_send_session_prepared_request(self, httpbin): with override_environ(http_proxy=INVALID_PROXY): with pytest.raises(ProxyError): session = requests.Session() - request = requests.Request('GET', httpbin()) + request = requests.Request("GET", httpbin()) prepared = session.prepare_request(request) session.send(prepared) @@ -591,9 +620,9 @@ def test_respect_proxy_env_on_send_with_redirects(self, httpbin): with override_environ(http_proxy=INVALID_PROXY): with pytest.raises(ProxyError): session = requests.Session() - url = httpbin('redirect/1') + url = httpbin("redirect/1") print(url) - request = requests.Request('GET', url) + request = requests.Request("GET", url) session.send(request.prepare()) def test_respect_proxy_env_on_get(self, httpbin): @@ -606,27 +635,29 @@ def test_respect_proxy_env_on_request(self, httpbin): with override_environ(http_proxy=INVALID_PROXY): with pytest.raises(ProxyError): session = requests.Session() - session.request(method='GET', url=httpbin()) + session.request(method="GET", url=httpbin()) def test_proxy_authorization_preserved_on_request(self, httpbin): proxy_auth_value = "Bearer XXX" session = requests.Session() session.headers.update({"Proxy-Authorization": proxy_auth_value}) - resp = session.request(method='GET', url=httpbin('get')) - sent_headers = resp.json().get('headers', {}) + resp = session.request(method="GET", url=httpbin("get")) + sent_headers = resp.json().get("headers", {}) assert sent_headers.get("Proxy-Authorization") == proxy_auth_value def test_basicauth_with_netrc(self, httpbin): - auth = ('user', 'pass') - wrong_auth = ('wronguser', 'wrongpass') - url = httpbin('basic-auth', 'user', 'pass') + auth = ("user", "pass") + wrong_auth = ("wronguser", "wrongpass") + url = httpbin("basic-auth", "user", "pass") old_auth = requests.sessions.get_netrc_auth try: + def get_netrc_auth_mock(url): return auth + requests.sessions.get_netrc_auth = get_netrc_auth_mock # Should use netrc and work. 
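The hunk above swaps a stub in for ``requests.sessions.get_netrc_auth``; only the start of that pattern is visible before the next hunk begins. A minimal self-contained sketch of the full override-and-restore pattern the test relies on (the ``httpbin`` fixture and the credentials here are assumptions mirroring the surrounding test, not part of the diff):

    import requests

    def test_basicauth_with_netrc_sketch(httpbin):
        auth = ("user", "pass")
        url = httpbin("basic-auth", "user", "pass")
        old_auth = requests.sessions.get_netrc_auth
        try:
            # Stub the netrc lookup so any URL resolves to known credentials.
            requests.sessions.get_netrc_auth = lambda url: auth
            # No explicit auth is passed, so the session falls back to "netrc".
            assert requests.get(url).status_code == 200
        finally:
            # Always restore the real lookup so later tests are unaffected.
            requests.sessions.get_netrc_auth = old_auth
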
@@ -653,28 +684,28 @@ def get_netrc_auth_mock(url): def test_DIGEST_HTTP_200_OK_GET(self, httpbin): for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'pass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype, 'never') + auth = HTTPDigestAuth("user", "pass") + url = httpbin("digest-auth", "auth", "user", "pass", authtype, "never") r = requests.get(url, auth=auth) assert r.status_code == 200 r = requests.get(url) assert r.status_code == 401 - print(r.headers['WWW-Authenticate']) + print(r.headers["WWW-Authenticate"]) s = requests.session() - s.auth = HTTPDigestAuth('user', 'pass') + s.auth = HTTPDigestAuth("user", "pass") r = s.get(url) assert r.status_code == 200 def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin): for authtype in self.digest_auth_algo: - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) - auth = HTTPDigestAuth('user', 'pass') + url = httpbin("digest-auth", "auth", "user", "pass", authtype) + auth = HTTPDigestAuth("user", "pass") r = requests.get(url) - assert r.cookies['fake'] == 'fake_value' + assert r.cookies["fake"] == "fake_value" r = requests.get(url, auth=auth) assert r.status_code == 200 @@ -682,29 +713,29 @@ def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin): def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin): for authtype in self.digest_auth_algo: - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) - auth = HTTPDigestAuth('user', 'pass') + url = httpbin("digest-auth", "auth", "user", "pass", authtype) + auth = HTTPDigestAuth("user", "pass") s = requests.Session() s.get(url, auth=auth) - assert s.cookies['fake'] == 'fake_value' + assert s.cookies["fake"] == "fake_value" def test_DIGEST_STREAM(self, httpbin): for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'pass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) + auth = HTTPDigestAuth("user", "pass") + url = httpbin("digest-auth", "auth", "user", "pass", authtype) r = requests.get(url, auth=auth, stream=True) - assert r.raw.read() != b'' + assert r.raw.read() != b"" r = requests.get(url, auth=auth, stream=False) - assert r.raw.read() == b'' + assert r.raw.read() == b"" def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin): for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'wrongpass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) + auth = HTTPDigestAuth("user", "wrongpass") + url = httpbin("digest-auth", "auth", "user", "pass", authtype) r = requests.get(url, auth=auth) assert r.status_code == 401 @@ -720,41 +751,39 @@ def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin): def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin): for authtype in self.digest_auth_algo: - auth = HTTPDigestAuth('user', 'pass') - url = httpbin('digest-auth', 'auth', 'user', 'pass', authtype) + auth = HTTPDigestAuth("user", "pass") + url = httpbin("digest-auth", "auth", "user", "pass", authtype) r = requests.get(url, auth=auth) - assert '"auth"' in r.request.headers['Authorization'] + assert '"auth"' in r.request.headers["Authorization"] def test_POSTBIN_GET_POST_FILES(self, httpbin): - url = httpbin('post') + url = httpbin("post") requests.post(url).raise_for_status() - post1 = requests.post(url, data={'some': 'data'}) + post1 = requests.post(url, data={"some": "data"}) assert post1.status_code == 200 - with open('requirements-dev.txt') as f: - post2 = requests.post(url, files={'some': f}) + with open("requirements-dev.txt") as f: + post2 = requests.post(url, files={"some": f}) assert 
post2.status_code == 200 post4 = requests.post(url, data='[{"some": "json"}]') assert post4.status_code == 200 with pytest.raises(ValueError): - requests.post(url, files=['bad file data']) + requests.post(url, files=["bad file data"]) def test_invalid_files_input(self, httpbin): - url = httpbin('post') - post = requests.post(url, - files={"random-file-1": None, "random-file-2": 1}) + url = httpbin("post") + post = requests.post(url, files={"random-file-1": None, "random-file-2": 1}) assert b'name="random-file-1"' not in post.request.body assert b'name="random-file-2"' in post.request.body def test_POSTBIN_SEEKED_OBJECT_WITH_NO_ITER(self, httpbin): - - class TestStream(object): + class TestStream: def __init__(self, data): self.data = data.encode() self.length = len(self.data) @@ -765,10 +794,10 @@ def __len__(self): def read(self, size=None): if size: - ret = self.data[self.index:self.index + size] + ret = self.data[self.index : self.index + size] self.index += size else: - ret = self.data[self.index:] + ret = self.data[self.index :] self.index = self.length return ret @@ -783,34 +812,34 @@ def seek(self, offset, where=0): elif where == 2: self.index = self.length + offset - test = TestStream('test') - post1 = requests.post(httpbin('post'), data=test) + test = TestStream("test") + post1 = requests.post(httpbin("post"), data=test) assert post1.status_code == 200 - assert post1.json()['data'] == 'test' + assert post1.json()["data"] == "test" - test = TestStream('test') + test = TestStream("test") test.seek(2) - post2 = requests.post(httpbin('post'), data=test) + post2 = requests.post(httpbin("post"), data=test) assert post2.status_code == 200 - assert post2.json()['data'] == 'st' + assert post2.json()["data"] == "st" def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin): - url = httpbin('post') + url = httpbin("post") requests.post(url).raise_for_status() - post1 = requests.post(url, data={'some': 'data'}) + post1 = requests.post(url, data={"some": "data"}) assert post1.status_code == 200 - with open('requirements-dev.txt') as f: - post2 = requests.post(url, data={'some': 'data'}, files={'some': f}) + with open("requirements-dev.txt") as f: + post2 = requests.post(url, data={"some": "data"}, files={"some": f}) assert post2.status_code == 200 post4 = requests.post(url, data='[{"some": "json"}]') assert post4.status_code == 200 with pytest.raises(ValueError): - requests.post(url, files=['bad file data']) + requests.post(url, files=["bad file data"]) def test_post_with_custom_mapping(self, httpbin): class CustomMapping(MutableMapping): @@ -832,128 +861,142 @@ def __iter__(self): def __len__(self): return len(self.data) - data = CustomMapping({'some': 'data'}) - url = httpbin('post') - found_json = requests.post(url, data=data).json().get('form') - assert found_json == {'some': 'data'} + data = CustomMapping({"some": "data"}) + url = httpbin("post") + found_json = requests.post(url, data=data).json().get("form") + assert found_json == {"some": "data"} def test_conflicting_post_params(self, httpbin): - url = httpbin('post') - with open('requirements-dev.txt') as f: + url = httpbin("post") + with open("requirements-dev.txt") as f: with pytest.raises(ValueError): - requests.post(url, data='[{"some": "data"}]', files={'some': f}) + requests.post(url, data='[{"some": "data"}]', files={"some": f}) def test_request_ok_set(self, httpbin): - r = requests.get(httpbin('status', '404')) + r = requests.get(httpbin("status", "404")) assert not r.ok def test_status_raising(self, httpbin): - r = 
requests.get(httpbin('status', '404')) + r = requests.get(httpbin("status", "404")) with pytest.raises(requests.exceptions.HTTPError): r.raise_for_status() - r = requests.get(httpbin('status', '500')) + r = requests.get(httpbin("status", "500")) assert not r.ok def test_decompress_gzip(self, httpbin): - r = requests.get(httpbin('gzip')) - r.content.decode('ascii') + r = requests.get(httpbin("gzip")) + r.content.decode("ascii") @pytest.mark.parametrize( - 'url, params', ( - ('/get', {'foo': 'føø'}), - ('/get', {'føø': 'føø'}), - ('/get', {'føø': 'føø'}), - ('/get', {'foo': 'foo'}), - ('ø', {'foo': 'foo'}), - )) + "url, params", + ( + ("/get", {"foo": "føø"}), + ("/get", {"føø": "føø"}), + ("/get", {"føø": "føø"}), + ("/get", {"foo": "foo"}), + ("ø", {"foo": "foo"}), + ), + ) def test_unicode_get(self, httpbin, url, params): requests.get(httpbin(url), params=params) def test_unicode_header_name(self, httpbin): requests.put( - httpbin('put'), - headers={str('Content-Type'): 'application/octet-stream'}, - data='\xff') # compat.str is unicode. + httpbin("put"), + headers={"Content-Type": "application/octet-stream"}, + data="\xff", + ) # compat.str is unicode. def test_pyopenssl_redirect(self, httpbin_secure, httpbin_ca_bundle): - requests.get(httpbin_secure('status', '301'), verify=httpbin_ca_bundle) + requests.get(httpbin_secure("status", "301"), verify=httpbin_ca_bundle) def test_invalid_ca_certificate_path(self, httpbin_secure): - INVALID_PATH = '/garbage' + INVALID_PATH = "/garbage" with pytest.raises(IOError) as e: requests.get(httpbin_secure(), verify=INVALID_PATH) - assert str(e.value) == 'Could not find a suitable TLS CA certificate bundle, invalid path: {}'.format(INVALID_PATH) + assert str( + e.value + ) == "Could not find a suitable TLS CA certificate bundle, invalid path: {}".format( + INVALID_PATH + ) def test_invalid_ssl_certificate_files(self, httpbin_secure): - INVALID_PATH = '/garbage' + INVALID_PATH = "/garbage" with pytest.raises(IOError) as e: requests.get(httpbin_secure(), cert=INVALID_PATH) - assert str(e.value) == 'Could not find the TLS certificate file, invalid path: {}'.format(INVALID_PATH) + assert str( + e.value + ) == "Could not find the TLS certificate file, invalid path: {}".format( + INVALID_PATH + ) with pytest.raises(IOError) as e: - requests.get(httpbin_secure(), cert=('.', INVALID_PATH)) - assert str(e.value) == 'Could not find the TLS key file, invalid path: {}'.format(INVALID_PATH) + requests.get(httpbin_secure(), cert=(".", INVALID_PATH)) + assert str(e.value) == ( + f"Could not find the TLS key file, invalid path: {INVALID_PATH}" + ) @pytest.mark.parametrize( - 'env, expected', ( + "env, expected", + ( ({}, True), - ({'REQUESTS_CA_BUNDLE': '/some/path'}, '/some/path'), - ({'REQUESTS_CA_BUNDLE': ''}, True), - ({'CURL_CA_BUNDLE': '/some/path'}, '/some/path'), - ({'CURL_CA_BUNDLE': ''}, True), - ({'REQUESTS_CA_BUNDLE': '', 'CURL_CA_BUNDLE': ''}, True), + ({"REQUESTS_CA_BUNDLE": "/some/path"}, "/some/path"), + ({"REQUESTS_CA_BUNDLE": ""}, True), + ({"CURL_CA_BUNDLE": "/some/path"}, "/some/path"), + ({"CURL_CA_BUNDLE": ""}, True), + ({"REQUESTS_CA_BUNDLE": "", "CURL_CA_BUNDLE": ""}, True), ( { - 'REQUESTS_CA_BUNDLE': '/some/path', - 'CURL_CA_BUNDLE': '/curl/path', + "REQUESTS_CA_BUNDLE": "/some/path", + "CURL_CA_BUNDLE": "/curl/path", }, - '/some/path', + "/some/path", ), ( { - 'REQUESTS_CA_BUNDLE': '', - 'CURL_CA_BUNDLE': '/curl/path', + "REQUESTS_CA_BUNDLE": "", + "CURL_CA_BUNDLE": "/curl/path", }, - '/curl/path', + "/curl/path", ), - ) + ), ) def 
test_env_cert_bundles(self, httpbin, mocker, env, expected): s = requests.Session() - mocker.patch('os.environ', env) + mocker.patch("os.environ", env) settings = s.merge_environment_settings( - url=httpbin('get'), - proxies={}, - stream=False, - verify=True, - cert=None + url=httpbin("get"), proxies={}, stream=False, verify=True, cert=None ) - assert settings['verify'] == expected + assert settings["verify"] == expected def test_http_with_certificate(self, httpbin): - r = requests.get(httpbin(), cert='.') + r = requests.get(httpbin(), cert=".") assert r.status_code == 200 def test_https_warnings(self, nosan_server): """warnings are emitted with requests.get""" host, port, ca_bundle = nosan_server if HAS_MODERN_SSL or HAS_PYOPENSSL: - warnings_expected = ('SubjectAltNameWarning', ) + warnings_expected = ("SubjectAltNameWarning",) else: - warnings_expected = ('SNIMissingWarning', - 'InsecurePlatformWarning', - 'SubjectAltNameWarning', ) + warnings_expected = ( + "SNIMissingWarning", + "InsecurePlatformWarning", + "SubjectAltNameWarning", + ) with pytest.warns(None) as warning_records: - warnings.simplefilter('always') - requests.get("https://localhost:{}/".format(port), verify=ca_bundle) + warnings.simplefilter("always") + requests.get(f"https://localhost:{port}/", verify=ca_bundle) - warning_records = [item for item in warning_records - if item.category.__name__ != 'ResourceWarning'] + warning_records = [ + item + for item in warning_records + if item.category.__name__ != "ResourceWarning" + ] - warnings_category = tuple( - item.category.__name__ for item in warning_records) + warnings_category = tuple(item.category.__name__ for item in warning_records) assert warnings_category == warnings_expected def test_certificate_failure(self, httpbin_secure): @@ -963,83 +1006,96 @@ def test_certificate_failure(self, httpbin_secure): with pytest.raises(RequestsSSLError): # Our local httpbin does not have a trusted CA, so this call will # fail if we use our default trust bundle. 
- requests.get(httpbin_secure('status', '200')) + requests.get(httpbin_secure("status", "200")) def test_urlencoded_get_query_multivalued_param(self, httpbin): - r = requests.get(httpbin('get'), params={'test': ['foo', 'baz']}) + r = requests.get(httpbin("get"), params={"test": ["foo", "baz"]}) assert r.status_code == 200 - assert r.url == httpbin('get?test=foo&test=baz') + assert r.url == httpbin("get?test=foo&test=baz") def test_form_encoded_post_query_multivalued_element(self, httpbin): - r = requests.Request(method='POST', url=httpbin('post'), - data=dict(test=['foo', 'baz'])) + r = requests.Request( + method="POST", url=httpbin("post"), data=dict(test=["foo", "baz"]) + ) prep = r.prepare() - assert prep.body == 'test=foo&test=baz' + assert prep.body == "test=foo&test=baz" def test_different_encodings_dont_break_post(self, httpbin): - r = requests.post(httpbin('post'), - data={'stuff': json.dumps({'a': 123})}, - params={'blah': 'asdf1234'}, - files={'file': ('test_requests.py', open(__file__, 'rb'))}) + r = requests.post( + httpbin("post"), + data={"stuff": json.dumps({"a": 123})}, + params={"blah": "asdf1234"}, + files={"file": ("test_requests.py", open(__file__, "rb"))}, + ) assert r.status_code == 200 @pytest.mark.parametrize( - 'data', ( - {'stuff': 'ëlïxr'}, - {'stuff': 'ëlïxr'.encode('utf-8')}, - {'stuff': 'elixr'}, - {'stuff': 'elixr'.encode('utf-8')}, - )) + "data", + ( + {"stuff": "ëlïxr"}, + {"stuff": "ëlïxr".encode()}, + {"stuff": "elixr"}, + {"stuff": b"elixr"}, + ), + ) def test_unicode_multipart_post(self, httpbin, data): - r = requests.post(httpbin('post'), + r = requests.post( + httpbin("post"), data=data, - files={'file': ('test_requests.py', open(__file__, 'rb'))}) + files={"file": ("test_requests.py", open(__file__, "rb"))}, + ) assert r.status_code == 200 def test_unicode_multipart_post_fieldnames(self, httpbin): - filename = os.path.splitext(__file__)[0] + '.py' + filename = os.path.splitext(__file__)[0] + ".py" r = requests.Request( - method='POST', url=httpbin('post'), - data={'stuff'.encode('utf-8'): 'elixr'}, - files={'file': ('test_requests.py', open(filename, 'rb'))}) + method="POST", + url=httpbin("post"), + data={b"stuff": "elixr"}, + files={"file": ("test_requests.py", open(filename, "rb"))}, + ) prep = r.prepare() assert b'name="stuff"' in prep.body - assert b'name="b\'stuff\'"' not in prep.body + assert b"name=\"b'stuff'\"" not in prep.body def test_unicode_method_name(self, httpbin): - files = {'file': open(__file__, 'rb')} - r = requests.request( - method='POST', url=httpbin('post'), files=files) + files = {"file": open(__file__, "rb")} + r = requests.request(method="POST", url=httpbin("post"), files=files) assert r.status_code == 200 def test_unicode_method_name_with_request_object(self, httpbin): - files = {'file': open(__file__, 'rb')} + files = {"file": open(__file__, "rb")} s = requests.Session() - req = requests.Request('POST', httpbin('post'), files=files) + req = requests.Request("POST", httpbin("post"), files=files) prep = s.prepare_request(req) assert isinstance(prep.method, builtin_str) - assert prep.method == 'POST' + assert prep.method == "POST" resp = s.send(prep) assert resp.status_code == 200 def test_non_prepared_request_error(self): s = requests.Session() - req = requests.Request('POST', '/') + req = requests.Request("POST", "/") with pytest.raises(ValueError) as e: s.send(req) - assert str(e.value) == 'You can only send PreparedRequests.' + assert str(e.value) == "You can only send PreparedRequests." 
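Since ``Session.send`` accepts only ``PreparedRequest`` objects (the ValueError asserted just above), here is a minimal sketch of the intended two-step prepare/send flow; the URL and payload are placeholders, not values from the diff:

    import requests

    s = requests.Session()
    req = requests.Request("POST", "https://example.com/post", json={"life": 42})

    # prepare_request() merges session-level headers, cookies and auth
    # into a PreparedRequest that send() will accept.
    prep = s.prepare_request(req)
    assert prep.method == "POST"
    # s.send(prep) transmits it; s.send(req) raises
    # ValueError("You can only send PreparedRequests.")
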
def test_custom_content_type(self, httpbin): r = requests.post( - httpbin('post'), - data={'stuff': json.dumps({'a': 123})}, + httpbin("post"), + data={"stuff": json.dumps({"a": 123})}, files={ - 'file1': ('test_requests.py', open(__file__, 'rb')), - 'file2': ('test_requests', open(__file__, 'rb'), - 'text/py-content-type')}) + "file1": ("test_requests.py", open(__file__, "rb")), + "file2": ( + "test_requests", + open(__file__, "rb"), + "text/py-content-type", + ), + }, + ) assert r.status_code == 200 assert b"text/py-content-type" in r.request.body @@ -1049,50 +1105,56 @@ def hook(resp, **kwargs): assert kwargs != {} s = requests.Session() - r = requests.Request('GET', httpbin(), hooks={'response': hook}) + r = requests.Request("GET", httpbin(), hooks={"response": hook}) prep = s.prepare_request(r) s.send(prep) def test_session_hooks_are_used_with_no_request_hooks(self, httpbin): - hook = lambda x, *args, **kwargs: x + def hook(*args, **kwargs): + pass + s = requests.Session() - s.hooks['response'].append(hook) - r = requests.Request('GET', httpbin()) + s.hooks["response"].append(hook) + r = requests.Request("GET", httpbin()) prep = s.prepare_request(r) - assert prep.hooks['response'] != [] - assert prep.hooks['response'] == [hook] + assert prep.hooks["response"] != [] + assert prep.hooks["response"] == [hook] def test_session_hooks_are_overridden_by_request_hooks(self, httpbin): - hook1 = lambda x, *args, **kwargs: x - hook2 = lambda x, *args, **kwargs: x + def hook1(*args, **kwargs): + pass + + def hook2(*args, **kwargs): + pass + assert hook1 is not hook2 s = requests.Session() - s.hooks['response'].append(hook2) - r = requests.Request('GET', httpbin(), hooks={'response': [hook1]}) + s.hooks["response"].append(hook2) + r = requests.Request("GET", httpbin(), hooks={"response": [hook1]}) prep = s.prepare_request(r) - assert prep.hooks['response'] == [hook1] + assert prep.hooks["response"] == [hook1] def test_prepared_request_hook(self, httpbin): def hook(resp, **kwargs): resp.hook_working = True return resp - req = requests.Request('GET', httpbin(), hooks={'response': hook}) + req = requests.Request("GET", httpbin(), hooks={"response": hook}) prep = req.prepare() s = requests.Session() s.proxies = getproxies() resp = s.send(prep) - assert hasattr(resp, 'hook_working') + assert hasattr(resp, "hook_working") def test_prepared_from_session(self, httpbin): class DummyAuth(requests.auth.AuthBase): def __call__(self, r): - r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok' + r.headers["Dummy-Auth-Test"] = "dummy-auth-test-ok" return r - req = requests.Request('GET', httpbin('headers')) + req = requests.Request("GET", httpbin("headers")) assert not req.auth s = requests.Session() @@ -1101,11 +1163,10 @@ def __call__(self, r): prep = s.prepare_request(req) resp = s.send(prep) - assert resp.json()['headers'][ - 'Dummy-Auth-Test'] == 'dummy-auth-test-ok' + assert resp.json()["headers"]["Dummy-Auth-Test"] == "dummy-auth-test-ok" def test_prepare_request_with_bytestring_url(self): - req = requests.Request('GET', b'https://httpbin.org/') + req = requests.Request("GET", b"https://httpbin.org/") s = requests.Session() prep = s.prepare_request(req) assert prep.url == "https://httpbin.org/" @@ -1113,61 +1174,63 @@ def test_prepare_request_with_bytestring_url(self): def test_request_with_bytestring_host(self, httpbin): s = requests.Session() resp = s.request( - 'GET', - httpbin('cookies/set?cookie=value'), + "GET", + httpbin("cookies/set?cookie=value"), allow_redirects=False, - headers={'Host': 
b'httpbin.org'} + headers={"Host": b"httpbin.org"}, ) - assert resp.cookies.get('cookie') == 'value' + assert resp.cookies.get("cookie") == "value" def test_links(self): r = requests.Response() r.headers = { - 'cache-control': 'public, max-age=60, s-maxage=60', - 'connection': 'keep-alive', - 'content-encoding': 'gzip', - 'content-type': 'application/json; charset=utf-8', - 'date': 'Sat, 26 Jan 2013 16:47:56 GMT', - 'etag': '"6ff6a73c0e446c1f61614769e3ceb778"', - 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT', - 'link': ('<https://api.github.com/users/kennethreitz/repos?' - 'page=2&per_page=10>; rel="next", <https://api.github.com/' - 'users/kennethreitz/repos?page=6&per_page=10>; rel="last"'), - 'server': 'GitHub.com', - 'status': '200 OK', - 'vary': 'Accept', - 'x-content-type-options': 'nosniff', - 'x-github-media-type': 'github.beta', - 'x-ratelimit-limit': '60', - 'x-ratelimit-remaining': '57' + "cache-control": "public, max-age=60, s-maxage=60", + "connection": "keep-alive", + "content-encoding": "gzip", + "content-type": "application/json; charset=utf-8", + "date": "Sat, 26 Jan 2013 16:47:56 GMT", + "etag": '"6ff6a73c0e446c1f61614769e3ceb778"', + "last-modified": "Sat, 26 Jan 2013 16:22:39 GMT", + "link": ( + "<https://api.github.com/users/kennethreitz/repos?" + 'page=2&per_page=10>; rel="next", <https://api.github.com/' + 'users/kennethreitz/repos?page=6&per_page=10>; rel="last"' + ), + "server": "GitHub.com", + "status": "200 OK", + "vary": "Accept", + "x-content-type-options": "nosniff", + "x-github-media-type": "github.beta", + "x-ratelimit-limit": "60", + "x-ratelimit-remaining": "57", } - assert r.links['next']['rel'] == 'next' + assert r.links["next"]["rel"] == "next" def test_cookie_parameters(self): - key = 'some_cookie' - value = 'some_value' + key = "some_cookie" + value = "some_value" secure = True - domain = 'test.com' - rest = {'HttpOnly': True} + domain = "test.com" + rest = {"HttpOnly": True} jar = requests.cookies.RequestsCookieJar() jar.set(key, value, secure=secure, domain=domain, rest=rest) assert len(jar) == 1 - assert 'some_cookie' in jar + assert "some_cookie" in jar cookie = list(jar)[0] assert cookie.secure == secure assert cookie.domain == domain - assert cookie._rest['HttpOnly'] == rest['HttpOnly'] + assert cookie._rest["HttpOnly"] == rest["HttpOnly"] def test_cookie_as_dict_keeps_len(self): - key = 'some_cookie' - value = 'some_value' + key = "some_cookie" + value = "some_value" - key1 = 'some_cookie1' - value1 = 'some_value1' + key1 = "some_cookie1" + value1 = "some_value1" jar = requests.cookies.RequestsCookieJar() jar.set(key, value) @@ -1183,11 +1246,11 @@ def test_cookie_as_dict_keeps_len(self): assert len(d3) == 2 def test_cookie_as_dict_keeps_items(self): - key = 'some_cookie' - value = 'some_value' + key = "some_cookie" + value = "some_value" - key1 = 'some_cookie1' - value1 = 'some_value1' + key1 = "some_cookie1" + value1 = "some_value1" jar = requests.cookies.RequestsCookieJar() jar.set(key, value) @@ -1197,16 +1260,16 @@ def test_cookie_as_dict_keeps_items(self): d2 = dict(jar.iteritems()) d3 = dict(jar.items()) - assert d1['some_cookie'] == 'some_value' - assert d2['some_cookie'] == 'some_value' - assert d3['some_cookie1'] == 'some_value1' + assert d1["some_cookie"] == "some_value" + assert d2["some_cookie"] == "some_value" + assert d3["some_cookie1"] == "some_value1" def test_cookie_as_dict_keys(self): - key = 'some_cookie' - value = 'some_value' + key = "some_cookie" + value = "some_value" - key1 = 'some_cookie1' - value1 = 'some_value1' + key1 = "some_cookie1" + value1 = "some_value1" jar = requests.cookies.RequestsCookieJar() jar.set(key, value) @@ -1218,11 +1281,11 @@ def test_cookie_as_dict_keys(self): assert list(keys) == list(keys) def test_cookie_as_dict_values(self): - key = 'some_cookie' - value = 
'some_value' + key = "some_cookie" + value = "some_value" - key1 = 'some_cookie1' - value1 = 'some_value1' + key1 = "some_cookie1" + value1 = "some_value1" jar = requests.cookies.RequestsCookieJar() jar.set(key, value) @@ -1234,11 +1297,11 @@ def test_cookie_as_dict_values(self): assert list(values) == list(values) def test_cookie_as_dict_items(self): - key = 'some_cookie' - value = 'some_value' + key = "some_cookie" + value = "some_value" - key1 = 'some_cookie1' - value1 = 'some_value1' + key1 = "some_cookie1" + value1 = "some_value1" jar = requests.cookies.RequestsCookieJar() jar.set(key, value) @@ -1250,10 +1313,10 @@ def test_cookie_as_dict_items(self): assert list(items) == list(items) def test_cookie_duplicate_names_different_domains(self): - key = 'some_cookie' - value = 'some_value' - domain1 = 'test1.com' - domain2 = 'test2.com' + key = "some_cookie" + value = "some_value" + domain1 = "test1.com" + domain2 = "test2.com" jar = requests.cookies.RequestsCookieJar() jar.set(key, value, domain=domain1) @@ -1271,9 +1334,9 @@ def test_cookie_duplicate_names_different_domains(self): assert cookie == value def test_cookie_duplicate_names_raises_cookie_conflict_error(self): - key = 'some_cookie' - value = 'some_value' - path = 'some_path' + key = "some_cookie" + value = "some_value" + path = "some_path" jar = requests.cookies.RequestsCookieJar() jar.set(key, value, path=path) @@ -1290,9 +1353,11 @@ class MyCookiePolicy(cookielib.DefaultCookiePolicy): assert isinstance(jar.copy().get_policy(), MyCookiePolicy) def test_time_elapsed_blank(self, httpbin): - r = requests.get(httpbin('get')) + r = requests.get(httpbin("get")) td = r.elapsed - total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6) + total_seconds = ( + td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6 + ) / 10**6 assert total_seconds > 0.0 def test_empty_response_has_content_none(self): @@ -1301,12 +1366,13 @@ def test_empty_response_has_content_none(self): def test_response_is_iterable(self): r = requests.Response() - io = StringIO.StringIO('abc') + io = StringIO.StringIO("abc") read_ = io.read def read_mock(amt, decode_content=None): return read_(amt) - setattr(io, 'read', read_mock) + + setattr(io, "read", read_mock) r.raw = io assert next(iter(r)) io.close() @@ -1317,24 +1383,24 @@ def test_response_decode_unicode(self): """ r = requests.Response() r._content_consumed = True - r._content = b'the content' - r.encoding = 'ascii' + r._content = b"the content" + r.encoding = "ascii" chunks = r.iter_content(decode_unicode=True) assert all(isinstance(chunk, str) for chunk in chunks) # also for streaming r = requests.Response() - r.raw = io.BytesIO(b'the content') - r.encoding = 'ascii' + r.raw = io.BytesIO(b"the content") + r.encoding = "ascii" chunks = r.iter_content(decode_unicode=True) assert all(isinstance(chunk, str) for chunk in chunks) def test_response_reason_unicode(self): # check for unicode HTTP status r = requests.Response() - r.url = 'unicode URL' - r.reason = 'Komponenttia ei löydy'.encode('utf-8') + r.url = "unicode URL" + r.reason = "Komponenttia ei löydy".encode() r.status_code = 404 r.encoding = None assert not r.ok # old behaviour - crashes here @@ -1342,9 +1408,9 @@ def test_response_reason_unicode(self): def test_response_reason_unicode_fallback(self): # check raise_status falls back to ISO-8859-1 r = requests.Response() - r.url = 'some url' - reason = 'Komponenttia ei löydy' - r.reason = reason.encode('latin-1') + r.url = "some url" + reason = "Komponenttia ei löydy" 
+ r.reason = reason.encode("latin-1") r.status_code = 500 r.encoding = None with pytest.raises(requests.exceptions.HTTPError) as e: @@ -1356,27 +1422,28 @@ def test_response_chunk_size_type(self): raise a TypeError. """ r = requests.Response() - r.raw = io.BytesIO(b'the content') + r.raw = io.BytesIO(b"the content") chunks = r.iter_content(1) assert all(len(chunk) == 1 for chunk in chunks) r = requests.Response() - r.raw = io.BytesIO(b'the content') + r.raw = io.BytesIO(b"the content") chunks = r.iter_content(None) - assert list(chunks) == [b'the content'] + assert list(chunks) == [b"the content"] r = requests.Response() - r.raw = io.BytesIO(b'the content') + r.raw = io.BytesIO(b"the content") with pytest.raises(TypeError): chunks = r.iter_content("1024") @pytest.mark.parametrize( - 'exception, args, expected', ( + "exception, args, expected", + ( (urllib3.exceptions.ProtocolError, tuple(), ChunkedEncodingError), (urllib3.exceptions.DecodeError, tuple(), ContentDecodingError), - (urllib3.exceptions.ReadTimeoutError, (None, '', ''), ConnectionError), + (urllib3.exceptions.ReadTimeoutError, (None, "", ""), ConnectionError), (urllib3.exceptions.SSLError, tuple(), RequestsSSLError), - ) + ), ) def test_iter_content_wraps_exceptions( self, httpbin, mocker, exception, args, expected @@ -1391,7 +1458,7 @@ def test_iter_content_wraps_exceptions( next(r.iter_content(1024)) def test_request_and_response_are_pickleable(self, httpbin): - r = requests.get(httpbin('get')) + r = requests.get(httpbin("get")) # verify we can pickle the original request assert pickle.loads(pickle.dumps(r.request)) @@ -1403,7 +1470,7 @@ def test_request_and_response_are_pickleable(self, httpbin): assert r.request.headers == pr.request.headers def test_prepared_request_is_pickleable(self, httpbin): - p = requests.Request('GET', httpbin('get')).prepare() + p = requests.Request("GET", httpbin("get")).prepare() # Verify PreparedRequest can be pickled and unpickled r = pickle.loads(pickle.dumps(p)) @@ -1417,8 +1484,8 @@ def test_prepared_request_is_pickleable(self, httpbin): assert resp.status_code == 200 def test_prepared_request_with_file_is_pickleable(self, httpbin): - files = {'file': open(__file__, 'rb')} - r = requests.Request('POST', httpbin('post'), files=files) + files = {"file": open(__file__, "rb")} + r = requests.Request("POST", httpbin("post"), files=files) p = r.prepare() # Verify PreparedRequest can be pickled and unpickled @@ -1433,7 +1500,7 @@ def test_prepared_request_with_file_is_pickleable(self, httpbin): assert resp.status_code == 200 def test_prepared_request_with_hook_is_pickleable(self, httpbin): - r = requests.Request('GET', httpbin('get'), hooks=default_hooks()) + r = requests.Request("GET", httpbin("get"), hooks=default_hooks()) p = r.prepare() # Verify PreparedRequest can be pickled @@ -1459,12 +1526,12 @@ def test_http_error(self): response = requests.Response() error = requests.exceptions.HTTPError(response=response) assert error.response == response - error = requests.exceptions.HTTPError('message', response=response) - assert str(error) == 'message' + error = requests.exceptions.HTTPError("message", response=response) + assert str(error) == "message" assert error.response == response def test_session_pickling(self, httpbin): - r = requests.Request('GET', httpbin('get')) + r = requests.Request("GET", httpbin("get")) s = requests.Session() s = pickle.loads(pickle.dumps(s)) @@ -1476,66 +1543,66 @@ def test_session_pickling(self, httpbin): def test_fixes_1329(self, httpbin): """Ensure that header 
updates are done case-insensitively.""" s = requests.Session() - s.headers.update({'ACCEPT': 'BOGUS'}) - s.headers.update({'accept': 'application/json'}) - r = s.get(httpbin('get')) + s.headers.update({"ACCEPT": "BOGUS"}) + s.headers.update({"accept": "application/json"}) + r = s.get(httpbin("get")) headers = r.request.headers - assert headers['accept'] == 'application/json' - assert headers['Accept'] == 'application/json' - assert headers['ACCEPT'] == 'application/json' + assert headers["accept"] == "application/json" + assert headers["Accept"] == "application/json" + assert headers["ACCEPT"] == "application/json" def test_uppercase_scheme_redirect(self, httpbin): - parts = urlparse(httpbin('html')) + parts = urlparse(httpbin("html")) url = "HTTP://" + parts.netloc + parts.path - r = requests.get(httpbin('redirect-to'), params={'url': url}) + r = requests.get(httpbin("redirect-to"), params={"url": url}) assert r.status_code == 200 assert r.url.lower() == url.lower() def test_transport_adapter_ordering(self): s = requests.Session() - order = ['https://', 'http://'] + order = ["https://", "http://"] assert order == list(s.adapters) - s.mount('http://git', HTTPAdapter()) - s.mount('http://github', HTTPAdapter()) - s.mount('http://github.com', HTTPAdapter()) - s.mount('http://github.com/about/', HTTPAdapter()) + s.mount("http://git", HTTPAdapter()) + s.mount("http://github", HTTPAdapter()) + s.mount("http://github.com", HTTPAdapter()) + s.mount("http://github.com/about/", HTTPAdapter()) order = [ - 'http://github.com/about/', - 'http://github.com', - 'http://github', - 'http://git', - 'https://', - 'http://', + "http://github.com/about/", + "http://github.com", + "http://github", + "http://git", + "https://", + "http://", ] assert order == list(s.adapters) - s.mount('http://gittip', HTTPAdapter()) - s.mount('http://gittip.com', HTTPAdapter()) - s.mount('http://gittip.com/about/', HTTPAdapter()) + s.mount("http://gittip", HTTPAdapter()) + s.mount("http://gittip.com", HTTPAdapter()) + s.mount("http://gittip.com/about/", HTTPAdapter()) order = [ - 'http://github.com/about/', - 'http://gittip.com/about/', - 'http://github.com', - 'http://gittip.com', - 'http://github', - 'http://gittip', - 'http://git', - 'https://', - 'http://', + "http://github.com/about/", + "http://gittip.com/about/", + "http://github.com", + "http://gittip.com", + "http://github", + "http://gittip", + "http://git", + "https://", + "http://", ] assert order == list(s.adapters) s2 = requests.Session() - s2.adapters = {'http://': HTTPAdapter()} - s2.mount('https://', HTTPAdapter()) - assert 'http://' in s2.adapters - assert 'https://' in s2.adapters + s2.adapters = {"http://": HTTPAdapter()} + s2.mount("https://", HTTPAdapter()) + assert "http://" in s2.adapters + assert "https://" in s2.adapters def test_session_get_adapter_prefix_matching(self): - prefix = 'https://example.com' - more_specific_prefix = prefix + '/some/path' + prefix = "https://example.com" + more_specific_prefix = prefix + "/some/path" - url_matching_only_prefix = prefix + '/another/path' - url_matching_more_specific_prefix = more_specific_prefix + '/longer/path' - url_not_matching_prefix = 'https://another.example.com/' + url_matching_only_prefix = prefix + "/another/path" + url_matching_more_specific_prefix = more_specific_prefix + "/longer/path" + url_not_matching_prefix = "https://another.example.com/" s = requests.Session() prefix_adapter = HTTPAdapter() @@ -1544,12 +1611,18 @@ def test_session_get_adapter_prefix_matching(self): 
s.mount(more_specific_prefix, more_specific_prefix_adapter) assert s.get_adapter(url_matching_only_prefix) is prefix_adapter - assert s.get_adapter(url_matching_more_specific_prefix) is more_specific_prefix_adapter - assert s.get_adapter(url_not_matching_prefix) not in (prefix_adapter, more_specific_prefix_adapter) + assert ( + s.get_adapter(url_matching_more_specific_prefix) + is more_specific_prefix_adapter + ) + assert s.get_adapter(url_not_matching_prefix) not in ( + prefix_adapter, + more_specific_prefix_adapter, + ) def test_session_get_adapter_prefix_matching_mixed_case(self): - mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix' - url_matching_prefix = mixed_case_prefix + '/full_url' + mixed_case_prefix = "hTtPs://eXamPle.CoM/MixEd_CAse_PREfix" + url_matching_prefix = mixed_case_prefix + "/full_url" s = requests.Session() my_adapter = HTTPAdapter() @@ -1558,8 +1631,10 @@ def test_session_get_adapter_prefix_matching_mixed_case(self): assert s.get_adapter(url_matching_prefix) is my_adapter def test_session_get_adapter_prefix_matching_is_case_insensitive(self): - mixed_case_prefix = 'hTtPs://eXamPle.CoM/MixEd_CAse_PREfix' - url_matching_prefix_with_different_case = 'HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url' + mixed_case_prefix = "hTtPs://eXamPle.CoM/MixEd_CAse_PREfix" + url_matching_prefix_with_different_case = ( + "HtTpS://exaMPLe.cOm/MiXeD_caSE_preFIX/another_url" + ) s = requests.Session() my_adapter = HTTPAdapter() @@ -1570,184 +1645,205 @@ def test_session_get_adapter_prefix_matching_is_case_insensitive(self): def test_header_remove_is_case_insensitive(self, httpbin): # From issue #1321 s = requests.Session() - s.headers['foo'] = 'bar' - r = s.get(httpbin('get'), headers={'FOO': None}) - assert 'foo' not in r.request.headers + s.headers["foo"] = "bar" + r = s.get(httpbin("get"), headers={"FOO": None}) + assert "foo" not in r.request.headers def test_params_are_merged_case_sensitive(self, httpbin): s = requests.Session() - s.params['foo'] = 'bar' - r = s.get(httpbin('get'), params={'FOO': 'bar'}) - assert r.json()['args'] == {'foo': 'bar', 'FOO': 'bar'} + s.params["foo"] = "bar" + r = s.get(httpbin("get"), params={"FOO": "bar"}) + assert r.json()["args"] == {"foo": "bar", "FOO": "bar"} def test_long_authinfo_in_url(self): - url = 'http://{}:{}@{}:9000/path?query#frag'.format( - 'E8A3BE87-9E3F-4620-8858-95478E385B5B', - 'EA770032-DA4D-4D84-8CE9-29C6D910BF1E', - 'exactly-------------sixty-----------three------------characters', + url = "http://{}:{}@{}:9000/path?query#frag".format( + "E8A3BE87-9E3F-4620-8858-95478E385B5B", + "EA770032-DA4D-4D84-8CE9-29C6D910BF1E", + "exactly-------------sixty-----------three------------characters", ) - r = requests.Request('GET', url).prepare() + r = requests.Request("GET", url).prepare() assert r.url == url def test_header_keys_are_native(self, httpbin): - headers = {'unicode': 'blah', 'byte'.encode('ascii'): 'blah'} - r = requests.Request('GET', httpbin('get'), headers=headers) + headers = {"unicode": "blah", b"byte": "blah"} + r = requests.Request("GET", httpbin("get"), headers=headers) p = r.prepare() # This is testing that they are builtin strings. A bit weird, but there # we go. 
- assert 'unicode' in p.headers.keys() - assert 'byte' in p.headers.keys() + assert "unicode" in p.headers.keys() + assert "byte" in p.headers.keys() def test_header_validation(self, httpbin): """Ensure prepare_headers regex isn't flagging valid header contents.""" - headers_ok = {'foo': 'bar baz qux', - 'bar': 'fbbq'.encode('utf8'), - 'baz': '', - 'qux': '1'} - r = requests.get(httpbin('get'), headers=headers_ok) - assert r.request.headers['foo'] == headers_ok['foo'] + headers_ok = { + "foo": "bar baz qux", + "bar": b"fbbq", + "baz": "", + "qux": "1", + } + r = requests.get(httpbin("get"), headers=headers_ok) + assert r.request.headers["foo"] == headers_ok["foo"] def test_header_value_not_str(self, httpbin): """Ensure the header value is of type string or bytes as per discussion in GH issue #3386 """ - headers_int = {'foo': 3} - headers_dict = {'bar': {'foo': 'bar'}} - headers_list = {'baz': ['foo', 'bar']} + headers_int = {"foo": 3} + headers_dict = {"bar": {"foo": "bar"}} + headers_list = {"baz": ["foo", "bar"]} # Test for int with pytest.raises(InvalidHeader) as excinfo: - r = requests.get(httpbin('get'), headers=headers_int) - assert 'foo' in str(excinfo.value) + requests.get(httpbin("get"), headers=headers_int) + assert "foo" in str(excinfo.value) # Test for dict with pytest.raises(InvalidHeader) as excinfo: - r = requests.get(httpbin('get'), headers=headers_dict) - assert 'bar' in str(excinfo.value) + requests.get(httpbin("get"), headers=headers_dict) + assert "bar" in str(excinfo.value) # Test for list with pytest.raises(InvalidHeader) as excinfo: - r = requests.get(httpbin('get'), headers=headers_list) - assert 'baz' in str(excinfo.value) + requests.get(httpbin("get"), headers=headers_list) + assert "baz" in str(excinfo.value) def test_header_no_return_chars(self, httpbin): """Ensure that a header containing return character sequences raise an exception. Otherwise, multiple headers are created from single string. """ - headers_ret = {'foo': 'bar\r\nbaz: qux'} - headers_lf = {'foo': 'bar\nbaz: qux'} - headers_cr = {'foo': 'bar\rbaz: qux'} + headers_ret = {"foo": "bar\r\nbaz: qux"} + headers_lf = {"foo": "bar\nbaz: qux"} + headers_cr = {"foo": "bar\rbaz: qux"} # Test for newline with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_ret) + requests.get(httpbin("get"), headers=headers_ret) # Test for line feed with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_lf) + requests.get(httpbin("get"), headers=headers_lf) # Test for carriage return with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_cr) + requests.get(httpbin("get"), headers=headers_cr) def test_header_no_leading_space(self, httpbin): """Ensure headers containing leading whitespace raise InvalidHeader Error before sending. 
""" - headers_space = {'foo': ' bar'} - headers_tab = {'foo': ' bar'} + headers_space = {"foo": " bar"} + headers_tab = {"foo": " bar"} # Test for whitespace with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_space) + requests.get(httpbin("get"), headers=headers_space) + # Test for tab with pytest.raises(InvalidHeader): - r = requests.get(httpbin('get'), headers=headers_tab) + requests.get(httpbin("get"), headers=headers_tab) - @pytest.mark.parametrize('files', ('foo', b'foo', bytearray(b'foo'))) + @pytest.mark.parametrize("files", ("foo", b"foo", bytearray(b"foo"))) def test_can_send_objects_with_files(self, httpbin, files): - data = {'a': 'this is a string'} - files = {'b': files} - r = requests.Request('POST', httpbin('post'), data=data, files=files) + data = {"a": "this is a string"} + files = {"b": files} + r = requests.Request("POST", httpbin("post"), data=data, files=files) p = r.prepare() - assert 'multipart/form-data' in p.headers['Content-Type'] + assert "multipart/form-data" in p.headers["Content-Type"] def test_can_send_file_object_with_non_string_filename(self, httpbin): f = io.BytesIO() f.name = 2 - r = requests.Request('POST', httpbin('post'), files={'f': f}) + r = requests.Request("POST", httpbin("post"), files={"f": f}) p = r.prepare() - assert 'multipart/form-data' in p.headers['Content-Type'] + assert "multipart/form-data" in p.headers["Content-Type"] def test_autoset_header_values_are_native(self, httpbin): - data = 'this is a string' - length = '16' - req = requests.Request('POST', httpbin('post'), data=data) + data = "this is a string" + length = "16" + req = requests.Request("POST", httpbin("post"), data=data) p = req.prepare() - assert p.headers['Content-Length'] == length + assert p.headers["Content-Length"] == length def test_nonhttp_schemes_dont_check_URLs(self): test_urls = ( - 'data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==', - 'file:///etc/passwd', - 'magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431', + "data:image/gif;base64,R0lGODlhAQABAHAAACH5BAUAAAAALAAAAAABAAEAAAICRAEAOw==", + "file:///etc/passwd", + "magnet:?xt=urn:btih:be08f00302bc2d1d3cfa3af02024fa647a271431", ) for test_url in test_urls: - req = requests.Request('GET', test_url) + req = requests.Request("GET", test_url) preq = req.prepare() assert test_url == preq.url - def test_auth_is_stripped_on_http_downgrade(self, httpbin, httpbin_secure, httpbin_ca_bundle): + def test_auth_is_stripped_on_http_downgrade( + self, httpbin, httpbin_secure, httpbin_ca_bundle + ): r = requests.get( - httpbin_secure('redirect-to'), - params={'url': httpbin('get')}, - auth=('user', 'pass'), - verify=httpbin_ca_bundle + httpbin_secure("redirect-to"), + params={"url": httpbin("get")}, + auth=("user", "pass"), + verify=httpbin_ca_bundle, ) - assert r.history[0].request.headers['Authorization'] - assert 'Authorization' not in r.request.headers + assert r.history[0].request.headers["Authorization"] + assert "Authorization" not in r.request.headers def test_auth_is_retained_for_redirect_on_host(self, httpbin): - r = requests.get(httpbin('redirect/1'), auth=('user', 'pass')) - h1 = r.history[0].request.headers['Authorization'] - h2 = r.request.headers['Authorization'] + r = requests.get(httpbin("redirect/1"), auth=("user", "pass")) + h1 = r.history[0].request.headers["Authorization"] + h2 = r.request.headers["Authorization"] assert h1 == h2 def test_should_strip_auth_host_change(self): s = requests.Session() - assert 
s.should_strip_auth('http://example.com/foo', 'http://another.example.com/') + assert s.should_strip_auth( + "http://example.com/foo", "http://another.example.com/" + ) def test_should_strip_auth_http_downgrade(self): s = requests.Session() - assert s.should_strip_auth('https://example.com/foo', 'http://example.com/bar') + assert s.should_strip_auth("https://example.com/foo", "http://example.com/bar") def test_should_strip_auth_https_upgrade(self): s = requests.Session() - assert not s.should_strip_auth('http://example.com/foo', 'https://example.com/bar') - assert not s.should_strip_auth('http://example.com:80/foo', 'https://example.com/bar') - assert not s.should_strip_auth('http://example.com/foo', 'https://example.com:443/bar') + assert not s.should_strip_auth( + "http://example.com/foo", "https://example.com/bar" + ) + assert not s.should_strip_auth( + "http://example.com:80/foo", "https://example.com/bar" + ) + assert not s.should_strip_auth( + "http://example.com/foo", "https://example.com:443/bar" + ) # Non-standard ports should trigger stripping - assert s.should_strip_auth('http://example.com:8080/foo', 'https://example.com/bar') - assert s.should_strip_auth('http://example.com/foo', 'https://example.com:8443/bar') + assert s.should_strip_auth( + "http://example.com:8080/foo", "https://example.com/bar" + ) + assert s.should_strip_auth( + "http://example.com/foo", "https://example.com:8443/bar" + ) def test_should_strip_auth_port_change(self): s = requests.Session() - assert s.should_strip_auth('http://example.com:1234/foo', 'https://example.com:4321/bar') + assert s.should_strip_auth( + "http://example.com:1234/foo", "https://example.com:4321/bar" + ) @pytest.mark.parametrize( - 'old_uri, new_uri', ( - ('https://example.com:443/foo', 'https://example.com/bar'), - ('http://example.com:80/foo', 'http://example.com/bar'), - ('https://example.com/foo', 'https://example.com:443/bar'), - ('http://example.com/foo', 'http://example.com:80/bar') - )) + "old_uri, new_uri", + ( + ("https://example.com:443/foo", "https://example.com/bar"), + ("http://example.com:80/foo", "http://example.com/bar"), + ("https://example.com/foo", "https://example.com:443/bar"), + ("http://example.com/foo", "http://example.com:80/bar"), + ), + ) def test_should_strip_auth_default_port(self, old_uri, new_uri): s = requests.Session() assert not s.should_strip_auth(old_uri, new_uri) def test_manual_redirect_with_partial_body_read(self, httpbin): s = requests.Session() - r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True) + r1 = s.get(httpbin("redirect/2"), allow_redirects=False, stream=True) assert r1.is_redirect rg = s.resolve_redirects(r1, r1.request, stream=True) @@ -1765,36 +1861,36 @@ def test_manual_redirect_with_partial_body_read(self, httpbin): assert not r3.is_redirect def test_prepare_body_position_non_stream(self): - data = b'the data' - prep = requests.Request('GET', 'http://example.com', data=data).prepare() + data = b"the data" + prep = requests.Request("GET", "http://example.com", data=data).prepare() assert prep._body_position is None def test_rewind_body(self): - data = io.BytesIO(b'the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() + data = io.BytesIO(b"the data") + prep = requests.Request("GET", "http://example.com", data=data).prepare() assert prep._body_position == 0 - assert prep.body.read() == b'the data' + assert prep.body.read() == b"the data" # the data has all been read - assert prep.body.read() == b'' + assert prep.body.read() == b"" # 
rewind it back requests.utils.rewind_body(prep) - assert prep.body.read() == b'the data' + assert prep.body.read() == b"the data" def test_rewind_partially_read_body(self): - data = io.BytesIO(b'the data') + data = io.BytesIO(b"the data") data.read(4) # read some data - prep = requests.Request('GET', 'http://example.com', data=data).prepare() + prep = requests.Request("GET", "http://example.com", data=data).prepare() assert prep._body_position == 4 - assert prep.body.read() == b'data' + assert prep.body.read() == b"data" # the data has all been read - assert prep.body.read() == b'' + assert prep.body.read() == b"" # rewind it back requests.utils.rewind_body(prep) - assert prep.body.read() == b'data' + assert prep.body.read() == b"data" def test_rewind_body_no_seek(self): class BadFileObj: @@ -1807,14 +1903,14 @@ def tell(self): def __iter__(self): return - data = BadFileObj('the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() + data = BadFileObj("the data") + prep = requests.Request("GET", "http://example.com", data=data).prepare() assert prep._body_position == 0 with pytest.raises(UnrewindableBodyError) as e: requests.utils.rewind_body(prep) - assert 'Unable to rewind request body' in str(e) + assert "Unable to rewind request body" in str(e) def test_rewind_body_failed_seek(self): class BadFileObj: @@ -1830,14 +1926,14 @@ def seek(self, pos, whence=0): def __iter__(self): return - data = BadFileObj('the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() + data = BadFileObj("the data") + prep = requests.Request("GET", "http://example.com", data=data).prepare() assert prep._body_position == 0 with pytest.raises(UnrewindableBodyError) as e: requests.utils.rewind_body(prep) - assert 'error occurred when rewinding request body' in str(e) + assert "error occurred when rewinding request body" in str(e) def test_rewind_body_failed_tell(self): class BadFileObj: @@ -1850,14 +1946,14 @@ def tell(self): def __iter__(self): return - data = BadFileObj('the data') - prep = requests.Request('GET', 'http://example.com', data=data).prepare() + data = BadFileObj("the data") + prep = requests.Request("GET", "http://example.com", data=data).prepare() assert prep._body_position is not None with pytest.raises(UnrewindableBodyError) as e: requests.utils.rewind_body(prep) - assert 'Unable to rewind request body' in str(e) + assert "Unable to rewind request body" in str(e) def _patch_adapter_gzipped_redirect(self, session, url): adapter = session.get_adapter(url=url) @@ -1867,7 +1963,7 @@ def _patch_adapter_gzipped_redirect(self, session, url): def build_response(*args, **kwargs): resp = org_build_response(*args, **kwargs) if not self._patched_response: - resp.raw.headers['content-encoding'] = 'gzip' + resp.raw.headers["content-encoding"] = "gzip" self._patched_response = True return resp @@ -1875,22 +1971,28 @@ def build_response(*args, **kwargs): def test_redirect_with_wrong_gzipped_header(self, httpbin): s = requests.Session() - url = httpbin('redirect/1') + url = httpbin("redirect/1") self._patch_adapter_gzipped_redirect(s, url) s.get(url) @pytest.mark.parametrize( - 'username, password, auth_str', ( - ('test', 'test', 'Basic dGVzdDp0ZXN0'), - ('имя'.encode('utf-8'), 'пароль'.encode('utf-8'), 'Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA=='), - )) + "username, password, auth_str", + ( + ("test", "test", "Basic dGVzdDp0ZXN0"), + ( + "имя".encode(), + "пароль".encode(), + "Basic 0LjQvNGPOtC/0LDRgNC+0LvRjA==", + ), + ), + ) def 
test_basic_auth_str_is_always_native(self, username, password, auth_str): s = _basic_auth_str(username, password) assert isinstance(s, builtin_str) assert s == auth_str def test_requests_history_is_saved(self, httpbin): - r = requests.get(httpbin('redirect/5')) + r = requests.get(httpbin("redirect/5")) total = r.history[-1].history i = 0 for item in r.history: @@ -1898,23 +2000,23 @@ def test_requests_history_is_saved(self, httpbin): i += 1 def test_json_param_post_content_type_works(self, httpbin): - r = requests.post( - httpbin('post'), - json={'life': 42} - ) + r = requests.post(httpbin("post"), json={"life": 42}) assert r.status_code == 200 - assert 'application/json' in r.request.headers['Content-Type'] - assert {'life': 42} == r.json()['json'] + assert "application/json" in r.request.headers["Content-Type"] + assert {"life": 42} == r.json()["json"] def test_json_param_post_should_not_override_data_param(self, httpbin): - r = requests.Request(method='POST', url=httpbin('post'), - data={'stuff': 'elixr'}, - json={'music': 'flute'}) + r = requests.Request( + method="POST", + url=httpbin("post"), + data={"stuff": "elixr"}, + json={"music": "flute"}, + ) prep = r.prepare() - assert 'stuff=elixr' == prep.body + assert "stuff=elixr" == prep.body def test_response_iter_lines(self, httpbin): - r = requests.get(httpbin('stream/4'), stream=True) + r = requests.get(httpbin("stream/4"), stream=True) assert r.status_code == 200 it = r.iter_lines() @@ -1922,7 +2024,7 @@ def test_response_iter_lines(self, httpbin): assert len(list(it)) == 3 def test_response_context_manager(self, httpbin): - with requests.get(httpbin('stream/4'), stream=True) as response: + with requests.get(httpbin("stream/4"), stream=True) as response: assert isinstance(response, requests.Response) assert response.raw.closed @@ -1930,7 +2032,7 @@ def test_response_context_manager(self, httpbin): def test_unconsumed_session_response_closes_connection(self, httpbin): s = requests.session() - with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response: + with contextlib.closing(s.get(httpbin("stream/4"), stream=True)) as response: pass assert response._content_consumed is False @@ -1939,7 +2041,7 @@ def test_unconsumed_session_response_closes_connection(self, httpbin): @pytest.mark.xfail def test_response_iter_lines_reentrant(self, httpbin): """Response.iter_lines() is not reentrant safe""" - r = requests.get(httpbin('stream/4'), stream=True) + r = requests.get(httpbin("stream/4"), stream=True) assert r.status_code == 200 next(r.iter_lines()) @@ -1947,27 +2049,27 @@ def test_response_iter_lines_reentrant(self, httpbin): def test_session_close_proxy_clear(self, mocker): proxies = { - 'one': mocker.Mock(), - 'two': mocker.Mock(), + "one": mocker.Mock(), + "two": mocker.Mock(), } session = requests.Session() - mocker.patch.dict(session.adapters['http://'].proxy_manager, proxies) + mocker.patch.dict(session.adapters["http://"].proxy_manager, proxies) session.close() - proxies['one'].clear.assert_called_once_with() - proxies['two'].clear.assert_called_once_with() + proxies["one"].clear.assert_called_once_with() + proxies["two"].clear.assert_called_once_with() def test_proxy_auth(self): adapter = HTTPAdapter() headers = adapter.proxy_headers("http://user:pass@httpbin.org") - assert headers == {'Proxy-Authorization': 'Basic dXNlcjpwYXNz'} + assert headers == {"Proxy-Authorization": "Basic dXNlcjpwYXNz"} def test_proxy_auth_empty_pass(self): adapter = HTTPAdapter() headers = adapter.proxy_headers("http://user:@httpbin.org") 
- assert headers == {'Proxy-Authorization': 'Basic dXNlcjo='} + assert headers == {"Proxy-Authorization": "Basic dXNlcjo="} def test_response_json_when_content_is_None(self, httpbin): - r = requests.get(httpbin('/status/204')) + r = requests.get(httpbin("/status/204")) # Make sure r.content is None r.status_code = 0 r._content = False @@ -1982,7 +2084,7 @@ def test_response_without_release_conn(self): Should work when `release_conn` attr doesn't exist on `response.raw`. """ resp = requests.Response() - resp.raw = StringIO.StringIO('test') + resp.raw = StringIO.StringIO("test") assert not resp.raw.closed resp.close() assert resp.raw.closed @@ -1991,36 +2093,36 @@ def test_empty_stream_with_auth_does_not_set_content_length_header(self, httpbin """Ensure that a byte stream with size 0 will not set both a Content-Length and Transfer-Encoding header. """ - auth = ('user', 'pass') - url = httpbin('post') - file_obj = io.BytesIO(b'') - r = requests.Request('POST', url, auth=auth, data=file_obj) + auth = ("user", "pass") + url = httpbin("post") + file_obj = io.BytesIO(b"") + r = requests.Request("POST", url, auth=auth, data=file_obj) prepared_request = r.prepare() - assert 'Transfer-Encoding' in prepared_request.headers - assert 'Content-Length' not in prepared_request.headers + assert "Transfer-Encoding" in prepared_request.headers + assert "Content-Length" not in prepared_request.headers def test_stream_with_auth_does_not_set_transfer_encoding_header(self, httpbin): """Ensure that a byte stream with size > 0 will not set both a Content-Length and Transfer-Encoding header. """ - auth = ('user', 'pass') - url = httpbin('post') - file_obj = io.BytesIO(b'test data') - r = requests.Request('POST', url, auth=auth, data=file_obj) + auth = ("user", "pass") + url = httpbin("post") + file_obj = io.BytesIO(b"test data") + r = requests.Request("POST", url, auth=auth, data=file_obj) prepared_request = r.prepare() - assert 'Transfer-Encoding' not in prepared_request.headers - assert 'Content-Length' in prepared_request.headers + assert "Transfer-Encoding" not in prepared_request.headers + assert "Content-Length" in prepared_request.headers def test_chunked_upload_does_not_set_content_length_header(self, httpbin): """Ensure that requests with a generator body stream using Transfer-Encoding: chunked, not a Content-Length header. """ - data = (i for i in [b'a', b'b', b'c']) - url = httpbin('post') - r = requests.Request('POST', url, data=data) + data = (i for i in [b"a", b"b", b"c"]) + url = httpbin("post") + r = requests.Request("POST", url, data=data) prepared_request = r.prepare() - assert 'Transfer-Encoding' in prepared_request.headers - assert 'Content-Length' not in prepared_request.headers + assert "Transfer-Encoding" in prepared_request.headers + assert "Content-Length" not in prepared_request.headers def test_custom_redirect_mixin(self, httpbin): """Tests a custom mixin to overwrite ``get_redirect_target``. @@ -2034,23 +2136,24 @@ def test_custom_redirect_mixin(self, httpbin): location = alternate url 3. 
the custom session catches the edge case and follows the redirect """ - url_final = httpbin('html') - querystring_malformed = urlencode({'location': url_final}) - url_redirect_malformed = httpbin('response-headers?%s' % querystring_malformed) - querystring_redirect = urlencode({'url': url_redirect_malformed}) - url_redirect = httpbin('redirect-to?%s' % querystring_redirect) - urls_test = [url_redirect, - url_redirect_malformed, - url_final, - ] + url_final = httpbin("html") + querystring_malformed = urlencode({"location": url_final}) + url_redirect_malformed = httpbin("response-headers?%s" % querystring_malformed) + querystring_redirect = urlencode({"url": url_redirect_malformed}) + url_redirect = httpbin("redirect-to?%s" % querystring_redirect) + urls_test = [ + url_redirect, + url_redirect_malformed, + url_final, + ] class CustomRedirectSession(requests.Session): def get_redirect_target(self, resp): # default behavior if resp.is_redirect: - return resp.headers['location'] + return resp.headers["location"] # edge case - check to see if 'location' is in headers anyways - location = resp.headers.get('location') + location = resp.headers.get("location") if location and (location != resp.url): return location return None @@ -2067,144 +2170,153 @@ def get_redirect_target(self, resp): class TestCaseInsensitiveDict: - @pytest.mark.parametrize( - 'cid', ( - CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}), - CaseInsensitiveDict([('Foo', 'foo'), ('BAr', 'bar')]), - CaseInsensitiveDict(FOO='foo', BAr='bar'), - )) + "cid", + ( + CaseInsensitiveDict({"Foo": "foo", "BAr": "bar"}), + CaseInsensitiveDict([("Foo", "foo"), ("BAr", "bar")]), + CaseInsensitiveDict(FOO="foo", BAr="bar"), + ), + ) def test_init(self, cid): assert len(cid) == 2 - assert 'foo' in cid - assert 'bar' in cid + assert "foo" in cid + assert "bar" in cid def test_docstring_example(self): cid = CaseInsensitiveDict() - cid['Accept'] = 'application/json' - assert cid['aCCEPT'] == 'application/json' - assert list(cid) == ['Accept'] + cid["Accept"] = "application/json" + assert cid["aCCEPT"] == "application/json" + assert list(cid) == ["Accept"] def test_len(self): - cid = CaseInsensitiveDict({'a': 'a', 'b': 'b'}) - cid['A'] = 'a' + cid = CaseInsensitiveDict({"a": "a", "b": "b"}) + cid["A"] = "a" assert len(cid) == 2 def test_getitem(self): - cid = CaseInsensitiveDict({'Spam': 'blueval'}) - assert cid['spam'] == 'blueval' - assert cid['SPAM'] == 'blueval' + cid = CaseInsensitiveDict({"Spam": "blueval"}) + assert cid["spam"] == "blueval" + assert cid["SPAM"] == "blueval" def test_fixes_649(self): """__setitem__ should behave case-insensitively.""" cid = CaseInsensitiveDict() - cid['spam'] = 'oneval' - cid['Spam'] = 'twoval' - cid['sPAM'] = 'redval' - cid['SPAM'] = 'blueval' - assert cid['spam'] == 'blueval' - assert cid['SPAM'] == 'blueval' - assert list(cid.keys()) == ['SPAM'] + cid["spam"] = "oneval" + cid["Spam"] = "twoval" + cid["sPAM"] = "redval" + cid["SPAM"] = "blueval" + assert cid["spam"] == "blueval" + assert cid["SPAM"] == "blueval" + assert list(cid.keys()) == ["SPAM"] def test_delitem(self): cid = CaseInsensitiveDict() - cid['Spam'] = 'someval' - del cid['sPam'] - assert 'spam' not in cid + cid["Spam"] = "someval" + del cid["sPam"] + assert "spam" not in cid assert len(cid) == 0 def test_contains(self): cid = CaseInsensitiveDict() - cid['Spam'] = 'someval' - assert 'Spam' in cid - assert 'spam' in cid - assert 'SPAM' in cid - assert 'sPam' in cid - assert 'notspam' not in cid + cid["Spam"] = "someval" + assert "Spam" in cid + 
assert "spam" in cid + assert "SPAM" in cid + assert "sPam" in cid + assert "notspam" not in cid def test_get(self): cid = CaseInsensitiveDict() - cid['spam'] = 'oneval' - cid['SPAM'] = 'blueval' - assert cid.get('spam') == 'blueval' - assert cid.get('SPAM') == 'blueval' - assert cid.get('sPam') == 'blueval' - assert cid.get('notspam', 'default') == 'default' + cid["spam"] = "oneval" + cid["SPAM"] = "blueval" + assert cid.get("spam") == "blueval" + assert cid.get("SPAM") == "blueval" + assert cid.get("sPam") == "blueval" + assert cid.get("notspam", "default") == "default" def test_update(self): cid = CaseInsensitiveDict() - cid['spam'] = 'blueval' - cid.update({'sPam': 'notblueval'}) - assert cid['spam'] == 'notblueval' - cid = CaseInsensitiveDict({'Foo': 'foo', 'BAr': 'bar'}) - cid.update({'fOO': 'anotherfoo', 'bAR': 'anotherbar'}) + cid["spam"] = "blueval" + cid.update({"sPam": "notblueval"}) + assert cid["spam"] == "notblueval" + cid = CaseInsensitiveDict({"Foo": "foo", "BAr": "bar"}) + cid.update({"fOO": "anotherfoo", "bAR": "anotherbar"}) assert len(cid) == 2 - assert cid['foo'] == 'anotherfoo' - assert cid['bar'] == 'anotherbar' + assert cid["foo"] == "anotherfoo" + assert cid["bar"] == "anotherbar" def test_update_retains_unchanged(self): - cid = CaseInsensitiveDict({'foo': 'foo', 'bar': 'bar'}) - cid.update({'foo': 'newfoo'}) - assert cid['bar'] == 'bar' + cid = CaseInsensitiveDict({"foo": "foo", "bar": "bar"}) + cid.update({"foo": "newfoo"}) + assert cid["bar"] == "bar" def test_iter(self): - cid = CaseInsensitiveDict({'Spam': 'spam', 'Eggs': 'eggs'}) - keys = frozenset(['Spam', 'Eggs']) + cid = CaseInsensitiveDict({"Spam": "spam", "Eggs": "eggs"}) + keys = frozenset(["Spam", "Eggs"]) assert frozenset(iter(cid)) == keys def test_equality(self): - cid = CaseInsensitiveDict({'SPAM': 'blueval', 'Eggs': 'redval'}) - othercid = CaseInsensitiveDict({'spam': 'blueval', 'eggs': 'redval'}) + cid = CaseInsensitiveDict({"SPAM": "blueval", "Eggs": "redval"}) + othercid = CaseInsensitiveDict({"spam": "blueval", "eggs": "redval"}) assert cid == othercid - del othercid['spam'] + del othercid["spam"] assert cid != othercid - assert cid == {'spam': 'blueval', 'eggs': 'redval'} + assert cid == {"spam": "blueval", "eggs": "redval"} assert cid != object() def test_setdefault(self): - cid = CaseInsensitiveDict({'Spam': 'blueval'}) - assert cid.setdefault('spam', 'notblueval') == 'blueval' - assert cid.setdefault('notspam', 'notblueval') == 'notblueval' + cid = CaseInsensitiveDict({"Spam": "blueval"}) + assert cid.setdefault("spam", "notblueval") == "blueval" + assert cid.setdefault("notspam", "notblueval") == "notblueval" def test_lower_items(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) + cid = CaseInsensitiveDict( + { + "Accept": "application/json", + "user-Agent": "requests", + } + ) keyset = frozenset(lowerkey for lowerkey, v in cid.lower_items()) - lowerkeyset = frozenset(['accept', 'user-agent']) + lowerkeyset = frozenset(["accept", "user-agent"]) assert keyset == lowerkeyset def test_preserve_key_case(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) - keyset = frozenset(['Accept', 'user-Agent']) + cid = CaseInsensitiveDict( + { + "Accept": "application/json", + "user-Agent": "requests", + } + ) + keyset = frozenset(["Accept", "user-Agent"]) assert frozenset(i[0] for i in cid.items()) == keyset assert frozenset(cid.keys()) == keyset assert frozenset(cid) == keyset def 
test_preserve_last_key_case(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) - cid.update({'ACCEPT': 'application/json'}) - cid['USER-AGENT'] = 'requests' - keyset = frozenset(['ACCEPT', 'USER-AGENT']) + cid = CaseInsensitiveDict( + { + "Accept": "application/json", + "user-Agent": "requests", + } + ) + cid.update({"ACCEPT": "application/json"}) + cid["USER-AGENT"] = "requests" + keyset = frozenset(["ACCEPT", "USER-AGENT"]) assert frozenset(i[0] for i in cid.items()) == keyset assert frozenset(cid.keys()) == keyset assert frozenset(cid) == keyset def test_copy(self): - cid = CaseInsensitiveDict({ - 'Accept': 'application/json', - 'user-Agent': 'requests', - }) + cid = CaseInsensitiveDict( + { + "Accept": "application/json", + "user-Agent": "requests", + } + ) cid_copy = cid.copy() assert cid == cid_copy - cid['changed'] = True + cid["changed"] = True assert cid != cid_copy @@ -2215,19 +2327,21 @@ def test_expires_valid_str(self): """Test case where we convert expires from string time.""" morsel = Morsel() - morsel['expires'] = 'Thu, 01-Jan-1970 00:00:01 GMT' + morsel["expires"] = "Thu, 01-Jan-1970 00:00:01 GMT" cookie = morsel_to_cookie(morsel) assert cookie.expires == 1 @pytest.mark.parametrize( - 'value, exception', ( + "value, exception", + ( (100, TypeError), - ('woops', ValueError), - )) + ("woops", ValueError), + ), + ) def test_expires_invalid_int(self, value, exception): """Test case where an invalid type is passed for expires.""" morsel = Morsel() - morsel['expires'] = value + morsel["expires"] = value with pytest.raises(exception): morsel_to_cookie(morsel) @@ -2235,7 +2349,7 @@ def test_expires_none(self): """Test case where expires is None.""" morsel = Morsel() - morsel['expires'] = None + morsel["expires"] = None cookie = morsel_to_cookie(morsel) assert cookie.expires is None @@ -2248,7 +2362,7 @@ def test_max_age_valid_int(self): """Test case where a valid max age in seconds is passed.""" morsel = Morsel() - morsel['max-age'] = 60 + morsel["max-age"] = 60 cookie = morsel_to_cookie(morsel) assert isinstance(cookie.expires, int) @@ -2256,34 +2370,31 @@ def test_max_age_invalid_str(self): """Test case where an invalid max age is passed.""" morsel = Morsel() - morsel['max-age'] = 'woops' + morsel["max-age"] = "woops" with pytest.raises(TypeError): morsel_to_cookie(morsel) class TestTimeout: - def test_stream_timeout(self, httpbin): try: - requests.get(httpbin('delay/10'), timeout=2.0) + requests.get(httpbin("delay/10"), timeout=2.0) except requests.exceptions.Timeout as e: - assert 'Read timed out' in e.args[0].args[0] + assert "Read timed out" in e.args[0].args[0] @pytest.mark.parametrize( - 'timeout, error_text', ( - ((3, 4, 5), '(connect, read)'), - ('foo', 'must be an int, float or None'), - )) + "timeout, error_text", + ( + ((3, 4, 5), "(connect, read)"), + ("foo", "must be an int, float or None"), + ), + ) def test_invalid_timeout(self, httpbin, timeout, error_text): with pytest.raises(ValueError) as e: - requests.get(httpbin('get'), timeout=timeout) + requests.get(httpbin("get"), timeout=timeout) assert error_text in str(e) - @pytest.mark.parametrize( - 'timeout', ( - None, - Urllib3Timeout(connect=None, read=None) - )) + @pytest.mark.parametrize("timeout", (None, Urllib3Timeout(connect=None, read=None))) def test_none_timeout(self, httpbin, timeout): """Check that you can set None as a valid timeout value.
@@ -2293,53 +2404,47 @@ def test_none_timeout(self, httpbin, timeout): Instead we verify that setting the timeout to None does not prevent the request from succeeding. """ - r = requests.get(httpbin('get'), timeout=timeout) + r = requests.get(httpbin("get"), timeout=timeout) assert r.status_code == 200 @pytest.mark.parametrize( - 'timeout', ( - (None, 0.1), - Urllib3Timeout(connect=None, read=0.1) - )) + "timeout", ((None, 0.1), Urllib3Timeout(connect=None, read=0.1)) + ) def test_read_timeout(self, httpbin, timeout): try: - requests.get(httpbin('delay/10'), timeout=timeout) - pytest.fail('The recv() request should time out.') + requests.get(httpbin("delay/10"), timeout=timeout) + pytest.fail("The recv() request should time out.") except ReadTimeout: pass @pytest.mark.parametrize( - 'timeout', ( - (0.1, None), - Urllib3Timeout(connect=0.1, read=None) - )) + "timeout", ((0.1, None), Urllib3Timeout(connect=0.1, read=None)) + ) def test_connect_timeout(self, timeout): try: requests.get(TARPIT, timeout=timeout) - pytest.fail('The connect() request should time out.') + pytest.fail("The connect() request should time out.") except ConnectTimeout as e: assert isinstance(e, ConnectionError) assert isinstance(e, Timeout) @pytest.mark.parametrize( - 'timeout', ( - (0.1, 0.1), - Urllib3Timeout(connect=0.1, read=0.1) - )) + "timeout", ((0.1, 0.1), Urllib3Timeout(connect=0.1, read=0.1)) + ) def test_total_timeout_connect(self, timeout): try: requests.get(TARPIT, timeout=timeout) - pytest.fail('The connect() request should time out.') + pytest.fail("The connect() request should time out.") except ConnectTimeout: pass def test_encoded_methods(self, httpbin): """See: https://github.com/psf/requests/issues/2316""" - r = requests.request(b'GET', httpbin('get')) + r = requests.request(b"GET", httpbin("get")) assert r.ok -SendCall = collections.namedtuple('SendCall', ('args', 'kwargs')) +SendCall = collections.namedtuple("SendCall", ("args", "kwargs")) class RedirectSession(SessionRedirectMixin): @@ -2363,14 +2468,14 @@ def build_response(self): except IndexError: r.status_code = 200 - r.headers = CaseInsensitiveDict({'Location': '/'}) + r.headers = CaseInsensitiveDict({"Location": "/"}) r.raw = self._build_raw() r.request = request return r def _build_raw(self): - string = StringIO.StringIO('') - setattr(string, 'release_conn', lambda *args: args) + string = StringIO.StringIO("") + setattr(string, "release_conn", lambda *args: args) return string @@ -2378,49 +2483,46 @@ def test_json_encodes_as_bytes(): # urllib3 expects bodies as bytes-like objects body = {"key": "value"} p = PreparedRequest() - p.prepare( - method='GET', - url='https://www.example.com/', - json=body - ) + p.prepare(method="GET", url="https://www.example.com/", json=body) assert isinstance(p.body, bytes) def test_requests_are_updated_each_time(httpbin): session = RedirectSession([303, 307]) - prep = requests.Request('POST', httpbin('post')).prepare() + prep = requests.Request("POST", httpbin("post")).prepare() r0 = session.send(prep) - assert r0.request.method == 'POST' + assert r0.request.method == "POST" assert session.calls[-1] == SendCall((r0.request,), {}) redirect_generator = session.resolve_redirects(r0, prep) default_keyword_args = { - 'stream': False, - 'verify': True, - 'cert': None, - 'timeout': None, - 'allow_redirects': False, - 'proxies': {}, + "stream": False, + "verify": True, + "cert": None, + "timeout": None, + "allow_redirects": False, + "proxies": {}, } for response in redirect_generator: - assert response.request.method 
== 'GET' + assert response.request.method == "GET" send_call = SendCall((response.request,), default_keyword_args) assert session.calls[-1] == send_call -@pytest.mark.parametrize("var,url,proxy", [ - ('http_proxy', 'http://example.com', 'socks5://proxy.com:9876'), - ('https_proxy', 'https://example.com', 'socks5://proxy.com:9876'), - ('all_proxy', 'http://example.com', 'socks5://proxy.com:9876'), - ('all_proxy', 'https://example.com', 'socks5://proxy.com:9876'), -]) +@pytest.mark.parametrize( + "var,url,proxy", + [ + ("http_proxy", "http://example.com", "socks5://proxy.com:9876"), + ("https_proxy", "https://example.com", "socks5://proxy.com:9876"), + ("all_proxy", "http://example.com", "socks5://proxy.com:9876"), + ("all_proxy", "https://example.com", "socks5://proxy.com:9876"), + ], +) def test_proxy_env_vars_override_default(var, url, proxy): session = requests.Session() prep = PreparedRequest() - prep.prepare(method='GET', url=url) + prep.prepare(method="GET", url=url) - kwargs = { - var: proxy - } + kwargs = {var: proxy} scheme = urlparse(url).scheme with override_environ(**kwargs): proxies = session.rebuild_proxies(prep, {}) @@ -2429,179 +2531,161 @@ def test_proxy_env_vars_override_default(var, url, proxy): @pytest.mark.parametrize( - 'data', ( - (('a', 'b'), ('c', 'd')), - (('c', 'd'), ('a', 'b')), - (('a', 'b'), ('c', 'd'), ('e', 'f')), - )) + "data", + ( + (("a", "b"), ("c", "d")), + (("c", "d"), ("a", "b")), + (("a", "b"), ("c", "d"), ("e", "f")), + ), +) def test_data_argument_accepts_tuples(data): """Ensure that the data argument will accept tuples of strings and properly encode them. """ p = PreparedRequest() p.prepare( - method='GET', - url='http://www.example.com', - data=data, - hooks=default_hooks() + method="GET", url="http://www.example.com", data=data, hooks=default_hooks() ) assert p.body == urlencode(data) @pytest.mark.parametrize( - 'kwargs', ( + "kwargs", + ( None, { - 'method': 'GET', - 'url': 'http://www.example.com', - 'data': 'foo=bar', - 'hooks': default_hooks() + "method": "GET", + "url": "http://www.example.com", + "data": "foo=bar", + "hooks": default_hooks(), }, { - 'method': 'GET', - 'url': 'http://www.example.com', - 'data': 'foo=bar', - 'hooks': default_hooks(), - 'cookies': {'foo': 'bar'} + "method": "GET", + "url": "http://www.example.com", + "data": "foo=bar", + "hooks": default_hooks(), + "cookies": {"foo": "bar"}, }, - { - 'method': 'GET', - 'url': 'http://www.example.com/üniçø∂é' - }, - )) + {"method": "GET", "url": "http://www.example.com/üniçø∂é"}, + ), +) def test_prepared_copy(kwargs): p = PreparedRequest() if kwargs: p.prepare(**kwargs) copy = p.copy() - for attr in ('method', 'url', 'headers', '_cookies', 'body', 'hooks'): + for attr in ("method", "url", "headers", "_cookies", "body", "hooks"): assert getattr(p, attr) == getattr(copy, attr) def test_urllib3_retries(httpbin): from urllib3.util import Retry + s = requests.Session() - s.mount('http://', HTTPAdapter(max_retries=Retry( - total=2, status_forcelist=[500] - ))) + s.mount("http://", HTTPAdapter(max_retries=Retry(total=2, status_forcelist=[500]))) with pytest.raises(RetryError): - s.get(httpbin('status/500')) + s.get(httpbin("status/500")) def test_urllib3_pool_connection_closed(httpbin): s = requests.Session() - s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0)) + s.mount("http://", HTTPAdapter(pool_connections=0, pool_maxsize=0)) try: - s.get(httpbin('status/200')) + s.get(httpbin("status/200")) except ConnectionError as e: - assert u"Pool is closed." 
in str(e) + assert "Pool is closed." in str(e) -class TestPreparingURLs(object): +class TestPreparingURLs: @pytest.mark.parametrize( - 'url,expected', + "url,expected", ( - ('http://google.com', 'http://google.com/'), - ('http://ジェーピーニック.jp', 'http://xn--hckqz9bzb1cyrb.jp/'), - ('http://xn--n3h.net/', 'http://xn--n3h.net/'), + ("http://google.com", "http://google.com/"), + ("http://ジェーピーニック.jp", "http://xn--hckqz9bzb1cyrb.jp/"), + ("http://xn--n3h.net/", "http://xn--n3h.net/"), + ("http://ジェーピーニック.jp".encode(), "http://xn--hckqz9bzb1cyrb.jp/"), + ("http://straße.de/straße", "http://xn--strae-oqa.de/stra%C3%9Fe"), ( - 'http://ジェーピーニック.jp'.encode('utf-8'), - 'http://xn--hckqz9bzb1cyrb.jp/' + "http://straße.de/straße".encode(), + "http://xn--strae-oqa.de/stra%C3%9Fe", ), ( - 'http://straße.de/straße', - 'http://xn--strae-oqa.de/stra%C3%9Fe' + "http://Königsgäßchen.de/straße", + "http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe", ), ( - 'http://straße.de/straße'.encode('utf-8'), - 'http://xn--strae-oqa.de/stra%C3%9Fe' + "http://Königsgäßchen.de/straße".encode(), + "http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe", ), + (b"http://xn--n3h.net/", "http://xn--n3h.net/"), ( - 'http://Königsgäßchen.de/straße', - 'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe' + b"http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/", + "http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/", ), ( - 'http://Königsgäßchen.de/straße'.encode('utf-8'), - 'http://xn--knigsgchen-b4a3dun.de/stra%C3%9Fe' + "http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/", + "http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/", ), - ( - b'http://xn--n3h.net/', - 'http://xn--n3h.net/' - ), - ( - b'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/', - 'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/' - ), - ( - 'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/', - 'http://[1200:0000:ab00:1234:0000:2552:7777:1313]:12345/' - ) - ) + ), ) def test_preparing_url(self, url, expected): - def normalize_percent_encode(x): - # Helper function that normalizes equivalent + # Helper function that normalizes equivalent # percent-encoded bytes before comparisons - for c in re.findall(r'%[a-fA-F0-9]{2}', x): + for c in re.findall(r"%[a-fA-F0-9]{2}", x): x = x.replace(c, c.upper()) return x - - r = requests.Request('GET', url=url) + + r = requests.Request("GET", url=url) p = r.prepare() assert normalize_percent_encode(p.url) == expected @pytest.mark.parametrize( - 'url', + "url", ( b"http://*.google.com", b"http://*", - u"http://*.google.com", - u"http://*", - u"http://☃.net/" - ) + "http://*.google.com", + "http://*", + "http://☃.net/", + ), ) def test_preparing_bad_url(self, url): - r = requests.Request('GET', url=url) + r = requests.Request("GET", url=url) with pytest.raises(requests.exceptions.InvalidURL): r.prepare() - @pytest.mark.parametrize( - 'url, exception', - ( - ('http://localhost:-1', InvalidURL), - ) - ) + @pytest.mark.parametrize("url, exception", (("http://localhost:-1", InvalidURL),)) def test_redirecting_to_bad_url(self, httpbin, url, exception): with pytest.raises(exception): - r = requests.get(httpbin('redirect-to'), params={'url': url}) + requests.get(httpbin("redirect-to"), params={"url": url}) @pytest.mark.parametrize( - 'input, expected', + "input, expected", ( ( b"http+unix://%2Fvar%2Frun%2Fsocket/path%7E", - u"http+unix://%2Fvar%2Frun%2Fsocket/path~", + "http+unix://%2Fvar%2Frun%2Fsocket/path~", ), ( - u"http+unix://%2Fvar%2Frun%2Fsocket/path%7E", - u"http+unix://%2Fvar%2Frun%2Fsocket/path~", + 
"http+unix://%2Fvar%2Frun%2Fsocket/path%7E", + "http+unix://%2Fvar%2Frun%2Fsocket/path~", ), ( b"mailto:user@example.org", - u"mailto:user@example.org", + "mailto:user@example.org", ), ( - u"mailto:user@example.org", - u"mailto:user@example.org", + "mailto:user@example.org", + "mailto:user@example.org", ), ( b"data:SSDimaUgUHl0aG9uIQ==", - u"data:SSDimaUgUHl0aG9uIQ==", - ) - ) + "data:SSDimaUgUHl0aG9uIQ==", + ), + ), ) def test_url_mutation(self, input, expected): """ @@ -2610,51 +2694,51 @@ def test_url_mutation(self, input, expected): any URL whose scheme doesn't begin with "http" is left alone, and those whose scheme *does* begin with "http" are mutated. """ - r = requests.Request('GET', url=input) + r = requests.Request("GET", url=input) p = r.prepare() assert p.url == expected @pytest.mark.parametrize( - 'input, params, expected', + "input, params, expected", ( ( b"http+unix://%2Fvar%2Frun%2Fsocket/path", {"key": "value"}, - u"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value", + "http+unix://%2Fvar%2Frun%2Fsocket/path?key=value", ), ( - u"http+unix://%2Fvar%2Frun%2Fsocket/path", + "http+unix://%2Fvar%2Frun%2Fsocket/path", {"key": "value"}, - u"http+unix://%2Fvar%2Frun%2Fsocket/path?key=value", + "http+unix://%2Fvar%2Frun%2Fsocket/path?key=value", ), ( b"mailto:user@example.org", {"key": "value"}, - u"mailto:user@example.org", + "mailto:user@example.org", ), ( - u"mailto:user@example.org", + "mailto:user@example.org", {"key": "value"}, - u"mailto:user@example.org", + "mailto:user@example.org", ), - ) + ), ) def test_parameters_for_nonstandard_schemes(self, input, params, expected): """ Setting parameters for nonstandard schemes is allowed if those schemes begin with "http", and is forbidden otherwise. """ - r = requests.Request('GET', url=input, params=params) + r = requests.Request("GET", url=input, params=params) p = r.prepare() assert p.url == expected def test_post_json_nan(self, httpbin): data = {"foo": float("nan")} with pytest.raises(requests.exceptions.InvalidJSONError): - r = requests.post(httpbin('post'), json=data) + requests.post(httpbin("post"), json=data) def test_json_decode_compatibility(self, httpbin): - r = requests.get(httpbin('bytes/20')) + r = requests.get(httpbin("bytes/20")) with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo: r.json() assert isinstance(excinfo.value, RequestException) @@ -2662,7 +2746,7 @@ def test_json_decode_compatibility(self, httpbin): assert r.text not in str(excinfo.value) def test_json_decode_persists_doc_attr(self, httpbin): - r = requests.get(httpbin('bytes/20')) + r = requests.get(httpbin("bytes/20")) with pytest.raises(requests.exceptions.JSONDecodeError) as excinfo: r.json() assert excinfo.value.doc == r.text diff --git a/tests/test_structures.py b/tests/test_structures.py index e4d2459fe7..e2fd5baaf2 100644 --- a/tests/test_structures.py +++ b/tests/test_structures.py @@ -1,26 +1,25 @@ -# -*- coding: utf-8 -*- - import pytest from requests.structures import CaseInsensitiveDict, LookupDict class TestCaseInsensitiveDict: - @pytest.fixture(autouse=True) def setup(self): """CaseInsensitiveDict instance with "Accept" header.""" self.case_insensitive_dict = CaseInsensitiveDict() - self.case_insensitive_dict['Accept'] = 'application/json' + self.case_insensitive_dict["Accept"] = "application/json" def test_list(self): - assert list(self.case_insensitive_dict) == ['Accept'] + assert list(self.case_insensitive_dict) == ["Accept"] - possible_keys = pytest.mark.parametrize('key', ('accept', 'ACCEPT', 'aCcEpT', 'Accept')) + 
possible_keys = pytest.mark.parametrize( + "key", ("accept", "ACCEPT", "aCcEpT", "Accept") + ) @possible_keys def test_getitem(self, key): - assert self.case_insensitive_dict[key] == 'application/json' + assert self.case_insensitive_dict[key] == "application/json" @possible_keys def test_delitem(self, key): @@ -28,7 +27,9 @@ def test_delitem(self, key): assert key not in self.case_insensitive_dict def test_lower_items(self): - assert list(self.case_insensitive_dict.lower_items()) == [('accept', 'application/json')] + assert list(self.case_insensitive_dict.lower_items()) == [ + ("accept", "application/json") + ] def test_repr(self): assert repr(self.case_insensitive_dict) == "{'Accept': 'application/json'}" @@ -39,32 +40,33 @@ def test_copy(self): assert copy == self.case_insensitive_dict @pytest.mark.parametrize( - 'other, result', ( - ({'AccePT': 'application/json'}, True), + "other, result", + ( + ({"AccePT": "application/json"}, True), ({}, False), - (None, False) - ) + (None, False), + ), ) def test_instance_equality(self, other, result): assert (self.case_insensitive_dict == other) is result class TestLookupDict: - @pytest.fixture(autouse=True) def setup(self): """LookupDict instance with "bad_gateway" attribute.""" - self.lookup_dict = LookupDict('test') + self.lookup_dict = LookupDict("test") self.lookup_dict.bad_gateway = 502 def test_repr(self): assert repr(self.lookup_dict) == "<lookup 'test'>" get_item_parameters = pytest.mark.parametrize( - 'key, value', ( - ('bad_gateway', 502), - ('not_a_key', None) - ) + "key, value", + ( + ("bad_gateway", 502), + ("not_a_key", None), + ), ) @get_item_parameters diff --git a/tests/test_testserver.py b/tests/test_testserver.py index aac529261b..c73a3f1f59 100644 --- a/tests/test_testserver.py +++ b/tests/test_testserver.py @@ -1,16 +1,14 @@ -# -*- coding: utf-8 -*- - -import threading import socket +import threading import time import pytest -import requests from tests.testserver.server import Server +import requests -class TestTestServer: +class TestTestServer: def test_basic(self): """messages are sent and received properly""" question = b"success?"
@@ -44,36 +42,37 @@ def test_server_closes(self): def test_text_response(self): """the text_response_server sends the given text""" server = Server.text_response_server( - "HTTP/1.1 200 OK\r\n" + - "Content-Length: 6\r\n" + - "\r\nroflol" + "HTTP/1.1 200 OK\r\n" "Content-Length: 6\r\n" "\r\nroflol" ) with server as (host, port): - r = requests.get('http://{}:{}'.format(host, port)) + r = requests.get(f"http://{host}:{port}") assert r.status_code == 200 - assert r.text == u'roflol' - assert r.headers['Content-Length'] == '6' + assert r.text == "roflol" + assert r.headers["Content-Length"] == "6" def test_basic_response(self): """the basic response server returns an empty http response""" with Server.basic_response_server() as (host, port): - r = requests.get('http://{}:{}'.format(host, port)) + r = requests.get(f"http://{host}:{port}") assert r.status_code == 200 - assert r.text == u'' - assert r.headers['Content-Length'] == '0' + assert r.text == "" + assert r.headers["Content-Length"] == "0" def test_basic_waiting_server(self): """the server waits for the block_server event to be set before closing""" block_server = threading.Event() - with Server.basic_response_server(wait_to_close_event=block_server) as (host, port): + with Server.basic_response_server(wait_to_close_event=block_server) as ( + host, + port, + ): sock = socket.socket() sock.connect((host, port)) - sock.sendall(b'send something') + sock.sendall(b"send something") time.sleep(2.5) - sock.sendall(b'still alive') + sock.sendall(b"still alive") block_server.set() # release server block def test_multiple_requests(self): @@ -83,7 +82,7 @@ def test_multiple_requests(self): server = Server.basic_response_server(requests_to_handle=requests_to_handle) with server as (host, port): - server_url = 'http://{}:{}'.format(host, port) + server_url = f"http://{host}:{port}" for _ in range(requests_to_handle): r = requests.get(server_url) assert r.status_code == 200 @@ -97,8 +96,8 @@ def test_request_recovery(self): """can check the requests content""" # TODO: figure out why this sometimes fails when using pytest-xdist. 
server = Server.basic_response_server(requests_to_handle=2) - first_request = b'put your hands up in the air' - second_request = b'put your hand down in the floor' + first_request = b"put your hands up in the air" + second_request = b"put your hand down in the floor" with server as address: sock1 = socket.socket() @@ -123,15 +122,15 @@ def test_requests_after_timeout_are_not_received(self): sock = socket.socket() sock.connect(address) time.sleep(1.5) - sock.sendall(b'hehehe, not received') + sock.sendall(b"hehehe, not received") sock.close() - assert server.handler_results[0] == b'' + assert server.handler_results[0] == b"" def test_request_recovery_with_bigger_timeout(self): """a bigger timeout can be specified""" server = Server.basic_response_server(request_timeout=3) - data = b'bananadine' + data = b"bananadine" with server as address: sock = socket.socket() diff --git a/tests/test_utils.py b/tests/test_utils.py index 931a1b92c7..a714b4700e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,42 +1,61 @@ -# -*- coding: utf-8 -*- - -import os import copy import filecmp -from io import BytesIO +import os import tarfile import zipfile from collections import deque +from io import BytesIO import pytest + from requests import compat +from requests._internal_utils import unicode_is_ascii from requests.cookies import RequestsCookieJar from requests.structures import CaseInsensitiveDict from requests.utils import ( - address_in_network, dotted_netmask, extract_zipped_paths, - get_auth_from_url, _parse_content_type_header, get_encoding_from_headers, - get_encodings_from_content, get_environ_proxies, - guess_filename, guess_json_utf, is_ipv4_address, - is_valid_cidr, iter_slices, parse_dict_header, - parse_header_links, prepend_scheme_if_needed, - requote_uri, select_proxy, should_bypass_proxies, super_len, - to_key_val_list, to_native_string, - unquote_header_value, unquote_unreserved, - urldefragauth, add_dict_to_cookiejar, set_environ) -from requests._internal_utils import unicode_is_ascii + _parse_content_type_header, + add_dict_to_cookiejar, + address_in_network, + dotted_netmask, + extract_zipped_paths, + get_auth_from_url, + get_encoding_from_headers, + get_encodings_from_content, + get_environ_proxies, + guess_filename, + guess_json_utf, + is_ipv4_address, + is_valid_cidr, + iter_slices, + parse_dict_header, + parse_header_links, + prepend_scheme_if_needed, + requote_uri, + select_proxy, + set_environ, + should_bypass_proxies, + super_len, + to_key_val_list, + to_native_string, + unquote_header_value, + unquote_unreserved, + urldefragauth, ) from .compat import StringIO, cStringIO class TestSuperLen: - @pytest.mark.parametrize( - 'stream, value', ( - (StringIO.StringIO, 'Test'), - (BytesIO, b'Test'), - pytest.param(cStringIO, 'Test', - marks=pytest.mark.skipif('cStringIO is None')), - )) + "stream, value", + ( + (StringIO.StringIO, "Test"), + (BytesIO, b"Test"), + pytest.param( + cStringIO, "Test", marks=pytest.mark.skipif("cStringIO is None") + ), + ), + ) def test_io_streams(self, stream, value): """Ensures that we properly deal with different kinds of IO streams.""" assert super_len(stream()) == 0 @@ -45,13 +64,14 @@ def test_io_streams(self, stream, value): def test_super_len_correctly_calculates_len_of_partially_read_file(self): """Ensure that we handle partially consumed file like objects.""" s = StringIO.StringIO() - s.write('foobarbogus') + s.write("foobarbogus") assert super_len(s) == 0 - @pytest.mark.parametrize('error', [IOError, OSError]) +
@pytest.mark.parametrize("error", [IOError, OSError]) def test_super_len_handles_files_raising_weird_errors_in_tell(self, error): """If tell() raises errors, assume the cursor is at position zero.""" - class BoomFile(object): + + class BoomFile: def __len__(self): return 5 @@ -60,10 +80,11 @@ def tell(self): assert super_len(BoomFile()) == 0 - @pytest.mark.parametrize('error', [IOError, OSError]) + @pytest.mark.parametrize("error", [IOError, OSError]) def test_super_len_tell_ioerror(self, error): """Ensure that if tell gives an IOError super_len doesn't fail""" - class NoLenBoomFile(object): + + class NoLenBoomFile: def tell(self): raise error() @@ -73,52 +94,54 @@ def seek(self, offset, whence): assert super_len(NoLenBoomFile()) == 0 def test_string(self): - assert super_len('Test') == 4 + assert super_len("Test") == 4 @pytest.mark.parametrize( - 'mode, warnings_num', ( - ('r', 1), - ('rb', 0), - )) + "mode, warnings_num", + ( + ("r", 1), + ("rb", 0), + ), + ) def test_file(self, tmpdir, mode, warnings_num, recwarn): - file_obj = tmpdir.join('test.txt') - file_obj.write('Test') + file_obj = tmpdir.join("test.txt") + file_obj.write("Test") with file_obj.open(mode) as fd: assert super_len(fd) == 4 assert len(recwarn) == warnings_num def test_tarfile_member(self, tmpdir): - file_obj = tmpdir.join('test.txt') - file_obj.write('Test') + file_obj = tmpdir.join("test.txt") + file_obj.write("Test") - tar_obj = str(tmpdir.join('test.tar')) - with tarfile.open(tar_obj, 'w') as tar: - tar.add(str(file_obj), arcname='test.txt') + tar_obj = str(tmpdir.join("test.tar")) + with tarfile.open(tar_obj, "w") as tar: + tar.add(str(file_obj), arcname="test.txt") with tarfile.open(tar_obj) as tar: - member = tar.extractfile('test.txt') + member = tar.extractfile("test.txt") assert super_len(member) == 4 def test_super_len_with__len__(self): - foo = [1,2,3,4] + foo = [1, 2, 3, 4] len_foo = super_len(foo) assert len_foo == 4 def test_super_len_with_no__len__(self): - class LenFile(object): + class LenFile: def __init__(self): self.len = 5 assert super_len(LenFile()) == 5 def test_super_len_with_tell(self): - foo = StringIO.StringIO('12345') + foo = StringIO.StringIO("12345") assert super_len(foo) == 5 foo.read(2) assert super_len(foo) == 3 def test_super_len_with_fileno(self): - with open(__file__, 'rb') as f: + with open(__file__, "rb") as f: length = super_len(f) file_data = f.read() assert length == len(file_data) @@ -129,37 +152,39 @@ def test_super_len_with_no_matches(self): class TestToKeyValList: - @pytest.mark.parametrize( - 'value, expected', ( - ([('key', 'val')], [('key', 'val')]), - ((('key', 'val'), ), [('key', 'val')]), - ({'key': 'val'}, [('key', 'val')]), - (None, None) - )) + "value, expected", + ( + ([("key", "val")], [("key", "val")]), + ((("key", "val"),), [("key", "val")]), + ({"key": "val"}, [("key", "val")]), + (None, None), + ), + ) def test_valid(self, value, expected): assert to_key_val_list(value) == expected def test_invalid(self): with pytest.raises(ValueError): - to_key_val_list('string') + to_key_val_list("string") class TestUnquoteHeaderValue: - @pytest.mark.parametrize( - 'value, expected', ( + "value, expected", + ( (None, None), - ('Test', 'Test'), - ('"Test"', 'Test'), - ('"Test\\\\"', 'Test\\'), - ('"\\\\Comp\\Res"', '\\Comp\\Res'), - )) + ("Test", "Test"), + ('"Test"', "Test"), + ('"Test\\\\"', "Test\\"), + ('"\\\\Comp\\Res"', "\\Comp\\Res"), + ), + ) def test_valid(self, value, expected): assert unquote_header_value(value) == expected def test_is_filename(self): - assert 
unquote_header_value('"\\\\Comp\\Res"', True) == '\\\\Comp\\Res' + assert unquote_header_value('"\\\\Comp\\Res"', True) == "\\\\Comp\\Res" class TestGetEnvironProxies: @@ -167,131 +192,143 @@ class TestGetEnvironProxies: in no_proxy variable. """ - @pytest.fixture(autouse=True, params=['no_proxy', 'NO_PROXY']) + @pytest.fixture(autouse=True, params=["no_proxy", "NO_PROXY"]) def no_proxy(self, request, monkeypatch): - monkeypatch.setenv(request.param, '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1') + monkeypatch.setenv( + request.param, "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1" + ) @pytest.mark.parametrize( - 'url', ( - 'http://192.168.0.1:5000/', - 'http://192.168.0.1/', - 'http://172.16.1.1/', - 'http://172.16.1.1:5000/', - 'http://localhost.localdomain:5000/v1.0/', - )) + "url", + ( + "http://192.168.0.1:5000/", + "http://192.168.0.1/", + "http://172.16.1.1/", + "http://172.16.1.1:5000/", + "http://localhost.localdomain:5000/v1.0/", + ), + ) def test_bypass(self, url): assert get_environ_proxies(url, no_proxy=None) == {} @pytest.mark.parametrize( - 'url', ( - 'http://192.168.1.1:5000/', - 'http://192.168.1.1/', - 'http://www.requests.com/', - )) + "url", + ( + "http://192.168.1.1:5000/", + "http://192.168.1.1/", + "http://www.requests.com/", + ), + ) def test_not_bypass(self, url): assert get_environ_proxies(url, no_proxy=None) != {} @pytest.mark.parametrize( - 'url', ( - 'http://192.168.1.1:5000/', - 'http://192.168.1.1/', - 'http://www.requests.com/', - )) + "url", + ( + "http://192.168.1.1:5000/", + "http://192.168.1.1/", + "http://www.requests.com/", + ), + ) def test_bypass_no_proxy_keyword(self, url): - no_proxy = '192.168.1.1,requests.com' + no_proxy = "192.168.1.1,requests.com" assert get_environ_proxies(url, no_proxy=no_proxy) == {} @pytest.mark.parametrize( - 'url', ( - 'http://192.168.0.1:5000/', - 'http://192.168.0.1/', - 'http://172.16.1.1/', - 'http://172.16.1.1:5000/', - 'http://localhost.localdomain:5000/v1.0/', - )) + "url", + ( + "http://192.168.0.1:5000/", + "http://192.168.0.1/", + "http://172.16.1.1/", + "http://172.16.1.1:5000/", + "http://localhost.localdomain:5000/v1.0/", + ), + ) def test_not_bypass_no_proxy_keyword(self, url, monkeypatch): # This is testing that the 'no_proxy' argument overrides the # environment variable 'no_proxy' - monkeypatch.setenv('http_proxy', 'http://proxy.example.com:3128/') - no_proxy = '192.168.1.1,requests.com' + monkeypatch.setenv("http_proxy", "http://proxy.example.com:3128/") + no_proxy = "192.168.1.1,requests.com" assert get_environ_proxies(url, no_proxy=no_proxy) != {} class TestIsIPv4Address: - def test_valid(self): - assert is_ipv4_address('8.8.8.8') + assert is_ipv4_address("8.8.8.8") - @pytest.mark.parametrize('value', ('8.8.8.8.8', 'localhost.localdomain')) + @pytest.mark.parametrize("value", ("8.8.8.8.8", "localhost.localdomain")) def test_invalid(self, value): assert not is_ipv4_address(value) class TestIsValidCIDR: - def test_valid(self): - assert is_valid_cidr('192.168.1.0/24') + assert is_valid_cidr("192.168.1.0/24") @pytest.mark.parametrize( - 'value', ( - '8.8.8.8', - '192.168.1.0/a', - '192.168.1.0/128', - '192.168.1.0/-1', - '192.168.1.999/24', - )) + "value", + ( + "8.8.8.8", + "192.168.1.0/a", + "192.168.1.0/128", + "192.168.1.0/-1", + "192.168.1.999/24", + ), + ) def test_invalid(self, value): assert not is_valid_cidr(value) class TestAddressInNetwork: - def test_valid(self): - assert address_in_network('192.168.1.1', '192.168.1.0/24') + assert address_in_network("192.168.1.1", 
"192.168.1.0/24") def test_invalid(self): - assert not address_in_network('172.16.0.1', '192.168.1.0/24') + assert not address_in_network("172.16.0.1", "192.168.1.0/24") class TestGuessFilename: - @pytest.mark.parametrize( - 'value', (1, type('Fake', (object,), {'name': 1})()), + "value", + (1, type("Fake", (object,), {"name": 1})()), ) def test_guess_filename_invalid(self, value): assert guess_filename(value) is None @pytest.mark.parametrize( - 'value, expected_type', ( - (b'value', compat.bytes), - (b'value'.decode('utf-8'), compat.str) - )) + "value, expected_type", + ( + (b"value", compat.bytes), + (b"value".decode("utf-8"), compat.str), + ), + ) def test_guess_filename_valid(self, value, expected_type): - obj = type('Fake', (object,), {'name': value})() + obj = type("Fake", (object,), {"name": value})() result = guess_filename(obj) assert result == value assert isinstance(result, expected_type) class TestExtractZippedPaths: - @pytest.mark.parametrize( - 'path', ( - '/', + "path", + ( + "/", __file__, pytest.__file__, - '/etc/invalid/location', - )) + "/etc/invalid/location", + ), + ) def test_unzipped_paths_unchanged(self, path): assert path == extract_zipped_paths(path) def test_zipped_paths_extracted(self, tmpdir): - zipped_py = tmpdir.join('test.zip') - with zipfile.ZipFile(zipped_py.strpath, 'w') as f: + zipped_py = tmpdir.join("test.zip") + with zipfile.ZipFile(zipped_py.strpath, "w") as f: f.write(__file__) _, name = os.path.splitdrive(__file__) - zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r'\/')) + zipped_path = os.path.join(zipped_py.strpath, name.lstrip(r"\/")) extracted_path = extract_zipped_paths(zipped_path) assert extracted_path != zipped_path @@ -304,13 +341,13 @@ def test_invalid_unc_path(self): class TestContentEncodingDetection: - def test_none(self): - encodings = get_encodings_from_content('') + encodings = get_encodings_from_content("") assert not len(encodings) @pytest.mark.parametrize( - 'content', ( + "content", + ( # HTML5 meta charset attribute '', # HTML4 pragma directive @@ -319,246 +356,283 @@ def test_none(self): '', # XHTML 1.x served as XML '', - )) + ), + ) def test_pragmas(self, content): encodings = get_encodings_from_content(content) assert len(encodings) == 1 - assert encodings[0] == 'UTF-8' + assert encodings[0] == "UTF-8" def test_precedence(self): - content = ''' + content = """ - '''.strip() - assert get_encodings_from_content(content) == ['HTML5', 'HTML4', 'XML'] + """.strip() + assert get_encodings_from_content(content) == ["HTML5", "HTML4", "XML"] class TestGuessJSONUTF: - @pytest.mark.parametrize( - 'encoding', ( - 'utf-32', 'utf-8-sig', 'utf-16', 'utf-8', 'utf-16-be', 'utf-16-le', - 'utf-32-be', 'utf-32-le' - )) + "encoding", + ( + "utf-32", + "utf-8-sig", + "utf-16", + "utf-8", + "utf-16-be", + "utf-16-le", + "utf-32-be", + "utf-32-le", + ), + ) def test_encoded(self, encoding): - data = '{}'.encode(encoding) + data = "{}".encode(encoding) assert guess_json_utf(data) == encoding def test_bad_utf_like_encoding(self): - assert guess_json_utf(b'\x00\x00\x00\x00') is None + assert guess_json_utf(b"\x00\x00\x00\x00") is None @pytest.mark.parametrize( - ('encoding', 'expected'), ( - ('utf-16-be', 'utf-16'), - ('utf-16-le', 'utf-16'), - ('utf-32-be', 'utf-32'), - ('utf-32-le', 'utf-32') - )) + ("encoding", "expected"), + ( + ("utf-16-be", "utf-16"), + ("utf-16-le", "utf-16"), + ("utf-32-be", "utf-32"), + ("utf-32-le", "utf-32"), + ), + ) def test_guess_by_bom(self, encoding, expected): - data = u'\ufeff{}'.encode(encoding) + data = 
"\ufeff{}".encode(encoding) assert guess_json_utf(data) == expected USER = PASSWORD = "%!*'();:@&=+$,/?#[] " -ENCODED_USER = compat.quote(USER, '') -ENCODED_PASSWORD = compat.quote(PASSWORD, '') +ENCODED_USER = compat.quote(USER, "") +ENCODED_PASSWORD = compat.quote(PASSWORD, "") @pytest.mark.parametrize( - 'url, auth', ( + "url, auth", + ( ( - 'http://' + ENCODED_USER + ':' + ENCODED_PASSWORD + '@' + - 'request.com/url.html#test', - (USER, PASSWORD) + f"http://{ENCODED_USER}:{ENCODED_PASSWORD}@request.com/url.html#test", + (USER, PASSWORD), ), + ("http://user:pass@complex.url.com/path?query=yes", ("user", "pass")), ( - 'http://user:pass@complex.url.com/path?query=yes', - ('user', 'pass') + "http://user:pass%20pass@complex.url.com/path?query=yes", + ("user", "pass pass"), ), + ("http://user:pass pass@complex.url.com/path?query=yes", ("user", "pass pass")), ( - 'http://user:pass%20pass@complex.url.com/path?query=yes', - ('user', 'pass pass') + "http://user%25user:pass@complex.url.com/path?query=yes", + ("user%user", "pass"), ), ( - 'http://user:pass pass@complex.url.com/path?query=yes', - ('user', 'pass pass') + "http://user:pass%23pass@complex.url.com/path?query=yes", + ("user", "pass#pass"), ), - ( - 'http://user%25user:pass@complex.url.com/path?query=yes', - ('user%user', 'pass') - ), - ( - 'http://user:pass%23pass@complex.url.com/path?query=yes', - ('user', 'pass#pass') - ), - ( - 'http://complex.url.com/path?query=yes', - ('', '') - ), - )) + ("http://complex.url.com/path?query=yes", ("", "")), + ), +) def test_get_auth_from_url(url, auth): assert get_auth_from_url(url) == auth @pytest.mark.parametrize( - 'uri, expected', ( + "uri, expected", + ( ( # Ensure requoting doesn't break expectations - 'http://example.com/fiz?buz=%25ppicture', - 'http://example.com/fiz?buz=%25ppicture', + "http://example.com/fiz?buz=%25ppicture", + "http://example.com/fiz?buz=%25ppicture", ), ( # Ensure we handle unquoted percent signs in redirects - 'http://example.com/fiz?buz=%ppicture', - 'http://example.com/fiz?buz=%25ppicture', + "http://example.com/fiz?buz=%ppicture", + "http://example.com/fiz?buz=%25ppicture", ), - )) + ), +) def test_requote_uri_with_unquoted_percents(uri, expected): """See: https://github.com/psf/requests/issues/2356""" assert requote_uri(uri) == expected @pytest.mark.parametrize( - 'uri, expected', ( + "uri, expected", + ( ( # Illegal bytes - 'http://example.com/?a=%--', - 'http://example.com/?a=%--', + "http://example.com/?a=%--", + "http://example.com/?a=%--", ), ( # Reserved characters - 'http://example.com/?a=%300', - 'http://example.com/?a=00', - ) - )) + "http://example.com/?a=%300", + "http://example.com/?a=00", + ), + ), +) def test_unquote_unreserved(uri, expected): assert unquote_unreserved(uri) == expected @pytest.mark.parametrize( - 'mask, expected', ( - (8, '255.0.0.0'), - (24, '255.255.255.0'), - (25, '255.255.255.128'), - )) + "mask, expected", + ( + (8, "255.0.0.0"), + (24, "255.255.255.0"), + (25, "255.255.255.128"), + ), +) def test_dotted_netmask(mask, expected): assert dotted_netmask(mask) == expected -http_proxies = {'http': 'http://http.proxy', - 'http://some.host': 'http://some.host.proxy'} -all_proxies = {'all': 'socks5://http.proxy', - 'all://some.host': 'socks5://some.host.proxy'} -mixed_proxies = {'http': 'http://http.proxy', - 'http://some.host': 'http://some.host.proxy', - 'all': 'socks5://http.proxy'} +http_proxies = { + "http": "http://http.proxy", + "http://some.host": "http://some.host.proxy", +} +all_proxies = { + "all": "socks5://http.proxy", + 
"all://some.host": "socks5://some.host.proxy", +} +mixed_proxies = { + "http": "http://http.proxy", + "http://some.host": "http://some.host.proxy", + "all": "socks5://http.proxy", +} + + @pytest.mark.parametrize( - 'url, expected, proxies', ( - ('hTTp://u:p@Some.Host/path', 'http://some.host.proxy', http_proxies), - ('hTTp://u:p@Other.Host/path', 'http://http.proxy', http_proxies), - ('hTTp:///path', 'http://http.proxy', http_proxies), - ('hTTps://Other.Host', None, http_proxies), - ('file:///etc/motd', None, http_proxies), - - ('hTTp://u:p@Some.Host/path', 'socks5://some.host.proxy', all_proxies), - ('hTTp://u:p@Other.Host/path', 'socks5://http.proxy', all_proxies), - ('hTTp:///path', 'socks5://http.proxy', all_proxies), - ('hTTps://Other.Host', 'socks5://http.proxy', all_proxies), - - ('http://u:p@other.host/path', 'http://http.proxy', mixed_proxies), - ('http://u:p@some.host/path', 'http://some.host.proxy', mixed_proxies), - ('https://u:p@other.host/path', 'socks5://http.proxy', mixed_proxies), - ('https://u:p@some.host/path', 'socks5://http.proxy', mixed_proxies), - ('https://', 'socks5://http.proxy', mixed_proxies), + "url, expected, proxies", + ( + ("hTTp://u:p@Some.Host/path", "http://some.host.proxy", http_proxies), + ("hTTp://u:p@Other.Host/path", "http://http.proxy", http_proxies), + ("hTTp:///path", "http://http.proxy", http_proxies), + ("hTTps://Other.Host", None, http_proxies), + ("file:///etc/motd", None, http_proxies), + ("hTTp://u:p@Some.Host/path", "socks5://some.host.proxy", all_proxies), + ("hTTp://u:p@Other.Host/path", "socks5://http.proxy", all_proxies), + ("hTTp:///path", "socks5://http.proxy", all_proxies), + ("hTTps://Other.Host", "socks5://http.proxy", all_proxies), + ("http://u:p@other.host/path", "http://http.proxy", mixed_proxies), + ("http://u:p@some.host/path", "http://some.host.proxy", mixed_proxies), + ("https://u:p@other.host/path", "socks5://http.proxy", mixed_proxies), + ("https://u:p@some.host/path", "socks5://http.proxy", mixed_proxies), + ("https://", "socks5://http.proxy", mixed_proxies), # XXX: unsure whether this is reasonable behavior - ('file:///etc/motd', 'socks5://http.proxy', all_proxies), - )) + ("file:///etc/motd", "socks5://http.proxy", all_proxies), + ), +) def test_select_proxies(url, expected, proxies): """Make sure we can select per-host proxies correctly.""" assert select_proxy(url, proxies) == expected @pytest.mark.parametrize( - 'value, expected', ( - ('foo="is a fish", bar="as well"', {'foo': 'is a fish', 'bar': 'as well'}), - ('key_without_value', {'key_without_value': None}) - )) + "value, expected", + ( + ('foo="is a fish", bar="as well"', {"foo": "is a fish", "bar": "as well"}), + ("key_without_value", {"key_without_value": None}), + ), +) def test_parse_dict_header(value, expected): assert parse_dict_header(value) == expected @pytest.mark.parametrize( - 'value, expected', ( - ( - 'application/xml', - ('application/xml', {}) - ), + "value, expected", + ( + ("application/xml", ("application/xml", {})), ( - 'application/json ; charset=utf-8', - ('application/json', {'charset': 'utf-8'}) + "application/json ; charset=utf-8", + ("application/json", {"charset": "utf-8"}), ), ( - 'application/json ; Charset=utf-8', - ('application/json', {'charset': 'utf-8'}) + "application/json ; Charset=utf-8", + ("application/json", {"charset": "utf-8"}), ), + ("text/plain", ("text/plain", {})), ( - 'text/plain', - ('text/plain', {}) + "multipart/form-data; boundary = something ; boundary2='something_else' ; no_equals ", + ( + "multipart/form-data", 
+ { + "boundary": "something", + "boundary2": "something_else", + "no_equals": True, + }, + ), ), ( - 'multipart/form-data; boundary = something ; boundary2="something_else" ; no_equals ', - ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True}) + 'multipart/form-data; boundary = something ; boundary2="something_else" ; no_equals ', + ( + "multipart/form-data", + { + "boundary": "something", + "boundary2": "something_else", + "no_equals": True, + }, + ), ), ( - 'multipart/form-data; boundary = something ; \'boundary2=something_else\' ; no_equals ', - ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True}) + "multipart/form-data; boundary = something ; 'boundary2=something_else' ; no_equals ", + ( + "multipart/form-data", + { + "boundary": "something", + "boundary2": "something_else", + "no_equals": True, + }, + ), ), ( 'multipart/form-data; boundary = something ; "boundary2=something_else" ; no_equals ', - ('multipart/form-data', {'boundary': 'something', 'boundary2': 'something_else', 'no_equals': True}) + ( + "multipart/form-data", + { + "boundary": "something", + "boundary2": "something_else", + "no_equals": True, + }, + ), ), - ( - 'application/json ; ; ', - ('application/json', {}) - ) - )) + ("application/json ; ; ", ("application/json", {})), + ), +) def test__parse_content_type_header(value, expected): assert _parse_content_type_header(value) == expected @pytest.mark.parametrize( - 'value, expected', ( - ( - CaseInsensitiveDict(), - None - ), + "value, expected", + ( + (CaseInsensitiveDict(), None), ( - CaseInsensitiveDict({'content-type': 'application/json; charset=utf-8'}), - 'utf-8' + CaseInsensitiveDict({"content-type": "application/json; charset=utf-8"}), + "utf-8", ), - ( - CaseInsensitiveDict({'content-type': 'text/plain'}), - 'ISO-8859-1' - ), - )) + (CaseInsensitiveDict({"content-type": "text/plain"}), "ISO-8859-1"), + ), +) def test_get_encoding_from_headers(value, expected): assert get_encoding_from_headers(value) == expected @pytest.mark.parametrize( - 'value, length', ( - ('', 0), - ('T', 1), - ('Test', 4), - ('Cont', 0), - ('Other', -5), - ('Content', None), - )) + "value, length", + ( + ("", 0), + ("T", 1), + ("Test", 4), + ("Cont", 0), + ("Other", -5), + ("Content", None), + ), +) def test_iter_slices(value, length): if length is None or (length <= 0 and len(value) > 0): # Reads all content at once @@ -568,187 +642,197 @@ @pytest.mark.parametrize( - 'value, expected', ( + "value, expected", + ( ( '<http:/.../front.jpeg>; rel=front; type="image/jpeg"', - [{'url': 'http:/.../front.jpeg', 'rel': 'front', 'type': 'image/jpeg'}] - ), - ( - '<http:/.../front.jpeg>', - [{'url': 'http:/.../front.jpeg'}] - ), - ( - '<http:/.../front.jpeg>;', - [{'url': 'http:/.../front.jpeg'}] + [{"url": "http:/.../front.jpeg", "rel": "front", "type": "image/jpeg"}], ), + ("<http:/.../front.jpeg>", [{"url": "http:/.../front.jpeg"}]), + ("<http:/.../front.jpeg>;", [{"url": "http:/.../front.jpeg"}]), ( '<http:/.../front.jpeg>; type="image/jpeg",<http://.../back.jpeg>;', [ - {'url': 'http:/.../front.jpeg', 'type': 'image/jpeg'}, - {'url': 'http://.../back.jpeg'} - ] + {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, + {"url": "http://.../back.jpeg"}, + ], ), - ( - '', - [] - ), - )) + ("", []), + ), +) def test_parse_header_links(value, expected): assert parse_header_links(value) == expected
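Note for readers tracing the expectations above: parse_header_links splits an RFC 8288 Link header into one dict per <URL> entry, attaching any parameters (rel, type, and so on) alongside the mandatory url key. A minimal sketch of that behavior, not part of the patch itself; it assumes only that requests is installed, and the URLs are illustrative:

    from requests.utils import parse_header_links

    header = '<http://example.com/2>; rel="next", <http://example.com/9>; rel="last"'
    for link in parse_header_links(header):
        # Each dict carries the target URL plus its parameters, e.g.
        # {'url': 'http://example.com/2', 'rel': 'next'}
        print(link["url"], link.get("rel"))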
@pytest.mark.parametrize( - 'value, expected', ( - ('example.com/path', 'http://example.com/path'), - ('//example.com/path', 'http://example.com/path'), - ('example.com:80', 'http://example.com:80'), + "value, expected", + ( + ("example.com/path", "http://example.com/path"), + ("//example.com/path", "http://example.com/path"), + ("example.com:80", "http://example.com:80"), ( - 'http://user:pass@example.com/path?query', - 'http://user:pass@example.com/path?query' + "http://user:pass@example.com/path?query", + "http://user:pass@example.com/path?query", ), - ( - 'http://user@example.com/path?query', - 'http://user@example.com/path?query' - ) - )) + ("http://user@example.com/path?query", "http://user@example.com/path?query"), + ), +) def test_prepend_scheme_if_needed(value, expected): - assert prepend_scheme_if_needed(value, 'http') == expected + assert prepend_scheme_if_needed(value, "http") == expected @pytest.mark.parametrize( - 'value, expected', ( - ('T', 'T'), - (b'T', 'T'), - (u'T', 'T'), - )) + "value, expected", + ( + ("T", "T"), + (b"T", "T"), + ("T", "T"), + ), +) def test_to_native_string(value, expected): assert to_native_string(value) == expected @pytest.mark.parametrize( - 'url, expected', ( - ('http://u:p@example.com/path?a=1#test', 'http://example.com/path?a=1'), - ('http://example.com/path', 'http://example.com/path'), - ('//u:p@example.com/path', '//example.com/path'), - ('//example.com/path', '//example.com/path'), - ('example.com/path', '//example.com/path'), - ('scheme:u:p@example.com/path', 'scheme://example.com/path'), - )) + "url, expected", + ( + ("http://u:p@example.com/path?a=1#test", "http://example.com/path?a=1"), + ("http://example.com/path", "http://example.com/path"), + ("//u:p@example.com/path", "//example.com/path"), + ("//example.com/path", "//example.com/path"), + ("example.com/path", "//example.com/path"), + ("scheme:u:p@example.com/path", "scheme://example.com/path"), + ), +) def test_urldefragauth(url, expected): assert urldefragauth(url) == expected @pytest.mark.parametrize( - 'url, expected', ( - ('http://192.168.0.1:5000/', True), - ('http://192.168.0.1/', True), - ('http://172.16.1.1/', True), - ('http://172.16.1.1:5000/', True), - ('http://localhost.localdomain:5000/v1.0/', True), - ('http://google.com:6000/', True), - ('http://172.16.1.12/', False), - ('http://172.16.1.12:5000/', False), - ('http://google.com:5000/v1.0/', False), - ('file:///some/path/on/disk', True), - )) + "url, expected", + ( + ("http://192.168.0.1:5000/", True), + ("http://192.168.0.1/", True), + ("http://172.16.1.1/", True), + ("http://172.16.1.1:5000/", True), + ("http://localhost.localdomain:5000/v1.0/", True), + ("http://google.com:6000/", True), + ("http://172.16.1.12/", False), + ("http://172.16.1.12:5000/", False), + ("http://google.com:5000/v1.0/", False), + ("file:///some/path/on/disk", True), + ), +) def test_should_bypass_proxies(url, expected, monkeypatch): """Tests for function should_bypass_proxies to check if proxy can be bypassed or not """ - monkeypatch.setenv('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000') - monkeypatch.setenv('NO_PROXY', '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000') + monkeypatch.setenv( + "no_proxy", + "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000", + ) + monkeypatch.setenv( + "NO_PROXY", + "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1, google.com:6000", + ) assert should_bypass_proxies(url, no_proxy=None) == expected 
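Note: the no_proxy values exercised above mix CIDR blocks, bare hostnames, and host:port entries. A rough sketch of the same checks through requests.utils (illustrative values; assumes a machine with no conflicting system proxy settings):

    from requests.utils import should_bypass_proxies

    no_proxy = "192.168.0.0/24,localhost.localdomain,google.com:6000"
    # True: the address falls inside the 192.168.0.0/24 block
    should_bypass_proxies("http://192.168.0.1:5000/", no_proxy=no_proxy)
    # True: the host:port entry matches exactly
    should_bypass_proxies("http://google.com:6000/", no_proxy=no_proxy)
    # False: same host, but the port does not match any entry
    should_bypass_proxies("http://google.com:5000/", no_proxy=no_proxy)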
 @pytest.mark.parametrize(
-    'url, expected', (
-        ('http://172.16.1.1/', '172.16.1.1'),
-        ('http://172.16.1.1:5000/', '172.16.1.1'),
-        ('http://user:pass@172.16.1.1', '172.16.1.1'),
-        ('http://user:pass@172.16.1.1:5000', '172.16.1.1'),
-        ('http://hostname/', 'hostname'),
-        ('http://hostname:5000/', 'hostname'),
-        ('http://user:pass@hostname', 'hostname'),
-        ('http://user:pass@hostname:5000', 'hostname'),
-    ))
+    "url, expected",
+    (
+        ("http://172.16.1.1/", "172.16.1.1"),
+        ("http://172.16.1.1:5000/", "172.16.1.1"),
+        ("http://user:pass@172.16.1.1", "172.16.1.1"),
+        ("http://user:pass@172.16.1.1:5000", "172.16.1.1"),
+        ("http://hostname/", "hostname"),
+        ("http://hostname:5000/", "hostname"),
+        ("http://user:pass@hostname", "hostname"),
+        ("http://user:pass@hostname:5000", "hostname"),
+    ),
+)
 def test_should_bypass_proxies_pass_only_hostname(url, expected, mocker):
     """The proxy_bypass function should be called with a hostname or IP without
     a port number or auth credentials.
     """
-    proxy_bypass = mocker.patch('requests.utils.proxy_bypass')
+    proxy_bypass = mocker.patch("requests.utils.proxy_bypass")
     should_bypass_proxies(url, no_proxy=None)
     proxy_bypass.assert_called_once_with(expected)
 
 
 @pytest.mark.parametrize(
-    'cookiejar', (
+    "cookiejar",
+    (
         compat.cookielib.CookieJar(),
-        RequestsCookieJar()
-    ))
+        RequestsCookieJar(),
+    ),
+)
 def test_add_dict_to_cookiejar(cookiejar):
     """Ensure add_dict_to_cookiejar works for
     non-RequestsCookieJar CookieJars
     """
-    cookiedict = {'test': 'cookies',
-                  'good': 'cookies'}
+    cookiedict = {"test": "cookies", "good": "cookies"}
     cj = add_dict_to_cookiejar(cookiejar, cookiedict)
     cookies = {cookie.name: cookie.value for cookie in cj}
     assert cookiedict == cookies
 
 
 @pytest.mark.parametrize(
-    'value, expected', (
-        (u'test', True),
-        (u'æíöû', False),
-        (u'ジェーピーニック', False),
-    )
+    "value, expected",
+    (
+        ("test", True),
+        ("æíöû", False),
+        ("ジェーピーニック", False),
+    ),
 )
 def test_unicode_is_ascii(value, expected):
     assert unicode_is_ascii(value) is expected
 
 
 @pytest.mark.parametrize(
-    'url, expected', (
-        ('http://192.168.0.1:5000/', True),
-        ('http://192.168.0.1/', True),
-        ('http://172.16.1.1/', True),
-        ('http://172.16.1.1:5000/', True),
-        ('http://localhost.localdomain:5000/v1.0/', True),
-        ('http://172.16.1.12/', False),
-        ('http://172.16.1.12:5000/', False),
-        ('http://google.com:5000/v1.0/', False),
-    ))
-def test_should_bypass_proxies_no_proxy(
-        url, expected, monkeypatch):
+    "url, expected",
+    (
+        ("http://192.168.0.1:5000/", True),
+        ("http://192.168.0.1/", True),
+        ("http://172.16.1.1/", True),
+        ("http://172.16.1.1:5000/", True),
+        ("http://localhost.localdomain:5000/v1.0/", True),
+        ("http://172.16.1.12/", False),
+        ("http://172.16.1.12:5000/", False),
+        ("http://google.com:5000/v1.0/", False),
+    ),
+)
+def test_should_bypass_proxies_no_proxy(url, expected, monkeypatch):
     """Tests for function should_bypass_proxies to check if proxy
     can be bypassed or not using the 'no_proxy' argument
     """
-    no_proxy = '192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1'
+    no_proxy = "192.168.0.0/24,127.0.0.1,localhost.localdomain,172.16.1.1"
     # Test 'no_proxy' argument
     assert should_bypass_proxies(url, no_proxy=no_proxy) == expected
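Reviewer note: add_dict_to_cookiejar is jar-agnostic, which is exactly what the parametrization above checks. A minimal usage sketch (illustrative only, not part of the diff; the cookie name and value are made up):

# Sketch only; works with any cookielib-compatible jar, returning the jar
# with the dict's cookies set on it.
from http.cookiejar import CookieJar

from requests.utils import add_dict_to_cookiejar

jar = add_dict_to_cookiejar(CookieJar(), {"session": "abc123"})
assert {cookie.name: cookie.value for cookie in jar} == {"session": "abc123"}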
-@pytest.mark.skipif(os.name != 'nt', reason='Test only on Windows')
+@pytest.mark.skipif(os.name != "nt", reason="Test only on Windows")
 @pytest.mark.parametrize(
-    'url, expected, override', (
-        ('http://192.168.0.1:5000/', True, None),
-        ('http://192.168.0.1/', True, None),
-        ('http://172.16.1.1/', True, None),
-        ('http://172.16.1.1:5000/', True, None),
-        ('http://localhost.localdomain:5000/v1.0/', True, None),
-        ('http://172.16.1.22/', False, None),
-        ('http://172.16.1.22:5000/', False, None),
-        ('http://google.com:5000/v1.0/', False, None),
-        ('http://mylocalhostname:5000/v1.0/', True, ''),
-        ('http://192.168.0.1/', False, ''),
-    ))
-def test_should_bypass_proxies_win_registry(url, expected, override,
-                                            monkeypatch):
+    "url, expected, override",
+    (
+        ("http://192.168.0.1:5000/", True, None),
+        ("http://192.168.0.1/", True, None),
+        ("http://172.16.1.1/", True, None),
+        ("http://172.16.1.1:5000/", True, None),
+        ("http://localhost.localdomain:5000/v1.0/", True, None),
+        ("http://172.16.1.22/", False, None),
+        ("http://172.16.1.22:5000/", False, None),
+        ("http://google.com:5000/v1.0/", False, None),
+        ("http://mylocalhostname:5000/v1.0/", True, ""),
+        ("http://192.168.0.1/", False, ""),
+    ),
+)
+def test_should_bypass_proxies_win_registry(url, expected, override, monkeypatch):
     """Tests for function should_bypass_proxies to check if proxy
     can be bypassed or not with Windows registry settings
     """
     if override is None:
-        override = '192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1'
+        override = "192.168.*;127.0.0.1;localhost.localdomain;172.16.1.1"
     import winreg
 
     class RegHandle:
@@ -763,30 +847,32 @@ def OpenKey(key, subkey):
 
     def QueryValueEx(key, value_name):
         if key is ie_settings:
-            if value_name == 'ProxyEnable':
+            if value_name == "ProxyEnable":
                 # this could be a string (REG_SZ) or a 32-bit number (REG_DWORD)
                 proxyEnableValues.rotate()
                 return [proxyEnableValues[0]]
-            elif value_name == 'ProxyOverride':
+            elif value_name == "ProxyOverride":
                 return [override]
 
-    monkeypatch.setenv('http_proxy', '')
-    monkeypatch.setenv('https_proxy', '')
-    monkeypatch.setenv('ftp_proxy', '')
-    monkeypatch.setenv('no_proxy', '')
-    monkeypatch.setenv('NO_PROXY', '')
-    monkeypatch.setattr(winreg, 'OpenKey', OpenKey)
-    monkeypatch.setattr(winreg, 'QueryValueEx', QueryValueEx)
+    monkeypatch.setenv("http_proxy", "")
+    monkeypatch.setenv("https_proxy", "")
+    monkeypatch.setenv("ftp_proxy", "")
+    monkeypatch.setenv("no_proxy", "")
+    monkeypatch.setenv("NO_PROXY", "")
+    monkeypatch.setattr(winreg, "OpenKey", OpenKey)
+    monkeypatch.setattr(winreg, "QueryValueEx", QueryValueEx)
     assert should_bypass_proxies(url, None) == expected
 
 
 @pytest.mark.parametrize(
-    'env_name, value', (
-        ('no_proxy', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),
-        ('no_proxy', None),
-        ('a_new_key', '192.168.0.0/24,127.0.0.1,localhost.localdomain'),
-        ('a_new_key', None),
-    ))
+    "env_name, value",
+    (
+        ("no_proxy", "192.168.0.0/24,127.0.0.1,localhost.localdomain"),
+        ("no_proxy", None),
+        ("a_new_key", "192.168.0.0/24,127.0.0.1,localhost.localdomain"),
+        ("a_new_key", None),
+    ),
+)
 def test_set_environ(env_name, value):
     """Tests set_environ will set environ values and will restore the environ."""
     environ_copy = copy.deepcopy(os.environ)
@@ -800,7 +886,7 @@ def test_set_environ_raises_exception():
     """Tests set_environ will raise exceptions in context when the
     value parameter is None."""
     with pytest.raises(Exception) as exception:
-        with set_environ('test1', None):
-            raise Exception('Expected exception')
+        with set_environ("test1", None):
+            raise Exception("Expected exception")
 
-    assert 'Expected exception' in str(exception.value)
+    assert "Expected exception" in str(exception.value)
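Reviewer note: set_environ, used by the two tests above, is a context manager in requests.utils that temporarily sets an environment variable and restores the previous state on exit, including when the body raises; a value of None leaves the environment untouched. A minimal sketch (illustrative only, not part of the diff; the variable name is hypothetical):

# Sketch only; HYPOTHETICAL_VAR is an arbitrary illustrative name.
import os

from requests.utils import set_environ

before = os.environ.get("HYPOTHETICAL_VAR")
with set_environ("HYPOTHETICAL_VAR", "temp-value"):
    assert os.environ["HYPOTHETICAL_VAR"] == "temp-value"
assert os.environ.get("HYPOTHETICAL_VAR") == before  # previous state restored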
diff --git a/tests/testserver/server.py b/tests/testserver/server.py
index 92dcb6cb1a..6ca3a91716 100644
--- a/tests/testserver/server.py
+++ b/tests/testserver/server.py
@@ -1,13 +1,11 @@
-# -*- coding: utf-8 -*-
-
-import threading
-import socket
 import select
+import socket
+import threading
 
 
 def consume_socket_content(sock, timeout=0.5):
     chunks = 65536
-    content = b''
+    content = b""
 
     while True:
         more_to_read = select.select([sock], [], [], timeout)[0]
@@ -25,10 +23,18 @@ def consume_socket_content(sock, timeout=0.5):
 
 class Server(threading.Thread):
     """Dummy server using for unit testing"""
+
     WAIT_EVENT_TIMEOUT = 5
 
-    def __init__(self, handler=None, host='localhost', port=0, requests_to_handle=1, wait_to_close_event=None):
-        super(Server, self).__init__()
+    def __init__(
+        self,
+        handler=None,
+        host="localhost",
+        port=0,
+        requests_to_handle=1,
+        wait_to_close_event=None,
+    ):
+        super().__init__()
 
         self.handler = handler or consume_socket_content
         self.handler_results = []
@@ -45,19 +51,16 @@ def __init__(self, handler=None, host='localhost', port=0, requests_to_handle=1,
     def text_response_server(cls, text, request_timeout=0.5, **kwargs):
         def text_response_handler(sock):
             request_content = consume_socket_content(sock, timeout=request_timeout)
-            sock.send(text.encode('utf-8'))
+            sock.send(text.encode("utf-8"))
 
             return request_content
-
         return Server(text_response_handler, **kwargs)
 
     @classmethod
     def basic_response_server(cls, **kwargs):
         return cls.text_response_server(
-            "HTTP/1.1 200 OK\r\n" +
-            "Content-Length: 0\r\n\r\n",
-            **kwargs
+            "HTTP/1.1 200 OK\r\n" + "Content-Length: 0\r\n\r\n", **kwargs
         )
 
     def run(self):
@@ -71,7 +74,7 @@ def run(self):
             if self.wait_to_close_event:
                 self.wait_to_close_event.wait(self.WAIT_EVENT_TIMEOUT)
         finally:
-            self.ready_event.set() # just in case of exception
+            self.ready_event.set()  # just in case of exception
             self._close_server_sock_ignore_errors()
             self.stop_event.set()
@@ -84,7 +87,7 @@ def _create_socket_and_bind(self):
     def _close_server_sock_ignore_errors(self):
         try:
             self.server_sock.close()
-        except IOError:
+        except OSError:
             pass
 
     def _handle_requests(self):
@@ -100,12 +103,14 @@ def _handle_requests(self):
 
     def _accept_connection(self):
         try:
-            ready, _, _ = select.select([self.server_sock], [], [], self.WAIT_EVENT_TIMEOUT)
+            ready, _, _ = select.select(
+                [self.server_sock], [], [], self.WAIT_EVENT_TIMEOUT
+            )
 
             if not ready:
                 return None
 
             return self.server_sock.accept()[0]
-        except (select.error, socket.error):
+        except OSError:
             return None
 
     def __enter__(self):
@@ -125,4 +130,4 @@ def __exit__(self, exc_type, exc_value, traceback):
             # ensure server thread doesn't get stuck waiting for connections
             self._close_server_sock_ignore_errors()
             self.join()
-        return False # allow exceptions to propagate
+        return False  # allow exceptions to propagate
diff --git a/tests/utils.py b/tests/utils.py
index 9b797fd4e4..6cb75bfb6a 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-
 import contextlib
 import os