Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add flake8-docstrings, fix bunch of flagged issues #6276

Merged
merged 6 commits into from Nov 11, 2021
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 2 additions & 0 deletions .pre-commit-config.yaml
Expand Up @@ -79,6 +79,8 @@ repos:
rev: '4.0.1'
hooks:
- id: flake8
additional_dependencies:
- flake8-docstrings==1.6.0
exclude: "^docs/"
- repo: git://github.com/Lucas-C/pre-commit-hooks-markup
rev: v1.0.1
Expand Down
1 change: 1 addition & 0 deletions CHANGES/6276.doc
@@ -0,0 +1 @@
Add flake8-docstrings to flake8 configuration, enable subset of checks.
18 changes: 9 additions & 9 deletions aiohttp/client.py
Expand Up @@ -1032,23 +1032,21 @@ def connector_owner(self) -> bool:
def raise_for_status(
self,
) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
"""
Should `ClientResponse.raise_for_status()`
be called for each response
"""
"""Should `ClientResponse.raise_for_status()` be called for each response."""
return self._raise_for_status

@property
def auto_decompress(self) -> bool:
"""Should the body response be automatically decompressed"""
"""Should the body response be automatically decompressed."""
return self._auto_decompress

@property
def trust_env(self) -> bool:
"""
Should get proxies information
from HTTP_PROXY / HTTPS_PROXY environment variables
or ~/.netrc file if present
Should proxies information from environment or netrc be trusted.

Information is from HTTP_PROXY / HTTPS_PROXY environment variables
or ~/.netrc file if present.
"""
return self._trust_env

Expand Down Expand Up @@ -1191,7 +1189,9 @@ def request(
connector: Optional[BaseConnector] = None,
read_bufsize: Optional[int] = None,
) -> _SessionRequestContextManager:
"""Constructs and sends a request. Returns response object.
"""Constructs and sends a request.

Returns response object.
method - HTTP method
url - request url
params - (optional) Dictionary or bytes to be sent in the query
Expand Down
3 changes: 2 additions & 1 deletion aiohttp/client_exceptions.py
Expand Up @@ -229,7 +229,8 @@ class InvalidURL(ClientError, ValueError):
"""Invalid URL.

URL used for fetching is malformed, e.g. it doesn't contain a host
part."""
part.
"""

# Derive from ValueError for backward compatibility

Expand Down
19 changes: 10 additions & 9 deletions aiohttp/connector.py
Expand Up @@ -281,12 +281,10 @@ def limit(self) -> int:

@property
def limit_per_host(self) -> int:
"""The limit_per_host for simultaneous connections
to the same endpoint.
"""The limit for simultaneous connections to the same endpoint.

Endpoints are the same if they are have equal
(host, port, is_ssl) triple.

"""
return self._limit_per_host

Expand Down Expand Up @@ -344,6 +342,7 @@ def _drop_acquired_per_host(

def _cleanup_closed(self) -> None:
"""Double confirmation for transport close.

Some broken ssl servers may leave socket open without proper close.
"""
if self._cleanup_closed_handle:
Expand Down Expand Up @@ -424,13 +423,13 @@ def closed(self) -> bool:

def _available_connections(self, key: "ConnectionKey") -> int:
"""
Return number of available connections taking into account
the limit, limit_per_host and the connection key.
Return number of available connections.

If it returns less than 1 means that there is no connections
availables.
"""
The limit, limit_per_host and the connection key are taken into account.

If it returns less than 1, it means that there are no connections
available.
"""
if self._limit:
# total calc available connections
available = self._limit - len(self._acquired)
Expand Down Expand Up @@ -571,7 +570,9 @@ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:

def _release_waiter(self) -> None:
"""
Iterates over all waiters till found one that is not finsihed and
Iterates over all waiters until one to be released is found.

The one to be released is not finished and
belongs to a host that has available connections.
"""
if not self._waiters:
Expand Down
6 changes: 4 additions & 2 deletions aiohttp/formdata.py
Expand Up @@ -12,8 +12,10 @@


class FormData:
"""Helper class for multipart/form-data and
application/x-www-form-urlencoded body generation."""
"""Helper class for form body generation.

Supports multipart/form-data and application/x-www-form-urlencoded.
"""

def __init__(
self,
Expand Down
11 changes: 6 additions & 5 deletions aiohttp/helpers.py
Expand Up @@ -197,7 +197,9 @@ def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:


def netrc_from_env() -> Optional[netrc.netrc]:
"""Attempt to load the netrc file from the path specified by the env-var
"""Load netrc from file.

Attempt to load it from the path specified by the env-var
NETRC or in the default location in the user's home directory.

Returns None if it couldn't be found or fails to parse.
Expand Down Expand Up @@ -425,7 +427,6 @@ def is_expected_content_type(
) -> bool:
"""Checks if received content type is processable as an expected one.


Both arguments should be given without parameters.
"""
if expected_content_type == "application/json":
Expand All @@ -438,12 +439,13 @@ class _TSelf(Protocol, Generic[_T]):


class reify(Generic[_T]):
"""Use as a class method decorator. It operates almost exactly like
"""Use as a class method decorator.

It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
effectively replacing the function it decorates with an instance
variable. It is, in Python parlance, a data descriptor.

"""

def __init__(self, wrapped: Callable[..., _T]) -> None:
Expand Down Expand Up @@ -851,7 +853,6 @@ def set_cookie(
Sets a new cookie or updates an existing one with a new value.
Also updates only those params which are not None.
"""

old = self._cookies.get(name)
if old is not None and old.coded_value == "":
# deleted cookie
Expand Down
7 changes: 5 additions & 2 deletions aiohttp/http_parser.py
Expand Up @@ -512,7 +512,9 @@ def set_upgraded(self, val: bool) -> None:


class HttpRequestParser(HttpParser[RawRequestMessage]):
"""Read request status line. Exception .http_exceptions.BadStatusLine
"""Read request status line.

Exception .http_exceptions.BadStatusLine
could be raised in case of any errors in status line.
Returns RawRequestMessage.
"""
Expand Down Expand Up @@ -590,7 +592,8 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
"""Read response status line and headers.

BadStatusLine could be raised in case of any errors in status line.
Returns RawResponseMessage"""
Returns RawResponseMessage.
"""

def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
line = lines[0].decode("utf-8", "surrogateescape")
Expand Down
5 changes: 3 additions & 2 deletions aiohttp/locks.py
Expand Up @@ -9,8 +9,9 @@


class EventResultOrError:
"""
This class wrappers the Event asyncio lock allowing either awake the
"""Event asyncio lock helper class.

Wraps the Event asyncio lock allowing either to awake the
locked Tasks without any error or raising an exception.

thanks to @vorpalsmith for the simple design.
Expand Down
34 changes: 18 additions & 16 deletions aiohttp/multipart.py
Expand Up @@ -242,8 +242,10 @@ async def next(
return item

async def release(self) -> None:
"""Releases the connection gracefully, reading all the content
to the void."""
"""Release the connection gracefully.

All remaining content is read to the void.
"""
await self.resp.release()


Expand Down Expand Up @@ -453,9 +455,7 @@ async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, An
return cast(Dict[str, Any], json.loads(data.decode(encoding)))

async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
"""Like read(), but assumes that body parts contains form
urlencoded data.
"""
"""Like read(), but assumes that body parts contain form urlencoded data."""
data = await self.read(decode=True)
if not data:
return []
Expand All @@ -474,7 +474,9 @@ def at_eof(self) -> bool:
return self._at_eof

def decode(self, data: bytes) -> bytes:
"""Decodes data according the specified Content-Encoding
"""Decodes data.

Decoding is done according to the specified Content-Encoding
or Content-Transfer-Encoding headers value.
"""
if CONTENT_TRANSFER_ENCODING in self.headers:
Expand Down Expand Up @@ -517,17 +519,18 @@ def get_charset(self, default: str) -> str:

@reify
def name(self) -> Optional[str]:
"""Returns name specified in Content-Disposition header or None
if missed or header is malformed.
"""
"""Returns name specified in Content-Disposition header.

If the header is missing or malformed, returns None.
"""
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "name")

@reify
def filename(self) -> Optional[str]:
"""Returns filename specified in Content-Disposition header or None
if missed or header is malformed.
"""Returns filename specified in Content-Disposition header.

Returns None if the header is missing or malformed.
"""
_, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
return content_disposition_filename(params, "filename")
Expand Down Expand Up @@ -612,9 +615,7 @@ def from_response(
return obj

def at_eof(self) -> bool:
"""Returns True if the final boundary was reached or
False otherwise.
"""
"""Returns True if the final boundary was reached, false otherwise."""
return self._at_eof

async def next(
Expand Down Expand Up @@ -654,8 +655,9 @@ def _get_part_reader(
self,
headers: "CIMultiDictProxy[str]",
) -> Union["MultipartReader", BodyPartReader]:
"""Dispatches the response by the `Content-Type` header, returning
suitable reader instance.
"""Dispatches the response by the `Content-Type` header.

Returns a suitable reader instance.

:param dict headers: Response headers
"""
Expand Down
21 changes: 10 additions & 11 deletions aiohttp/pytest_plugin.py
Expand Up @@ -54,7 +54,8 @@ def pytest_addoption(parser): # type: ignore[no-untyped-def]


def pytest_fixture_setup(fixturedef): # type: ignore[no-untyped-def]
"""
"""Set up pytest fixture.

Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
"""
func = fixturedef.func
Expand Down Expand Up @@ -123,8 +124,9 @@ def loop_debug(request): # type: ignore[no-untyped-def]

@contextlib.contextmanager
def _runtime_warning_context(): # type: ignore[no-untyped-def]
"""
Context manager which checks for RuntimeWarnings, specifically to
"""Context manager which checks for RuntimeWarnings.

This exists specifically to
avoid "coroutine 'X' was never awaited" warnings being missed.

If RuntimeWarnings occur in the context a RuntimeError is raised.
Expand Down Expand Up @@ -152,8 +154,9 @@ def _runtime_warning_context(): # type: ignore[no-untyped-def]

@contextlib.contextmanager
def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]
"""
setups and tears down a loop unless one is passed in via the loop
"""Passthrough loop context.

Sets up and tears down a loop unless one is passed in via the loop
argument when it's passed straight through.
"""
if loop:
Expand All @@ -167,17 +170,13 @@ def _passthrough_loop_context(loop, fast=False): # type: ignore[no-untyped-def]


def pytest_pycollect_makeitem(collector, name, obj): # type: ignore[no-untyped-def]
"""
Fix pytest collecting for coroutines.
"""
"""Fix pytest collecting for coroutines."""
if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
return list(collector._genfunctions(name, obj))


def pytest_pyfunc_call(pyfuncitem): # type: ignore[no-untyped-def]
"""
Run coroutines in an event loop instead of a normal function call.
"""
"""Run coroutines in an event loop instead of a normal function call."""
fast = pyfuncitem.config.getoption("--aiohttp-fast")
if asyncio.iscoroutinefunction(pyfuncitem.function):
existing_loop = pyfuncitem.funcargs.get(
Expand Down
6 changes: 4 additions & 2 deletions aiohttp/resolver.py
Expand Up @@ -17,8 +17,10 @@


class ThreadedResolver(AbstractResolver):
"""Use Executor for synchronous getaddrinfo() calls, which defaults to
concurrent.futures.ThreadPoolExecutor.
"""Threaded resolver.

Uses an Executor for synchronous getaddrinfo() calls.
concurrent.futures.ThreadPoolExecutor is used by default.
"""

def __init__(self) -> None:
Expand Down
15 changes: 9 additions & 6 deletions aiohttp/streams.py
Expand Up @@ -74,16 +74,16 @@ def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
)

def iter_any(self) -> AsyncStreamIterator[bytes]:
"""Returns an asynchronous iterator that yields all the available
data as soon as it is received
"""Yield all available data as soon as it is received.

Available for Python 3.5 and newer only.
"""
return AsyncStreamIterator(self.readany) # type: ignore[attr-defined]

def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
"""Returns an asynchronous iterator that yields chunks of data
as they are received by the server. The yielded objects are tuples
"""Yield chunks of data as they are received by the server.

The yielded objects are tuples
of (bytes, bool) as returned by the StreamReader.readchunk method.

Available for Python 3.5 and newer only.
Expand Down Expand Up @@ -391,7 +391,9 @@ async def readany(self) -> bytes:
return self._read_nowait(-1)

async def readchunk(self) -> Tuple[bytes, bool]:
"""Returns a tuple of (data, end_of_http_chunk). When chunked transfer
"""Returns a tuple of (data, end_of_http_chunk).

When chunked transfer
encoding is used, end_of_http_chunk is a boolean indicating if the end
of the data corresponds to the end of an HTTP chunk, otherwise it is
always False.
Expand Down Expand Up @@ -625,7 +627,8 @@ def __aiter__(self) -> AsyncStreamIterator[_T]:
class FlowControlDataQueue(DataQueue[_T]):
"""FlowControlDataQueue resumes and pauses an underlying stream.

It is a destination for parsed data."""
It is a destination for parsed data.
"""

def __init__(
self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
Expand Down