Add flake8-docstrings, fix bunch of flagged issues (#6276)
* Check docstrings with flake8-docstrings

* Address pydocstyle D2xx issues

* Add CHANGES entry

* Fix linter

Co-authored-by: Andrew Svetlov <andrew.svetlov@gmail.com>
scop and asvetlov committed Nov 11, 2021
1 parent ea51291 commit 46c5df6
Showing 35 changed files with 153 additions and 162 deletions.
2 changes: 2 additions & 0 deletions .pre-commit-config.yaml
@@ -79,6 +79,8 @@ repos:
   rev: '4.0.1'
   hooks:
   - id: flake8
+    additional_dependencies:
+      - flake8-docstrings==1.6.0
     exclude: "^docs/"
 - repo: git://github.com/Lucas-C/pre-commit-hooks-markup
   rev: v1.0.1
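For context: flake8-docstrings wraps pydocstyle, and the D2xx codes addressed in this commit police docstring content (one-line summary, blank line after the summary, trailing period). A hypothetical before/after sketch of the kind of fix applied throughout the diff, not code taken from it:

```python
# Hypothetical example, not from this commit.

# Before: flags D400 (first line should end with a period) and
# D205 (1 blank line required between summary line and description).
def fetch_old(url):
    """Fetch a resource
    and return its body"""


# After: one-line summary ending in a period, blank line before details.
def fetch_new(url):
    """Fetch a resource.

    Returns the response body.
    """
```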
1 change: 1 addition & 0 deletions CHANGES/6276.doc
@@ -0,0 +1 @@
+Add flake8-docstrings to flake8 configuration, enable subset of checks.
18 changes: 9 additions & 9 deletions aiohttp/client.py
@@ -1057,23 +1057,21 @@ def connector_owner(self) -> bool:
     def raise_for_status(
         self,
     ) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
-        """
-        Should `ClientResponse.raise_for_status()`
-        be called for each response
-        """
+        """Should `ClientResponse.raise_for_status()` be called for each response."""
         return self._raise_for_status
 
     @property
     def auto_decompress(self) -> bool:
-        """Should the body response be automatically decompressed"""
+        """Should the body response be automatically decompressed."""
         return self._auto_decompress
 
     @property
     def trust_env(self) -> bool:
         """
-        Should get proxies information
-        from HTTP_PROXY / HTTPS_PROXY environment variables
-        or ~/.netrc file if present
+        Should proxies information from environment or netrc be trusted.
+
+        Information is from HTTP_PROXY / HTTPS_PROXY environment variables
+        or ~/.netrc file if present.
         """
         return self._trust_env

@@ -1229,7 +1227,9 @@ def request(
     read_bufsize: Optional[int] = None,
     loop: Optional[asyncio.AbstractEventLoop] = None,
 ) -> _SessionRequestContextManager:
-    """Constructs and sends a request. Returns response object.
+    """Constructs and sends a request.
+
+    Returns response object.
     method - HTTP method
     url - request url
     params - (optional) Dictionary or bytes to be sent in the query
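The three properties above mirror `ClientSession` constructor arguments. A minimal usage sketch (the URL is a placeholder, not from this commit):

```python
import asyncio

import aiohttp


async def main() -> None:
    # raise_for_status=True calls resp.raise_for_status() for each response;
    # trust_env=True reads HTTP_PROXY/HTTPS_PROXY and ~/.netrc if present.
    async with aiohttp.ClientSession(raise_for_status=True, trust_env=True) as session:
        async with session.get("https://example.com") as resp:
            print(resp.status)


asyncio.run(main())
```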
3 changes: 2 additions & 1 deletion aiohttp/client_exceptions.py
@@ -262,7 +262,8 @@ class InvalidURL(ClientError, ValueError):
"""Invalid URL.
URL used for fetching is malformed, e.g. it doesn't contains host
part."""
part.
"""

# Derive from ValueError for backward compatibility

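A sketch of when this exception surfaces, assuming a host-less URL is rejected at request time:

```python
import asyncio

import aiohttp


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        try:
            await session.get("http://")  # malformed: no host part
        except aiohttp.InvalidURL as exc:
            print("rejected:", exc)


asyncio.run(main())
```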
19 changes: 10 additions & 9 deletions aiohttp/connector.py
@@ -328,12 +328,10 @@ def limit(self) -> int:

     @property
     def limit_per_host(self) -> int:
-        """The limit_per_host for simultaneous connections
-        to the same endpoint.
+        """The limit for simultaneous connections to the same endpoint.
 
         Endpoints are the same if they are have equal
         (host, port, is_ssl) triple.
-
         """
         return self._limit_per_host

@@ -391,6 +389,7 @@ def _drop_acquired_per_host(

     def _cleanup_closed(self) -> None:
         """Double confirmation for transport close.
+
         Some broken ssl servers may leave socket open without proper close.
         """
         if self._cleanup_closed_handle:
@@ -459,13 +458,13 @@ def closed(self) -> bool:

     def _available_connections(self, key: "ConnectionKey") -> int:
-        """
-        Return number of available connections taking into account
-        the limit, limit_per_host and the connection key.
-
-        If it returns less than 1 means that there is no connections
-        availables.
-        """
+        """
+        Return number of available connections.
+
+        The limit, limit_per_host and the connection key are taken into account.
+
+        A return value of less than 1 means that there are no connections
+        available.
+        """
         if self._limit:
             # total calc available connections
             available = self._limit - len(self._acquired)
@@ -606,7 +605,9 @@ def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:

     def _release_waiter(self) -> None:
         """
-        Iterates over all waiters till found one that is not finsihed and
+        Iterates over all waiters until one to be released is found.
+
+        The one to be released is not finished and
         belongs to a host that has available connections.
         """
         if not self._waiters:
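A short sketch of the two limits described in the `limit_per_host` docstring (the values are arbitrary):

```python
import asyncio

import aiohttp


async def main() -> None:
    # limit caps all simultaneous connections; limit_per_host caps
    # connections per (host, port, is_ssl) endpoint. 0 means unlimited.
    connector = aiohttp.TCPConnector(limit=100, limit_per_host=8)
    async with aiohttp.ClientSession(connector=connector) as session:
        ...


asyncio.run(main())
```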
6 changes: 4 additions & 2 deletions aiohttp/formdata.py
@@ -12,8 +12,10 @@


 class FormData:
-    """Helper class for multipart/form-data and
-    application/x-www-form-urlencoded body generation."""
+    """Helper class for form body generation.
+
+    Supports multipart/form-data and application/x-www-form-urlencoded.
+    """
 
     def __init__(
         self,
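A usage sketch for `FormData` covering both generation modes (the URL and field names are placeholders):

```python
import asyncio

import aiohttp


async def main() -> None:
    form = aiohttp.FormData()
    form.add_field("user", "alice")  # plain fields alone stay urlencoded
    form.add_field(
        "report",
        b"a,b\n1,2\n",
        filename="report.csv",  # filename/content_type switches to multipart
        content_type="text/csv",
    )
    async with aiohttp.ClientSession() as session:
        await session.post("https://example.com/upload", data=form)


asyncio.run(main())
```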
9 changes: 6 additions & 3 deletions aiohttp/helpers.py
@@ -196,7 +196,9 @@ def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:


 def netrc_from_env() -> Optional[netrc.netrc]:
-    """Attempt to load the netrc file from the path specified by the env-var
+    """Load netrc from file.
+
+    Attempt to load it from the path specified by the env-var
     NETRC or in the default location in the user's home directory.
 
     Returns None if it couldn't be found or fails to parse.
@@ -457,12 +459,13 @@ class _TSelf(Protocol, Generic[_T]):


 class reify(Generic[_T]):
-    """Use as a class method decorator. It operates almost exactly like
+    """Use as a class method decorator.
+
+    It operates almost exactly like
     the Python `@property` decorator, but it puts the result of the
     method it decorates into the instance dict after the first call,
     effectively replacing the function it decorates with an instance
     variable. It is, in Python parlance, a data descriptor.
     """
 
     def __init__(self, wrapped: Callable[..., _T]) -> None:
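A simplified, standalone sketch of the caching behavior this docstring describes; it is not aiohttp's internal implementation:

```python
class reify:
    """Simplified sketch of the described behavior."""

    def __init__(self, wrapped):
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__

    def __get__(self, inst, owner=None):
        if inst is None:
            return self
        val = self.wrapped(inst)
        # Store the result in the instance dict; later lookups find it
        # there instead of calling the wrapped method again.
        inst.__dict__[self.wrapped.__name__] = val
        return val


class Dataset:
    @reify
    def total(self):
        print("computing once")
        return sum(range(1000))


d = Dataset()
print(d.total)  # prints "computing once", then 499500
print(d.total)  # cached: 499500, no recomputation
```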
8 changes: 6 additions & 2 deletions aiohttp/http_parser.py
@@ -504,13 +504,16 @@ def parse_headers(

     def set_upgraded(self, val: bool) -> None:
         """Set connection upgraded (to websocket) mode.
+
         :param bool val: new state.
         """
         self._upgraded = val
 
 
 class HttpRequestParser(HttpParser[RawRequestMessage]):
-    """Read request status line. Exception .http_exceptions.BadStatusLine
+    """Read request status line.
+
+    Exception .http_exceptions.BadStatusLine
     could be raised in case of any errors in status line.
     Returns RawRequestMessage.
     """
@@ -588,7 +591,8 @@ class HttpResponseParser(HttpParser[RawResponseMessage]):
"""Read response status line and headers.
BadStatusLine could be raised in case of any errors in status line.
Returns RawResponseMessage"""
Returns RawResponseMessage.
"""

def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
line = lines[0].decode("utf-8", "surrogateescape")
5 changes: 3 additions & 2 deletions aiohttp/locks.py
@@ -4,8 +4,9 @@


 class EventResultOrError:
-    """
-    This class wrappers the Event asyncio lock allowing either awake the
+    """Event asyncio lock helper class.
+
+    Wraps the Event asyncio lock allowing either to awake the
     locked Tasks without any error or raising an exception.
 
     thanks to @vorpalsmith for the simple design.
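A hedged sketch of how this internal helper behaves; it is not public API, and the signatures shown (`set(exc=None)`, `wait()`) are assumed from the class as of this commit:

```python
import asyncio

from aiohttp.locks import EventResultOrError  # internal helper, may change


async def main() -> None:
    loop = asyncio.get_running_loop()
    event = EventResultOrError(loop)

    async def waiter() -> None:
        try:
            await event.wait()
            print("awoken without error")
        except RuntimeError as exc:
            print("awoken with error:", exc)

    task = asyncio.create_task(waiter())
    await asyncio.sleep(0)  # let the waiter block on wait()
    event.set()  # event.set(RuntimeError("boom")) would raise in the waiter
    await task


asyncio.run(main())
```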
34 changes: 18 additions & 16 deletions aiohttp/multipart.py
@@ -242,8 +242,10 @@ async def next(
         return item
 
     async def release(self) -> None:
-        """Releases the connection gracefully, reading all the content
-        to the void."""
+        """Release the connection gracefully.
+
+        All remaining content is read to the void.
+        """
         await self.resp.release()


@@ -416,9 +418,7 @@ async def json(self, *, encoding: Optional[str] = None) -> Optional[Dict[str, An
         return cast(Dict[str, Any], json.loads(data.decode(encoding)))
 
     async def form(self, *, encoding: Optional[str] = None) -> List[Tuple[str, str]]:
-        """Like read(), but assumes that body parts contains form
-        urlencoded data.
-        """
+        """Like read(), but assumes that body parts contain form urlencoded data."""
         data = await self.read(decode=True)
         if not data:
             return []
@@ -437,7 +437,9 @@ def at_eof(self) -> bool:
         return self._at_eof
 
     def decode(self, data: bytes) -> bytes:
-        """Decodes data according the specified Content-Encoding
+        """Decodes data.
+
+        Decoding is done according to the specified Content-Encoding
         or Content-Transfer-Encoding headers value.
         """
         if CONTENT_TRANSFER_ENCODING in self.headers:
@@ -480,17 +482,18 @@ def get_charset(self, default: str) -> str:

     @reify
     def name(self) -> Optional[str]:
-        """Returns name specified in Content-Disposition header or None
-        if missed or header is malformed.
-        """
+        """Returns name specified in Content-Disposition header.
+
+        If the header is missing or malformed, returns None.
+        """
         _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
         return content_disposition_filename(params, "name")
 
     @reify
     def filename(self) -> Optional[str]:
-        """Returns filename specified in Content-Disposition header or None
-        if missed or header is malformed.
+        """Returns filename specified in Content-Disposition header.
+
+        Returns None if the header is missing or malformed.
         """
         _, params = parse_content_disposition(self.headers.get(CONTENT_DISPOSITION))
         return content_disposition_filename(params, "filename")
@@ -568,9 +571,7 @@ def from_response(
         return obj
 
     def at_eof(self) -> bool:
-        """Returns True if the final boundary was reached or
-        False otherwise.
-        """
+        """Returns True if the final boundary was reached, False otherwise."""
         return self._at_eof
 
     async def next(
@@ -610,8 +611,9 @@ def _get_part_reader(
         self,
         headers: "CIMultiDictProxy[str]",
     ) -> Union["MultipartReader", BodyPartReader]:
-        """Dispatches the response by the `Content-Type` header, returning
-        suitable reader instance.
+        """Dispatches the response by the `Content-Type` header.
+
+        Returns a suitable reader instance.
 
         :param dict headers: Response headers
         """
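A consumption sketch for these readers, assuming a flat (non-nested) multipart response:

```python
import aiohttp


async def read_parts(resp: aiohttp.ClientResponse) -> None:
    # from_response() dispatches on the Content-Type header.
    reader = aiohttp.MultipartReader.from_response(resp)
    while True:
        part = await reader.next()
        if part is None:  # at_eof(): final boundary reached
            break
        # name/filename come from Content-Disposition; None if missing/malformed.
        print(part.name, part.filename)
        data = await part.read(decode=True)  # decode per Content-Encoding
```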
3 changes: 2 additions & 1 deletion aiohttp/payload_streamer.py
@@ -1,4 +1,5 @@
""" Payload implemenation for coroutines as data provider.
"""
Payload implemenation for coroutines as data provider.
As a simple case, you can upload data from file::
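The truncated module docstring continues with a file-upload example along these lines (a sketch based on the documented pattern; the file name is a placeholder):

```python
import aiohttp
from aiohttp.payload_streamer import streamer


@streamer
async def file_sender(writer, file_name=None):
    with open(file_name, "rb") as f:
        chunk = f.read(2**16)
        while chunk:
            await writer.write(chunk)
            chunk = f.read(2**16)


# Then upload with:
#   session.post(url, data=file_sender(file_name="huge_file.bin"))
```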
21 changes: 10 additions & 11 deletions aiohttp/pytest_plugin.py
@@ -55,7 +55,8 @@ def pytest_addoption(parser):  # type: ignore[no-untyped-def]


 def pytest_fixture_setup(fixturedef):  # type: ignore[no-untyped-def]
-    """
+    """Set up pytest fixture.
+
     Allow fixtures to be coroutines. Run coroutine fixtures in an event loop.
     """
     func = fixturedef.func
@@ -124,8 +125,9 @@ def loop_debug(request):  # type: ignore[no-untyped-def]

 @contextlib.contextmanager
 def _runtime_warning_context():  # type: ignore[no-untyped-def]
-    """
-    Context manager which checks for RuntimeWarnings, specifically to
+    """Context manager which checks for RuntimeWarnings.
+
+    This exists specifically to
     avoid "coroutine 'X' was never awaited" warnings being missed.
 
     If RuntimeWarnings occur in the context a RuntimeError is raised.
@@ -147,8 +149,9 @@ def _runtime_warning_context():  # type: ignore[no-untyped-def]

 @contextlib.contextmanager
 def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]
-    """
-    setups and tears down a loop unless one is passed in via the loop
+    """Passthrough loop context.
+
+    Sets up and tears down a loop unless one is passed in via the loop
     argument when it's passed straight through.
     """
     if loop:
@@ -162,17 +165,13 @@ def _passthrough_loop_context(loop, fast=False):  # type: ignore[no-untyped-def]


 def pytest_pycollect_makeitem(collector, name, obj):  # type: ignore[no-untyped-def]
-    """
-    Fix pytest collecting for coroutines.
-    """
+    """Fix pytest collecting for coroutines."""
     if collector.funcnamefilter(name) and asyncio.iscoroutinefunction(obj):
         return list(collector._genfunctions(name, obj))


 def pytest_pyfunc_call(pyfuncitem):  # type: ignore[no-untyped-def]
-    """
-    Run coroutines in an event loop instead of a normal function call.
-    """
+    """Run coroutines in an event loop instead of a normal function call."""
     fast = pyfuncitem.config.getoption("--aiohttp-fast")
     if asyncio.iscoroutinefunction(pyfuncitem.function):
         existing_loop = pyfuncitem.funcargs.get(
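With the plugin enabled (e.g. via pytest-aiohttp), coroutine fixtures and tests like the following run without manual loop handling; the names below are placeholders:

```python
import pytest
from aiohttp import web


async def hello(request: web.Request) -> web.Response:
    return web.Response(text="hello")


@pytest.fixture
async def client(aiohttp_client):  # coroutine fixture, run by the plugin
    app = web.Application()
    app.router.add_get("/", hello)
    return await aiohttp_client(app)


async def test_hello(client) -> None:  # coroutine test, collected by the plugin
    resp = await client.get("/")
    assert resp.status == 200
    assert await resp.text() == "hello"
```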
6 changes: 4 additions & 2 deletions aiohttp/resolver.py
@@ -18,8 +18,10 @@


 class ThreadedResolver(AbstractResolver):
-    """Use Executor for synchronous getaddrinfo() calls, which defaults to
-    concurrent.futures.ThreadPoolExecutor.
+    """Threaded resolver.
+
+    Uses an Executor for synchronous getaddrinfo() calls.
+    concurrent.futures.ThreadPoolExecutor is used by default.
     """
 
     def __init__(self, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
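A usage sketch; `ThreadedResolver` lives in `aiohttp.resolver` and plugs into the connector:

```python
import asyncio

import aiohttp
from aiohttp.resolver import ThreadedResolver


async def main() -> None:
    # getaddrinfo() runs in the loop's default executor, which is a
    # concurrent.futures.ThreadPoolExecutor unless replaced.
    connector = aiohttp.TCPConnector(resolver=ThreadedResolver())
    async with aiohttp.ClientSession(connector=connector) as session:
        ...


asyncio.run(main())
```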
15 changes: 9 additions & 6 deletions aiohttp/streams.py
@@ -68,16 +68,16 @@ def iter_chunked(self, n: int) -> AsyncStreamIterator[bytes]:
         )
 
     def iter_any(self) -> AsyncStreamIterator[bytes]:
-        """Returns an asynchronous iterator that yields all the available
-        data as soon as it is received
+        """Yield all available data as soon as it is received.
 
         Python-3.5 available for Python 3.5+ only
         """
         return AsyncStreamIterator(self.readany)  # type: ignore[attr-defined]
 
     def iter_chunks(self) -> ChunkTupleAsyncStreamIterator:
-        """Returns an asynchronous iterator that yields chunks of data
-        as they are received by the server. The yielded objects are tuples
+        """Yield chunks of data as they are received by the server.
+
+        The yielded objects are tuples
         of (bytes, bool) as returned by the StreamReader.readchunk method.
 
         Python-3.5 available for Python 3.5+ only
@@ -399,7 +399,9 @@ async def readany(self) -> bytes:
         return self._read_nowait(-1)
 
     async def readchunk(self) -> Tuple[bytes, bool]:
-        """Returns a tuple of (data, end_of_http_chunk). When chunked transfer
+        """Returns a tuple of (data, end_of_http_chunk).
+
+        When chunked transfer
         encoding is used, end_of_http_chunk is a boolean indicating if the end
         of the data corresponds to the end of a HTTP chunk, otherwise it is
         always False.
@@ -633,7 +635,8 @@ def __aiter__(self) -> AsyncStreamIterator[_T]:
 class FlowControlDataQueue(DataQueue[_T]):
     """FlowControlDataQueue resumes and pauses an underlying stream.
 
-    It is a destination for parsed data."""
+    It is a destination for parsed data.
+    """
 
     def __init__(
         self, protocol: BaseProtocol, limit: int, *, loop: asyncio.AbstractEventLoop
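A consumption sketch for the stream APIs touched above (the URL is a placeholder):

```python
import asyncio

import aiohttp


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.get("https://example.com") as resp:
            # iter_chunked(n) yields fixed-size chunks; iter_any() yields
            # whatever has arrived as soon as it is received.
            async for chunk in resp.content.iter_chunked(1024):
                print(len(chunk))
            # readchunk() pairs data with an end-of-HTTP-chunk flag:
            # data, end_of_http_chunk = await resp.content.readchunk()


asyncio.run(main())
```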
