Ruff lint: configure rules, format files
deedy5 committed Apr 2, 2024
1 parent ba3eb25 commit 12c83e5
Showing 16 changed files with 113 additions and 124 deletions.
8 changes: 4 additions & 4 deletions benchmark/benchmark.py
@@ -68,7 +68,7 @@ def __del__(self):
         stats[name] = dur
         results.append({"name": name, "size": size, "duration": dur})

-    print("One worker, {}: ".format(size), stats)
+    print(f"One worker, {size}: {stats}")

 df = pd.DataFrame(results)
 df.to_csv("single_worker.csv", index=False, float_format="%.4f")
@@ -132,7 +132,7 @@ async def httpx_worker(q, done, s):
         t.join()
     # print(stats)

-    async def test_asyncs_workers():
+    async def test_asyncs_workers(url, size, stats):
         for name, worker, SessionClass in [
             ("aiohttp", aiohttp_worker, aiohttp.ClientSession),
             ("httpx_async", httpx_worker, httpx.AsyncClient),
@@ -156,8 +156,8 @@ async def test_asyncs_workers():
         for w in workers:
             w.cancel()

-    asyncio.run(test_asyncs_workers())
-    print("10 Workers, {}: ".format(size), stats)
+    asyncio.run(test_asyncs_workers(url, size, stats))
+    print(f"10 Workers, {size}: {stats}")

 df = pd.DataFrame(results)
 df.to_csv("multiple_workers.csv", index=False, float_format="%.4f")
13 changes: 4 additions & 9 deletions curl_cffi/_asyncio_selector.py
@@ -15,6 +15,7 @@
 import socket
 import threading
 import typing
+from contextlib import suppress
 from typing import (
     Any,
     Callable,
@@ -48,10 +49,8 @@ def _atexit_callback() -> None:
         with loop._select_cond:
             loop._closing_selector = True
             loop._select_cond.notify()
-        try:
+        with suppress(BlockingIOError):
             loop._waker_w.send(b"a")
-        except BlockingIOError:
-            pass
         if loop._thread is not None:
             # If we don't join our (daemon) thread here, we may get a deadlock
             # during interpreter shutdown. I don't really understand why. This
@@ -152,16 +151,12 @@ async def _thread_manager(self) -> typing.AsyncGenerator[None, None]:
     def _wake_selector(self) -> None:
         if self._closed:
             return
-        try:
+        with suppress(BlockingIOError):
             self._waker_w.send(b"a")
-        except BlockingIOError:
-            pass

     def _consume_waker(self) -> None:
-        try:
+        with suppress(BlockingIOError):
             self._waker_r.recv(1024)
-        except BlockingIOError:
-            pass

     def _start_select(self) -> None:
         # Capture reader and writer sets here in the event loop
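Editor's note: `contextlib.suppress` is the stdlib replacement that Ruff's SIM105 check suggests for a `try`/`except`/`pass` whose only purpose is to ignore an exception, as in the hunks above. A minimal standalone sketch:

```python
from contextlib import suppress
import os

# Before: four lines of ceremony to ignore a missing file.
try:
    os.remove("stale.lock")
except FileNotFoundError:
    pass

# After: one statement with the same semantics (Ruff SIM105).
with suppress(FileNotFoundError):
    os.remove("stale.lock")
```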
4 changes: 2 additions & 2 deletions curl_cffi/aio.py
@@ -36,7 +36,7 @@ def _get_selector(asyncio_loop) -> asyncio.AbstractEventLoop:
     if not isinstance(asyncio_loop, getattr(asyncio, "ProactorEventLoop", type(None))):
         return asyncio_loop

-    warnings.warn(PROACTOR_WARNING, RuntimeWarning)
+    warnings.warn(PROACTOR_WARNING, RuntimeWarning, stacklevel=2)

     from ._asyncio_selector import AddThreadSelectorEventLoop

@@ -201,7 +201,7 @@ def socket_action(self, sockfd: int, ev_bitmask: int) -> int:
     def process_data(self, sockfd: int, ev_bitmask: int):
         """Call curl_multi_info_read to read data for given socket."""
         if not self._curlm:
-            warnings.warn("Curlm alread closed! quitting from process_data")
+            warnings.warn("Curlm alread closed! quitting from process_data", stacklevel=2)
             return

         self.socket_action(sockfd, ev_bitmask)
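Editor's note: the `stacklevel=2` additions throughout this commit follow Ruff's B028 check (`warnings.warn` without an explicit `stacklevel`). The default, `stacklevel=1`, attributes the warning to the library line that calls `warn`; `stacklevel=2` attributes it to the caller, which is usually what the user needs to see. A small sketch:

```python
import warnings

def deprecated_helper():
    # stacklevel=1 (the default) would point the warning at this line;
    # stacklevel=2 points it at whoever called deprecated_helper().
    warnings.warn("deprecated_helper is deprecated", DeprecationWarning, stacklevel=2)

def user_code():
    deprecated_helper()  # with stacklevel=2, the warning cites this line

user_code()
```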
13 changes: 5 additions & 8 deletions curl_cffi/curl.py
@@ -69,11 +69,11 @@ def write_callback(ptr, size, nmemb, userdata):
     callback = ffi.from_handle(userdata)
     wrote = callback(ffi.buffer(ptr, nmemb)[:])
     wrote = ensure_int(wrote)
-    if wrote == CURL_WRITEFUNC_PAUSE or wrote == CURL_WRITEFUNC_ERROR:
+    if wrote in (CURL_WRITEFUNC_PAUSE, CURL_WRITEFUNC_ERROR):
         return wrote
     # should make this an exception in future versions
     if wrote != nmemb * size:
-        warnings.warn("Wrote bytes != received bytes.", RuntimeWarning)
+        warnings.warn("Wrote bytes != received bytes.", RuntimeWarning, stacklevel=2)
     return nmemb * size


@@ -101,7 +101,7 @@ def __init__(self, cacert: str = "", debug: bool = False, handle=None) -> None:
             debug: whether to show curl debug messages.
             handle: a curl handle instance from ``curl_easy_init``.
         """
-        self._curl = lib.curl_easy_init() if not handle else handle
+        self._curl = handle if handle else lib.curl_easy_init()
         self._headers = ffi.NULL
         self._proxy_headers = ffi.NULL
         self._resolve = ffi.NULL
@@ -118,7 +118,7 @@ def __init__(self, cacert: str = "", debug: bool = False, handle=None) -> None:
     def _set_error_buffer(self) -> None:
         ret = lib._curl_easy_setopt(self._curl, CurlOpt.ERRORBUFFER, self._error_buffer)
         if ret != 0:
-            warnings.warn("Failed to set error buffer")
+            warnings.warn("Failed to set error buffer", stacklevel=2)
         if self._debug:
             self.setopt(CurlOpt.VERBOSE, 1)
             lib._curl_easy_setopt(self._curl, CurlOpt.DEBUGFUNCTION, lib.debug_function)
@@ -189,10 +189,7 @@ def setopt(self, option: CurlOpt, value: Any) -> int:
             lib._curl_easy_setopt(self._curl, CurlOpt.WRITEFUNCTION, lib.write_callback)
             option = CurlOpt.HEADERDATA
         elif value_type == "char*":
-            if isinstance(value, str):
-                c_value = value.encode()
-            else:
-                c_value = value
+            c_value = value.encode() if isinstance(value, str) else value
             # Must keep a reference, otherwise may be GCed.
             if option == CurlOpt.POSTFIELDS:
                 self._body_handle = c_value
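Editor's note: the two rewrites in this file are standard Ruff simplifications: an `if`/`else` assignment collapses to a conditional expression (SIM108), and repeated equality checks against one variable collapse to a tuple-membership test (PLR1714). In miniature, with illustrative constant values (the real ones live in curl_cffi's constants):

```python
# Illustrative values only, standing in for curl_cffi's real constants.
CURL_WRITEFUNC_PAUSE = 0x10000001
CURL_WRITEFUNC_ERROR = 0xFFFFFFFF

# SIM108: if/else assignment -> conditional expression.
value = "hello"
c_value = value.encode() if isinstance(value, str) else value

# PLR1714: `a == x or a == y` -> `a in (x, y)`.
wrote = CURL_WRITEFUNC_PAUSE
if wrote in (CURL_WRITEFUNC_PAUSE, CURL_WRITEFUNC_ERROR):
    print("sentinel return value, pass it straight back to libcurl")
```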
12 changes: 8 additions & 4 deletions curl_cffi/requests/__init__.py
@@ -72,8 +72,10 @@ def request(
         method: http method for the request: GET/POST/PUT/DELETE etc.
         url: url for the requests.
         params: query string for the requests.
-        data: form values or binary data to use in body, ``Content-Type: application/x-www-form-urlencoded`` will be added if a dict is given.
-        json: json values to use in body, `Content-Type: application/json` will be added automatically.
+        data: form values or binary data to use in body,
+            ``Content-Type: application/x-www-form-urlencoded`` will be added if a dict is given.
+        json: json values to use in body, `Content-Type: application/json` will be added
+            automatically.
         headers: headers to send.
         cookies: cookies to use.
         files: not supported, use ``multipart`` instead.
@@ -82,12 +84,14 @@
         allow_redirects: whether to allow redirection.
         max_redirects: max redirect counts, default unlimited(-1).
         proxies: dict of proxies to use, format: ``{"http": proxy_url, "https": proxy_url}``.
-        proxy: proxy to use, format: "http://user@pass:proxy_url". Cannot be used with the above parameter.
+        proxy: proxy to use, format: "http://user@pass:proxy_url".
+            Can't be used with proxy parameter.
         proxy_auth: HTTP basic auth for proxy, a tuple of (username, password).
         verify: whether to verify https certs.
         referer: shortcut for setting referer header.
         accept_encoding: shortcut for setting accept-encoding header.
-        content_callback: a callback function to receive response body. ``def callback(chunk: bytes) -> None:``
+        content_callback: a callback function to receive response body.
+            ``def callback(chunk: bytes) -> None:``
         impersonate: which browser version to impersonate.
         thread: work with other thread implementations. choices: eventlet, gevent.
         default_headers: whether to set default browser headers.
47 changes: 25 additions & 22 deletions curl_cffi/requests/cookies.py
@@ -70,7 +70,7 @@ def from_curl_format(cls, set_cookie_line: bytes):

     def to_curl_format(self):
         if not self.hostname:
-            raise RequestsError("Domain not found for cookie {}={}".format(self.name, self.value))
+            raise RequestsError(f"Domain not found for cookie {self.name}={self.value}")
         return "\t".join(
             [
                 self.hostname,
@@ -114,7 +114,7 @@ def to_cookiejar_cookie(self) -> Cookie:
             secure=self.secure,
             # using if explicitly to make it clear.
             expires=None if self.expires == 0 else self.expires,
-            discard=True if self.expires == 0 else False,
+            discard=self.expires == 0,
             comment=None,
             comment_url=None,
             rest=dict(http_only=f"{self.http_only}"),
@@ -193,12 +193,13 @@ def set(self, name: str, value: str, domain: str = "", path: str = "/", secure=False):
         """
         # https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Set-Cookie
         if name.startswith("__Secure-") and secure is False:
-            warnings.warn("`secure` changed to True for `__Secure-` prefixed cookies")
+            warnings.warn("`secure` changed to True for `__Secure-` prefixed cookies", stacklevel=2)
             secure = True
         elif name.startswith("__Host-") and (secure is False or domain or path != "/"):
             warnings.warn(
                 "`host` changed to True, `domain` removed, `path` changed to `/` "
-                "for `__Host-` prefixed cookies"
+                "for `__Host-` prefixed cookies",
+                stacklevel=2,
             )
             secure = True
             domain = ""
@@ -239,24 +240,26 @@ def get(  # type: ignore
         value = None
         matched_domain = ""
         for cookie in self.jar:
-            if cookie.name == name:
-                if domain is None or cookie.domain == domain:
-                    if path is None or cookie.path == path:
-                        # if cookies on two different domains do not share a same value
-                        if (
-                            value is not None
-                            and not matched_domain.endswith(cookie.domain)
-                            and not str(cookie.domain).endswith(matched_domain)
-                            and value != cookie.value
-                        ):
-                            message = (
-                                f"Multiple cookies exist with name={name} on "
-                                f"{matched_domain} and {cookie.domain}, add domain "
-                                "parameter to suppress this error."
-                            )
-                            raise CookieConflict(message)
-                        value = cookie.value
-                        matched_domain = cookie.domain or ""
+            if (
+                cookie.name == name
+                and (domain is None or cookie.domain == domain)
+                and (path is None or cookie.path == path)
+            ):
+                # if cookies on two different domains do not share a same value
+                if (
+                    value is not None
+                    and not matched_domain.endswith(cookie.domain)
+                    and not str(cookie.domain).endswith(matched_domain)
+                    and value != cookie.value
+                ):
+                    message = (
+                        f"Multiple cookies exist with name={name} on "
+                        f"{matched_domain} and {cookie.domain}, add domain "
+                        "parameter to suppress this error."
+                    )
+                    raise CookieConflict(message)
+                value = cookie.value
+                matched_domain = cookie.domain or ""

         if value is None:
             return default
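Editor's note: the `get` rewrite above is Ruff's SIM102 (collapsible `if`): nested `if`s with no `elif`/`else` between them merge into one compound condition, saving two indentation levels. A standalone sketch of the same shape, with made-up values:

```python
# Hypothetical stand-ins for the cookie-matching logic above.
name, domain, path = "session", None, None
cookie_name, cookie_domain, cookie_path = "session", ".example.com", "/"

# Before: three nested ifs (SIM102).
if cookie_name == name:
    if domain is None or cookie_domain == domain:
        if path is None or cookie_path == path:
            print("matched")

# After: one compound condition, same behavior.
if (
    cookie_name == name
    and (domain is None or cookie_domain == domain)
    and (path is None or cookie_path == path)
):
    print("matched")
```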
7 changes: 2 additions & 5 deletions curl_cffi/requests/headers.py
@@ -80,10 +80,7 @@ def normalize_header_key(
    """
    Coerce str/bytes into a strictly byte-wise HTTP header key.
    """
-    if isinstance(value, bytes):
-        bytes_value = value
-    else:
-        bytes_value = value.encode(encoding or "ascii")
+    bytes_value = value if isinstance(value, bytes) else value.encode(encoding or "ascii")

     return bytes_value.lower() if lower else bytes_value

@@ -250,7 +247,7 @@ def get_list(self, key: str, split_commas: bool = False) -> List[str]:

     def update(self, headers: Optional[HeaderTypes] = None) -> None:  # type: ignore
         headers = Headers(headers)
-        for key in headers.keys():
+        for key in headers:
             if key in self:
                 self.pop(key)
         self._list.extend(headers._list)
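Editor's note: dropping `.keys()` is Ruff's SIM118 check: iterating a mapping already yields its keys, so the call is redundant. Shown below for a plain dict; the custom `Headers` class presumably iterates the same way:

```python
headers = {"accept": "*/*", "user-agent": "curl"}

for key in headers:         # same result as: for key in headers.keys()
    print(key)

assert "accept" in headers  # same result as: "accept" in headers.keys()
```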
17 changes: 6 additions & 11 deletions curl_cffi/requests/models.py
@@ -44,7 +44,8 @@ class Response:
         elapsed: how many seconds the request cost.
         encoding: http body encoding.
         charset_encoding: encoding specified by the Content-Type header.
-        default_encoding: user-defined encoding used for decoding content if charset is not found in headers.
+        default_encoding: user-defined encoding used for decoding content if charset
+            is not found in headers.
         redirect_count: how many redirects happened.
         redirect_url: the final redirected url.
         http_version: http version used.
@@ -123,10 +124,7 @@ def iter_lines(self, chunk_size=None, decode_unicode=False, delimiter=None):
         for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
             if pending is not None:
                 chunk = pending + chunk
-            if delimiter:
-                lines = chunk.split(delimiter)
-            else:
-                lines = chunk.splitlines()
+            lines = chunk.split(delimiter) if delimiter else chunk.splitlines()
             if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                 pending = lines.pop()
             else:
@@ -142,7 +140,7 @@ def iter_content(self, chunk_size=None, decode_unicode=False):
         iterate streaming content chunk by chunk in bytes.
         """
         if chunk_size:
-            warnings.warn("chunk_size is ignored, there is no way to tell curl that.")
+            warnings.warn("chunk_size is ignored, there is no way to tell curl that.", stacklevel=2)
         if decode_unicode:
             raise NotImplementedError()

@@ -187,10 +185,7 @@ async def aiter_lines(self, chunk_size=None, decode_unicode=False, delimiter=None):
         async for chunk in self.aiter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
             if pending is not None:
                 chunk = pending + chunk
-            if delimiter:
-                lines = chunk.split(delimiter)
-            else:
-                lines = chunk.splitlines()
+            lines = chunk.split(delimiter) if delimiter else chunk.splitlines()
             if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                 pending = lines.pop()
             else:
@@ -207,7 +202,7 @@ async def aiter_content(self, chunk_size=None, decode_unicode=False):
         iterate streaming content chunk by chunk in bytes.
         """
         if chunk_size:
-            warnings.warn("chunk_size is ignored, there is no way to tell curl that.")
+            warnings.warn("chunk_size is ignored, there is no way to tell curl that.", stacklevel=2)
         if decode_unicode:
             raise NotImplementedError()

