Add aiohttp (#4282)
This doesn't actually work without an additional patch, but it's possible
to get it to work by monkey-patching a method.

See discussion about adding support for Pyodide to aiohttp:
aio-libs/aiohttp#7253
aio-libs/aiohttp#7803
hoodmane committed Nov 18, 2023
1 parent 6016151 commit 46ccc98
Showing 8 changed files with 347 additions and 12 deletions.
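Before the per-file diff, here is a minimal sketch of how the monkey patch added in packages/aiohttp/aiohttp_patch.py is meant to be applied inside a Pyodide runtime. It mirrors what test_aiohttp.py below does; the patch file path and the fetched URL are illustrative, not part of this commit.

from pathlib import Path

import aiohttp

# Executing the patch module reassigns aiohttp.ClientSession._request to a
# coroutine backed by the browser's fetch() (see aiohttp_patch.py below).
exec(Path("aiohttp_patch.py").read_text(), {})


async def get_status(url: str) -> int:
    # With the patch applied, the usual aiohttp client API can run in Pyodide.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            return resp.status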
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -91,7 +91,7 @@ repos:
name: mypy-tests
args: [--ignore-missing-imports]
files: ^(packages/|docs|/conftest.py|src/tests|pyodide-build/pyodide_build/tests)
-exclude: (^packages/.*/setup.py|/src)
+exclude: (^packages/.*/setup.py|/src|^packages/aiohttp/aiohttp_patch.py$)
additional_dependencies: *mypy-deps

ci:
13 changes: 2 additions & 11 deletions docs/project/changelog.md
@@ -60,17 +60,8 @@ myst:

### Packages

-- Added `pysam` version 0.22.0 {pr}`4268`
-
-- Added `river` version 0.19.0 {pr}`4197`
-
-- Added `sisl` version 0.14.2 {pr}`4210`
-
-- Added `frozenlist` {pr}`4231`
-
-- Added `zengl` version 1.16.0 {pr}`4208`
-
-- Added `msgspec` version 0.18.4 {pr}`4265`
+- New Packages: `river` {pr}`4197`, `sisl` {pr}`4210`, `frozenlist` {pr}`4231`,
+  `zengl` {pr}`4208`, `msgspec` {pr}`4265`, `aiohttp` {pr}`4282`, `pysam` {pr}`4268`

- Upgraded `contourpy` to 1.2.0 {pr}`4291`

245 changes: 245 additions & 0 deletions packages/aiohttp/aiohttp_patch.py
@@ -0,0 +1,245 @@
"""Used in test_aiohttp.py"""

from collections.abc import Iterable
from contextlib import suppress
from typing import Any

from aiohttp import ClientSession, ClientTimeout, InvalidURL, hdrs, payload
from aiohttp.client_reqrep import _merge_ssl_params
from aiohttp.helpers import TimeoutHandle, get_env_proxy_for_url, strip_auth_from_url
from multidict import CIMultiDict, istr
from yarl import URL


class Content:
    __slots__ = ("_jsresp", "_exception")

    def __init__(self, _jsresp):
        self._jsresp = _jsresp
        self._exception = None

    async def read(self):
        if self._exception:
            raise self._exception
        buf = await self._jsresp.arrayBuffer()
        self._jsresp = None
        return buf.to_bytes()

    def exception(self):
        return self._exception

    def set_exception(self, exc: BaseException) -> None:
        self._exception = exc


async def _request(
    self,
    method: str,
    str_or_url,
    *,
    params=None,
    data: Any = None,
    json: Any = None,
    cookies=None,
    headers=None,
    skip_auto_headers: Iterable[str] | None = None,
    auth=None,
    allow_redirects: bool = True,
    max_redirects: int = 10,
    compress: str | None = None,
    chunked: bool | None = None,
    expect100: bool = False,
    raise_for_status=None,
    read_until_eof: bool = True,
    proxy=None,
    proxy_auth=None,
    timeout=None,
    verify_ssl: bool | None = None,
    fingerprint: bytes | None = None,
    ssl_context=None,
    ssl=None,
    proxy_headers=None,
    trace_request_ctx=None,
    read_bufsize: int | None = None,
):
    # NOTE: timeout clamps existing connect and read timeouts. We cannot
    # set the default to None because we need to detect if the user wants
    # to use the existing timeouts by setting timeout to None.

    if self.closed:
        raise RuntimeError("Session is closed")

    ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)

    if data is not None and json is not None:
        raise ValueError("data and json parameters can not be used at the same time")
    elif json is not None:
        data = payload.JsonPayload(json, dumps=self._json_serialize)

    history = []
    version = self._version
    params = params or {}

    # Merge with default headers and transform to CIMultiDict
    headers = self._prepare_headers(headers)
    proxy_headers = self._prepare_headers(proxy_headers)

    try:
        url = self._build_url(str_or_url)
    except ValueError as e:
        raise InvalidURL(str_or_url) from e

    skip_headers = set(self._skip_auto_headers)
    if skip_auto_headers is not None:
        for i in skip_auto_headers:
            skip_headers.add(istr(i))

    if proxy is not None:
        try:
            proxy = URL(proxy)
        except ValueError as e:
            raise InvalidURL(proxy) from e

    if timeout is None:
        real_timeout = self._timeout
    else:
        if not isinstance(timeout, ClientTimeout):
            real_timeout = ClientTimeout(total=timeout)  # type: ignore[arg-type]
        else:
            real_timeout = timeout
    # timeout is cumulative for all request operations
    # (request, redirects, responses, data consuming)
    tm = TimeoutHandle(self._loop, real_timeout.total)
    handle = tm.start()

    if read_bufsize is None:
        read_bufsize = self._read_bufsize

    traces = []

    timer = tm.timer()
    try:
        with timer:
            url, auth_from_url = strip_auth_from_url(url)
            if auth and auth_from_url:
                raise ValueError(
                    "Cannot combine AUTH argument with "
                    "credentials encoded in URL"
                )

            if auth is None:
                auth = auth_from_url
            if auth is None:
                auth = self._default_auth
            # It would be confusing if we support explicit
            # Authorization header with auth argument
            if auth is not None and hdrs.AUTHORIZATION in headers:
                raise ValueError(
                    "Cannot combine AUTHORIZATION header "
                    "with AUTH argument or credentials "
                    "encoded in URL"
                )

            all_cookies = self._cookie_jar.filter_cookies(url)

            if proxy is not None:
                proxy = URL(proxy)
            elif self._trust_env:
                with suppress(LookupError):
                    proxy, proxy_auth = get_env_proxy_for_url(url)

            req = self._request_class(
                method,
                url,
                params=params,
                headers=headers,
                skip_auto_headers=skip_headers,
                data=data,
                cookies=all_cookies,
                auth=auth,
                version=version,
                compress=compress,
                chunked=chunked,
                expect100=expect100,
                loop=self._loop,
                response_class=self._response_class,
                proxy=proxy,
                proxy_auth=proxy_auth,
                timer=timer,
                session=self,
                ssl=ssl,
                proxy_headers=proxy_headers,
                traces=traces,
            )

            req.response = resp = req.response_class(
                req.method,
                req.original_url,
                writer=None,
                continue100=req._continue,
                timer=req._timer,
                request_info=req.request_info,
                traces=req._traces,
                loop=req.loop,
                session=req._session,
            )
            from js import Headers, fetch
            from pyodide.ffi import to_js

            body = None
            if req.body:
                body = to_js(req.body._value)
            jsresp = await fetch(
                str(req.url),
                method=req.method,
                headers=Headers.new(headers.items()),
                body=body,
            )
            resp.version = version
            resp.status = jsresp.status
            resp.reason = jsresp.statusText
            # This is not quite correct in handling of repeated headers
            resp._headers = CIMultiDict(jsresp.headers)
            resp._raw_headers = tuple(tuple(e) for e in jsresp.headers)
            resp.content = Content(jsresp)

        # check response status
        if raise_for_status is None:
            raise_for_status = self._raise_for_status

        if raise_for_status is None:
            pass
        elif callable(raise_for_status):
            await raise_for_status(resp)
        elif raise_for_status:
            resp.raise_for_status()

        # register connection
        if handle is not None:
            if resp.connection is not None:
                resp.connection.add_callback(handle.cancel)
            else:
                handle.cancel()

        resp._history = tuple(history)

        for trace in traces:
            await trace.send_request_end(
                method, url.update_query(params), headers, resp
            )
        return resp

    except BaseException as e:
        # cleanup timer
        tm.close()
        if handle:
            handle.cancel()
            handle = None

        for trace in traces:
            await trace.send_request_exception(
                method, url.update_query(params), headers, e
            )
        raise


ClientSession._request = _request
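A hedged usage sketch, not part of the commit: once the module above has been executed, the ordinary client API should work unchanged, because ClientResponse.read() and json() pull the body through the Content wrapper wired in above, while fetch() itself follows redirects. The URL below is a placeholder.

import aiohttp


async def fetch_json(url: str) -> dict:
    # The patched ClientSession._request routes this request through fetch();
    # raise_for_status() and json() behave as they do in regular aiohttp.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as resp:
            resp.raise_for_status()
            return await resp.json()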
24 changes: 24 additions & 0 deletions packages/aiohttp/meta.yaml
@@ -0,0 +1,24 @@
package:
  name: aiohttp
  version: 3.8.6
  top-level:
    - aiohttp
source:
  url: https://files.pythonhosted.org/packages/fd/01/f180d31923751fd20185c96938994823f00918ee5ac7b058edc005382406/aiohttp-3.8.6.tar.gz
  sha256: b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c

requirements:
  run:
    - aiosignal
    - async-timeout
    - attrs
    - charset-normalizer
    - frozenlist
    - multidict
    - yarl

about:
  home: https://github.com/aio-libs/aiohttp
  PyPI: https://pypi.org/project/aiohttp
  summary: Async http client/server framework (asyncio)
  license: Apache 2
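Once a Pyodide distribution is built with this recipe and the dependency recipes added below, loading the package could look roughly like the sketch here (top-level await as in the Pyodide console; micropip.install is a standard Pyodide API that resolves in-tree packages from pyodide-lock.json before falling back to PyPI, and whether aiohttp is available depends on the build).

import micropip

# Pulls in aiohttp plus the run requirements listed above (aiosignal,
# async-timeout, attrs, charset-normalizer, frozenlist, multidict, yarl).
await micropip.install("aiohttp")

import aiohttp

# Per the commit message, requests still need the aiohttp_patch.py monkey
# patch on top of this before they actually work in the browser.
print(aiohttp.__version__)  # 3.8.6 per this recipe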
29 changes: 29 additions & 0 deletions packages/aiohttp/test_aiohttp.py
@@ -0,0 +1,29 @@
from pathlib import Path
from typing import cast

import pytest
from pytest_pyodide import run_in_pyodide


@run_in_pyodide(packages=["aiohttp"])
async def aiohttp_test_helper(selenium, patch, base_url, lock_data):
    exec(patch, {})
    import json

    import aiohttp

    async with aiohttp.ClientSession() as session:
        async with session.get(base_url + "/pyodide-lock.json") as response:
            assert response.status == 200
            assert response.headers["content-type"] == "application/json"

            body = await response.json()
            expected = json.loads(lock_data)
            assert body == expected


def test_aiohttp(selenium):
    patch = (Path(__file__).parent / "aiohttp_patch.py").read_text()
    dist_dir = cast(str, pytest.pyodide_dist_dir)
    lock_data = (Path(dist_dir) / "pyodide-lock.json").read_text()
    aiohttp_test_helper(selenium, patch, selenium.base_url, lock_data)
18 changes: 18 additions & 0 deletions packages/aiosignal/meta.yaml
@@ -0,0 +1,18 @@
package:
  name: aiosignal
  version: 1.3.1
  top-level:
    - aiosignal
source:
  url: https://files.pythonhosted.org/packages/76/ac/a7305707cb852b7e16ff80eaf5692309bde30e2b1100a1fcacdc8f731d97/aiosignal-1.3.1-py3-none-any.whl
  sha256: f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17

requirements:
  run:
    - frozenlist

about:
  home: https://github.com/aio-libs/aiosignal
  PyPI: https://pypi.org/project/aiosignal
  summary: "aiosignal: a list of registered asynchronous callbacks"
  license: Apache 2.0
13 changes: 13 additions & 0 deletions packages/async-timeout/meta.yaml
@@ -0,0 +1,13 @@
package:
  name: async-timeout
  version: 4.0.3
  top-level:
    - async_timeout
source:
  url: https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl
  sha256: 7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028
about:
  home: https://github.com/aio-libs/async-timeout
  PyPI: https://pypi.org/project/async-timeout
  summary: Timeout context manager for asyncio programs
  license: Apache 2
15 changes: 15 additions & 0 deletions packages/charset-normalizer/meta.yaml
@@ -0,0 +1,15 @@
package:
  name: charset-normalizer
  version: 3.3.2
  top-level:
    - charset_normalizer
source:
  url: https://files.pythonhosted.org/packages/28/76/e6222113b83e3622caa4bb41032d0b1bf785250607392e1b778aca0b8a7d/charset_normalizer-3.3.2-py3-none-any.whl
  sha256: 3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc
about:
  home: https://github.com/Ousret/charset_normalizer
  PyPI: https://pypi.org/project/charset-normalizer
  summary:
    The Real First Universal Charset Detector. Open, modern and actively maintained
    alternative to Chardet.
  license: MIT
