Skip to content

Commit

Permalink
Merge remote-tracking branch 'origin/main' into meilisearch-v0.30
Browse files Browse the repository at this point in the history
  • Loading branch information
sanders41 committed Nov 22, 2022
2 parents b8dcedc + 87c9461 commit 4e4c08f
Show file tree
Hide file tree
Showing 3 changed files with 33 additions and 15 deletions.
42 changes: 30 additions & 12 deletions meilisearch_python_async/task.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from asyncio import sleep
from datetime import datetime
from typing import TYPE_CHECKING
from urllib.parse import urlencode

from httpx import AsyncClient
Expand All @@ -10,6 +11,9 @@
from meilisearch_python_async.errors import MeiliSearchTimeoutError
from meilisearch_python_async.models.task import TaskInfo, TaskStatus

if TYPE_CHECKING:
from meilisearch_python_async.client import Client # pragma: no cover


async def cancel_tasks(
http_client: AsyncClient,
Expand Down Expand Up @@ -158,7 +162,7 @@ async def delete_tasks(


async def get_tasks(
http_client: AsyncClient,
client: AsyncClient | Client,
*,
index_ids: list[str] | None = None,
types: str | list[str] | None = None,
Expand All @@ -167,7 +171,7 @@ async def get_tasks(
Args:
http_client: An AsyncClient instance.
client: An httpx AsyncClient or meilisearch_python_async Client instance.
index_ids: A list of index UIDs for which to get the tasks. If provided this will get the
tasks only for the specified indexes, if not all tasks will be returned. Default = None
types: Specify specific task types to retrieve. Default = None
Expand All @@ -188,23 +192,24 @@ async def get_tasks(
>>> from meilisearch_python_async.task import get_tasks
>>>
>>> async with Client("http://localhost.com", "masterKey") as client:
>>> await get_tasks(client.http_client)
>>> await get_tasks(client)
"""
url = f"tasks?indexUids={','.join(index_ids)}" if index_ids else "tasks"
if types:
formatted_types = ",".join(types) if isinstance(types, list) else types
url = f"{url}&types={formatted_types}" if "?" in url else f"{url}?types={formatted_types}"
response = await http_client.get(url)
client_ = _get_client(client)
response = await client_.get(url)

return [TaskStatus(**x) for x in response.json()["results"]]


async def get_task(http_client: AsyncClient, task_id: int) -> TaskStatus:
async def get_task(client: AsyncClient | Client, task_id: int) -> TaskStatus:
"""Get a single task from it's task id.
Args:
http_client: An AsyncClient instance.
client: An httpx AsyncClient or meilisearch_python_async Client instance.
task_id: Identifier of the task to retrieve.
Returns:
Expand All @@ -223,21 +228,26 @@ async def get_task(http_client: AsyncClient, task_id: int) -> TaskStatus:
>>> from meilisearch_python_async.task import get_task
>>>
>>> async with Client("http://localhost.com", "masterKey") as client:
>>> await get_task(client.http_client, 1244)
>>> await get_task(client, 1244)
"""
response = await http_client.get(f"tasks/{task_id}")
client_ = _get_client(client)
response = await client_.get(f"tasks/{task_id}")

return TaskStatus(**response.json())


async def wait_for_task(
http_client: AsyncClient, task_id: int, *, timeout_in_ms: int = 5000, interval_in_ms: int = 50
client: AsyncClient | Client,
task_id: int,
*,
timeout_in_ms: int = 5000,
interval_in_ms: int = 50,
) -> TaskStatus:
"""Wait until MeiliSearch processes a task, and get its status.
Args:
http_client: An AsyncClient instance.
client: An httpx AsyncClient or meilisearch_python_async Client instance.
task_id: Identifier of the task to retrieve.
timeout_in_ms: Amount of time in milliseconds to wait before raising a
MeiliSearchTimeoutError. Defaults to 5000.
Expand All @@ -264,10 +274,11 @@ async def wait_for_task(
>>> async with Client("http://localhost.com", "masterKey") as client:
>>> index = client.index("movies")
>>> response = await index.add_documents(documents)
>>> await wait_for_pending_task(client.http_client, response.update_id)
>>> await wait_for_pending_task(client, response.update_id)
"""
client_ = _get_client(client)
url = f"tasks/{task_id}"
http_requests = HttpRequests(http_client)
http_requests = HttpRequests(client_)
start_time = datetime.now()
elapsed_time = 0.0
while elapsed_time < timeout_in_ms:
Expand All @@ -281,3 +292,10 @@ async def wait_for_task(
raise MeiliSearchTimeoutError(
f"timeout of {timeout_in_ms}ms has exceeded on process {task_id} when waiting for pending update to resolve."
)


def _get_client(client: AsyncClient | Client) -> AsyncClient:
    """Normalize *client* to the underlying httpx ``AsyncClient``.

    Callers may pass either a raw httpx ``AsyncClient`` or a
    meilisearch_python_async ``Client`` wrapper; in the latter case the
    wrapped ``http_client`` attribute is returned.
    """
    if not isinstance(client, AsyncClient):
        # meilisearch_python_async Client wrapper — unwrap its http_client
        return client.http_client
    return client
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ async def clear_indexes(test_client):
if indexes:
for index in indexes:
response = await test_client.index(index.uid).delete()
await wait_for_task(test_client.http_client, response.task_uid)
await wait_for_task(test_client, response.task_uid)


@pytest.fixture(scope="session")
Expand Down
4 changes: 2 additions & 2 deletions tests/test_index.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,13 +91,13 @@ def sortable_attributes():
@pytest.mark.usefixtures("indexes_sample")
async def test_delete_index(test_client, index_uid, index_uid2):
response = await test_client.index(uid=index_uid).delete()
await wait_for_task(test_client.http_client, response.task_uid)
await wait_for_task(test_client, response.task_uid)

with pytest.raises(MeiliSearchApiError):
await test_client.get_index(uid=index_uid)

response = await test_client.index(uid=index_uid2).delete()
await wait_for_task(test_client.http_client, response.task_uid)
await wait_for_task(test_client, response.task_uid)

with pytest.raises(MeiliSearchApiError):
await test_client.get_index(uid=index_uid2)
Expand Down

0 comments on commit 4e4c08f

Please sign in to comment.