diff --git a/.github/workflows/ci-testing-deploy.yml b/.github/workflows/ci-testing-deploy.yml index 1e6a945f70d..742b53a8be5 100644 --- a/.github/workflows/ci-testing-deploy.yml +++ b/.github/workflows/ci-testing-deploy.yml @@ -14,6 +14,8 @@ on: - "!.vscode/**" - "**.py" - "**.js" + - "**/requirements/*.txt" + - "**.json" - ".github/workflows/ci-testing-deploy.yml" pull_request: branches: @@ -27,6 +29,8 @@ on: - "!.vscode-template/**" - "**.py" - "**.js" + - "**/requirements/*.txt" + - "**.json" - ".github/workflows/ci-testing-deploy.yml" env: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7091534d22a..ad8adb1e4f9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -6,11 +6,13 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.2.0 hooks: - - id: trailing-whitespace - - id: end-of-file-fixer - - id: check-yaml - id: check-added-large-files + args: ["--maxkb=1024"] - id: check-merge-conflict + - id: check-yaml + - id: detect-private-key + - id: end-of-file-fixer + - id: trailing-whitespace # NOTE: Keep order as pyupgrade (will update code) then pycln (remove unused imports), then isort (sort them) and black (final formatting) - repo: https://github.com/asottile/pyupgrade rev: v2.34.0 @@ -18,16 +20,21 @@ repos: - id: pyupgrade args: - "--py39-plus" + name: upgrade code - repo: https://github.com/hadialqattan/pycln rev: v1.2.5 hooks: - id: pycln args: [--all, --expand-stars] + name: prune imports - repo: https://github.com/PyCQA/isort rev: 5.10.1 hooks: - id: isort + args: ["--profile", "black"] + name: sort imports - repo: https://github.com/psf/black rev: 22.3.0 hooks: - id: black + name: format code diff --git a/mypy.ini b/mypy.ini index ab2add147bd..0eab7a8b646 100644 --- a/mypy.ini +++ b/mypy.ini @@ -19,8 +19,8 @@ disallow_any_generics = True check_untyped_defs = True no_implicit_reexport = True -# for strict mypy: (this is the tricky one :-)) -disallow_untyped_defs = True +# If True, it enforces trivial annotations like `def __init__(self) -> None` or `def test_() -> None`, which is not worth the effort +disallow_untyped_defs = False # removes all the missing imports stuff from external libraries which is annoying to the least ignore_missing_imports = True diff --git a/packages/dask-task-models-library/requirements/_base.txt b/packages/dask-task-models-library/requirements/_base.txt index d86f3c4eebd..ce2aa0d32c1 100644 --- a/packages/dask-task-models-library/requirements/_base.txt +++ b/packages/dask-task-models-library/requirements/_base.txt @@ -12,17 +12,17 @@ cloudpickle==2.0.0 # via # dask # distributed -dask==2022.05.0 +dask==2022.6.0 # via # -r requirements/_base.in # distributed -distributed==2022.05.0 +distributed==2022.6.0 # via dask dnspython==2.2.1 # via email-validator email-validator==1.2.1 # via pydantic -fsspec==2022.3.0 +fsspec==2022.5.0 # via dask heapdict==1.0.1 # via zict diff --git a/packages/models-library/src/models_library/services.py b/packages/models-library/src/models_library/services.py index a7edf947273..2839fe48793 100644 --- a/packages/models-library/src/models_library/services.py +++ b/packages/models-library/src/models_library/services.py @@ -4,6 +4,7 @@ python -c "from models_library.services import ServiceDockerData as cls; print(cls.schema_json(indent=2))" > services-schema.json """ +from datetime import datetime from enum import Enum from typing import Any, Optional, Union @@ -562,6 +563,7 @@ class ServiceMetaData(_BaseServiceCommonDataModel): name: Optional[str] thumbnail: Optional[HttpUrl]
description: Optional[str] + deprecated: Optional[datetime] # user-defined metatada classifiers: Optional[list[str]] diff --git a/packages/models-library/src/models_library/services_db.py b/packages/models-library/src/models_library/services_db.py index 3097755039b..bdfbd96a832 100644 --- a/packages/models-library/src/models_library/services_db.py +++ b/packages/models-library/src/models_library/services_db.py @@ -3,7 +3,7 @@ NOTE: to dump json-schema from CLI use python -c "from models_library.services import ServiceDockerData as cls; print(cls.schema_json(indent=2))" > services-schema.json """ -from typing import List, Optional +from typing import Optional from pydantic import Field from pydantic.types import PositiveInt @@ -19,7 +19,7 @@ class ServiceMetaDataAtDB(ServiceKeyVersion, ServiceMetaData): # for a partial update all members must be Optional - classifiers: Optional[List[str]] = Field([]) + classifiers: Optional[list[str]] = Field([]) owner: Optional[PositiveInt] class Config: @@ -34,6 +34,7 @@ class Config: "thumbnail": "http://thumbnailit.org/image", "created": "2021-01-18 12:46:57.7315", "modified": "2021-01-19 12:45:00", + "deprecated": "2099-01-19 12:45:00", "quality": { "enabled": True, "tsr_target": { diff --git a/packages/models-library/src/models_library/utils/database_models_factory.py b/packages/models-library/src/models_library/utils/database_models_factory.py index ac49714a48f..b98395e7139 100644 --- a/packages/models-library/src/models_library/utils/database_models_factory.py +++ b/packages/models-library/src/models_library/utils/database_models_factory.py @@ -6,13 +6,14 @@ import json import warnings from datetime import datetime -from typing import Any, Callable, Container, Dict, List, Optional, Tuple, Type +from typing import Any, Callable, Container, Optional from uuid import UUID import sqlalchemy as sa import sqlalchemy.sql.functions from pydantic import BaseConfig, BaseModel, Field, create_model from pydantic.types import NonNegativeInt +from sqlalchemy import null from sqlalchemy.sql.schema import Column warnings.warn( @@ -34,7 +35,7 @@ class OrmConfig(BaseConfig): def _eval_defaults( - column: Column, pydantic_type: Type, *, include_server_defaults: bool = True + column: Column, pydantic_type: type, *, include_server_defaults: bool = True ): """ Uses some heuristics to determine the default value/factory produced @@ -74,16 +75,17 @@ def _eval_defaults( ) elif issubclass(pydantic_type, datetime): assert isinstance( # nosec - column.server_default.arg, sqlalchemy.sql.functions.now + column.server_default.arg, + (type(null()), sqlalchemy.sql.functions.now), ) default_factory = datetime.now return default, default_factory -PolicyCallable = Callable[[Column, Any, Type], Tuple[Any, Type]] +PolicyCallable = Callable[[Column, Any, type], tuple[Any, type]] -def eval_name_policy(column: Column, default: Any, pydantic_type: Type): +def eval_name_policy(column: Column, default: Any, pydantic_type: type): """All string columns including 'uuid' in their name are set as UUIDs""" new_default, new_pydantic_type = default, pydantic_type if "uuid" in str(column.name).split("_") and pydantic_type == str: @@ -101,11 +103,11 @@ def eval_name_policy(column: Column, default: Any, pydantic_type: Type): def create_pydantic_model_from_sa_table( table: sa.Table, *, - config: Type = OrmConfig, + config: type = OrmConfig, exclude: Optional[Container[str]] = None, include_server_defaults: bool = False, - extra_policies: Optional[List[PolicyCallable]] = None, -) -> Type[BaseModel]: + 
extra_policies: Optional[list[PolicyCallable]] = None, +) -> type[BaseModel]: fields = {} exclude = exclude or [] @@ -117,7 +119,7 @@ def create_pydantic_model_from_sa_table( if name in exclude: continue - field_args: Dict[str, Any] = {} + field_args: dict[str, Any] = {} if name in _RESERVED: field_args["alias"] = name diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/README.md b/packages/postgres-database/src/simcore_postgres_database/migration/README.md index 01678a705f0..f33850b585e 100644 --- a/packages/postgres-database/src/simcore_postgres_database/migration/README.md +++ b/packages/postgres-database/src/simcore_postgres_database/migration/README.md @@ -2,6 +2,10 @@ Generic single-database configuration. +### NOTE: THIS IS DEPRECATED! + +There is no need to run this if you want to use alembic with simcore, as the folder initialization is already done. Instead, navigate your shell to `osparc-simcore/packages/postgres-database/` and follow the instructions in the Makefile there. Use this README only to learn about the alembic tool; don't actually execute the commands listed here! + ## Basic workflow @@ -78,8 +82,8 @@ We create a revision script for the change by using the local db as follows: pip install -r packages/postgres-database/requirements/dev.txt # install sc-pg package docker-compose -f services/docker-compose.yml -f services/docker-compose-ops.yml up adminer # bring db and ui up docker ps # find the published port for the db -sc-pg discover -u simcore -p simcore --port=32787 # discover the db -sp-pg info # what revision are we at? +sc-pg discover -u scu -p adminadmin --port=5432 # discover the db +sc-pg info # what revision are we at? sc-pg upgrade head # to to latest if necessary sc-pg review -m "Altered_table_why" # create a revision, note: the string will be part of the script sc-pg upgrade head # apply the revision diff --git a/packages/postgres-database/src/simcore_postgres_database/migration/versions/cf3bac482ce0_add_file_meta_data_deprecation_date.py b/packages/postgres-database/src/simcore_postgres_database/migration/versions/cf3bac482ce0_add_file_meta_data_deprecation_date.py new file mode 100644 index 00000000000..c4f3c5aad62 --- /dev/null +++ b/packages/postgres-database/src/simcore_postgres_database/migration/versions/cf3bac482ce0_add_file_meta_data_deprecation_date.py @@ -0,0 +1,30 @@ +"""add services_meta_data deprecation date + +Revision ID: cf3bac482ce0 +Revises: 90ed05df20d7 +Create Date: 2022-06-21 14:43:57.915189+00:00 + +""" +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "cf3bac482ce0" +down_revision = "90ed05df20d7" +branch_labels = None +depends_on = None + + +def downgrade(): + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("services_meta_data", "deprecated") + # ### end Alembic commands ### + + +def upgrade(): + # ### commands auto generated by Alembic - please adjust!
### + op.add_column( + "services_meta_data", + sa.Column("deprecated", sa.DateTime(), autoincrement=False, nullable=True), + ) + # ### end Alembic commands ### diff --git a/packages/postgres-database/src/simcore_postgres_database/models/services.py b/packages/postgres-database/src/simcore_postgres_database/models/services.py index 3ed6ff07703..0ee1955b414 100644 --- a/packages/postgres-database/src/simcore_postgres_database/models/services.py +++ b/packages/postgres-database/src/simcore_postgres_database/models/services.py @@ -5,6 +5,7 @@ """ import sqlalchemy as sa +from sqlalchemy import null from sqlalchemy.dialects.postgresql import ARRAY, JSONB from sqlalchemy.sql import expression, func @@ -69,6 +70,13 @@ onupdate=func.now(), doc="Timestamp with last update", ), + sa.Column( + "deprecated", + sa.DateTime(), + nullable=True, + server_default=null(), + doc="Timestamp with deprecation date", + ), sa.Column( "quality", JSONB, diff --git a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py index b8f6bf70ec6..a0ea89ed26c 100644 --- a/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py +++ b/packages/pytest-simcore/src/pytest_simcore/docker_swarm.py @@ -31,24 +31,16 @@ # HELPERS -------------------------------------------------------------------------------- -class _NotInSwarmException(Exception): - pass - - class _ResourceStillNotRemoved(Exception): pass -def _in_docker_swarm( - docker_client: docker.client.DockerClient, raise_error: bool = False -) -> bool: +def _is_docker_swarm_init(docker_client: docker.client.DockerClient) -> bool: try: docker_client.swarm.reload() inspect_result = docker_client.swarm.attrs assert type(inspect_result) == dict except APIError as error: - if raise_error: - raise _NotInSwarmException() from error return False return True @@ -147,6 +139,7 @@ def _fetch_and_print_services( def docker_client() -> Iterator[docker.client.DockerClient]: client = docker.from_env() yield client + client.close() @pytest.fixture(scope="session") @@ -164,14 +157,13 @@ def docker_swarm( wait=wait_fixed(2), stop=stop_after_delay(15), reraise=True ): with attempt: - if not _in_docker_swarm(docker_client): + if not _is_docker_swarm_init(docker_client): print("--> initializing docker swarm...") docker_client.swarm.init(advertise_addr=get_localhost_ip()) print("--> docker swarm initialized.") - # if still not in swarm, raise an error to try and initialize again - _in_docker_swarm(docker_client, raise_error=True) - assert _in_docker_swarm(docker_client) is True + # if still not in swarm, raise an error to try and initialize again + assert _is_docker_swarm_init(docker_client) yield @@ -180,6 +172,8 @@ def docker_swarm( assert docker_client.swarm.leave(force=True) print("<-- docker swarm left.") + assert _is_docker_swarm_init(docker_client) is keep_docker_up + @pytest.fixture(scope="module") def docker_stack( diff --git a/packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py b/packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py index 5d534b1abe8..492fff91643 100644 --- a/packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py +++ b/packages/pytest-simcore/src/pytest_simcore/monkeypatch_extra.py @@ -2,12 +2,18 @@ # pylint: disable=unused-argument # pylint: disable=unused-variable +import warnings from typing import Iterator import pytest from _pytest.fixtures import FixtureRequest from _pytest.monkeypatch import MonkeyPatch +warnings.warn( + f"{__name__} is deprecated, we highly recommend 
to use pytest.monkeypatch at function-scope level." + "Large scopes lead to complex problems during tests", + DeprecationWarning, +) # Some extras to overcome https://github.com/pytest-dev/pytest/issues/363 # SEE https://github.com/pytest-dev/pytest/issues/363#issuecomment-289830794 diff --git a/packages/pytest-simcore/src/pytest_simcore/repository_paths.py b/packages/pytest-simcore/src/pytest_simcore/repository_paths.py index b398b3b340b..1fd0dc9ddba 100644 --- a/packages/pytest-simcore/src/pytest_simcore/repository_paths.py +++ b/packages/pytest-simcore/src/pytest_simcore/repository_paths.py @@ -96,6 +96,13 @@ def pylintrc(osparc_simcore_root_dir: Path) -> Path: @pytest.fixture(scope="session") def project_slug_dir() -> Path: raise NotImplementedError("Override fixture in project's tests/conftest.py") + # + # Implementation example + # folder = CURRENT_DIR.parent + # assert folder.exists() + # assert any(folder.glob("src/simcore_service_dynamic_sidecar")) + # return folder + # @pytest.fixture(scope="session") diff --git a/packages/settings-library/src/settings_library/utils_logging.py b/packages/settings-library/src/settings_library/utils_logging.py index 4d925809e9f..7dbbbec058e 100644 --- a/packages/settings-library/src/settings_library/utils_logging.py +++ b/packages/settings-library/src/settings_library/utils_logging.py @@ -4,8 +4,13 @@ class MixinLoggingSettings: + """ + USAGE example in packages/settings-library/tests/test_utils_logging.py::test_mixin_logging + """ + @classmethod def validate_log_level(cls, value: Any) -> str: + """Standard implementation for @validator("LOG_LEVEL")""" try: getattr(logging, value.upper()) except AttributeError as err: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index f160a897006..a25010c6176 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -331,6 +331,7 @@ async def set(self, new_value: ItemConcreteValue) -> None: """ await self._set(new_concrete_value=new_value) await self._node_ports.save_to_db_cb(self._node_ports) + async def set_value(self, new_item_value: Optional[ItemValue]) -> None: """set the value on the port using an item-value diff --git a/services/catalog/src/simcore_service_catalog/core/background_tasks.py b/services/catalog/src/simcore_service_catalog/core/background_tasks.py index fe9f64cf84d..4c889fe48e2 100644 --- a/services/catalog/src/simcore_service_catalog/core/background_tasks.py +++ b/services/catalog/src/simcore_service_catalog/core/background_tasks.py @@ -13,7 +13,6 @@ import logging from contextlib import suppress from pprint import pformat -from typing import Dict, Set, Tuple from fastapi import FastAPI from models_library.services import ServiceDockerData @@ -34,7 +33,7 @@ ServiceKey = str ServiceVersion = str -ServiceDockerDataMap = Dict[Tuple[ServiceKey, ServiceVersion], ServiceDockerData] +ServiceDockerDataMap = dict[tuple[ServiceKey, ServiceVersion], ServiceDockerData] async def _list_registry_services( @@ -64,7 +63,7 @@ async def _list_registry_services( async def _list_db_services( db_engine: AsyncEngine, -) -> Set[Tuple[ServiceKey, ServiceVersion]]: +) -> set[tuple[ServiceKey, ServiceVersion]]: services_repo = ServicesRepository(db_engine=db_engine) return { (service.key, service.version) @@ -74,8 +73,8 @@ async def _list_db_services( async def _create_services_in_db( app: FastAPI, - service_keys: Set[Tuple[ServiceKey, ServiceVersion]], 
- services_in_registry: Dict[Tuple[ServiceKey, ServiceVersion], ServiceDockerData], + service_keys: set[tuple[ServiceKey, ServiceVersion]], + services_in_registry: dict[tuple[ServiceKey, ServiceVersion], ServiceDockerData], ) -> None: """Adds a new service in the database @@ -89,6 +88,7 @@ async def _create_services_in_db( service_metadata: ServiceDockerData = services_in_registry[ (service_key, service_version) ] + ## Set deprecation date to null (is valid date value for postgres) # DEFAULT policies ( @@ -106,9 +106,10 @@ async def _create_services_in_db( service_access_rights ) + service_metadata_dict = service_metadata.dict() # set the service in the DB await services_repo.create_service( - ServiceMetaDataAtDB(**service_metadata.dict(), owner=owner_gid), + ServiceMetaDataAtDB(**service_metadata_dict, owner=owner_gid), service_access_rights, ) @@ -118,10 +119,10 @@ async def _ensure_registry_insync_with_db(app: FastAPI) -> None: Notice that a services here refers to a 2-tuple (key, version) """ - services_in_registry: Dict[ - Tuple[ServiceKey, ServiceVersion], ServiceDockerData + services_in_registry: dict[ + tuple[ServiceKey, ServiceVersion], ServiceDockerData ] = await _list_registry_services(app) - services_in_db: Set[Tuple[ServiceKey, ServiceVersion]] = await _list_db_services( + services_in_db: set[tuple[ServiceKey, ServiceVersion]] = await _list_db_services( app.state.engine ) @@ -143,7 +144,7 @@ async def _ensure_published_templates_accessible( # Rationale: if a project template was published, its services must be available to everyone. # a published template has a column Published that is set to True projects_repo = ProjectsRepository(db_engine) - published_services: Set[Tuple[str, str]] = { + published_services: set[tuple[str, str]] = { (service.key, service.version) for service in await projects_repo.list_services_from_published_templates() } @@ -152,7 +153,7 @@ async def _ensure_published_templates_accessible( everyone_gid = (await groups_repo.get_everyone_group()).gid services_repo = ServicesRepository(db_engine) - available_services: Set[Tuple[str, str]] = { + available_services: set[tuple[str, str]] = { (service.key, service.version) for service in await services_repo.list_services( gids=[everyone_gid], execute_access=True diff --git a/services/catalog/src/simcore_service_catalog/db/repositories/services.py b/services/catalog/src/simcore_service_catalog/db/repositories/services.py index 43ebf053e44..15229fb17ff 100644 --- a/services/catalog/src/simcore_service_catalog/db/repositories/services.py +++ b/services/catalog/src/simcore_service_catalog/db/repositories/services.py @@ -1,7 +1,7 @@ import logging from collections import defaultdict from itertools import chain -from typing import Any, Dict, Iterable, List, Optional, Tuple +from typing import Any, Iterable, Optional import packaging.version import sqlalchemy as sa @@ -31,7 +31,7 @@ def _make_list_services_query( - gids: Optional[List[int]] = None, + gids: Optional[list[int]] = None, execute_access: Optional[bool] = None, write_access: Optional[bool] = None, combine_access_with_and: Optional[bool] = True, @@ -79,12 +79,12 @@ class ServicesRepository(BaseRepository): async def list_services( self, *, - gids: Optional[List[int]] = None, + gids: Optional[list[int]] = None, execute_access: Optional[bool] = None, write_access: Optional[bool] = None, combine_access_with_and: Optional[bool] = True, product_name: Optional[str] = None, - ) -> List[ServiceMetaDataAtDB]: + ) -> list[ServiceMetaDataAtDB]: services_in_db = [] async 
with self.db_engine.connect() as conn: @@ -107,7 +107,7 @@ async def list_service_releases( major: Optional[int] = None, minor: Optional[int] = None, limit_count: Optional[int] = None, - ) -> List[ServiceMetaDataAtDB]: + ) -> list[ServiceMetaDataAtDB]: """Lists LAST n releases of a given service, sorted from latest first major, minor is used to filter as major.minor.* or major.* @@ -153,7 +153,7 @@ async def get_service( key: str, version: str, *, - gids: Optional[List[int]] = None, + gids: Optional[list[int]] = None, execute_access: Optional[bool] = None, write_access: Optional[bool] = None, product_name: Optional[str] = None, @@ -192,7 +192,7 @@ async def get_service( async def create_service( self, new_service: ServiceMetaDataAtDB, - new_service_access_rights: List[ServiceAccessRightsAtDB], + new_service_access_rights: list[ServiceAccessRightsAtDB], ) -> ServiceMetaDataAtDB: for access_rights in new_service_access_rights: @@ -203,13 +203,16 @@ async def create_service( raise ValueError( f"{access_rights} does not correspond to service {new_service.key}:{new_service.version}" ) - + # Set the deprecation datetime to None (will be converted top sql's null) if not given + new_service_dict = new_service.dict(by_alias=True) + if "deprecated" not in new_service_dict: + new_service_dict["deprecated"] = None async with self.db_engine.begin() as conn: # NOTE: this ensure proper rollback in case of issue result = await conn.execute( # pylint: disable=no-value-for-parameter services_meta_data.insert() - .values(**new_service.dict(by_alias=True)) + .values(**new_service_dict) .returning(literal_column("*")) ) row = result.first() @@ -248,7 +251,7 @@ async def get_service_access_rights( key: str, version: str, product_name: Optional[str] = None, - ) -> List[ServiceAccessRightsAtDB]: + ) -> list[ServiceAccessRightsAtDB]: """ - If product_name is not specificed, then all are considered in the query """ @@ -268,9 +271,9 @@ async def get_service_access_rights( async def list_services_access_rights( self, - key_versions: Iterable[Tuple[str, str]], + key_versions: Iterable[tuple[str, str]], product_name: Optional[str] = None, - ) -> Dict[Tuple[str, str], List[ServiceAccessRightsAtDB]]: + ) -> dict[tuple[str, str], list[ServiceAccessRightsAtDB]]: """Batch version of get_service_access_rights""" service_to_access_rights = defaultdict(list) query = ( @@ -296,7 +299,7 @@ async def list_services_access_rights( return service_to_access_rights async def upsert_service_access_rights( - self, new_access_rights: List[ServiceAccessRightsAtDB] + self, new_access_rights: list[ServiceAccessRightsAtDB] ) -> None: # update the services_access_rights table (some might be added/removed/modified) for rights in new_access_rights: @@ -328,7 +331,7 @@ async def upsert_service_access_rights( ) async def delete_service_access_rights( - self, delete_access_rights: List[ServiceAccessRightsAtDB] + self, delete_access_rights: list[ServiceAccessRightsAtDB] ) -> None: async with self.db_engine.begin() as conn: for rights in delete_access_rights: @@ -375,11 +378,7 @@ async def get_service_specifications( if not allow_use_latest_service_version else True ) - & ( - services_specifications.c.gid.in_( - (group.gid for group in groups) - ) - ) + & (services_specifications.c.gid.in_(group.gid for group in groups)) ), ): try: diff --git a/services/catalog/tests/unit/with_dbs/conftest.py b/services/catalog/tests/unit/with_dbs/conftest.py index abe82f863d6..15f3ce4ad8d 100644 --- a/services/catalog/tests/unit/with_dbs/conftest.py +++ 
b/services/catalog/tests/unit/with_dbs/conftest.py @@ -6,7 +6,7 @@ import itertools import random from random import randint -from typing import Any, AsyncIterator, Callable, Dict, Iterable, Iterator, List, Tuple +from typing import Any, AsyncIterator, Callable, Iterable, Iterator import pytest import respx @@ -35,7 +35,7 @@ def app( mocker: MockerFixture, service_test_environ: None, postgres_db: sa.engine.Engine, - postgres_host_config: Dict[str, str], + postgres_host_config: dict[str, str], ) -> Iterable[FastAPI]: monkeypatch.setenv("CATALOG_TRACING", "null") monkeypatch.setenv("SC_BOOT_MODE", "local-development") @@ -90,7 +90,7 @@ def user_id() -> UserID: @pytest.fixture() -def user_db(postgres_db: sa.engine.Engine, user_id: UserID) -> Iterator[Dict]: +def user_db(postgres_db: sa.engine.Engine, user_id: UserID) -> Iterator[dict]: with postgres_db.connect() as con: # removes all users before continuing con.execute(users.delete()) @@ -118,7 +118,7 @@ def user_db(postgres_db: sa.engine.Engine, user_id: UserID) -> Iterator[Dict]: @pytest.fixture() async def products_names( sqlalchemy_async_engine: AsyncEngine, -) -> AsyncIterator[List[str]]: +) -> AsyncIterator[list[str]]: """Inits products db table and returns product names""" data = [ # already upon creation: ("osparc", r"([\.-]{0,1}osparc[\.-])"), @@ -145,8 +145,8 @@ async def products_names( @pytest.fixture() async def user_groups_ids( - sqlalchemy_async_engine: AsyncEngine, user_db: Dict[str, Any] -) -> AsyncIterator[List[int]]: + sqlalchemy_async_engine: AsyncEngine, user_db: dict[str, Any] +) -> AsyncIterator[list[int]]: """Inits groups table and returns group identifiers""" cols = ("gid", "name", "description", "type", "thumbnail", "inclusion_rules") @@ -206,7 +206,7 @@ async def services_db_tables_injector( """ # pylint: disable=no-value-for-parameter - async def inject_in_db(fake_catalog: List[Tuple]): + async def inject_in_db(fake_catalog: list[tuple]): # [(service, ar1, ...), (service2, ar1, ...) 
] async with sqlalchemy_async_engine.begin() as conn: @@ -235,8 +235,8 @@ async def inject_in_db(fake_catalog: List[Tuple]): @pytest.fixture() async def service_catalog_faker( - user_groups_ids: List[int], - products_names: List[str], + user_groups_ids: list[int], + products_names: list[str], faker: Faker, ) -> Callable: """Returns a fake factory that creates catalog DATA that can be used to fill @@ -257,7 +257,7 @@ async def service_catalog_faker( """ everyone_gid, user_gid, team_gid = user_groups_ids - def _random_service(**overrides) -> Dict[str, Any]: + def _random_service(**overrides) -> dict[str, Any]: data = dict( key=f"simcore/services/{random.choice(['dynamic', 'computational'])}/{faker.name()}", version=".".join([str(faker.pyint()) for _ in range(3)]), @@ -267,11 +267,12 @@ def _random_service(**overrides) -> Dict[str, Any]: thumbnail=random.choice([faker.image_url(), None]), classifiers=[], quality={}, + deprecated=None, ) data.update(overrides) return data - def _random_access(service, **overrides) -> Dict[str, Any]: + def _random_access(service, **overrides) -> dict[str, Any]: data = dict( key=service["key"], version=service["version"], @@ -285,7 +286,7 @@ def _random_access(service, **overrides) -> Dict[str, Any]: def _fake_factory( key, version, team_access=None, everyone_access=None, product=products_names[0] - ) -> Tuple[Dict[str, Any], ...]: + ) -> tuple[dict[str, Any], ...]: service = _random_service(key=key, version=version) diff --git a/services/catalog/tests/unit/with_dbs/test_db_repositories.py b/services/catalog/tests/unit/with_dbs/test_db_repositories.py index c7fa11550e6..259c015da2b 100644 --- a/services/catalog/tests/unit/with_dbs/test_db_repositories.py +++ b/services/catalog/tests/unit/with_dbs/test_db_repositories.py @@ -3,7 +3,7 @@ # pylint: disable=unused-variable from dataclasses import dataclass, field -from typing import Callable, List +from typing import Callable import pytest from models_library.services_db import ServiceAccessRightsAtDB, ServiceMetaDataAtDB @@ -30,13 +30,13 @@ class FakeCatalogInfo: jupyter_service_key: str = "simcore/services/dynamic/jupyterlab" expected_services_count: int = 5 expected_latest: str = "1.1.3" - expected_1_1_x: List[str] = field(default_factory=list) - expected_0_x_x: List[str] = field(default_factory=list) + expected_1_1_x: list[str] = field(default_factory=list) + expected_0_x_x: list[str] = field(default_factory=list) @pytest.fixture() async def fake_catalog_with_jupyterlab( - products_names: List[str], + products_names: list[str], service_catalog_faker: Callable, services_db_tables_injector: Callable, ) -> FakeCatalogInfo: @@ -120,8 +120,8 @@ async def test_create_services( async def test_read_services( services_repo: ServicesRepository, - user_groups_ids: List[int], - products_names: List[str], + user_groups_ids: list[int], + products_names: list[str], service_catalog_faker: Callable, services_db_tables_injector: Callable, ): @@ -197,7 +197,7 @@ async def test_list_service_releases( fake_catalog_with_jupyterlab: FakeCatalogInfo, services_repo: ServicesRepository, ): - services: List[ServiceMetaDataAtDB] = await services_repo.list_service_releases( + services: list[ServiceMetaDataAtDB] = await services_repo.list_service_releases( "simcore/services/dynamic/jupyterlab" ) assert len(services) == fake_catalog_with_jupyterlab.expected_services_count @@ -235,7 +235,7 @@ async def test_list_service_releases_version_filtered( assert latest assert latest.version == fake_catalog_with_jupyterlab.expected_latest - 
releases_1_1_x: List[ + releases_1_1_x: list[ ServiceMetaDataAtDB ] = await services_repo.list_service_releases( "simcore/services/dynamic/jupyterlab", major=1, minor=1 @@ -244,7 +244,7 @@ async def test_list_service_releases_version_filtered( s.version for s in releases_1_1_x ] == fake_catalog_with_jupyterlab.expected_1_1_x - expected_0_x_x: List[ + expected_0_x_x: list[ ServiceMetaDataAtDB ] = await services_repo.list_service_releases( "simcore/services/dynamic/jupyterlab", major=0 diff --git a/services/dask-sidecar/docker/boot.sh b/services/dask-sidecar/docker/boot.sh index 0668cd9032e..95f7374c648 100755 --- a/services/dask-sidecar/docker/boot.sh +++ b/services/dask-sidecar/docker/boot.sh @@ -111,9 +111,7 @@ else dask-worker "${DASK_SCHEDULER_URL}" \ --local-directory /tmp/dask-sidecar \ --preload simcore_service_dask_sidecar.tasks \ - --reconnect \ - --no-nanny \ - --nprocs ${DASK_NPROCS} \ + --nworkers ${DASK_NPROCS} \ --nthreads "${DASK_NTHREADS}" \ --dashboard-address 8787 \ --memory-limit "${DASK_MEMORY_LIMIT}" \ @@ -123,9 +121,7 @@ else exec dask-worker "${DASK_SCHEDULER_URL}" \ --local-directory /tmp/dask-sidecar \ --preload simcore_service_dask_sidecar.tasks \ - --reconnect \ - --no-nanny \ - --nprocs ${DASK_NPROCS} \ + --nworkers ${DASK_NPROCS} \ --nthreads "${DASK_NTHREADS}" \ --dashboard-address 8787 \ --memory-limit "${DASK_MEMORY_LIMIT}" \ diff --git a/services/dask-sidecar/requirements/_base.txt b/services/dask-sidecar/requirements/_base.txt index 7992cec7836..666bdfdf601 100644 --- a/services/dask-sidecar/requirements/_base.txt +++ b/services/dask-sidecar/requirements/_base.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/_base.txt --strip-extras requirements/_base.in # -aiobotocore==2.2.0 +aiobotocore==2.3.3 # via s3fs aiodocker==0.21.0 # via -r requirements/_base.in @@ -49,7 +49,7 @@ bleach==3.3.0 # via nbconvert blosc==1.10.6 # via -r requirements/_base.in -bokeh==2.4.2 +bokeh==2.4.3 # via dask botocore==1.24.21 # via aiobotocore @@ -62,24 +62,26 @@ charset-normalizer==2.0.12 # aiohttp # requests click==8.1.3 - # via distributed + # via + # dask-gateway + # distributed cloudpickle==2.0.0 # via # dask # distributed cytoolz==0.11.0 # via -r requirements/_base.in -dask==2022.4.0 +dask==2022.6.0 # via # -c requirements/../../../packages/dask-task-models-library/requirements/_base.in # -r requirements/_base.in # dask-gateway # distributed -dask-gateway==0.9.0 +dask-gateway==2022.6.1 # via -r requirements/_base.in defusedxml==0.7.1 # via nbconvert -distributed==2022.4.0 +distributed==2022.6.0 # via # dask # dask-gateway @@ -95,7 +97,7 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal -fsspec==2022.3.0 +fsspec==2022.5.0 # via # -c requirements/constraints.txt # dask @@ -146,8 +148,10 @@ jupyter-server-proxy==3.2.1 # via -r requirements/_base.in jupyterlab-pygments==0.1.2 # via nbconvert -locket==0.2.1 - # via partd +locket==1.0.0 + # via + # distributed + # partd lz4==4.0.0 # via -r requirements/_base.in markupsafe==2.1.1 @@ -247,6 +251,7 @@ pyyaml==5.4.1 # -c requirements/../../../requirements/constraints.txt # bokeh # dask + # dask-gateway # distributed pyzmq==22.1.0 # via @@ -254,7 +259,7 @@ pyzmq==22.1.0 # jupyter-server requests==2.27.1 # via -r requirements/_base.in -s3fs==2022.3.0 +s3fs==2022.5.0 # via -r requirements/_base.in send2trash==1.7.1 # via jupyter-server @@ -288,6 +293,7 @@ toolz==0.11.1 tornado==6.1 # via # bokeh + # dask-gateway # distributed # jupyter-client # jupyter-server diff --git a/services/dask-sidecar/requirements/_dask-complete.txt 
b/services/dask-sidecar/requirements/_dask-complete.txt index 15bea011cc6..b635ed867e4 100644 --- a/services/dask-sidecar/requirements/_dask-complete.txt +++ b/services/dask-sidecar/requirements/_dask-complete.txt @@ -8,7 +8,7 @@ blosc==1.10.6 # via # -c requirements/./_base.txt # -r requirements/_dask-complete.in -bokeh==2.4.2 +bokeh==2.4.3 # via # -c requirements/./_base.txt # dask @@ -21,16 +21,16 @@ cloudpickle==2.0.0 # -c requirements/./_base.txt # dask # distributed -dask==2022.4.0 +dask==2022.6.0 # via # -c requirements/./_base.txt # -r requirements/_dask-complete.in # distributed -distributed==2022.4.0 +distributed==2022.6.0 # via # -c requirements/./_base.txt # dask -fsspec==2022.3.0 +fsspec==2022.5.0 # via # -c requirements/./_base.txt # dask @@ -44,9 +44,10 @@ jinja2==2.11.3 # bokeh # dask # distributed -locket==0.2.1 +locket==1.0.0 # via # -c requirements/./_base.txt + # distributed # partd lz4==4.0.0 # via diff --git a/services/dask-sidecar/requirements/_dask-distributed.txt b/services/dask-sidecar/requirements/_dask-distributed.txt index e54793e9abb..bfcea89a86c 100644 --- a/services/dask-sidecar/requirements/_dask-distributed.txt +++ b/services/dask-sidecar/requirements/_dask-distributed.txt @@ -17,16 +17,16 @@ cloudpickle==2.0.0 # -c requirements/./_base.txt # dask # distributed -dask==2022.4.0 +dask==2022.6.0 # via # -c requirements/./_base.txt # -r requirements/_dask-distributed.in # distributed -distributed==2022.4.0 +distributed==2022.6.0 # via # -c requirements/./_base.txt # dask -fsspec==2022.3.0 +fsspec==2022.5.0 # via # -c requirements/./_base.txt # dask @@ -38,9 +38,10 @@ jinja2==2.11.3 # via # -c requirements/./_base.txt # distributed -locket==0.2.1 +locket==1.0.0 # via # -c requirements/./_base.txt + # distributed # partd lz4==4.0.0 # via diff --git a/services/dask-sidecar/requirements/_packages.txt b/services/dask-sidecar/requirements/_packages.txt index 34790255084..bc7012c3b1d 100644 --- a/services/dask-sidecar/requirements/_packages.txt +++ b/services/dask-sidecar/requirements/_packages.txt @@ -24,12 +24,12 @@ cloudpickle==2.0.0 # -c requirements/_base.txt # dask # distributed -dask==2022.4.0 +dask==2022.6.0 # via # -c requirements/_base.txt # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in # distributed -distributed==2022.4.0 +distributed==2022.6.0 # via # -c requirements/_base.txt # dask @@ -41,7 +41,7 @@ email-validator==1.2.1 # via # -c requirements/_base.txt # pydantic -fsspec==2022.3.0 +fsspec==2022.5.0 # via # -c requirements/_base.txt # dask @@ -68,9 +68,10 @@ jsonschema==3.2.0 # -c requirements/_base.txt # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in -locket==0.2.1 +locket==1.0.0 # via # -c requirements/_base.txt + # distributed # partd markupsafe==2.1.1 # via diff --git a/services/director-v2/requirements/_base.txt b/services/director-v2/requirements/_base.txt index 34741684fe3..28ad2cf4e23 100644 --- a/services/director-v2/requirements/_base.txt +++ b/services/director-v2/requirements/_base.txt @@ -88,6 +88,7 @@ charset-normalizer==2.0.12 click==8.1.3 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt + # dask-gateway # distributed # typer # uvicorn @@ -96,17 +97,17 @@ cloudpickle==2.0.0 # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask # distributed -dask==2022.4.0 +dask==2022.6.0 # 
via # -r requirements/../../../packages/dask-task-models-library/requirements/_base.in # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask-gateway # distributed -dask-gateway==0.9.0 +dask-gateway==2022.6.1 # via -r requirements/_base.in decorator==4.4.2 # via networkx -distributed==2022.4.0 +distributed==2022.6.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask @@ -128,7 +129,7 @@ frozenlist==1.3.0 # via # aiohttp # aiosignal -fsspec==2022.3.0 +fsspec==2022.5.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask @@ -199,9 +200,10 @@ jsonschema==3.2.0 # -r requirements/../../../packages/dask-task-models-library/requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/models-library/requirements/_base.in # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/models-library/requirements/_base.in -locket==0.2.1 +locket==1.0.0 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt + # distributed # partd lz4==4.0.0 # via -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt @@ -320,6 +322,7 @@ pyyaml==5.4.1 # -r requirements/../../../packages/simcore-sdk/requirements/../../../packages/service-library/requirements/_base.in # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt # dask + # dask-gateway # distributed # fastapi # uvicorn @@ -386,6 +389,7 @@ toolz==0.11.1 tornado==6.1 # via # -r requirements/../../../services/dask-sidecar/requirements/_dask-distributed.txt + # dask-gateway # distributed # jaeger-client # threadloop diff --git a/services/director-v2/requirements/_test.txt b/services/director-v2/requirements/_test.txt index 1905a458a77..536c16ee22a 100644 --- a/services/director-v2/requirements/_test.txt +++ b/services/director-v2/requirements/_test.txt @@ -98,7 +98,7 @@ cryptography==37.0.2 # -c requirements/../../../requirements/constraints.txt # dask-gateway-server # paramiko -dask-gateway-server==2022.4.0 +dask-gateway-server==2022.6.1 # via -r requirements/_test.in dill==0.3.4 # via pylint @@ -331,7 +331,7 @@ tornado==6.1 # via # -c requirements/_base.txt # bokeh -traitlets==5.1.1 +traitlets==5.3.0 # via dask-gateway-server typing-extensions==4.2.0 # via diff --git a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py index 7021f28721e..0835b96f09d 100644 --- a/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py +++ b/services/director-v2/src/simcore_service_director_v2/api/routes/dynamic_services.py @@ -1,9 +1,9 @@ import asyncio +import json import logging -from typing import Coroutine, List, Optional, Union, cast +from typing import Coroutine, Optional, Union, cast from uuid import UUID -import async_timeout import httpx from fastapi import APIRouter, Depends, Header from fastapi.responses import RedirectResponse @@ -14,6 +14,11 @@ from models_library.users import UserID from starlette import status from starlette.datastructures import URL +from tenacity import RetryCallState, TryAgain +from tenacity._asyncio import AsyncRetrying +from tenacity.before_sleep import before_sleep_log +from tenacity.stop import stop_after_delay +from tenacity.wait import wait_fixed from ...api.dependencies.database import get_repository from 
...api.dependencies.rabbitmq import get_rabbitmq_client @@ -56,7 +61,7 @@ @router.get( "", status_code=status.HTTP_200_OK, - response_model=List[DynamicServiceOut], + response_model=list[DynamicServiceOut], response_model_exclude_unset=True, summary=( "returns a list of running interactive services filtered by user_id and/or project_id" @@ -71,20 +76,20 @@ async def list_running_dynamic_services( get_dynamic_services_settings ), scheduler: DynamicSidecarsScheduler = Depends(get_scheduler), -) -> List[DynamicServiceOut]: - legacy_running_services: List[DynamicServiceOut] = cast( - List[DynamicServiceOut], +) -> list[DynamicServiceOut]: + legacy_running_services: list[DynamicServiceOut] = cast( + list[DynamicServiceOut], await director_v0_client.get_running_services(user_id, project_id), ) - get_stack_statuse_tasks: List[Coroutine] = [ + get_stack_statuse_tasks: list[Coroutine] = [ scheduler.get_stack_status(UUID(service["Spec"]["Labels"]["uuid"])) for service in await list_dynamic_sidecar_services( dynamic_services_settings.DYNAMIC_SIDECAR, user_id, project_id ) ] - dynamic_sidecar_running_services: List[DynamicServiceOut] = cast( - List[DynamicServiceOut], await asyncio.gather(*get_stack_statuse_tasks) + dynamic_sidecar_running_services: list[DynamicServiceOut] = cast( + list[DynamicServiceOut], await asyncio.gather(*get_stack_statuse_tasks) ) return legacy_running_services + dynamic_sidecar_running_services @@ -204,11 +209,26 @@ async def stop_dynamic_service( dynamic_services_settings.DYNAMIC_SIDECAR ) _STOPPED_CHECK_INTERVAL = 1.0 - async with async_timeout.timeout( - dynamic_sidecar_settings.DYNAMIC_SIDECAR_WAIT_FOR_SERVICE_TO_STOP + + def _log_error(retry_state: RetryCallState): + logger.error( + "Service with %s could not be untracked after %s", + f"{node_uuid=}", + f"{json.dumps(retry_state.retry_object.statistics)}", + ) + + async for attempt in AsyncRetrying( + wait=wait_fixed(_STOPPED_CHECK_INTERVAL), + stop=stop_after_delay( + dynamic_sidecar_settings.DYNAMIC_SIDECAR_WAIT_FOR_SERVICE_TO_STOP + ), + before_sleep=before_sleep_log(logger=logger, log_level=logging.INFO), + reraise=False, + retry_error_callback=_log_error, ): - while scheduler.is_service_tracked(node_uuid): - await asyncio.sleep(_STOPPED_CHECK_INTERVAL) + with attempt: + if scheduler.is_service_tracked(node_uuid): + raise TryAgain return NoContentResponse() diff --git a/services/director-v2/src/simcore_service_director_v2/core/settings.py b/services/director-v2/src/simcore_service_director_v2/core/settings.py index 772bff38109..beda23d930b 100644 --- a/services/director-v2/src/simcore_service_director_v2/core/settings.py +++ b/services/director-v2/src/simcore_service_director_v2/core/settings.py @@ -5,7 +5,7 @@ from enum import Enum from functools import cached_property from pathlib import Path -from typing import Dict, List, Optional, Set +from typing import Optional from models_library.basic_types import ( BootModeEnum, @@ -52,13 +52,13 @@ SERVICE_REVERSE_PROXY_SETTINGS: str = "simcore.service.reverse-proxy-settings" SERVICE_RUNTIME_BOOTSETTINGS: str = "simcore.service.bootsettings" -ORG_LABELS_TO_SCHEMA_LABELS: Dict[str, str] = { +ORG_LABELS_TO_SCHEMA_LABELS: dict[str, str] = { "org.label-schema.build-date": "build_date", "org.label-schema.vcs-ref": "vcs_ref", "org.label-schema.vcs-url": "vcs_url", } -SUPPORTED_TRAEFIK_LOG_LEVELS: Set[str] = {"info", "debug", "warn", "error"} +SUPPORTED_TRAEFIK_LOG_LEVELS: set[str] = {"info", "debug", "warn", "error"} PlacementConstraintStr = constr( strip_whitespace=True, 
regex=r"^[a-zA-Z0-9. ]*(!=|==){1}[a-zA-Z0-9. ]*$" @@ -92,10 +92,8 @@ def enforce_r_clone_requirement(cls, v, values) -> PositiveInt: dir_cache_time = values["R_CLONE_DIR_CACHE_TIME_SECONDS"] if not v < dir_cache_time: raise ValueError( - ( - f"R_CLONE_POLL_INTERVAL_SECONDS={v} must be lower " - f"than R_CLONE_DIR_CACHE_TIME_SECONDS={dir_cache_time}" - ) + f"R_CLONE_POLL_INTERVAL_SECONDS={v} must be lower " + f"than R_CLONE_DIR_CACHE_TIME_SECONDS={dir_cache_time}" ) return v @@ -185,6 +183,9 @@ class DynamicSidecarSettings(BaseCustomSettings): regex=SERVICE_NETWORK_RE, description="network all dynamic services are connected to", ) + DYNAMIC_SIDECAR_API_CLIENT_REQUEST_MAX_RETRIES: int = Field( + 4, description="maximum attempts to retry a request before giving up" + ) DYNAMIC_SIDECAR_API_REQUEST_TIMEOUT: PositiveFloat = Field( 15.0, description=( @@ -252,6 +253,13 @@ class DynamicSidecarSettings(BaseCustomSettings): "time to wait before giving up on removing dynamic-sidecar's volumes" ), ) + DYNAMIC_SIDECAR_STATUS_API_TIMEOUT_S: PositiveFloat = Field( + 1.0, + description=( + "when requesting the status of a service this is the " + "maximum amount of time the request can last" + ), + ) TRAEFIK_SIMCORE_ZONE: str = Field( ..., @@ -294,14 +302,6 @@ class DynamicServicesSchedulerSettings(BaseCustomSettings): 5.0, description="interval at which the scheduler cycle is repeated" ) - DIRECTOR_V2_DYNAMIC_SCHEDULER_MAX_STATUS_API_DURATION: PositiveFloat = Field( - 1.0, - description=( - "when requesting the status of a service this is the " - "maximum amount of time the request can last" - ), - ) - class DynamicServicesSettings(BaseCustomSettings): # TODO: PC->ANE: refactor dynamic-sidecar settings. One settings per app module @@ -443,7 +443,7 @@ class AppSettings(BaseCustomSettings, MixinLoggingSettings): # This is just a service placement constraint, see # https://docs.docker.com/engine/swarm/services/#control-service-placement. 
- DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS: List[PlacementConstraintStr] = Field( + DIRECTOR_V2_SERVICES_CUSTOM_CONSTRAINTS: list[PlacementConstraintStr] = Field( default_factory=list, example='["node.labels.region==east", "one!=yes"]', ) diff --git a/services/director-v2/src/simcore_service_director_v2/models/schemas/dynamic_services/scheduler.py b/services/director-v2/src/simcore_service_director_v2/models/schemas/dynamic_services/scheduler.py index 0864255c9d7..932e26892f0 100644 --- a/services/director-v2/src/simcore_service_director_v2/models/schemas/dynamic_services/scheduler.py +++ b/services/director-v2/src/simcore_service_director_v2/models/schemas/dynamic_services/scheduler.py @@ -1,7 +1,7 @@ import json import logging from enum import Enum -from typing import Any, Dict, List, Mapping, Optional +from typing import Any, Mapping, Optional from uuid import UUID, uuid4 from models_library.projects_nodes_io import NodeID @@ -11,7 +11,15 @@ SimcoreServiceLabels, ) from models_library.services_resources import ServiceResourcesDict -from pydantic import BaseModel, Extra, Field, PositiveInt, constr +from pydantic import ( + AnyHttpUrl, + BaseModel, + Extra, + Field, + PositiveInt, + constr, + parse_obj_as, +) from ..constants import ( DYNAMIC_PROXY_SERVICE_PREFIX, @@ -92,7 +100,7 @@ class DockerContainerInspect(BaseModel): id: str = Field(..., description="docker id of the container") @classmethod - def from_container(cls, container: Dict[str, Any]) -> "DockerContainerInspect": + def from_container(cls, container: dict[str, Any]) -> "DockerContainerInspect": return cls( status=DockerStatus(container["State"]["Status"]), name=container["Name"], @@ -168,7 +176,7 @@ def compose_spec_submitted(self) -> bool: description="if the docker-compose spec was already submitted this fields is True", ) - containers_inspect: List[DockerContainerInspect] = Field( + containers_inspect: list[DockerContainerInspect] = Field( [], scription="docker inspect results from all the container ran at regular intervals", ) @@ -235,9 +243,11 @@ def can_save_state(self) -> bool: # consider adding containers for healthchecks but this is more difficult and it depends on each service @property - def endpoint(self): + def endpoint(self) -> AnyHttpUrl: """endpoint where all the services are exposed""" - return f"http://{self.hostname}:{self.port}" + return parse_obj_as( + AnyHttpUrl, f"http://{self.hostname}:{self.port}" # NOSONAR + ) @property def are_containers_ready(self) -> bool: @@ -300,8 +310,9 @@ def make(cls, node_uuid: UUID) -> "DynamicSidecarNames": class SchedulerData(CommonServiceDetails, DynamicSidecarServiceLabels): - service_name: str = Field( - ..., description="Name of the current dynamic-sidecar being observed" + service_name: constr(strip_whitespace=True, min_length=2) = Field( + ..., + description="Name of the current dynamic-sidecar being observed", ) dynamic_sidecar: DynamicSidecar = Field( @@ -354,8 +365,8 @@ def from_http_request( service: "DynamicServiceCreate", simcore_service_labels: SimcoreServiceLabels, port: Optional[int], - request_dns: str = None, - request_scheme: str = None, + request_dns: Optional[str] = None, + request_scheme: Optional[str] = None, ) -> "SchedulerData": dynamic_sidecar_names = DynamicSidecarNames.make(service.node_uuid) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py index 5ed728c9ba0..7e945b7b799 100644 --- 
a/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dask_client.py @@ -15,7 +15,7 @@ from collections import deque from dataclasses import dataclass, field from http.client import HTTPException -from typing import Callable, Deque, Dict, Final, List, Optional, Tuple +from typing import Any, Callable, Deque, Final, Optional import distributed from dask_task_models_library.container_tasks.docker import DockerBasicAuth @@ -88,7 +88,7 @@ ServiceKey = str ServiceVersion = str LogFileUploadURL = AnyUrl -Commands = List[str] +Commands = list[str] RemoteFct = Callable[ [ DockerBasicAuth, @@ -112,7 +112,7 @@ class DaskClient: settings: ComputationalBackendSettings tasks_file_link_type: Final[FileLinkType] - _subscribed_tasks: List[asyncio.Task] = field(default_factory=list) + _subscribed_tasks: list[asyncio.Task] = field(default_factory=list) @classmethod async def create( @@ -190,10 +190,10 @@ async def send_computation_tasks( user_id: UserID, project_id: ProjectID, cluster_id: ClusterID, - tasks: Dict[NodeID, Image], + tasks: dict[NodeID, Image], callback: UserCallbackInSepThread, remote_fct: Optional[RemoteFct] = None, - ) -> List[Tuple[NodeID, str]]: + ) -> list[tuple[NodeID, str]]: """actually sends the function remote_fct to be remotely executed. if None is kept then the default function that runs container will be started.""" @@ -204,7 +204,7 @@ def _comp_sidecar_fct( input_data: TaskInputData, output_data_keys: TaskOutputDataSchema, log_file_url: AnyUrl, - command: List[str], + command: list[str], s3_settings: Optional[S3Settings], ) -> TaskOutputData: """This function is serialized by the Dask client and sent over to the Dask sidecar(s) @@ -224,7 +224,7 @@ def _comp_sidecar_fct( if remote_fct is None: remote_fct = _comp_sidecar_fct - list_of_node_id_to_job_id: List[Tuple[NodeID, str]] = [] + list_of_node_id_to_job_id: list[tuple[NodeID, str]] = [] for node_id, node_image in tasks.items(): job_id = generate_dask_job_id( service_key=node_image.name, @@ -340,7 +340,7 @@ def _comp_sidecar_fct( async def get_task_status(self, job_id: str) -> RunningState: return (await self.get_tasks_status(job_ids=[job_id]))[0] - async def get_tasks_status(self, job_ids: List[str]) -> List[RunningState]: + async def get_tasks_status(self, job_ids: list[str]) -> list[RunningState]: check_scheduler_is_still_the_same( self.backend.scheduler_id, self.backend.client ) @@ -426,10 +426,21 @@ async def get_cluster_details(self) -> ClusterDetails: scheduler_info = self.backend.client.scheduler_info() scheduler_status = self.backend.client.status dashboard_link = self.backend.client.dashboard_link - used_resources = await self.backend.client.run_on_scheduler( - lambda dask_scheduler: dict(dask_scheduler.used_resources) - ) # type: ignore - for k, v in used_resources.items(): + + def _get_worker_used_resources( + dask_scheduler: distributed.Scheduler, + ) -> dict[str, dict]: + used_resources = {} + for worker_name in dask_scheduler.workers: + worker = dask_scheduler.workers[worker_name] + used_resources[worker_name] = worker.used_resources + return used_resources + + used_resources_per_worker: dict[ + str, dict[str, Any] + ] = await self.backend.client.run_on_scheduler(_get_worker_used_resources) + + for k, v in used_resources_per_worker.items(): scheduler_info.get("workers", {}).get(k, {}).update(used_resources=v) assert dashboard_link # nosec diff --git 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/__init__.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/__init__.py new file mode 100644 index 00000000000..a116e134865 --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/__init__.py @@ -0,0 +1,19 @@ +from ._errors import BaseClientHTTPError, ClientHttpError, UnexpectedStatusError +from ._public import ( + DynamicSidecarClient, + get_dynamic_sidecar_client, + get_dynamic_sidecar_service_health, + setup, + shutdown, +) + +__all__: tuple[str, ...] = ( + "BaseClientHTTPError", + "ClientHttpError", + "DynamicSidecarClient", + "get_dynamic_sidecar_client", + "get_dynamic_sidecar_service_health", + "setup", + "shutdown", + "UnexpectedStatusError", +) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_base.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_base.py new file mode 100644 index 00000000000..499124af5df --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_base.py @@ -0,0 +1,161 @@ +import asyncio +import functools +import inspect +import logging +from logging import Logger +from typing import Any, Awaitable, Callable, Optional + +from httpx import AsyncClient, ConnectError, HTTPError, PoolTimeout, Response +from httpx._types import TimeoutTypes, URLTypes +from tenacity import RetryCallState +from tenacity._asyncio import AsyncRetrying +from tenacity.before import before_log +from tenacity.retry import retry_if_exception_type +from tenacity.stop import stop_after_attempt +from tenacity.wait import wait_exponential + +from ._errors import ClientHttpError, UnexpectedStatusError, _WrongReturnType + +logger = logging.getLogger(__name__) + + +def _log_requests_in_pool(client: AsyncClient, event_name: str) -> None: + # pylint: disable=protected-access + logger.warning( + "Requests while event '%s': %s", + event_name.upper(), + [ + (r.request.method, r.request.url, r.request.headers) + for r in client._transport._pool._requests + ], + ) + + +def _log_retry(log: Logger, max_retries: int) -> Callable[[RetryCallState], None]: + def log_it(retry_state: RetryCallState) -> None: + # pylint: disable=protected-access + + assert retry_state.outcome # nosec + e = retry_state.outcome.exception() + assert isinstance(e, HTTPError) # nosec + assert e._request # nosec + + log.info( + "[%s/%s]Retry. Unexpected %s while requesting '%s %s': %s", + retry_state.attempt_number, + max_retries, + e.__class__.__name__, + e._request.method, + e._request.url, + f"{e=}", + ) + + return log_it + + +def retry_on_errors( + request_func: Callable[..., Awaitable[Response]] +) -> Callable[..., Awaitable[Response]]: + """ + Will retry the request on `ConnectError` and `PoolTimeout`. 
+ Also wraps `httpx.HTTPError` + raises: + - `ClientHttpError` + """ + assert asyncio.iscoroutinefunction(request_func) + + RETRY_ERRORS = (ConnectError, PoolTimeout) + + @functools.wraps(request_func) + async def request_wrapper(zelf: "BaseThinClient", *args, **kwargs) -> Response: + # pylint: disable=protected-access + try: + async for attempt in AsyncRetrying( + stop=stop_after_attempt(zelf._request_max_retries), + wait=wait_exponential(min=1), + retry=retry_if_exception_type(RETRY_ERRORS), + before=before_log(logger, logging.DEBUG), + after=_log_retry(logger, zelf._request_max_retries), + reraise=True, + ): + with attempt: + r: Response = await request_func(zelf, *args, **kwargs) + return r + except HTTPError as e: + if isinstance(e, PoolTimeout): + _log_requests_in_pool(zelf._client, "pool timeout") + raise ClientHttpError(e) from e + + return request_wrapper + + +def expect_status(expected_code: int): + """ + raises an `UnexpectedStatusError` if the request's status is different + from `expected_code` + NOTE: always apply after `retry_on_errors` + + raises: + - `UnexpectedStatusError` + - `ClientHttpError` + """ + + def decorator( + request_func: Callable[..., Awaitable[Response]] + ) -> Callable[..., Awaitable[Response]]: + assert asyncio.iscoroutinefunction(request_func) + + @functools.wraps(request_func) + async def request_wrapper(zelf: "BaseThinClient", *args, **kwargs) -> Response: + response = await request_func(zelf, *args, **kwargs) + if response.status_code != expected_code: + raise UnexpectedStatusError(response, expected_code) + + return response + + return request_wrapper + + return decorator + + +class BaseThinClient: + SKIP_METHODS: set[str] = {"close"} + + def __init__( + self, + *, + request_max_retries: int, + base_url: Optional[URLTypes] = None, + timeout: Optional[TimeoutTypes] = None, + ) -> None: + self._request_max_retries: int = request_max_retries + + client_args: dict[str, Any] = {} + if base_url: + client_args["base_url"] = base_url + if timeout: + client_args["timeout"] = timeout + self._client = AsyncClient(**client_args) + + # ensure all user defined public methods return `httpx.Response` + # NOTE: ideally these checks should be ran at import time! 
+ public_methods = [ + t[1] + for t in inspect.getmembers(self, predicate=inspect.ismethod) + if not (t[0].startswith("_") or t[0] in self.SKIP_METHODS) + ] + + for method in public_methods: + signature = inspect.signature(method) + if signature.return_annotation != Response: + raise _WrongReturnType(method, signature.return_annotation) + + async def close(self) -> None: + _log_requests_in_pool(self._client, "closing") + await self._client.aclose() + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_t, exc_v, exc_tb): + await self.close() diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_errors.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_errors.py new file mode 100644 index 00000000000..e867005af65 --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_errors.py @@ -0,0 +1,56 @@ +""" +Exception hierarchy: + +* BaseClientError + x BaseRequestError + + ClientHttpError + + UnexpectedStatusError + x WrongReturnType +""" + +from httpx import Response + + +class BaseClientError(Exception): + """ + Used as based for all the raised errors + """ + + +class _WrongReturnType(BaseClientError): + """ + used internally to signal the user that the defined method + has an invalid return time annotation + """ + + def __init__(self, method, return_annotation) -> None: + super().__init__( + ( + f"{method=} should return an instance " + f"of {Response}, not '{return_annotation}'!" + ) + ) + + +class BaseClientHTTPError(BaseClientError): + """Base class to wrap all http related client errors""" + + +class ClientHttpError(BaseClientHTTPError): + """used to captures all httpx.HttpError""" + + def __init__(self, error: Exception) -> None: + super().__init__() + self.error: Exception = error + + +class UnexpectedStatusError(BaseClientHTTPError): + """raised when the status of the request is not the one it was expected""" + + def __init__(self, response: Response, expecting: int) -> None: + message = ( + f"Expected status: {expecting}, got {response.status_code} for: {response.url}: " + f"headers={response.headers}, body='{response.text}'" + ) + super().__init__(message) + self.response = response diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py new file mode 100644 index 00000000000..688d59e6971 --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_public.py @@ -0,0 +1,311 @@ +import logging +from collections import deque +from typing import Any, Optional + +from fastapi import FastAPI, status +from models_library.projects import ProjectID +from models_library.projects_networks import DockerNetworkAlias +from pydantic import AnyHttpUrl +from servicelib.utils import logged_gather + +from ....models.schemas.dynamic_services import SchedulerData +from ....modules.dynamic_sidecar.docker_api import get_or_create_networks_ids +from ....utils.logging_utils import log_decorator +from ..errors import EntrypointContainerNotFoundError, NodeportsDidNotFindNodeError +from ._errors import BaseClientHTTPError, UnexpectedStatusError +from ._thin import ThinDynamicSidecarClient + +logger = logging.getLogger(__name__) + + +class DynamicSidecarClient: + def __init__(self, app: FastAPI): + self.thin_client: ThinDynamicSidecarClient = 
ThinDynamicSidecarClient(app) + + async def is_healthy(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> bool: + """returns True if service is UP and running else False""" + try: + # this request uses a very short timeout + response = await self.thin_client.get_health(dynamic_sidecar_endpoint) + return response.json()["is_healthy"] + except BaseClientHTTPError: + return False + + @log_decorator(logger=logger) + async def containers_inspect( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> dict[str, Any]: + """ + returns dict containing docker inspect result form + all dynamic-sidecar started containers + """ + response = await self.thin_client.get_containers( + dynamic_sidecar_endpoint, only_status=False + ) + return response.json() + + @log_decorator(logger=logger) + async def containers_docker_status( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> dict[str, dict[str, str]]: + try: + response = await self.thin_client.get_containers( + dynamic_sidecar_endpoint, only_status=True + ) + return response.json() + except UnexpectedStatusError: + return {} + + @log_decorator(logger=logger) + async def start_service_creation( + self, dynamic_sidecar_endpoint: AnyHttpUrl, compose_spec: str + ) -> None: + response = await self.thin_client.post_containers( + dynamic_sidecar_endpoint, compose_spec=compose_spec + ) + logger.info("Spec submit result %s", response.text) + + @log_decorator(logger=logger) + async def begin_service_destruction( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> None: + """runs docker compose down on the started spec""" + response = await self.thin_client.post_containers_down(dynamic_sidecar_endpoint) + logger.info("Compose down result %s", response.text) + + @log_decorator(logger=logger) + async def service_save_state(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> None: + await self.thin_client.post_containers_state_save(dynamic_sidecar_endpoint) + + @log_decorator(logger=logger) + async def service_restore_state(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> None: + await self.thin_client.post_containers_state_restore(dynamic_sidecar_endpoint) + + @log_decorator(logger=logger) + async def service_pull_input_ports( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + port_keys: Optional[list[str]] = None, + ) -> int: + port_keys = [] if port_keys is None else port_keys + response = await self.thin_client.post_containers_ports_inputs_pull( + dynamic_sidecar_endpoint, port_keys=port_keys + ) + return int(response.text) + + @log_decorator(logger=logger) + async def service_disable_dir_watcher( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> None: + await self.thin_client.patch_containers_directory_watcher( + dynamic_sidecar_endpoint, is_enabled=False + ) + + @log_decorator(logger=logger) + async def service_enable_dir_watcher( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> None: + await self.thin_client.patch_containers_directory_watcher( + dynamic_sidecar_endpoint, is_enabled=True + ) + + @log_decorator(logger=logger) + async def service_outputs_create_dirs( + self, dynamic_sidecar_endpoint: AnyHttpUrl, outputs_labels: dict[str, Any] + ) -> None: + await self.thin_client.post_containers_ports_outputs_dirs( + dynamic_sidecar_endpoint, outputs_labels=outputs_labels + ) + + @log_decorator(logger=logger) + async def service_pull_output_ports( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + port_keys: Optional[list[str]] = None, + ) -> int: + response = await self.thin_client.post_containers_ports_outputs_pull( + dynamic_sidecar_endpoint, port_keys=port_keys + ) + 
return int(response.text) + + @log_decorator(logger=logger) + async def service_push_output_ports( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + port_keys: Optional[list[str]] = None, + ) -> None: + port_keys = [] if port_keys is None else port_keys + try: + await self.thin_client.post_containers_ports_outputs_push( + dynamic_sidecar_endpoint, port_keys=port_keys + ) + except UnexpectedStatusError as e: + if e.response.status_code == status.HTTP_404_NOT_FOUND: + json_error = e.response.json() + if json_error.get("code") == "dynamic_sidecar.nodeports.node_not_found": + raise NodeportsDidNotFindNodeError( + node_uuid=json_error["node_uuid"] + ) from e + raise e + + @log_decorator(logger=logger) + async def get_entrypoint_container_name( + self, dynamic_sidecar_endpoint: AnyHttpUrl, dynamic_sidecar_network_name: str + ) -> str: + """ + While this API raises EntrypointContainerNotFoundError + it should be called again, because in the menwhile the containers + might still be starting. + """ + try: + response = await self.thin_client.get_containers_name( + dynamic_sidecar_endpoint, + dynamic_sidecar_network_name=dynamic_sidecar_network_name, + ) + return response.json() + except UnexpectedStatusError as e: + if e.response.status_code == status.HTTP_404_NOT_FOUND: + raise EntrypointContainerNotFoundError() from e + raise e + + @log_decorator(logger=logger) + async def restart_containers(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> None: + """ + runs docker-compose stop and docker-compose start in succession + resulting in a container restart without loosing state + """ + await self.thin_client.post_containers_restart(dynamic_sidecar_endpoint) + + async def _attach_container_to_network( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + container_id: str, + network_id: str, + network_aliases: list[str], + ) -> None: + """attaches a container to a network if not already attached""" + await self.thin_client.post_containers_networks_attach( + dynamic_sidecar_endpoint, + container_id=container_id, + network_id=network_id, + network_aliases=network_aliases, + ) + + async def _detach_container_from_network( + self, dynamic_sidecar_endpoint: AnyHttpUrl, container_id: str, network_id: str + ) -> None: + """detaches a container from a network if not already detached""" + await self.thin_client.post_containers_networks_detach( + dynamic_sidecar_endpoint, container_id=container_id, network_id=network_id + ) + + async def attach_service_containers_to_project_network( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_network_name: str, + project_network: str, + project_id: ProjectID, + network_alias: DockerNetworkAlias, + ) -> None: + """All containers spawned by the dynamic-sidecar need to be attached to the project network""" + try: + containers_status = await self.containers_docker_status( + dynamic_sidecar_endpoint=dynamic_sidecar_endpoint + ) + except BaseClientHTTPError: + # if no containers are found it is ok to skip the operations, + # there are no containers to attach the network to + return + + sorted_container_names = sorted(containers_status.keys()) + + entrypoint_container_name = await self.get_entrypoint_container_name( + dynamic_sidecar_endpoint=dynamic_sidecar_endpoint, + dynamic_sidecar_network_name=dynamic_sidecar_network_name, + ) + + network_names_to_ids: dict[str, str] = await get_or_create_networks_ids( + [project_network], project_id + ) + network_id = network_names_to_ids[project_network] + + tasks = deque() + + for k, container_name in 
enumerate(sorted_container_names): + # by default we attach `alias-0`, `alias-1`, etc... + # to all containers + aliases = [f"{network_alias}-{k}"] + if container_name == entrypoint_container_name: + # by definition the entrypoint container will be exposed as the `alias` + aliases.append(network_alias) + + tasks.append( + self._attach_container_to_network( + dynamic_sidecar_endpoint=dynamic_sidecar_endpoint, + container_id=container_name, + network_id=network_id, + network_aliases=aliases, + ) + ) + + await logged_gather(*tasks) + + async def detach_service_containers_from_project_network( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + project_network: str, + project_id: ProjectID, + ) -> None: + # the network needs to be detached from all started containers + try: + containers_status = await self.containers_docker_status( + dynamic_sidecar_endpoint=dynamic_sidecar_endpoint + ) + except BaseClientHTTPError: + # if no containers are found it is ok to skip the operations, + # there are no containers to detach the network from + return + + network_names_to_ids: dict[str, str] = await get_or_create_networks_ids( + [project_network], project_id + ) + network_id = network_names_to_ids[project_network] + + await logged_gather( + *[ + self._detach_container_from_network( + dynamic_sidecar_endpoint=dynamic_sidecar_endpoint, + container_id=container_name, + network_id=network_id, + ) + for container_name in containers_status + ] + ) + + +async def setup(app: FastAPI) -> None: + logger.debug("dynamic-sidecar api client setup") + app.state.dynamic_sidecar_api_client = DynamicSidecarClient(app) + + +async def shutdown(app: FastAPI) -> None: + logger.debug("dynamic-sidecar api client closing...") + client: Optional[DynamicSidecarClient] + if client := app.state.dynamic_sidecar_api_client: + await client.thin_client.close() + + +def get_dynamic_sidecar_client(app: FastAPI) -> DynamicSidecarClient: + assert app.state.dynamic_sidecar_api_client # nosec + return app.state.dynamic_sidecar_api_client + + +async def get_dynamic_sidecar_service_health( + app: FastAPI, scheduler_data: SchedulerData +) -> None: + api_client = get_dynamic_sidecar_client(app) + service_endpoint = scheduler_data.dynamic_sidecar.endpoint + + # update service health + is_healthy = await api_client.is_healthy(service_endpoint) + scheduler_data.dynamic_sidecar.is_available = is_healthy diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py new file mode 100644 index 00000000000..afbb819a81c --- /dev/null +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/api_client/_thin.py @@ -0,0 +1,223 @@ +import json +import logging +from typing import Any, Optional + +from fastapi import FastAPI, status +from httpx import AsyncClient, Response, Timeout +from pydantic import AnyHttpUrl + +from ....core.settings import DynamicSidecarSettings +from ._base import BaseThinClient, expect_status, retry_on_errors + +logger = logging.getLogger(__name__) + + +class ThinDynamicSidecarClient(BaseThinClient): + """ + NOTE: all calls can raise the following errors. 
+ - `UnexpectedStatusError` + - `ClientHttpError` wraps httpx.HttpError errors + """ + + API_VERSION = "v1" + + def __init__(self, app: FastAPI): + settings: DynamicSidecarSettings = ( + app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR + ) + + self._client = AsyncClient( + timeout=Timeout( + settings.DYNAMIC_SIDECAR_API_REQUEST_TIMEOUT, + connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, + ) + ) + self._request_max_retries: int = ( + settings.DYNAMIC_SIDECAR_API_CLIENT_REQUEST_MAX_RETRIES + ) + + # timeouts + self._health_request_timeout = Timeout(1.0, connect=1.0) + self._save_restore_timeout = Timeout( + settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, + connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, + ) + self._restart_containers_timeout = Timeout( + settings.DYNAMIC_SIDECAR_API_RESTART_CONTAINERS_TIMEOUT, + connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, + ) + self._attach_detach_network_timeout = Timeout( + settings.DYNAMIC_SIDECAR_PROJECT_NETWORKS_ATTACH_DETACH_S, + connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, + ) + + super().__init__(request_max_retries=self._request_max_retries) + + def _get_url( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + postfix: str, + no_api_version: bool = False, + ) -> str: + """formats and returns an url for the request""" + api_version = "" if no_api_version else f"/{self.API_VERSION}" + return f"{dynamic_sidecar_endpoint}{api_version}{postfix}" + + @retry_on_errors + @expect_status(status.HTTP_200_OK) + async def get_health(self, dynamic_sidecar_endpoint: AnyHttpUrl) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/health", no_api_version=True) + return await self._client.get(url, timeout=self._health_request_timeout) + + @retry_on_errors + @expect_status(status.HTTP_200_OK) + async def get_containers( + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, only_status: bool + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers") + return await self._client.get(url, params=dict(only_status=only_status)) + + @retry_on_errors + @expect_status(status.HTTP_202_ACCEPTED) + async def post_containers( + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, compose_spec: str + ) -> Response: + # NOTE: this sometimes takes longer that the default timeout, maybe raise timeout here as well! 
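If that NOTE were acted on, the request could pass an explicit per-request timeout, the same way the state save/restore calls below already do; reusing `_save_restore_timeout` here is only illustrative, a dedicated setting would more likely be introduced. A sketch of the method continued below with such an override:

```python
@retry_on_errors
@expect_status(status.HTTP_202_ACCEPTED)
async def post_containers(
    self, dynamic_sidecar_endpoint: AnyHttpUrl, *, compose_spec: str
) -> Response:
    url = self._get_url(dynamic_sidecar_endpoint, "/containers")
    # hypothetical: override the client-wide default with a longer timeout
    return await self._client.post(
        url, data=compose_spec, timeout=self._save_restore_timeout
    )
```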
+ url = self._get_url(dynamic_sidecar_endpoint, "/containers") + return await self._client.post(url, data=compose_spec) + + @retry_on_errors + @expect_status(status.HTTP_200_OK) + async def post_containers_down( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers:down") + return await self._client.post(url) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_containers_state_save( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers/state:save") + return await self._client.post(url, timeout=self._save_restore_timeout) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_containers_state_restore( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers/state:restore") + return await self._client.post(url, timeout=self._save_restore_timeout) + + @retry_on_errors + @expect_status(status.HTTP_200_OK) + async def post_containers_ports_inputs_pull( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + *, + port_keys: Optional[list[str]] = None, + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/inputs:pull") + port_keys = [] if port_keys is None else port_keys + return await self._client.post( + url, json=port_keys, timeout=self._save_restore_timeout + ) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def patch_containers_directory_watcher( + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, is_enabled: bool + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers/directory-watcher") + return await self._client.patch(url, json=dict(is_enabled=is_enabled)) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_containers_ports_outputs_dirs( + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, outputs_labels: dict[str, Any] + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs/dirs") + return await self._client.post(url, json=dict(outputs_labels=outputs_labels)) + + @retry_on_errors + @expect_status(status.HTTP_200_OK) + async def post_containers_ports_outputs_pull( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + *, + port_keys: Optional[list[str]] = None, + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs:pull") + return await self._client.post( + url, json=port_keys, timeout=self._save_restore_timeout + ) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_containers_ports_outputs_push( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + *, + port_keys: Optional[list[str]] = None, + ) -> Response: + url = self._get_url(dynamic_sidecar_endpoint, "/containers/ports/outputs:push") + return await self._client.post( + url, json=port_keys, timeout=self._save_restore_timeout + ) + + @retry_on_errors + @expect_status(status.HTTP_200_OK) + async def get_containers_name( + self, dynamic_sidecar_endpoint: AnyHttpUrl, *, dynamic_sidecar_network_name: str + ) -> Response: + filters = json.dumps({"network": dynamic_sidecar_network_name}) + url = self._get_url( + dynamic_sidecar_endpoint, f"/containers/name?filters={filters}" + ) + return await self._client.get(url=url) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_containers_restart( + self, dynamic_sidecar_endpoint: AnyHttpUrl + ) -> Response: + url = 
self._get_url(dynamic_sidecar_endpoint, "/containers:restart") + return await self._client.post(url, timeout=self._restart_containers_timeout) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_containers_networks_attach( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + *, + container_id: str, + network_id: str, + network_aliases: list[str], + ) -> Response: + url = self._get_url( + dynamic_sidecar_endpoint, f"/containers/{container_id}/networks:attach" + ) + return await self._client.post( + url, + json=dict(network_id=network_id, network_aliases=network_aliases), + timeout=self._attach_detach_network_timeout, + ) + + @retry_on_errors + @expect_status(status.HTTP_204_NO_CONTENT) + async def post_containers_networks_detach( + self, + dynamic_sidecar_endpoint: AnyHttpUrl, + *, + container_id: str, + network_id: str, + ) -> Response: + url = self._get_url( + dynamic_sidecar_endpoint, f"/containers/{container_id}/networks:detach" + ) + return await self._client.post( + url, + json=dict(network_id=network_id), + timeout=self._attach_detach_network_timeout, + ) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/client_api.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/client_api.py deleted file mode 100644 index f8b8eca7817..00000000000 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/client_api.py +++ /dev/null @@ -1,445 +0,0 @@ -import json -import logging -from collections import deque -from typing import Any, Dict, List, Optional - -import httpx -from fastapi import FastAPI -from models_library.projects import ProjectID -from models_library.projects_networks import DockerNetworkAlias -from servicelib.utils import logged_gather -from starlette import status - -from ...core.settings import DynamicSidecarSettings -from ...models.schemas.dynamic_services import SchedulerData -from ...modules.dynamic_sidecar.docker_api import get_or_create_networks_ids -from ...utils.logging_utils import log_decorator -from .errors import ( - DynamicSidecarUnexpectedResponseStatus, - EntrypointContainerNotFoundError, - NodeportsDidNotFindNodeError, -) - -# PC -> SAN improvements to discuss -# -# TODO: Use logger, not logging! -# - compose error msgs instead of log functions -# TODO: Single instance of httpx client for all requests?: https://www.python-httpx.org/advanced/#why-use-a-client -# - see services/api-server/src/simcore_service_api_server/utils/client_base.py (-> move to servicelib/fastapi ?) 
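The client-reuse TODO above points at httpx's own guidance (https://www.python-httpx.org/advanced/#why-use-a-client): keep a single `AsyncClient` alive so connections are pooled, rather than opening a throw-away client per call as the removed `_attach`/`_detach` helpers did. A standalone sketch of that pattern; the host and paths are placeholders:

```python
import asyncio

import httpx


async def main() -> None:
    # one client shared by all requests -> connection pooling, shared timeouts
    async with httpx.AsyncClient(timeout=httpx.Timeout(15.0, connect=5.0)) as client:
        health = await client.get("http://sidecar.example:8000/health")
        containers = await client.get("http://sidecar.example:8000/v1/containers")
        print(health.status_code, containers.status_code)


asyncio.run(main())
```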
-# TODO: context to unify session's error handling and logging -# TODO: client function names equal/very similar to server handlers -# - -logger = logging.getLogger(__name__) - - -def get_url(dynamic_sidecar_endpoint: str, postfix: str) -> str: - """formats and returns an url for the request""" - url = f"{dynamic_sidecar_endpoint}{postfix}" - return url - - -def log_httpx_http_error(url: str, method: str, formatted_traceback: str) -> None: - # mainly used to debug issues with the API - logging.debug( - ( - "%s -> %s generated:\n %s\nThe above logs can safely " - "be ignored, except when debugging an issue " - "regarding the dynamic-sidecar" - ), - method, - url, - formatted_traceback, - ) - - -class DynamicSidecarClient: - """Will handle connections to the service sidecar""" - - API_VERSION = "v1" - - def __init__(self, app: FastAPI): - settings: DynamicSidecarSettings = ( - app.state.settings.DYNAMIC_SERVICES.DYNAMIC_SIDECAR - ) - - self._app: FastAPI = app - - self._client = httpx.AsyncClient( - timeout=httpx.Timeout( - settings.DYNAMIC_SIDECAR_API_REQUEST_TIMEOUT, - connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, - ) - ) - - # timeouts - self._health_request_timeout = httpx.Timeout(1.0, connect=1.0) - self._save_restore_timeout = httpx.Timeout( - settings.DYNAMIC_SIDECAR_API_SAVE_RESTORE_STATE_TIMEOUT, - connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, - ) - self._restart_containers_timeout = httpx.Timeout( - settings.DYNAMIC_SIDECAR_API_RESTART_CONTAINERS_TIMEOUT, - connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, - ) - self._attach_detach_network_timeout = httpx.Timeout( - settings.DYNAMIC_SIDECAR_PROJECT_NETWORKS_ATTACH_DETACH_S, - connect=settings.DYNAMIC_SIDECAR_API_CONNECT_TIMEOUT, - ) - - async def is_healthy(self, dynamic_sidecar_endpoint: str) -> bool: - """returns True if service is UP and running else False""" - url = get_url(dynamic_sidecar_endpoint, "/health") - try: - # this request uses a very short timeout - response = await self._client.get( - url=url, timeout=self._health_request_timeout - ) - response.raise_for_status() - - return response.json()["is_healthy"] - except httpx.HTTPError: - return False - - async def close(self) -> None: - await self._client.aclose() - - @log_decorator(logger=logger) - async def containers_inspect(self, dynamic_sidecar_endpoint: str) -> Dict[str, Any]: - """ - returns dict containing docker inspect result form - all dynamic-sidecar started containers - """ - url = get_url(dynamic_sidecar_endpoint, f"/{self.API_VERSION}/containers") - - response = await self._client.get(url=url) - if response.status_code != status.HTTP_200_OK: - raise DynamicSidecarUnexpectedResponseStatus(response) - - return response.json() - - @log_decorator(logger=logger) - async def containers_docker_status( - self, dynamic_sidecar_endpoint: str - ) -> Dict[str, Dict[str, str]]: - url = get_url(dynamic_sidecar_endpoint, f"/{self.API_VERSION}/containers") - - response = await self._client.get(url=url, params=dict(only_status=True)) - if response.status_code != status.HTTP_200_OK: - logging.warning( - "Unexpected response: status=%s, body=%s", - response.status_code, - response.text, - ) - return {} - - return response.json() - - @log_decorator(logger=logger) - async def start_service_creation( - self, dynamic_sidecar_endpoint: str, compose_spec: str - ) -> None: - url = get_url(dynamic_sidecar_endpoint, f"/{self.API_VERSION}/containers") - response = await self._client.post(url, data=compose_spec) - if response.status_code != status.HTTP_202_ACCEPTED: 
- raise DynamicSidecarUnexpectedResponseStatus(response, "service creation") - - # request was ok - logger.info("Spec submit result %s", response.text) - - @log_decorator(logger=logger) - async def begin_service_destruction(self, dynamic_sidecar_endpoint: str) -> None: - """runs docker compose down on the started spec""" - url = get_url(dynamic_sidecar_endpoint, f"/{self.API_VERSION}/containers:down") - - response = await self._client.post(url) - if response.status_code != status.HTTP_200_OK: - raise DynamicSidecarUnexpectedResponseStatus( - response, "service destruction" - ) - - logger.info("Compose down result %s", response.text) - - @log_decorator(logger=logger) - async def service_save_state(self, dynamic_sidecar_endpoint: str) -> None: - url = get_url(dynamic_sidecar_endpoint, "/v1/containers/state:save") - - response = await self._client.post(url, timeout=self._save_restore_timeout) - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus(response, "state saving") - - @log_decorator(logger=logger) - async def service_restore_state(self, dynamic_sidecar_endpoint: str) -> None: - url = get_url(dynamic_sidecar_endpoint, "/v1/containers/state:restore") - - response = await self._client.post(url, timeout=self._save_restore_timeout) - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus(response, "state restore") - - @log_decorator(logger=logger) - async def service_pull_input_ports( - self, dynamic_sidecar_endpoint: str, port_keys: Optional[List[str]] = None - ) -> int: - port_keys = [] if port_keys is None else port_keys - url = get_url(dynamic_sidecar_endpoint, "/v1/containers/ports/inputs:pull") - - response = await self._client.post( - url, json=port_keys, timeout=self._save_restore_timeout - ) - if response.status_code != status.HTTP_200_OK: - raise DynamicSidecarUnexpectedResponseStatus(response, "pull input ports") - return int(response.text) - - @log_decorator(logger=logger) - async def service_disable_dir_watcher(self, dynamic_sidecar_endpoint: str) -> None: - url = get_url(dynamic_sidecar_endpoint, "/v1/containers/directory-watcher") - - response = await self._client.patch(url, json=dict(is_enabled=False)) - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus( - response, "disable dir watcher" - ) - - @log_decorator(logger=logger) - async def service_enable_dir_watcher(self, dynamic_sidecar_endpoint: str) -> None: - url = get_url(dynamic_sidecar_endpoint, "/v1/containers/directory-watcher") - - response = await self._client.patch(url, json=dict(is_enabled=True)) - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus(response, "enable dir watcher") - - @log_decorator(logger=logger) - async def service_outputs_create_dirs( - self, dynamic_sidecar_endpoint: str, outputs_labels: Dict[str, Any] - ) -> None: - url = get_url(dynamic_sidecar_endpoint, "/v1/containers/ports/outputs/dirs") - - response = await self._client.post( - url, json=dict(outputs_labels=outputs_labels) - ) - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus( - response, "output dir creation" - ) - - @log_decorator(logger=logger) - async def service_pull_output_ports( - self, dynamic_sidecar_endpoint: str, port_keys: Optional[List[str]] = None - ) -> int: - port_keys = [] if port_keys is None else port_keys - url = get_url(dynamic_sidecar_endpoint, 
"/v1/containers/ports/outputs:pull") - - response = await self._client.post( - url, json=port_keys, timeout=self._save_restore_timeout - ) - if response.status_code != status.HTTP_200_OK: - raise DynamicSidecarUnexpectedResponseStatus(response, "output ports pull") - return int(response.text) - - @log_decorator(logger=logger) - async def service_push_output_ports( - self, dynamic_sidecar_endpoint: str, port_keys: Optional[List[str]] = None - ) -> None: - port_keys = [] if port_keys is None else port_keys - url = get_url(dynamic_sidecar_endpoint, "/v1/containers/ports/outputs:push") - - response = await self._client.post( - url, json=port_keys, timeout=self._save_restore_timeout - ) - if response.status_code == status.HTTP_404_NOT_FOUND: - json_error = response.json() - if json_error.get("code") == "dynamic_sidecar.nodeports.node_not_found": - raise NodeportsDidNotFindNodeError(node_uuid=json_error["node_uuid"]) - - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus(response, "output ports push") - - @log_decorator(logger=logger) - async def get_entrypoint_container_name( - self, dynamic_sidecar_endpoint: str, dynamic_sidecar_network_name: str - ) -> str: - """ - While this API raises EntrypointContainerNotFoundError - it should be called again, because in the menwhile the containers - might still be starting. - """ - filters = json.dumps({"network": dynamic_sidecar_network_name}) - url = get_url( - dynamic_sidecar_endpoint, - f"/{self.API_VERSION}/containers/name?filters={filters}", - ) - - response = await self._client.get(url=url) - if response.status_code == status.HTTP_404_NOT_FOUND: - raise EntrypointContainerNotFoundError() - response.raise_for_status() - - return response.json() - - @log_decorator(logger=logger) - async def restart_containers(self, dynamic_sidecar_endpoint: str) -> None: - """ - runs docker-compose stop and docker-compose start in succession - resulting in a container restart without loosing state - """ - url = get_url( - dynamic_sidecar_endpoint, f"/{self.API_VERSION}/containers:restart" - ) - - response = await self._client.post( - url=url, timeout=self._restart_containers_timeout - ) - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus(response, "containers restart") - - async def _attach_container_to_network( - self, - dynamic_sidecar_endpoint: str, - container_id: str, - network_id: str, - network_aliases: List[str], - ) -> None: - """attaches a container to a network if not already attached""" - url = get_url( - dynamic_sidecar_endpoint, f"/v1/containers/{container_id}/networks:attach" - ) - data = dict(network_id=network_id, network_aliases=network_aliases) - - async with httpx.AsyncClient( - timeout=self._attach_detach_network_timeout - ) as client: - response = await client.post(url, json=data) - if response.status_code != status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus( - response, "attach containers to network" - ) - - async def _detach_container_from_network( - self, dynamic_sidecar_endpoint: str, container_id: str, network_id: str - ) -> None: - """detaches a container from a network if not already detached""" - url = get_url( - dynamic_sidecar_endpoint, f"/v1/containers/{container_id}/networks:detach" - ) - data = dict(network_id=network_id) - - async with httpx.AsyncClient( - timeout=self._attach_detach_network_timeout - ) as client: - response = await client.post(url, json=data) - if response.status_code != 
status.HTTP_204_NO_CONTENT: - raise DynamicSidecarUnexpectedResponseStatus( - response, "detach containers from network" - ) - - async def attach_service_containers_to_project_network( - self, - dynamic_sidecar_endpoint: str, - dynamic_sidecar_network_name: str, - project_network: str, - project_id: ProjectID, - network_alias: DockerNetworkAlias, - ) -> None: - """All containers spawned by the dynamic-sidecar need to be attached to the project network""" - try: - containers_status = await self.containers_docker_status( - dynamic_sidecar_endpoint=dynamic_sidecar_endpoint - ) - except httpx.HTTPError: - return - - sorted_container_names = sorted(containers_status.keys()) - - entrypoint_container_name = await self.get_entrypoint_container_name( - dynamic_sidecar_endpoint=dynamic_sidecar_endpoint, - dynamic_sidecar_network_name=dynamic_sidecar_network_name, - ) - - network_names_to_ids: Dict[str, str] = await get_or_create_networks_ids( - [project_network], project_id - ) - network_id = network_names_to_ids[project_network] - - tasks = deque() - - for k, container_name in enumerate(sorted_container_names): - # by default we attach `alias-0`, `alias-1`, etc... - # to all containers - aliases = [f"{network_alias}-{k}"] - if container_name == entrypoint_container_name: - # by definition the entrypoint container will be exposed as the `alias` - aliases.append(network_alias) - - tasks.append( - self._attach_container_to_network( - dynamic_sidecar_endpoint=dynamic_sidecar_endpoint, - container_id=container_name, - network_id=network_id, - network_aliases=aliases, - ) - ) - - await logged_gather(*tasks) - - async def detach_service_containers_from_project_network( - self, dynamic_sidecar_endpoint: str, project_network: str, project_id: ProjectID - ) -> None: - # the network needs to be detached from all started containers - try: - containers_status = await self.containers_docker_status( - dynamic_sidecar_endpoint=dynamic_sidecar_endpoint - ) - except httpx.HTTPError: - return - - network_names_to_ids: Dict[str, str] = await get_or_create_networks_ids( - [project_network], project_id - ) - network_id = network_names_to_ids[project_network] - - await logged_gather( - *[ - self._detach_container_from_network( - dynamic_sidecar_endpoint=dynamic_sidecar_endpoint, - container_id=container_name, - network_id=network_id, - ) - for container_name in containers_status - ] - ) - - -async def setup_api_client(app: FastAPI) -> None: - logger.debug("dynamic-sidecar api client setup") - app.state.dynamic_sidecar_api_client = DynamicSidecarClient(app) - - -async def close_api_client(app: FastAPI) -> None: - logger.debug("dynamic-sidecar api client closing...") - client: Optional[DynamicSidecarClient] - if client := app.state.dynamic_sidecar_api_client: - # pylint: disable=protected-access - logger.debug( - "REQUESTS WHILE CLOSING %s", - [ - (r.request.method, r.request.url, r.request.headers) - for r in client._client._transport._pool._requests - ], - ) - await client.close() - - -def get_dynamic_sidecar_client(app: FastAPI) -> DynamicSidecarClient: - assert app.state.dynamic_sidecar_api_client # nosec - return app.state.dynamic_sidecar_api_client - - -async def update_dynamic_sidecar_health( - app: FastAPI, scheduler_data: SchedulerData -) -> None: - api_client = get_dynamic_sidecar_client(app) - service_endpoint = scheduler_data.dynamic_sidecar.endpoint - - # update service health - is_healthy = await api_client.is_healthy(service_endpoint) - scheduler_data.dynamic_sidecar.is_available = is_healthy diff --git 
a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py index d0ff009df1b..90170b81cb1 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/errors.py @@ -1,7 +1,4 @@ -from typing import Optional - from aiodocker.exceptions import DockerError -from httpx import Response from models_library.projects_nodes import NodeID from pydantic.errors import PydanticErrorMixin @@ -27,13 +24,6 @@ def __init__(self, node_uuid: NodeID): super().__init__(f"node {node_uuid} not found") -class DynamicSchedulerException(DirectorException): - """ - Used to signal that something was wrong with during - the service's observation. - """ - - class EntrypointContainerNotFoundError(DirectorException): """Raised while the entrypoint container was nto yet started""" @@ -42,19 +32,6 @@ class LegacyServiceIsNotSupportedError(DirectorException): """This API is not implemented by the director-v0""" -class DynamicSidecarUnexpectedResponseStatus(DirectorException): - """Used to signal that there was an issue with a request""" - - def __init__(self, response: Response, msg: Optional[str] = None): - formatted_tag = f"[during {msg}]" if msg is not None else "" - message = ( - f"Unexpected response {formatted_tag}: status={response.status_code}, " - f"url={response.url}, body={response.text}" - ) - super().__init__(message) - self.response = response - - class NodeportsDidNotFindNodeError(PydanticErrorMixin, DirectorException): code = "dynamic_scheduler.output_ports_pulling.node_not_found" msg_template = ( diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py index 6202c3e194c..a38b3fcbde5 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/module_setup.py @@ -1,17 +1,16 @@ from fastapi import FastAPI -from .client_api import close_api_client, setup_api_client -from .scheduler import setup_scheduler, shutdown_scheduler +from . 
import api_client, scheduler def setup(app: FastAPI) -> None: async def on_startup() -> None: - await setup_api_client(app) - await setup_scheduler(app) + await api_client.setup(app) + await scheduler.setup_scheduler(app) async def on_shutdown() -> None: - await shutdown_scheduler(app) - await close_api_client(app) + await scheduler.shutdown_scheduler(app) + await api_client.shutdown(app) app.add_event_handler("startup", on_startup) app.add_event_handler("shutdown", on_shutdown) diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events.py index d1cd703296d..be58547e560 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events.py @@ -2,7 +2,6 @@ import logging from typing import Any, Coroutine, Final, Optional, Type, cast -import httpx from fastapi import FastAPI from fastapi.encoders import jsonable_encoder from models_library.aiodocker_api import AioDockerServiceSpec @@ -28,7 +27,11 @@ from ...catalog import CatalogClient from ...db.repositories.projects import ProjectsRepository from ...db.repositories.projects_networks import ProjectsNetworksRepository -from ..client_api import DynamicSidecarClient, get_dynamic_sidecar_client +from ..api_client import ( + BaseClientHTTPError, + DynamicSidecarClient, + get_dynamic_sidecar_client, +) from ..docker_api import ( are_all_services_present, constrain_service_to_node, @@ -49,11 +52,7 @@ get_dynamic_sidecar_spec, merge_settings_before_use, ) -from ..errors import ( - DynamicSidecarUnexpectedResponseStatus, - EntrypointContainerNotFoundError, - NodeportsDidNotFindNodeError, -) +from ..errors import EntrypointContainerNotFoundError, NodeportsDidNotFindNodeError from .abc import DynamicSchedulerEvent from .events_utils import ( all_containers_running, @@ -232,7 +231,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: ] = await dynamic_sidecar_client.containers_inspect( dynamic_sidecar_endpoint ) - except (httpx.HTTPError, DynamicSidecarUnexpectedResponseStatus): + except BaseClientHTTPError: # After the service creation it takes a bit of time for the container to start # If the same message appears in the log multiple times in a row (for the same # service) something might be wrong with the service. @@ -508,10 +507,12 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: await dynamic_sidecar_client.begin_service_destruction( dynamic_sidecar_endpoint=scheduler_data.dynamic_sidecar.endpoint ) - # NOTE: ANE: need to use more specific exception here - except Exception as e: # pylint: disable=broad-except + except BaseClientHTTPError as e: logger.warning( - "Could not contact dynamic-sidecar to begin destruction of %s\n%s", + ( + "Could not contact dynamic-sidecar to begin destruction of " + "%s\n%s. Will continue service removal!" 
+ ), scheduler_data.service_name, f"{e}", ) @@ -557,8 +558,7 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: logger.warning("%s", f"{err}") logger.info("Ports data pushed by dynamic-sidecar") - # NOTE: ANE: need to use more specific exception here - except Exception as e: # pylint: disable=broad-except + except BaseClientHTTPError as e: logger.warning( ( "Could not contact dynamic-sidecar to save service " @@ -570,6 +570,8 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: # ensure dynamic-sidecar does not get removed # user data can be manually saved and manual # cleanup of the dynamic-sidecar is required + # TODO: ANE: maybe have a mechanism stop the dynamic sidecar + # and make the director warn about hanging sidecars? raise e # remove the 2 services diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events_utils.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events_utils.py index fa0bf13f740..6bc42d698bb 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events_utils.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/events_utils.py @@ -3,18 +3,19 @@ from typing import Any, AsyncIterator, Deque, Dict, List, Optional, Type from fastapi import FastAPI +from pydantic import AnyHttpUrl from ....api.dependencies.database import get_base_repository from ....models.schemas.dynamic_services import DockerContainerInspect from ....models.schemas.dynamic_services.scheduler import DockerStatus from ....modules.db.repositories import BaseRepository from ....modules.director_v0 import DirectorV0Client -from ..client_api import DynamicSidecarClient +from ..api_client import DynamicSidecarClient @asynccontextmanager async def disabled_directory_watcher( - dynamic_sidecar_client: DynamicSidecarClient, dynamic_sidecar_endpoint: str + dynamic_sidecar_client: DynamicSidecarClient, dynamic_sidecar_endpoint: AnyHttpUrl ) -> AsyncIterator[None]: try: # disable file system event watcher while writing diff --git a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/task.py b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/task.py index 72aa3cd4f3c..2b7a9a4746c 100644 --- a/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/task.py +++ b/services/director-v2/src/simcore_service_director_v2/modules/dynamic_sidecar/scheduler/task.py @@ -20,8 +20,6 @@ from typing import Optional from uuid import UUID -import httpx -from async_timeout import timeout from fastapi import FastAPI from models_library.projects_networks import DockerNetworkAlias from models_library.projects_nodes_io import NodeID @@ -38,10 +36,11 @@ RunningDynamicServiceDetails, SchedulerData, ) -from ..client_api import ( +from ..api_client import ( + ClientHttpError, DynamicSidecarClient, get_dynamic_sidecar_client, - update_dynamic_sidecar_health, + get_dynamic_sidecar_service_health, ) from ..docker_api import ( are_all_services_present, @@ -89,14 +88,7 @@ async def _apply_observation_cycle( node_uuid=scheduler_data.node_uuid, can_save=scheduler_data.dynamic_sidecar.can_save_state, ) - - try: - async with timeout( - dynamic_services_settings.DYNAMIC_SCHEDULER.DIRECTOR_V2_DYNAMIC_SCHEDULER_MAX_STATUS_API_DURATION - ): - await update_dynamic_sidecar_health(app, scheduler_data) - except 
asyncio.TimeoutError: - scheduler_data.dynamic_sidecar.is_available = False + await get_dynamic_sidecar_service_health(app, scheduler_data) for dynamic_scheduler_event in REGISTERED_EVENTS: if await dynamic_scheduler_event.will_trigger( @@ -139,12 +131,6 @@ async def add_service(self, scheduler_data: SchedulerData) -> None: keep track of the service for faster searches. """ async with self._lock: - - if not scheduler_data.service_name: - raise DynamicSidecarError( - "a service with no name is not valid. Invalid usage." - ) - if scheduler_data.service_name in self._to_observe: logger.warning( "Service %s is already being observed", scheduler_data.service_name @@ -243,7 +229,7 @@ async def get_stack_status(self, node_uuid: NodeID) -> RunningDynamicServiceDeta ] = await dynamic_sidecar_client.containers_docker_status( dynamic_sidecar_endpoint=scheduler_data.dynamic_sidecar.endpoint ) - except httpx.HTTPError: + except ClientHttpError: # error fetching docker_statues, probably someone should check return RunningDynamicServiceDetails.from_scheduler_data( node_uuid=node_uuid, diff --git a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py index 4d3b74c0b5d..6c4f64710a1 100644 --- a/services/director-v2/tests/integration/02/test_dynamic_services_routes.py +++ b/services/director-v2/tests/integration/02/test_dynamic_services_routes.py @@ -2,16 +2,17 @@ # pylint: disable=unused-argument import asyncio +import json import logging -from typing import Any, AsyncIterable, AsyncIterator, Callable, Dict, List, Tuple +from typing import Any, AsyncIterable, AsyncIterator, Callable from unittest.mock import Mock import aiodocker import pytest from async_asgi_testclient import TestClient from async_asgi_testclient.response import Response -from async_timeout import timeout from faker import Faker +from fastapi import FastAPI from models_library.projects import ProjectAtDB, ProjectID from models_library.projects_nodes_io import NodeID from models_library.services import ServiceKeyVersion @@ -25,6 +26,10 @@ from settings_library.rabbit import RabbitSettings from simcore_service_director_v2.core.application import init_app from simcore_service_director_v2.core.settings import AppSettings +from tenacity._asyncio import AsyncRetrying +from tenacity.retry import retry_if_exception_type +from tenacity.stop import stop_after_delay +from tenacity.wait import wait_fixed from utils import ensure_network_cleanup, patch_dynamic_service_url SERVICE_IS_READY_TIMEOUT = 2 * 60 @@ -47,7 +52,7 @@ def minimal_configuration( postgres_db, postgres_host_config: dict[str, str], - dy_static_file_server_dynamic_sidecar_service: Dict, + dy_static_file_server_dynamic_sidecar_service: dict, simcore_services_ready: None, rabbit_service: RabbitSettings, ): @@ -81,10 +86,10 @@ def start_request_data( user_id: UserID, project_id: ProjectID, node_uuid: NodeID, - dy_static_file_server_dynamic_sidecar_service: Dict, + dy_static_file_server_dynamic_sidecar_service: dict, service_resources: ServiceResourcesDict, ensure_swarm_and_networks: None, -) -> Dict[str, Any]: +) -> dict[str, Any]: return dict( user_id=user_id, project_id=project_id, @@ -153,7 +158,7 @@ async def director_v2_client( @pytest.fixture async def ensure_services_stopped( - start_request_data: Dict[str, Any], director_v2_client: TestClient + start_request_data: dict[str, Any], director_v2_client: TestClient ) -> AsyncIterator[None]: yield # ensure service cleanup when done testing 
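The events.py and task.py hunks above switch caller-side error handling from raw `httpx` exceptions to the wrapper hierarchy introduced in this PR; a condensed sketch of the resulting pattern (the client, exception, and method names come from the diff, the helper function itself is illustrative):

```python
from typing import Any

from fastapi import FastAPI
from pydantic import AnyHttpUrl

from simcore_service_director_v2.modules.dynamic_sidecar.api_client import (
    BaseClientHTTPError,
    get_dynamic_sidecar_client,
)


async def containers_status_or_empty(
    app: FastAPI, dynamic_sidecar_endpoint: AnyHttpUrl
) -> dict[str, Any]:
    dynamic_sidecar_client = get_dynamic_sidecar_client(app)
    try:
        return await dynamic_sidecar_client.containers_docker_status(
            dynamic_sidecar_endpoint=dynamic_sidecar_endpoint
        )
    except BaseClientHTTPError:
        # ClientHttpError (transport issues) and UnexpectedStatusError
        # both derive from BaseClientHTTPError, so one except clause covers both
        return {}
```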
@@ -187,7 +192,7 @@ def mock_project_repository(mocker: MockerFixture) -> None: @pytest.fixture def mock_dynamic_sidecar_api_calls(mocker: MockerFixture) -> None: class_path = ( - f"{DIRECTOR_V2_MODULES}.dynamic_sidecar.client_api.DynamicSidecarClient" + f"{DIRECTOR_V2_MODULES}.dynamic_sidecar.api_client.DynamicSidecarClient" ) for function_name, return_value in [ ("service_save_state", None), @@ -205,12 +210,12 @@ def mock_dynamic_sidecar_api_calls(mocker: MockerFixture) -> None: @pytest.fixture async def key_version_expected( - dy_static_file_server_dynamic_sidecar_service: Dict, - dy_static_file_server_service: Dict, + dy_static_file_server_dynamic_sidecar_service: dict, + dy_static_file_server_service: dict, docker_registry_image_injector: Callable, -) -> List[Tuple[ServiceKeyVersion, bool]]: +) -> list[tuple[ServiceKeyVersion, bool]]: - results: List[Tuple[ServiceKeyVersion, bool]] = [] + results: list[tuple[ServiceKeyVersion, bool]] = [] sleeper_service = docker_registry_image_injector( "itisfoundation/sleeper", "2.1.1", "user@e.mail" @@ -235,7 +240,7 @@ async def key_version_expected( async def test_start_status_stop( director_v2_client: TestClient, node_uuid: str, - start_request_data: Dict[str, Any], + start_request_data: dict[str, Any], ensure_services_stopped: None, mock_project_repository: None, mock_dynamic_sidecar_api_calls: None, @@ -252,28 +257,36 @@ async def test_start_status_stop( "/v2/dynamic_services", json=start_request_data, headers=headers ) assert response.status_code == 201, response.text - + assert isinstance(director_v2_client.application, FastAPI) await patch_dynamic_service_url( app=director_v2_client.application, node_uuid=node_uuid ) # awaiting for service to be running data = {} - async with timeout(SERVICE_IS_READY_TIMEOUT): - status_is_not_running = True - while status_is_not_running: - + async for attempt in AsyncRetrying( + reraise=True, + retry=retry_if_exception_type(AssertionError), + stop=stop_after_delay(SERVICE_IS_READY_TIMEOUT), + wait=wait_fixed(5), + ): + with attempt: + print( + f"--> getting service {node_uuid=} status... 
attempt {attempt.retry_state.attempt_number}" + ) response: Response = await director_v2_client.get( f"/v2/dynamic_services/{node_uuid}", json=start_request_data ) - logger.warning("sidecar status result %s", response.text) + print("-- sidecar status result %s", response.text) assert response.status_code == 200, response.text data = response.json() - status_is_not_running = data.get("service_state", "") != "running" + assert data.get("service_state", "") == "running" + print( + "<-- sidecar is running %s", + f"{json.dumps(attempt.retry_state.retry_object.statistics)}", + ) - # give the service some time to keep up - await asyncio.sleep(5) assert "service_state" in data assert data["service_state"] == "running" diff --git a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py index f32dda21d82..e12414b8fba 100644 --- a/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py +++ b/services/director-v2/tests/integration/02/test_mixed_dynamic_sidecar_and_legacy_project.py @@ -215,7 +215,7 @@ def mock_dynamic_sidecar_client(mocker: MockerFixture) -> None: ("service_push_output_ports", None), ]: mocker.patch( - f"simcore_service_director_v2.modules.dynamic_sidecar.client_api.DynamicSidecarClient.{method_name}", + f"simcore_service_director_v2.modules.dynamic_sidecar.api_client.DynamicSidecarClient.{method_name}", return_value=return_value, ) diff --git a/services/director-v2/tests/integration/02/utils.py b/services/director-v2/tests/integration/02/utils.py index a1c08557b30..a3406e82966 100644 --- a/services/director-v2/tests/integration/02/utils.py +++ b/services/director-v2/tests/integration/02/utils.py @@ -5,11 +5,10 @@ import logging import os import urllib.parse -from typing import Any, Dict, Optional, Set +from typing import Any, Optional import aiodocker import httpx -from async_timeout import timeout from fastapi import FastAPI from models_library.projects import Node from models_library.services_resources import ( @@ -27,7 +26,8 @@ DynamicSidecarsScheduler, ) from tenacity._asyncio import AsyncRetrying -from tenacity.stop import stop_after_attempt +from tenacity.retry import retry_if_exception_type +from tenacity.stop import stop_after_attempt, stop_after_delay from tenacity.wait import wait_fixed from yarl import URL @@ -51,9 +51,9 @@ def is_legacy(node_data: Node) -> bool: async def ensure_volume_cleanup( docker_client: aiodocker.Docker, node_uuid: str ) -> None: - async def _get_volume_names() -> Set[str]: + async def _get_volume_names() -> set[str]: volumes_list = await docker_client.volumes.list() - volume_names: Set[str] = {x["Name"] for x in volumes_list["Volumes"]} + volume_names: set[str] = {x["Name"] for x in volumes_list["Volumes"]} return volume_names for volume_name in await _get_volume_names(): @@ -90,6 +90,41 @@ async def ensure_network_cleanup( assert delete_result is True +async def _get_service_published_port(service_name: str) -> int: + # it takes a bit of time for the port to be auto generated + # keep trying until it is there + async with aiodocker.Docker() as docker_client: + async for attempt in AsyncRetrying( + wait=wait_fixed(1), + stop=stop_after_delay(SERVICE_WAS_CREATED_BY_DIRECTOR_V2), + reraise=True, + retry=retry_if_exception_type(AssertionError), + ): + with attempt: + print( + f"--> getting {service_name=} published port... 
(attempt {attempt.retry_state.attempt_number}) " + ) + services = await docker_client.services.list() + services = list( + filter(lambda s: s["Spec"]["Name"] == service_name, services) + ) + assert len(services) == 1, f"{service_name=} is not running!" + service = services[0] + assert service["Spec"]["Name"] == service_name + assert "Endpoint" in service + ports = service["Endpoint"].get("Ports", []) + assert len(ports) == 1, f"number of ports in {service_name=} is not 1!" + published_port = ports[0]["PublishedPort"] + assert ( + published_port is not None + ), f"published port of {service_name=} is not set!" + print( + f"--> found {service_name=} {published_port=}, statistics: {json.dumps(attempt.retry_state.retry_object.statistics)}" + ) + return published_port + assert False, f"no published port found for {service_name=}" + + async def patch_dynamic_service_url(app: FastAPI, node_uuid: str) -> str: """ Normally director-v2 talks via docker-netwoks with the dynamic-sidecar. @@ -100,23 +135,8 @@ async def patch_dynamic_service_url(app: FastAPI, node_uuid: str) -> str: returns: the local endpoint """ service_name = f"{DYNAMIC_SIDECAR_SERVICE_PREFIX}_{node_uuid}" - port = None - - async with aiodocker.Docker() as docker_client: - async with timeout(SERVICE_WAS_CREATED_BY_DIRECTOR_V2): - # it takes a bit of time for the port to be auto generated - # keep trying until it is there - while port is None: - services = await docker_client.services.list() - for service in services: - if service["Spec"]["Name"] == service_name: - ports = service["Endpoint"].get("Ports", []) - if len(ports) == 1: - port = ports[0]["PublishedPort"] - break - - await asyncio.sleep(1) - + published_port = await _get_service_published_port(service_name) + assert published_port is not None # patch the endppoint inside the scheduler scheduler: DynamicSidecarsScheduler = app.state.dynamic_sidecar_scheduler endpoint: Optional[str] = None @@ -126,10 +146,10 @@ async def patch_dynamic_service_url(app: FastAPI, node_uuid: str) -> str: ) in scheduler._to_observe.values(): # pylint: disable=protected-access if scheduler_data.service_name == service_name: scheduler_data.dynamic_sidecar.hostname = f"{get_localhost_ip()}" - scheduler_data.dynamic_sidecar.port = port + scheduler_data.dynamic_sidecar.port = published_port endpoint = scheduler_data.dynamic_sidecar.endpoint - assert endpoint == f"http://{get_localhost_ip()}:{port}" + assert endpoint == f"http://{get_localhost_ip()}:{published_port}" break assert endpoint is not None @@ -146,23 +166,7 @@ async def _get_proxy_port(node_uuid: str) -> PositiveInt: returns: the local endpoint """ service_name = f"{DYNAMIC_PROXY_SERVICE_PREFIX}_{node_uuid}" - port = None - - async with aiodocker.Docker() as docker_client: - async with timeout(SERVICE_WAS_CREATED_BY_DIRECTOR_V2): - # it takes a bit of time for the port to be auto generated - # keep trying until it is there - while port is None: - services = await docker_client.services.list() - for service in services: - if service["Spec"]["Name"] == service_name: - ports = service["Endpoint"].get("Ports", []) - if len(ports) == 1: - port = ports[0]["PublishedPort"] - break - - await asyncio.sleep(1) - + port = await _get_service_published_port(service_name) assert port is not None return port @@ -219,7 +223,7 @@ async def get_service_data( director_v2_client: httpx.AsyncClient, service_uuid: str, node_data: Node, -) -> Dict[str, Any]: +) -> dict[str, Any]: # result = response = await director_v2_client.get( @@ -250,12 +254,15 @@ async def 
_get_service_state( async def assert_all_services_running( director_v2_client: httpx.AsyncClient, - workbench: Dict[str, Node], + workbench: dict[str, Node], ) -> None: - async with timeout(SERVICES_ARE_READY_TIMEOUT): - not_all_services_running = True - - while not_all_services_running: + async for attempt in AsyncRetrying( + reraise=True, + retry=retry_if_exception_type(AssertionError), + stop=stop_after_delay(SERVICES_ARE_READY_TIMEOUT), + wait=wait_fixed(0.1), + ): + with attempt: service_states = await asyncio.gather( *( _get_service_state( @@ -271,10 +278,7 @@ async def assert_all_services_running( for service_state in service_states: assert service_state != "failed" - are_services_running = [x == "running" for x in service_states] - not_all_services_running = not all(are_services_running) - # let the services boot - await asyncio.sleep(1.0) + assert all(x == "running" for x in service_states) async def assert_retrieve_service( @@ -409,7 +413,7 @@ async def assert_service_is_available( # pylint: disable=redefined-outer-name async def assert_services_reply_200( director_v2_client: httpx.AsyncClient, - workbench: Dict[str, Node], + workbench: dict[str, Node], ) -> None: for service_uuid, node_data in workbench.items(): service_data = await get_service_data( diff --git a/services/director-v2/tests/unit/conftest.py b/services/director-v2/tests/unit/conftest.py index 8cf04bfca0f..4a9e827205b 100644 --- a/services/director-v2/tests/unit/conftest.py +++ b/services/director-v2/tests/unit/conftest.py @@ -2,14 +2,24 @@ # pylint: disable=unused-argument import json +import logging import random import urllib.parse -from typing import Any, AsyncIterable, AsyncIterator, Callable, Iterator, Mapping +from typing import ( + Any, + AsyncIterable, + AsyncIterator, + Callable, + Iterable, + Iterator, + Mapping, +) import pytest import respx import traitlets.config from _dask_helpers import DaskGatewayServer +from _pytest.logging import LogCaptureFixture from _pytest.monkeypatch import MonkeyPatch from dask.distributed import Scheduler, Worker from dask_gateway_server.app import DaskGateway @@ -57,7 +67,7 @@ def dynamic_service_create() -> DynamicServiceCreate: ) -@pytest.fixture(scope="session") +@pytest.fixture def dynamic_sidecar_port() -> int: return 1222 @@ -361,3 +371,9 @@ def mocked_catalog_service_api( ).respond(json=fake_service_specifications) yield respx_mock + + +@pytest.fixture() +def caplog_info_level(caplog: LogCaptureFixture) -> Iterable[LogCaptureFixture]: + with caplog.at_level(logging.INFO): + yield caplog diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_base.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_base.py new file mode 100644 index 00000000000..8908b1e9eff --- /dev/null +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_base.py @@ -0,0 +1,198 @@ +# pylint:disable=redefined-outer-name + +import pytest +from _pytest.logging import LogCaptureFixture +from httpx import ( + ConnectError, + HTTPError, + PoolTimeout, + Request, + RequestError, + Response, + codes, +) +from pydantic import AnyHttpUrl, parse_obj_as +from respx import MockRouter +from simcore_service_director_v2.modules.dynamic_sidecar.api_client._base import ( + BaseThinClient, + expect_status, + retry_on_errors, +) +from simcore_service_director_v2.modules.dynamic_sidecar.api_client._errors import ( + ClientHttpError, + UnexpectedStatusError, + _WrongReturnType, +) + +# UTILS + + +class TestThickClient(BaseThinClient): + 
@retry_on_errors + async def get_provided_url(self, provided_url: str) -> Response: + return await self._client.get(provided_url) + + @retry_on_errors + async def get_retry_for_status(self) -> Response: + return await self._client.get("http://missing-host:1111") + + +# FIXTURES + + +@pytest.fixture +def thick_client() -> TestThickClient: + return TestThickClient(request_max_retries=1) + + +@pytest.fixture +def test_url() -> AnyHttpUrl: + return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + + +# TESTS + + +async def test_base_with_async_context_manager(test_url: AnyHttpUrl) -> None: + async with TestThickClient(request_max_retries=1) as client: + with pytest.raises(ClientHttpError): + await client.get_provided_url(test_url) + + +async def test_connection_error( + thick_client: TestThickClient, test_url: AnyHttpUrl +) -> None: + with pytest.raises(ClientHttpError) as exe_info: + await thick_client.get_provided_url(test_url) + + assert isinstance(exe_info.value, ClientHttpError) + assert isinstance(exe_info.value.error, ConnectError) + + +@pytest.mark.parametrize("retry_count", [2, 1]) +async def test_retry_on_errors( + retry_count: int, test_url: AnyHttpUrl, caplog_info_level: LogCaptureFixture +) -> None: + client = TestThickClient(request_max_retries=retry_count) + + with pytest.raises(ClientHttpError): + await client.get_provided_url(test_url) + + # check if the right amount of messages was captured by the logs + assert len(caplog_info_level.messages) == retry_count + for i, log_message in enumerate(caplog_info_level.messages): + assert log_message.startswith( + f"[{i+1}/{retry_count}]Retry. Unexpected ConnectError" + ) + + +@pytest.mark.parametrize("error_class", [ConnectError, PoolTimeout]) +@pytest.mark.parametrize("retry_count", [1, 2]) +async def test_retry_on_errors_by_error_type( + error_class: type[RequestError], + caplog_info_level: LogCaptureFixture, + retry_count: int, + test_url: AnyHttpUrl, +) -> None: + class ATestClient(BaseThinClient): + # pylint: disable=no-self-use + @retry_on_errors + async def raises_request_error(self) -> Response: + raise error_class( + "mock_connect_error", + request=Request(method="GET", url=test_url), + ) + + client = ATestClient(request_max_retries=retry_count) + + with pytest.raises(ClientHttpError): + await client.raises_request_error() + + log_count = retry_count + 1 if error_class == PoolTimeout else retry_count + assert len(caplog_info_level.messages) == log_count + + if error_class == PoolTimeout: + for i, retry_message in enumerate(caplog_info_level.messages[:-1]): + assert retry_message.startswith( + f"[{i+1}/{retry_count}]Retry. Unexpected PoolTimeout" + ) + connections_message = caplog_info_level.messages[-1] + assert connections_message == "Requests while event 'POOL TIMEOUT': []" + else: + for i, log_message in enumerate(caplog_info_level.messages): + assert log_message.startswith( + f"[{i+1}/{retry_count}]Retry. 
Unexpected ConnectError" + ) + + +async def test_retry_on_errors_raises_client_http_error() -> None: + class ATestClient(BaseThinClient): + # pylint: disable=no-self-use + @retry_on_errors + async def raises_http_error(self) -> Response: + raise HTTPError("mock_http_error") + + client = ATestClient(request_max_retries=1) + + with pytest.raises(ClientHttpError): + await client.raises_http_error() + + +async def test_methods_do_not_return_response() -> None: + class OKTestClient(BaseThinClient): + async def public_method_ok(self) -> Response: # type: ignore + """this method will be ok even if no code is used""" + + # OK + OKTestClient(request_max_retries=1) + + class FailWrongAnnotationTestClient(BaseThinClient): + async def public_method_wrong_annotation(self) -> None: + """this method will raise an error""" + + with pytest.raises(_WrongReturnType): + FailWrongAnnotationTestClient(request_max_retries=1) + + class FailNoAnnotationTestClient(BaseThinClient): + async def public_method_no_annotation(self): + """this method will raise an error""" + + with pytest.raises(_WrongReturnType): + FailNoAnnotationTestClient(request_max_retries=1) + + +async def test_expect_state_decorator( + test_url: AnyHttpUrl, respx_mock: MockRouter +) -> None: + + url_get_200_ok = f"{test_url}/ok" + get_wrong_state = f"{test_url}/wrong-state" + error_status = codes.NOT_FOUND + + class ATestClient(BaseThinClient): + @expect_status(codes.OK) + async def get_200_ok(self) -> Response: + return await self._client.get(url_get_200_ok) + + @expect_status(error_status) + async def get_wrong_state(self) -> Response: + return await self._client.get(get_wrong_state) + + respx_mock.get(url_get_200_ok).mock(return_value=Response(codes.OK)) + respx_mock.get(get_wrong_state).mock(return_value=Response(codes.OK)) + + test_client = ATestClient(request_max_retries=1) + + # OK + response = await test_client.get_200_ok() + assert response.status_code == codes.OK + + # RAISES EXPECTED ERROR + with pytest.raises(UnexpectedStatusError) as err_info: + await test_client.get_wrong_state() + + assert err_info.value.response.status_code == codes.OK + assert ( + f"{err_info.value}" + == f"Expected status: {error_status}, got {codes.OK} for: {get_wrong_state}: headers=Headers({{}}), body=''" + ) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py new file mode 100644 index 00000000000..964009c0951 --- /dev/null +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_public.py @@ -0,0 +1,475 @@ +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +from contextlib import contextmanager +from typing import Any, AsyncIterable, Callable, Iterator, Optional, Type +from unittest.mock import AsyncMock + +import pytest +from _pytest.logging import LogCaptureFixture +from _pytest.monkeypatch import MonkeyPatch +from fastapi import FastAPI, status +from httpx import HTTPError, Response +from pydantic import AnyHttpUrl, parse_obj_as +from pytest_mock import MockerFixture +from simcore_service_director_v2.core.settings import AppSettings +from simcore_service_director_v2.modules.dynamic_sidecar.api_client._errors import ( + ClientHttpError, + UnexpectedStatusError, +) +from simcore_service_director_v2.modules.dynamic_sidecar.api_client._public import ( + DynamicSidecarClient, + get_dynamic_sidecar_client, + setup, + shutdown, +) +from 
simcore_service_director_v2.modules.dynamic_sidecar.errors import ( + EntrypointContainerNotFoundError, + NodeportsDidNotFindNodeError, +) + +# FIXTURES + + +@pytest.fixture +def dynamic_sidecar_endpoint() -> AnyHttpUrl: + return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + + +@pytest.fixture +def mock_env(monkeypatch: MonkeyPatch, mock_env: None) -> None: + monkeypatch.setenv("S3_ACCESS_KEY", "") + monkeypatch.setenv("S3_SECRET_KEY", "") + monkeypatch.setenv("S3_BUCKET_NAME", "") + monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO") + + monkeypatch.setenv("POSTGRES_HOST", "") + monkeypatch.setenv("POSTGRES_USER", "") + monkeypatch.setenv("POSTGRES_PASSWORD", "") + monkeypatch.setenv("POSTGRES_DB", "") + + # reduce number of retries to make more reliable + monkeypatch.setenv("DYNAMIC_SIDECAR_API_CLIENT_REQUEST_MAX_RETRIES", "1") + monkeypatch.setenv("S3_ENDPOINT", "") + + +@pytest.fixture +async def dynamic_sidecar_client(mock_env: None) -> AsyncIterable[DynamicSidecarClient]: + app = FastAPI() + app.state.settings = AppSettings.create_from_envs() + + await setup(app) + yield get_dynamic_sidecar_client(app) + await shutdown(app) + + +@pytest.fixture +def retry_count() -> int: + return 2 + + +@pytest.fixture +def raise_retry_count( + monkeypatch: MonkeyPatch, retry_count: int, mock_env: None +) -> None: + monkeypatch.setenv( + "DYNAMIC_SIDECAR_API_CLIENT_REQUEST_MAX_RETRIES", f"{retry_count}" + ) + + +@pytest.fixture +def get_patched_client( + dynamic_sidecar_client: DynamicSidecarClient, mocker: MockerFixture +) -> Callable: + @contextmanager + def wrapper( + method: str, + return_value: Optional[Any] = None, + side_effect: Optional[Callable] = None, + ) -> Iterator[DynamicSidecarClient]: + mocker.patch( + f"simcore_service_director_v2.modules.dynamic_sidecar.api_client._thin.ThinDynamicSidecarClient.{method}", + return_value=return_value, + side_effect=side_effect, + ) + yield dynamic_sidecar_client + + return wrapper + + +# TESTS + + +@pytest.mark.parametrize("is_healthy", [True, False]) +async def test_is_healthy_api_ok( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl, is_healthy: bool +) -> None: + mock_json = {"is_healthy": is_healthy} + with get_patched_client( + "get_health", + return_value=Response(status_code=status.HTTP_200_OK, json=mock_json), + ) as client: + assert await client.is_healthy(dynamic_sidecar_endpoint) == is_healthy + + +async def test_is_healthy_times_out( + raise_retry_count: None, + dynamic_sidecar_client: DynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + caplog_info_level: LogCaptureFixture, + retry_count: int, +) -> None: + assert await dynamic_sidecar_client.is_healthy(dynamic_sidecar_endpoint) is False + for i, log_message in enumerate(caplog_info_level.messages): + assert log_message.startswith( + f"[{i+1}/{retry_count}]Retry. 
Unexpected ConnectError" + ) + + +@pytest.mark.parametrize( + "side_effect", + [ + pytest.param( + UnexpectedStatusError( + Response( + status_code=status.HTTP_400_BAD_REQUEST, + content="some mocked error", + request=AsyncMock(), + ), + status.HTTP_200_OK, + ), + id="UnexpectedStatusError", + ), + pytest.param( + ClientHttpError(HTTPError("another mocked error")), id="HTTPError" + ), + ], +) +async def test_is_healthy_api_error( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + side_effect: Exception, +) -> None: + with get_patched_client( + "get_health", + side_effect=side_effect, + ) as client: + assert await client.is_healthy(dynamic_sidecar_endpoint) == False + + +async def test_containers_inspect( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl +) -> None: + mock_json = {"ok": "data"} + with get_patched_client( + "get_containers", + return_value=Response(status_code=status.HTTP_200_OK, json=mock_json), + ) as client: + assert await client.containers_inspect(dynamic_sidecar_endpoint) == mock_json + + +async def test_containers_docker_status_api_ok( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl +) -> None: + mock_json = {"container_id": {"ok": "data"}} + with get_patched_client( + "get_containers", + return_value=Response(status_code=status.HTTP_200_OK, json=mock_json), + ) as client: + assert ( + await client.containers_docker_status(dynamic_sidecar_endpoint) == mock_json + ) + + +async def test_containers_docker_status_api_error( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl +) -> None: + with get_patched_client( + "get_containers", + side_effect=UnexpectedStatusError( + Response( + status_code=status.HTTP_400_BAD_REQUEST, + content="some mocked error", + request=AsyncMock(), + ), + status.HTTP_200_OK, + ), + ) as client: + assert await client.containers_docker_status(dynamic_sidecar_endpoint) == {} + + +async def test_start_service_creation( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl +) -> None: + docker_compose_reply = "a mocked docker compose plain string reply" + with get_patched_client( + "post_containers", + return_value=Response( + status_code=status.HTTP_200_OK, content=docker_compose_reply + ), + ) as client: + assert ( + await client.start_service_creation( + dynamic_sidecar_endpoint, compose_spec="mock compose spec" + ) + == None + ) + + +async def test_begin_service_destruction( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl +) -> None: + docker_compose_reply = "a mocked docker compose plain string reply" + with get_patched_client( + "post_containers_down", + return_value=Response( + status_code=status.HTTP_200_OK, content=docker_compose_reply + ), + ) as client: + assert await client.begin_service_destruction(dynamic_sidecar_endpoint) == None + + +async def test_service_save_state( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl +) -> None: + with get_patched_client( + "post_containers_state_save", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert await client.service_save_state(dynamic_sidecar_endpoint) == None + + +async def test_service_restore_state( + get_patched_client: Callable, dynamic_sidecar_endpoint: AnyHttpUrl +) -> None: + with get_patched_client( + "post_containers_state_restore", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert await client.service_restore_state(dynamic_sidecar_endpoint) == None + + 
+@pytest.mark.parametrize("port_keys", [None, ["1", [""], [""]]]) +async def test_service_pull_input_ports( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + port_keys: Optional[list[str]], +) -> None: + with get_patched_client( + "post_containers_ports_inputs_pull", + return_value=Response(status_code=status.HTTP_200_OK, content="42"), + ) as client: + assert ( + await client.service_pull_input_ports(dynamic_sidecar_endpoint, port_keys) + == 42 + ) + + +async def test_service_disable_dir_watcher( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, +) -> None: + with get_patched_client( + "patch_containers_directory_watcher", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert ( + await client.service_disable_dir_watcher(dynamic_sidecar_endpoint) == None + ) + + +async def test_service_enable_dir_watcher( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, +) -> None: + with get_patched_client( + "patch_containers_directory_watcher", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert await client.service_enable_dir_watcher(dynamic_sidecar_endpoint) == None + + +@pytest.mark.parametrize("outputs_labels", [{}, {"ok": "data"}]) +async def test_service_outputs_create_dirs( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + outputs_labels: dict[str, Any], +) -> None: + with get_patched_client( + "post_containers_ports_outputs_dirs", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert ( + await client.service_outputs_create_dirs( + dynamic_sidecar_endpoint, outputs_labels + ) + == None + ) + + +@pytest.mark.parametrize("port_keys", [None, ["1", [""], [""]]]) +async def test_service_pull_output_ports( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + port_keys: Optional[list[str]], +) -> None: + with get_patched_client( + "post_containers_ports_outputs_pull", + return_value=Response(status_code=status.HTTP_200_OK, content="42"), + ) as client: + assert ( + await client.service_pull_output_ports(dynamic_sidecar_endpoint, port_keys) + == 42 + ) + + +@pytest.mark.parametrize("port_keys", [None, ["1", [""], [""]]]) +async def test_service_push_output_ports_ok( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + port_keys: Optional[list[str]], +) -> None: + with get_patched_client( + "post_containers_ports_outputs_push", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert ( + await client.service_push_output_ports(dynamic_sidecar_endpoint, port_keys) + == None + ) + + +@pytest.mark.parametrize("port_keys", [None, ["1", [""], [""]]]) +@pytest.mark.parametrize( + "side_effect, expected_error", + [ + pytest.param( + UnexpectedStatusError( + Response( + status_code=status.HTTP_404_NOT_FOUND, + json={ + "code": "dynamic_sidecar.nodeports.node_not_found", + "node_uuid": "mock_node_uuid", + }, + request=AsyncMock(), + ), + status.HTTP_204_NO_CONTENT, + ), + NodeportsDidNotFindNodeError, + id="NodeportsDidNotFindNodeError", + ), + pytest.param( + UnexpectedStatusError( + Response( + status_code=status.HTTP_404_NOT_FOUND, + json={"code": "other"}, + request=AsyncMock(), + ), + status.HTTP_204_NO_CONTENT, + ), + UnexpectedStatusError, + id="UnexpectedStatusError", + ), + ], +) +async def test_service_push_output_ports_api_fail( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + port_keys: Optional[list[str]], + 
side_effect: UnexpectedStatusError, + expected_error: Type[Exception], +) -> None: + with get_patched_client( + "post_containers_ports_outputs_push", side_effect=side_effect + ) as client: + with pytest.raises(expected_error): + await client.service_push_output_ports(dynamic_sidecar_endpoint, port_keys) + + +@pytest.mark.parametrize("dynamic_sidecar_network_name", ["a_test_network"]) +async def test_get_entrypoint_container_name_ok( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_network_name: str, +) -> None: + with get_patched_client( + "get_containers_name", + return_value=Response(status_code=status.HTTP_200_OK, json="a_test_container"), + ) as client: + assert ( + await client.get_entrypoint_container_name( + dynamic_sidecar_endpoint, dynamic_sidecar_network_name + ) + == "a_test_container" + ) + + +@pytest.mark.parametrize("dynamic_sidecar_network_name", ["a_test_network"]) +async def test_get_entrypoint_container_name_api_not_found( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + dynamic_sidecar_network_name: str, +) -> None: + with get_patched_client( + "get_containers_name", + side_effect=UnexpectedStatusError( + Response(status_code=status.HTTP_404_NOT_FOUND, request=AsyncMock()), + status.HTTP_204_NO_CONTENT, + ), + ) as client: + with pytest.raises(EntrypointContainerNotFoundError): + await client.get_entrypoint_container_name( + dynamic_sidecar_endpoint, dynamic_sidecar_network_name + ) + + +async def test_restart_containers( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, +) -> None: + with get_patched_client( + "post_containers_restart", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert await client.restart_containers(dynamic_sidecar_endpoint) == None + + +@pytest.mark.parametrize("network_aliases", [[], ["an-alias"], ["alias-1", "alias-2"]]) +async def test_attach_container_to_network( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, + network_aliases: list[str], +) -> None: + with get_patched_client( + "post_containers_networks_attach", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert ( + # pylint:disable=protected-access + await client._attach_container_to_network( + dynamic_sidecar_endpoint, + container_id="container_id", + network_id="network_id", + network_aliases=network_aliases, + ) + == None + ) + + +async def test_detach_container_from_network( + get_patched_client: Callable, + dynamic_sidecar_endpoint: AnyHttpUrl, +) -> None: + with get_patched_client( + "post_containers_networks_detach", + return_value=Response(status_code=status.HTTP_204_NO_CONTENT), + ) as client: + assert ( + # pylint:disable=protected-access + await client._detach_container_from_network( + dynamic_sidecar_endpoint, + container_id="container_id", + network_id="network_id", + ) + == None + ) diff --git a/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py new file mode 100644 index 00000000000..243a404a5cf --- /dev/null +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_client_api_thin.py @@ -0,0 +1,383 @@ +# pylint:disable=unused-argument +# pylint:disable=redefined-outer-name + +import json +from typing import Any, Callable, Optional + +import pytest +from _pytest.monkeypatch import MonkeyPatch +from fastapi import FastAPI, status +from httpx import Response +from pydantic 
import AnyHttpUrl, parse_obj_as +from respx import MockRouter, Route +from respx.types import SideEffectTypes +from simcore_service_director_v2.core.settings import AppSettings +from simcore_service_director_v2.modules.dynamic_sidecar.api_client._thin import ( + ThinDynamicSidecarClient, +) + +# NOTE: typing and callables cannot +MockRequestType = Callable[ + [str, str, Optional[Response], Optional[SideEffectTypes]], Route +] + + +# UTILS + + +def assert_responses(mocked: Response, result: Optional[Response]) -> None: + assert result is not None + assert mocked.status_code == result.status_code + assert mocked.headers == result.headers + assert mocked.text == result.text + + +# FIXTURES + + +@pytest.fixture +def mocked_app(monkeypatch: MonkeyPatch, mock_env: None) -> FastAPI: + monkeypatch.setenv("S3_ENDPOINT", "") + monkeypatch.setenv("S3_ACCESS_KEY", "") + monkeypatch.setenv("S3_SECRET_KEY", "") + monkeypatch.setenv("S3_BUCKET_NAME", "") + monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO") + + monkeypatch.setenv("POSTGRES_HOST", "") + monkeypatch.setenv("POSTGRES_USER", "") + monkeypatch.setenv("POSTGRES_PASSWORD", "") + monkeypatch.setenv("POSTGRES_DB", "") + + # reduce number of retries to make more reliable + monkeypatch.setenv("DYNAMIC_SIDECAR_API_CLIENT_REQUEST_MAX_RETRIES", "1") + + app = FastAPI() + app.state.settings = AppSettings.create_from_envs() + return app + + +@pytest.fixture +def thin_client(mocked_app: FastAPI) -> ThinDynamicSidecarClient: + return ThinDynamicSidecarClient(mocked_app) + + +@pytest.fixture +def dynamic_sidecar_endpoint() -> AnyHttpUrl: + return parse_obj_as(AnyHttpUrl, "http://missing-host:1111") + + +@pytest.fixture +def mock_request( + dynamic_sidecar_endpoint: AnyHttpUrl, respx_mock: MockRouter +) -> MockRequestType: + def request_mock( + method: str, + path: str, + return_value: Optional[Response] = None, + side_effect: Optional[SideEffectTypes] = None, + ) -> Route: + print(f"Mocking {path=}") + return respx_mock.request( + method=method, url=f"{dynamic_sidecar_endpoint}{path}" + ).mock(return_value=return_value, side_effect=side_effect) + + return request_mock + + +# TESTS + + +async def test_get_health( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, +) -> None: + mock_response = Response(status.HTTP_200_OK) + mock_request("GET", "/health", mock_response, None) + + response = await thin_client.get_health(dynamic_sidecar_endpoint) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("only_status", [False, True]) +async def test_get_containers( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + only_status: bool, +) -> None: + mock_response = Response(status.HTTP_200_OK) + mock_request( + "GET", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers?only_status={str(only_status).lower()}", + mock_response, + None, + ) + + response = await thin_client.get_containers( + dynamic_sidecar_endpoint, only_status=only_status + ) + assert_responses(mock_response, response) + + +async def test_post_containers( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, +) -> None: + mock_response = Response(status.HTTP_202_ACCEPTED) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers", + mock_response, + None, + ) + + response = await thin_client.post_containers( + dynamic_sidecar_endpoint, 
compose_spec="some_fake_compose_as_str" + ) + assert_responses(mock_response, response) + + +async def test_post_containers_down( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, +) -> None: + mock_response = Response(status.HTTP_200_OK) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers:down", + mock_response, + None, + ) + + response = await thin_client.post_containers_down(dynamic_sidecar_endpoint) + assert_responses(mock_response, response) + + +async def test_post_containers_state_save( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/state:save", + mock_response, + None, + ) + + response = await thin_client.post_containers_state_save(dynamic_sidecar_endpoint) + assert_responses(mock_response, response) + + +async def test_post_containers_state_restore( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/state:restore", + mock_response, + None, + ) + + response = await thin_client.post_containers_state_restore(dynamic_sidecar_endpoint) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("port_keys", [None, ["1", "2"], []]) +async def test_post_containers_ports_inputs_pull( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + port_keys: Optional[list[str]], +) -> None: + mock_response = Response(status.HTTP_200_OK) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/inputs:pull", + mock_response, + None, + ) + + response = await thin_client.post_containers_ports_inputs_pull( + dynamic_sidecar_endpoint, port_keys=port_keys + ) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("is_enabled", [False, True]) +async def test_post_patch_containers_directory_watcher( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + is_enabled: bool, +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + mock_request( + "PATCH", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/directory-watcher", + mock_response, + None, + ) + + response = await thin_client.patch_containers_directory_watcher( + dynamic_sidecar_endpoint, is_enabled=is_enabled + ) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("outputs_labels", [{}, {"some": "data"}]) +async def test_post_containers_ports_outputs_dirs( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + outputs_labels: dict[str, Any], +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/outputs/dirs", + mock_response, + None, + ) + + response = await thin_client.post_containers_ports_outputs_dirs( + dynamic_sidecar_endpoint, outputs_labels=outputs_labels + ) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("port_keys", [None, ["1", "2"], []]) +async def 
test_post_containers_ports_outputs_pull( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + port_keys: Optional[list[str]], +) -> None: + mock_response = Response(status.HTTP_200_OK) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/outputs:pull", + mock_response, + None, + ) + + response = await thin_client.post_containers_ports_outputs_pull( + dynamic_sidecar_endpoint, port_keys=port_keys + ) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("port_keys", [None, ["1", "2"], []]) +async def test_post_containers_ports_outputs_push( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + port_keys: Optional[list[str]], +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/ports/outputs:push", + mock_response, + None, + ) + + response = await thin_client.post_containers_ports_outputs_push( + dynamic_sidecar_endpoint, port_keys=port_keys + ) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("dynamic_sidecar_network_name", ["test_nw_name"]) +async def test_get_containers_name( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + dynamic_sidecar_network_name: str, +) -> None: + mock_response = Response(status.HTTP_200_OK) + encoded_filters = json.dumps(dict(network=dynamic_sidecar_network_name)) + mock_request( + "GET", + ( + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}" + f"/containers/name?filters={encoded_filters}" + ), + mock_response, + None, + ) + + response = await thin_client.get_containers_name( + dynamic_sidecar_endpoint, + dynamic_sidecar_network_name=dynamic_sidecar_network_name, + ) + assert_responses(mock_response, response) + + +async def test_post_containers_restart( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers:restart", + mock_response, + None, + ) + + response = await thin_client.post_containers_restart(dynamic_sidecar_endpoint) + assert_responses(mock_response, response) + + +@pytest.mark.parametrize("network_aliases", [[], ["an_alias"], ["multuple_aliases"]]) +async def test_post_containers_networks_attach( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, + network_aliases: list[str], +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + container_id = "a_container_id" + mock_request( + "POST", + f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/{container_id}/networks:attach", + mock_response, + None, + ) + + response = await thin_client.post_containers_networks_attach( + dynamic_sidecar_endpoint, + container_id=container_id, + network_id="network_id", + network_aliases=network_aliases, + ) + assert_responses(mock_response, response) + + +async def test_post_containers_networks_detach( + thin_client: ThinDynamicSidecarClient, + dynamic_sidecar_endpoint: AnyHttpUrl, + mock_request: MockRequestType, +) -> None: + mock_response = Response(status.HTTP_204_NO_CONTENT) + container_id = "a_container_id" + mock_request( + "POST", + 
f"{dynamic_sidecar_endpoint}/{thin_client.API_VERSION}/containers/{container_id}/networks:detach", + mock_response, + None, + ) + + response = await thin_client.post_containers_networks_detach( + dynamic_sidecar_endpoint, container_id=container_id, network_id="network_id" + ) + assert_responses(mock_response, response) diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_scheduler.py b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py similarity index 68% rename from services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_scheduler.py rename to services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py index b04dbd8e6df..c00fa0f46f2 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_scheduler.py +++ b/services/director-v2/tests/unit/test_modules_dynamic_sidecar_scheduler.py @@ -1,13 +1,13 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +# pylint: disable=protected-access -import asyncio import logging import re import urllib.parse from contextlib import asynccontextmanager, contextmanager -from typing import AsyncGenerator, AsyncIterator, Callable, Iterator, List, Type, Union +from typing import AsyncGenerator, Awaitable, Callable, Iterator, Union from unittest.mock import AsyncMock import httpx @@ -15,7 +15,6 @@ import respx from _pytest.monkeypatch import MonkeyPatch from fastapi import FastAPI -from fastapi.testclient import TestClient from models_library.service_settings_labels import SimcoreServiceLabels from pytest_mock.plugin import MockerFixture from respx.router import MockRouter @@ -26,38 +25,37 @@ SchedulerData, ServiceState, ) -from simcore_service_director_v2.modules.director_v0 import DirectorV0Client -from simcore_service_director_v2.modules.dynamic_sidecar import module_setup -from simcore_service_director_v2.modules.dynamic_sidecar.client_api import ( - close_api_client, - get_url, - setup_api_client, -) from simcore_service_director_v2.modules.dynamic_sidecar.errors import ( DynamicSidecarError, DynamicSidecarNotFoundError, ) from simcore_service_director_v2.modules.dynamic_sidecar.scheduler import ( DynamicSidecarsScheduler, - setup_scheduler, - shutdown_scheduler, ) from simcore_service_director_v2.modules.dynamic_sidecar.scheduler.events import ( REGISTERED_EVENTS, DynamicSchedulerEvent, ) +from simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task import ( + _apply_observation_cycle, +) # running scheduler at a hight rate to stress out the system # and ensure faster tests TEST_SCHEDULER_INTERVAL_SECONDS = 0.01 -SLEEP_TO_AWAIT_SCHEDULER_TRIGGERS = 10 * TEST_SCHEDULER_INTERVAL_SECONDS log = logging.getLogger(__name__) -# UTILS +pytest_simcore_core_services_selection = ["postgres"] +pytest_simcore_ops_services_selection = ["adminer"] + +def get_url(dynamic_sidecar_endpoint: str, postfix: str) -> str: + return f"{dynamic_sidecar_endpoint}{postfix}" + +# UTILS @contextmanager def _mock_containers_docker_status( scheduler_data: SchedulerData, @@ -86,16 +84,6 @@ def _mock_containers_docker_status( yield mock -async def _assert_remove_service( - scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData -) -> None: - # pylint: disable=protected-access - await scheduler.mark_service_for_removal(scheduler_data.node_uuid, True) - assert scheduler_data.service_name in scheduler._to_observe - await scheduler.finish_service_removal(scheduler_data.node_uuid) - assert scheduler_data.service_name not in 
scheduler._to_observe - - @asynccontextmanager async def _assert_get_dynamic_services_mocked( scheduler: DynamicSidecarsScheduler, @@ -111,17 +99,48 @@ async def _assert_get_dynamic_services_mocked( yield stack_status - await _assert_remove_service(scheduler, scheduler_data) + await scheduler.mark_service_for_removal(scheduler_data.node_uuid, True) + assert scheduler_data.service_name in scheduler._to_observe + await scheduler.finish_service_removal(scheduler_data.node_uuid) + assert scheduler_data.service_name not in scheduler._to_observe # FIXTURES +@pytest.fixture(scope="function") +def mock_env( + mock_env: None, + monkeypatch: MonkeyPatch, + simcore_services_network_name: str, + mock_docker_api: None, +) -> None: + monkeypatch.setenv("SIMCORE_SERVICES_NETWORK_NAME", simcore_services_network_name) + monkeypatch.setenv("DIRECTOR_HOST", "mocked_out") + monkeypatch.setenv( + "DIRECTOR_V2_DYNAMIC_SCHEDULER_INTERVAL_SECONDS", + str(TEST_SCHEDULER_INTERVAL_SECONDS), + ) + monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED", "true") + monkeypatch.setenv("S3_ENDPOINT", "endpoint") + monkeypatch.setenv("S3_ACCESS_KEY", "access_key") + monkeypatch.setenv("S3_SECRET_KEY", "secret_key") + monkeypatch.setenv("S3_BUCKET_NAME", "bucket_name") + monkeypatch.setenv("S3_SECURE", "false") + monkeypatch.setenv("DIRECTOR_V2_POSTGRES_ENABLED", "false") + monkeypatch.setenv("POSTGRES_HOST", "test") + monkeypatch.setenv("POSTGRES_USER", "test") + monkeypatch.setenv("POSTGRES_PASSWORD", "test") + monkeypatch.setenv("POSTGRES_DB", "test") + # NOTE: makes retries go faster + monkeypatch.setenv("DYNAMIC_SIDECAR_API_CLIENT_REQUEST_MAX_RETRIES", "1") + + @pytest.fixture def mocked_director_v0( - app_settings: AppSettings, scheduler_data: SchedulerData + minimal_config: AppSettings, scheduler_data: SchedulerData ) -> Iterator[MockRouter]: - endpoint = app_settings.DIRECTOR_V0.endpoint + endpoint = minimal_config.DIRECTOR_V0.endpoint with respx.mock as mock: mock.get( @@ -136,7 +155,7 @@ def mocked_director_v0( @pytest.fixture -def mocked_dynamic_scheduler_events() -> Iterator[None]: +def mocked_dynamic_scheduler_events() -> None: class AlwaysTriggersDynamicSchedulerEvent(DynamicSchedulerEvent): @classmethod async def will_trigger( @@ -149,96 +168,19 @@ async def action(cls, app: FastAPI, scheduler_data: SchedulerData) -> None: message = f"{cls.__name__} action triggered" log.warning(message) - test_defined_scheduler_events: List[Type[DynamicSchedulerEvent]] = [ + test_defined_scheduler_events: list[type[DynamicSchedulerEvent]] = [ AlwaysTriggersDynamicSchedulerEvent ] - # add to REGISTERED_EVENTS + # replace REGISTERED EVENTS + REGISTERED_EVENTS.clear() for event in test_defined_scheduler_events: REGISTERED_EVENTS.append(event) - yield - - # make sure to cleanup and remove them after usage - for event in test_defined_scheduler_events: - REGISTERED_EVENTS.remove(event) - - -@pytest.fixture -def ensure_scheduler_runs_once() -> Callable: - async def check_scheduler_ran_once() -> None: - await asyncio.sleep(SLEEP_TO_AWAIT_SCHEDULER_TRIGGERS) - - return check_scheduler_ran_once - - -@pytest.fixture -def app_settings( - simcore_services_network_name: str, monkeypatch: MonkeyPatch -) -> AppSettings: - monkeypatch.setenv("REGISTRY_AUTH", "false") - monkeypatch.setenv("REGISTRY_USER", "test") - monkeypatch.setenv("REGISTRY_PW", "test") - monkeypatch.setenv("REGISTRY_SSL", "false") - monkeypatch.setenv("SIMCORE_SERVICES_NETWORK_NAME", simcore_services_network_name) - monkeypatch.setenv("TRAEFIK_SIMCORE_ZONE", 
"test_traefik_zone") - monkeypatch.setenv("SWARM_STACK_NAME", "test_swarm_name") - monkeypatch.setenv("DYNAMIC_SIDECAR_IMAGE", "local/dynamic-sidecar:MOCKED") - monkeypatch.setenv("POSTGRES_HOST", "mocked_out") - monkeypatch.setenv("POSTGRES_USER", "mocked_out") - monkeypatch.setenv("POSTGRES_PASSWORD", "mocked_out") - monkeypatch.setenv("POSTGRES_DB", "mocked_out") - monkeypatch.setenv("DIRECTOR_HOST", "mocked_out") - monkeypatch.setenv("SC_BOOT_MODE", "local-development") - monkeypatch.setenv( - "DIRECTOR_V2_DYNAMIC_SCHEDULER_INTERVAL_SECONDS", - str(TEST_SCHEDULER_INTERVAL_SECONDS), - ) - monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO") - monkeypatch.setenv("S3_ENDPOINT", "endpoint") - monkeypatch.setenv("S3_ACCESS_KEY", "access_key") - monkeypatch.setenv("S3_SECRET_KEY", "secret_key") - monkeypatch.setenv("S3_BUCKET_NAME", "bucket_name") - monkeypatch.setenv("S3_SECURE", "false") - - app_settings = AppSettings.create_from_envs() - return app_settings - - -@pytest.fixture -async def mocked_app( - app_settings: AppSettings, - mocked_director_v0: MockRouter, - docker_swarm: None, -) -> AsyncIterator[FastAPI]: - - app = FastAPI() - app.state.settings = app_settings - log.info("AppSettings=%s", app_settings) - - try: - # TODO: PC->ANE: use instead setup for corresponding modules! - async with httpx.AsyncClient( - base_url=f"{app_settings.DIRECTOR_V0.endpoint}", - timeout=app_settings.CLIENT_REQUEST.HTTP_CLIENT_REQUEST_TOTAL_TIMEOUT, - ) as httpx_client: - DirectorV0Client.create( - app, - client=httpx_client, - ) - await setup_api_client(app) - await setup_scheduler(app) - - yield app - - finally: - await shutdown_scheduler(app) - await close_api_client(app) - @pytest.fixture -def scheduler(mocked_app: FastAPI) -> DynamicSidecarsScheduler: - return mocked_app.state.dynamic_sidecar_scheduler +def scheduler(minimal_app: FastAPI) -> DynamicSidecarsScheduler: + return minimal_app.state.dynamic_sidecar_scheduler @pytest.fixture @@ -247,7 +189,7 @@ def scheduler_data(scheduler_data_from_http_request: SchedulerData) -> Scheduler @pytest.fixture -def mocked_client_api(scheduler_data: SchedulerData) -> Iterator[MockRouter]: +def mocked_api_client(scheduler_data: SchedulerData) -> Iterator[MockRouter]: service_endpoint = scheduler_data.dynamic_sidecar.endpoint with respx.mock as mock: mock.get(get_url(service_endpoint, "/health"), name="is_healthy").respond( @@ -262,7 +204,25 @@ def mocked_client_api(scheduler_data: SchedulerData) -> Iterator[MockRouter]: @pytest.fixture -def mock_service_running(mocker: MockerFixture) -> Iterator[AsyncMock]: +def mock_docker_api(mocker: MockerFixture) -> None: + mocker.patch( + "simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task.get_dynamic_sidecars_to_observe", + autospec=True, + return_value=[], + ) + mocker.patch( + "simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task.are_all_services_present", + autospec=True, + return_value=True, + ) + mocker.patch( + "simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task.get_dynamic_sidecar_state", + return_value=(ServiceState.PENDING, ""), + ) + + +@pytest.fixture +def mock_service_running(mock_docker_api, mocker: MockerFixture) -> Iterator[AsyncMock]: mock = mocker.patch( "simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task.get_dynamic_sidecar_state", return_value=(ServiceState.RUNNING, ""), @@ -283,132 +243,140 @@ def mock_update_label(mocker: MockerFixture) -> Iterator[None]: @pytest.fixture def mock_max_status_api_duration(monkeypatch: MonkeyPatch) -> 
Iterator[None]: - monkeypatch.setenv( - "DIRECTOR_V2_DYNAMIC_SCHEDULER_MAX_STATUS_API_DURATION", "0.0001" - ) + monkeypatch.setenv("DYNAMIC_SIDECAR_STATUS_API_TIMEOUT_S", "0.0001") yield +@pytest.fixture +def disabled_scheduler_background_task(mocker: MockerFixture): + mocker.patch( + "simcore_service_director_v2.modules.dynamic_sidecar.scheduler.task.DynamicSidecarsScheduler.start", + autospec=True, + ) + + +@pytest.fixture +async def manually_trigger_scheduler( + minimal_app: FastAPI, + scheduler: DynamicSidecarsScheduler, + scheduler_data: SchedulerData, +) -> Callable[[], Awaitable[None]]: + async def _triggerer() -> None: + await _apply_observation_cycle(minimal_app, scheduler, scheduler_data) + + return _triggerer + + # TESTS async def test_scheduler_add_remove( - ensure_scheduler_runs_once: Callable, + disabled_scheduler_background_task: None, + manually_trigger_scheduler: Callable[[], Awaitable[None]], scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - mocked_client_api: MockRouter, + mocked_api_client: MockRouter, docker_swarm: None, mocked_dynamic_scheduler_events: None, mock_update_label: None, ) -> None: - await ensure_scheduler_runs_once() await scheduler.add_service(scheduler_data) - - await ensure_scheduler_runs_once() + await manually_trigger_scheduler() assert scheduler_data.dynamic_sidecar.is_available is True - await _assert_remove_service(scheduler, scheduler_data) + await scheduler.mark_service_for_removal(scheduler_data.node_uuid, True) + assert scheduler_data.service_name in scheduler._to_observe + await scheduler.finish_service_removal(scheduler_data.node_uuid) + assert scheduler_data.service_name not in scheduler._to_observe async def test_scheduler_removes_partially_started_services( - ensure_scheduler_runs_once: Callable, + disabled_scheduler_background_task: None, + manually_trigger_scheduler: Callable[[], Awaitable[None]], scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, + mock_docker_api: None, ) -> None: - await ensure_scheduler_runs_once() + await manually_trigger_scheduler() await scheduler.add_service(scheduler_data) scheduler_data.dynamic_sidecar.were_services_created = True - await ensure_scheduler_runs_once() + await manually_trigger_scheduler() async def test_scheduler_is_failing( - ensure_scheduler_runs_once: Callable, + disabled_scheduler_background_task: None, + manually_trigger_scheduler: Callable[[], Awaitable[None]], scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, ) -> None: - await ensure_scheduler_runs_once() + await manually_trigger_scheduler() await scheduler.add_service(scheduler_data) scheduler_data.dynamic_sidecar.status.current = DynamicSidecarStatus.FAILING - await ensure_scheduler_runs_once() + await manually_trigger_scheduler() async def test_scheduler_health_timing_out( - ensure_scheduler_runs_once: Callable, + disabled_scheduler_background_task: None, + manually_trigger_scheduler: Callable[[], Awaitable[None]], scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, mock_max_status_api_duration: None, - docker_swarm: None, mocked_dynamic_scheduler_events: None, -) -> None: +): - await ensure_scheduler_runs_once() + await manually_trigger_scheduler() await scheduler.add_service(scheduler_data) - await ensure_scheduler_runs_once() + await manually_trigger_scheduler() assert scheduler_data.dynamic_sidecar.is_available is False -async def 
test_adding_service_two_times( +async def test_adding_service_two_times_does_not_raise( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, -) -> None: +): await scheduler.add_service(scheduler_data) + assert scheduler_data.service_name in scheduler._to_observe await scheduler.add_service(scheduler_data) -async def test_collition_at_global_level( +async def test_collition_at_global_level_raises( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, -) -> None: - # pylint: disable=protected-access + mock_docker_api: None, +): scheduler._inverse_search_mapping[scheduler_data.node_uuid] = "mock_service_name" with pytest.raises(DynamicSidecarError) as execinfo: await scheduler.add_service(scheduler_data) - assert "node_uuids at a global level collided." in str(execinfo.value) - - -async def test_no_service_name( - scheduler: DynamicSidecarsScheduler, - scheduler_data: SchedulerData, - docker_swarm: None, - mocked_dynamic_scheduler_events: None, -) -> None: - scheduler_data.service_name = "" - with pytest.raises(DynamicSidecarError) as execinfo: - await scheduler.add_service(scheduler_data) - assert "a service with no name is not valid. Invalid usage." == str(execinfo.value) + assert "collide" in str(execinfo.value) async def test_remove_missing_no_error( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, + mock_docker_api: None, ) -> None: with pytest.raises(DynamicSidecarNotFoundError) as execinfo: await scheduler.mark_service_for_removal(scheduler_data.node_uuid, True) - assert f"node {scheduler_data.node_uuid} not found" == str(execinfo.value) + assert "not found" in str(execinfo.value) async def test_get_stack_status( - ensure_scheduler_runs_once: Callable, + disabled_scheduler_background_task: None, + manually_trigger_scheduler: Callable[[], Awaitable[None]], scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, + mock_docker_api: None, ) -> None: - await ensure_scheduler_runs_once() - + await manually_trigger_scheduler() await scheduler.add_service(scheduler_data) stack_status = await scheduler.get_stack_status(scheduler_data.node_uuid) @@ -423,19 +391,19 @@ async def test_get_stack_status( async def test_get_stack_status_missing( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, + mock_docker_api: None, ) -> None: with pytest.raises(DynamicSidecarNotFoundError) as execinfo: await scheduler.get_stack_status(scheduler_data.node_uuid) - assert f"node {scheduler_data.node_uuid} not found" in str(execinfo) + assert f"{scheduler_data.node_uuid} not found" in str(execinfo) async def test_get_stack_status_failing_sidecar( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, - docker_swarm: None, mocked_dynamic_scheduler_events: None, + mock_docker_api: None, ) -> None: failing_message = "some_failing_message" scheduler_data.dynamic_sidecar.status.update_failing_status(failing_message) @@ -455,9 +423,9 @@ async def test_get_stack_status_report_missing_statuses( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, mock_service_running: AsyncMock, - docker_swarm: None, mocked_dynamic_scheduler_events: None, mock_update_label: None, + mock_docker_api: None, ) -> None: async with 
_assert_get_dynamic_services_mocked( scheduler, @@ -477,9 +445,9 @@ async def test_get_stack_status_containers_are_starting( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, mock_service_running: AsyncMock, - docker_swarm: None, mocked_dynamic_scheduler_events: None, mock_update_label: None, + mock_docker_api: None, ) -> None: async with _assert_get_dynamic_services_mocked( scheduler, @@ -499,9 +467,9 @@ async def test_get_stack_status_ok( scheduler: DynamicSidecarsScheduler, scheduler_data: SchedulerData, mock_service_running: AsyncMock, - docker_swarm: None, mocked_dynamic_scheduler_events: None, mock_update_label: None, + mock_docker_api: None, ) -> None: async with _assert_get_dynamic_services_mocked( scheduler, @@ -517,15 +485,3 @@ async def test_get_stack_status_ok( service_state=ServiceState.RUNNING, service_message="", ) - - -def test_module_setup( - app_settings: AppSettings, - docker_swarm: None, -) -> None: - app = FastAPI() - app.state.settings = app_settings - module_setup.setup(app) - with TestClient(app): - - pass diff --git a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py index e84b1942d7e..79b2085df03 100644 --- a/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py +++ b/services/director-v2/tests/unit/with_dbs/test_modules_dynamic_sidecar_docker_api.py @@ -3,7 +3,8 @@ # pylint: disable=protected-access import asyncio -from typing import Any, AsyncIterable, AsyncIterator, Dict, List +import sys +from typing import Any, AsyncIterable, AsyncIterator from uuid import UUID, uuid4 import aiodocker @@ -38,7 +39,8 @@ from tenacity.stop import stop_after_delay from tenacity.wait import wait_fixed -MAX_INT64 = 9223372036854775807 +MAX_INT64 = sys.maxsize + # FIXTURES pytest_simcore_core_services_selection = ["postgres"] @@ -46,17 +48,13 @@ @pytest.fixture -async def async_docker_client( - docker_swarm: None, -) -> AsyncIterator[aiodocker.docker.Docker]: +async def async_docker_client() -> AsyncIterator[aiodocker.docker.Docker]: async with aiodocker.Docker() as client: yield client @pytest.fixture -def dynamic_sidecar_settings( - monkeypatch: MonkeyPatch, docker_swarm: None -) -> DynamicSidecarSettings: +def dynamic_sidecar_settings(monkeypatch: MonkeyPatch) -> DynamicSidecarSettings: monkeypatch.setenv("DYNAMIC_SIDECAR_IMAGE", "local/dynamic-sidecar:MOCKED") monkeypatch.setenv("DIRECTOR_V2_DYNAMIC_SCHEDULER_ENABLED", "false") monkeypatch.setenv("TRAEFIK_SIMCORE_ZONE", "test_traefik_zone") @@ -72,19 +70,18 @@ def dynamic_sidecar_settings( @pytest.fixture -def network_config(simcore_services_network_name: str) -> Dict[str, Any]: +def network_config(simcore_services_network_name: str, faker: Faker) -> dict[str, Any]: return { "Name": simcore_services_network_name, "Driver": "overlay", - "Labels": {"uuid": f"{uuid4()}"}, + "Labels": {"uuid": f"{faker.uuid4()}"}, } @pytest.fixture async def ensure_swarm_network( - network_config: Dict[str, Any], + network_config: dict[str, Any], async_docker_client: aiodocker.docker.Docker, - docker_swarm: None, ) -> AsyncIterator[None]: network_id = await docker_api.create_network(network_config) yield @@ -106,7 +103,6 @@ async def ensure_swarm_network( async def cleanup_swarm_network( simcore_services_network_name: str, async_docker_client: aiodocker.docker.Docker, - docker_swarm: None, ) -> AsyncIterator[None]: yield # docker containers must be gone before network removal is 
functional @@ -125,17 +121,12 @@ async def cleanup_swarm_network( @pytest.fixture -def missing_network_name() -> str: - return "this_network_is_missing" - - -@pytest.fixture -def test_service_name() -> str: - return "test_service_name" +def test_service_name(faker: Faker) -> str: + return f"test_service_name_{faker.hostname(0)}" @pytest.fixture -def service_spec(test_service_name: str) -> Dict[str, Any]: +def service_spec(test_service_name: str) -> dict[str, Any]: # "joseluisq/static-web-server" is ~2MB docker image return { "name": test_service_name, @@ -165,7 +156,7 @@ def dynamic_sidecar_service_spec( dynamic_sidecar_service_name: str, dynamic_sidecar_settings: DynamicSidecarSettings, scheduler_data_from_http_request: SchedulerData, -) -> Dict[str, Any]: +) -> dict[str, Any]: # "joseluisq/static-web-server" is ~2MB docker image scheduler_data_from_http_request.service_name = dynamic_sidecar_service_name @@ -219,7 +210,7 @@ def dynamic_sidecar_stack_specs( user_id: UserID, project_id: ProjectID, dynamic_sidecar_settings: DynamicSidecarSettings, -) -> List[Dict[str, Any]]: +) -> list[dict[str, Any]]: return [ { "name": f"{DYNAMIC_PROXY_SERVICE_PREFIX}_fake_proxy", @@ -252,7 +243,7 @@ def dynamic_sidecar_stack_specs( @pytest.fixture async def cleanup_dynamic_sidecar_stack( - dynamic_sidecar_stack_specs: List[Dict[str, Any]], + dynamic_sidecar_stack_specs: list[dict[str, Any]], async_docker_client: aiodocker.docker.Docker, ) -> AsyncIterator[None]: yield @@ -283,7 +274,7 @@ async def project_id_labeled_network( @pytest.fixture async def test_networks( async_docker_client: aiodocker.docker.Docker, docker_swarm: None -) -> AsyncIterator[List[str]]: +) -> AsyncIterator[list[str]]: network_names = [f"test_network_name__{k}" for k in range(5)] yield network_names @@ -368,73 +359,30 @@ async def mock_service( await docker.services.delete(service_data["ID"]) -# UTILS - - -def _assert_service( - service_spec: Dict[str, Any], service_inspect: Dict[str, Any] -) -> None: - assert service_inspect["Spec"]["Labels"] == service_spec["labels"] - assert service_inspect["Spec"]["Name"] == service_spec["name"] - assert ( - service_inspect["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"] - == service_spec["task_template"]["ContainerSpec"]["Image"] - ) - - -async def _count_services_in_stack( - node_uuid: UUID, - dynamic_sidecar_settings: DynamicSidecarSettings, - async_docker_client: aiodocker.docker.Docker, -) -> int: - services = await async_docker_client.services.list( - filters={ - "label": [ - f"swarm_stack_name={dynamic_sidecar_settings.SWARM_STACK_NAME}", - f"uuid={node_uuid}", - ] - } - ) - return len(services) - - -def _inject_impossible_resources(dynamic_sidecar_service_spec: Dict[str, Any]) -> None: - dynamic_sidecar_service_spec["task_template"]["Resources"] = { - "Reservations": {"NanoCPUs": MAX_INT64, "MemoryBytes": MAX_INT64} - } - - # TESTS -def test_new_docker_swarm(docker_swarm: None) -> None: - pass - - @pytest.mark.parametrize( "simcore_services_network_name", ("n", "network", "with_underscore", "with-dash", "with-dash_with_underscore"), ) -def test_valid_network_names( - simcore_services_network_name: str, monkeypatch: MonkeyPatch +def test_settings__valid_network_names( + simcore_services_network_name: str, + monkeypatch: MonkeyPatch, + dynamic_sidecar_settings: DynamicSidecarSettings, ) -> None: - monkeypatch.setenv("DYNAMIC_SIDECAR_IMAGE", "local/dynamic-sidecar:MOCKED") - monkeypatch.setenv("SIMCORE_SERVICES_NETWORK_NAME", simcore_services_network_name) - 
monkeypatch.setenv("TRAEFIK_SIMCORE_ZONE", "test_traefik_zone") - monkeypatch.setenv("SWARM_STACK_NAME", "test_swarm_name") - monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO") - monkeypatch.setenv("S3_ENDPOINT", "endpoint") - monkeypatch.setenv("S3_ACCESS_KEY", "access_key") - monkeypatch.setenv("S3_SECRET_KEY", "secret_key") - monkeypatch.setenv("S3_BUCKET_NAME", "bucket_name") - monkeypatch.setenv("S3_SECURE", "false") - dynamic_sidecar_settings = DynamicSidecarSettings.create_from_envs() - assert dynamic_sidecar_settings + items = dynamic_sidecar_settings.dict() + items["SIMCORE_SERVICES_NETWORK_NAME"] = simcore_services_network_name + + # validate network names + DynamicSidecarSettings.parse_obj(items) + + +async def test_failed_docker_client_request(docker_swarm: None): + + missing_network_name = "this_network_cannot_be_found" -async def test_failed_docker_client_request( - missing_network_name: str, docker_swarm: None -) -> None: with pytest.raises(GenericDockerError) as execinfo: async with docker_api.docker_client() as client: await client.networks.get(missing_network_name) @@ -449,16 +397,17 @@ async def test_get_swarm_network_ok( simcore_services_network_name: str, ensure_swarm_network: None, docker_swarm: None, -) -> None: +): swarm_network = await docker_api.get_swarm_network(dynamic_sidecar_settings) assert swarm_network["Name"] == simcore_services_network_name async def test_get_swarm_network_missing_network( dynamic_sidecar_settings: DynamicSidecarSettings, docker_swarm: None -) -> None: +): with pytest.raises(DynamicSidecarError) as excinfo: await docker_api.get_swarm_network(dynamic_sidecar_settings) + assert str(excinfo.value) == ( "Swarm network name (searching for '*test_network_name*') is not configured." "Found following networks: []" @@ -466,46 +415,49 @@ async def test_get_swarm_network_missing_network( async def test_recreate_network_multiple_times( - network_config: Dict[str, Any], + network_config: dict[str, Any], cleanup_swarm_network: None, docker_swarm: None, -) -> None: +): network_ids = [await docker_api.create_network(network_config) for _ in range(10)] - network_ids_set = set(network_ids) - assert len(network_ids_set) == 1 - network_id = network_ids_set.pop() - assert type(network_id) == str + assert len(set(network_ids)) == 1, "expected same perh config" + assert all(isinstance(nid, str) for nid in network_ids) async def test_create_service( - service_spec: Dict[str, Any], + service_spec: dict[str, Any], cleanup_test_service_name: None, docker_swarm: None, -) -> None: +): service_id = await docker_api.create_service_and_get_id(service_spec) assert service_id async def test_inspect_service( - service_spec: Dict[str, Any], + service_spec: dict[str, Any], cleanup_test_service_name: None, docker_swarm: None, -) -> None: +): service_id = await docker_api.create_service_and_get_id(service_spec) assert service_id service_inspect = await docker_api.inspect_service(service_id) - _assert_service(service_spec, service_inspect) + assert service_inspect["Spec"]["Labels"] == service_spec["labels"] + assert service_inspect["Spec"]["Name"] == service_spec["name"] + assert ( + service_inspect["Spec"]["TaskTemplate"]["ContainerSpec"]["Image"] + == service_spec["task_template"]["ContainerSpec"]["Image"] + ) async def test_services_to_observe_exist( dynamic_sidecar_service_name: str, - dynamic_sidecar_service_spec: Dict[str, Any], + dynamic_sidecar_service_spec: dict[str, Any], dynamic_sidecar_settings: DynamicSidecarSettings, cleanup_test_dynamic_sidecar_service: None, 
docker_swarm: None, -) -> None: +): service_id = await docker_api.create_service_and_get_id( dynamic_sidecar_service_spec ) @@ -516,16 +468,15 @@ async def test_services_to_observe_exist( ) assert len(dynamic_services) == 1 - for entry in dynamic_services: - assert entry.service_name == dynamic_sidecar_service_name + assert dynamic_services[0].service_name == dynamic_sidecar_service_name async def test_dynamic_sidecar_in_running_state_and_node_id_is_recovered( - dynamic_sidecar_service_spec: Dict[str, Any], + dynamic_sidecar_service_spec: dict[str, Any], dynamic_sidecar_settings: DynamicSidecarSettings, cleanup_test_dynamic_sidecar_service: None, docker_swarm: None, -) -> None: +): service_id = await docker_api.create_service_and_get_id( dynamic_sidecar_service_spec ) @@ -543,12 +494,17 @@ async def test_dynamic_sidecar_in_running_state_and_node_id_is_recovered( async def test_dynamic_sidecar_get_dynamic_sidecar_sate_fail_to_schedule( - dynamic_sidecar_service_spec: Dict[str, Any], + dynamic_sidecar_service_spec: dict[str, Any], dynamic_sidecar_settings: DynamicSidecarSettings, cleanup_test_dynamic_sidecar_service: None, docker_swarm: None, -) -> None: - _inject_impossible_resources(dynamic_sidecar_service_spec) +): + + # set unachievable resource + dynamic_sidecar_service_spec["task_template"]["Resources"] = { + "Reservations": {"NanoCPUs": MAX_INT64, "MemoryBytes": MAX_INT64} + } + service_id = await docker_api.create_service_and_get_id( dynamic_sidecar_service_spec ) @@ -567,10 +523,10 @@ async def test_dynamic_sidecar_get_dynamic_sidecar_sate_fail_to_schedule( async def test_is_dynamic_sidecar_missing( node_uuid: UUID, dynamic_sidecar_settings: DynamicSidecarSettings, - dynamic_sidecar_stack_specs: List[Dict[str, Any]], + dynamic_sidecar_stack_specs: list[dict[str, Any]], cleanup_dynamic_sidecar_stack: None, docker_swarm: None, -) -> None: +): services_are_missing = await docker_api.is_dynamic_sidecar_missing( node_uuid, dynamic_sidecar_settings @@ -591,7 +547,7 @@ async def test_is_dynamic_sidecar_missing( async def test_are_all_services_present( node_uuid: UUID, dynamic_sidecar_settings: DynamicSidecarSettings, - dynamic_sidecar_stack_specs: List[Dict[str, Any]], + dynamic_sidecar_stack_specs: list[dict[str, Any]], cleanup_dynamic_sidecar_stack: None, docker_swarm: None, ): @@ -614,10 +570,27 @@ async def test_are_all_services_present( async def test_remove_dynamic_sidecar_stack( node_uuid: UUID, dynamic_sidecar_settings: DynamicSidecarSettings, - dynamic_sidecar_stack_specs: List[Dict[str, Any]], + dynamic_sidecar_stack_specs: list[dict[str, Any]], docker_swarm: None, async_docker_client: aiodocker.docker.Docker, ): + async def _count_services_in_stack( + node_uuid: UUID, + dynamic_sidecar_settings: DynamicSidecarSettings, + async_docker_client: aiodocker.docker.Docker, + ) -> int: + services = await async_docker_client.services.list( + filters={ + "label": [ + f"swarm_stack_name={dynamic_sidecar_settings.SWARM_STACK_NAME}", + f"uuid={node_uuid}", + ] + } + ) + return len(services) + + # --------- + assert ( await _count_services_in_stack( node_uuid, dynamic_sidecar_settings, async_docker_client @@ -648,10 +621,10 @@ async def test_remove_dynamic_sidecar_stack( async def test_remove_dynamic_sidecar_network( - network_config: Dict[str, Any], + network_config: dict[str, Any], simcore_services_network_name: str, docker_swarm: None, -) -> None: +): network_ids = [await docker_api.create_network(network_config) for _ in range(10)] assert len(set(network_ids)) == 1 @@ -663,7 +636,7 @@ 
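test_remove_dynamic_sidecar_stack above inlines its former _count_services_in_stack helper; underneath it is a plain label-filtered swarm service listing. A self-contained sketch of that query, not part of the diff (the label names come from the test, the stack name and uuid below are placeholders):

import asyncio

import aiodocker


async def count_services_in_stack(stack_name: str, node_uuid: str) -> int:
    # count swarm services carrying both the stack label and the node-uuid label
    async with aiodocker.Docker() as client:
        services = await client.services.list(
            filters={
                "label": [
                    f"swarm_stack_name={stack_name}",
                    f"uuid={node_uuid}",
                ]
            }
        )
    return len(services)


# e.g. asyncio.run(count_services_in_stack("test_swarm_name", "some-node-uuid"))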
async def test_remove_dynamic_sidecar_network( async def test_remove_dynamic_sidecar_network_fails( simcore_services_network_name: str, docker_swarm: None -) -> None: +): delete_result = await docker_api.remove_dynamic_sidecar_network( simcore_services_network_name ) @@ -671,11 +644,10 @@ async def test_remove_dynamic_sidecar_network_fails( async def test_list_dynamic_sidecar_services( - node_uuid: UUID, user_id: UserID, project_id: ProjectID, dynamic_sidecar_settings: DynamicSidecarSettings, - dynamic_sidecar_stack_specs: List[Dict[str, Any]], + dynamic_sidecar_stack_specs: list[dict[str, Any]], cleanup_dynamic_sidecar_stack: None, docker_swarm: None, ): @@ -693,10 +665,10 @@ async def test_list_dynamic_sidecar_services( async def test_is_dynamic_service_running( node_uuid: UUID, dynamic_sidecar_settings: DynamicSidecarSettings, - dynamic_sidecar_stack_specs: List[Dict[str, Any]], + dynamic_sidecar_stack_specs: list[dict[str, Any]], cleanup_dynamic_sidecar_stack: None, docker_swarm: None, -) -> None: +): assert ( await docker_api.is_dynamic_service_running(node_uuid, dynamic_sidecar_settings) is False @@ -718,7 +690,7 @@ async def test_get_projects_networks_containers( project_id_labeled_network: str, project_id: ProjectID, docker_swarm: None, -) -> None: +): # make sure API does not change params = {"filters": clean_filters({"label": [f"project_id={project_id}"]})} filtered_networks = ( @@ -732,10 +704,16 @@ async def test_get_projects_networks_containers( async def test_get_or_create_networks_ids( - test_networks: List[str], existing_network: str, project_id: ProjectID + test_networks: list[str], existing_network: str, project_id: ProjectID ): # test with duplicate networks and existing networks - networks_to_test = test_networks + test_networks + [existing_network] + networks_to_test = ( + test_networks + + test_networks + + [ + existing_network, + ] + ) network_ids = await docker_api.get_or_create_networks_ids( networks=networks_to_test, project_id=project_id, @@ -748,7 +726,7 @@ async def test_update_scheduler_data_label( mock_service: str, mock_scheduler_data: SchedulerData, docker_swarm: None, -) -> None: +): await docker_api.update_scheduler_data_label(mock_scheduler_data) # fetch stored data in labels @@ -762,7 +740,7 @@ async def test_update_scheduler_data_label( async def test_update_scheduler_data_label_skip_if_service_is_missing( docker: aiodocker.Docker, mock_scheduler_data: SchedulerData -) -> None: +): # NOTE: checks that docker engine replies with # `service mock-service-name not found` # the error is handled and that the error is not raised @@ -771,7 +749,7 @@ async def test_update_scheduler_data_label_skip_if_service_is_missing( async def test_regression_update_service_update_out_of_sequence( docker: aiodocker.Docker, mock_service: str, docker_swarm: None -) -> None: +): # NOTE: checks that the docker engine replies with # `rpc error: code = Unknown desc = update out of sequence` # the error is captured and raised as `docker_api._RetryError` @@ -791,7 +769,7 @@ async def test_regression_update_service_update_out_of_sequence( async def test_constrain_service_to_node( docker: aiodocker.Docker, mock_service: str, docker_swarm: None -) -> None: +): # get a node's ID docker_nodes = await docker.nodes.list() target_node_id = docker_nodes[0]["ID"] diff --git a/services/dynamic-sidecar/.env-devel b/services/dynamic-sidecar/.env-devel index 6d762204518..2abaf3d51cf 100644 --- a/services/dynamic-sidecar/.env-devel +++ b/services/dynamic-sidecar/.env-devel @@ -5,7 +5,7 @@ # 
environs in Dockerfile ---------------- SC_BOOT_MODE=local-development - +DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR="/tmp/dy-volumes" # service specific required vars DYNAMIC_SIDECAR_COMPOSE_NAMESPACE=dev-namespace @@ -28,4 +28,4 @@ S3_ENDPOINT=MINIO S3_ACCESS_KEY=mocked S3_SECRET_KEY=mocked S3_BUCKET_NAME=mocked -R_CLONE_PROVIDER=MINIO \ No newline at end of file +R_CLONE_PROVIDER=MINIO diff --git a/services/dynamic-sidecar/Dockerfile b/services/dynamic-sidecar/Dockerfile index bb602e84aa0..d9e427137ac 100644 --- a/services/dynamic-sidecar/Dockerfile +++ b/services/dynamic-sidecar/Dockerfile @@ -45,7 +45,7 @@ ENV PYTHONDONTWRITEBYTECODE=1 \ ENV PATH="${VIRTUAL_ENV}/bin:$PATH" # directory where dynamic-sidecar stores creates and shares # volumes between itself and the spawned containers -ENV DY_VOLUMES="/dy-volumes" +ENV DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR="/dy-volumes" # rclone installation ARG R_CLONE_VERSION="1.58.0" @@ -70,7 +70,7 @@ RUN apt-get update &&\ # NOTE: python virtualenv is used here such that installed # packages may be moved to production image easily by copying the venv RUN python -m venv ${VIRTUAL_ENV} -RUN mkdir -p ${DY_VOLUMES} +RUN mkdir -p ${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR} RUN pip install --upgrade --no-cache-dir \ pip~=22.0 \ @@ -85,8 +85,8 @@ COPY --chown=scu:scu services/dynamic-sidecar/requirements/_base.txt . RUN pip --no-cache-dir install -r _base.txt # copy utility devops scripts -COPY --chown=scu:scu services/dynamic-sidecar/extra/Makefile /home/scu -COPY --chown=root:root services/dynamic-sidecar/extra/Makefile /root +COPY --chown=scu:scu services/dynamic-sidecar/scripts/Makefile /home/scu +COPY --chown=root:root services/dynamic-sidecar/scripts/Makefile /root # --------------------------Prod-depends-only stage ------------------- # This stage is for production only dependencies that get partially wiped out afterwards (final docker image concerns) @@ -124,7 +124,7 @@ WORKDIR /home/scu # Starting from clean base image, copies pre-installed virtualenv from prod-only-deps COPY --chown=scu:scu --from=prod-only-deps ${VIRTUAL_ENV} ${VIRTUAL_ENV} -COPY --chown=scu:scu --from=prod-only-deps ${DY_VOLUMES} ${DY_VOLUMES} +COPY --chown=scu:scu --from=prod-only-deps ${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR} ${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR} # Copies booting scripts COPY --chown=scu:scu services/dynamic-sidecar/docker services/dynamic-sidecar/docker @@ -157,7 +157,7 @@ ENV SC_BUILD_TARGET=development WORKDIR /devel RUN chown -R scu:scu ${VIRTUAL_ENV} -RUN chown -R scu:scu ${DY_VOLUMES} +RUN chown -R scu:scu ${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR} EXPOSE 8000 EXPOSE 3000 diff --git a/services/dynamic-sidecar/Makefile b/services/dynamic-sidecar/Makefile index 234af0650ea..7ecf6c298a6 100644 --- a/services/dynamic-sidecar/Makefile +++ b/services/dynamic-sidecar/Makefile @@ -1,8 +1,6 @@ include ../../scripts/common.Makefile include ../../scripts/common-service.Makefile -APP_NAME := $(notdir $(CURDIR)) - .DEFAULT_GOAL := help @@ -12,10 +10,17 @@ APP_NAME := $(notdir $(CURDIR)) @echo "WARNING ##### $@ does not exist, cloning $< as $@ ############"; cp $< $@) -.PHONY: openapi.json + + +.PHONY: openapi-specs openapi.json +openapi-specs: openapi.json openapi.json: .env ## Creates OAS document openapi.json - # generating openapi specs (OAS) file - export $(shell grep -v '^#' $< | xargs -0) && python3 -c "import json; from $(APP_PACKAGE_NAME).main import *; print( json.dumps(app.openapi(), indent=2) )" > $@ + # generating openapi specs file under $< + @set -o allexport; \ + 
source .env; \ + set +o allexport; \ + simcore-service-dynamic-sidecar openapi > $@ + # validates OAS file: $@ @cd $(CURDIR); \ $(SCRIPTS_DIR)/openapi-generator-cli.bash validate --input-spec /local/$@ diff --git a/services/dynamic-sidecar/docker/boot.sh b/services/dynamic-sidecar/docker/boot.sh index e10b8764d88..0f752df7d72 100755 --- a/services/dynamic-sidecar/docker/boot.sh +++ b/services/dynamic-sidecar/docker/boot.sh @@ -35,7 +35,7 @@ if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then exec sh -c " cd services/dynamic-sidecar/src/simcore_service_dynamic_sidecar && \ - uvicorn main:app \ + uvicorn main:the_app \ --host 0.0.0.0 \ --reload \ $reload_dir_packages @@ -43,7 +43,7 @@ if [ "${SC_BOOT_MODE}" = "debug-ptvsd" ]; then --log-level \"${SERVER_LOG_LEVEL}\" " else - exec uvicorn simcore_service_dynamic_sidecar.main:app \ + exec uvicorn simcore_service_dynamic_sidecar.main:the_app \ --host 0.0.0.0 \ --log-level "${SERVER_LOG_LEVEL}" diff --git a/services/dynamic-sidecar/docker/entrypoint.sh b/services/dynamic-sidecar/docker/entrypoint.sh index b7b6268dd17..e6d007a9246 100755 --- a/services/dynamic-sidecar/docker/entrypoint.sh +++ b/services/dynamic-sidecar/docker/entrypoint.sh @@ -98,16 +98,16 @@ fi # Change ownership of volumes mount directory # directories are empty at this point # each individual subdirectory is a unique volume -chown --verbose --recursive "$SC_USER_NAME":"$GROUPNAME" "${DY_VOLUMES}" +chown --verbose --recursive "$SC_USER_NAME":"$GROUPNAME" "${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR}" # Allow owner and group to edit write and execute # files from all the subdirectories # When the service access files downloaded by the dynamic-sidecar # it uses group permissions -chmod --verbose --recursive 774 "${DY_VOLUMES}" +chmod --verbose --recursive 774 "${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR}" echo "$INFO Starting $* ..." echo " $SC_USER_NAME rights : $(id "$SC_USER_NAME")" echo " local dir : $(ls -al)" -echo " volumes dir : $(ls -al "${DY_VOLUMES}")" +echo " volumes dir : $(ls -al "${DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR}")" exec gosu "$SC_USER_NAME" "$@" diff --git a/services/dynamic-sidecar/openapi.json b/services/dynamic-sidecar/openapi.json index 5fda174802a..48c8664669d 100644 --- a/services/dynamic-sidecar/openapi.json +++ b/services/dynamic-sidecar/openapi.json @@ -224,9 +224,9 @@ "tags": [ "containers" ], - "summary": "Get Entrypoint Container Name", + "summary": "Get Containers Name", "description": "Searches for the container's name given the network\non which the proxy communicates with it.\nSupported filters:\n network: name of the network", - "operationId": "get_entrypoint_container_name_v1_containers_name_get", + "operationId": "get_containers_name_v1_containers_name_get", "parameters": [ { "description": "JSON encoded dictionary. FastAPI does not allow for dict as type in query parameters", diff --git a/services/dynamic-sidecar/requirements/_base.in b/services/dynamic-sidecar/requirements/_base.in index b1638b36428..3a56b213329 100644 --- a/services/dynamic-sidecar/requirements/_base.in +++ b/services/dynamic-sidecar/requirements/_base.in @@ -16,16 +16,22 @@ --requirement ../../../packages/simcore-sdk/requirements/_base.in + +# +# TODO: Fix warning messages due to aiocache required in simcore-sdk (for r-clone!) 
+# WARNING message is +# ujson module not found, using json +# msgpack not installed, MsgPackSerializer unavailable +# SEE aiocache[mspack] + aio-pika aiodocker aiofiles -async_generator -async-timeout docker-compose fastapi httpx pydantic -python-magic +python-magic # file type identification library. See 'magic.from_file(...)' PyYAML uvicorn watchdog diff --git a/services/dynamic-sidecar/requirements/_base.txt b/services/dynamic-sidecar/requirements/_base.txt index 3466cb763c0..9df9d95164c 100644 --- a/services/dynamic-sidecar/requirements/_base.txt +++ b/services/dynamic-sidecar/requirements/_base.txt @@ -53,11 +53,8 @@ anyio==3.6.1 # starlette asgiref==3.5.2 # via uvicorn -async-generator==1.10 - # via -r requirements/_base.in async-timeout==4.0.2 # via - # -r requirements/_base.in # aiohttp # aiopg attrs==20.3.0 diff --git a/services/dynamic-sidecar/requirements/_test.in b/services/dynamic-sidecar/requirements/_test.in index 30feb398bf0..b0bf4d8228c 100644 --- a/services/dynamic-sidecar/requirements/_test.in +++ b/services/dynamic-sidecar/requirements/_test.in @@ -2,15 +2,13 @@ --constraint ./constraints.txt --constraint _base.txt -pytest -pytest-cov -pytest-asyncio -pytest-mock + async-asgi-testclient faker - -# missing mypy stubs -types-pkg_resources -types-aiofiles -types-PyYAML +pytest pytest-asyncio +pytest-cov +pytest-mock +types-aiofiles # missing mypy stubs +types-pkg_resources # missing mypy stubs +types-PyYAML # missing mypy stubs diff --git a/services/dynamic-sidecar/requirements/_tools.in b/services/dynamic-sidecar/requirements/_tools.in index 09e992d6bec..7eeb6453522 100644 --- a/services/dynamic-sidecar/requirements/_tools.in +++ b/services/dynamic-sidecar/requirements/_tools.in @@ -5,8 +5,4 @@ --requirement ../../../requirements/devenv.txt -# basic dev tools -mypy pylint -black -isort diff --git a/services/dynamic-sidecar/requirements/_tools.txt b/services/dynamic-sidecar/requirements/_tools.txt index c85ff81268c..c09a77b086a 100644 --- a/services/dynamic-sidecar/requirements/_tools.txt +++ b/services/dynamic-sidecar/requirements/_tools.txt @@ -4,12 +4,10 @@ # # pip-compile --output-file=requirements/_tools.txt --strip-extras requirements/_tools.in # -astroid==2.11.5 +astroid==2.11.6 # via pylint black==22.3.0 - # via - # -r requirements/../../../requirements/devenv.txt - # -r requirements/_tools.in + # via -r requirements/../../../requirements/devenv.txt bump2version==1.0.1 # via -r requirements/../../../requirements/devenv.txt cfgv==3.3.1 @@ -23,25 +21,20 @@ dill==0.3.5.1 # via pylint distlib==0.3.4 # via virtualenv -filelock==3.7.0 +filelock==3.7.1 # via virtualenv identify==2.5.1 # via pre-commit isort==5.10.1 # via # -r requirements/../../../requirements/devenv.txt - # -r requirements/_tools.in # pylint lazy-object-proxy==1.7.1 # via astroid mccabe==0.7.0 # via pylint -mypy==0.960 - # via -r requirements/_tools.in mypy-extensions==0.4.3 - # via - # black - # mypy + # via black nodeenv==1.6.0 # via pre-commit pathspec==0.9.0 @@ -57,7 +50,7 @@ platformdirs==2.5.2 # virtualenv pre-commit==2.19.0 # via -r requirements/../../../requirements/devenv.txt -pylint==2.13.9 +pylint==2.13.8 # via -r requirements/_tools.in pyyaml==5.4.1 # via @@ -75,7 +68,6 @@ tomli==2.0.1 # via # -c requirements/_test.txt # black - # mypy # pep517 # pylint typing-extensions==4.2.0 @@ -83,7 +75,6 @@ typing-extensions==4.2.0 # -c requirements/_base.txt # astroid # black - # mypy # pylint virtualenv==20.14.1 # via pre-commit diff --git a/services/dynamic-sidecar/extra/Makefile 
b/services/dynamic-sidecar/scripts/Makefile similarity index 84% rename from services/dynamic-sidecar/extra/Makefile rename to services/dynamic-sidecar/scripts/Makefile index 1587f1dfafa..665cece87d8 100644 --- a/services/dynamic-sidecar/extra/Makefile +++ b/services/dynamic-sidecar/scripts/Makefile @@ -15,10 +15,12 @@ push-outputs: ## push the outputs for this service @echo ">>>>> Expect a 204 reply if OK <<<<<" curl -v -X POST ${BASE_ADDRESS}/containers/ports/outputs:push +.PHONY: info +info: ## displays app info + # app settings + @simcore-service-dynamic-sidecar settings --as-json .PHONY: help help: ## this help @grep -E '^[a-zA-Z0-9_-]+:.*?## .*$$' $(MAKEFILE_LIST) \ | sed -n 's/^\(.*\): \(.*\)##\(.*\)/\1\t\3/p' - - diff --git a/services/dynamic-sidecar/setup.py b/services/dynamic-sidecar/setup.py index 841105e82af..c4171130c44 100644 --- a/services/dynamic-sidecar/setup.py +++ b/services/dynamic-sidecar/setup.py @@ -1,12 +1,11 @@ import re import sys from pathlib import Path -from typing import Set from setuptools import find_packages, setup -def read_reqs(reqs_path: Path) -> Set[str]: +def read_reqs(reqs_path: Path) -> set[str]: return { r for r in re.findall( @@ -54,6 +53,11 @@ def read_reqs(reqs_path: Path) -> Set[str]: PROD_REQUIREMENTS=PROD_REQUIREMENTS, TEST_REQUIREMENTS=TEST_REQUIREMENTS, setup_requires=["setuptools_scm"], + entry_points={ + "console_scripts": [ + "simcore-service-dynamic-sidecar=simcore_service_dynamic_sidecar.cli:main", + ], + }, ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/__init__.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/__init__.py index 97f65e482d1..1a25517afd4 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/__init__.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/__init__.py @@ -1,3 +1,3 @@ from ._routing import main_router -__all__ = ["main_router"] +__all__: tuple[str, ...] = ("main_router",) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/_routing.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/_routing.py index 0750c059c52..8e7f0c5f91b 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/_routing.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/_routing.py @@ -13,4 +13,4 @@ main_router.include_router(containers_router, prefix=f"/{API_VTAG}") main_router.include_router(containers_router_extension, prefix=f"/{API_VTAG}") -__all__ = ["main_router"] +__all__: tuple[str, ...] 
= ("main_router",) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py index ffe50483bd3..8dbfc2e7143 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers.py @@ -4,7 +4,7 @@ import json import logging import traceback -from typing import Any, Dict, List, Union +from typing import Any, Union from fastapi import ( APIRouter, @@ -30,7 +30,11 @@ from ..core.docker_utils import docker_client from ..core.rabbitmq import RabbitMQ from ..core.settings import DynamicSidecarSettings -from ..core.shared_handlers import remove_the_compose_spec, write_file_and_run_command +from ..core.shared_handlers import ( + cleanup_containers_and_volumes, + remove_the_compose_spec, + write_file_and_run_command, +) from ..core.utils import assemble_container_names from ..core.validation import ( InvalidComposeSpec, @@ -61,11 +65,14 @@ async def _task_docker_compose_up( ) -> None: # building is a security risk hence is disabled via "--no-build" parameter await send_message(rabbitmq, "starting service containers") - command = ( - "docker-compose --project-name {project} --file {file_path} " - "up --no-build --detach" - ) + with directory_watcher_disabled(app): + await cleanup_containers_and_volumes(shared_store, settings) + + command = ( + "docker-compose --project-name {project} --file {file_path} " + "up --no-build --detach" + ) finished_without_errors, stdout = await write_file_and_run_command( settings=settings, file_content=shared_store.compose_spec, @@ -88,7 +95,7 @@ async def _task_docker_compose_up( return None -def _raise_if_container_is_missing(id: str, container_names: List[str]) -> None: +def _raise_if_container_is_missing(id: str, container_names: list[str]) -> None: if id not in container_names: message = ( f"No container '{id}' was started. 
Started containers '{container_names}'" @@ -115,7 +122,7 @@ async def runs_docker_compose_up( application_health: ApplicationHealth = Depends(get_application_health), rabbitmq: RabbitMQ = Depends(get_rabbitmq), mounted_volumes: MountedVolumes = Depends(get_mounted_volumes), -) -> Union[List[str], Dict[str, Any]]: +) -> Union[list[str], dict[str, Any]]: """Expects the docker-compose spec as raw-body utf-8 encoded text""" # stores the compose spec after validation @@ -166,7 +173,7 @@ async def runs_docker_compose_down( settings: DynamicSidecarSettings = Depends(get_settings), shared_store: SharedStore = Depends(get_shared_store), app: FastAPI = Depends(get_application), -) -> Union[str, Dict[str, Any]]: +) -> Union[str, dict[str, Any]]: """Removes the previously started service and returns the docker-compose output""" @@ -204,13 +211,13 @@ async def containers_docker_inspect( False, description="if True only show the status of the container" ), shared_store: SharedStore = Depends(get_shared_store), -) -> Dict[str, Any]: +) -> dict[str, Any]: """ Returns entire docker inspect data, if only_state is True, the status of the containers is returned """ - def _format_result(container_inspect: Dict[str, Any]) -> Dict[str, Any]: + def _format_result(container_inspect: dict[str, Any]) -> dict[str, Any]: if only_status: container_state = container_inspect.get("State", {}) @@ -262,7 +269,7 @@ async def get_container_logs( description="Enabling this parameter will include timestamps in logs", ), shared_store: SharedStore = Depends(get_shared_store), -) -> List[str]: +) -> list[str]: """Returns the logs of a given container if found""" _raise_if_container_is_missing(id, shared_store.container_names) @@ -273,7 +280,7 @@ async def get_container_logs( if timestamps: args["timestamps"] = True - container_logs: List[str] = await container_instance.log(**args) + container_logs: list[str] = await container_instance.log(**args) return container_logs @@ -288,7 +295,7 @@ async def get_container_logs( }, }, ) -async def get_entrypoint_container_name( +async def get_containers_name( filters: str = Query( ..., description=( @@ -297,14 +304,14 @@ async def get_entrypoint_container_name( ), ), shared_store: SharedStore = Depends(get_shared_store), -) -> Union[str, Dict[str, Any]]: +) -> Union[str, dict[str, Any]]: """ Searches for the container's name given the network on which the proxy communicates with it. 
Supported filters: network: name of the network """ - filters_dict: Dict[str, str] = json.loads(filters) + filters_dict: dict[str, str] = json.loads(filters) if not isinstance(filters_dict, dict): raise HTTPException( status.HTTP_422_UNPROCESSABLE_ENTITY, @@ -348,14 +355,11 @@ async def get_entrypoint_container_name( ) async def inspect_container( id: str, shared_store: SharedStore = Depends(get_shared_store) -) -> Dict[str, Any]: +) -> dict[str, Any]: """Returns information about the container, like docker inspect command""" _raise_if_container_is_missing(id, shared_store.container_names) async with docker_client() as docker: container_instance = await docker.containers.get(id) - inspect_result: Dict[str, Any] = await container_instance.show() + inspect_result: dict[str, Any] = await container_instance.show() return inspect_result - - -__all__ = ["containers_router"] diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_extension.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_extension.py index 90d8342af7b..70889f61f2b 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_extension.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/api/containers_extension.py @@ -3,7 +3,7 @@ import logging from collections import deque -from typing import Any, Awaitable, Deque, Dict, List, Optional, Set +from typing import Any, Awaitable, Deque, Optional from aiodocker.networks import DockerNetwork from fastapi import APIRouter, Depends, FastAPI, HTTPException, Query, Response, status @@ -41,7 +41,7 @@ class CreateDirsRequestItem(BaseModel): - outputs_labels: Dict[str, ServiceOutput] + outputs_labels: dict[str, ServiceOutput] class PatchDirectoryWatcherItem(BaseModel): @@ -53,7 +53,7 @@ class _BaseNetworkItem(BaseModel): class AttachContainerToNetworkItem(_BaseNetworkItem): - network_aliases: List[str] + network_aliases: list[str] class DetachContainerFromNetworkItem(_BaseNetworkItem): @@ -69,6 +69,7 @@ class DetachContainerFromNetworkItem(_BaseNetworkItem): async def restore_state( rabbitmq: RabbitMQ = Depends(get_rabbitmq), mounted_volumes: MountedVolumes = Depends(get_mounted_volumes), + settings: DynamicSidecarSettings = Depends(get_settings), ) -> None: """ When restoring the state: @@ -81,7 +82,7 @@ async def restore_state( for state_path in mounted_volumes.disk_state_paths(): await send_message(rabbitmq, f"Downloading state for {state_path}") - awaitables.append(pull_path_if_exists(state_path)) + awaitables.append(pull_path_if_exists(state_path, settings)) await logged_gather(*awaitables) @@ -97,6 +98,7 @@ async def restore_state( async def save_state( rabbitmq: RabbitMQ = Depends(get_rabbitmq), mounted_volumes: MountedVolumes = Depends(get_mounted_volumes), + settings: DynamicSidecarSettings = Depends(get_settings), ) -> None: awaitables: Deque[Awaitable[Optional[Any]]] = deque() @@ -104,7 +106,7 @@ async def save_state( for state_path in mounted_volumes.disk_state_paths(): await send_message(rabbitmq, f"Saving state for {state_path}") awaitables.append( - upload_path_if_exists(state_path, mounted_volumes.state_exclude) + upload_path_if_exists(state_path, mounted_volumes.state_exclude, settings) ) await logged_gather(*awaitables) @@ -118,7 +120,7 @@ async def save_state( status_code=status.HTTP_200_OK, ) async def pull_input_ports( - port_keys: Optional[List[str]] = None, + port_keys: Optional[list[str]] = None, rabbitmq: RabbitMQ = Depends(get_rabbitmq), mounted_volumes: 
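The handler renamed to get_containers_name above keeps taking its filters as a JSON-encoded query string, since FastAPI does not accept a dict as a query-parameter type. A hedged client-side sketch of calling it, not part of the diff (the base URL is a placeholder and the plain-string response shape is assumed from the handler's return type):

import json

import httpx


async def fetch_containers_name(base_url: str, network_name: str) -> str:
    params = {"filters": json.dumps({"network": network_name})}
    async with httpx.AsyncClient(base_url=base_url) as client:
        # GET /v1/containers/name, per the regenerated openapi.json above
        response = await client.get("/v1/containers/name", params=params)
        response.raise_for_status()
        return response.json()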
MountedVolumes = Depends(get_mounted_volumes), ) -> int: @@ -176,7 +178,7 @@ async def create_output_dirs( status_code=status.HTTP_200_OK, ) async def pull_output_ports( - port_keys: Optional[List[str]] = None, + port_keys: Optional[list[str]] = None, rabbitmq: RabbitMQ = Depends(get_rabbitmq), mounted_volumes: MountedVolumes = Depends(get_mounted_volumes), ) -> int: @@ -202,7 +204,7 @@ async def pull_output_ports( }, ) async def push_output_ports( - port_keys: Optional[List[str]] = None, + port_keys: Optional[list[str]] = None, rabbitmq: RabbitMQ = Depends(get_rabbitmq), mounted_volumes: MountedVolumes = Depends(get_mounted_volumes), ) -> None: @@ -284,7 +286,7 @@ async def attach_container_to_network( container_instance = await docker.containers.get(id) container_inspect = await container_instance.show() - attached_network_ids: Set[str] = { + attached_network_ids: set[str] = { x["NetworkID"] for x in container_inspect["NetworkSettings"]["Networks"].values() } @@ -319,7 +321,7 @@ async def detach_container_from_network( container_instance = await docker.containers.get(id) container_inspect = await container_instance.show() - attached_network_ids: Set[str] = { + attached_network_ids: set[str] = { x["NetworkID"] for x in container_inspect["NetworkSettings"]["Networks"].values() } diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py new file mode 100644 index 00000000000..de316d115ca --- /dev/null +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/cli.py @@ -0,0 +1,28 @@ +import json +import logging + +import typer +from settings_library.utils_cli import create_settings_command +from simcore_service_dynamic_sidecar.core.application import create_base_app + +from ._meta import PROJECT_NAME +from .core.settings import DynamicSidecarSettings + +log = logging.getLogger(__name__) +main = typer.Typer(name=PROJECT_NAME) + + +main.command()(create_settings_command(settings_cls=DynamicSidecarSettings, logger=log)) + + +@main.command() +def openapi(): + """Prints OpenAPI specifications in json format""" + app = create_base_app() + typer.secho(json.dumps(app.openapi(), indent=2)) + + +# +# NOTE: We intentionally did NOT create a command to run the application +# Use instead $ uvicorn simcore_service_dynamic_sidecar.main:the_app +# diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py index d13b5a24f05..521b7323769 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/application.py @@ -1,5 +1,4 @@ import logging -from typing import Any, Callable, Coroutine from fastapi import FastAPI from servicelib.fastapi.openapi import override_fastapi_openapi_method @@ -10,13 +9,14 @@ from ..models.domains.shared_store import SharedStore from ..models.schemas.application_health import ApplicationHealth from ..modules.directory_watcher import setup_directory_watcher +from ..modules.mounted_fs import MountedVolumes, setup_mounted_fs from .docker_logs import setup_background_log_fetcher from .error_handlers import http_error_handler, node_not_found_error_handler from .errors import BaseDynamicSidecarError from .rabbitmq import setup_rabbitmq from .remote_debug import setup as remote_debug_setup from .settings import DynamicSidecarSettings -from .shared_handlers import 
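The new cli.py above wires a typer application with a settings command (from settings_library) and an openapi command that dumps app.openapi(). A minimal test sketch, not part of the diff, assuming the environment variables required by DynamicSidecarSettings are already set (e.g. via the project's env-file fixtures):

from typer.testing import CliRunner

from simcore_service_dynamic_sidecar.cli import main


def test_cli_openapi_prints_spec() -> None:
    result = CliRunner().invoke(main, ["openapi"])
    assert result.exit_code == 0, result.output
    # the command prints the OpenAPI document as JSON to stdout
    assert '"openapi"' in result.output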
on_shutdown_handler +from .shared_handlers import remove_the_compose_spec from .utils import login_registry, volumes_fix_permissions logger = logging.getLogger(__name__) @@ -32,76 +32,105 @@ S S S ss. S S S S sSSs S S sSSO S sS' S P S b S S P S S S O S S S S S P S S S S S S O S S -P ss" P ` ss' P P ss" P sSSss "sss' P P P P {0} +P ss" P ` ss' P P ss" P sSSss "sss' P P P P {} """.format( f"v{__version__}" ) -def assemble_application() -> FastAPI: - """ - Creates the application from using the env vars as a context - Also stores inside the state all instances of classes - needed in other requests and used to share data. - """ +def setup_logger(settings: DynamicSidecarSettings): + # SEE https://github.com/ITISFoundation/osparc-simcore/issues/3148 + logging.basicConfig(level=settings.log_level) + logging.root.setLevel(settings.log_level) - dynamic_sidecar_settings = DynamicSidecarSettings.create_from_envs() - logging.basicConfig(level=dynamic_sidecar_settings.loglevel) - logging.root.setLevel(dynamic_sidecar_settings.loglevel) - logger.debug(dynamic_sidecar_settings.json(indent=2)) +def create_base_app() -> FastAPI: + # settings + settings = DynamicSidecarSettings.create_from_envs() + setup_logger(settings) + logger.debug(settings.json(indent=2)) - application = FastAPI( - debug=dynamic_sidecar_settings.DEBUG, + # minimal + app = FastAPI( + debug=settings.DEBUG, openapi_url=f"/api/{API_VTAG}/openapi.json", docs_url="/dev/doc", ) - override_fastapi_openapi_method(application) - - # store "settings" and "shared_store" for later usage - application.state.settings = dynamic_sidecar_settings - application.state.shared_store = SharedStore(settings=dynamic_sidecar_settings) # type: ignore - # used to keep track of the health of the application - # also will be used in the /health endpoint - application.state.application_health = ApplicationHealth() - - # enable debug if required - if dynamic_sidecar_settings.is_development_mode: - remote_debug_setup(application) - - if dynamic_sidecar_settings.RABBIT_SETTINGS: - setup_rabbitmq(application) - # requires rabbitmq to be in place - setup_background_log_fetcher(application) + override_fastapi_openapi_method(app) + app.state.settings = settings - # add routing paths - application.include_router(main_router) + app.include_router(main_router) + return app - # error handlers - application.add_exception_handler(NodeNotFound, node_not_found_error_handler) - application.add_exception_handler(BaseDynamicSidecarError, http_error_handler) - # also sets up mounted_volumes - setup_directory_watcher(application) - - def create_start_app_handler() -> Callable[[], Coroutine[Any, Any, None]]: - async def on_startup() -> None: - await login_registry(application.state.settings.REGISTRY_SETTINGS) - await volumes_fix_permissions(application.state.mounted_volumes) +def create_app(): + """ + Creates the application from using the env vars as a context + Also stores inside the state all instances of classes + needed in other requests and used to share data. 
+ """ - print(WELCOME_MSG, flush=True) + app = create_base_app() - return on_startup + # MODULES SETUP -------------- - def create_stop_app_handler() -> Callable[[], Coroutine[Any, Any, None]]: - async def on_shutdown() -> None: - await on_shutdown_handler(application) - logger.info("shutdown cleanup completed") + app.state.shared_store = SharedStore() + app.state.application_health = ApplicationHealth() - return on_shutdown + if app.state.settings.is_development_mode: + remote_debug_setup(app) - application.add_event_handler("startup", create_start_app_handler()) - application.add_event_handler("shutdown", create_stop_app_handler()) + if app.state.settings.RABBIT_SETTINGS: + setup_rabbitmq(app) + setup_background_log_fetcher(app) - return application + # also sets up mounted_volumes + setup_mounted_fs(app) + setup_directory_watcher(app) + + # ERROR HANDLERS ------------ + app.add_exception_handler(NodeNotFound, node_not_found_error_handler) + app.add_exception_handler(BaseDynamicSidecarError, http_error_handler) + + # EVENTS --------------------- + async def _on_startup() -> None: + await login_registry(app.state.settings.REGISTRY_SETTINGS) + await volumes_fix_permissions(app.state.mounted_volumes) + print(WELCOME_MSG, flush=True) + + async def _on_shutdown() -> None: + logger.info("Going to remove spawned containers") + result = await remove_the_compose_spec( + shared_store=app.state.shared_store, + settings=app.state.settings, + command_timeout=app.state.settings.DYNAMIC_SIDECAR_DOCKER_COMPOSE_DOWN_TIMEOUT, + ) + logger.info("Container removal did_succeed=%s\n%s", result[0], result[1]) + + logger.info("shutdown cleanup completed") + + app.add_event_handler("startup", _on_startup) + app.add_event_handler("shutdown", _on_shutdown) + + return app + + +class AppState: + def __init__(self, app: FastAPI): + self._app = app + + @property + def settings(self) -> DynamicSidecarSettings: + assert isinstance(self._app.state.settings, DynamicSidecarSettings) # nosec + return self._app.state.settings + + @property + def mounted_volumes(self) -> MountedVolumes: + assert isinstance(self._app.state.mounted_volumes, MountedVolumes) # nosec + return self._app.state.mounted_volumes + + @property + def shared_store(self) -> SharedStore: + assert isinstance(self._app.state.shared_store, SharedStore) # nosec + return self._app.state.shared_store diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py index ee8766b1b03..aaa32671921 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_logs.py @@ -1,7 +1,15 @@ +""" + BackgroundLogFetcher: + Creates background task that + reads every line of a container's log and + posts it as a message to rabbit's log channel (logger) +""" + + import logging from asyncio import CancelledError, Task, create_task from contextlib import suppress -from typing import Any, Callable, Coroutine, Dict, Optional, cast +from typing import Any, Callable, Coroutine, Optional, cast from fastapi import FastAPI @@ -15,6 +23,7 @@ async def _logs_fetcher_worker( container_name: str, dispatch_log: Callable[..., Coroutine[Any, Any, None]] ) -> None: logger.info("Started log fetching for container %s", container_name) + async with docker_client() as docker: container = await docker.containers.get(container_name) @@ -36,7 +45,7 @@ def __init__(self, app: FastAPI) 
-> None: self._app: FastAPI = app - self._log_processor_tasks: Dict[str, Task[None]] = {} + self._log_processor_tasks: dict[str, Task[None]] = {} @property def rabbitmq(self) -> RabbitMQ: @@ -52,18 +61,19 @@ async def start_log_feching(self, container_name: str) -> None: _logs_fetcher_worker( container_name=container_name, dispatch_log=self._dispatch_logs ), - name="rabbitmq_log_processor_tasks", + name=f"rabbitmq_log_processor_tasks/{container_name}", ) logger.info("Subscribed to fetch logs from '%s'", container_name) async def stop_log_fetching(self, container_name: str) -> None: logger.debug("Stopping logs fetching from container '%s'", container_name) - task = self._log_processor_tasks[container_name] + + task = self._log_processor_tasks.pop(container_name) task.cancel() with suppress(CancelledError): await task - del self._log_processor_tasks[container_name] + logger.debug("Logs fetching stopped for container '%s'", container_name) async def stop_fetcher(self) -> None: diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py index 19dab49f820..2b9134c1154 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/docker_utils.py @@ -1,6 +1,6 @@ import logging from contextlib import asynccontextmanager -from typing import Any, AsyncGenerator, Dict +from typing import Any, AsyncGenerator from uuid import UUID import aiodocker @@ -25,7 +25,7 @@ async def docker_client() -> AsyncGenerator[aiodocker.Docker, None]: await docker.close() -async def get_volume_by_label(label: str, run_id: UUID) -> Dict[str, Any]: +async def get_volume_by_label(label: str, run_id: UUID) -> dict[str, Any]: async with docker_client() as docker: filters = {"label": [f"source={label}", f"run_id={run_id}"]} params = {"filters": clean_filters(filters)} diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py index d6876c88c41..be746080a51 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/errors.py @@ -1,4 +1,3 @@ -from typing import List from uuid import UUID @@ -12,7 +11,7 @@ def __init__(self, nessage: str, status: int = 500) -> None: class VolumeNotFoundError(BaseDynamicSidecarError): - def __init__(self, source_label: str, run_id: UUID, volumes: List[str]) -> None: + def __init__(self, source_label: str, run_id: UUID, volumes: list[str]) -> None: super().__init__( ( f"Expected 1 volume with {source_label=}, {run_id=}, " diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py index 269175bcd1f..7f05b0baa5e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/rabbitmq.py @@ -5,7 +5,7 @@ import os import socket from asyncio import CancelledError, Queue, Task -from typing import Any, Dict, List, Optional, Union +from typing import Any import aio_pika from fastapi import FastAPI @@ -28,10 +28,11 @@ # limit logs displayed logging.getLogger("aio_pika").setLevel(logging.WARNING) + SLEEP_BETWEEN_SENDS: float = 1.0 -def _close_callback(sender: Any, exc: 
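stop_log_fetching above now pops the task from the registry before cancelling it and awaits the cancellation so the fetcher can clean up. The generic pattern as a small stand-alone sketch, not part of the diff (names are illustrative):

import asyncio
from contextlib import suppress


async def stop_registered_task(tasks: dict[str, asyncio.Task], name: str) -> None:
    # remove the entry first so the registry never references a task being torn down
    task = tasks.pop(name)
    task.cancel()
    # awaiting the cancelled task lets its cleanup (finally blocks) run to completion
    with suppress(asyncio.CancelledError):
        await task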
Optional[BaseException]) -> None: +def _close_callback(sender: Any, exc: BaseException | None) -> None: if exc: if isinstance(exc, CancelledError): log.info("Rabbit connection was cancelled", exc_info=True) @@ -43,7 +44,7 @@ def _close_callback(sender: Any, exc: Optional[BaseException]) -> None: ) -def _channel_close_callback(sender: Any, exc: Optional[BaseException]) -> None: +def _channel_close_callback(sender: Any, exc: BaseException | None) -> None: if exc: log.error( "Rabbit channel closed with exception from %s:", sender, exc_info=True @@ -55,7 +56,7 @@ async def _wait_till_rabbit_responsive(url: str) -> None: **RabbitMQRetryPolicyUponInitialization().kwargs ): with attempt: - connection = await aio_pika.connect(url, timeout=1.0) + connection: aio_pika.Connection = await aio_pika.connect(url, timeout=1.0) await connection.close() @@ -71,16 +72,16 @@ def __init__(self, app: FastAPI, max_messages_to_send: int = 100) -> None: self._project_id: ProjectID = settings.DY_SIDECAR_PROJECT_ID self._node_id: NodeID = settings.DY_SIDECAR_NODE_ID - self._connection: Optional[aio_pika.Connection] = None - self._channel: Optional[aio_pika.Channel] = None - self._logs_exchange: Optional[aio_pika.Exchange] = None - self._events_exchange: Optional[aio_pika.Exchange] = None + self._connection: aio_pika.Connection | None = None + self._channel: aio_pika.Channel | None = None + self._logs_exchange: aio_pika.Exchange | None = None + self._events_exchange: aio_pika.Exchange | None = None self.max_messages_to_send: int = max_messages_to_send # pylint: disable=unsubscriptable-object - self._channel_queues: Dict[str, Queue[str]] = {} + self._channel_queues: dict[str, Queue[str]] = {} self._keep_running: bool = True - self._queues_worker: Optional[Task[Any]] = None + self._queues_worker: Task[Any] | None = None async def connect(self) -> None: url = self._rabbit_settings.dsn @@ -136,7 +137,7 @@ async def _dispatch_messages_worker(self) -> None: await asyncio.sleep(SLEEP_BETWEEN_SENDS) - async def _publish_messages(self, messages: List[str]) -> None: + async def _publish_messages(self, messages: list[str]) -> None: data = LoggerRabbitMessage( node_id=self._node_id, user_id=self._user_id, @@ -164,7 +165,7 @@ async def _publish_event(self, action: RabbitEventMessageType) -> None: async def send_event_reload_iframe(self) -> None: await self._publish_event(action=RabbitEventMessageType.RELOAD_IFRAME) - async def post_log_message(self, log_msg: Union[str, List[str]]) -> None: + async def post_log_message(self, log_msg: str | list[str]) -> None: if isinstance(log_msg, str): log_msg = [log_msg] diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py index 6c8e98001bd..4e89646a612 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/settings.py @@ -1,7 +1,7 @@ -import logging +import warnings from functools import lru_cache from pathlib import Path -from typing import List, Optional, cast +from typing import Optional, cast from uuid import UUID from models_library.basic_types import BootModeEnum, PortInt @@ -13,34 +13,34 @@ from settings_library.docker_registry import RegistrySettings from settings_library.r_clone import RCloneSettings from settings_library.rabbit import RabbitSettings +from settings_library.utils_logging import MixinLoggingSettings -class 
DynamicSidecarSettings(BaseCustomSettings): +class DynamicSidecarSettings(BaseCustomSettings, MixinLoggingSettings): SC_BOOT_MODE: Optional[BootModeEnum] = Field( ..., description="boot mode helps determine if in development mode or normal operation", ) - # LOGGING - LOG_LEVEL: str = Field("WARNING") + DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR: Path = Field( + ..., + description="Base directory where dynamic-sidecar stores creates " + "and shares volumes between itself and the spawned containers. " + "It is used as a mount directory for the director-v2." + "Sidecar must have r/w permissions in this folder.", + ) - @validator("LOG_LEVEL") - @classmethod - def match_logging_level(cls, v: str) -> str: - try: - getattr(logging, v.upper()) - except AttributeError as err: - raise ValueError(f"{v.upper()} is not a valid level") from err - return v.upper() + # LOGGING + LOG_LEVEL: str = Field(default="WARNING") # SERVICE SERVER (see : https://www.uvicorn.org/settings/) DYNAMIC_SIDECAR_HOST: str = Field( - "0.0.0.0", # nosec + default="0.0.0.0", # nosec description="host where to bind the application on which to serve", ) DYNAMIC_SIDECAR_PORT: PortInt = Field( - 8000, description="port where the server will be currently serving" + default=8000, description="port where the server will be currently serving" ) DYNAMIC_SIDECAR_COMPOSE_NAMESPACE: str = Field( @@ -52,11 +52,11 @@ def match_logging_level(cls, v: str) -> str: ) DYNAMIC_SIDECAR_MAX_COMBINED_CONTAINER_NAME_LENGTH: PositiveInt = Field( - 63, description="the container name which will be used as hostname" + default=63, description="the container name which will be used as hostname" ) DYNAMIC_SIDECAR_STOP_AND_REMOVE_TIMEOUT: PositiveInt = Field( - 5, + default=5, description=( "When receiving SIGTERM the process has 10 seconds to cleanup its children " "forcing our children to stop in 5 seconds in all cases" @@ -64,16 +64,17 @@ def match_logging_level(cls, v: str) -> str: ) DEBUG: bool = Field( - False, + default=False, description="If set to True the application will boot into debug mode", ) DYNAMIC_SIDECAR_REMOTE_DEBUG_PORT: PortInt = Field( - 3000, description="ptsvd remote debugger starting port" + default=3000, description="ptsvd remote debugger starting port" ) DYNAMIC_SIDECAR_DOCKER_COMPOSE_DOWN_TIMEOUT: PositiveInt = Field( - 15, description="used during shutdown when containers swapend will be removed" + default=15, + description="used during shutdown when containers swapend will be removed", ) DY_SIDECAR_PATH_INPUTS: Path = Field( @@ -82,10 +83,10 @@ def match_logging_level(cls, v: str) -> str: DY_SIDECAR_PATH_OUTPUTS: Path = Field( ..., description="path where to expect the outputs folder" ) - DY_SIDECAR_STATE_PATHS: List[Path] = Field( + DY_SIDECAR_STATE_PATHS: list[Path] = Field( ..., description="list of additional paths to be synced" ) - DY_SIDECAR_STATE_EXCLUDE: List[str] = Field( + DY_SIDECAR_STATE_EXCLUDE: list[str] = Field( ..., description="list of patterns to exclude files when saving states" ) DY_SIDECAR_USER_ID: UserID @@ -98,15 +99,16 @@ def match_logging_level(cls, v: str) -> str: RABBIT_SETTINGS: Optional[RabbitSettings] = Field(auto_default_from_env=True) DY_SIDECAR_R_CLONE_SETTINGS: RCloneSettings = Field(auto_default_from_env=True) + @validator("LOG_LEVEL") + @classmethod + def _check_log_level(cls, value): + return cls.validate_log_level(value) + @property def is_development_mode(self) -> bool: """If in development mode this will be True""" return self.SC_BOOT_MODE is BootModeEnum.DEVELOPMENT - @property - def 
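The settings class above now delegates LOG_LEVEL validation to MixinLoggingSettings.validate_log_level. A stand-alone sketch of the behaviour this implies, not part of the diff and only assumed to mirror the validator that was removed (pydantic v1 style, as used by this codebase):

import logging

from pydantic import BaseSettings, validator


class _DemoSettings(BaseSettings):
    LOG_LEVEL: str = "WARNING"

    @validator("LOG_LEVEL")
    @classmethod
    def _check_log_level(cls, value: str) -> str:
        # reject level names unknown to the logging module, normalize to upper case
        if not hasattr(logging, value.upper()):
            raise ValueError(f"{value.upper()} is not a valid level")
        return value.upper()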
loglevel(self) -> int: - return int(getattr(logging, self.LOG_LEVEL)) - @property def rclone_settings_for_nodeports(self) -> Optional[RCloneSettings]: """ @@ -123,4 +125,8 @@ def rclone_settings_for_nodeports(self) -> Optional[RCloneSettings]: @lru_cache def get_settings() -> DynamicSidecarSettings: """used outside the context of a request""" + warnings.warn( + "Use instead app.state.settings", + DeprecationWarning, + ) return cast(DynamicSidecarSettings, DynamicSidecarSettings.create_from_envs()) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/shared_handlers.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/shared_handlers.py index 1854f3c154c..7f6697371e1 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/shared_handlers.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/shared_handlers.py @@ -1,7 +1,5 @@ import logging -from typing import Optional, Tuple - -from fastapi import FastAPI +from typing import Optional from ..models.domains.shared_store import SharedStore from .settings import DynamicSidecarSettings @@ -10,12 +8,30 @@ logger = logging.getLogger(__name__) +async def cleanup_containers_and_volumes( + shared_store: SharedStore, settings: DynamicSidecarSettings +) -> None: + cleanup_command = ( + "docker-compose --project-name {project} --file {file_path} rm --force -v" + ) + finished_without_errors, stdout = await write_file_and_run_command( + settings=settings, + file_content=shared_store.compose_spec, + command=cleanup_command, + command_timeout=None, + ) + if not finished_without_errors: + logger.warning( + "Unexpected error while running command\n%s:\n%s", cleanup_command, stdout + ) + + async def write_file_and_run_command( settings: DynamicSidecarSettings, - file_content: Optional[str], + file_content: str, command: str, command_timeout: Optional[float], -) -> Tuple[bool, str]: +) -> tuple[bool, str]: """The command which accepts {file_path} as an argument for string formatting""" # pylint: disable=not-async-context-manager @@ -31,15 +47,17 @@ async def write_file_and_run_command( async def remove_the_compose_spec( shared_store: SharedStore, settings: DynamicSidecarSettings, command_timeout: float -) -> Tuple[bool, str]: +) -> tuple[bool, str]: stored_compose_content = shared_store.compose_spec if stored_compose_content is None: return True, "No started spec to remove was found" + await cleanup_containers_and_volumes(shared_store, settings) + command = ( - "docker-compose -p {project} -f {file_path} " - "down --volumes --remove-orphans -t {stop_and_remove_timeout}" + 'docker-compose --project-name {project} --file "{file_path}" ' + "down --volumes --remove-orphans --timeout {stop_and_remove_timeout}" ) result = await write_file_and_run_command( settings=settings, @@ -52,16 +70,3 @@ async def remove_the_compose_spec( shared_store.container_names = [] return result - - -async def on_shutdown_handler(app: FastAPI) -> None: - logging.info("Going to remove spawned containers") - shared_store: SharedStore = app.state.shared_store - settings: DynamicSidecarSettings = app.state.settings - - result = await remove_the_compose_spec( - shared_store=shared_store, - settings=settings, - command_timeout=settings.DYNAMIC_SIDECAR_DOCKER_COMPOSE_DOWN_TIMEOUT, - ) - logging.info("Container removal did_succeed=%s\n%s", result[0], result[1]) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py 
b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py index b48e614f9e3..19186a82bb4 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/utils.py @@ -7,16 +7,15 @@ from collections import namedtuple from contextlib import asynccontextmanager from pathlib import Path -from typing import AsyncGenerator, List, Optional +from typing import AsyncGenerator, Optional import aiofiles import httpx import yaml from aiofiles import os as aiofiles_os -from async_timeout import timeout from settings_library.docker_registry import RegistrySettings from starlette import status -from tenacity._asyncio import AsyncRetrying +from tenacity import retry from tenacity.before_sleep import before_sleep_log from tenacity.stop import stop_after_attempt from tenacity.wait import wait_fixed @@ -36,39 +35,40 @@ class _RegistryNotReachableException(Exception): pass +@retry( + wait=wait_fixed(1), + stop=stop_after_attempt(1), + before_sleep=before_sleep_log(logger, logging.INFO), + reraise=True, +) async def _is_registry_reachable(registry_settings: RegistrySettings) -> None: - async for attempt in AsyncRetrying( - wait=wait_fixed(1), - stop=stop_after_attempt(1), - before_sleep=before_sleep_log(logger, logging.INFO), - reraise=True, - ): - with attempt: - async with httpx.AsyncClient() as client: - params = {} - if registry_settings.REGISTRY_AUTH: - params["auth"] = ( - registry_settings.REGISTRY_USER, - registry_settings.REGISTRY_PW.get_secret_value(), - ) - - protocol = "https" if registry_settings.REGISTRY_SSL else "http" - url = f"{protocol}://{registry_settings.api_url}/" - logging.info("Registry test url ='%s'", url) - response = await client.get(url, timeout=1, **params) - reachable = ( - response.status_code == status.HTTP_200_OK and response.json() == {} - ) - if not reachable: - logger.error("Response: %s", response) - error_message = ( - f"Could not reach registry {registry_settings.api_url} " - f"auth={registry_settings.REGISTRY_AUTH}" - ) - raise _RegistryNotReachableException(error_message) + async with httpx.AsyncClient() as client: + params = {} + if registry_settings.REGISTRY_AUTH: + params["auth"] = ( + registry_settings.REGISTRY_USER, + registry_settings.REGISTRY_PW.get_secret_value(), + ) + + protocol = "https" if registry_settings.REGISTRY_SSL else "http" + url = f"{protocol}://{registry_settings.api_url}/" + + logging.info("Registry test url ='%s'", url) + response = await client.get(url, timeout=1, **params) + reachable = response.status_code == status.HTTP_200_OK and response.json() == {} + if not reachable: + logger.error("Response: %s", response) + error_message = ( + f"Could not reach registry {registry_settings.api_url} " + f"auth={registry_settings.REGISTRY_AUTH}" + ) + raise _RegistryNotReachableException(error_message) async def login_registry(registry_settings: RegistrySettings) -> None: + """ + Creates ~/.docker/config.json and adds docker registry credentials + """ await _is_registry_reachable(registry_settings) def create_docker_config_file(registry_settings: RegistrySettings) -> None: @@ -77,9 +77,9 @@ def create_docker_config_file(registry_settings: RegistrySettings) -> None: docker_config = { "auths": { f"{registry_settings.resolved_registry_url}": { - "auth": base64.b64encode( - f"{user}:{password}".encode("utf-8") - ).decode("utf-8") + "auth": base64.b64encode(f"{user}:{password}".encode()).decode( + "utf-8" + ) } } } @@ -117,11 +117,8 @@ async def 
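_is_registry_reachable above swaps the explicit AsyncRetrying loop for tenacity's @retry decorator applied directly to the coroutine. A minimal sketch of the decorator form, not part of the diff (the URL and attempt count are illustrative; the patch itself keeps stop_after_attempt(1)):

import logging

import httpx
from tenacity import retry
from tenacity.before_sleep import before_sleep_log
from tenacity.stop import stop_after_attempt
from tenacity.wait import wait_fixed

logger = logging.getLogger(__name__)


@retry(
    wait=wait_fixed(1),
    stop=stop_after_attempt(3),
    before_sleep=before_sleep_log(logger, logging.INFO),
    reraise=True,
)
async def ping_registry(url: str) -> None:
    # each failing attempt is retried after 1s, up to 3 attempts, then re-raised
    async with httpx.AsyncClient() as client:
        response = await client.get(url, timeout=1)
        response.raise_for_status()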
async_command( stderr=asyncio.subprocess.STDOUT, ) - # because the Processes returned by create_subprocess_shell it is not possible to - # have a timeout otherwise nor to stream the response from the process. try: - async with timeout(command_timeout): - stdout, _ = await proc.communicate() + stdout, _ = await asyncio.wait_for(proc.communicate(), timeout=command_timeout) except asyncio.TimeoutError: message = ( f"{traceback.format_exc()}\nTimed out after {command_timeout} " @@ -139,7 +136,7 @@ async def async_command( ) -def assemble_container_names(validated_compose_content: str) -> List[str]: +def assemble_container_names(validated_compose_content: str) -> list[str]: """returns the list of container names from a validated compose_spec""" parsed_compose_spec = yaml.safe_load(validated_compose_content) return [ @@ -152,13 +149,10 @@ async def volumes_fix_permissions(mounted_volumes: MountedVolumes) -> None: # NOTE: by creating a hidden file on all mounted volumes # the same permissions are ensured and avoids # issues when starting the services - for volume_path in [ - mounted_volumes.disk_inputs_path, - mounted_volumes.disk_outputs_path, - ] + list(mounted_volumes.disk_state_paths()): + for volume_path in mounted_volumes.all_disk_paths(): hidden_file = volume_path / HIDDEN_FILE_NAME hidden_file.write_text( - f"Directory must not be empty.\nCreated by {__file__}.\nRequired by " - "oSPARC internals to properly enforce permissions on this " + f"Directory must not be empty.\nCreated by {__file__}.\n" + "Required by oSPARC internals to properly enforce permissions on this " "directory and all its files" ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py index a9b426e3b8b..13890cfee8c 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/core/validation.py @@ -2,7 +2,7 @@ import logging import os import re -from typing import Any, Dict, Generator, List, Tuple +from typing import Any, Generator import yaml @@ -39,7 +39,7 @@ def _assemble_container_name( return container_name -def _get_forwarded_env_vars(container_key: str) -> List[str]: +def _get_forwarded_env_vars(container_key: str) -> list[str]: """returns env vars targeted to each container in the compose spec""" results = [ # some services expect it, using it as empty @@ -60,14 +60,14 @@ def _get_forwarded_env_vars(container_key: str) -> List[str]: return results -def _extract_templated_entries(text: str) -> List[str]: +def _extract_templated_entries(text: str) -> list[str]: return re.findall(TEMPLATE_SEARCH_PATTERN, text) def _apply_templating_directives( stringified_compose_spec: str, - services: Dict[str, Any], - spec_services_to_container_name: Dict[str, str], + services: dict[str, Any], + spec_services_to_container_name: dict[str, str], ) -> str: """ Some custom rules are supported for replacing `container_name` @@ -104,11 +104,11 @@ def _apply_templating_directives( def _merge_env_vars( - compose_spec_env_vars: List[str], settings_env_vars: List[str] -) -> List[str]: + compose_spec_env_vars: list[str], settings_env_vars: list[str] +) -> list[str]: def _gen_parts_env_vars( - env_vars: List[str], - ) -> Generator[Tuple[str, str], None, None]: + env_vars: list[str], + ) -> Generator[tuple[str, str], None, None]: for env_var in env_vars: key, value = env_var.split("=") yield key, value @@ -126,7 +126,7 @@ def 
_gen_parts_env_vars( def _inject_backend_networking( - parsed_compose_spec: Dict[str, Any], network_name: str = "__backend__" + parsed_compose_spec: dict[str, Any], network_name: str = "__backend__" ) -> None: """ Put all containers in the compose spec in the same network. @@ -185,7 +185,7 @@ async def validate_compose_spec( if version.startswith("1"): raise InvalidComposeSpec(f"Provided spec version '{version}' is not supported") - spec_services_to_container_name: Dict[str, str] = {} + spec_services_to_container_name: dict[str, str] = {} spec_services = parsed_compose_spec["services"] for index, service in enumerate(spec_services): diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py index afaad5a39d2..52c91f22837 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/main.py @@ -2,7 +2,7 @@ """ from fastapi import FastAPI -from simcore_service_dynamic_sidecar.core.application import assemble_application +from simcore_service_dynamic_sidecar.core.application import create_app # SINGLETON FastAPI app -app: FastAPI = assemble_application() +the_app: FastAPI = create_app() diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/domains/shared_store.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/domains/shared_store.py index a956faddc27..fef5c19633a 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/domains/shared_store.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/domains/shared_store.py @@ -1,12 +1,13 @@ -from typing import List, Optional +from typing import Optional from pydantic import BaseModel, Field class SharedStore(BaseModel): compose_spec: Optional[str] = Field( - None, description="stores the stringified compose spec" + default=None, description="stores the stringified compose spec" ) - container_names: List[str] = Field( - [], description="stores the container names from the compose_spec" + container_names: list[str] = Field( + default_factory=list, + description="stores the container names from the compose_spec", ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py index 90d7a86d5db..4da644858b9 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/models/schemas/application_health.py @@ -5,8 +5,8 @@ class ApplicationHealth(BaseModel): is_healthy: bool = Field( - True, description="returns True if the service sis running correctly" + default=True, description="returns True if the service sis running correctly" ) error_message: Optional[str] = Field( - None, description="in case of error this gets set" + default=None, description="in case of error this gets set" ) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/data_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/data_manager.py index 55b4a711f5d..ee7c4bcf96a 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/data_manager.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/data_manager.py @@ -3,27 +3,23 @@ import tempfile from 
contextlib import asynccontextmanager from pathlib import Path -from typing import AsyncIterator, List +from typing import AsyncIterator from servicelib.archiving_utils import archive_dir from servicelib.pools import async_on_threadpool from simcore_sdk.node_data import data_manager -from simcore_service_dynamic_sidecar.core.settings import ( - DynamicSidecarSettings, - get_settings, -) +from simcore_service_dynamic_sidecar.core.settings import DynamicSidecarSettings logger = logging.getLogger(__name__) -async def pull_path_if_exists(path: Path) -> None: +async def pull_path_if_exists(path: Path, settings: DynamicSidecarSettings) -> None: """ If the path already exist in storage pull it. Otherwise it is assumed this is the first time the service starts. In each and every other case an error is raised and logged """ - settings: DynamicSidecarSettings = get_settings() if not await data_manager.is_file_present_in_storage( user_id=settings.DY_SIDECAR_USER_ID, @@ -55,11 +51,12 @@ async def _isolated_temp_zip_path(path_to_compress: Path) -> AsyncIterator[Path] await async_on_threadpool(lambda: shutil.rmtree(base_dir, ignore_errors=True)) -async def upload_path_if_exists(path: Path, state_exclude: List[str]) -> None: +async def upload_path_if_exists( + path: Path, state_exclude: list[str], settings: DynamicSidecarSettings +) -> None: """ Zips the path in a temporary directory and uploads to storage """ - settings: DynamicSidecarSettings = get_settings() # pylint: disable=unnecessary-comprehension logger.info("Files in %s: %s", path, [x for x in path.rglob("*")]) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/directory_watcher.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/directory_watcher.py index 896d6388e82..e5a8d6a6e6f 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/directory_watcher.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/directory_watcher.py @@ -15,7 +15,7 @@ from watchdog.events import FileSystemEvent, FileSystemEventHandler from watchdog.observers import Observer -from .mounted_fs import MountedVolumes, setup_mounted_fs +from .mounted_fs import MountedVolumes DETECTION_INTERVAL: float = 1.0 TASK_NAME_FOR_CLEANUP = f"{name}.InvokeTask" @@ -39,7 +39,7 @@ async def get_value(self) -> Optional[float]: return self._value -def async_run_once_after_event_chain( # type:ignore +def async_run_once_after_event_chain( detection_interval: float, ): """ @@ -51,11 +51,11 @@ def async_run_once_after_event_chain( # type:ignore returns: decorator to be applied to async functions """ - def internal(decorated_function: Callable[..., Awaitable[Any]]): # type:ignore + def internal(decorated_function: Callable[..., Awaitable[Any]]): last = AsyncLockedFloat(initial_value=None) @wraps(decorated_function) - async def wrapper(*args: Any, **kwargs: Any): # type:ignore + async def wrapper(*args: Any, **kwargs: Any): # skipping the first time the event chain starts if await last.get_value() is None: await last.set_value(time.time()) @@ -199,7 +199,8 @@ async def stop(self) -> None: def setup_directory_watcher(app: FastAPI) -> None: async def on_startup() -> None: - mounted_volumes: MountedVolumes = setup_mounted_fs(app) + mounted_volumes: MountedVolumes + mounted_volumes = app.state.mounted_volumes # nosec app.state.dir_watcher = DirectoryWatcherObservers() app.state.dir_watcher.observe_directory(mounted_volumes.disk_outputs_path) @@ -233,9 +234,9 @@ def directory_watcher_disabled(app: 
FastAPI) -> Generator[None, None, None]: enable_directory_watcher(app) -__all__ = [ +__all__: tuple[str, ...] = ( + "directory_watcher_disabled", "disable_directory_watcher", "enable_directory_watcher", - "directory_watcher_disabled", "setup_directory_watcher", -] +) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py index 2a1ef7d66fb..69755338072 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/mounted_fs.py @@ -1,19 +1,14 @@ import os from functools import cached_property from pathlib import Path -from typing import AsyncGenerator, Generator, List +from typing import AsyncGenerator, Generator, Iterator from uuid import UUID from fastapi import FastAPI -from simcore_service_dynamic_sidecar.core.settings import ( - DynamicSidecarSettings, - get_settings, -) +from simcore_service_dynamic_sidecar.core.settings import DynamicSidecarSettings from ..core.docker_utils import get_volume_by_label -DY_VOLUMES = Path("/dy-volumes") - def _ensure_path(path: Path) -> Path: path.mkdir(parents=True, exist_ok=True) @@ -41,50 +36,57 @@ def __init__( self, inputs_path: Path, outputs_path: Path, - state_paths: List[Path], - state_exclude: List[str], + state_paths: list[Path], + state_exclude: list[str], + compose_namespace: str, + dy_volumes: Path, ) -> None: self.inputs_path: Path = inputs_path self.outputs_path: Path = outputs_path - self.state_paths: List[Path] = state_paths - self.state_exclude: List[str] = state_exclude + self.state_paths: list[Path] = state_paths + self.state_exclude: list[str] = state_exclude + self.compose_namespace = compose_namespace + self._dy_volumes = dy_volumes self._ensure_directories() @cached_property def volume_name_inputs(self) -> str: """Same name as the namespace, to easily track components""" - compose_namespace = get_settings().DYNAMIC_SIDECAR_COMPOSE_NAMESPACE - return f"{compose_namespace}{_name_from_full_path(self.inputs_path)}" + return f"{self.compose_namespace}{_name_from_full_path(self.inputs_path)}" @cached_property def volume_name_outputs(self) -> str: - compose_namespace = get_settings().DYNAMIC_SIDECAR_COMPOSE_NAMESPACE - return f"{compose_namespace}{_name_from_full_path(self.outputs_path)}" + return f"{self.compose_namespace}{_name_from_full_path(self.outputs_path)}" def volume_name_state_paths(self) -> Generator[str, None, None]: - compose_namespace = get_settings().DYNAMIC_SIDECAR_COMPOSE_NAMESPACE for state_path in self.state_paths: - yield f"{compose_namespace}{_name_from_full_path(state_path)}" + yield f"{self.compose_namespace}{_name_from_full_path(state_path)}" @cached_property def disk_inputs_path(self) -> Path: - return _ensure_path(DY_VOLUMES / self.inputs_path.relative_to("/")) + return _ensure_path(self._dy_volumes / self.inputs_path.relative_to("/")) @cached_property def disk_outputs_path(self) -> Path: - return _ensure_path(DY_VOLUMES / self.outputs_path.relative_to("/")) + return _ensure_path(self._dy_volumes / self.outputs_path.relative_to("/")) - def disk_state_paths(self) -> Generator[Path, None, None]: + def disk_state_paths(self) -> Iterator[Path]: for state_path in self.state_paths: - yield _ensure_path(DY_VOLUMES / state_path.relative_to("/")) + yield _ensure_path(self._dy_volumes / state_path.relative_to("/")) + + def all_disk_paths(self) -> Iterator[Path]: + # PC: keeps iterator to 
follow same style as disk_state_paths but IMO it is overreaching + yield self.disk_inputs_path + yield self.disk_outputs_path + yield from self.disk_state_paths() def _ensure_directories(self) -> None: """ Creates the directories on its file system, these will be mounted elsewere. """ - _ensure_path(DY_VOLUMES) + _ensure_path(self._dy_volumes) self.disk_inputs_path # pylint:disable= pointless-statement self.disk_outputs_path # pylint:disable= pointless-statement set(self.disk_state_paths()) @@ -119,17 +121,18 @@ async def iter_state_paths_to_docker_volumes( def setup_mounted_fs(app: FastAPI) -> MountedVolumes: - # TODO: replace this with app version - settings: DynamicSidecarSettings = get_settings() + settings: DynamicSidecarSettings = app.state.settings app.state.mounted_volumes = MountedVolumes( inputs_path=settings.DY_SIDECAR_PATH_INPUTS, outputs_path=settings.DY_SIDECAR_PATH_OUTPUTS, state_paths=settings.DY_SIDECAR_STATE_PATHS, state_exclude=settings.DY_SIDECAR_STATE_EXCLUDE, + compose_namespace=settings.DYNAMIC_SIDECAR_COMPOSE_NAMESPACE, + dy_volumes=settings.DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR, ) return app.state.mounted_volumes -__all__ = ["MountedVolumes"] +__all__: tuple[str, ...] = ("MountedVolumes",) diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py index a6e05ef8da9..59e13bd53d5 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py @@ -127,7 +127,7 @@ async def upload_outputs(outputs_path: Path, port_keys: list[str]) -> None: await PORTS.set_multiple(ports_values) elapsed_time = time.perf_counter() - start_time - total_bytes = sum([_get_size_of_value(x) for x in ports_values.values()]) + total_bytes = sum(_get_size_of_value(x) for x in ports_values.values()) logger.info("Uploaded %s bytes in %s seconds", total_bytes, elapsed_time) finally: # clean up possible compressed files @@ -288,4 +288,7 @@ async def download_target_ports( return transferred_bytes -__all__ = ["dispatch_update_for_directory", "download_target_ports"] +__all__: tuple[str, ...] 
= ( + "dispatch_update_for_directory", + "download_target_ports", +) diff --git a/services/dynamic-sidecar/tests/conftest.py b/services/dynamic-sidecar/tests/conftest.py index a08b01dd838..3659f28bbe7 100644 --- a/services/dynamic-sidecar/tests/conftest.py +++ b/services/dynamic-sidecar/tests/conftest.py @@ -1,235 +1,170 @@ -# pylint: disable=unused-argument # pylint: disable=redefined-outer-name +# pylint: disable=too-many-arguments +# pylint: disable=unused-argument +# pylint: disable=unused-variable + -import asyncio import json -import os -import random +import logging import sys -import tempfile -import uuid from pathlib import Path -from typing import Any, AsyncGenerator, AsyncIterable, Iterator, List -from unittest.mock import AsyncMock, Mock +from uuid import UUID -import aiodocker import pytest -from _pytest.monkeypatch import MonkeyPatch -from async_asgi_testclient import TestClient -from fastapi import FastAPI -from pytest_mock.plugin import MockerFixture -from simcore_service_dynamic_sidecar.core import utils -from simcore_service_dynamic_sidecar.core.application import assemble_application -from simcore_service_dynamic_sidecar.core.docker_utils import docker_client -from simcore_service_dynamic_sidecar.core.settings import DynamicSidecarSettings -from simcore_service_dynamic_sidecar.core.shared_handlers import ( - write_file_and_run_command, -) -from simcore_service_dynamic_sidecar.models.domains.shared_store import SharedStore -from simcore_service_dynamic_sidecar.modules import mounted_fs +from faker import Faker +from models_library.projects import ProjectID +from models_library.projects_nodes import NodeID +from models_library.users import UserID +from pytest import MonkeyPatch +from pytest_simcore.helpers.utils_envs import EnvVarsDict, setenvs_as_envfile + +logger = logging.getLogger(__name__) pytest_plugins = [ + "pytest_simcore.docker_compose", + "pytest_simcore.docker_registry", "pytest_simcore.docker_swarm", "pytest_simcore.monkeypatch_extra", "pytest_simcore.pytest_global_environs", + "pytest_simcore.rabbit_service", + "pytest_simcore.repository_paths", + "pytest_simcore.tmp_path_extra", ] - -@pytest.fixture(scope="session") -def mock_dy_volumes() -> Iterator[Path]: - with tempfile.TemporaryDirectory() as temp_dir: - yield Path(temp_dir) +CURRENT_DIR = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent @pytest.fixture(scope="session") -def io_temp_dir() -> Iterator[Path]: - with tempfile.TemporaryDirectory() as temp_dir: - yield Path(temp_dir) +def project_slug_dir() -> Path: + folder = CURRENT_DIR.parent + assert folder.exists() + assert any(folder.glob("src/simcore_service_dynamic_sidecar")) + return folder -@pytest.fixture(scope="session") -def compose_namespace() -> str: - return f"dy-sidecar_{uuid.uuid4()}" +# +# Fixtures associated to the configuration of a dynamic-sidecar service +# - Can be used both to create new fixture or as references +# -@pytest.fixture(scope="session") -def inputs_dir(io_temp_dir: Path) -> Path: - return io_temp_dir / "inputs" - - -@pytest.fixture(scope="session") -def outputs_dir(io_temp_dir: Path) -> Path: - return io_temp_dir / "outputs" +@pytest.fixture +def dy_volumes(tmp_path: Path) -> Path: + """mount folder on the sidecar (path withn the sidecar)""" + return tmp_path / "dy-volumes" -@pytest.fixture(scope="session") -def state_paths_dirs(io_temp_dir: Path) -> List[Path]: - return [io_temp_dir / f"dir_{x}" for x in range(4)] - +@pytest.fixture +def container_base_dir() -> Path: + return Path("/data") 
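Note (illustrative sketch, not part of the patch): the conftest.py rework above replaces the old session-scoped `tempfile.TemporaryDirectory` fixtures with small, function-scoped fixtures built on pytest's `tmp_path` and the `faker` fixture, while the mount directory is now driven by the `DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR` environment variable instead of the removed `DY_VOLUMES` constant. A minimal sketch of the same fixture pattern, with hypothetical fixture names:

```python
# Sketch only: mirrors the fixture style introduced in this conftest.py.
# The fixture names (my_volumes_dir, my_namespace) are hypothetical.
from pathlib import Path

import pytest
from faker import Faker


@pytest.fixture
def my_volumes_dir(tmp_path: Path) -> Path:
    # tmp_path is created fresh for every test, so no manual cleanup is needed
    return tmp_path / "dy-volumes"


@pytest.fixture
def my_namespace(faker: Faker) -> str:
    # a unique namespace per test avoids clashes between test runs
    return f"dy-sidecar_{faker.uuid4()}"
```

Each test then gets an isolated volumes directory and compose namespace purely through environment variables (see `mock_environment` below), instead of the previous module-scoped monkeypatching of `mounted_fs.DY_VOLUMES`.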
-@pytest.fixture(scope="session") -def state_exclude_dirs(io_temp_dir: Path) -> List[Path]: - return [io_temp_dir / f"dir_exclude_{x}" for x in range(4)] +@pytest.fixture +def compose_namespace(faker: Faker) -> str: + return f"dy-sidecar_{faker.uuid4()}" -@pytest.fixture(scope="module") -def mock_environment( - monkeypatch_module: MonkeyPatch, - mock_dy_volumes: Path, - compose_namespace: str, - inputs_dir: Path, - outputs_dir: Path, - state_paths_dirs: List[Path], - state_exclude_dirs: List[Path], -) -> None: - monkeypatch_module.setenv("SC_BOOT_MODE", "production") - monkeypatch_module.setenv("DYNAMIC_SIDECAR_COMPOSE_NAMESPACE", compose_namespace) - monkeypatch_module.setenv("REGISTRY_AUTH", "false") - monkeypatch_module.setenv("REGISTRY_USER", "test") - monkeypatch_module.setenv("REGISTRY_PW", "test") - monkeypatch_module.setenv("REGISTRY_SSL", "false") - monkeypatch_module.setenv("DY_SIDECAR_USER_ID", "1") - monkeypatch_module.setenv("DY_SIDECAR_PROJECT_ID", f"{uuid.uuid4()}") - monkeypatch_module.setenv("DY_SIDECAR_NODE_ID", f"{uuid.uuid4()}") - monkeypatch_module.setenv("DY_SIDECAR_RUN_ID", f"{uuid.uuid4()}") - monkeypatch_module.setenv("DY_SIDECAR_PATH_INPUTS", str(inputs_dir)) - monkeypatch_module.setenv("DY_SIDECAR_PATH_OUTPUTS", str(outputs_dir)) - monkeypatch_module.setenv( - "DY_SIDECAR_STATE_PATHS", json.dumps([str(x) for x in state_paths_dirs]) - ) - monkeypatch_module.setenv( - "DY_SIDECAR_STATE_EXCLUDE", json.dumps([str(x) for x in state_exclude_dirs]) - ) - monkeypatch_module.setenv("RABBIT_SETTINGS", "null") - monkeypatch_module.setenv("S3_ENDPOINT", "endpoint") - monkeypatch_module.setenv("S3_ACCESS_KEY", "access_key") - monkeypatch_module.setenv("S3_SECRET_KEY", "secret_key") - monkeypatch_module.setenv("S3_BUCKET_NAME", "bucket_name") - monkeypatch_module.setenv("S3_SECURE", "false") - monkeypatch_module.setenv("R_CLONE_PROVIDER", "MINIO") +@pytest.fixture +def inputs_dir(container_base_dir: Path) -> Path: + return container_base_dir / "inputs" - monkeypatch_module.setattr(mounted_fs, "DY_VOLUMES", mock_dy_volumes) +@pytest.fixture +def outputs_dir(container_base_dir: Path) -> Path: + return container_base_dir / "outputs" -@pytest.fixture(scope="module") -def disable_registry_check(monkeypatch_module: MockerFixture) -> None: - async def _mock_is_registry_reachable(*args, **kwargs) -> None: - pass - monkeypatch_module.setattr( - utils, "_is_registry_reachable", _mock_is_registry_reachable - ) +@pytest.fixture +def state_paths_dirs(container_base_dir: Path) -> list[Path]: + return [container_base_dir / f"state_dir{i}" for i in range(4)] -@pytest.fixture(scope="module") -def app(mock_environment: None, disable_registry_check: None) -> FastAPI: - app = assemble_application() - app.state.rabbitmq = AsyncMock() - return app +@pytest.fixture +def state_exclude_dirs(container_base_dir: Path) -> list[Path]: + return [container_base_dir / f"exclude_{i}" for i in range(4)] @pytest.fixture -def dynamic_sidecar_settings() -> DynamicSidecarSettings: - return DynamicSidecarSettings.create_from_envs() +def user_id(faker: Faker) -> UserID: + return faker.pyint(min_value=1) @pytest.fixture -async def ensure_external_volumes( - compose_namespace: str, - inputs_dir: Path, - outputs_dir: Path, - state_paths_dirs: List[Path], - dynamic_sidecar_settings: DynamicSidecarSettings, -) -> AsyncGenerator[None, None]: - """ensures inputs and outputs volumes for the service are present""" - - volume_names = [] - for state_paths_dir in [inputs_dir, outputs_dir] + state_paths_dirs: - name_from_path = 
str(state_paths_dir).replace(os.sep, "_") - volume_names.append(f"{compose_namespace}{name_from_path}") - - async with docker_client() as client: - volumes = await asyncio.gather( - *[ - client.volumes.create( - { - "Labels": { - "source": volume_name, - "run_id": f"{dynamic_sidecar_settings.DY_SIDECAR_RUN_ID}", - } - } - ) - for volume_name in volume_names - ] - ) - - yield - - await asyncio.gather(*[volume.delete() for volume in volumes]) +def project_id(faker: Faker) -> ProjectID: + return faker.uuid4(cast_to=None) @pytest.fixture -async def test_client(app: FastAPI) -> AsyncIterable[TestClient]: - async with TestClient(app) as client: - yield client +def node_id(faker: Faker) -> NodeID: + return faker.uuid4(cast_to=None) -@pytest.fixture(autouse=True) -async def cleanup_containers( - app: FastAPI, ensure_external_volumes: None -) -> AsyncGenerator[None, None]: - yield - # run docker compose down here - - shared_store: SharedStore = app.state.shared_store - stored_compose_content = shared_store.compose_spec +@pytest.fixture +def run_id(faker: Faker) -> UUID: + return faker.uuid4(cast_to=None) - if stored_compose_content is None: - # if no compose-spec is stored skip this operation - return - settings: DynamicSidecarSettings = app.state.settings - command = ( - "docker-compose -p {project} -f {file_path} " - "down --remove-orphans -t {stop_and_remove_timeout}" +@pytest.fixture +def mock_environment( + monkeypatch: MonkeyPatch, + dy_volumes: Path, + compose_namespace: str, + inputs_dir: Path, + outputs_dir: Path, + state_paths_dirs: list[Path], + state_exclude_dirs: list[Path], + user_id: UserID, + project_id: ProjectID, + node_id: NodeID, + run_id: UUID, +) -> None: + """Main test environment used to build the application + + Override if new configuration for the app is needed. 
+ """ + # envs in Dockerfile + monkeypatch.setenv("SC_BOOT_MODE", "production") + monkeypatch.setenv("SC_BUILD_TARGET", "production") + monkeypatch.setenv("DYNAMIC_SIDECAR_DY_VOLUMES_MOUNT_DIR", f"{dy_volumes}") + + # envs on container + monkeypatch.setenv("DYNAMIC_SIDECAR_COMPOSE_NAMESPACE", compose_namespace) + + monkeypatch.setenv("REGISTRY_AUTH", "false") + monkeypatch.setenv("REGISTRY_USER", "test") + monkeypatch.setenv("REGISTRY_PW", "test") + monkeypatch.setenv("REGISTRY_SSL", "false") + + monkeypatch.setenv("DY_SIDECAR_USER_ID", f"{user_id}") + monkeypatch.setenv("DY_SIDECAR_PROJECT_ID", f"{project_id}") + monkeypatch.setenv("DY_SIDECAR_RUN_ID", f"{run_id}") + monkeypatch.setenv("DY_SIDECAR_NODE_ID", f"{node_id}") + monkeypatch.setenv("DY_SIDECAR_PATH_INPUTS", f"{inputs_dir}") + monkeypatch.setenv("DY_SIDECAR_PATH_OUTPUTS", f"{outputs_dir}") + monkeypatch.setenv( + "DY_SIDECAR_STATE_PATHS", json.dumps([f"{x}" for x in state_paths_dirs]) ) - await write_file_and_run_command( - settings=settings, - file_content=stored_compose_content, - command=command, - command_timeout=5.0, + monkeypatch.setenv( + "DY_SIDECAR_STATE_EXCLUDE", json.dumps([f"{x}" for x in state_exclude_dirs]) ) + monkeypatch.setenv("S3_ENDPOINT", "endpoint") + monkeypatch.setenv("S3_ACCESS_KEY", "access_key") + monkeypatch.setenv("S3_SECRET_KEY", "secret_key") + monkeypatch.setenv("S3_BUCKET_NAME", "bucket_name") + monkeypatch.setenv("S3_SECURE", "false") -@pytest.fixture -def mock_containers_get(mocker: MockerFixture) -> int: - """raises a DockerError with a random HTTP status which is also returned""" - mock_status_code = random.randint(1, 999) - - async def mock_get(*args: str, **kwargs: Any) -> None: - raise aiodocker.exceptions.DockerError( - status=mock_status_code, data=dict(message="aiodocker_mocked_error") - ) - - mocker.patch("aiodocker.containers.DockerContainers.get", side_effect=mock_get) - - return mock_status_code + monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO") @pytest.fixture -def tests_dir() -> Path: - return Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent - - -@pytest.fixture -def mock_dir_watcher_on_any_event( - app: FastAPI, monkeypatch: MonkeyPatch -) -> Iterator[Mock]: - - mock = Mock(return_value=None) - - monkeypatch.setattr( - app.state.dir_watcher.outputs_event_handle, "_invoke_push_directory", mock - ) - yield mock +def mock_environment_with_envdevel( + monkeypatch: MonkeyPatch, project_slug_dir: Path +) -> EnvVarsDict: + """Alternative environment loaded fron .env-devel. 
+ + .env-devel is used mainly to run CLI + """ + env_file = project_slug_dir / ".env-devel" + envs = setenvs_as_envfile(monkeypatch, env_file.read_text()) + return envs diff --git a/services/dynamic-sidecar/tests/unit/conftest.py b/services/dynamic-sidecar/tests/unit/conftest.py index a1f2817a25d..3c2007c3fa2 100644 --- a/services/dynamic-sidecar/tests/unit/conftest.py +++ b/services/dynamic-sidecar/tests/unit/conftest.py @@ -1,13 +1,184 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import asyncio +import logging +from typing import AsyncIterable, AsyncIterator +from unittest.mock import AsyncMock + import pytest +from aiodocker.volumes import DockerVolume +from async_asgi_testclient import TestClient from fastapi import FastAPI -from simcore_service_dynamic_sidecar.modules.mounted_fs import ( - MountedVolumes, - setup_mounted_fs, +from pytest_mock import MockerFixture +from simcore_service_dynamic_sidecar.core.application import AppState, create_app +from simcore_service_dynamic_sidecar.core.docker_utils import docker_client +from simcore_service_dynamic_sidecar.core.shared_handlers import ( + write_file_and_run_command, ) +from tenacity import retry +from tenacity.after import after_log +from tenacity.stop import stop_after_delay +from tenacity.wait import wait_fixed + +logger = logging.getLogger(__name__) + + +# +# APP and CLIENT fixtures +# +# In this context by default all external services are +# mocked (e.g. registry, rabbitmq, ...) +# +# @pytest.fixture -def mounted_volumes(app: FastAPI) -> MountedVolumes: - created_volumes: MountedVolumes = setup_mounted_fs(app) - assert created_volumes == app.state.mounted_volumes - return app.state.mounted_volumes +def mock_registry_service(mocker: MockerFixture) -> AsyncMock: + return mocker.patch( + "simcore_service_dynamic_sidecar.core.utils._is_registry_reachable", + autospec=True, + ) + + +@pytest.fixture +def mock_core_rabbitmq(mocker: MockerFixture) -> dict[str, AsyncMock]: + """mocks simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ member functions""" + return { + "connect": mocker.patch( + "simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ.connect", + return_value=None, + autospec=True, + ), + "send_event_reload_iframe": mocker.patch( + "simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ.send_event_reload_iframe", + return_value=None, + autospec=True, + ), + "post_log_message": mocker.patch( + "simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ.post_log_message", + return_value=None, + autospec=True, + ), + "close": mocker.patch( + "simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ.close", + return_value=None, + autospec=True, + ), + } + + +@pytest.fixture +def app( + mock_environment: None, + mock_registry_service: AsyncMock, + mock_core_rabbitmq: dict[str, AsyncMock], +) -> FastAPI: + """creates app with registry and rabbitMQ services mocked""" + app = create_app() + return app + + +@pytest.fixture +async def test_client(app: FastAPI) -> AsyncIterable[TestClient]: + async with TestClient(app) as client: + yield client + + +# +# DOCKER Fixtures +# +# + + +@pytest.fixture +async def ensure_external_volumes( + app: FastAPI, +) -> AsyncIterator[tuple[DockerVolume]]: + """ensures inputs and outputs volumes for the service are present + + Emulates creation of volumes by the directorv2 when it spawns the dynamic-sidecar service + """ + app_state = AppState(app) + volume_labels_source = [ + app_state.mounted_volumes.volume_name_inputs, + 
app_state.mounted_volumes.volume_name_outputs, + ] + list(app_state.mounted_volumes.volume_name_state_paths()) + + async with docker_client() as docker: + + volumes = await asyncio.gather( + *[ + docker.volumes.create( + { + "Labels": { + "source": source, + "run_id": f"{app_state.settings.DY_SIDECAR_RUN_ID}", + } + } + ) + for source in volume_labels_source + ] + ) + + # + # docker volume ls --format "{{.Name}} {{.Labels}}" | grep run_id | awk '{print $1}') + # + # + # Example + # { + # "CreatedAt": "2022-06-23T03:22:08+02:00", + # "Driver": "local", + # "Labels": { + # "run_id": "f7c1bd87-4da5-4709-9471-3d60c8a70639", + # "source": "dy-sidecar_e3e70682-c209-4cac-a29f-6fbed82c07cd_data_dir_2" + # }, + # "Mountpoint": "/var/lib/docker/volumes/22bfd79a50eb9097d45cc946736cb66f3670a2fadccb62a77ffbe5e1d88f0034/_data", + # "Name": "22bfd79a50eb9097d45cc946736cb66f3670a2fadccb62a77ffbe5e1d88f0034", + # "Options": null, + # "Scope": "local", + # "CreatedTime": 1655947328000, + # "Containers": {} + # } + + yield volumes + + @retry( + wait=wait_fixed(1), + stop=stop_after_delay(3), + reraise=True, + after=after_log(logger, logging.WARNING), + ) + async def _delete(volume): + # Ocasionally might raise because volumes are mount to closing containers + await volume.delete() + + deleted = await asyncio.gather( + *(_delete(volume) for volume in volumes), return_exceptions=True + ) + assert not [r for r in deleted if isinstance(r, Exception)] + + +@pytest.fixture +async def cleanup_containers(app: FastAPI) -> AsyncIterator[None]: + + app_state = AppState(app) + + yield + # run docker compose down here + + if app_state.shared_store.compose_spec is None: + # if no compose-spec is stored skip this operation + return + + command = ( + 'docker-compose --project-name {project} --file "{file_path}" ' + "down --remove-orphans --timeout {stop_and_remove_timeout}" + ) + await write_file_and_run_command( + settings=app_state.settings, + file_content=app_state.shared_store.compose_spec, + command=command, + command_timeout=5.0, + ) diff --git a/services/dynamic-sidecar/tests/unit/test_oas_spec.py b/services/dynamic-sidecar/tests/unit/test__oas_spec.py similarity index 69% rename from services/dynamic-sidecar/tests/unit/test_oas_spec.py rename to services/dynamic-sidecar/tests/unit/test__oas_spec.py index 1c64b65799f..5ee4bf4be2c 100644 --- a/services/dynamic-sidecar/tests/unit/test_oas_spec.py +++ b/services/dynamic-sidecar/tests/unit/test__oas_spec.py @@ -4,9 +4,9 @@ from fastapi import FastAPI -def test_openapi_spec(app: FastAPI, tests_dir: Path) -> None: +def test_openapi_spec(app: FastAPI, project_slug_dir: Path) -> None: spec_from_app = app.openapi() - open_api_json_file = tests_dir / ".." 
/ "openapi.json" + open_api_json_file = project_slug_dir / "openapi.json" stored_openapi_json_file = json.loads(open_api_json_file.read_text()) assert ( spec_from_app == stored_openapi_json_file diff --git a/services/dynamic-sidecar/tests/unit/test_api_containers.py b/services/dynamic-sidecar/tests/unit/test_api_containers.py index ee123f9d567..f63bc1370e7 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_containers.py +++ b/services/dynamic-sidecar/tests/unit/test_api_containers.py @@ -1,14 +1,15 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument - +# pylint: disable=unused-variable import asyncio import importlib import json +import random from collections import namedtuple from inspect import signature -from typing import Any, AsyncIterable, Dict, Iterable, List -from unittest.mock import AsyncMock +from typing import Any, AsyncIterable, Iterator +from unittest.mock import AsyncMock, Mock from uuid import uuid4 import aiodocker @@ -16,12 +17,16 @@ import pytest import yaml from aiodocker.containers import DockerContainer +from aiodocker.volumes import DockerVolume from async_asgi_testclient import TestClient +from faker import Faker from fastapi import FastAPI, status from models_library.services import ServiceOutput +from pytest import MonkeyPatch from pytest_mock.plugin import MockerFixture from simcore_sdk.node_ports_common.exceptions import NodeNotFound from simcore_service_dynamic_sidecar._meta import API_VTAG +from simcore_service_dynamic_sidecar.core.application import AppState from simcore_service_dynamic_sidecar.core.settings import DynamicSidecarSettings from simcore_service_dynamic_sidecar.core.shared_handlers import ( write_file_and_run_command, @@ -29,7 +34,6 @@ from simcore_service_dynamic_sidecar.core.utils import HIDDEN_FILE_NAME, async_command from simcore_service_dynamic_sidecar.core.validation import parse_compose_spec from simcore_service_dynamic_sidecar.models.domains.shared_store import SharedStore -from simcore_service_dynamic_sidecar.modules.mounted_fs import MountedVolumes ContainerTimes = namedtuple("ContainerTimes", "created, started_at, finished_at") @@ -40,6 +44,16 @@ # FIXTURES +@pytest.fixture +def client( + test_client: TestClient, + ensure_external_volumes: tuple[DockerVolume], + cleanup_containers, +): + """creates external volumes and provides a client to dy-sidecar service""" + return test_client + + @pytest.fixture def dynamic_sidecar_network_name() -> str: return "entrypoint_container_network" @@ -53,7 +67,9 @@ def compose_spec(dynamic_sidecar_network_name: str) -> str: "services": { "first-box": { "image": "busybox", - "networks": [dynamic_sidecar_network_name], + "networks": [ + dynamic_sidecar_network_name, + ], }, "second-box": {"image": "busybox"}, }, @@ -89,7 +105,7 @@ def selected_spec(request, compose_spec: str, compose_spec_single_service: str) return result -async def _docker_ps_a_container_names() -> List[str]: +async def _docker_ps_a_container_names() -> list[str]: command = 'docker ps -a --format "{{.Names}}"' finished_without_errors, stdout = await async_command( command=command, command_timeout=None @@ -101,11 +117,11 @@ async def _docker_ps_a_container_names() -> List[str]: async def _assert_compose_spec_pulled( compose_spec: str, settings: DynamicSidecarSettings -) -> None: +): """ensures all containers inside compose_spec are pulled""" command = ( - "docker-compose --project-name {project} --file {file_path} " + 'docker-compose --project-name {project} --file "{file_path}" ' "up --no-build --detach" 
) finished_without_errors, stdout = await write_file_and_run_command( @@ -130,13 +146,13 @@ async def _assert_compose_spec_pulled( async def _get_container_timestamps( - container_names: List[str], -) -> Dict[str, ContainerTimes]: - container_timestamps: Dict[str, ContainerTimes] = {} + container_names: list[str], +) -> dict[str, ContainerTimes]: + container_timestamps: dict[str, ContainerTimes] = {} async with aiodocker.Docker() as client: for container_name in container_names: container: DockerContainer = await client.containers.get(container_name) - container_inspect: Dict[str, Any] = await container.show() + container_inspect: dict[str, Any] = await container.show() container_timestamps[container_name] = ContainerTimes( created=container_inspect["Created"], started_at=container_inspect["State"]["StartedAt"], @@ -147,15 +163,15 @@ async def _get_container_timestamps( @pytest.fixture -async def started_containers(test_client: TestClient, compose_spec: str) -> List[str]: - settings: DynamicSidecarSettings = test_client.application.state.settings +async def started_containers(client: TestClient, compose_spec: str) -> list[str]: + settings: DynamicSidecarSettings = client.application.state.settings await _assert_compose_spec_pulled(compose_spec, settings) # start containers - response = await test_client.post(f"/{API_VTAG}/containers", data=compose_spec) + response = await client.post(f"/{API_VTAG}/containers", data=compose_spec) assert response.status_code == status.HTTP_202_ACCEPTED, response.text - shared_store: SharedStore = test_client.application.state.shared_store + shared_store: SharedStore = client.application.state.shared_store container_names = shared_store.container_names assert len(container_names) == 2 assert response.json() == container_names @@ -164,7 +180,7 @@ async def started_containers(test_client: TestClient, compose_spec: str) -> List @pytest.fixture -def not_started_containers() -> List[str]: +def not_started_containers() -> list[str]: return [f"missing-container-{i}" for i in range(5)] @@ -215,12 +231,12 @@ def mock_data_manager(mocker: MockerFixture) -> None: @pytest.fixture -def mock_port_keys() -> List[str]: +def mock_port_keys() -> list[str]: return ["first_port", "second_port"] @pytest.fixture -def mock_outputs_labels() -> Dict[str, ServiceOutput]: +def mock_outputs_labels() -> dict[str, ServiceOutput]: return { "output_port_1": ServiceOutput.parse_obj( ServiceOutput.Config.schema_extra["examples"][3] @@ -232,24 +248,14 @@ def mock_outputs_labels() -> Dict[str, ServiceOutput]: @pytest.fixture -def mutable_settings(test_client: TestClient) -> DynamicSidecarSettings: - settings: DynamicSidecarSettings = test_client.application.state.settings - # disable mutability for this test - settings.__config__.allow_mutation = True - settings.__config__.frozen = False - return settings - - -@pytest.fixture -def rabbitmq_mock(mocker, app: FastAPI) -> Iterable[None]: +def rabbitmq_mock(mocker, app: FastAPI) -> None: app.state.rabbitmq = mocker.AsyncMock() - yield @pytest.fixture -async def attachable_networks_and_ids() -> AsyncIterable[Dict[str, str]]: +async def attachable_networks_and_ids(faker: Faker) -> AsyncIterable[dict[str, str]]: # generate some network names - unique_id = uuid4() + unique_id = faker.uuid4() network_names = {f"test_network_{i}_{unique_id}": "" for i in range(10)} # create networks @@ -276,20 +282,20 @@ async def attachable_networks_and_ids() -> AsyncIterable[Dict[str, str]]: # UTILS -def _create_network_aliases(network_name: str) -> List[str]: +def 
_create_network_aliases(network_name: str) -> list[str]: return [f"alias_{i}_{network_name}" for i in range(10)] -async def _assert_enable_directory_watcher(test_client: TestClient) -> None: - response = await test_client.patch( +async def _assert_enable_directory_watcher(client: TestClient) -> None: + response = await client.patch( f"/{API_VTAG}/containers/directory-watcher", json=dict(is_enabled=True) ) assert response.status_code == status.HTTP_204_NO_CONTENT, response.text assert response.text == "" -async def _assert_disable_directory_watcher(test_client: TestClient) -> None: - response = await test_client.patch( +async def _assert_disable_directory_watcher(client: TestClient) -> None: + response = await client.patch( f"/{API_VTAG}/containers/directory-watcher", json=dict(is_enabled=False) ) assert response.status_code == status.HTTP_204_NO_CONTENT, response.text @@ -299,14 +305,12 @@ async def _assert_disable_directory_watcher(test_client: TestClient) -> None: # TESTS -def test_ensure_api_vtag_is_v1() -> None: +def test_ensure_api_vtag_is_v1(): assert API_VTAG == "v1" -async def test_start_containers_wrong_spec( - test_client: TestClient, rabbitmq_mock: None -) -> None: - response = await test_client.post( +async def test_start_containers_wrong_spec(client: TestClient, rabbitmq_mock: None): + response = await client.post( f"/{API_VTAG}/containers", data={"opsie": "shame on me"} ) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY @@ -314,37 +318,39 @@ async def test_start_containers_wrong_spec( async def test_start_same_space_twice( - compose_spec: str, mutable_settings: DynamicSidecarSettings -) -> None: - mutable_settings.DYNAMIC_SIDECAR_COMPOSE_NAMESPACE = "test_name_space_1" - await _assert_compose_spec_pulled(compose_spec, mutable_settings) + compose_spec: str, + client: TestClient, +): + settings = client.application.state.settings - mutable_settings.DYNAMIC_SIDECAR_COMPOSE_NAMESPACE = "test_name_space_2" - await _assert_compose_spec_pulled(compose_spec, mutable_settings) + settings_1 = settings.copy( + update={"DYNAMIC_SIDECAR_COMPOSE_NAMESPACE": "test_name_space_1"}, deep=True + ) + await _assert_compose_spec_pulled(compose_spec, settings_1) + settings_2 = settings.copy( + update={"DYNAMIC_SIDECAR_COMPOSE_NAMESPACE": "test_name_space_2"}, deep=True + ) + await _assert_compose_spec_pulled(compose_spec, settings_2) -async def test_compose_up( - test_client: TestClient, compose_spec: Dict[str, Any] -) -> None: - response = await test_client.post(f"/{API_VTAG}/containers", data=compose_spec) +async def test_compose_up(client: TestClient, compose_spec: dict[str, Any]): + response = await client.post(f"/{API_VTAG}/containers", data=compose_spec) assert response.status_code == status.HTTP_202_ACCEPTED, response.text - shared_store: SharedStore = test_client.application.state.shared_store + shared_store: SharedStore = client.application.state.shared_store container_names = shared_store.container_names assert response.json() == container_names -async def test_compose_up_spec_not_provided(test_client: TestClient) -> None: - response = await test_client.post(f"/{API_VTAG}/containers") +async def test_compose_up_spec_not_provided(client: TestClient): + response = await client.post(f"/{API_VTAG}/containers") assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text assert response.json() == {"detail": "\nProvided yaml is not valid!"} -async def test_compose_up_spec_invalid(test_client: TestClient) -> None: +async def 
test_compose_up_spec_invalid(client: TestClient): invalid_compose_spec = faker.Faker().text() # pylint: disable=no-member - response = await test_client.post( - f"/{API_VTAG}/containers", data=invalid_compose_spec - ) + response = await client.post(f"/{API_VTAG}/containers", data=invalid_compose_spec) assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY, response.text assert "Provided yaml is not valid!" in response.text # 28+ characters means the compos spec is also present in the error message @@ -352,16 +358,16 @@ async def test_compose_up_spec_invalid(test_client: TestClient) -> None: async def test_containers_down_after_starting( - test_client: TestClient, compose_spec: Dict[str, Any] -) -> None: + client: TestClient, compose_spec: dict[str, Any] +): # store spec first - response = await test_client.post(f"/{API_VTAG}/containers", data=compose_spec) + response = await client.post(f"/{API_VTAG}/containers", data=compose_spec) assert response.status_code == status.HTTP_202_ACCEPTED, response.text - shared_store: SharedStore = test_client.application.state.shared_store + shared_store: SharedStore = client.application.state.shared_store container_names = shared_store.container_names assert response.json() == container_names - response = await test_client.post( + response = await client.post( f"/{API_VTAG}/containers:down", query_string=dict(command_timeout=DEFAULT_COMMAND_TIMEOUT), ) @@ -370,9 +376,9 @@ async def test_containers_down_after_starting( async def test_containers_down_missing_spec( - test_client: TestClient, compose_spec: Dict[str, Any] -) -> None: - response = await test_client.post( + client: TestClient, compose_spec: dict[str, Any] +): + response = await client.post( f"/{API_VTAG}/containers:down", query_string=dict(command_timeout=DEFAULT_COMMAND_TIMEOUT), ) @@ -380,19 +386,12 @@ async def test_containers_down_missing_spec( assert response.json() == {"detail": "No spec for docker-compose down was found"} -def assert_keys_exist(result: Dict[str, Any]) -> bool: - for entry in result.values(): - assert "Status" in entry - assert "Error" in entry - return True - - async def test_containers_get( - test_client: TestClient, - started_containers: List[str], + client: TestClient, + started_containers: list[str], ensure_external_volumes: None, -) -> None: - response = await test_client.get(f"/{API_VTAG}/containers") +): + response = await client.get(f"/{API_VTAG}/containers") assert response.status_code == status.HTTP_200_OK, response.text decoded_response = response.json() @@ -403,55 +402,72 @@ async def test_containers_get( async def test_containers_get_status( - test_client: TestClient, - started_containers: List[str], + client: TestClient, + started_containers: list[str], ensure_external_volumes: None, -) -> None: - response = await test_client.get( +): + response = await client.get( f"/{API_VTAG}/containers", query_string=dict(only_status=True) ) assert response.status_code == status.HTTP_200_OK, response.text decoded_response = response.json() assert set(decoded_response) == set(started_containers) + + def assert_keys_exist(result: dict[str, Any]) -> bool: + for entry in result.values(): + assert "Status" in entry + assert "Error" in entry + return True + assert assert_keys_exist(decoded_response) is True -async def test_containers_inspect_docker_error( - test_client: TestClient, started_containers: List[str], mock_containers_get: int -) -> None: - response = await test_client.get(f"/{API_VTAG}/containers") - assert response.status_code == mock_containers_get, 
response.text +@pytest.fixture +def mock_aiodocker_containers_get(mocker: MockerFixture) -> int: + """raises a DockerError with a random HTTP status which is also returned""" + mock_status_code = random.randint(1, 999) + + async def mock_get(*args: str, **kwargs: Any) -> None: + raise aiodocker.exceptions.DockerError( + status=mock_status_code, data=dict(message="aiodocker_mocked_error") + ) + + mocker.patch("aiodocker.containers.DockerContainers.get", side_effect=mock_get) + + return mock_status_code async def test_containers_docker_status_docker_error( - test_client: TestClient, started_containers: List[str], mock_containers_get: int -) -> None: - response = await test_client.get(f"/{API_VTAG}/containers") - assert response.status_code == mock_containers_get, response.text + client: TestClient, + started_containers: list[str], + mock_aiodocker_containers_get: int, +): + response = await client.get(f"/{API_VTAG}/containers") + assert response.status_code == mock_aiodocker_containers_get, response.text async def test_container_inspect_logs_remove( - test_client: TestClient, started_containers: List[str] -) -> None: + client: TestClient, started_containers: list[str] +): for container in started_containers: # get container logs - response = await test_client.get(f"/{API_VTAG}/containers/{container}/logs") + response = await client.get(f"/{API_VTAG}/containers/{container}/logs") assert response.status_code == status.HTTP_200_OK, response.text # inspect container - response = await test_client.get(f"/{API_VTAG}/containers/{container}") + response = await client.get(f"/{API_VTAG}/containers/{container}") assert response.status_code == status.HTTP_200_OK, response.text parsed_response = response.json() assert parsed_response["Name"] == f"/{container}" async def test_container_logs_with_timestamps( - test_client: TestClient, started_containers: List[str] -) -> None: + client: TestClient, started_containers: list[str] +): for container in started_containers: # get container logs - response = await test_client.get( + response = await client.get( f"/{API_VTAG}/containers/{container}/logs", query_string=dict(timestamps=True), ) @@ -459,31 +475,31 @@ async def test_container_logs_with_timestamps( async def test_container_missing_container( - test_client: TestClient, not_started_containers: List[str] -) -> None: - def _expected_error_string(container: str) -> Dict[str, str]: + client: TestClient, not_started_containers: list[str] +): + def _expected_error_string(container: str) -> dict[str, str]: return dict( detail=f"No container '{container}' was started. 
Started containers '[]'" ) for container in not_started_containers: # get container logs - response = await test_client.get(f"/{API_VTAG}/containers/{container}/logs") + response = await client.get(f"/{API_VTAG}/containers/{container}/logs") assert response.status_code == status.HTTP_404_NOT_FOUND, response.text assert response.json() == _expected_error_string(container) # inspect container - response = await test_client.get(f"/{API_VTAG}/containers/{container}") + response = await client.get(f"/{API_VTAG}/containers/{container}") assert response.status_code == status.HTTP_404_NOT_FOUND, response.text assert response.json() == _expected_error_string(container) async def test_container_docker_error( - test_client: TestClient, - started_containers: List[str], - mock_containers_get: int, -) -> None: - def _expected_error_string(status_code: int) -> Dict[str, Any]: + client: TestClient, + started_containers: list[str], + mock_aiodocker_containers_get: int, +): + def _expected_error_string(status_code: int) -> dict[str, Any]: return { "errors": [ f"An unexpected Docker error occurred status={status_code}, message='aiodocker_mocked_error'" @@ -492,47 +508,58 @@ def _expected_error_string(status_code: int) -> Dict[str, Any]: for container in started_containers: # get container logs - response = await test_client.get(f"/{API_VTAG}/containers/{container}/logs") - assert response.status_code == mock_containers_get, response.text - assert response.json() == _expected_error_string(mock_containers_get) + response = await client.get(f"/{API_VTAG}/containers/{container}/logs") + assert response.status_code == mock_aiodocker_containers_get, response.text + assert response.json() == _expected_error_string(mock_aiodocker_containers_get) # inspect container - response = await test_client.get(f"/{API_VTAG}/containers/{container}") - assert response.status_code == mock_containers_get, response.text - assert response.json() == _expected_error_string(mock_containers_get) + response = await client.get(f"/{API_VTAG}/containers/{container}") + assert response.status_code == mock_aiodocker_containers_get, response.text + assert response.json() == _expected_error_string(mock_aiodocker_containers_get) -async def test_container_save_state( - test_client: TestClient, mock_data_manager: None -) -> None: - response = await test_client.post(f"/{API_VTAG}/containers/state:save") +async def test_container_save_state(client: TestClient, mock_data_manager: None): + response = await client.post(f"/{API_VTAG}/containers/state:save") assert response.status_code == status.HTTP_204_NO_CONTENT, response.text assert response.text == "" -async def test_container_restore_state( - test_client: TestClient, mock_data_manager: None -) -> None: - response = await test_client.post(f"/{API_VTAG}/containers/state:restore") +async def test_container_restore_state(client: TestClient, mock_data_manager: None): + response = await client.post(f"/{API_VTAG}/containers/state:restore") assert response.status_code == status.HTTP_204_NO_CONTENT, response.text assert response.text == "" async def test_container_pull_input_ports( - test_client: TestClient, mock_port_keys: List[str], mock_nodeports: None -) -> None: - response = await test_client.post( + client: TestClient, mock_port_keys: list[str], mock_nodeports: None +): + response = await client.post( f"/{API_VTAG}/containers/ports/inputs:pull", json=mock_port_keys ) assert response.status_code == status.HTTP_200_OK, response.text assert response.text == "42" +@pytest.fixture +def 
mock_dir_watcher_on_any_event( + app: FastAPI, monkeypatch: MonkeyPatch +) -> Iterator[Mock]: + + mock = Mock(return_value=None) + + monkeypatch.setattr( + app.state.dir_watcher.outputs_event_handle, "_invoke_push_directory", mock + ) + yield mock + + async def test_directory_watcher_disabling( - test_client: TestClient, + client: TestClient, mock_dir_watcher_on_any_event: AsyncMock, - mounted_volumes: MountedVolumes, -) -> None: +): + assert isinstance(client.application, FastAPI) + mounted_volumes = AppState(client.application).mounted_volumes + def _create_random_dir_in_inputs() -> int: dir_name = mounted_volumes.disk_outputs_path / f"{uuid4()}" dir_name.mkdir(parents=True) @@ -548,7 +575,7 @@ def _create_random_dir_in_inputs() -> int: EVENTS_PER_DIR_CREATION = 2 # by default directory-watcher it is disabled - await _assert_enable_directory_watcher(test_client) + await _assert_enable_directory_watcher(client) assert mock_dir_watcher_on_any_event.call_count == 0 dir_count = _create_random_dir_in_inputs() assert dir_count == 1 @@ -556,14 +583,14 @@ def _create_random_dir_in_inputs() -> int: assert mock_dir_watcher_on_any_event.call_count == EVENTS_PER_DIR_CREATION # disable and wait for events should have the same count as before - await _assert_disable_directory_watcher(test_client) + await _assert_disable_directory_watcher(client) dir_count = _create_random_dir_in_inputs() assert dir_count == 2 await asyncio.sleep(WAIT_FOR_DIRECTORY_WATCHER) assert mock_dir_watcher_on_any_event.call_count == EVENTS_PER_DIR_CREATION # enable and wait for events - await _assert_enable_directory_watcher(test_client) + await _assert_enable_directory_watcher(client) dir_count = _create_random_dir_in_inputs() assert dir_count == 3 await asyncio.sleep(WAIT_FOR_DIRECTORY_WATCHER) @@ -571,20 +598,22 @@ def _create_random_dir_in_inputs() -> int: async def test_container_create_outputs_dirs( - test_client: TestClient, - mock_outputs_labels: Dict[str, ServiceOutput], + client: TestClient, + mock_outputs_labels: dict[str, ServiceOutput], mock_dir_watcher_on_any_event: AsyncMock, - mounted_volumes: MountedVolumes, -) -> None: +): + assert isinstance(client.application, FastAPI) + mounted_volumes = AppState(client.application).mounted_volumes + # by default directory-watcher it is disabled - await _assert_enable_directory_watcher(test_client) + await _assert_enable_directory_watcher(client) assert mock_dir_watcher_on_any_event.call_count == 0 json_outputs_labels = { k: v.dict(by_alias=True) for k, v in mock_outputs_labels.items() } - response = await test_client.post( + response = await client.post( f"/{API_VTAG}/containers/ports/outputs/dirs", json={"outputs_labels": json_outputs_labels}, ) @@ -599,9 +628,9 @@ async def test_container_create_outputs_dirs( async def test_container_pull_output_ports( - test_client: TestClient, mock_port_keys: List[str], mock_nodeports: None -) -> None: - response = await test_client.post( + client: TestClient, mock_port_keys: list[str], mock_nodeports: None +): + response = await client.post( f"/{API_VTAG}/containers/ports/outputs:pull", json=mock_port_keys ) assert response.status_code == status.HTTP_200_OK, response.text @@ -609,9 +638,9 @@ async def test_container_pull_output_ports( async def test_container_push_output_ports( - test_client: TestClient, mock_port_keys: List[str], mock_nodeports: None -) -> None: - response = await test_client.post( + client: TestClient, mock_port_keys: list[str], mock_nodeports: None +): + response = await client.post( 
f"/{API_VTAG}/containers/ports/outputs:push", json=mock_port_keys ) assert response.status_code == status.HTTP_204_NO_CONTENT, response.text @@ -619,12 +648,12 @@ async def test_container_push_output_ports( async def test_container_push_output_ports_missing_node( - test_client: TestClient, - mock_port_keys: List[str], + client: TestClient, + mock_port_keys: list[str], missing_node_uuid: str, mock_node_missing: None, -) -> None: - response = await test_client.post( +): + response = await client.post( f"/{API_VTAG}/containers/ports/outputs:push", json=mock_port_keys ) assert response.status_code == status.HTTP_404_NOT_FOUND, response.text @@ -635,11 +664,9 @@ async def test_container_push_output_ports_missing_node( def _get_entrypoint_container_name( - test_client: TestClient, dynamic_sidecar_network_name: str + client: TestClient, dynamic_sidecar_network_name: str ) -> str: - parsed_spec = parse_compose_spec( - test_client.application.state.shared_store.compose_spec - ) + parsed_spec = parse_compose_spec(client.application.state.shared_store.compose_spec) container_name = None for service_name, service_details in parsed_spec["services"].items(): if dynamic_sidecar_network_name in service_details.get("networks", []): @@ -650,57 +677,51 @@ def _get_entrypoint_container_name( async def test_containers_entrypoint_name_ok( - test_client: TestClient, + client: TestClient, dynamic_sidecar_network_name: str, - started_containers: List[str], -) -> None: + started_containers: list[str], +): filters = json.dumps({"network": dynamic_sidecar_network_name}) - response = await test_client.get(f"/{API_VTAG}/containers/name?filters={filters}") + response = await client.get(f"/{API_VTAG}/containers/name?filters={filters}") assert response.status_code == status.HTTP_200_OK, response.text assert response.json() == _get_entrypoint_container_name( - test_client, dynamic_sidecar_network_name + client, dynamic_sidecar_network_name ) async def test_containers_entrypoint_name_containers_not_started( - test_client: TestClient, + client: TestClient, dynamic_sidecar_network_name: str, - started_containers: List[str], -) -> None: + started_containers: list[str], +): entrypoint_container = _get_entrypoint_container_name( - test_client, dynamic_sidecar_network_name + client, dynamic_sidecar_network_name ) # remove the container from the spec - parsed_spec = parse_compose_spec( - test_client.application.state.shared_store.compose_spec - ) + parsed_spec = parse_compose_spec(client.application.state.shared_store.compose_spec) del parsed_spec["services"][entrypoint_container] - test_client.application.state.shared_store.compose_spec = yaml.safe_dump( - parsed_spec - ) + client.application.state.shared_store.compose_spec = yaml.safe_dump(parsed_spec) filters = json.dumps({"network": dynamic_sidecar_network_name}) - response = await test_client.get(f"/{API_VTAG}/containers/name?filters={filters}") + response = await client.get(f"/{API_VTAG}/containers/name?filters={filters}") assert response.status_code == status.HTTP_404_NOT_FOUND, response.text assert response.json() == { "detail": "No container found for network=entrypoint_container_network" } -async def test_containers_restart( - test_client: TestClient, compose_spec: Dict[str, Any] -) -> None: +async def test_containers_restart(client: TestClient, compose_spec: dict[str, Any]): # store spec first - response = await test_client.post(f"/{API_VTAG}/containers", data=compose_spec) + response = await client.post(f"/{API_VTAG}/containers", data=compose_spec) assert 
response.status_code == status.HTTP_202_ACCEPTED, response.text - shared_store: SharedStore = test_client.application.state.shared_store + shared_store: SharedStore = client.application.state.shared_store container_names = shared_store.container_names assert response.json() == container_names container_timestamps_before = await _get_container_timestamps(container_names) - response = await test_client.post( + response = await client.post( f"/{API_VTAG}/containers:restart", query_string=dict(command_timeout=DEFAULT_COMMAND_TIMEOUT), ) @@ -720,24 +741,24 @@ async def test_containers_restart( async def test_attach_detach_container_to_network( docker_swarm: None, - test_client: TestClient, + client: TestClient, selected_spec: str, - attachable_networks_and_ids: Dict[str, str], -) -> None: - response = await test_client.post(f"/{API_VTAG}/containers", data=selected_spec) + attachable_networks_and_ids: dict[str, str], +): + response = await client.post(f"/{API_VTAG}/containers", data=selected_spec) assert response.status_code == status.HTTP_202_ACCEPTED, response.text - shared_store: SharedStore = test_client.application.state.shared_store + shared_store: SharedStore = client.application.state.shared_store container_names = shared_store.container_names assert response.json() == container_names - async with aiodocker.Docker() as client: + async with aiodocker.Docker() as docker: for container_name in container_names: for network_name, network_id in attachable_networks_and_ids.items(): network_aliases = _create_network_aliases(network_name) # attach network to containers - for _ in range(2): # callin 2 times in a row - response = await test_client.post( + for _ in range(2): # calling 2 times in a row + response = await client.post( f"/{API_VTAG}/containers/{container_name}/networks:attach", json={ "network_id": network_id, @@ -748,7 +769,7 @@ async def test_attach_detach_container_to_network( response.status_code == status.HTTP_204_NO_CONTENT ), response.text - container = await client.containers.get(container_name) + container = await docker.containers.get(container_name) container_inspect = await container.show() networks = container_inspect["NetworkSettings"]["Networks"] assert network_id in networks @@ -756,7 +777,7 @@ async def test_attach_detach_container_to_network( # detach network from containers for _ in range(2): # running twice in a row - response = await test_client.post( + response = await client.post( f"/{API_VTAG}/containers/{container_name}/networks:detach", json={"network_id": network_id}, ) @@ -764,7 +785,7 @@ async def test_attach_detach_container_to_network( response.status_code == status.HTTP_204_NO_CONTENT ), response.text - container = await client.containers.get(container_name) + container = await docker.containers.get(container_name) container_inspect = await container.show() networks = container_inspect["NetworkSettings"]["Networks"] assert network_id in networks diff --git a/services/dynamic-sidecar/tests/unit/test_api_health.py b/services/dynamic-sidecar/tests/unit/test_api_health.py index 2d0553a91e6..64648f10666 100644 --- a/services/dynamic-sidecar/tests/unit/test_api_health.py +++ b/services/dynamic-sidecar/tests/unit/test_api_health.py @@ -1,12 +1,9 @@ -import pytest from async_asgi_testclient import TestClient from fastapi import status from simcore_service_dynamic_sidecar.models.schemas.application_health import ( ApplicationHealth, ) -pytestmark = pytest.mark.asyncio - async def test_is_healthy(test_client: TestClient) -> None: 
     test_client.application.state.application_health.is_healthy = True
diff --git a/services/dynamic-sidecar/tests/unit/test_core_application.py b/services/dynamic-sidecar/tests/unit/test_core_application.py
new file mode 100644
index 00000000000..83112cace58
--- /dev/null
+++ b/services/dynamic-sidecar/tests/unit/test_core_application.py
@@ -0,0 +1,28 @@
+# pylint: disable=redefined-outer-name
+# pylint: disable=unused-argument
+# pylint: disable=unused-variable
+import inspect
+
+from pytest_simcore.helpers.typing_env import EnvVarsDict
+from simcore_service_dynamic_sidecar.core.application import AppState, create_app
+from simcore_service_dynamic_sidecar.core.settings import DynamicSidecarSettings
+
+
+def test_create_app(mock_environment_with_envdevel: EnvVarsDict):
+    app = create_app()
+    assert isinstance(app.state.settings, DynamicSidecarSettings)
+
+
+def test_AppState_decorator_class(mock_environment_with_envdevel: EnvVarsDict):
+    app = create_app()
+    app_state = AppState(app)
+
+    # ensure exposed properties are init after creation
+    properties = inspect.getmembers(AppState, lambda o: isinstance(o, property))
+    for prop_name, prop in properties:
+        # app.state.prop_name -> ReturnType annotation?
+        value = getattr(app_state, prop_name)
+        assert isinstance(value, inspect.signature(prop.fget).return_annotation)
+
+        # app.state.prop_name == app_state.prop_name
+        assert getattr(app.state, prop_name) == value
diff --git a/services/dynamic-sidecar/tests/unit/test_core_docker_logs.py b/services/dynamic-sidecar/tests/unit/test_core_docker_logs.py
index 154337cabc7..b730cf5c157 100644
--- a/services/dynamic-sidecar/tests/unit/test_core_docker_logs.py
+++ b/services/dynamic-sidecar/tests/unit/test_core_docker_logs.py
@@ -2,88 +2,26 @@
 # pylint: disable=unused-argument

 import asyncio
-import json
-from pathlib import Path
-from typing import AsyncIterable, Dict, Iterable, List
+from typing import AsyncIterable
 from unittest.mock import AsyncMock
-from uuid import uuid4

 import aiodocker
 import pytest
-from _pytest.monkeypatch import MonkeyPatch
 from async_asgi_testclient import TestClient
 from fastapi import FastAPI
-from simcore_service_dynamic_sidecar.core.application import assemble_application
+from pytest import MonkeyPatch
 from simcore_service_dynamic_sidecar.core.docker_logs import (
     _get_background_log_fetcher,
     start_log_fetching,
     stop_log_fetching,
 )
-from simcore_service_dynamic_sidecar.modules import mounted_fs
-
-pytestmark = pytest.mark.asyncio
-

 # FIXTURES


-@pytest.fixture(scope="module")
-def app(
-    monkeypatch_module: MonkeyPatch,
-    mock_dy_volumes: Path,
-    inputs_dir: Path,
-    outputs_dir: Path,
-    state_paths_dirs: List[Path],
-    state_exclude_dirs: List[Path],
-    disable_registry_check: None,
-) -> Iterable[FastAPI]:
-    monkeypatch_module.setenv("SC_BOOT_MODE", "production")
-    monkeypatch_module.setenv("DYNAMIC_SIDECAR_COMPOSE_NAMESPACE", "test-space")
-    monkeypatch_module.setenv("REGISTRY_AUTH", "false")
-    monkeypatch_module.setenv("REGISTRY_USER", "test")
-    monkeypatch_module.setenv("REGISTRY_PW", "test")
-    monkeypatch_module.setenv("REGISTRY_SSL", "false")
-    monkeypatch_module.setenv("DY_SIDECAR_USER_ID", "1")
-    monkeypatch_module.setenv("DY_SIDECAR_PROJECT_ID", f"{uuid4()}")
-    monkeypatch_module.setenv("DY_SIDECAR_NODE_ID", f"{uuid4()}")
-    monkeypatch_module.setenv("DY_SIDECAR_RUN_ID", f"{uuid4()}")
-    monkeypatch_module.setenv("DY_SIDECAR_PATH_INPUTS", str(inputs_dir))
-    monkeypatch_module.setenv("DY_SIDECAR_PATH_OUTPUTS", str(outputs_dir))
-    monkeypatch_module.setenv(
"DY_SIDECAR_STATE_PATHS", json.dumps([str(x) for x in state_paths_dirs]) - ) - monkeypatch_module.setenv( - "DY_SIDECAR_STATE_EXCLUDE", json.dumps([str(x) for x in state_exclude_dirs]) - ) - - monkeypatch_module.setattr(mounted_fs, "DY_VOLUMES", mock_dy_volumes) - - monkeypatch_module.setenv("S3_ENDPOINT", "endpoint") - monkeypatch_module.setenv("S3_ACCESS_KEY", "access_key") - monkeypatch_module.setenv("S3_SECRET_KEY", "secret_key") - monkeypatch_module.setenv("S3_BUCKET_NAME", "bucket_name") - monkeypatch_module.setenv("S3_SECURE", "false") - monkeypatch_module.setenv("R_CLONE_PROVIDER", "MINIO") - - yield assemble_application() - - @pytest.fixture -def mock_rabbitmq(mocker) -> Iterable[Dict[str, AsyncMock]]: - yield { - "connect": mocker.patch( - "simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ.connect", - return_value=None, - ), - "post_log_message": mocker.patch( - "simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ.post_log_message", - return_value=None, - ), - "close": mocker.patch( - "simcore_service_dynamic_sidecar.core.rabbitmq.RabbitMQ.close", - return_value=None, - ), - } +def mock_environment(monkeypatch: MonkeyPatch, mock_environment: None) -> None: + monkeypatch.setenv("DYNAMIC_SIDECAR_COMPOSE_NAMESPACE", "test-space") @pytest.fixture @@ -107,18 +45,20 @@ async def container_name() -> AsyncIterable[str]: async def test_background_log_fetcher( - mock_rabbitmq: Dict[str, AsyncMock], test_client: TestClient, container_name: str -) -> None: - app: FastAPI = test_client.application + mock_core_rabbitmq: dict[str, AsyncMock], + test_client: TestClient, + container_name: str, + app: FastAPI, +): assert _get_background_log_fetcher(app=app) is not None - assert mock_rabbitmq["connect"].call_count == 1 + assert mock_core_rabbitmq["connect"].call_count == 1 await start_log_fetching(app=app, container_name=container_name) # wait for background log fetcher await asyncio.sleep(1) - assert mock_rabbitmq["post_log_message"].call_count == 1 + assert mock_core_rabbitmq["post_log_message"].call_count == 1 await stop_log_fetching(app=app, container_name=container_name) - assert mock_rabbitmq["connect"].call_count == 1 + assert mock_core_rabbitmq["connect"].call_count == 1 diff --git a/services/dynamic-sidecar/tests/unit/test_docker_utils.py b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py similarity index 97% rename from services/dynamic-sidecar/tests/unit/test_docker_utils.py rename to services/dynamic-sidecar/tests/unit/test_core_docker_utils.py index ec44140b0f9..456d9563a21 100644 --- a/services/dynamic-sidecar/tests/unit/test_docker_utils.py +++ b/services/dynamic-sidecar/tests/unit/test_core_docker_utils.py @@ -1,5 +1,6 @@ # pylint: disable=redefined-outer-name # pylint: disable=unused-argument +# pylint: disable=unused-variable from typing import AsyncIterable from uuid import UUID @@ -9,8 +10,6 @@ from simcore_service_dynamic_sidecar.core.docker_utils import get_volume_by_label from simcore_service_dynamic_sidecar.core.errors import VolumeNotFoundError -pytestmark = pytest.mark.asyncio - @pytest.fixture(scope="session") def volume_name() -> str: diff --git a/services/dynamic-sidecar/tests/unit/test_core_rabbitmq.py b/services/dynamic-sidecar/tests/unit/test_core_rabbitmq.py deleted file mode 100644 index 6ef83742d5d..00000000000 --- a/services/dynamic-sidecar/tests/unit/test_core_rabbitmq.py +++ /dev/null @@ -1,200 +0,0 @@ -# pylint: disable=unused-argument -# pylint: disable=redefined-outer-name -# pylint: disable=too-many-arguments -import asyncio -import 
json -import uuid -from asyncio import AbstractEventLoop -from pathlib import Path -from pprint import pformat -from typing import Iterator, List - -import aio_pika -import pytest -from _pytest.fixtures import FixtureRequest -from _pytest.monkeypatch import MonkeyPatch -from fastapi.applications import FastAPI -from models_library.projects import ProjectID -from models_library.projects_nodes import NodeID -from models_library.rabbitmq_messages import LoggerRabbitMessage -from models_library.users import UserID -from pytest_mock.plugin import MockerFixture -from settings_library.rabbit import RabbitSettings -from simcore_service_dynamic_sidecar.core.application import assemble_application -from simcore_service_dynamic_sidecar.core.rabbitmq import SLEEP_BETWEEN_SENDS, RabbitMQ -from simcore_service_dynamic_sidecar.modules import mounted_fs - -pytestmark = pytest.mark.asyncio - - -pytest_plugins = [ - "pytest_simcore.docker_compose", - "pytest_simcore.docker_swarm", - "pytest_simcore.rabbit_service", - "pytest_simcore.repository_paths", - "pytest_simcore.tmp_path_extra", - "pytest_simcore.pytest_global_environs", -] - -pytest_simcore_core_services_selection = ["rabbit"] - -# FIXTURE - - -@pytest.fixture(scope="module") -def event_loop(request: FixtureRequest) -> Iterator[AbstractEventLoop]: - loop = asyncio.get_event_loop_policy().new_event_loop() - yield loop - loop.close() - - -@pytest.fixture(scope="module") -def user_id() -> UserID: - return 1 - - -@pytest.fixture(scope="module") -def project_id() -> ProjectID: - return uuid.uuid4() - - -@pytest.fixture(scope="module") -def node_id() -> NodeID: - return uuid.uuid4() - - -@pytest.fixture(scope="module") -def run_id() -> uuid.UUID: - return uuid.uuid4() - - -@pytest.fixture -def mock_environment( - event_loop: AbstractEventLoop, - monkeypatch_module: MonkeyPatch, - mock_dy_volumes: Path, - compose_namespace: str, - inputs_dir: Path, - outputs_dir: Path, - state_paths_dirs: List[Path], - state_exclude_dirs: List[Path], - user_id: UserID, - project_id: ProjectID, - node_id: NodeID, - run_id: uuid.UUID, - rabbit_service: RabbitSettings, -) -> None: - monkeypatch_module.setenv("SC_BOOT_MODE", "production") - monkeypatch_module.setenv("DYNAMIC_SIDECAR_COMPOSE_NAMESPACE", compose_namespace) - monkeypatch_module.setenv("REGISTRY_AUTH", "false") - monkeypatch_module.setenv("REGISTRY_USER", "test") - monkeypatch_module.setenv("REGISTRY_PW", "test") - monkeypatch_module.setenv("REGISTRY_SSL", "false") - monkeypatch_module.setenv("DY_SIDECAR_USER_ID", f"{user_id}") - monkeypatch_module.setenv("DY_SIDECAR_PROJECT_ID", f"{project_id}") - monkeypatch_module.setenv("DY_SIDECAR_RUN_ID", f"{run_id}") - monkeypatch_module.setenv("DY_SIDECAR_NODE_ID", f"{node_id}") - monkeypatch_module.setenv("DY_SIDECAR_PATH_INPUTS", str(inputs_dir)) - monkeypatch_module.setenv("DY_SIDECAR_PATH_OUTPUTS", str(outputs_dir)) - monkeypatch_module.setenv( - "DY_SIDECAR_STATE_PATHS", json.dumps([str(x) for x in state_paths_dirs]) - ) - monkeypatch_module.setenv( - "DY_SIDECAR_STATE_EXCLUDE", json.dumps([str(x) for x in state_exclude_dirs]) - ) - # TODO: PC->ANE: this is already guaranteed in the pytest_simcore.rabbit_service fixture - monkeypatch_module.setenv("RABBIT_HOST", rabbit_service.RABBIT_HOST) - monkeypatch_module.setenv("RABBIT_PORT", f"{rabbit_service.RABBIT_PORT}") - monkeypatch_module.setenv("RABBIT_USER", rabbit_service.RABBIT_USER) - monkeypatch_module.setenv( - "RABBIT_PASSWORD", rabbit_service.RABBIT_PASSWORD.get_secret_value() - ) - # --- - - 
monkeypatch_module.setattr(mounted_fs, "DY_VOLUMES", mock_dy_volumes) - - monkeypatch_module.setenv("S3_ENDPOINT", "endpoint") - monkeypatch_module.setenv("S3_ACCESS_KEY", "access_key") - monkeypatch_module.setenv("S3_SECRET_KEY", "secret_key") - monkeypatch_module.setenv("S3_BUCKET_NAME", "bucket_name") - monkeypatch_module.setenv("S3_SECURE", "false") - monkeypatch_module.setenv("R_CLONE_PROVIDER", "MINIO") - - -@pytest.fixture -def app(mock_environment: None) -> FastAPI: - return assemble_application() - - -# UTILS - - -# TESTS - - -async def test_rabbitmq( - rabbit_service: RabbitSettings, - rabbit_queue: aio_pika.Queue, - mocker: MockerFixture, - user_id: UserID, - project_id: ProjectID, - node_id: NodeID, - app: FastAPI, -): - rabbit = RabbitMQ(app) - assert rabbit - - mock_close_connection_cb = mocker.patch( - "simcore_service_dynamic_sidecar.core.rabbitmq._close_callback" - ) - mock_close_channel_cb = mocker.patch( - "simcore_service_dynamic_sidecar.core.rabbitmq._channel_close_callback" - ) - - incoming_data: List[LoggerRabbitMessage] = [] - - async def rabbit_message_handler(message: aio_pika.IncomingMessage): - incoming_data.append(LoggerRabbitMessage.parse_raw(message.body)) - - await rabbit_queue.consume(rabbit_message_handler, exclusive=True, no_ack=True) - - await rabbit.connect() - assert rabbit._connection.ready # pylint: disable=protected-access - - log_msg: str = "I am logging" - log_messages: List[str] = ["I", "am a logger", "man..."] - log_more_messages: List[str] = [f"msg{1}" for i in range(10)] - - await rabbit.post_log_message(log_msg) - await rabbit.post_log_message(log_messages) - - # make sure the first 2 messages are - # sent in the same chunk - await asyncio.sleep(SLEEP_BETWEEN_SENDS * 1.1) - await rabbit.post_log_message(log_more_messages) - # wait for all the messages to be delivered, - # need to make sure all messages are delivered - await asyncio.sleep(SLEEP_BETWEEN_SENDS * 1.1) - - # if this fails the above sleep did not work - - assert len(incoming_data) == 2, f"missing incoming data: {pformat(incoming_data)}" - - assert incoming_data[0] == LoggerRabbitMessage( - messages=[log_msg] + log_messages, - node_id=node_id, - project_id=project_id, - user_id=user_id, - ) - - assert incoming_data[1] == LoggerRabbitMessage( - messages=log_more_messages, - node_id=node_id, - project_id=project_id, - user_id=user_id, - ) - - # ensure closes correctly - await rabbit.close() - mock_close_connection_cb.assert_called_once() - mock_close_channel_cb.assert_called_once() diff --git a/services/dynamic-sidecar/tests/unit/test_core_settings.py b/services/dynamic-sidecar/tests/unit/test_core_settings.py index 9ec3a9988b4..08b8fc3e161 100644 --- a/services/dynamic-sidecar/tests/unit/test_core_settings.py +++ b/services/dynamic-sidecar/tests/unit/test_core_settings.py @@ -2,52 +2,12 @@ # pylint: disable=redefined-outer-name -import uuid -from pathlib import Path, PosixPath +from simcore_service_dynamic_sidecar.core.settings import DynamicSidecarSettings -import pytest -from _pytest.monkeypatch import MonkeyPatch -from simcore_service_dynamic_sidecar.core.settings import ( - DynamicSidecarSettings, - get_settings, -) - -@pytest.fixture -def tmp_dir(tmp_path: PosixPath) -> Path: - return Path(tmp_path) - - -@pytest.fixture -def mocked_non_request_settings(tmp_dir: Path, monkeypatch: MonkeyPatch) -> None: - inputs_dir = tmp_dir / "inputs" - outputs_dir = tmp_dir / "outputs" - - monkeypatch.setenv("SC_BOOT_MODE", "production") - monkeypatch.setenv("DYNAMIC_SIDECAR_COMPOSE_NAMESPACE", 
"test-space") - monkeypatch.setenv("REGISTRY_AUTH", "false") - monkeypatch.setenv("REGISTRY_USER", "test") - monkeypatch.setenv("REGISTRY_PW", "test") - monkeypatch.setenv("REGISTRY_SSL", "false") - monkeypatch.setenv("DY_SIDECAR_PATH_INPUTS", str(inputs_dir)) - monkeypatch.setenv("DY_SIDECAR_PATH_OUTPUTS", str(outputs_dir)) - monkeypatch.setenv("DY_SIDECAR_USER_ID", "1") - monkeypatch.setenv("DY_SIDECAR_PROJECT_ID", f"{uuid.uuid4()}") - monkeypatch.setenv("DY_SIDECAR_NODE_ID", f"{uuid.uuid4()}") - - monkeypatch.setenv("S3_ENDPOINT", "endpoint") - monkeypatch.setenv("S3_ACCESS_KEY", "access_key") - monkeypatch.setenv("S3_SECRET_KEY", "secret_key") - monkeypatch.setenv("S3_BUCKET_NAME", "bucket_name") - monkeypatch.setenv("S3_SECURE", "false") - monkeypatch.setenv("R_CLONE_PROVIDER", "MINIO") - - -def test_non_request_dynamic_sidecar_settings( - mocked_non_request_settings: None, -) -> None: +def test_settings_with_mock_environment(mock_environment): assert DynamicSidecarSettings.create_from_envs() -def test_cached_settings_is_same_object(mocked_non_request_settings: None) -> None: - assert id(get_settings()) == id(get_settings()) +def test_settings_with_envdevel_file(mock_environment_with_envdevel): + assert DynamicSidecarSettings.create_from_envs() diff --git a/services/dynamic-sidecar/tests/unit/test_utils.py b/services/dynamic-sidecar/tests/unit/test_core_utils.py similarity index 79% rename from services/dynamic-sidecar/tests/unit/test_utils.py rename to services/dynamic-sidecar/tests/unit/test_core_utils.py index d6757838531..38037b170f9 100644 --- a/services/dynamic-sidecar/tests/unit/test_utils.py +++ b/services/dynamic-sidecar/tests/unit/test_core_utils.py @@ -2,18 +2,10 @@ # pylint: disable=expression-not-assigned import pytest -from _pytest.monkeypatch import MonkeyPatch +from pytest import MonkeyPatch from settings_library.docker_registry import RegistrySettings from simcore_service_dynamic_sidecar.core.utils import _is_registry_reachable -pytestmark = pytest.mark.asyncio - -pytest_plugins = [ - "pytest_simcore.docker_registry", - "pytest_simcore.docker_swarm", - "pytest_simcore.pytest_global_environs", -] - @pytest.fixture def registry_with_auth( diff --git a/services/dynamic-sidecar/tests/unit/test_modules_directory_watcher.py b/services/dynamic-sidecar/tests/unit/test_modules_directory_watcher.py index 68a1ad8f0ee..a440be9f345 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_directory_watcher.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_directory_watcher.py @@ -8,8 +8,7 @@ from unittest.mock import AsyncMock import pytest -from _pytest.monkeypatch import MonkeyPatch -from py._path.local import LocalPath +from pytest import MonkeyPatch from simcore_service_dynamic_sidecar.modules import directory_watcher from simcore_service_dynamic_sidecar.modules.directory_watcher import ( DirectoryWatcherObservers, @@ -23,7 +22,6 @@ # - todo make it run on a separate thread, already there # - todo use absolute patterns for monitoring -pytestmark = pytest.mark.asyncio TICK_INTERVAL = 0.001 @@ -40,16 +38,11 @@ def patch_directory_watcher(monkeypatch: MonkeyPatch) -> Iterator[AsyncMock]: yield mocked_upload_data -@pytest.fixture -def temp_dir(tmpdir: LocalPath) -> Path: - return Path(tmpdir) - - # UTILS -async def _generate_event_burst(temp_dir: Path, subfolder: str = None) -> None: - full_dir_path = temp_dir if subfolder is None else temp_dir / subfolder +async def _generate_event_burst(tmp_path: Path, subfolder: str = None) -> None: + full_dir_path = tmp_path if 
subfolder is None else tmp_path / subfolder full_dir_path.mkdir(parents=True, exist_ok=True) file_path_1 = full_dir_path / "file1.txt" file_path_2 = full_dir_path / "file2.txt" @@ -70,11 +63,11 @@ async def _generate_event_burst(temp_dir: Path, subfolder: str = None) -> None: async def test_run_observer( patch_directory_watcher: AsyncMock, - temp_dir: Path, + tmp_path: Path, ) -> None: directory_watcher_observers = DirectoryWatcherObservers() - directory_watcher_observers.observe_directory(temp_dir) + directory_watcher_observers.observe_directory(tmp_path) directory_watcher_observers.start() directory_watcher_observers.start() @@ -82,12 +75,12 @@ async def test_run_observer( await asyncio.sleep(TICK_INTERVAL) # generates the first event chain - await _generate_event_burst(temp_dir) + await _generate_event_burst(tmp_path) await asyncio.sleep(2) # generates the second event chain - await _generate_event_burst(temp_dir, "ciao") + await _generate_event_burst(tmp_path, "ciao") await directory_watcher_observers.stop() await directory_watcher_observers.stop() diff --git a/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py b/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py index 9e739654da4..3a4e7ac574b 100644 --- a/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py +++ b/services/dynamic-sidecar/tests/unit/test_modules_mounted_fs.py @@ -4,12 +4,16 @@ import os from pathlib import Path -from typing import List from uuid import UUID import pytest +from aiodocker.volumes import DockerVolume from fastapi import FastAPI -from simcore_service_dynamic_sidecar.modules import mounted_fs +from simcore_service_dynamic_sidecar.core.application import AppState +from simcore_service_dynamic_sidecar.modules.mounted_fs import ( + MountedVolumes, + _name_from_full_path, +) # UTILS @@ -27,31 +31,32 @@ def path_to_transform() -> Path: @pytest.fixture -def run_id(app: FastAPI) -> UUID: - return app.state.settings.DY_SIDECAR_RUN_ID +def mounted_volumes(app: FastAPI) -> MountedVolumes: + return AppState(app).mounted_volumes # TESTS -def test_name_from_full_path(path_to_transform: Path) -> None: - assert mounted_fs._name_from_full_path( # pylint: disable=protected-access +def test_name_from_full_path(path_to_transform: Path): + assert _name_from_full_path( # pylint: disable=protected-access path_to_transform ) == _replace_slashes(path_to_transform) -def test_setup_ok(mounted_volumes: mounted_fs.MountedVolumes) -> None: +def test_setup_ok(mounted_volumes: MountedVolumes): assert mounted_volumes async def test_expected_paths_and_volumes( - mounted_volumes: mounted_fs.MountedVolumes, + ensure_external_volumes: tuple[DockerVolume], + mounted_volumes: MountedVolumes, inputs_dir: Path, outputs_dir: Path, - state_paths_dirs: List[Path], + state_paths_dirs: list[Path], compose_namespace: str, run_id: UUID, -) -> None: +): assert ( len(set(mounted_volumes.volume_name_state_paths())) == len( @@ -68,15 +73,15 @@ async def test_expected_paths_and_volumes( # check location on disk assert ( mounted_volumes.disk_outputs_path - == mounted_fs.DY_VOLUMES / outputs_dir.relative_to("/") + == mounted_volumes._dy_volumes / outputs_dir.relative_to("/") ) assert ( mounted_volumes.disk_inputs_path - == mounted_fs.DY_VOLUMES / inputs_dir.relative_to("/") + == mounted_volumes._dy_volumes / inputs_dir.relative_to("/") ) assert set(mounted_volumes.disk_state_paths()) == { - mounted_fs.DY_VOLUMES / x.relative_to("/") for x in state_paths_dirs + mounted_volumes._dy_volumes / x.relative_to("/") for x in 
state_paths_dirs } # check volume mount point diff --git a/services/dynamic-sidecar/tests/unit/with_rabbit/test_core_rabbitmq.py b/services/dynamic-sidecar/tests/unit/with_rabbit/test_core_rabbitmq.py new file mode 100644 index 00000000000..fb53c795843 --- /dev/null +++ b/services/dynamic-sidecar/tests/unit/with_rabbit/test_core_rabbitmq.py @@ -0,0 +1,119 @@ +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument +# pylint: disable=unused-variable + +import asyncio +from pprint import pformat + +import aio_pika +import pytest +from async_asgi_testclient import TestClient +from fastapi.applications import FastAPI +from models_library.projects import ProjectID +from models_library.projects_nodes import NodeID +from models_library.rabbitmq_messages import LoggerRabbitMessage +from models_library.users import UserID +from pytest import MonkeyPatch +from pytest_mock.plugin import MockerFixture +from settings_library.rabbit import RabbitSettings +from simcore_service_dynamic_sidecar.core.application import create_app +from simcore_service_dynamic_sidecar.core.rabbitmq import SLEEP_BETWEEN_SENDS, RabbitMQ + +pytest_simcore_core_services_selection = [ + "rabbit", +] + +# FIXTURE + + +@pytest.fixture +def mock_environment( + mock_environment: None, + monkeypatch: MonkeyPatch, + rabbit_service: RabbitSettings, +) -> None: + + # TODO: PC->ANE: this is already guaranteed in the pytest_simcore.rabbit_service fixture + monkeypatch.setenv("RABBIT_HOST", rabbit_service.RABBIT_HOST) + monkeypatch.setenv("RABBIT_PORT", f"{rabbit_service.RABBIT_PORT}") + monkeypatch.setenv("RABBIT_USER", rabbit_service.RABBIT_USER) + monkeypatch.setenv( + "RABBIT_PASSWORD", rabbit_service.RABBIT_PASSWORD.get_secret_value() + ) + # --- + + +@pytest.fixture +def app(mock_environment: None) -> FastAPI: + """app w/o mocking registry or rabbit""" + return create_app() + + +async def test_rabbitmq( + rabbit_queue: aio_pika.Queue, + mocker: MockerFixture, + user_id: UserID, + project_id: ProjectID, + node_id: NodeID, + test_client: TestClient, +): + app = test_client.application + assert isinstance(app, FastAPI) + + rabbit = app.state.rabbitmq + assert isinstance(rabbit, RabbitMQ) + + mock_close_connection_cb = mocker.patch( + "simcore_service_dynamic_sidecar.core.rabbitmq._close_callback" + ) + mock_close_channel_cb = mocker.patch( + "simcore_service_dynamic_sidecar.core.rabbitmq._channel_close_callback" + ) + + incoming_data: list[LoggerRabbitMessage] = [] + + async def rabbit_message_handler(message: aio_pika.IncomingMessage): + incoming_data.append(LoggerRabbitMessage.parse_raw(message.body)) + + await rabbit_queue.consume(rabbit_message_handler, exclusive=True, no_ack=True) + + await rabbit.connect() + assert rabbit._connection.ready # pylint: disable=protected-access + + log_msg: str = "I am logging" + log_messages: list[str] = ["I", "am a logger", "man..."] + log_more_messages: list[str] = [f"msg{1}" for i in range(10)] + + await rabbit.post_log_message(log_msg) + await rabbit.post_log_message(log_messages) + + # make sure the first 2 messages are + # sent in the same chunk + await asyncio.sleep(SLEEP_BETWEEN_SENDS * 1.1) + await rabbit.post_log_message(log_more_messages) + # wait for all the messages to be delivered, + # need to make sure all messages are delivered + await asyncio.sleep(SLEEP_BETWEEN_SENDS * 1.1) + + # if this fails the above sleep did not work + + assert len(incoming_data) == 2, f"missing incoming data: {pformat(incoming_data)}" + + assert incoming_data[0] == LoggerRabbitMessage( + 
messages=[log_msg] + log_messages, + node_id=node_id, + project_id=project_id, + user_id=user_id, + ) + + assert incoming_data[1] == LoggerRabbitMessage( + messages=log_more_messages, + node_id=node_id, + project_id=project_id, + user_id=user_id, + ) + + # ensure closes correctly + await rabbit.close() + mock_close_connection_cb.assert_called_once() + mock_close_channel_cb.assert_called_once() diff --git a/services/storage/requirements/_base.txt b/services/storage/requirements/_base.txt index 269ee89c281..1cd5b4a0cbd 100644 --- a/services/storage/requirements/_base.txt +++ b/services/storage/requirements/_base.txt @@ -4,7 +4,7 @@ # # pip-compile --output-file=requirements/_base.txt --strip-extras requirements/_base.in # -aiobotocore==2.2.0 +aiobotocore==2.3.3 # via -r requirements/_base.in aiodebug==2.3.0 # via diff --git a/services/web/client/source/class/osparc/Application.js b/services/web/client/source/class/osparc/Application.js index 8ec30456910..102ae83aa77 100644 --- a/services/web/client/source/class/osparc/Application.js +++ b/services/web/client/source/class/osparc/Application.js @@ -324,12 +324,17 @@ qx.Class.define("osparc.Application", { view = new osparc.auth.LoginPageS4L(); this.__loadView(view); break; - default: + case "tis": + view = new osparc.auth.LoginPageTI(); + this.__loadView(view); + break; + default: { view = new osparc.auth.LoginPage(); this.__loadView(view, { top: "15%" }); break; + } } view.addListener("done", () => this.__restart(), this); }, diff --git a/services/web/client/source/class/osparc/auth/LoginPage.js b/services/web/client/source/class/osparc/auth/LoginPage.js index 3594f002e48..774061e60a0 100644 --- a/services/web/client/source/class/osparc/auth/LoginPage.js +++ b/services/web/client/source/class/osparc/auth/LoginPage.js @@ -45,12 +45,21 @@ qx.Class.define("osparc.auth.LoginPage", { layout.setRowFlex(1, 1); layout.setColumnFlex(0, 1); this._setLayout(layout); - - const image = this._getLogoWPlatform(); - this._add(image, { - row: 0, - column: 0 - }); + osparc.utils.LibVersions.getPlatformName() + .then(platformName => { + let image = null; + const now = new Date().getTime(); + const afterKZ = new Date("2022-07-01").getTime(); + if ((now < afterKZ) && platformName === "master") { + image = this._getLogoWPlatform2(); + } else { + image = this._getLogoWPlatform(); + } + this._add(image, { + row: 0, + column: 0 + }); + }); const pages = this._getLoginStack(); this._add(pages, { @@ -75,6 +84,38 @@ qx.Class.define("osparc.auth.LoginPage", { return image; }, + _getLogoWPlatform2: function() { + const container = new qx.ui.container.Stack(); + [ + "osparc/kz_1.jpg", + "osparc/kz_2.jpg", + "osparc/kz_3.png", + "osparc/kz_4.png" + ].forEach((src, i) => { + const layout = new qx.ui.container.Composite(new qx.ui.layout.HBox()); + layout.add(new qx.ui.core.Spacer(), { + flex: 1 + }); + const image = new qx.ui.basic.Image(src).set({ + allowShrinkX: true, + allowShrinkY: true, + width: 300, + height: 150, + scale: true + }); + image.addListener("tap", () => { + const nextIdx = i === 3 ? 
0 : i+1; + container.setSelection([container.getSelectables()[nextIdx]]); + }); + layout.add(image); + layout.add(new qx.ui.core.Spacer(), { + flex: 1 + }); + container.add(layout); + }); + return container; + }, + _getLoginStack: function() { const pages = new qx.ui.container.Stack().set({ allowGrowX: false, diff --git a/services/web/client/source/class/osparc/auth/LoginPageTI.js b/services/web/client/source/class/osparc/auth/LoginPageTI.js new file mode 100644 index 00000000000..87863eb32bd --- /dev/null +++ b/services/web/client/source/class/osparc/auth/LoginPageTI.js @@ -0,0 +1,81 @@ +/* ************************************************************************ + + osparc - the simcore frontend + + https://osparc.io + + Copyright: + 2022 IT'IS Foundation, https://itis.swiss + + License: + MIT: https://opensource.org/licenses/MIT + + Authors: + * Odei Maiz (odeimaiz) + +************************************************************************ */ + +/** + * Main Authentication Page: + * A multi-page view that fills all page + */ + +qx.Class.define("osparc.auth.LoginPageTI", { + extend: osparc.auth.LoginPage, + + /* + ***************************************************************************** + CONSTRUCTOR + ***************************************************************************** + */ + construct: function() { + this.base(arguments); + }, + + events: { + "done": "qx.event.type.Data" + }, + + members: { + // overridden + _buildLayout: function() { + const layout = new qx.ui.layout.HBox(); + this._setLayout(layout); + + this.getContentElement().setStyles({ + "background-image": "url(resource/osparc/ti_splitimage.png)", + "background-repeat": "no-repeat", + "background-size": "auto 100%" + }); + + this._add(new qx.ui.core.Spacer(), { + width: "50%" + }); + + const loginLayout = new qx.ui.container.Composite(new qx.ui.layout.VBox(10)).set({ + alignX: "center", + alignY: "middle" + }); + this._add(loginLayout, { + width: "50%" + }); + + loginLayout.add(new qx.ui.core.Spacer(), { + flex: 1 + }); + + const image = this._getLogoWPlatform(); + loginLayout.add(image); + + const pages = this._getLoginStack(); + loginLayout.add(pages); + + const versionLink = this._getVersionLink(); + loginLayout.add(versionLink); + + loginLayout.add(new qx.ui.core.Spacer(), { + flex: 1 + }); + } + } +}); diff --git a/services/web/client/source/class/osparc/component/form/json/JsonSchemaForm.js b/services/web/client/source/class/osparc/component/form/json/JsonSchemaForm.js index 3b737a0cc6f..e7e94cc3b11 100644 --- a/services/web/client/source/class/osparc/component/form/json/JsonSchemaForm.js +++ b/services/web/client/source/class/osparc/component/form/json/JsonSchemaForm.js @@ -187,13 +187,14 @@ qx.Class.define("osparc.component.form.json.JsonSchemaForm", { container.setLayout(new qx.ui.layout.VBox()); } Object.entries(schema.properties).forEach(([key, value], index) => { - const allProps = Object.values(schema.properties); - const nextProp = index < allProps.length - 1 ? allProps[index+1] : null; + // const allProps = Object.values(schema.properties); + // const nextProp = index < allProps.length - 1 ? allProps[index+1] : null; container.add(this.__expand(key, value, data ? 
data[key] : data, depth+1, { required: schema.required && schema.required.includes(key) }), { - lineBreak: nextProp && nextProp.type === "array" || value.type === "array", - stretch: value.type === "array" + // "lineBreak" and "stretch" are not VBox's properties + // lineBreak: nextProp && nextProp.type === "array" || value.type === "array", + // stretch: value.type === "array" }); }); return container; diff --git a/services/web/client/source/class/osparc/component/form/renderer/PropForm.js b/services/web/client/source/class/osparc/component/form/renderer/PropForm.js index 09a273cc048..2575f81ebe7 100644 --- a/services/web/client/source/class/osparc/component/form/renderer/PropForm.js +++ b/services/web/client/source/class/osparc/component/form/renderer/PropForm.js @@ -184,6 +184,16 @@ qx.Class.define("osparc.component.form.renderer.PropForm", { } }, + __connectToInputNode: function(targetPortId, inputNodeId, outputKey) { + this.getNode().addInputNode(inputNodeId); + this.getNode().addPortLink(targetPortId, inputNodeId, outputKey) + .then(connected => { + if (connected) { + this.getNode().fireEvent("reloadModel"); + } + }); + }, + __addInputsMenuButtons: function(targetPortId, menu) { const study = this.getStudy(); const thisNode = this.getNode(); @@ -197,15 +207,7 @@ qx.Class.define("osparc.component.form.renderer.PropForm", { inputNode.bind("label", paramButton, "label", { converter: val => val + " : " + inputNode.getOutput(outputKey).label }); - paramButton.addListener("execute", () => { - this.getNode().addInputNode(inputNodeId); - this.getNode().addPortLink(targetPortId, inputNodeId, outputKey) - .then(connected => { - if (connected) { - this.getNode().fireEvent("reloadModel"); - } - }); - }, this); + paramButton.addListener("execute", () => this.__connectToInputNode(targetPortId, inputNodeId, outputKey), this); menu.add(paramButton); osparc.utils.Ports.arePortsCompatible(inputNode, outputKey, this.getNode(), targetPortId) .then(compatible => { @@ -272,15 +274,7 @@ qx.Class.define("osparc.component.form.renderer.PropForm", { .then(compatible => { if (compatible) { const paramButton = new qx.ui.menu.Button(inputNode.getOutput(outputKey).label); - paramButton.addListener("execute", () => { - this.getNode().addInputNode(inputNodeId); - this.getNode().addPortLink(targetPortId, inputNodeId, outputKey) - .then(connected => { - if (connected) { - this.getNode().fireEvent("reloadModel"); - } - }); - }, this); + paramButton.addListener("execute", () => this.__connectToInputNode(targetPortId, inputNodeId, outputKey), this); menu.add(paramButton); menuBtn.show(); } @@ -303,15 +297,7 @@ qx.Class.define("osparc.component.form.renderer.PropForm", { const paramButton = new qx.ui.menu.Button(); paramButton.nodeId = inputNodeId; paramNode.bind("label", paramButton, "label"); - paramButton.addListener("execute", () => { - this.getNode().addInputNode(inputNodeId); - this.getNode().addPortLink(targetPortId, inputNodeId, outputKey) - .then(connected => { - if (connected) { - this.getNode().fireEvent("reloadModel"); - } - }); - }, this); + paramButton.addListener("execute", () => this.__connectToInputNode(targetPortId, inputNodeId, outputKey), this); if (!menu.getChildren().some(child => child.nodeId === paramButton.nodeId)) { menu.add(paramButton); menuBtn.show(); diff --git a/services/web/client/source/class/osparc/component/node/ParameterEditor.js b/services/web/client/source/class/osparc/component/node/ParameterEditor.js index 2b675cfcadc..697937a7dd7 100644 --- 
a/services/web/client/source/class/osparc/component/node/ParameterEditor.js +++ b/services/web/client/source/class/osparc/component/node/ParameterEditor.js @@ -30,7 +30,11 @@ qx.Class.define("osparc.component.node.ParameterEditor", { statics: { getParameterOutputTypeFromMD: function(metaData) { - return metaData["outputs"]["out_1"]["type"]; + let type = metaData["outputs"]["out_1"]["type"]; + if (type === "ref_contentSchema") { + type = metaData["outputs"]["out_1"]["contentSchema"]["type"]; + } + return type; }, getParameterOutputType: function(node) { @@ -40,7 +44,7 @@ qx.Class.define("osparc.component.node.ParameterEditor", { setParameterOutputValue: function(node, val) { node.setOutputData({ - "out_1": val + "out_1": this.self().getParameterOutputType(node) === "array" ? osparc.ui.form.ContentSchemaArray.addArrayBrackets(val) : val }); } }, @@ -58,6 +62,8 @@ qx.Class.define("osparc.component.node.ParameterEditor", { let control; switch (id) { case "label": + case "string": + case "number": control = new qx.ui.form.TextField(); break; case "data-type": { @@ -75,9 +81,6 @@ qx.Class.define("osparc.component.node.ParameterEditor", { }); break; } - case "number": - control = new qx.ui.form.TextField(); - break; case "integer": control = new qx.ui.form.Spinner(); control.set({ @@ -89,6 +92,7 @@ qx.Class.define("osparc.component.node.ParameterEditor", { control = new qx.ui.form.CheckBox(); break; case "ref_contentSchema": + case "array": control = new osparc.ui.form.ContentSchemaArray(); break; } diff --git a/services/web/client/source/class/osparc/component/permissions/Permissions.js b/services/web/client/source/class/osparc/component/permissions/Permissions.js index 0e80ec8ad64..12524ca096c 100644 --- a/services/web/client/source/class/osparc/component/permissions/Permissions.js +++ b/services/web/client/source/class/osparc/component/permissions/Permissions.js @@ -223,7 +223,7 @@ qx.Class.define("osparc.component.permissions.Permissions", { getCollaborators: function() { osparc.store.Store.getInstance().getPotentialCollaborators() .then(potentialCollaborators => { - this.__collaborators = potentialCollaborators; + this.__collaborators = Object.assign(this.__collaborators, potentialCollaborators); this.__reloadOrganizationsAndMembers(); this.__reloadCollaboratorsList(); }); @@ -284,11 +284,7 @@ qx.Class.define("osparc.component.permissions.Permissions", { collaborator["accessRights"] = aceessRights[gid]; collaborator["showOptions"] = this._isUserOwner(); const collaboratorModel = qx.data.marshal.Json.createModel(collaborator); - if (parseInt(gid) === osparc.auth.Data.getInstance().getGroupId()) { - this.__collaboratorsModel.insertAt(0, collaboratorModel); - } else { - this.__collaboratorsModel.append(collaboratorModel); - } + this.__collaboratorsModel.append(collaboratorModel); } }); }, diff --git a/services/web/client/source/class/osparc/component/permissions/Study.js b/services/web/client/source/class/osparc/component/permissions/Study.js index 2ec2e63d826..ce300f158f2 100644 --- a/services/web/client/source/class/osparc/component/permissions/Study.js +++ b/services/web/client/source/class/osparc/component/permissions/Study.js @@ -33,7 +33,11 @@ qx.Class.define("osparc.component.permissions.Study", { construct: function(studyData) { this.__studyData = osparc.data.model.Study.deepCloneStudyObject(studyData); - this.base(arguments, this.__studyData); + const initCollabs = []; + if (osparc.data.Permissions.getInstance().canDo("study.everyone.share")) { + 
initCollabs.push(this.self().getEveryoneObj()); + } + this.base(arguments, this.__studyData, initCollabs); }, events: { @@ -89,6 +93,17 @@ qx.Class.define("osparc.component.permissions.Study", { removeCollaborator: function(studyData, gid) { return delete studyData["accessRights"][gid]; + }, + + getEveryoneObj: function() { + return { + "gid": 1, + "label": "Everyone", + "description": "", + "thumbnail": null, + "accessRights": this.getCollaboratorAccessRight(), + "collabType": 0 + }; } }, diff --git a/services/web/client/source/class/osparc/component/service/ServiceButtonList.js b/services/web/client/source/class/osparc/component/service/ServiceButtonList.js index 104b2c20c6f..7bd9792fb00 100644 --- a/services/web/client/source/class/osparc/component/service/ServiceButtonList.js +++ b/services/web/client/source/class/osparc/component/service/ServiceButtonList.js @@ -48,31 +48,11 @@ qx.Class.define("osparc.component.service.ServiceButtonList", { statics: { ITEM_WIDTH: 550, - ITEM_HEIGHT: 40, + ITEM_HEIGHT: 35, SERVICE_ICON: "@FontAwesome5Solid/paw/24" }, members: { - _createChildControlImpl: function(id) { - let control; - switch (id) { - case "hits": - control = new qx.ui.basic.Label().set({ - anonymous: true, - font: "text-13", - allowGrowY: false, - minWidth: 120, - alignY: "middle" - }); - this._add(control, { - row: 0, - column: osparc.dashboard.ListButtonBase.POS.HITS - }); - break; - } - return control || this.base(arguments, id); - }, - __applyServiceModel: function(serviceModel) { // BASE if (serviceModel.getThumbnail()) { @@ -85,7 +65,7 @@ qx.Class.define("osparc.component.service.ServiceButtonList", { // ITEM this.__applyLatestVersion(serviceModel); - this.__applyHits(serviceModel); + this.__applyHitsOnItem(serviceModel); }, __applyLatestVersion: function(serviceModel) { @@ -98,14 +78,14 @@ qx.Class.define("osparc.component.service.ServiceButtonList", { }); }, - __applyHits: function(serviceModel) { - const hitsLabel = new qx.ui.basic.Label(this.tr("Latest: ") + String(serviceModel.hits)).set({ + __applyHitsOnItem: function(serviceModel) { + const hitsLabel = new qx.ui.basic.Label(this.tr("Hits: ") + String(serviceModel.getHits())).set({ alignY: "middle", toolTipText: this.tr("Number of times it was instantiated") }); this._add(hitsLabel, { row: 0, - column: osparc.dashboard.ListButtonBase.POS.HITS_LABEL + column: osparc.dashboard.ListButtonBase.POS.HITS }); }, diff --git a/services/web/client/source/class/osparc/component/service/ServiceList.js b/services/web/client/source/class/osparc/component/service/ServiceList.js index a5cf96a1672..480e644a457 100644 --- a/services/web/client/source/class/osparc/component/service/ServiceList.js +++ b/services/web/client/source/class/osparc/component/service/ServiceList.js @@ -62,7 +62,6 @@ qx.Class.define("osparc.component.service.ServiceList", { allowEmptySelection: true }); - osparc.utils.Services.sortBasedOnFav(model); model.toArray().forEach(service => { const button = new osparc.component.service.ServiceButtonList(service); if (this.__filterGroup !== null) { diff --git a/services/web/client/source/class/osparc/component/widget/logger/LoggerView.js b/services/web/client/source/class/osparc/component/widget/logger/LoggerView.js index 1fe56567523..b0a3ad7f9a2 100644 --- a/services/web/client/source/class/osparc/component/widget/logger/LoggerView.js +++ b/services/web/client/source/class/osparc/component/widget/logger/LoggerView.js @@ -66,6 +66,13 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { init: 0 }, + lockLogs: { + 
apply : "__updateTable", + nullable: false, + check : "Boolean", + init: true + }, + currentNodeId: { check: "String", nullable: true, @@ -74,6 +81,12 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { }, statics: { + POS: { + ORIGIN: 0, + TIMESTAMP: 1, + MESSAGE: 2 + }, + LOG_LEVELS: { debug: -1, info: 0, @@ -141,6 +154,19 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { toolbar.add(control); break; } + case "lock-logs-button": { + control = new qx.ui.form.ToggleButton().set({ + toolTipText: this.tr("Toggle auto-scroll"), + appearance: "toolbar-button" + }); + control.bind("value", this, "lockLogs"); + control.bind("value", control, "icon", { + converter: val => val ? "@FontAwesome5Solid/lock/14" : "@FontAwesome5Solid/lock-open/14" + }); + const toolbar = this.getChildControl("toolbar"); + toolbar.add(control); + break; + } case "copy-to-clipboard": { const toolbar = this.getChildControl("toolbar"); control = new qx.ui.form.Button().set({ @@ -152,6 +178,17 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { toolbar.add(control); break; } + case "download-logs-button": { + const toolbar = this.getChildControl("toolbar"); + control = new qx.ui.form.Button().set({ + icon: "@FontAwesome5Solid/download/14", + toolTipText: this.tr("Download logs"), + appearance: "toolbar-button" + }); + osparc.utils.Utils.setIdToWidget(control, "copyLogsToClipboardButton"); + toolbar.add(control); + break; + } } return control || this.base(arguments, id); }, @@ -173,10 +210,17 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { }, this); toolbar.add(logLevelSelectBox); + const lockLogsButton = this.getChildControl("lock-logs-button"); + toolbar.add(lockLogsButton); + const copyToClipboardButton = this.getChildControl("copy-to-clipboard"); copyToClipboardButton.addListener("execute", () => this.__copyLogsToClipboard(), this); toolbar.add(copyToClipboardButton); + const downloadButton = this.getChildControl("download-logs-button"); + downloadButton.addListener("execute", () => this.__downloadLogs(), this); + toolbar.add(downloadButton); + return toolbar; }, @@ -193,21 +237,22 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { const table = this.__logView = new qx.ui.table.Table(loggerModel, custom).set({ selectable: true, statusBarVisible: false, - showCellFocusIndicator: false + showCellFocusIndicator: false, + rowHeight: 15, + forceLineHeight: false }); osparc.utils.Utils.setIdToWidget(table, "logsViewer"); const colModel = table.getTableColumnModel(); - colModel.setDataCellRenderer(0, new qx.ui.table.cellrenderer.Html()); - colModel.setDataCellRenderer(1, new osparc.ui.table.cellrenderer.Html().set({ + colModel.setDataCellRenderer(this.self().POS.ORIGIN, new qx.ui.table.cellrenderer.Html()); + colModel.setDataCellRenderer(this.self().POS.TIMESTAMP, new osparc.ui.table.cellrenderer.Html().set({ defaultCellStyle: "user-select: text" })); - colModel.setDataCellRenderer(2, new osparc.ui.table.cellrenderer.Html().set({ + colModel.setDataCellRenderer(this.self().POS.MESSAGE, new osparc.ui.table.cellrenderer.Html().set({ defaultCellStyle: "user-select: text" })); let resizeBehavior = colModel.getBehavior(); - resizeBehavior.setWidth(0, "15%"); - resizeBehavior.setWidth(1, "10%"); - resizeBehavior.setWidth(2, "75%"); + resizeBehavior.setWidth(this.self().POS.ORIGIN, 100); + resizeBehavior.setWidth(this.self().POS.TIMESTAMP, 80); this.__applyFilters(); @@ -232,12 +277,21 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { 
this.__textFilterField.setValue(node ? node.getLabel() : ""); }, - __copyLogsToClipboard: function() { + __getLogsString: function() { let logs = ""; this.__loggerModel.getRows().forEach(row => { - logs += `(${row.nodeId}) ${row.label}: ${row.msg} \n`; + logs += `(${row.nodeId}) - [${row.timeStamp}] ${row.label}: ${row.msg} \n`; }); - osparc.utils.Utils.copyTextToClipboard(logs); + return logs; + }, + + __copyLogsToClipboard: function() { + osparc.utils.Utils.copyTextToClipboard(this.__getLogsString()); + }, + + __downloadLogs: function() { + const logs = this.__getLogsString(); + osparc.utils.Utils.downloadContent("data:text/json;charset=utf-8," + logs, "logs.json"); }, debug: function(nodeId, msg = "") { @@ -287,6 +341,7 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { label, timeStamp: new Date(), msg, + tooltip: msg, logLevel }; msgLogs.push(msgLog); @@ -297,9 +352,13 @@ qx.Class.define("osparc.component.widget.logger.LoggerView", { }, __updateTable: function() { - this.__loggerModel.reloadData(); - const nFilteredRows = this.__loggerModel.getFilteredRowCount(); - this.__logView.scrollCellVisible(0, nFilteredRows); + if (this.__loggerModel) { + this.__loggerModel.reloadData(); + if (!this.isLockLogs()) { + const nFilteredRows = this.__loggerModel.getFilteredRowCount(); + this.__logView.scrollCellVisible(0, nFilteredRows); + } + } }, __applyFilters: function() { diff --git a/services/web/client/source/class/osparc/component/workbench/BaseNodeUI.js b/services/web/client/source/class/osparc/component/workbench/BaseNodeUI.js index 4d290d7416e..184c4fb9b66 100644 --- a/services/web/client/source/class/osparc/component/workbench/BaseNodeUI.js +++ b/services/web/client/source/class/osparc/component/workbench/BaseNodeUI.js @@ -69,7 +69,7 @@ qx.Class.define("osparc.component.workbench.BaseNodeUI", { // eslint-disable-next-line no-underscore-dangle const width = captionTitle.__contentSize.width; if (width > maxWidth) { - captionTitle.setToolTipText(this.getNode().getLabel()); + this.getNode().bind("label", captionTitle, "toolTipText"); } }, this, 50); }); diff --git a/services/web/client/source/class/osparc/component/workbench/NodeUI.js b/services/web/client/source/class/osparc/component/workbench/NodeUI.js index cd4a1742209..87ce4850134 100644 --- a/services/web/client/source/class/osparc/component/workbench/NodeUI.js +++ b/services/web/client/source/class/osparc/component/workbench/NodeUI.js @@ -297,7 +297,8 @@ qx.Class.define("osparc.component.workbench.NodeUI", { const title = this.getChildControl("title"); title.set({ wrap: true, - maxHeight: 28 + maxHeight: 28, + maxWidth: 90 }); const outputs = this.getNode().getOutputs(); diff --git a/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js b/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js index d9f96a54e62..84fca3fce80 100644 --- a/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js +++ b/services/web/client/source/class/osparc/component/workbench/ServiceCatalog.js @@ -93,6 +93,7 @@ qx.Class.define("osparc.component.workbench.ServiceCatalog", { __infoBtn: null, __serviceBrowser: null, __addBtn: null, + __sortByGroup: null, __createFilterLayout: function() { const layout = new qx.ui.container.Composite(new qx.ui.layout.HBox(10)).set({ @@ -109,9 +110,34 @@ qx.Class.define("osparc.component.workbench.ServiceCatalog", { flex: 1 }); - const reloadBtn = new qx.ui.form.Button(this.tr("Reload"), "@FontAwesome5Solid/sync-alt/12"); - 
reloadBtn.addListener("execute", () => this.__populateList(true), this); - layout.add(reloadBtn); + if (osparc.data.Permissions.getInstance().isTester()) { + const reloadBtn = new qx.ui.form.Button(this.tr("Reload"), "@FontAwesome5Solid/sync-alt/12"); + reloadBtn.addListener("execute", () => this.__populateList(true), this); + layout.add(reloadBtn); + } + + const containterSortBtns = new qx.ui.container.Composite(new qx.ui.layout.HBox(4)); + const byHitsBtn = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/sort-numeric-down/12"); + byHitsBtn.sortBy = "hits"; + const byNameBtn = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/sort-alpha-down/12"); + byNameBtn.sortBy = "name"; + const sortByGroup = this.__sortByGroup = new qx.ui.form.RadioGroup().set({ + allowEmptySelection: false + }); + [ + byHitsBtn, + byNameBtn + ].forEach(btn => { + containterSortBtns.add(btn); + sortByGroup.add(btn); + btn.getContentElement().setStyles({ + "border-radius": "8px" + }); + }); + layout.add(containterSortBtns); + + sortByGroup.addListener("changeSelection", () => this.__populateList()); + return layout; }, @@ -199,6 +225,7 @@ qx.Class.define("osparc.component.workbench.ServiceCatalog", { }, __updateList: function() { + osparc.component.filter.UIFilterController.getInstance().resetGroup("serviceCatalog"); const filteredServices = []; this.__allServicesList.forEach(service => { if (this.__contextLeftNodeId === null && this.__contextRightNodeId === null) { @@ -215,11 +242,14 @@ qx.Class.define("osparc.component.workbench.ServiceCatalog", { } }); + osparc.utils.Services.addHits(filteredServices); + osparc.utils.Services.sortObjectsBasedOn(filteredServices, this.__sortByGroup.getSelection()[0].sortBy); const filteredServicesObj = this.__filteredServicesObj = osparc.utils.Services.convertArrayToObject(filteredServices); const groupedServicesList = []; for (const key in filteredServicesObj) { let service = osparc.utils.Services.getLatest(filteredServicesObj, key); + osparc.utils.Services.addHits([service]); service = osparc.utils.Utils.deepCloneObject(service); osparc.utils.Services.removeFileToKeyMap(service); groupedServicesList.push(qx.data.marshal.Json.createModel(service)); diff --git a/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js b/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js index 3c50d99bdf5..7e32dd870e0 100644 --- a/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js +++ b/services/web/client/source/class/osparc/component/workbench/WorkbenchUI.js @@ -308,11 +308,7 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { const newNodeUI = this.__addNode(service, nodePos); if (nodeLeftId !== null || nodeRightId !== null) { const newNodeId = newNodeUI.getNodeId(); - this._createEdgeBetweenNodes({ - nodeId: nodeLeftId ? nodeLeftId : newNodeId - }, { - nodeId: nodeRightId ? nodeRightId : newNodeId - }); + this._createEdgeBetweenNodes(nodeLeftId ? nodeLeftId : newNodeId, nodeRightId ? 
nodeRightId : newNodeId, true); } }, this); srvCat.open(); @@ -326,8 +322,9 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { } const newNodeUI = this._createNodeUI(node.getNodeId()); - this.__createDragDropMechanism(newNodeUI); this._addNodeUIToWorkbench(newNodeUI, pos); + qx.ui.core.queue.Layout.flush(); + this.__createDragDropMechanism(newNodeUI); return newNodeUI; }, @@ -359,7 +356,6 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { let onNodeUI = null; this.__nodesUI.forEach(nodeUI => { const nBounds = nodeUI.getBounds(); - console.log(); if (onNodeUI === null && pos.x > nBounds.left && pos.x < nBounds.left + nBounds.width && @@ -386,9 +382,7 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { nodeUI.open(); this.__nodesUI.push(nodeUI); - nodeUI.addListener("appear", () => { - this.__updateNodeUIPos(nodeUI); - }, this); + nodeUI.addListener("appear", () => this.__updateNodeUIPos(nodeUI), this); const isStudyReadOnly = this.getStudy().isReadOnly(); nodeUI.set({ @@ -404,8 +398,6 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { this.__addNodeListeners(nodeUI); } - qx.ui.core.queue.Layout.flush(); - this.__updateHint(); }, @@ -600,58 +592,6 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { return nodeUI; }, - __createEdgeUI: function(node1Id, node2Id, edgeId) { - const edge = this.__getWorkbench().createEdge(edgeId, node1Id, node2Id); - if (!edge) { - return null; - } - if (this.__edgeRepresentationExists(edge)) { - return null; - } - - // build representation - const nodeUI1 = this.getNodeUI(node1Id); - const nodeUI2 = this.getNodeUI(node2Id); - if (nodeUI1.getCurrentBounds() === null || nodeUI2.getCurrentBounds() === null) { - console.error("bounds not ready"); - return null; - } - const port1 = nodeUI1.getOutputPort(); - const port2 = nodeUI2.getInputPort(); - if (port1 && port2) { - nodeUI2.getNode().addInputNode(node1Id); - const pointList = this.__getEdgePoints(nodeUI1, port1, nodeUI2, port2); - const x1 = pointList[0] ? pointList[0][0] : 0; - const y1 = pointList[0] ? pointList[0][1] : 0; - const x2 = pointList[1] ? pointList[1][0] : 0; - const y2 = pointList[1] ? 
pointList[1][1] : 0; - const edgeRepresentation = this.__svgLayer.drawCurve(x1, y1, x2, y2, !edge.isPortConnected()); - - edge.addListener("changePortConnected", e => { - const portConnected = e.getData(); - osparc.wrapper.Svg.updateCurveDashes(edgeRepresentation, !portConnected); - }, this); - - const edgeUI = new osparc.component.workbench.EdgeUI(edge, edgeRepresentation); - this.__edgesUI.push(edgeUI); - - const that = this; - edgeUI.getRepresentation().widerCurve.node.addEventListener("click", e => { - // this is needed to get out of the context of svg - that.__selectedItemChanged(edgeUI.getEdgeId()); // eslint-disable-line no-underscore-dangle - e.stopPropagation(); - }, this); - edgeUI.getRepresentation().node.addEventListener("click", e => { - // this is needed to get out of the context of svg - that.__selectedItemChanged(edgeUI.getEdgeId()); // eslint-disable-line no-underscore-dangle - e.stopPropagation(); - }, this); - - return edgeUI; - } - return null; - }, - __edgeRepresentationExists: function(edge) { for (let i=0; i { + const portConnected = e.getData(); + osparc.wrapper.Svg.updateCurveDashes(edgeRepresentation, !portConnected); + }, this); + + const edgeUI = new osparc.component.workbench.EdgeUI(edge, edgeRepresentation); + this.__edgesUI.push(edgeUI); + + const that = this; + edgeUI.getRepresentation().widerCurve.node.addEventListener("click", e => { + // this is needed to get out of the context of svg + that.__selectedItemChanged(edgeUI.getEdgeId()); // eslint-disable-line no-underscore-dangle + e.stopPropagation(); + }, this); + edgeUI.getRepresentation().node.addEventListener("click", e => { + // this is needed to get out of the context of svg + that.__selectedItemChanged(edgeUI.getEdgeId()); // eslint-disable-line no-underscore-dangle + e.stopPropagation(); + }, this); + } }, __updateAllEdges: function() { @@ -1041,19 +1021,17 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { this.__clearAllAnnotations(); }, + __reloadCurrentModel: function() { + if (this._currentModel) { + this.loadModel(this.getStudy().getWorkbench()); + } + }, + loadModel: function(model) { if (this.__svgLayer.getReady()) { this._loadModel(model); } else { - this.__svgLayer.addListenerOnce("SvgWidgetReady", () => { - this._loadModel(model); - }, this); - } - }, - - __reloadCurrentModel: function() { - if (this._currentModel) { - this.loadModel(this._currentModel); + this.__svgLayer.addListenerOnce("SvgWidgetReady", () => this._loadModel(model), this); } }, @@ -1061,18 +1039,16 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { this._clearAll(); this._currentModel = model; if (model) { - qx.ui.core.queue.Visibility.flush(); - // create nodes const nodes = model.getNodes(); const nodeUIs = []; for (const nodeId in nodes) { const node = nodes[nodeId]; const nodeUI = this._createNodeUI(nodeId); - this.__createDragDropMechanism(nodeUI); this._addNodeUIToWorkbench(nodeUI, node.getPosition()); nodeUIs.push(nodeUI); } + qx.ui.core.queue.Layout.flush(); let tries = 0; const maxTries = 40; @@ -1084,17 +1060,15 @@ qx.Class.define("osparc.component.workbench.WorkbenchUI", { } console.log("nodes visible", nodeUIs.length, tries*sleepFor); + nodeUIs.forEach(nodeUI => this.__createDragDropMechanism(nodeUI)); + // create edges for (const nodeId in nodes) { const node = nodes[nodeId]; const inputNodeIDs = node.getInputNodes(); inputNodeIDs.forEach(inputNodeId => { if (inputNodeId in nodes) { - this._createEdgeBetweenNodes({ - nodeId: inputNodeId - }, { - nodeId: nodeId - }); + 
this._createEdgeBetweenNodes(inputNodeId, nodeId, false); } }); } diff --git a/services/web/client/source/class/osparc/component/workbench/WorkbenchUIPreview.js b/services/web/client/source/class/osparc/component/workbench/WorkbenchUIPreview.js index 89d24dcc801..ba78dabd4e9 100644 --- a/services/web/client/source/class/osparc/component/workbench/WorkbenchUIPreview.js +++ b/services/web/client/source/class/osparc/component/workbench/WorkbenchUIPreview.js @@ -62,6 +62,7 @@ qx.Class.define("osparc.component.workbench.WorkbenchUIPreview", { const nodeUI = this._createNodeUI(nodeId); this._addNodeUIToWorkbench(nodeUI, node.getPosition()); } + qx.ui.core.queue.Layout.flush(); // create edges for (const nodeId in nodes) { @@ -69,11 +70,7 @@ qx.Class.define("osparc.component.workbench.WorkbenchUIPreview", { const inputNodeIDs = node.getInputNodes(); inputNodeIDs.forEach(inputNodeId => { if (inputNodeId in nodes) { - this._createEdgeBetweenNodes({ - nodeId: inputNodeId - }, { - nodeId: nodeId - }); + this._createEdgeBetweenNodes(inputNodeId, nodeId, false); } }); } diff --git a/services/web/client/source/class/osparc/dashboard/CardBase.js b/services/web/client/source/class/osparc/dashboard/CardBase.js index 2c2b390567f..c7ca5662609 100644 --- a/services/web/client/source/class/osparc/dashboard/CardBase.js +++ b/services/web/client/source/class/osparc/dashboard/CardBase.js @@ -163,6 +163,12 @@ qx.Class.define("osparc.dashboard.CardBase", { apply: "__applyUiMode" }, + hits: { + check: "Number", + nullable: true, + apply: "__applyHits" + }, + state: { check: "Object", nullable: false, @@ -216,6 +222,7 @@ qx.Class.define("osparc.dashboard.CardBase", { let uuid = null; let owner = ""; let accessRights = {}; + let defaultHits = null; let workbench = null; switch (studyData["resourceType"]) { case "study": @@ -243,6 +250,7 @@ qx.Class.define("osparc.dashboard.CardBase", { if (osparc.data.model.Node.isDynamic(studyData)) { defaultThumbnail = this.self().DYNAMIC_SERVICE_ICON; } + defaultHits = 0; break; } @@ -259,6 +267,7 @@ qx.Class.define("osparc.dashboard.CardBase", { classifiers: studyData.classifiers && studyData.classifiers ? studyData.classifiers : [], quality: studyData.quality ? studyData.quality : null, uiMode: studyData.ui && studyData.ui.mode ? studyData.ui.mode : null, + hits: studyData.hits ? 
studyData.hits : defaultHits, workbench }); }, @@ -331,6 +340,13 @@ qx.Class.define("osparc.dashboard.CardBase", { } }, + __applyHits: function(hits) { + if (hits !== null) { + const hitsLabel = this.getChildControl("hits-service"); + hitsLabel.setValue(this.tr("Hits: ") + String(hits)); + } + }, + __applyWorkbench: function(workbench) { if (workbench === null) { return; diff --git a/services/web/client/source/class/osparc/dashboard/GridButtonItem.js b/services/web/client/source/class/osparc/dashboard/GridButtonItem.js index 17471e5d2d2..33d7ea49a98 100644 --- a/services/web/client/source/class/osparc/dashboard/GridButtonItem.js +++ b/services/web/client/source/class/osparc/dashboard/GridButtonItem.js @@ -76,6 +76,17 @@ qx.Class.define("osparc.dashboard.GridButtonItem", { layout.add(control); break; } + case "hits-service": { + const layout = this.getChildControl("tsr-mode-update-layout"); + control = new qx.ui.basic.Label().set({ + toolTipText: this.tr("Number of times it was instantiated") + }); + layout.add(new qx.ui.core.Spacer(), { + flex: 1 + }); + layout.add(control); + break; + } case "tags": control = new qx.ui.container.Composite(new qx.ui.layout.Flow(5, 3)).set({ anonymous: true diff --git a/services/web/client/source/class/osparc/dashboard/ListButtonBase.js b/services/web/client/source/class/osparc/dashboard/ListButtonBase.js index 61adb5cbecb..98e609dfc4a 100644 --- a/services/web/client/source/class/osparc/dashboard/ListButtonBase.js +++ b/services/web/client/source/class/osparc/dashboard/ListButtonBase.js @@ -55,9 +55,8 @@ qx.Class.define("osparc.dashboard.ListButtonBase", { UI_MODE: 8, UPDATE_STUDY: 9, LAST_CHANGE: 10, - HITS_LABEL: 11, - HITS: 12, - OPTIONS: 13 + HITS: 11, + OPTIONS: 12 } }, diff --git a/services/web/client/source/class/osparc/dashboard/ResourceBrowserBase.js b/services/web/client/source/class/osparc/dashboard/ResourceBrowserBase.js index a46517a01f2..f7befa197b1 100644 --- a/services/web/client/source/class/osparc/dashboard/ResourceBrowserBase.js +++ b/services/web/client/source/class/osparc/dashboard/ResourceBrowserBase.js @@ -134,7 +134,7 @@ qx.Class.define("osparc.dashboard.ResourceBrowserBase", { flex: 1 }); - const containterModeBtns = new qx.ui.container.Composite(new qx.ui.layout.HBox(6)); + const containterModeBtns = new qx.ui.container.Composite(new qx.ui.layout.HBox(4)); const viewGridBtn = this._viewGridBtn = new qx.ui.form.ToggleButton(null, "@MaterialIcons/apps/18"); const viewListBtn = this._viewListBtn = new qx.ui.form.ToggleButton(null, "@MaterialIcons/reorder/18"); const group = new qx.ui.form.RadioGroup(); diff --git a/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js b/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js index 856273644ba..ea26ae7ee9d 100644 --- a/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js +++ b/services/web/client/source/class/osparc/dashboard/ServiceBrowser.js @@ -28,6 +28,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { members: { __servicesAll: null, __servicesLatestList: null, + __sortByGroup: null, __reloadService: function(key, version, reload) { osparc.store.Store.getInstance().getService(key, version, reload) @@ -46,10 +47,13 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { const store = osparc.store.Store.getInstance(); store.getServicesOnly() .then(services => { + const favServices = osparc.utils.Utils.localCache.getFavServices(); this.__servicesAll = services; const servicesList = []; for (const key in services) { const latestService = 
osparc.utils.Services.getLatest(services, key); + const found = Object.keys(favServices).find(favSrv => favSrv === key); + latestService.hits = found ? favServices[found]["hits"] : 0; servicesList.push(latestService); } this._resetResourcesList(servicesList); @@ -87,6 +91,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { this._createResourcesLayout("service"); this.__addNewServiceButtons(); + this.__addSortingButtons(); osparc.utils.Utils.setIdToWidget(this._resourcesContainer, "servicesList"); @@ -145,6 +150,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { } this.__servicesLatestList = servicesList; this._resourcesContainer.removeAll(); + osparc.utils.Services.sortObjectsBasedOn(servicesList, this.__sortByGroup.getSelection()[0].sortBy); servicesList.forEach(service => { service["resourceType"] = "service"; const serviceItem = this.__createServiceItem(service, this._resourcesContainer.getMode()); @@ -199,9 +205,7 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { }); const addServiceButton = new qx.ui.form.Button(this.tr("Submit new service"), "@FontAwesome5Solid/plus-circle/14"); - addServiceButton.addListener("execute", () => { - this.__displayServiceSubmissionForm(); - }); + addServiceButton.addListener("execute", () => this.__displayServiceSubmissionForm()); this._secondaryBar.add(addServiceButton); }, @@ -256,6 +260,36 @@ qx.Class.define("osparc.dashboard.ServiceBrowser", { .finally(() => form.setFetching(false)); }); scroll.add(form); + }, + + __addSortingButtons: function() { + this._secondaryBar.add(new qx.ui.core.Spacer(), { + flex: 1 + }); + + const containterSortBtns = new qx.ui.container.Composite(new qx.ui.layout.HBox(4)).set({ + marginRight: 8 + }); + const byHitsBtn = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/sort-numeric-down/14"); + byHitsBtn.sortBy = "hits"; + const byNameBtn = new qx.ui.form.ToggleButton(null, "@FontAwesome5Solid/sort-alpha-down/14"); + byNameBtn.sortBy = "name"; + const sortByGroup = this.__sortByGroup = new qx.ui.form.RadioGroup().set({ + allowEmptySelection: false + }); + [ + byHitsBtn, + byNameBtn + ].forEach(btn => { + containterSortBtns.add(btn); + sortByGroup.add(btn); + btn.getContentElement().setStyles({ + "border-radius": "8px" + }); + }); + this._secondaryBar.add(containterSortBtns); + + sortByGroup.addListener("changeSelection", () => this._resetResourcesList()); } } }); diff --git a/services/web/client/source/class/osparc/data/Permissions.js b/services/web/client/source/class/osparc/data/Permissions.js index b9328fbe390..f2bdbaa6a3b 100644 --- a/services/web/client/source/class/osparc/data/Permissions.js +++ b/services/web/client/source/class/osparc/data/Permissions.js @@ -121,6 +121,7 @@ qx.Class.define("osparc.data.Permissions", { "services.all.read", "user.role.update", "user.clusters.create", + "study.everyone.share", "study.snapshot.read", "study.snapshot.create", "study.nodestree.uuid.read", diff --git a/services/web/client/source/class/osparc/data/model/Node.js b/services/web/client/source/class/osparc/data/model/Node.js index be319371ab3..6aefdf44e99 100644 --- a/services/web/client/source/class/osparc/data/model/Node.js +++ b/services/web/client/source/class/osparc/data/model/Node.js @@ -158,6 +158,12 @@ qx.Class.define("osparc.data.model.Node", { apply: "__applyErrors" }, + bootOptions: { + check: "Object", + init: null, + nullable: true + }, + // GUI elements // propsForm: { check: "osparc.component.form.renderer.PropForm", @@ -425,6 +431,9 @@ qx.Class.define("osparc.data.model.Node", { if 
(nodeData.thumbnail) { this.setThumbnail(nodeData.thumbnail); } + if (nodeData.bootOptions) { + this.setBootOptions(nodeData.bootOptions); + } } this.__initLogger(); @@ -753,15 +762,8 @@ qx.Class.define("osparc.data.model.Node", { } }, - // post edge creation routine - edgeAdded: function(edge) { - const inputNode = this.getWorkbench().getNode(edge.getInputNodeId()); - const outputNode = this.getWorkbench().getNode(edge.getOutputNodeId()); - this.__createAutoPortConnection(inputNode, outputNode); - }, - // Iterate over output ports and connect them to first compatible input port - __createAutoPortConnection: async function(node1, node2) { + createAutoPortConnection: async function(node1, node2) { const preferencesSettings = osparc.desktop.preferences.Preferences.getInstance(); if (!preferencesSettings.getAutoConnectPorts()) { return; @@ -967,6 +969,9 @@ qx.Class.define("osparc.data.model.Node", { case "integer": val = 1; break; + case "array": + val = "[1]"; + break; } if (val !== null) { osparc.component.node.ParameterEditor.setParameterOutputValue(this, val); @@ -1327,7 +1332,8 @@ qx.Class.define("osparc.data.model.Node", { inputAccess: this.getInputAccess(), inputNodes: this.getInputNodes(), parent: this.getParentNodeId(), - thumbnail: this.getThumbnail() + thumbnail: this.getThumbnail(), + bootOptions: this.getBootOptions() }; if (!clean) { nodeEntry.progress = this.getStatus().getProgress(); diff --git a/services/web/client/source/class/osparc/data/model/Workbench.js b/services/web/client/source/class/osparc/data/model/Workbench.js index 8bbcd1203db..da2a798faf9 100644 --- a/services/web/client/source/class/osparc/data/model/Workbench.js +++ b/services/web/client/source/class/osparc/data/model/Workbench.js @@ -226,7 +226,7 @@ qx.Class.define("osparc.data.model.Workbench", { return null; }, - createEdge: function(edgeId, nodeLeftId, nodeRightId) { + createEdge: function(edgeId, nodeLeftId, nodeRightId, autoConnect = true) { const existingEdge = this.getEdge(edgeId, nodeLeftId, nodeRightId); if (existingEdge) { return existingEdge; @@ -240,8 +240,9 @@ qx.Class.define("osparc.data.model.Workbench", { const edge = new osparc.data.model.Edge(edgeId, nodeLeft, nodeRight); this.addEdge(edge); - // post edge creation - this.getNode(nodeRightId).edgeAdded(edge); + if (autoConnect) { + nodeRight.createAutoPortConnection(nodeLeft, nodeRight); + } nodeRight.addInputNode(nodeLeftId); @@ -320,13 +321,6 @@ qx.Class.define("osparc.data.model.Workbench", { node.addListener("showInLogger", e => this.fireDataEvent("showInLogger", e.getData()), this); node.addListener("retrieveInputs", e => this.fireDataEvent("retrieveInputs", e.getData()), this); node.addListener("fileRequested", e => this.fireDataEvent("fileRequested", e.getData()), this); - node.addListener("parameterRequested", e => { - const { - portId, - nodeId - } = e.getData(); - this.__parameterNodeRequested(nodeId, portId); - }, this); node.addListener("filePickerRequested", e => { const { portId, @@ -335,6 +329,13 @@ qx.Class.define("osparc.data.model.Workbench", { } = e.getData(); this.__filePickerNodeRequested(nodeId, portId, file); }, this); + node.addListener("parameterRequested", e => { + const { + portId, + nodeId + } = e.getData(); + this.__parameterNodeRequested(nodeId, portId); + }, this); node.addListener("probeRequested", e => { const { portId, @@ -395,14 +396,14 @@ qx.Class.define("osparc.data.model.Workbench", { // create a new FP const filePickerMetadata = osparc.utils.Services.getFilePicker(); - const parentNodeId = 
requesterNode.getParentNodeId(); - const parent = parentNodeId ? this.getNode(parentNodeId) : null; - const filePicker = this.createNode(filePickerMetadata["key"], filePickerMetadata["version"], null, parent); + const filePicker = this.createNode(filePickerMetadata["key"], filePickerMetadata["version"]); filePicker.setPosition(freePos); // create connection const filePickerId = filePicker.getNodeId(); requesterNode.addInputNode(filePickerId); + // reload also before port connection happens + this.fireEvent("reloadModel"); requesterNode.addPortLink(portId, filePickerId, "outFile") .then(success => { if (success) { @@ -442,9 +443,7 @@ qx.Class.define("osparc.data.model.Workbench", { const type = osparc.utils.Ports.getPortType(requesterNode.getMetaData()["inputs"], portId); const pmMD = osparc.utils.Services.getParameterMetadata(type); if (pmMD) { - const parentNodeId = requesterNode.getParentNodeId(); - const parent = parentNodeId ? this.getNode(parentNodeId) : null; - const pm = this.createNode(pmMD["key"], pmMD["version"], null, parent); + const pm = this.createNode(pmMD["key"], pmMD["version"]); // do not overlap the new Parameter Node with other nodes const freePos = this.getFreePosition(requesterNode); @@ -453,17 +452,13 @@ qx.Class.define("osparc.data.model.Workbench", { // create connection const pmId = pm.getNodeId(); requesterNode.addInputNode(pmId); - requesterNode.addPortLink(portId, pmId, "out_1") - .then(success => { - if (success) { - this.fireDataEvent("openNode", pmId); - this.fireEvent("reloadModel"); - } else { - this.removeNode(pmId); - const msg = qx.locale.Manager.tr("Parameter couldn't be assigned"); - osparc.component.message.FlashMessenger.getInstance().logAs(msg, "ERROR"); - } - }); + // bypass the compatibility check + if (requesterNode.getPropsForm().addPortLink(portId, pmId, "out_1") !== true) { + this.removeNode(pmId); + const msg = qx.locale.Manager.tr("Parameter couldn't be assigned"); + osparc.component.message.FlashMessenger.getInstance().logAs(msg, "ERROR"); + } + this.fireEvent("reloadModel"); } }, @@ -475,9 +470,7 @@ qx.Class.define("osparc.data.model.Workbench", { const type = osparc.utils.Ports.getPortType(requesterNode.getMetaData()["outputs"], portId); const probeMD = osparc.utils.Services.getProbeMetadata(type); if (probeMD) { - const parentNodeId = requesterNode.getParentNodeId(); - const parent = parentNodeId ? 
this.getNode(parentNodeId) : null; - const probeNode = this.createNode(probeMD["key"], probeMD["version"], null, parent); + const probeNode = this.createNode(probeMD["key"], probeMD["version"]); probeNode.setLabel(requesterPortMD.label); // do not overlap the new Parameter Node with other nodes @@ -487,16 +480,13 @@ qx.Class.define("osparc.data.model.Workbench", { // create connection const probeId = probeNode.getNodeId(); probeNode.addInputNode(nodeId); - probeNode.addPortLink("in_1", nodeId, portId) - .then(success => { - if (success) { - this.fireEvent("reloadModel"); - } else { - this.removeNode(probeId); - const msg = qx.locale.Manager.tr("Probe couldn't be assigned"); - osparc.component.message.FlashMessenger.getInstance().logAs(msg, "ERROR"); - } - }); + // bypass the compatibility check + if (probeNode.getPropsForm().addPortLink("in_1", nodeId, portId) !== true) { + this.removeNode(probeId); + const msg = qx.locale.Manager.tr("Probe couldn't be assigned"); + osparc.component.message.FlashMessenger.getInstance().logAs(msg, "ERROR"); + } + this.fireEvent("reloadModel"); } }, @@ -587,10 +577,10 @@ qx.Class.define("osparc.data.model.Workbench", { // create connections if (leftNodeId) { - this.createEdge(null, leftNodeId, node.getNodeId()); + this.createEdge(null, leftNodeId, node.getNodeId(), true); } if (rightNodeId) { - this.createEdge(null, node.getNodeId(), rightNodeId); + this.createEdge(null, node.getNodeId(), rightNodeId, true); } this.fireEvent("reloadModel"); diff --git a/services/web/client/source/class/osparc/desktop/SlideshowView.js b/services/web/client/source/class/osparc/desktop/SlideshowView.js index 509ebd2bad3..8a1c89d693a 100644 --- a/services/web/client/source/class/osparc/desktop/SlideshowView.js +++ b/services/web/client/source/class/osparc/desktop/SlideshowView.js @@ -439,7 +439,7 @@ qx.Class.define("osparc.desktop.SlideshowView", { // bypass connection const workbench = this.getStudy().getWorkbench(); - workbench.createEdge(null, leftNodeId, rightNodeId); + workbench.createEdge(null, leftNodeId, rightNodeId, true); // remove node workbench.removeNode(nodeId); diff --git a/services/web/client/source/class/osparc/desktop/WorkbenchView.js b/services/web/client/source/class/osparc/desktop/WorkbenchView.js index 5ff91745f91..fda1c0d5c93 100644 --- a/services/web/client/source/class/osparc/desktop/WorkbenchView.js +++ b/services/web/client/source/class/osparc/desktop/WorkbenchView.js @@ -610,16 +610,20 @@ qx.Class.define("osparc.desktop.WorkbenchView", { // callback for incoming progress const slotName2 = "progress"; if (!socket.slotExists(slotName2)) { - socket.on(slotName2, data => { - const d = JSON.parse(data); - const nodeId = d["node_id"]; - const progress = Number.parseFloat(d["progress"]).toFixed(4); + socket.on(slotName2, jsonString => { + const data = JSON.parse(jsonString); + if (Object.prototype.hasOwnProperty.call(data, "project_id") && this.getStudy().getUuid() !== data["project_id"]) { + // Filtering out logs from other studies + return; + } + const nodeId = data["node_id"]; + const progress = Number.parseFloat(data["progress"]).toFixed(4); const workbench = this.getStudy().getWorkbench(); const node = workbench.getNode(nodeId); if (node) { node.getStatus().setProgress(progress); } else if (osparc.data.Permissions.getInstance().isTester()) { - console.log("Ignored ws 'progress' msg", d); + console.log("Ignored ws 'progress' msg", data); } }, this); } @@ -627,15 +631,19 @@ qx.Class.define("osparc.desktop.WorkbenchView", { this.listenToNodeUpdated(); // 
callback for events - const slotName4 = "event"; - if (!socket.slotExists(slotName4)) { - socket.on(slotName4, eventData => { - const eventPayload = JSON.parse(eventData); - const action = eventPayload["action"]; + const slotName3 = "event"; + if (!socket.slotExists(slotName3)) { + socket.on(slotName3, jsonString => { + const data = JSON.parse(jsonString); + if (Object.prototype.hasOwnProperty.call(data, "project_id") && this.getStudy().getUuid() !== data["project_id"]) { + // Filtering out logs from other studies + return; + } + const action = data["action"]; if (action == "RELOAD_IFRAME") { // TODO: maybe reload iframe in the future // for now a message is displayed to the user - const nodeId = eventPayload["node_id"]; + const nodeId = data["node_id"]; const workbench = this.getStudy().getWorkbench(); const node = workbench.getNode(nodeId); @@ -652,9 +660,9 @@ qx.Class.define("osparc.desktop.WorkbenchView", { const slotName = "nodeUpdated"; if (!socket.slotExists(slotName)) { - socket.on(slotName, data => { - const d = JSON.parse(data); - this.getStudy().nodeUpdated(d); + socket.on(slotName, jsonString => { + const data = JSON.parse(jsonString); + this.getStudy().nodeUpdated(data); }, this); } }, diff --git a/services/web/client/source/class/osparc/servicecard/Large.js b/services/web/client/source/class/osparc/servicecard/Large.js index c5396e9546a..ec8177e5603 100644 --- a/services/web/client/source/class/osparc/servicecard/Large.js +++ b/services/web/client/source/class/osparc/servicecard/Large.js @@ -149,6 +149,9 @@ qx.Class.define("osparc.servicecard.Large", { this._add(more, { flex: 1 }); + const copy2Clip = osparc.utils.Utils.getCopyButton(); + copy2Clip.addListener("execute", () => osparc.utils.Utils.copyTextToClipboard(osparc.utils.Utils.prettifyJson(this.getService())), this); + more.getChildControl("header").add(copy2Clip); }, __createViewWithEdit: function(view, cb) { diff --git a/services/web/client/source/class/osparc/store/Store.js b/services/web/client/source/class/osparc/store/Store.js index 7a5124b48fb..68ccba906a2 100644 --- a/services/web/client/source/class/osparc/store/Store.js +++ b/services/web/client/source/class/osparc/store/Store.js @@ -310,13 +310,6 @@ qx.Class.define("osparc.store.Store", { }); }, - /** - * @param {String} key - */ - getServiceVersions: function(key) { - console.log(this.getServices()); - }, - /** * This functions does the needed processing in order to have a working list of services and DAGs. * @param {Boolean} reload @@ -337,30 +330,6 @@ qx.Class.define("osparc.store.Store", { }); }, - /** - * This functions does the needed processing in order to have a working list of services and DAGs. 
- * @param {Boolean} reload - */ - getServicesDAGs: function(reload = false) { - return new Promise((resolve, reject) => { - const allServices = []; - const servicesPromise = osparc.data.Resources.get("services", null, !reload); - const dagsPromise = osparc.data.Resources.get("dags", null, !reload); - Promise.all([servicesPromise, dagsPromise]) - .then(values => { - allServices.push(...values[0], ...values[1]); - }) - .catch(err => { - console.error("getServicesDAGs failed", err); - }) - .finally(() => { - const servicesObj = osparc.utils.Services.convertArrayToObject(allServices); - osparc.utils.Services.servicesToCache(servicesObj, true); - resolve(osparc.utils.Services.servicesCached); - }); - }); - }, - getInaccessibleServices: function(studyData) { return new Promise((resolve, reject) => { const inaccessibleServices = []; diff --git a/services/web/client/source/class/osparc/utils/Services.js b/services/web/client/source/class/osparc/utils/Services.js index a94e77e118d..47ff8dc3b06 100644 --- a/services/web/client/source/class/osparc/utils/Services.js +++ b/services/web/client/source/class/osparc/utils/Services.js @@ -101,26 +101,21 @@ qx.Class.define("osparc.utils.Services", { return 0; }, - sortBasedOnFav: function(servicesArray) { + sortObjectsBasedOn: function(servicesArray, basedOn = "name") { + servicesArray.sort((a, b) => { + if (basedOn === "hits" && a[basedOn] !== b[basedOn]) { + return b[basedOn] - a[basedOn]; + } + return a["name"].localeCompare(b["name"]); + }); + }, + + addHits: function(servicesArray) { const favServices = osparc.utils.Utils.localCache.getFavServices(); servicesArray.forEach(service => { - const found = Object.keys(favServices).find(favSrv => favSrv === service.getKey()); + const found = Object.keys(favServices).find(favSrv => favSrv === service["key"]); service.hits = found ? 
favServices[found]["hits"] : 0; }); - servicesArray.sort((a, b) => { - let aIdx = Object.keys(favServices).indexOf(a.getKey()); - if (aIdx !== -1) { - aIdx = favServices[a.getKey()]["hits"]; - } - let bIdx = Object.keys(favServices).indexOf(b.getKey()); - if (bIdx !== -1) { - bIdx = favServices[b.getKey()]["hits"]; - } - if (aIdx !== bIdx) { - return bIdx - aIdx; - } - return a.getName().localeCompare(b.getName()); - }); }, convertArrayToObject: function(servicesArray) { diff --git a/services/web/client/source/class/osparc/utils/Utils.js b/services/web/client/source/class/osparc/utils/Utils.js index b9438f8648e..4c7354292e9 100644 --- a/services/web/client/source/class/osparc/utils/Utils.js +++ b/services/web/client/source/class/osparc/utils/Utils.js @@ -331,6 +331,10 @@ qx.Class.define("osparc.utils.Utils", { return JSON.parse(JSON.stringify(src)); }, + prettifyJson: function(json) { + return JSON.stringify(json, undefined, 2); + }, + getRandomColor: function() { let letters = "0123456789ABCDEF"; let color = "#"; @@ -417,14 +421,10 @@ qx.Class.define("osparc.utils.Utils", { if (xhr.status == 200) { let blob = new Blob([xhr.response]); let urlBlob = window.URL.createObjectURL(blob); - let downloadAnchorNode = document.createElement("a"); - downloadAnchorNode.setAttribute("href", urlBlob); if (!fileName) { fileName = this.self().filenameFromContentDisposition(xhr); } - downloadAnchorNode.setAttribute("download", fileName); - downloadAnchorNode.click(); - downloadAnchorNode.remove(); + this.self().downloadContent(urlBlob, fileName); resolve(); } else { reject(xhr); @@ -436,6 +436,14 @@ qx.Class.define("osparc.utils.Utils", { }); }, + downloadContent: function(content, filename = "file") { + let downloadAnchorNode = document.createElement("a"); + downloadAnchorNode.setAttribute("href", content); + downloadAnchorNode.setAttribute("download", filename); + downloadAnchorNode.click(); + downloadAnchorNode.remove(); + }, + filenameFromContentDisposition: function(xhr) { // https://stackoverflow.com/questions/40939380/how-to-get-file-name-from-content-disposition let filename = ""; diff --git a/services/web/client/source/resource/osparc/kz_1.jpg b/services/web/client/source/resource/osparc/kz_1.jpg new file mode 100644 index 00000000000..4fe1eecab5f Binary files /dev/null and b/services/web/client/source/resource/osparc/kz_1.jpg differ diff --git a/services/web/client/source/resource/osparc/kz_2.jpg b/services/web/client/source/resource/osparc/kz_2.jpg new file mode 100644 index 00000000000..fb8dcd7adeb Binary files /dev/null and b/services/web/client/source/resource/osparc/kz_2.jpg differ diff --git a/services/web/client/source/resource/osparc/kz_3.png b/services/web/client/source/resource/osparc/kz_3.png new file mode 100644 index 00000000000..07f004e2a70 Binary files /dev/null and b/services/web/client/source/resource/osparc/kz_3.png differ diff --git a/services/web/client/source/resource/osparc/kz_4.png b/services/web/client/source/resource/osparc/kz_4.png new file mode 100644 index 00000000000..2c582953dee Binary files /dev/null and b/services/web/client/source/resource/osparc/kz_4.png differ diff --git a/services/web/client/source/resource/osparc/ti_splitimage.png b/services/web/client/source/resource/osparc/ti_splitimage.png new file mode 100644 index 00000000000..28beb6e799c Binary files /dev/null and b/services/web/client/source/resource/osparc/ti_splitimage.png differ diff --git a/tests/public-api/examples/.gitignore b/tests/public-api/examples/.gitignore index 842f89d48da..2e81a7ed691 
100644 --- a/tests/public-api/examples/.gitignore +++ b/tests/public-api/examples/.gitignore @@ -2,3 +2,5 @@ *.json *.txt *.ipynb +# input files +!data_rabbit_cardiac/ diff --git a/tests/public-api/examples/data_rabbit_cardiac/initial_WTstates.txt b/tests/public-api/examples/data_rabbit_cardiac/initial_WTstates.txt new file mode 100644 index 00000000000..9e270032e81 --- /dev/null +++ b/tests/public-api/examples/data_rabbit_cardiac/initial_WTstates.txt @@ -0,0 +1 @@ +1.37502380000000e-03 9.87089040000000e-01 9.91792330000000e-01 7.01839599059895e-06 1.00067670000000e+00 2.71686070000000e-02 1.61672490000000e-02 4.30233143427199e-03 7.80717071021313e-01 4.30070614007387e-03 9.91776471924169e-01 1.92953960206578e-01 6.03487435980430e-02 7.83830949712806e-01 6.87394160908516e-07 1.89572432945224e-07 2.72749907930016e+00 5.94139243736699e-01 1.01817556868506e-02 1.07772490680837e-01 1.53675089037119e-02 2.53281010000000e-04 1.45194984664938e-03 1.38049879142935e-01 2.42453028953050e-03 7.94665117780574e-03 1.15032206558552e-02 7.72148958403062e-02 1.29512937945466e-01 1.22533949113428e+00 5.41856748595056e-01 5.66263070928663e+00 5.65803488782485e+00 5.65793960414529e+00 1.35000000000000e+02 1.87973841710593e-04 1.24175755576104e-04 9.80439326045149e-05 -8.46709654219819e+01 4.76724435751784e-01 0.00000000000000e+00 0.00000000000000e+00 2.54396524740325e+04 -1.61779918341570e+04 -1.89056393255094e+05 1.79868522042596e+05 2.63837800000000e-01 8.96327497908432e-02 8.42492982261963e-03 1.21975521245901e-03 3.26626538090676e-01 9.83564517952387e-03 1.12758648482790e-01 1.06074803099061e-02 1.54342185242217e-03 4.13351671573873e-01 1.23740090409898e-02 1.82464940000000e-08 2.17002140000000e-11 8.26062916474434e-01 2.24698596325533e-05 1.46097101414056e-02 1.08508307672484e-01 4.09476672435635e-05 5.07533657008860e-02 8.28518788627629e-01 2.34971524306373e-05 1.44586344628127e-02 1.07405533939698e-01 3.99761539610029e-05 4.95264603326507e-02 8.79587288126774e-01 2.27195074764805e-05 3.59555538012108e-03 2.67047140344325e-02 7.32547547699722e-05 9.00141866610291e-02 8.85784874410360e-01 2.31126063678609e-05 3.48258885399952e-03 2.58703452502185e-02 6.88339462452406e-05 8.47461860899517e-02 1.61197296381461e-06 2.08577373084540e-04 1.19065445556838e-01 7.58655435631373e-01 6.04551404486629e-02 9.48755677017729e-03 1.66090987262331e-05 0.00000000000000e+00 0.00000000000000e+00 2.39524978782952e-02 6.41644384969010e-11 1.19820407631612e-04 1.88032327044201e-05 3.28971137723882e-08 4.32110272330849e-16 4.14600638669982e-16 2.36953168780722e-13 1.50956358017391e-12 2.80183257033658e-02 8.58915687332272e-08 1.35149200867794e-08 2.45339571906417e-11 4.36074558006248e-08 0.00000000000000e+00 1.62668363490110e-24 6.41053442026113e-20 1.00686660060463e-20 1.79607163383628e-23 0.00000000000000e+00 0.00000000000000e+00 0.00000000000000e+00 0.00000000000000e+00 1.49843392018885e-304 0.00000000000000e+00 0.00000000000000e+00 0.00000000000000e+00 0.00000000000000e+00 0.00000000000000e+00 0.00000000000000e+00 4.64736956982462e-308 0.00000000000000e+00 0.00000000000000e+00 3.10083021551703e+02 1.52889432592164e+01 2.17165991156356e-02 0.00000000000000e+00 0.00000000000000e+00 0.00000000000000e+00 6.25423662824822e-01 1.70392706443729e-01 2.56713533507026e-04 1.42012427431816e-07 4.64851570532643e-08 3.24347125705691e-05 8.51926937302723e-04 6.95689012955649e-03 3.60966328077369e+00 5.43669998029717e-02 1.30758805639508e-04 1.35952373082443e-08 4.02034169944412e+00 2.54454801911598e+00 9.22860104152380e-05 
2.66162821507126e-05 2.05179346147960e-05 1.87751969476251e-10 1.92476135791525e-15 3.85036350012499e-13 7.50785280086099e-04 3.49663333278284e-06 8.33961549884873e-06 7.89029758893487e-05 5.47294143136563e-02 1.04661405029684e-04 2.44123287931858e-09 5.01983277671203e+00 5.44221357647924e-01 1.22873074199756e-05 2.11059375521054e-05 2.00550698354832e-07 3.42362146247288e-15 3.53881975068991e-20 2.51868615353371e-17 7.87540959809223e-05 3.68753352895029e-07 1.75011101599083e-07 1.25079095005594e-07 1.64543925700837e+01 1.09157042751614e+01 2.97357231348733e+02 7.31005853939613e+01 2.83951986455465e-01 2.40580245812117e-07 0.00000000000000e+00 2.47551246368891e-02 3.82712905888321e+00 2.76260657536887e-02 1.29022777091755e-15 3.73934129310940e-04 5.74188223353930e-02 6.54316118275670e-04 5.80191388872594e-02 5.20107532057645e-02 0.00000000000000e+00 4.15919308703744e-02 3.60000000000000e-02 0.00000000000000e+00 4.86632231379200e-01 7.16671468785635e-02 8.27172793207883e-03 3.17804779433581e-03 2.78595406619341e-01 8.07506891729879e-03 9.14572121457175e-06 8.81934076803921e-01 6.95895393319336e-04 8.21087151601780e-04 3.29852404512875e-03 2.19146094413653e-01 1.53192920196010e-03 1.53192920195145e-03 1.83611114431063e-03 4.05926843054623e-03 0.00000000000000e+00 diff --git a/tests/public-api/examples/data_rabbit_cardiac/ss1d_meta.json b/tests/public-api/examples/data_rabbit_cardiac/ss1d_meta.json new file mode 100644 index 00000000000..0733f63a5d0 --- /dev/null +++ b/tests/public-api/examples/data_rabbit_cardiac/ss1d_meta.json @@ -0,0 +1,128 @@ +{ + "inputs": { + "Na": { + "displayOrder": 1, + "label": "Na blocker", + "description": "Na blocker drug concentration", + "type": "integer", + "defaultValue": "0" + }, + "GKr": { + "displayOrder": 2, + "label": "GKr ratio", + "description": "GKr ratio", + "type": "integer", + "defaultValue": 1 + }, + "TotalSimulationTime": { + "displayOrder": 3, + "label": "Total simulation time", + "description": "Total simulation time", + "type": "integer", + "defaultValue": 300 + }, + "TargetHeartRatePhase1": { + "displayOrder": 4, + "label": "Target heart rate for phase 1", + "description": "Target heart rate for phase 1", + "type": "integer", + "defaultValue": 60 + }, + "TargetHeartRatePhase2": { + "displayOrder": 5, + "label": "Target heart rate for phase 2", + "description": "Target heart rate for phase 2", + "type": "integer", + "defaultValue": 150 + }, + "TargetHeartRatePhase3": { + "displayOrder": 6, + "label": "Target heart rate for phase 3", + "description": "Target heart rate for phase 3", + "type": "integer", + "defaultValue": 60 + }, + "cAMKII": { + "displayOrder": 7, + "label": "cAMKII activity levels", + "description": "cAMKII activity levels (expression = 'WT', 'OE', or 'KO'", + "type": "string", + "defaultValue": "WT" + }, + "tissue_size_tw": { + "displayOrder": 8, + "label": "Tissue width (# of cells)", + "description": "Tissue width (# of cells)", + "type": "integer", + "defaultValue": "165" + }, + "tissue_size_tl": { + "displayOrder": 9, + "label": "Tissue length (# of cells)", + "description": "Tissue length (# of cells)", + "type": "integer", + "defaultValue": "165" + }, + "Homogeneity": { + "displayOrder": 10, + "label": "Homogeneity", + "description": "Homogeneity (expression = 'homogeneous', or 'heterogeneous'", + "type": "string", + "defaultValue": "homogeneous" + }, + "initialWTStates": { + "displayOrder": 11, + "label": "Initial WT states", + "description": "Initial WT states", + "type": "data:text/plain", + "fileToKeyMap": { + 
"initial_WTstates.txt": "initialWTStates" + } + }, + "num_threads": { + "displayOrder": 12, + "label": "# threads", + "description": "Desired number of threads", + "type": "integer", + "defaultValue": 2 + } + }, + "outputs": { + "output_1": { + "displayOrder": 1, + "label": "ECGs", + "description": "ECGs", + "type": "data:text/plain", + "fileToKeyMap": { + "ECGs.txt": "output_1" + } + }, + "output_2": { + "displayOrder": 3, + "label": "APs", + "description": "APs", + "type": "data:text/plain", + "fileToKeyMap": { + "ap_1D.txt": "output_2" + } + }, + "output_3": { + "displayOrder": 4, + "label": "CAIs", + "description": "CAIs", + "type": "data:text/plain", + "fileToKeyMap": { + "cai_1D.txt": "output_3" + } + }, + "output_4": { + "displayOrder": 5, + "label": "Input from 1D", + "description": "Model input from 1D to 2D solver", + "type": "data:*/*", + "fileToKeyMap": { + "model_INPUT.from1D": "output_4" + } + } + } +} diff --git a/tests/public-api/examples/opencor.py b/tests/public-api/examples/opencor.py index fdff8ce63ff..12be8e987fa 100644 --- a/tests/public-api/examples/opencor.py +++ b/tests/public-api/examples/opencor.py @@ -4,6 +4,8 @@ $ make install-ci $ make .env + +Based on example https://github.com/nih-sparc/sparc-api/blob/v1.5.0/app/osparc.py """ import json import os diff --git a/tests/public-api/examples/rabbit_cardiac_ss1d.py b/tests/public-api/examples/rabbit_cardiac_ss1d.py new file mode 100644 index 00000000000..39d7e59deb2 --- /dev/null +++ b/tests/public-api/examples/rabbit_cardiac_ss1d.py @@ -0,0 +1,138 @@ +""" +Multi-scale rabbit cardiac electrophysiology models +Rabbit Soltis-Saucerman model with full b-AR signalling (Rabbit SS 1D cardiac) + + $ cd examples + $ make install-ci + $ make .env + +SEE https://sparc.science/datasets/4?type=dataset +""" +import os +import sys +import time +from pathlib import Path +from time import sleep +from typing import Optional + +import osparc +from dotenv import load_dotenv +from osparc.models import File, JobStatus + +assert osparc.__version__ == "0.4.3" + +current_dir = Path(sys.argv[0] if __name__ == "__main__" else __file__).resolve().parent +data_dir = current_dir / "data_rabbit_cardiac" + +load_dotenv() +cfg = osparc.Configuration( + host=os.environ.get("OSPARC_API_URL", "http://127.0.0.1:8006"), + username=os.environ["OSPARC_API_KEY"], + password=os.environ["OSPARC_API_SECRET"], +) +print("Entrypoint", cfg.host) + + +with osparc.ApiClient(cfg) as api_client: + # Upload init states file. + + files_api = osparc.FilesApi(api_client) + initial_wtstates_file = files_api.upload_file( + str(data_dir / "initial_WTstates.txt") + ) + + # Create our simulation. + + solvers_api = osparc.SolversApi(api_client) + + solver = solvers_api.get_solver_release( + "simcore/services/comp/rabbit-ss-1d-cardiac-model", "1.0.0" + ) + + # SEE data_rabbit_cardiac/ss1d_meta.json::inputs + job = solvers_api.create_job( + solver.id, + solver.version, + osparc.JobInputs( + { + "Na": 0, + "GKr": 1, + "TotalSimulationTime": 50, + "TargetHeartRatePhase1": 60, + "TargetHeartRatePhase2": 150, + "TargetHeartRatePhase3": 60, + "cAMKII": "WT", + "tissue_size_tw": 165, + "tissue_size_tl": 165, + "Homogeneity": "homogeneous", + "num_threads": 4, + "initialWTStates": initial_wtstates_file, + } + ), + ) + print("Job created", job) + + # Start our simulation. + status = solvers_api.start_job(solver.id, solver.version, job.id) + start_t = time.perf_counter() + + # Check the status of our simulation until it has completed. 
+ while True: + status = solvers_api.inspect_job(solver.id, solver.version, job.id) + + print( + f">>> Progress: {status.progress}% ", + f"[elapsed:{time.perf_counter() - start_t:4.2f}s]...", + flush=True, + ) + + if status.progress == 100: + break + + sleep(1) + + # Retrieve our simulation outputs. + + print("---------------------------------------") + last_status: JobStatus = solvers_api.inspect_job(solver.id, solver.version, job.id) + print(">>> What is the status?", last_status) + + outputs = solvers_api.get_job_outputs(solver.id, solver.version, job.id) + + # SEE data_rabbit_cardiac/ss1d_meta.json::outputs + for output_name, result in outputs.results.items(): + print(f">>> {output_name} = {result}") + + # Retrieve our simulation results. + + print("---------------------------------------") + result: Optional[File] + + for output_name, result in outputs.results.items(): + if result is None: + print( + f"Can't retrieve our simulation results {output_name}...?!", + "Failed ?", + last_status.state, + "Finished ?", + last_status.progress == 100 or not last_status.stopped_at, + ) + else: + + # Print out the id of our simulation results file (?). + + print("---------------------------------------") + print(">>> ", result.id) + + # Download our simulation results file (?). + + download_path: str = files_api.download_file(result.id) + print("Downloaded to", download_path) + print("Content-Type: ", result.content_type) + if result.content_type == "text/plain": + print("Result:", Path(download_path).read_text()[:100]) + print("Status: ", Path(download_path).stat()) + + # List all the files that are available. + print("---------------------------------------") + print(files_api.list_files())
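Editor's note: as a companion to the hard-coded inputs dictionary in rabbit_cardiac_ss1d.py, the sketch below shows how the defaults recorded in data_rabbit_cardiac/ss1d_meta.json could be used to assemble the job inputs programmatically. This is a minimal illustration, not part of the PR: the build_inputs helper and its overrides argument are hypothetical, and file-type inputs (here initialWTStates) are assumed to be passed as File objects already uploaded with FilesApi.upload_file.

    import json
    from pathlib import Path

    import osparc


    def build_inputs(meta_path: Path, overrides: dict) -> osparc.JobInputs:
        """Assemble JobInputs from the defaults in ss1d_meta.json plus explicit overrides."""
        meta = json.loads(meta_path.read_text())
        values = {}
        for name, spec in meta["inputs"].items():
            if name in overrides:
                values[name] = overrides[name]
            elif "defaultValue" in spec:
                # some defaults are stored as strings (e.g. "0", "165"); coerce integer inputs
                default = spec["defaultValue"]
                values[name] = int(default) if spec["type"] == "integer" else default
        return osparc.JobInputs(values)


    # usage (assumes initial_wtstates_file was uploaded as in the example above):
    # inputs = build_inputs(
    #     Path("data_rabbit_cardiac/ss1d_meta.json"),
    #     overrides={
    #         "TotalSimulationTime": 50,
    #         "num_threads": 4,
    #         "initialWTStates": initial_wtstates_file,
    #     },
    # )
    # job = solvers_api.create_job(solver.id, solver.version, inputs)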
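Editor's note: the example above polls inspect_job until progress reaches 100, which can loop forever if the job fails. Below is a more defensive polling loop sketched against the same osparc 0.4.3 client used in the script; the TERMINAL_STATES values and the timeout are assumptions for illustration, not something this PR defines.

    import time

    import osparc

    # states in which the solver will make no further progress (assumed values)
    TERMINAL_STATES = {"SUCCESS", "FAILED", "ABORTED"}


    def wait_for_job(solvers_api, solver, job, timeout_s: float = 600.0, poll_s: float = 1.0):
        """Poll a job until it reaches a terminal state (or 100% progress) or the timeout expires."""
        start = time.perf_counter()
        while True:
            status = solvers_api.inspect_job(solver.id, solver.version, job.id)
            elapsed = time.perf_counter() - start
            print(f">>> Progress: {status.progress}% [elapsed: {elapsed:4.2f}s]", flush=True)
            if status.state in TERMINAL_STATES or status.progress == 100:
                return status
            if elapsed > timeout_s:
                raise TimeoutError(
                    f"Job {job.id} did not finish within {timeout_s}s (last state: {status.state})"
                )
            time.sleep(poll_s)


    # usage:
    # last_status = wait_for_job(solvers_api, solver, job)
    # assert last_status.progress == 100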