Skip to content

Commit

Permalink
Merge branch 'master' into maintenance/vulnerability-ujson
Browse files Browse the repository at this point in the history
  • Loading branch information
pcrespov committed Jun 30, 2022
2 parents 39d9f2e + 1449f68 commit 0598c50
Show file tree
Hide file tree
Showing 139 changed files with 4,382 additions and 2,424 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/ci-testing-deploy.yml
Expand Up @@ -14,6 +14,8 @@ on:
- "!.vscode/**"
- "**.py"
- "**.js"
- "**/requirements/*.txt"
- "**.json"
- ".github/workflows/ci-testing-deploy.yml"
pull_request:
branches:
Expand All @@ -27,6 +29,8 @@ on:
- "!.vscode-template/**"
- "**.py"
- "**.js"
- "**/requirements/*.txt"
- "**.json"
- ".github/workflows/ci-testing-deploy.yml"

env:
Expand Down
13 changes: 10 additions & 3 deletions .pre-commit-config.yaml
Expand Up @@ -6,28 +6,35 @@ repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files
args: ["--maxkb=1024"]
- id: check-merge-conflict
- id: check-yaml
- id: detect-private-key
- id: end-of-file-fixer
- id: trailing-whitespace
# NOTE: Keep order as pyupgrade (will update code) then pycln (remove unused imports), then isort (sort them) and black (final formatting)
- repo: https://github.com/asottile/pyupgrade
rev: v2.34.0
hooks:
- id: pyupgrade
args:
- "--py39-plus"
name: upgrade code
- repo: https://github.com/hadialqattan/pycln
rev: v1.2.5
hooks:
- id: pycln
args: [--all, --expand-stars]
name: prune imports
- repo: https://github.com/PyCQA/isort
rev: 5.10.1
hooks:
- id: isort
args: ["--profile", "black"]
name: sort imports
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
name: format code
4 changes: 2 additions & 2 deletions mypy.ini
Expand Up @@ -19,8 +19,8 @@ disallow_any_generics = True
check_untyped_defs = True
no_implicit_reexport = True

# for strict mypy: (this is the tricky one :-))
disallow_untyped_defs = True
# If True, it enforces things like `def __init__(self) -> CLASSNAME` or `def test_() -> None`, which is not worth the effort
disallow_untyped_defs = False

# removes all the missing imports stuff from external libraries which is annoying to the least
ignore_missing_imports = True
Expand Down
6 changes: 3 additions & 3 deletions packages/dask-task-models-library/requirements/_base.txt
Expand Up @@ -12,17 +12,17 @@ cloudpickle==2.0.0
# via
# dask
# distributed
dask==2022.05.0
dask==2022.6.0
# via
# -r requirements/_base.in
# distributed
distributed==2022.05.0
distributed==2022.6.0
# via dask
dnspython==2.2.1
# via email-validator
email-validator==1.2.1
# via pydantic
fsspec==2022.3.0
fsspec==2022.5.0
# via dask
heapdict==1.0.1
# via zict
Expand Down
2 changes: 2 additions & 0 deletions packages/models-library/src/models_library/services.py
Expand Up @@ -4,6 +4,7 @@
python -c "from models_library.services import ServiceDockerData as cls; print(cls.schema_json(indent=2))" > services-schema.json
"""

from datetime import datetime
from enum import Enum
from typing import Any, Optional, Union

Expand Down Expand Up @@ -562,6 +563,7 @@ class ServiceMetaData(_BaseServiceCommonDataModel):
name: Optional[str]
thumbnail: Optional[HttpUrl]
description: Optional[str]
deprecated: Optional[datetime]

# user-defined metatada
classifiers: Optional[list[str]]
Expand Down
5 changes: 3 additions & 2 deletions packages/models-library/src/models_library/services_db.py
Expand Up @@ -3,7 +3,7 @@
NOTE: to dump json-schema from CLI use
python -c "from models_library.services import ServiceDockerData as cls; print(cls.schema_json(indent=2))" > services-schema.json
"""
from typing import List, Optional
from typing import Optional

from pydantic import Field
from pydantic.types import PositiveInt
Expand All @@ -19,7 +19,7 @@

class ServiceMetaDataAtDB(ServiceKeyVersion, ServiceMetaData):
# for a partial update all members must be Optional
classifiers: Optional[List[str]] = Field([])
classifiers: Optional[list[str]] = Field([])
owner: Optional[PositiveInt]

class Config:
Expand All @@ -34,6 +34,7 @@ class Config:
"thumbnail": "http://thumbnailit.org/image",
"created": "2021-01-18 12:46:57.7315",
"modified": "2021-01-19 12:45:00",
"deprecated": "2099-01-19 12:45:00",
"quality": {
"enabled": True,
"tsr_target": {
Expand Down
Expand Up @@ -6,13 +6,14 @@
import json
import warnings
from datetime import datetime
from typing import Any, Callable, Container, Dict, List, Optional, Tuple, Type
from typing import Any, Callable, Container, Optional
from uuid import UUID

import sqlalchemy as sa
import sqlalchemy.sql.functions
from pydantic import BaseConfig, BaseModel, Field, create_model
from pydantic.types import NonNegativeInt
from sqlalchemy import null
from sqlalchemy.sql.schema import Column

warnings.warn(
Expand All @@ -34,7 +35,7 @@ class OrmConfig(BaseConfig):


def _eval_defaults(
column: Column, pydantic_type: Type, *, include_server_defaults: bool = True
column: Column, pydantic_type: type, *, include_server_defaults: bool = True
):
"""
Uses some heuristics to determine the default value/factory produced
Expand Down Expand Up @@ -74,16 +75,17 @@ def _eval_defaults(
)
elif issubclass(pydantic_type, datetime):
assert isinstance( # nosec
column.server_default.arg, sqlalchemy.sql.functions.now
column.server_default.arg,
(type(null()), sqlalchemy.sql.functions.now),
)
default_factory = datetime.now
return default, default_factory


PolicyCallable = Callable[[Column, Any, Type], Tuple[Any, Type]]
PolicyCallable = Callable[[Column, Any, type], tuple[Any, type]]


def eval_name_policy(column: Column, default: Any, pydantic_type: Type):
def eval_name_policy(column: Column, default: Any, pydantic_type: type):
"""All string columns including 'uuid' in their name are set as UUIDs"""
new_default, new_pydantic_type = default, pydantic_type
if "uuid" in str(column.name).split("_") and pydantic_type == str:
Expand All @@ -101,11 +103,11 @@ def eval_name_policy(column: Column, default: Any, pydantic_type: Type):
def create_pydantic_model_from_sa_table(
table: sa.Table,
*,
config: Type = OrmConfig,
config: type = OrmConfig,
exclude: Optional[Container[str]] = None,
include_server_defaults: bool = False,
extra_policies: Optional[List[PolicyCallable]] = None,
) -> Type[BaseModel]:
extra_policies: Optional[list[PolicyCallable]] = None,
) -> type[BaseModel]:

fields = {}
exclude = exclude or []
Expand All @@ -117,7 +119,7 @@ def create_pydantic_model_from_sa_table(
if name in exclude:
continue

field_args: Dict[str, Any] = {}
field_args: dict[str, Any] = {}

if name in _RESERVED:
field_args["alias"] = name
Expand Down
Expand Up @@ -2,6 +2,10 @@

Generic single-database configuration.

### NOTE: THIS IS DEPRECATED!

This does not need to be run if you want to use alembic with simcore, as the folder-init is already done. Instead, navigate your shell to `osparc-simcore/packages/postgres-database/` and follow the instructions in the Makefile there. Only use this Makefile to learn about the alembic tool; don't actually execute the commands listed here!


## Basic workflow

Expand Down Expand Up @@ -78,8 +82,8 @@ We create a revision script for the change by using the local db as follows:
pip install -r packages/postgres-database/requirements/dev.txt # install sc-pg package
docker-compose -f services/docker-compose.yml -f services/docker-compose-ops.yml up adminer # bring db and ui up
docker ps # find the published port for the db
sc-pg discover -u simcore -p simcore --port=32787 # discover the db
sc-pg info # what revision are we at?
sc-pg discover -u scu -p adminadmin --port=5432 # discover the db
sc-pg info # what revision are we at?
sc-pg upgrade head # go to latest if necessary
sc-pg review -m "Altered_table_why" # create a revision, note: the string will be part of the script
sc-pg upgrade head # apply the revision
Expand Down
@@ -0,0 +1,30 @@
"""add services_meta_data deprecation date
Revision ID: cf3bac482ce0
Revises: 90ed05df20d7
Create Date: 2022-06-21 14:43:57.915189+00:00
"""
import sqlalchemy as sa
from alembic import op

# revision identifiers, used by Alembic.
revision = "cf3bac482ce0"
down_revision = "90ed05df20d7"
branch_labels = None
depends_on = None


def downgrade():
    """Revert :func:`upgrade`: drop the 'deprecated' column from services_meta_data."""
    table_name, column_name = "services_meta_data", "deprecated"
    op.drop_column(table_name, column_name)


def upgrade():
    """Add a nullable 'deprecated' timestamp column to services_meta_data.

    NULL means the service is not deprecated; a datetime value marks the
    deprecation date.
    """
    deprecation_column = sa.Column(
        "deprecated",
        sa.DateTime(),
        autoincrement=False,
        nullable=True,
    )
    op.add_column("services_meta_data", deprecation_column)
Expand Up @@ -5,6 +5,7 @@
"""

import sqlalchemy as sa
from sqlalchemy import null
from sqlalchemy.dialects.postgresql import ARRAY, JSONB
from sqlalchemy.sql import expression, func

Expand Down Expand Up @@ -69,6 +70,13 @@
onupdate=func.now(),
doc="Timestamp with last update",
),
sa.Column(
"deprecated",
sa.DateTime(),
nullable=True,
server_default=null(),
doc="Timestamp with deprecation date",
),
sa.Column(
"quality",
JSONB,
Expand Down
20 changes: 7 additions & 13 deletions packages/pytest-simcore/src/pytest_simcore/docker_swarm.py
Expand Up @@ -31,24 +31,16 @@
# HELPERS --------------------------------------------------------------------------------


class _NotInSwarmException(Exception):
pass


class _ResourceStillNotRemoved(Exception):
pass


def _in_docker_swarm(
docker_client: docker.client.DockerClient, raise_error: bool = False
) -> bool:
def _is_docker_swarm_init(docker_client: docker.client.DockerClient) -> bool:
    """Return True if the docker engine behind ``docker_client`` is a swarm member.

    Probes the engine by reloading the swarm attributes; the docker SDK raises
    ``APIError`` when the node is not part of a swarm, which is mapped to False.
    """
    try:
        docker_client.swarm.reload()
        inspect_result = docker_client.swarm.attrs
        # a swarm member exposes its swarm attributes as a dict
        assert type(inspect_result) == dict  # nosec
    except APIError:
        # the engine reports it is not part of a swarm
        return False
    return True

Expand Down Expand Up @@ -147,6 +139,7 @@ def _fetch_and_print_services(
def docker_client() -> Iterator[docker.client.DockerClient]:
client = docker.from_env()
yield client
client.close()


@pytest.fixture(scope="session")
Expand All @@ -164,14 +157,13 @@ def docker_swarm(
wait=wait_fixed(2), stop=stop_after_delay(15), reraise=True
):
with attempt:
if not _in_docker_swarm(docker_client):
if not _is_docker_swarm_init(docker_client):
print("--> initializing docker swarm...")
docker_client.swarm.init(advertise_addr=get_localhost_ip())
print("--> docker swarm initialized.")
# if still not in swarm, raise an error to try and initialize again
_in_docker_swarm(docker_client, raise_error=True)

assert _in_docker_swarm(docker_client) is True
# if still not in swarm, raise an error to try and initialize again
assert _is_docker_swarm_init(docker_client)

yield

Expand All @@ -180,6 +172,8 @@ def docker_swarm(
assert docker_client.swarm.leave(force=True)
print("<-- docker swarm left.")

assert _is_docker_swarm_init(docker_client) is keep_docker_up


@pytest.fixture(scope="module")
def docker_stack(
Expand Down
Expand Up @@ -2,12 +2,18 @@
# pylint: disable=unused-argument
# pylint: disable=unused-variable

import warnings
from typing import Iterator

import pytest
from _pytest.fixtures import FixtureRequest
from _pytest.monkeypatch import MonkeyPatch

warnings.warn(
f"{__name__} is deprecated, we highly recommend to use pytest.monkeypatch at function-scope level."
"Large scopes lead to complex problems during tests",
DeprecationWarning,
)
# Some extras to overcome https://github.com/pytest-dev/pytest/issues/363
# SEE https://github.com/pytest-dev/pytest/issues/363#issuecomment-289830794

Expand Down
Expand Up @@ -96,6 +96,13 @@ def pylintrc(osparc_simcore_root_dir: Path) -> Path:
@pytest.fixture(scope="session")
def project_slug_dir() -> Path:
raise NotImplementedError("Override fixture in project's tests/conftest.py")
#
# Implementation example
# folder = CURRENT_DIR.parent
# assert folder.exists()
# assert any(folder.glob("src/simcore_service_dynamic_sidecar"))
# return folder
#


@pytest.fixture(scope="session")
Expand Down
Expand Up @@ -4,8 +4,13 @@


class MixinLoggingSettings:
"""
USAGE example in packages/settings-library/tests/test_utils_logging.py::test_mixin_logging
"""

@classmethod
def validate_log_level(cls, value: Any) -> str:
"""Standard implementation for @validator("LOG_LEVEL")"""
try:
getattr(logging, value.upper())
except AttributeError as err:
Expand Down
1 change: 1 addition & 0 deletions packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py
Expand Up @@ -331,6 +331,7 @@ async def set(self, new_value: ItemConcreteValue) -> None:
"""
await self._set(new_concrete_value=new_value)
await self._node_ports.save_to_db_cb(self._node_ports)


async def set_value(self, new_item_value: Optional[ItemValue]) -> None:
"""set the value on the port using an item-value
Expand Down

0 comments on commit 0598c50

Please sign in to comment.