Skip to content

Commit

Permalink
chore: sync start changes with serve
Browse files — browse the repository at this point in the history
Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
  • Branch information
aarnphm committed Oct 2, 2022
1 parent 91a1887 commit ec85e25
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 8 deletions.
8 changes: 8 additions & 0 deletions bentoml/_internal/configuration/v1/__init__.py
Expand Up @@ -4,6 +4,7 @@

import schema as s

from ....exceptions import BentoMLConfigException
from ..helpers import ensure_range
from ..helpers import flatten_dict
from ..helpers import rename_fields
Expand Down Expand Up @@ -134,6 +135,13 @@


def migrate_to_v2(*, override_config: dict[str, t.Any]):
try:
SCHEMA.validate(override_config)
except s.SchemaError as e:
raise BentoMLConfigException(
f"Invalid configuration file was given:\n{e}"
) from None

# We will use a flattened config to make it easier to migrate,
# Then we will convert it back to a nested config.
flatten = dict(flatten_dict(override_config))
Expand Down
11 changes: 4 additions & 7 deletions bentoml/start.py
Expand Up @@ -3,7 +3,6 @@
import os
import sys
import json
import math
import typing as t
import logging
import contextlib
Expand Down Expand Up @@ -122,7 +121,7 @@ def start_http_server(
port: int = Provide[BentoMLContainer.api_server_config.port],
host: str = Provide[BentoMLContainer.api_server_config.host],
backlog: int = Provide[BentoMLContainer.api_server_config.backlog],
api_workers: int | None = None,
api_workers: int = Provide[BentoMLContainer.api_server_workers],
ssl_certfile: str | None = Provide[BentoMLContainer.api_server_config.ssl.certfile],
ssl_keyfile: str | None = Provide[BentoMLContainer.api_server_config.ssl.keyfile],
ssl_keyfile_password: str
Expand All @@ -140,7 +139,6 @@ def start_http_server(
from .serve import construct_ssl_args
from .serve import PROMETHEUS_MESSAGE
from .serve import ensure_prometheus_dir
from ._internal.resource import CpuResource
from ._internal.utils.circus import create_standalone_arbiter
from ._internal.utils.analytics import track_serve

Expand Down Expand Up @@ -200,7 +198,7 @@ def start_http_server(
),
],
working_dir=working_dir,
numprocesses=api_workers or math.ceil(CpuResource.from_system()),
numprocesses=api_workers,
)
)
if BentoMLContainer.api_server_config.metrics.enabled.get():
Expand Down Expand Up @@ -235,7 +233,7 @@ def start_grpc_server(
port: int = Provide[BentoMLContainer.grpc.port],
host: str = Provide[BentoMLContainer.grpc.host],
backlog: int = Provide[BentoMLContainer.api_server_config.backlog],
api_workers: int | None = None,
api_workers: int = Provide[BentoMLContainer.api_server_workers],
reflection: bool = Provide[BentoMLContainer.grpc.reflection.enabled],
max_concurrent_streams: int
| None = Provide[BentoMLContainer.grpc.max_concurrent_streams],
Expand All @@ -247,7 +245,6 @@ def start_grpc_server(
from .serve import ensure_prometheus_dir
from .serve import PROMETHEUS_SERVER_NAME
from ._internal.utils import reserve_free_port
from ._internal.resource import CpuResource
from ._internal.utils.circus import create_standalone_arbiter
from ._internal.utils.analytics import track_serve

Expand Down Expand Up @@ -304,7 +301,7 @@ def start_grpc_server(
args=args,
use_sockets=False,
working_dir=working_dir,
numprocesses=api_workers or math.ceil(CpuResource.from_system()),
numprocesses=api_workers,
)
)

Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Expand Up @@ -379,7 +379,7 @@ exclude = [
"bazel-*",
]
analysis.useLibraryCodeForTypes = true
analysis.stubPath = "./typings/"
analysis.stubPath = "typings/"
strictListInference = true
strictDictionaryInference = true
strictSetInference = true
Expand Down

0 comments on commit ec85e25

Please sign in to comment.