chore: sync start changes with serve
Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
aarnphm committed Oct 2, 2022
1 parent 91a1887 commit c20fa0c
Showing 1 changed file with 4 additions and 7 deletions.
11 changes: 4 additions & 7 deletions bentoml/start.py
@@ -3,7 +3,6 @@
 import os
 import sys
 import json
-import math
 import typing as t
 import logging
 import contextlib
@@ -122,7 +121,7 @@ def start_http_server(
 port: int = Provide[BentoMLContainer.api_server_config.port],
 host: str = Provide[BentoMLContainer.api_server_config.host],
 backlog: int = Provide[BentoMLContainer.api_server_config.backlog],
-api_workers: int | None = None,
+api_workers: int = Provide[BentoMLContainer.api_server_workers],
 ssl_certfile: str | None = Provide[BentoMLContainer.api_server_config.ssl.certfile],
 ssl_keyfile: str | None = Provide[BentoMLContainer.api_server_config.ssl.keyfile],
 ssl_keyfile_password: str
@@ -140,7 +139,6 @@ def start_http_server(
 from .serve import construct_ssl_args
 from .serve import PROMETHEUS_MESSAGE
 from .serve import ensure_prometheus_dir
-from ._internal.resource import CpuResource
 from ._internal.utils.circus import create_standalone_arbiter
 from ._internal.utils.analytics import track_serve

@@ -200,7 +198,7 @@ def start_http_server(
 ),
 ],
 working_dir=working_dir,
-numprocesses=api_workers or math.ceil(CpuResource.from_system()),
+numprocesses=api_workers,
 )
 )
 if BentoMLContainer.api_server_config.metrics.enabled.get():
@@ -235,7 +233,7 @@ def start_grpc_server(
 port: int = Provide[BentoMLContainer.grpc.port],
 host: str = Provide[BentoMLContainer.grpc.host],
 backlog: int = Provide[BentoMLContainer.api_server_config.backlog],
-api_workers: int | None = None,
+api_workers: int = Provide[BentoMLContainer.api_server_workers],
 reflection: bool = Provide[BentoMLContainer.grpc.reflection.enabled],
 max_concurrent_streams: int
 | None = Provide[BentoMLContainer.grpc.max_concurrent_streams],
@@ -247,7 +245,6 @@
 from .serve import ensure_prometheus_dir
 from .serve import PROMETHEUS_SERVER_NAME
 from ._internal.utils import reserve_free_port
-from ._internal.resource import CpuResource
 from ._internal.utils.circus import create_standalone_arbiter
 from ._internal.utils.analytics import track_serve

@@ -304,7 +301,7 @@ def start_grpc_server(
 args=args,
 use_sockets=False,
 working_dir=working_dir,
-numprocesses=api_workers or math.ceil(CpuResource.from_system()),
+numprocesses=api_workers,
 )
 )
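
For context: the change drops the per-function fallback `api_workers or math.ceil(CpuResource.from_system())` and lets the dependency-injection container supply the worker count, so `start_http_server`/`start_grpc_server` resolve it the same way `serve` does. Below is a minimal, hedged sketch of that pattern in plain Python; `SketchContainer` and its `api_server_workers` helper are illustrative stand-ins, not BentoML's real `BentoMLContainer` or the `simple_di` API.

from __future__ import annotations

import math
import os


class SketchContainer:
    """Stands in for a configuration container that owns the default worker count."""

    @staticmethod
    def api_server_workers() -> int:
        # One shared place for the default, instead of each start_* function
        # recomputing a CPU-based fallback on its own.
        return math.ceil(os.cpu_count() or 1)


def start_http_server(api_workers: int | None = None) -> None:
    # Resolve the default from the container; an explicit argument still wins.
    workers = api_workers if api_workers is not None else SketchContainer.api_server_workers()
    print(f"spawning {workers} HTTP API worker process(es)")


start_http_server()   # default resolved from the container
start_http_server(2)  # caller-supplied value takes precedence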

