chore: fixing bad merge
Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
aarnphm committed Aug 14, 2022
1 parent acf482d commit 35265b6
Showing 31 changed files with 662 additions and 372 deletions.
4 changes: 4 additions & 0 deletions MANIFEST.in
@@ -1,11 +1,14 @@
# All files tracked by Git are included in PyPI distribution
include bentoml/grpc/**/*_pb2*.py
include bentoml/grpc/**/*.pyi

# Files to exclude in PyPI distribution
exclude CONTRIBUTING.md GOVERNANCE.md CODE_OF_CONDUCT.md DEVELOPMENT.md SECURITY.md
exclude Makefile MANIFEST.in
exclude *.yml
exclude .git*
exclude bentoml/grpc/buf.yaml
exclude bentoml/_internal/frameworks/FRAMEWORK_TEMPLATE_PY

# Directories to exclude in PyPI package
prune requirements
@@ -19,6 +22,7 @@ prune .git*
prune */__pycache__
prune */.DS_Store
prune */.ipynb_checkpoints
prune */README*

# Patterns to exclude from any directory
global-exclude *.py[cod]
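
The MANIFEST.in changes above ship the generated gRPC stubs (*_pb2*.py and *.pyi) in the sdist while pruning README files and bentoml/grpc/buf.yaml. A quick sanity check, not part of this commit, is to list the contents of an already-built sdist; the snippet below assumes a tarball exists under dist/ (e.g. from `python -m build --sdist`).

import glob
import tarfile

# Pick the most recently named sdist under dist/ (assumes one was built beforehand).
sdist_path = sorted(glob.glob("dist/*.tar.gz"))[-1]
with tarfile.open(sdist_path) as sdist:
    for name in sdist.getnames():
        # Stub files should appear; README files and buf.yaml should not.
        if "grpc" in name or name.endswith(("README.md", "buf.yaml")):
            print(name)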
2 changes: 1 addition & 1 deletion bentoml/_internal/bento/build_config.py
@@ -484,7 +484,7 @@ def write_to_bento(self, bento_fs: FS, build_ctx: str) -> None:
install_script_content = (
"""\
#!/usr/bin/env bash
set -ex
set -exuo pipefail
# Parent directory https://stackoverflow.com/a/246128/8643197
BASEDIR="$( cd -- "$( dirname -- "${BASH_SOURCE[0]:-$0}"; )" &> /dev/null && pwd 2> /dev/null; )"
17 changes: 8 additions & 9 deletions bentoml/_internal/bento/build_dev_bentoml_whl.py
@@ -4,13 +4,15 @@
import logging

from bentoml.exceptions import BentoMLException
from bentoml.exceptions import MissingDependencyException

from ..utils.pkg import source_locations
from ..configuration import is_pypi_installed_bentoml

logger = logging.getLogger(__name__)

BENTOML_DEV_BUILD = "BENTOML_BUNDLE_LOCAL_BUILD"
_exc_message = f"'{BENTOML_DEV_BUILD}=True', which requires the 'pypa/build' package. Install development dependencies with 'pip install -r requirements/dev-requirements.txt' and try again."


def build_bentoml_editable_wheel(target_path: str) -> None:
@@ -26,6 +28,12 @@ def build_bentoml_editable_wheel(target_path: str) -> None:
# skip this entirely if BentoML is installed from PyPI
return

try:
from build import ProjectBuilder
from build.env import IsolatedEnvBuilder
except ModuleNotFoundError:
raise MissingDependencyException(_exc_message)

# Find bentoml module path
module_location = source_locations("bentoml")
if not module_location:
@@ -41,15 +49,6 @@ def build_bentoml_editable_wheel(target_path: str) -> None:
"BentoML is installed in `editable` mode; building BentoML distribution with the local BentoML code base. The built wheel file will be included in the target bento."
)

try:
from build.env import IsolatedEnvBuilder

from build import ProjectBuilder
except ModuleNotFoundError:
raise BentoMLException(
f"'{BENTOML_DEV_BUILD}=True', which requires the 'pypa/build' package. Install development dependencies with 'pip install -r requirements/dev-requirements.txt' and try again."
)

with IsolatedEnvBuilder() as env:
builder = ProjectBuilder(os.path.dirname(pyproject))
builder.python_executable = env.executable
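
The hunk above moves the pypa/build import guard to the top of build_bentoml_editable_wheel and raises MissingDependencyException instead of a generic BentoMLException. For context, the guarded imports are typically used roughly as follows to build a wheel from a local checkout; this is an illustrative sketch, and the helper name and paths are not from the diff.

from build import ProjectBuilder
from build.env import IsolatedEnvBuilder


def build_local_wheel(project_dir: str, out_dir: str) -> str:
    """Build a wheel for the project at project_dir into out_dir."""
    with IsolatedEnvBuilder() as env:
        builder = ProjectBuilder(project_dir)
        # Run the PEP 517 backend with the isolated environment's interpreter.
        builder.python_executable = env.executable
        builder.scripts_dir = env.scripts_dir
        # Install the build-system requirements declared in pyproject.toml.
        env.install(builder.build_system_requires)
        env.install(builder.get_requires_for_build("wheel"))
        return builder.build("wheel", out_dir)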
4 changes: 4 additions & 0 deletions bentoml/_internal/bento/docker/templates/base.j2
@@ -75,6 +75,10 @@ RUN {{ __setup_script__ }}
{% block SETUP_BENTO_ENTRYPOINT %}
# Default port for BentoServer
EXPOSE 3000
{% if __options__grpc is true %}
# expose Prometheus port
EXPOSE 9090
{% endif %}

RUN chmod +x {{ bento__entrypoint }}

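
The template change above exposes the Prometheus metrics port 9090 only when the image is built with gRPC enabled. Below is a standalone sketch of how the conditional renders; the variable name __options__grpc comes from the template, everything else is illustrative and not BentoML's actual template loader.

from jinja2 import Template

snippet = """\
EXPOSE 3000
{% if __options__grpc is true %}
# expose Prometheus port
EXPOSE 9090
{% endif %}
"""

print(Template(snippet).render(__options__grpc=True))   # gRPC image: ports 3000 and 9090
print(Template(snippet).render(__options__grpc=False))  # HTTP-only image: port 3000 only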
23 changes: 8 additions & 15 deletions bentoml/_internal/configuration/containers.py
@@ -395,30 +395,23 @@ def tracer_provider(

if tracer_type == "zipkin" and zipkin_server_url is not None:
# pylint: disable=no-name-in-module # https://github.com/open-telemetry/opentelemetry-python-contrib/issues/290
from opentelemetry.exporter.zipkin.json import (
ZipkinExporter, # type: ignore (no opentelemetry types)
)
from opentelemetry.exporter.zipkin.json import ZipkinExporter

exporter = ZipkinExporter( # type: ignore (no opentelemetry types)
endpoint=zipkin_server_url,
)
provider.add_span_processor(BatchSpanProcessor(exporter)) # type: ignore (no opentelemetry types)
exporter = ZipkinExporter(endpoint=zipkin_server_url)
provider.add_span_processor(BatchSpanProcessor(exporter))
return provider
elif (
tracer_type == "jaeger"
and jaeger_server_address is not None
and jaeger_server_port is not None
):
# pylint: disable=no-name-in-module # https://github.com/open-telemetry/opentelemetry-python-contrib/issues/290
from opentelemetry.exporter.jaeger.thrift import (
JaegerExporter, # type: ignore (no opentelemetry types)
)
from opentelemetry.exporter.jaeger.thrift import JaegerExporter

exporter = JaegerExporter( # type: ignore (no opentelemetry types)
agent_host_name=jaeger_server_address,
agent_port=jaeger_server_port,
exporter = JaegerExporter(
agent_host_name=jaeger_server_address, agent_port=jaeger_server_port
)
provider.add_span_processor(BatchSpanProcessor(exporter)) # type: ignore (no opentelemetry types)
provider.add_span_processor(BatchSpanProcessor(exporter))
return provider
else:
return provider
@@ -436,7 +429,7 @@ def tracing_excluded_urls(
if isinstance(excluded_urls, list):
return ExcludeList(excluded_urls)
else:
return parse_excluded_urls(excluded_urls)
return parse_excluded_urls(excluded_urls) # type: ignore (already handle None)

# Mapping from runner name to RunnerApp file descriptor
remote_runner_mapping = providers.Static[t.Dict[str, str]]({})
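
This hunk mainly drops `# type: ignore (no opentelemetry types)` comments that are no longer needed and compacts the exporter construction. For reference, the Zipkin wiring it simplifies looks roughly like this in isolation; the endpoint value is illustrative.

from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
from opentelemetry.exporter.zipkin.json import ZipkinExporter

provider = TracerProvider()
exporter = ZipkinExporter(endpoint="http://localhost:9411/api/v2/spans")
# Spans are batched and exported to Zipkin in the background.
provider.add_span_processor(BatchSpanProcessor(exporter))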
2 changes: 1 addition & 1 deletion bentoml/_internal/configuration/default_configuration.yaml
@@ -26,7 +26,7 @@ api_server:
grpc:
# https://github.com/grpc/grpc/blob/ebfb028a29ef5f0f4be6f33c131c740ae0a32c84/src/core/ext/transport/chttp2/transport/http2_settings.cc#L51
max_concurrent_streams: ~
max_message_length: ~
max_message_length: -1
maximum_concurrent_rpcs: ~
metrics_port: 9090
metrics_host: 0.0.0.0
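
The default for api_server.grpc.max_message_length changes from unset (~) to -1; in gRPC, -1 for the message-length options means unlimited. A minimal sketch of how that value maps onto standard gRPC channel arguments (the port is illustrative, and this is not BentoML's actual server setup):

import grpc

server = grpc.aio.server(
    options=[
        ("grpc.max_send_message_length", -1),     # -1 means unlimited
        ("grpc.max_receive_message_length", -1),  # -1 means unlimited
    ]
)
server.add_insecure_port("[::]:3000")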
6 changes: 6 additions & 0 deletions bentoml/_internal/external_typing/__init__.py
@@ -4,6 +4,8 @@
if TYPE_CHECKING:
from typing import Literal

F = t.Callable[..., t.Any]

from pandas import Series as _PdSeries
from pandas import DataFrame as PdDataFrame
from pyarrow.plasma import ObjectID
@@ -30,6 +32,8 @@
from .starlette import ASGIReceive
from .starlette import AsgiMiddleware

WSGIApp = t.Callable[[F, t.Mapping[str, t.Any]], t.Iterable[bytes]]

__all__ = [
"PdSeries",
"PdDataFrame",
@@ -51,4 +55,6 @@
"ASGISend",
"ASGIReceive",
"ASGIMessage",
# misc
"WSGIApp",
]
16 changes: 8 additions & 8 deletions bentoml/_internal/io_descriptors/base.py
@@ -24,6 +24,13 @@
from ..service.openapi.specification import Reference
from ..service.openapi.specification import RequestBody

InputType = (
UnionType
| t.Type[t.Any]
| LazyType[t.Any]
| dict[str, t.Type[t.Any] | UnionType | LazyType[t.Any]]
)


IOType = t.TypeVar("IOType")

@@ -78,14 +85,7 @@ def accepted_proto_fields(self) -> list[str]:
return self._proto_fields

@abstractmethod
def input_type(
self,
) -> t.Union[
"UnionType",
t.Type[t.Any],
"LazyType[t.Any]",
t.Dict[str, t.Union[t.Type[t.Any], "UnionType", "LazyType[t.Any]"]],
]:
def input_type(self) -> InputType:
...

@abstractmethod
17 changes: 10 additions & 7 deletions bentoml/_internal/io_descriptors/numpy.py
@@ -188,13 +188,17 @@ def __init__(
try:
dtype = np.dtype(dtype)
except TypeError as e:
raise UnprocessableEntity(f'NumpyNdarray: Invalid dtype "{dtype}": {e}')
raise UnprocessableEntity(
f'NumpyNdarray: Invalid dtype "{dtype}": {e}'
) from e

self._dtype: ext.NpDTypeLike | None = dtype
self._dtype = dtype
self._shape = shape
self._enforce_dtype = enforce_dtype
self._enforce_shape = enforce_shape

self._sample_input = None

# whether to use packed representation of numpy while sending protobuf
# this means users should be using raw_value instead of array_value or multi_dimensional_array_value
self._packed = packed
@@ -207,8 +211,6 @@ def __init__(
# https://numpy.org/doc/stable/user/basics.byteswapping.html#introduction-to-byte-ordering-and-ndarrays
self._bytesorder: t.Literal["C", "F", "A"] = bytesorder

self._sample_input = None

def _openapi_types(self) -> str:
# convert numpy dtypes to openapi compatible types.
var_type = "integer"
@@ -270,11 +272,11 @@ def openapi_responses(self) -> OpenAPIResponse:

def _verify_ndarray(
self,
obj: "ext.NpNDArray",
dtype: np.dtype[t.Any] | None,
obj: ext.NpNDArray,
dtype: ext.NpDTypeLike | None,
shape: tuple[int, ...] | None,
exception_cls: t.Type[Exception] = BadInput,
) -> "ext.NpNDArray":
) -> ext.NpNDArray:
if dtype is not None and dtype != obj.dtype:
# ‘same_kind’ means only safe casts or casts within a kind, like float64
# to float32, are allowed.
@@ -408,6 +410,7 @@ async def from_grpc_request(
else:
# {'float_contents': [1.0, 2.0, 3.0]}
array = serialized
shape = self._shape

dtype_string, content = get_array_proto(array)
dtype = np.dtype(_VALUES_TO_NP_DTYPE_MAP[dtype_string])
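
The NumpyNdarray hunks above chain the original TypeError when an invalid dtype is given, simplify the stored dtype annotation, and pick up the descriptor's configured shape in from_grpc_request. A hedged usage sketch of the constructor arguments involved; the dtype and shape values are illustrative.

import numpy as np
from bentoml.io import NumpyNdarray

input_spec = NumpyNdarray(
    dtype=np.float32,     # anything np.dtype() accepts; invalid values raise UnprocessableEntity
    shape=(-1, 4),
    enforce_dtype=True,   # reject inputs whose dtype cannot be safely cast
    enforce_shape=True,   # reject inputs whose shape does not match (-1, 4)
)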
77 changes: 1 addition & 76 deletions bentoml/_internal/server/grpc/interceptors/__init__.py
@@ -1,25 +1,17 @@
from __future__ import annotations

import typing as t
import logging
import functools
from timeit import default_timer
from typing import TYPE_CHECKING

import grpc
from grpc import aio

from ....utils import LazyLoader
from ....utils.grpc import ProtoCodec
from ....utils.grpc import to_http_status
from ....utils.grpc import wrap_rpc_handler
from ....utils.grpc import get_grpc_content_type
from ....utils.grpc.codec import GRPC_CONTENT_TYPE

if TYPE_CHECKING:
from grpc.aio._typing import MetadataType

from bentoml.grpc.v1 import service_pb2

from ..types import Request
from ..types import Response
@@ -31,13 +23,11 @@
else:
service_pb2 = LazyLoader("service_pb2", globals(), "bentoml.grpc.v1.service_pb2")

logger = logging.getLogger(__name__)


class GenericHeadersServerInterceptor(aio.ServerInterceptor):
"""
A light header interceptor that provides some initial metadata to the client.
TODO: https://chromium.googlesource.com/external/github.com/grpc/grpc/+/HEAD/doc/PROTOCOL-HTTP2.md
Refers to https://chromium.googlesource.com/external/github.com/grpc/grpc/+/HEAD/doc/PROTOCOL-HTTP2.md
"""

def __init__(self, *, codec: Codec | None = None):
@@ -82,68 +72,3 @@ async def new_behaviour(
return new_behaviour

return wrap_rpc_handler(wrapper, handler)


class AccessLogServerInterceptor(aio.ServerInterceptor):
"""
An asyncio interceptor for access log.
"""

async def intercept_service(
self,
continuation: t.Callable[[HandlerCallDetails], t.Awaitable[RpcMethodHandler]],
handler_call_details: HandlerCallDetails,
) -> RpcMethodHandler:
logger = logging.getLogger("bentoml.access")
handler = await continuation(handler_call_details)
method_name = handler_call_details.method

if handler and (handler.response_streaming or handler.request_streaming):
return handler

def wrapper(behaviour: AsyncHandlerMethod[Response]):
@functools.wraps(behaviour)
async def new_behaviour(
request: Request, context: BentoServicerContext
) -> Response | t.Awaitable[Response]:

content_type = GRPC_CONTENT_TYPE

trailing_metadata: MetadataType | None = context.trailing_metadata()
if trailing_metadata:
trailing = dict(trailing_metadata)
content_type = trailing.get("content-type", GRPC_CONTENT_TYPE)

start = default_timer()
try:
response = behaviour(request, context)
if not hasattr(response, "__aiter__"):
response = await response
except Exception as e:
context.set_code(grpc.StatusCode.INTERNAL)
context.set_details(str(e))
response = service_pb2.Response()
finally:
latency = max(default_timer() - start, 0)
req = [
"scheme=http", # TODO: support https when ssl is added
f"path={method_name}",
f"type={content_type}",
f"size={request.ByteSize()}",
]
resp = [
f"http_status={to_http_status(context.code())}",
f"grpc_status={context.code().value[0]}",
f"type={content_type}",
f"size={response.ByteSize()}",
]

# TODO: fix ports
logger.info(
f"{context.peer()} ({','.join(req)}) ({','.join(resp)}) {latency:.3f}ms"
)
return response

return new_behaviour

return wrap_rpc_handler(wrapper, handler)
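
Most of this change removes the AccessLogServerInterceptor and its now-unused imports from the package __init__. For orientation, a minimal grpc.aio server interceptor of the kind this module defines looks like the sketch below; it is illustrative only and not the removed interceptor.

from __future__ import annotations

import logging

from grpc import aio


class NoopServerInterceptor(aio.ServerInterceptor):
    async def intercept_service(self, continuation, handler_call_details):
        # Real interceptors wrap the returned RpcMethodHandler
        # (see wrap_rpc_handler above); this one simply delegates.
        logging.getLogger("bentoml.access").debug(
            "intercepting %s", handler_call_details.method
        )
        return await continuation(handler_call_details)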
