Skip to content

Commit

Permalink
fix(configuration): merging global runner config to runner specific config (#2849)
Browse files Browse the repository at this point in the history

* merge global and runner specific instructions

* tests

* fix pep8

* remove breakpoint

* flush

* more asserts

* close file to work in windows

Signed-off-by: Aaron Pham <29749331+aarnphm@users.noreply.github.com>
  • Loading branch information
jjmachan authored and aarnphm committed Aug 3, 2022
1 parent 8f682bf commit 3d0fffe
Show file tree
Hide file tree
Showing 2 changed files with 76 additions and 13 deletions.
21 changes: 8 additions & 13 deletions bentoml/_internal/configuration/containers.py
Expand Up @@ -3,6 +3,7 @@
import typing as t
import logging
import multiprocessing
from copy import deepcopy
from typing import TYPE_CHECKING
from dataclasses import dataclass

Expand Down Expand Up @@ -177,28 +178,22 @@ def __init__(
override_config = yaml.safe_load(f)
config_merger.merge(self.config, override_config)

global_runner_cfg = {
k: self.config["runners"][k]
for k in ("batching", "resources", "logging")
}
for key in self.config["runners"]:
if key not in ["batching", "resources", "logging"]:
runner_cfg = self.config["runners"][key]

# key is a runner name
override_resources = False
resource_cfg = None
if "resources" in runner_cfg:
override_resources = True
resource_cfg = runner_cfg["resources"]
if resource_cfg == "system":
resource_cfg = system_resources()
if runner_cfg.get("resources") == "system":
runner_cfg["resources"] = system_resources()

self.config["runners"][key] = config_merger.merge(
self.config["runners"], runner_cfg
deepcopy(global_runner_cfg), runner_cfg
)

if override_resources:
# we don't want to merge resource configuration, override
# it with previous resource config if it was set
self.config["runners"][key]["resources"] = resource_cfg

if validate_schema:
try:
SCHEMA.validate(self.config)
Expand Down
68 changes: 68 additions & 0 deletions tests/unit/_internal/test_configuration.py
@@ -0,0 +1,68 @@
import os
from tempfile import NamedTemporaryFile

from bentoml._internal.configuration.containers import BentoMLConfiguration

OVERRIDE_RUNNERS = """
runners:
batching:
enabled: False
max_batch_size: 10
resources:
cpu: 4
logging:
access:
enabled: False
test_runner_1:
resources: system
test_runner_2:
resources:
cpu: 2
test_runner_gpu:
resources:
nvidia.com/gpu: 1
test_runner_batching:
batching:
enabled: True
logging:
access:
enabled: True
"""


def test_bentoml_configuration_runner_override():
    """Verify global runner config is merged into each runner-specific config.

    Writes ``OVERRIDE_RUNNERS`` to a temp file, loads it through
    ``BentoMLConfiguration``, and asserts that each per-runner section
    inherits the global "batching", "resources" and "logging" values
    unless the runner explicitly overrides them.
    """
    # delete=False so the file can be re-opened by name on Windows;
    # we are responsible for removing it ourselves (the original test
    # leaked the temp file on every run).
    tmpfile = NamedTemporaryFile(mode="w+", delete=False)
    try:
        tmpfile.write(OVERRIDE_RUNNERS)
        tmpfile.flush()
        tmpfile.close()

        bentoml_cfg = BentoMLConfiguration(
            override_config_file=tmpfile.name
        ).as_dict()
    finally:
        os.unlink(tmpfile.name)

    runner_cfg = bentoml_cfg["runners"]

    # test_runner_1: inherits global batching/logging; its "resources: system"
    # is expanded to the machine's actual resources, so the cpu count is
    # machine-dependent and deliberately not asserted here.
    test_runner_1 = runner_cfg["test_runner_1"]
    assert test_runner_1["batching"]["enabled"] is False
    assert test_runner_1["batching"]["max_batch_size"] == 10
    assert test_runner_1["logging"]["access"]["enabled"] is False

    # test_runner_2: overrides cpu, inherits everything else
    test_runner_2 = runner_cfg["test_runner_2"]
    assert test_runner_2["batching"]["enabled"] is False
    assert test_runner_2["batching"]["max_batch_size"] == 10
    assert test_runner_2["logging"]["access"]["enabled"] is False
    assert test_runner_2["resources"]["cpu"] == 2

    # test_runner_gpu: adds a gpu resource; global cpu must still merge in
    test_runner_gpu = runner_cfg["test_runner_gpu"]
    assert test_runner_gpu["batching"]["enabled"] is False
    assert test_runner_gpu["batching"]["max_batch_size"] == 10
    assert test_runner_gpu["logging"]["access"]["enabled"] is False
    assert test_runner_gpu["resources"]["cpu"] == 4  # should use global
    assert test_runner_gpu["resources"]["nvidia.com/gpu"] == 1

    # test_runner_batching: overrides batching.enabled and logging, but must
    # still inherit max_batch_size and the global resources
    test_runner_batching = runner_cfg["test_runner_batching"]
    assert test_runner_batching["batching"]["enabled"] is True
    assert test_runner_batching["batching"]["max_batch_size"] == 10
    assert test_runner_batching["logging"]["access"]["enabled"] is True
    assert test_runner_batching["resources"]["cpu"] == 4  # should use global

0 comments on commit 3d0fffe

Please sign in to comment.