Commit

Support for grid search algorithm in Optuna Suggestion Service (#2060)
Signed-off-by: Yuki Iwai <yuki.iwai.tz@gmail.com>

tenzen-y committed Dec 24, 2022
1 parent 1dd7251 commit 7c509ba
Showing 5 changed files with 162 additions and 274 deletions.
2 changes: 1 addition & 1 deletion manifests/v1beta1/components/controller/katib-config.yaml
@@ -31,7 +31,7 @@ data:
"image": "docker.io/kubeflowkatib/suggestion-hyperopt:latest"
},
"grid": {
"image": "docker.io/kubeflowkatib/suggestion-chocolate:latest"
"image": "docker.io/kubeflowkatib/suggestion-optuna:latest"
},
"hyperband": {
"image": "docker.io/kubeflowkatib/suggestion-hyperband:latest"
30 changes: 27 additions & 3 deletions pkg/suggestion/v1beta1/internal/search_space.py
@@ -13,11 +13,12 @@
# limitations under the License.

import logging
import numpy as np

from pkg.apis.manager.v1beta1.python import api_pb2 as api
from pkg.suggestion.v1beta1.internal.constant import INTEGER, DOUBLE, CATEGORICAL, DISCRETE
import pkg.suggestion.v1beta1.internal.constant as constant


logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger(__name__)

@@ -36,15 +37,38 @@ def convert(experiment):
search_space.goal = constant.MIN_GOAL
for p in experiment.spec.parameter_specs.parameters:
search_space.params.append(
HyperParameterSearchSpace.convertParameter(p))
HyperParameterSearchSpace.convert_parameter(p))
return search_space

@staticmethod
def convert_to_combinations(search_space):
combinations = {}

for parameter in search_space.params:
if parameter.type == INTEGER:
combinations[parameter.name] = range(int(parameter.min), int(parameter.max)+1, int(parameter.step))
elif parameter.type == DOUBLE:
if parameter.step == "" or parameter.step is None:
raise Exception(
"Param {} step is nil; For discrete search space, all parameters must include step".
format(parameter.name)
)
double_list = np.arange(float(parameter.min), float(parameter.max)+float(parameter.step),
float(parameter.step))
if double_list[-1] > float(parameter.max):
double_list = double_list[:-1]
combinations[parameter.name] = double_list
elif parameter.type == CATEGORICAL or parameter.type == DISCRETE:
combinations[parameter.name] = parameter.list

return combinations

def __str__(self):
return "HyperParameterSearchSpace(goal: {}, ".format(self.goal) + \
"params: {})".format(", ".join([element.__str__() for element in self.params]))

@staticmethod
def convertParameter(p):
def convert_parameter(p):
if p.parameter_type == api.INT:
# Default value for INT parameter step is 1
step = 1
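For context, the new convert_to_combinations helper flattens the Katib search space into a dict of parameter name to list of grid values: integer parameters become a range, double parameters become a numpy.arange over min/max/step (step is mandatory there), and categorical/discrete parameters keep their value list. A minimal standalone sketch of the same idea, using hypothetical plain-dict parameters rather than the commit's HyperParameter objects:

# Sketch only: simplified stand-in for HyperParameterSearchSpace.convert_to_combinations.
import numpy as np

def to_combinations(params):
    """params: list of dicts with keys name, type, min, max, step, list."""
    combinations = {}
    for p in params:
        if p["type"] == "int":
            combinations[p["name"]] = list(range(int(p["min"]), int(p["max"]) + 1, int(p["step"] or 1)))
        elif p["type"] == "double":
            if not p.get("step"):
                raise ValueError("double parameter {} needs a step for grid search".format(p["name"]))
            values = np.arange(float(p["min"]), float(p["max"]) + float(p["step"]), float(p["step"]))
            # Floating-point error can push arange one step past max; trim it.
            if values[-1] > float(p["max"]):
                values = values[:-1]
            combinations[p["name"]] = values.tolist()
        else:  # categorical / discrete
            combinations[p["name"]] = list(p["list"])
    return combinations

print(to_combinations([
    {"type": "int", "name": "num_layers", "min": 1, "max": 3, "step": 1, "list": []},
    {"type": "double", "name": "lr", "min": 0.01, "max": 0.03, "step": 0.01, "list": []},
    {"type": "categorical", "name": "optimizer", "min": None, "max": None, "step": None, "list": ["sgd", "adam"]},
]))
# -> roughly {'num_layers': [1, 2, 3], 'lr': [0.01, 0.02, 0.03], 'optimizer': ['sgd', 'adam']}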
5 changes: 5 additions & 0 deletions pkg/suggestion/v1beta1/optuna/base_service.py
@@ -17,6 +17,7 @@

from pkg.suggestion.v1beta1.internal.constant import INTEGER, DOUBLE, CATEGORICAL, DISCRETE, MAX_GOAL
from pkg.suggestion.v1beta1.internal.trial import Assignment
from pkg.suggestion.v1beta1.internal.search_space import HyperParameterSearchSpace


class BaseOptunaService(object):
@@ -48,6 +49,10 @@ def _create_sampler(self):
elif self.algorithm_name == "random":
return optuna.samplers.RandomSampler(**self.algorithm_config)

elif self.algorithm_name == "grid":
combinations = HyperParameterSearchSpace.convert_to_combinations(self.search_space)
return optuna.samplers.GridSampler(combinations, **self.algorithm_config)

def get_suggestions(self, trials, current_request_number):
if len(trials) != 0:
self._tell(trials)
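For context, Optuna's GridSampler takes exactly the shape of input built above: a dict mapping parameter names to the discrete values to enumerate. A minimal standalone usage sketch, with made-up parameter names and a dummy objective (assumes a reasonably recent Optuna release is installed):

# Sketch only: how a combinations dict drives optuna.samplers.GridSampler.
import optuna

search_space = {
    "lr": [0.01, 0.02, 0.03],
    "num_layers": [1, 2, 3],
}

def objective(trial):
    lr = trial.suggest_float("lr", 0.01, 0.03)
    num_layers = trial.suggest_int("num_layers", 1, 3)
    # Dummy objective; a real study would train and evaluate a model here.
    return lr * num_layers

study = optuna.create_study(sampler=optuna.samplers.GridSampler(search_space))
study.optimize(objective, n_trials=9)  # 3 x 3 = 9 grid points
print(study.best_params)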
45 changes: 42 additions & 3 deletions pkg/suggestion/v1beta1/optuna/service.py
@@ -15,10 +15,10 @@
import threading
import grpc
import logging
import itertools

from pkg.apis.manager.v1beta1.python import api_pb2
from pkg.apis.manager.v1beta1.python import api_pb2_grpc
from pkg.suggestion.v1beta1.internal.constant import INTEGER, DOUBLE
from pkg.suggestion.v1beta1.internal.search_space import HyperParameterSearchSpace
from pkg.suggestion.v1beta1.internal.trial import Trial, Assignment
from pkg.suggestion.v1beta1.optuna.base_service import BaseOptunaService
@@ -55,7 +55,7 @@ def GetSuggestions(self, request, context):

def ValidateAlgorithmSettings(self, request, context):
is_valid, message = OptimizerConfiguration.validate_algorithm_spec(
request.experiment.spec.algorithm)
request.experiment)
if not is_valid:
context.set_code(grpc.StatusCode.INVALID_ARGUMENT)
context.set_details(message)
@@ -86,6 +86,9 @@ class OptimizerConfiguration(object):
"random": {
"seed": lambda x: int(x),
},
"grid": {
"seed": lambda x: int(x),
}
}

@classmethod
@@ -110,7 +113,8 @@ def convert_algorithm_spec(cls, algorithm_spec):
return algorithm_spec.algorithm_name, config

@classmethod
def validate_algorithm_spec(cls, algorithm_spec):
def validate_algorithm_spec(cls, experiment):
algorithm_spec = experiment.spec.algorithm
algorithm_name = algorithm_spec.algorithm_name
algorithm_settings = algorithm_spec.algorithm_settings

@@ -120,6 +124,10 @@ def validate_algorithm_spec(cls, algorithm_spec):
return cls._validate_cmaes_setting(algorithm_settings)
elif algorithm_name == "random":
return cls._validate_random_setting(algorithm_settings)
elif algorithm_name == "grid":
return cls._validate_grid_setting(experiment)
else:
return False, "unknown algorithm name {}".format(algorithm_name)

@classmethod
def _validate_tpe_setting(cls, algorithm_spec):
@@ -178,3 +186,34 @@ def _validate_random_setting(cls, algorithm_settings):
exception=e)

return True, ""

@classmethod
def _validate_grid_setting(cls, experiment):
algorithm_settings = experiment.spec.algorithm.algorithm_settings
search_space = HyperParameterSearchSpace.convert(experiment)

for s in algorithm_settings:
try:
if s.name == "random_state":
if not int(s.value) >= 0:
return False, ""
else:
return False, "unknown setting {} for algorithm grid".format(s.name)

except Exception as e:
return False, "failed to validate {name}({value}): {exception}".format(name=s.name, value=s.value,
exception=e)

try:
combinations = HyperParameterSearchSpace.convert_to_combinations(search_space)
num_combinations = len(list(itertools.product(*combinations.values())))
max_trial_count = experiment.spec.max_trial_count
if max_trial_count > num_combinations:
return False, "Max Trial Count: {max_trial} > all possible search combinations: {combinations}".\
format(max_trial=max_trial_count, combinations=num_combinations)

except Exception as e:
return False, "failed to validate parameters({parameters}): {exception}".\
format(parameters=search_space.params, exception=e)

return True, ""
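To make the new count check concrete: the grid has one point per element of the Cartesian product of the per-parameter value lists, so the experiment's maxTrialCount must not exceed that product. A small standalone sketch of the same check, with made-up numbers:

# Sketch only: the combination-count check from _validate_grid_setting, in isolation.
import itertools

combinations = {
    "lr": [0.01, 0.02, 0.03],
    "num_layers": [1, 2, 3],
    "optimizer": ["sgd", "adam"],
}

num_combinations = len(list(itertools.product(*combinations.values())))  # 3 * 3 * 2 = 18
max_trial_count = 20  # hypothetical Experiment.spec.maxTrialCount

if max_trial_count > num_combinations:
    print("invalid: maxTrialCount {} exceeds the {} possible grid combinations".format(
        max_trial_count, num_combinations))
else:
    print("ok")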
