Remove inheritance from object (#5240)
Signed-off-by: harupy <hkawamura0130@gmail.com>
harupy committed Jan 10, 2022
1 parent 6721e8a commit 19e84f6
Showing 52 changed files with 63 additions and 63 deletions.
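
Why this is a safe, purely mechanical change: on Python 3 every class is a new-style class that implicitly inherits from object, so an explicit (object) base is redundant. A minimal sketch of the equivalence (illustrative class names, not part of this diff):

class WithExplicitBase(object):  # the Python 2 compatible spelling being removed
    pass

class WithImplicitBase:  # the spelling this commit adopts
    pass

# Both are new-style classes with identical method resolution order on Python 3.
assert WithExplicitBase.__mro__ == (WithExplicitBase, object)
assert WithImplicitBase.__mro__ == (WithImplicitBase, object)

The explicit base only mattered on Python 2, where a bare class statement produced an old-style class with different attribute-lookup semantics; MLflow supports only Python 3, so the two spellings behave identically throughout the codebase.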
2 changes: 1 addition & 1 deletion examples/flower_classifier/image_pyfunc.py
@@ -32,7 +32,7 @@ def decode_and_resize_image(raw_bytes, size):
return np.asarray(Image.open(BytesIO(raw_bytes)).resize(size), dtype=np.float32)


-class KerasImageClassifierPyfunc(object):
+class KerasImageClassifierPyfunc:
"""
Image classification model with embedded pre-processing.
4 changes: 2 additions & 2 deletions mlflow/entities/_mlflow_object.py
@@ -2,7 +2,7 @@
import pprint


-class _MLflowObject(object):
+class _MLflowObject:
def __iter__(self):
# Iterate through list of properties and yield as key -> value
for prop in self._properties():
@@ -38,7 +38,7 @@ def get_classname(obj):
return type(obj).__name__


-class _MLflowObjectPrinter(object):
+class _MLflowObjectPrinter:
def __init__(self):
super().__init__()
self.printer = pprint.PrettyPrinter()
2 changes: 1 addition & 1 deletion mlflow/entities/lifecycle_stage.py
Expand Up @@ -2,7 +2,7 @@
from mlflow.exceptions import MlflowException


-class LifecycleStage(object):
+class LifecycleStage:
ACTIVE = "active"
DELETED = "deleted"
_VALID_STAGES = set([ACTIVE, DELETED])
2 changes: 1 addition & 1 deletion mlflow/entities/model_registry/model_version_status.py
@@ -1,7 +1,7 @@
from mlflow.protos.model_registry_pb2 import ModelVersionStatus as ProtoModelVersionStatus


-class ModelVersionStatus(object):
+class ModelVersionStatus:
"""Enum for status of an :py:class:`mlflow.entities.model_registry.ModelVersion`."""

PENDING_REGISTRATION = ProtoModelVersionStatus.Value("PENDING_REGISTRATION")
2 changes: 1 addition & 1 deletion mlflow/entities/run_status.py
@@ -1,7 +1,7 @@
from mlflow.protos.service_pb2 import RunStatus as ProtoRunStatus


-class RunStatus(object):
+class RunStatus:
"""Enum for status of an :py:class:`mlflow.entities.Run`."""

RUNNING = ProtoRunStatus.Value("RUNNING")
2 changes: 1 addition & 1 deletion mlflow/entities/source_type.py
@@ -1,4 +1,4 @@
-class SourceType(object):
+class SourceType:
"""Enum for originating source of a :py:class:`mlflow.entities.Run`."""

NOTEBOOK, JOB, PROJECT, LOCAL, UNKNOWN = range(1, 6)
2 changes: 1 addition & 1 deletion mlflow/entities/view_type.py
@@ -1,7 +1,7 @@
from mlflow.protos import service_pb2


-class ViewType(object):
+class ViewType:
"""Enum to filter requested experiment types."""

ACTIVE_ONLY, DELETED_ONLY, ALL = range(1, 4)
2 changes: 1 addition & 1 deletion mlflow/models/flavor_backend.py
@@ -1,7 +1,7 @@
from abc import ABCMeta, abstractmethod


-class FlavorBackend(object):
+class FlavorBackend:
"""
Abstract class for Flavor Backend.
This class defines the API interface for local model deployment of MLflow model flavors.
2 changes: 1 addition & 1 deletion mlflow/models/model.py
@@ -28,7 +28,7 @@
)


-class Model(object):
+class Model:
"""
An MLflow Model that can support multiple model flavors. Provides APIs for implementing
new Model flavors.
2 changes: 1 addition & 1 deletion mlflow/models/signature.py
@@ -24,7 +24,7 @@
MlflowInferableDataset = Union[pd.DataFrame, np.ndarray, Dict[str, np.ndarray]]


-class ModelSignature(object):
+class ModelSignature:
"""
ModelSignature specifies schema of model's inputs and outputs.
2 changes: 1 addition & 1 deletion mlflow/models/utils.py
@@ -14,7 +14,7 @@
ModelInputExample = Union[pd.DataFrame, np.ndarray, dict, list, csr_matrix, csc_matrix]


-class _Example(object):
+class _Example:
"""
Represents an input example for MLflow model.
2 changes: 1 addition & 1 deletion mlflow/paddle/__init__.py
@@ -419,7 +419,7 @@ def _load_pyfunc(path):
return _PaddleWrapper(load_model(path))


-class _PaddleWrapper(object):
+class _PaddleWrapper:
"""
Wrapper class that creates a predict function such that
predict(data: pd.DataFrame) -> model's output as pd.DataFrame (pandas DataFrame)
6 changes: 3 additions & 3 deletions mlflow/projects/_project_spec.py
@@ -107,7 +107,7 @@ def load_project(directory):
)


-class Project(object):
+class Project:
"""A project specification loaded from an MLproject file in the passed-in directory."""

def __init__(self, conda_env_path, entry_points, docker_env, name):
@@ -136,7 +136,7 @@ def get_entry_point(self, entry_point):
)


-class EntryPoint(object):
+class EntryPoint:
"""An entry point in an MLproject specification."""

def __init__(self, name, parameters, command):
@@ -199,7 +199,7 @@ def _sanitize_param_dict(param_dict):
return {str(key): quote(str(value)) for key, value in param_dict.items()}


-class Parameter(object):
+class Parameter:
"""A parameter in an MLproject entry point."""

def __init__(self, name, yaml_obj):
2 changes: 1 addition & 1 deletion mlflow/projects/databricks.py
@@ -69,7 +69,7 @@ def before_run_validations(tracking_uri, backend_config):
)


-class DatabricksJobRunner(object):
+class DatabricksJobRunner:
"""
Helper class for running an MLflow project as a Databricks Job.
:param databricks_profile: Optional Databricks CLI profile to use to fetch hostname &
2 changes: 1 addition & 1 deletion mlflow/projects/submitted_run.py
@@ -9,7 +9,7 @@
_logger = logging.getLogger(__name__)


-class SubmittedRun(object):
+class SubmittedRun:
"""
Wrapper around an MLflow project run (e.g. a subprocess running an entry point
command or a Databricks job run) and exposing methods for waiting on and cancelling the run.
2 changes: 1 addition & 1 deletion mlflow/pyfunc/__init__.py
@@ -563,7 +563,7 @@ def _enforce_schema(pfInput: PyFuncInput, input_schema: Schema):
)


-class PyFuncModel(object):
+class PyFuncModel:
"""
MLflow 'python function' model.
6 changes: 3 additions & 3 deletions mlflow/pyfunc/model.py
@@ -57,7 +57,7 @@ def get_default_conda_env():
return _mlflow_conda_env(additional_pip_deps=get_default_pip_requirements())


-class PythonModel(object):
+class PythonModel:
"""
Represents a generic Python model that evaluates inputs and produces API-compatible outputs.
By subclassing :class:`~PythonModel`, users can create customized MLflow models with the
@@ -94,7 +94,7 @@ def predict(self, context, model_input):
"""


-class PythonModelContext(object):
+class PythonModelContext:
"""
A collection of artifacts that a :class:`~PythonModel` can use when performing inference.
:class:`~PythonModelContext` objects are created *implicitly* by the
@@ -281,7 +281,7 @@ def _load_pyfunc(model_path):
return _PythonModelPyfuncWrapper(python_model=python_model, context=context)


-class _PythonModelPyfuncWrapper(object):
+class _PythonModelPyfuncWrapper:
"""
Wrapper class that creates a predict function such that
predict(model_input: pd.DataFrame) -> model's output as pd.DataFrame (pandas DataFrame)
2 changes: 1 addition & 1 deletion mlflow/pyfunc/spark_model_cache.py
@@ -6,7 +6,7 @@
from pyspark.files import SparkFiles


-class SparkModelCache(object):
+class SparkModelCache:
"""Caches models in memory on Spark Executors, to avoid continually reloading from disk.
This class has to be part of a different module than the one that _uses_ it. This is
2 changes: 1 addition & 1 deletion mlflow/pytorch/__init__.py
@@ -730,7 +730,7 @@ def _load_pyfunc(path, **kwargs):
return _PyTorchWrapper(_load_model(path, **kwargs))


-class _PyTorchWrapper(object):
+class _PyTorchWrapper:
"""
Wrapper class that creates a predict function such that
predict(data: pd.DataFrame) -> model's output as pd.DataFrame (pandas DataFrame)
2 changes: 1 addition & 1 deletion mlflow/spark.py
@@ -708,7 +708,7 @@ def _load_pyfunc(path):
return _PyFuncModelWrapper(spark, _load_model(model_uri=path))


-class _PyFuncModelWrapper(object):
+class _PyFuncModelWrapper:
"""
Wrapper around Spark MLlib PipelineModel providing interface for scoring pandas DataFrame.
"""
2 changes: 1 addition & 1 deletion mlflow/tensorflow/__init__.py
@@ -455,7 +455,7 @@ def _load_pyfunc(path):
return _TF2Wrapper(model=loaded_model, infer=loaded_model.signatures[tf_signature_def_key])


-class _TF2Wrapper(object):
+class _TF2Wrapper:
"""
Wrapper class that exposes a TensorFlow model for inference via a ``predict`` function such that
``predict(data: pandas.DataFrame) -> pandas.DataFrame``. For TensorFlow versions >= 2.0.0.
2 changes: 1 addition & 1 deletion mlflow/tracking/_model_registry/client.py
@@ -19,7 +19,7 @@
AWAIT_MODEL_VERSION_CREATE_SLEEP_DURATION_SECONDS = 3


-class ModelRegistryClient(object):
+class ModelRegistryClient:
"""
Client of an MLflow Model Registry Server that creates and manages registered
models and model versions.
2 changes: 1 addition & 1 deletion mlflow/tracking/_tracking_service/client.py
@@ -29,7 +29,7 @@
from collections import OrderedDict


-class TrackingServiceClient(object):
+class TrackingServiceClient:
"""
Client of an MLflow Tracking Server that creates and manages experiments and runs.
"""
2 changes: 1 addition & 1 deletion mlflow/tracking/client.py
@@ -47,7 +47,7 @@
_logger = logging.getLogger(__name__)


-class MlflowClient(object):
+class MlflowClient:
"""
Client of an MLflow Tracking Server that creates and manages experiments and runs, and of an
MLflow Registry Server that creates and manages registered models and model versions. It's a
2 changes: 1 addition & 1 deletion mlflow/tracking/context/abstract_context.py
@@ -1,7 +1,7 @@
from abc import ABCMeta, abstractmethod


-class RunContextProvider(object):
+class RunContextProvider:
"""
Abstract base class for context provider objects specifying custom tags at run-creation time
(e.g. tags specifying the git repo with which the run is associated).
2 changes: 1 addition & 1 deletion mlflow/tracking/context/registry.py
@@ -13,7 +13,7 @@
_logger = logging.getLogger(__name__)


-class RunContextProviderRegistry(object):
+class RunContextProviderRegistry:
"""Registry for run context provider implementations
This class allows the registration of a run context provider which can be used to infer meta
@@ -1,7 +1,7 @@
from abc import ABCMeta, abstractmethod


-class RequestHeaderProvider(object):
+class RequestHeaderProvider:
"""
Abstract base class for specifying custom request headers to add to outgoing requests
(e.g. request headers specifying the environment from which mlflow is running).
2 changes: 1 addition & 1 deletion mlflow/tracking/request_header/registry.py
@@ -9,7 +9,7 @@
_logger = logging.getLogger(__name__)


-class RequestHeaderProviderRegistry(object):
+class RequestHeaderProviderRegistry:
def __init__(self):
self._registry = []

8 changes: 4 additions & 4 deletions mlflow/types/schema.py
@@ -68,7 +68,7 @@ def to_spark(self):
return getattr(pyspark.sql.types, self._spark_type)()


-class ColSpec(object):
+class ColSpec:
"""
Specification of name and type of a single column in a dataset.
"""
@@ -119,7 +119,7 @@ def __repr__(self) -> str:
return "{name}: {type}".format(name=repr(self.name), type=repr(self.type))


-class TensorInfo(object):
+class TensorInfo:
"""
Representation of the shape and type of a Tensor.
"""
@@ -181,7 +181,7 @@ def __repr__(self) -> str:
return "Tensor({type}, {shape})".format(type=repr(self.dtype.name), shape=repr(self.shape))


-class TensorSpec(object):
+class TensorSpec:
"""
Specification used to represent a dataset stored as a Tensor.
"""
@@ -249,7 +249,7 @@ def __repr__(self) -> str:
return "{name}: {info}".format(name=repr(self.name), info=repr(self._tensorInfo))


-class Schema(object):
+class Schema:
"""
Specification of a dataset.
2 changes: 1 addition & 1 deletion mlflow/types/utils.py
@@ -212,7 +212,7 @@ def _infer_pandas_column(col: pd.Series) -> DataType:
if len(col.values.shape) > 1:
raise MlflowException("Expected 1d array, got array with shape {}".format(col.shape))

-class IsInstanceOrNone(object):
+class IsInstanceOrNone:
def __init__(self, *args):
self.classes = args
self.seen_instances = 0
2 changes: 1 addition & 1 deletion mlflow/utils/autologging_utils/__init__.py
@@ -505,7 +505,7 @@ def _get_new_training_session_class():
# 1. We don't currently have any use cases for allow_children=True.
# 2. The list append & pop operations are thread-safe, so we will always clear the session stack
# once all _TrainingSessions exit.
-class _TrainingSession(object):
+class _TrainingSession:
_session_stack = []

def __init__(self, clazz, allow_children=True):
2 changes: 1 addition & 1 deletion mlflow/utils/file_utils.py
@@ -181,7 +181,7 @@ def read_yaml(root, file_name):
raise e


-class TempDir(object):
+class TempDir:
def __init__(self, chdr=False, remove_on_exit=True):
self._dir = None
self._path = None
6 changes: 3 additions & 3 deletions mlflow/utils/gorilla.py
@@ -92,7 +92,7 @@ def default_filter(name, obj):
return not (isinstance(obj, types.ModuleType) or name.startswith("_"))


-class DecoratorData(object):
+class DecoratorData:

"""Decorator data.
@@ -115,7 +115,7 @@ def __init__(self):
self.filter = None


-class Settings(object):
+class Settings:

"""Define the patching behaviour.
@@ -171,7 +171,7 @@ def _update(self, **kwargs):
self.__dict__.update(**kwargs)


-class Patch(object):
+class Patch:

"""Describe all the information required to apply a patch.
2 changes: 1 addition & 1 deletion mlflow/utils/rest_utils.py
@@ -291,7 +291,7 @@ def cloud_storage_http_request(
raise MlflowException("API request failed with exception %s" % e)


-class MlflowHostCreds(object):
+class MlflowHostCreds:
"""
Provides a hostname and optional authentication for talking to an MLflow tracking server.
:param host: Hostname (e.g., http://localhost:5000) to MLflow server. Required.
