-
Notifications
You must be signed in to change notification settings - Fork 4k
/
__init__.py
150 lines (119 loc) · 5.13 KB
/
__init__.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
import os
import shlex
import sys
import textwrap
from flask import Flask, send_from_directory, Response
from mlflow.server import handlers
from mlflow.server.handlers import (
get_artifact_handler,
STATIC_PREFIX_ENV_VAR,
_add_static_prefix,
get_model_version_artifact_handler,
)
from mlflow.utils.process import exec_cmd
# NB: These are internal environment variables used for communication between
# the cli and the forked gunicorn processes.
BACKEND_STORE_URI_ENV_VAR = "_MLFLOW_SERVER_FILE_STORE"
ARTIFACT_ROOT_ENV_VAR = "_MLFLOW_SERVER_ARTIFACT_ROOT"
ARTIFACTS_DESTINATION_ENV_VAR = "_MLFLOW_SERVER_ARTIFACT_DESTINATION"
# NOTE(review): lowercase on purpose — presumably the exact variable name the
# prometheus_client multiprocess mode reads from the environment; confirm
# against the prometheus exporter module before renaming.
PROMETHEUS_EXPORTER_ENV_VAR = "prometheus_multiproc_dir"
SERVE_ARTIFACTS_ENV_VAR = "_MLFLOW_SERVER_SERVE_ARTIFACTS"
ARTIFACTS_ONLY_ENV_VAR = "_MLFLOW_SERVER_ARTIFACTS_ONLY"
# Location (relative to this package) of the pre-built React UI assets.
REL_STATIC_DIR = "js/build"
app = Flask(__name__, static_folder=REL_STATIC_DIR)
# Absolute path to the static assets, resolved against the Flask app root.
STATIC_DIR = os.path.join(app.root_path, REL_STATIC_DIR)
# Register every REST API endpoint declared by the handlers module on the app.
for http_path, handler, methods in handlers.get_endpoints():
    app.add_url_rule(http_path, handler.__name__, handler, methods=methods)
# Activate the Prometheus metrics exporter when the environment variable points
# at a directory used by prometheus_client's multiprocess mode.
prometheus_metrics_path = os.getenv(PROMETHEUS_EXPORTER_ENV_VAR)
if prometheus_metrics_path:
    from mlflow.server.prometheus_exporter import activate_prometheus_exporter

    # exist_ok=True avoids the TOCTOU race between an existence check and the
    # directory creation when several gunicorn workers start concurrently, and
    # lets us read the environment variable only once.
    os.makedirs(prometheus_metrics_path, exist_ok=True)
    activate_prometheus_exporter(app)
# Lightweight liveness probe so external monitors can confirm the server
# process is responsive.
@app.route("/health")
def health():
    """Return a plain "OK" body with HTTP status 200."""
    body, status = "OK", 200
    return body, status
# Route for the "get-artifact" endpoint.
@app.route(_add_static_prefix("/get-artifact"))
def serve_artifacts():
    """Delegate artifact download requests to the shared artifact handler."""
    response = get_artifact_handler()
    return response
# Route for the "model-versions/get-artifact" endpoint.
@app.route(_add_static_prefix("/model-versions/get-artifact"))
def serve_model_version_artifact():
    """Delegate model-version artifact requests to the dedicated handler."""
    response = get_model_version_artifact_handler()
    return response
# The React app is built assuming it is hosted at /static-files, so requests
# for CSS/JS resources (e.g. /static-files/main.css) arrive here and are
# answered straight from the build directory.
@app.route(_add_static_prefix("/static-files/<path:path>"))
def serve_static_file(path):
    """Serve one static build asset identified by its relative *path*."""
    asset_root = STATIC_DIR
    return send_from_directory(asset_root, path)
# Catch-all root route: hand out the React app's index.html when the UI has
# been built, otherwise explain how to get a working UI.
@app.route(_add_static_prefix("/"))
def serve():
    """Serve index.html, or a plain-text help message when it is missing."""
    index_path = os.path.join(STATIC_DIR, "index.html")
    if not os.path.exists(index_path):
        text = textwrap.dedent(
            """
        Unable to display MLflow UI - landing page (index.html) not found.
        You are very likely running the MLflow server using a source installation of the Python MLflow
        package.
        If you are a developer making MLflow source code changes and intentionally running a source
        installation of MLflow, you can view the UI by running the Javascript dev server:
        https://github.com/mlflow/mlflow/blob/master/CONTRIBUTING.rst#running-the-javascript-dev-server
        Otherwise, uninstall MLflow via 'pip uninstall mlflow', reinstall an official MLflow release
        from PyPI via 'pip install mlflow', and rerun the MLflow server.
        """
        )
        return Response(text, mimetype="text/plain")
    return send_from_directory(STATIC_DIR, "index.html")
def _build_waitress_command(waitress_opts, host, port):
opts = shlex.split(waitress_opts) if waitress_opts else []
return (
["waitress-serve"]
+ opts
+ ["--host=%s" % host, "--port=%s" % port, "--ident=mlflow", "mlflow.server:app"]
)
def _build_gunicorn_command(gunicorn_opts, host, port, workers):
bind_address = "%s:%s" % (host, port)
opts = shlex.split(gunicorn_opts) if gunicorn_opts else []
return ["gunicorn"] + opts + ["-b", bind_address, "-w", "%s" % workers, "mlflow.server:app"]
def _run_server(
    file_store_path,
    default_artifact_root,
    serve_artifacts,
    artifacts_only,
    artifacts_destination,
    host,
    port,
    static_prefix=None,
    workers=None,
    gunicorn_opts=None,
    waitress_opts=None,
    expose_prometheus=None,
):
    """
    Run the MLflow server, wrapping it in gunicorn or waitress on windows

    :param static_prefix: If set, the index.html asset will be served from the path static_prefix.
                          If left None, the index.html asset will be served from the root path.
    :return: None
    """
    # Configuration reaches the forked worker processes through the internal
    # environment variables declared at the top of this module; only truthy
    # values are forwarded, matching the original per-option checks.
    candidates = [
        (BACKEND_STORE_URI_ENV_VAR, file_store_path),
        (ARTIFACT_ROOT_ENV_VAR, default_artifact_root),
        (SERVE_ARTIFACTS_ENV_VAR, "true" if serve_artifacts else None),
        (ARTIFACTS_ONLY_ENV_VAR, "true" if artifacts_only else None),
        (ARTIFACTS_DESTINATION_ENV_VAR, artifacts_destination),
        (STATIC_PREFIX_ENV_VAR, static_prefix),
        (PROMETHEUS_EXPORTER_ENV_VAR, expose_prometheus),
    ]
    env_map = {name: value for name, value in candidates if value}
    # TODO: eventually may want waitress on non-win32
    if sys.platform == "win32":
        full_command = _build_waitress_command(waitress_opts, host, port)
    else:
        # Default to 4 gunicorn workers when none (or 0) is requested.
        full_command = _build_gunicorn_command(gunicorn_opts, host, port, workers or 4)
    exec_cmd(full_command, env=env_map, stream_output=True)