Skip to content

Commit 009863c

Browse files
authored Nov 24, 2020
Accept class definition from Python Client directly (#1587)
1 parent 907b993 commit 009863c

File tree

9 files changed

+203
-102
lines changed

9 files changed

+203
-102
lines changed
 

dev/generate_python_client_md.sh

+1-10
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ sed -i "s/# cortex.client.Client/# cortex.client.Client\n/g" $ROOT/docs/miscella
5252
sed -i "s/](#cortex\./](#/g" $ROOT/docs/miscellaneous/python-client.md
5353
sed -i "s/](#client\.Client\./](#/g" $ROOT/docs/miscellaneous/python-client.md
5454

55-
# indentdation
55+
# indentation
5656
sed -i "s/ \* / \* /g" $ROOT/docs/miscellaneous/python-client.md
5757
sed -i "s/#### /## /g" $ROOT/docs/miscellaneous/python-client.md
5858

@@ -61,12 +61,3 @@ sed -i 's/[[:space:]]*$//' $ROOT/docs/miscellaneous/python-client.md
6161
truncate -s -1 $ROOT/docs/miscellaneous/python-client.md
6262

6363
pip3 uninstall -y cortex
64-
65-
cat << EOF
66-
67-
#### MANUAL EDITS REQUIRED ####
68-
69-
- Copy the docstring for \`client(env: str)\` in pkg/workloads/cortex/client/__init__.py into the generated docs and unindent
70-
71-
Then check the diff
72-
EOF

docs/cluster-management/install.md

+14-1
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,20 @@ import requests
5656
local_client = cortex.client("local")
5757

5858
# deploy the model as a realtime api and wait for it to become active
59-
deployments = local_client.deploy("./cortex.yaml", wait=True)
59+
60+
api_spec={
61+
"name": "iris-classifier",
62+
"kind": "RealtimeAPI",
63+
"predictor": {
64+
"type": "python",
65+
"path": "predictor.py",
66+
"config": {
67+
"model": "s3://cortex-examples/pytorch/iris-classifier/weights.pth"
68+
}
69+
}
70+
}
71+
72+
deployments = local_client.deploy(api_spec, project_dir=".", wait=True)
6073

6174
# get the api's endpoint
6275
url = deployments[0]["api"]["endpoint"]

docs/miscellaneous/python-client.md

+12-34
Original file line numberDiff line numberDiff line change
@@ -29,37 +29,6 @@ client(env: str)
2929

3030
Initialize a client based on the specified environment.
3131

32-
To deploy and manage APIs on a new cluster:
33-
34-
1. Spin up a cluster using the CLI command `cortex cluster up`.
35-
An environment named "aws" will be created once the cluster is ready.
36-
2. Initialize your client:
37-
38-
```python
39-
import cortex
40-
c = cortex.client("aws")
41-
c.deploy("./cortex.yaml")
42-
```
43-
44-
To deploy and manage APIs on an existing cluster:
45-
46-
1. Use the command `cortex cluster info` to get the Operator Endpoint.
47-
2. Configure a client to your cluster:
48-
49-
```python
50-
import cortex
51-
c = cortex.cluster_client("aws", operator_endpoint, aws_access_key_id, aws_secret_access_key)
52-
c.deploy("./cortex.yaml")
53-
```
54-
55-
To deploy and manage APIs locally:
56-
57-
```python
58-
import cortex
59-
c = cortex.client("local")
60-
c.deploy("./cortex.yaml")
61-
```
62-
6332
**Arguments**:
6433

6534
- `env` - Name of the environment to use.
@@ -136,14 +105,23 @@ Delete an environment configured on this machine.
136105
## deploy
137106

138107
```python
139-
| deploy(config_file: str, force: bool = False, wait: bool = False) -> list
108+
| deploy(api_spec: dict, predictor=None, pip_dependencies=[], conda_dependencies=[], project_dir: Optional[str] = None, force: bool = False, wait: bool = False) -> list
140109
```
141110

142-
Deploy or update APIs specified in the config_file.
111+
Deploy an API.
143112

144113
**Arguments**:
145114

146-
- `config_file` - Local path to a yaml file defining Cortex APIs.
115+
- `api_spec` - A dictionary defining a single Cortex API. Schema can be found here:
116+
→ Realtime API: https://docs.cortex.dev/v/master/deployments/realtime-api/api-configuration
117+
→ Batch API: https://docs.cortex.dev/v/master/deployments/batch-api/api-configuration
118+
→ Traffic Splitter: https://docs.cortex.dev/v/master/deployments/realtime-api/traffic-splitter
119+
- `predictor` - A Cortex Predictor class implementation. Not required when deploying a traffic splitter.
120+
→ Realtime API: https://docs.cortex.dev/v/master/deployments/realtime-api/predictors
121+
→ Batch API: https://docs.cortex.dev/v/master/deployments/batch-api/predictors
122+
- `pip_dependencies` - A list of PyPI dependencies that will be installed before running your predictor class.
123+
- `conda_dependencies` - A list of Conda dependencies that will be installed before running your predictor class.
124+
- `project_dir` - Path to a python project.
147125
- `force` - Override any in-progress api updates.
148126
- `wait` - Streams logs until the APIs are ready.
149127

pkg/workloads/cortex/client/README.md

+1-1
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ import requests
5151
local_client = cortex.client("local")
5252

5353
# deploy the model as a realtime api and wait for it to become active
54-
deployments = local_client.deploy("./cortex.yaml", wait=True)
54+
deployments = local_client.deploy_project(config_file="./cortex.yaml", wait=True)
5555

5656
# get the api's endpoint
5757
url = deployments[0]["api"]["endpoint"]

pkg/workloads/cortex/client/cortex/__init__.py

-31
Original file line numberDiff line numberDiff line change
@@ -24,37 +24,6 @@ def client(env: str):
2424
"""
2525
Initialize a client based on the specified environment.
2626
27-
To deploy and manage APIs on a new cluster:
28-
29-
1. Spin up a cluster using the CLI command `cortex cluster up`.
30-
An environment named "aws" will be created once the cluster is ready.
31-
2. Initialize your client:
32-
33-
```python
34-
import cortex
35-
c = cortex.client("aws")
36-
c.deploy("./cortex.yaml")
37-
```
38-
39-
To deploy and manage APIs on an existing cluster:
40-
41-
1. Use the command `cortex cluster info` to get the Operator Endpoint.
42-
2. Configure a client to your cluster:
43-
44-
```python
45-
import cortex
46-
c = cortex.cluster_client("aws", operator_endpoint, aws_access_key_id, aws_secret_access_key)
47-
c.deploy("./cortex.yaml")
48-
```
49-
50-
To deploy and manage APIs locally:
51-
52-
```python
53-
import cortex
54-
c = cortex.client("local")
55-
c.deploy("./cortex.yaml")
56-
```
57-
5827
Args:
5928
env: Name of the environment to use.
6029

pkg/workloads/cortex/client/cortex/client.py

+106
Original file line numberDiff line numberDiff line change
@@ -18,9 +18,15 @@
1818
import sys
1919
import subprocess
2020
import threading
21+
import yaml
22+
import uuid
23+
import dill
24+
import inspect
25+
from pathlib import Path
2126

2227
from typing import List, Dict, Optional, Tuple, Callable, Union
2328
from cortex.binary import run_cli, get_cli_path
29+
from cortex import util
2430

2531

2632
class Client:
@@ -33,7 +39,107 @@ def __init__(self, env: str):
3339
"""
3440
self.env = env
3541

42+
# CORTEX_VERSION_MINOR x5
3643
def deploy(
44+
self,
45+
api_spec: dict,
46+
predictor=None,
47+
pip_dependencies=[],
48+
conda_dependencies=[],
49+
project_dir: Optional[str] = None,
50+
force: bool = False,
51+
wait: bool = False,
52+
) -> list:
53+
"""
54+
Deploy an API.
55+
56+
Args:
57+
api_spec: A dictionary defining a single Cortex API. Schema can be found here:
58+
→ Realtime API: https://docs.cortex.dev/v/master/deployments/realtime-api/api-configuration
59+
→ Batch API: https://docs.cortex.dev/v/master/deployments/batch-api/api-configuration
60+
→ Traffic Splitter: https://docs.cortex.dev/v/master/deployments/realtime-api/traffic-splitter
61+
predictor: A Cortex Predictor class implementation. Not required when deploying a traffic splitter.
62+
→ Realtime API: https://docs.cortex.dev/v/master/deployments/realtime-api/predictors
63+
→ Batch API: https://docs.cortex.dev/v/master/deployments/batch-api/predictors
64+
pip_dependencies: A list of PyPI dependencies that will be installed before the predictor class implementation is invoked.
65+
conda_dependencies: A list of Conda dependencies that will be installed before the predictor class implementation is invoked.
66+
project_dir: Path to a python project.
67+
force: Override any in-progress api updates.
68+
wait: Streams logs until the APIs are ready.
69+
70+
Returns:
71+
Deployment status, API specification, and endpoint for each API.
72+
"""
73+
74+
if project_dir is not None and predictor is not None:
75+
raise ValueError(
76+
"`predictor` and `project_dir` parameters cannot be specified at the same time, please choose one"
77+
)
78+
79+
if project_dir is not None:
80+
cortex_yaml_path = os.path.join(project_dir, f".cortex-{uuid.uuid4()}.yaml")
81+
82+
with util.open_temporarily(cortex_yaml_path, "w") as f:
83+
yaml.dump([api_spec], f) # write a list
84+
return self._deploy(cortex_yaml_path, force, wait)
85+
86+
project_dir = Path.home() / ".cortex" / "deployments" / str(uuid.uuid4())
87+
with util.open_tempdir(str(project_dir)):
88+
cortex_yaml_path = os.path.join(project_dir, "cortex.yaml")
89+
90+
if predictor is None:
91+
# for deploying a traffic splitter
92+
with open(cortex_yaml_path, "w") as f:
93+
yaml.dump([api_spec], f) # write a list
94+
return self._deploy(cortex_yaml_path, force=force, wait=wait)
95+
96+
# Change if PYTHONVERSION changes
97+
expected_version = "3.6"
98+
actual_version = f"{sys.version_info.major}.{sys.version_info.minor}"
99+
if actual_version < expected_version:
100+
raise Exception("cortex is only supported for python versions >= 3.6") # unexpected
101+
if actual_version > expected_version:
102+
is_python_set = any(
103+
conda_dep.startswith("python=") or "::python=" in conda_dep
104+
for conda_dep in conda_dependencies
105+
)
106+
107+
if not is_python_set:
108+
conda_dependencies = [
109+
f"conda-forge::python={sys.version_info.major}.{sys.version_info.minor}.{sys.version_info.micro}"
110+
] + conda_dependencies
111+
112+
if len(pip_dependencies) > 0:
113+
with open(project_dir / "requirements.txt", "w") as requirements_file:
114+
requirements_file.write("\n".join(pip_dependencies))
115+
116+
if len(conda_dependencies) > 0:
117+
with open(project_dir / "conda-packages.txt", "w") as conda_file:
118+
conda_file.write("\n".join(conda_dependencies))
119+
120+
if not inspect.isclass(predictor):
121+
raise ValueError("predictor parameter must be a class definition")
122+
123+
with open(project_dir / "predictor.pickle", "wb") as pickle_file:
124+
dill.dump(predictor, pickle_file)
125+
if api_spec.get("predictor") is None:
126+
api_spec["predictor"] = {}
127+
128+
if predictor.__name__ == "PythonPredictor":
129+
predictor_type = "python"
130+
if predictor.__name__ == "TensorFlowPredictor":
131+
predictor_type = "tensorflow"
132+
if predictor.__name__ == "ONNXPredictor":
133+
predictor_type = "onnx"
134+
135+
api_spec["predictor"]["path"] = "predictor.pickle"
136+
api_spec["predictor"]["type"] = predictor_type
137+
138+
with open(cortex_yaml_path, "w") as f:
139+
yaml.dump([api_spec], f) # write a list
140+
return self._deploy(cortex_yaml_path, force=force, wait=wait)
141+
142+
def _deploy(
37143
self,
38144
config_file: str,
39145
force: bool = False,
+39
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
# Copyright 2020 Cortex Labs, Inc.
2+
#
3+
# Licensed under the Apache License, Version 2.0 (the "License");
4+
# you may not use this file except in compliance with the License.
5+
# You may obtain a copy of the License at
6+
#
7+
# http://www.apache.org/licenses/LICENSE-2.0
8+
#
9+
# Unless required by applicable law or agreed to in writing, software
10+
# distributed under the License is distributed on an "AS IS" BASIS,
11+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12+
# See the License for the specific language governing permissions and
13+
# limitations under the License.
14+
15+
from contextlib import contextmanager
16+
import os
17+
from pathlib import Path
18+
import shutil
19+
20+
21+
@contextmanager
22+
def open_temporarily(path, mode):
23+
file = open(path, mode)
24+
25+
try:
26+
yield file
27+
finally:
28+
file.close()
29+
os.remove(path)
30+
31+
32+
@contextmanager
33+
def open_tempdir(dir_path):
34+
Path(dir_path).mkdir(parents=True)
35+
36+
try:
37+
yield dir_path
38+
finally:
39+
shutil.rmtree(dir_path)

pkg/workloads/cortex/client/setup.py

+7-1
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,13 @@ def run(self):
9494
"cortex = cortex.binary:run",
9595
],
9696
},
97-
install_requires=(["importlib-resources; python_version < '3.7'"]),
97+
install_requires=(
98+
[
99+
"importlib-resources; python_version < '3.7'",
100+
"pyyaml>=5.3.0",
101+
"dill==0.3.2", # lines up with dill package version used in cortex serving code
102+
]
103+
),
98104
python_requires=">=3.6",
99105
cmdclass={
100106
"install": InstallBinary,

pkg/workloads/cortex/lib/api/predictor.py

+23-24
Original file line numberDiff line numberDiff line change
@@ -246,25 +246,16 @@ def class_impl(self, project_dir):
246246

247247
try:
248248
with FileLock("/run/init_stagger.lock"):
249-
impl = self._load_module("cortex_predictor", os.path.join(project_dir, self.path))
249+
predictor_class = self._get_class_impl(
250+
"cortex_predictor", os.path.join(project_dir, self.path), target_class_name
251+
)
250252
except CortexException as e:
251253
e.wrap("error in " + self.path)
252254
raise
253255
finally:
254256
refresh_logger()
255257

256258
try:
257-
classes = inspect.getmembers(impl, inspect.isclass)
258-
predictor_class = None
259-
for class_df in classes:
260-
if class_df[0] == target_class_name:
261-
if predictor_class is not None:
262-
raise UserException(
263-
f"multiple definitions for {target_class_name} class found; please check your imports and class definitions and ensure that there is only one Predictor class definition"
264-
)
265-
predictor_class = class_df[1]
266-
if predictor_class is None:
267-
raise UserException(f"{target_class_name} class is not defined")
268259
_validate_impl(predictor_class, validations, self.api_spec)
269260
if self.type == PythonPredictorType:
270261
_validate_python_predictor_with_models(predictor_class, self.api_spec)
@@ -273,24 +264,32 @@ def class_impl(self, project_dir):
273264
raise
274265
return predictor_class
275266

276-
def _load_module(self, module_name, impl_path):
267+
def _get_class_impl(self, module_name, impl_path, target_class_name):
277268
if impl_path.endswith(".pickle"):
278269
try:
279-
impl = imp.new_module(module_name)
280-
281270
with open(impl_path, "rb") as pickle_file:
282-
pickled_dict = dill.load(pickle_file)
283-
for key in pickled_dict:
284-
setattr(impl, key, pickled_dict[key])
271+
return dill.load(pickle_file)
285272
except Exception as e:
286273
raise UserException("unable to load pickle", str(e)) from e
287-
else:
288-
try:
289-
impl = imp.load_source(module_name, impl_path)
290-
except Exception as e:
291-
raise UserException(str(e)) from e
292274

293-
return impl
275+
try:
276+
impl = imp.load_source(module_name, impl_path)
277+
except Exception as e:
278+
raise UserException(str(e)) from e
279+
280+
classes = inspect.getmembers(impl, inspect.isclass)
281+
predictor_class = None
282+
for class_df in classes:
283+
if class_df[0] == target_class_name:
284+
if predictor_class is not None:
285+
raise UserException(
286+
f"multiple definitions for {target_class_name} class found; please check your imports and class definitions and ensure that there is only one Predictor class definition"
287+
)
288+
predictor_class = class_df[1]
289+
if predictor_class is None:
290+
raise UserException(f"{target_class_name} class is not defined")
291+
292+
return predictor_class
294293

295294
def _is_model_caching_enabled(self) -> bool:
296295
"""

0 commit comments

Comments (0)
Please sign in to comment.