Merge pull request #78 from Maximophone/54-epsilon_greedy
54 epsilon greedy
ukclivecox authored Feb 1, 2018
2 parents fc61e90 + 4f550a7 commit aacd0ad
Showing 19 changed files with 705 additions and 41 deletions.
2 changes: 1 addition & 1 deletion cluster-manager/pom.xml
@@ -4,7 +4,7 @@
<groupId>io.seldon.clustermanager</groupId>
<artifactId>seldon-cluster-manager</artifactId>
<packaging>jar</packaging>
<version>0.1.4-SNAPSHOT</version>
<version>SNAPSHOT-MF-0</version>
<name>seldon-cluster-manager</name>
<url>http://maven.apache.org</url>

@@ -128,14 +128,15 @@ private V1.Container createEngineContainer(SeldonDeployment dep,PredictorSpec pr
// Add engine resources if specified
if (predictorDef.hasEngineResources())
cBuilder.setResources(predictorDef.getEngineResources());
else // set default resource requests for cpu
cBuilder.setResources(V1.ResourceRequirements.newBuilder().putRequests("cpu", Quantity.newBuilder().setString("0.1").build()));

else {// set default resource requests for cpu
final String DEFAULT_ENGINE_CPU_REQUEST = "0.1";
cBuilder.setResources(V1.ResourceRequirements.newBuilder().putRequests("cpu", Quantity.newBuilder().setString(DEFAULT_ENGINE_CPU_REQUEST).build()));
}
return cBuilder.build();
}



;
private Set<String> getEnvNamesProto(List<EnvVar> envs)
{
Set<String> s = new HashSet<>();
@@ -184,7 +185,7 @@ private PredictiveUnit findPredictiveUnitForContainer(PredictiveUnit unit,String
}
}

private V1.Container updateContainer(V1.Container c,PredictiveUnit pu,int idx)
private V1.Container updateContainer(V1.Container c,PredictiveUnit pu,int idx,String deploymentName,String predictorName)
{
V1.Container.Builder c2Builder = V1.Container.newBuilder(c);

@@ -257,6 +258,17 @@ private V1.Container updateContainer(V1.Container c,PredictiveUnit pu,int idx)
if (!envNames.contains(ENV_PREDICTIVE_UNIT_PARAMETERS))
c2Builder.addEnv(EnvVar.newBuilder().setName(ENV_PREDICTIVE_UNIT_PARAMETERS).setValue(extractPredictiveUnitParametersAsJson(pu)));

// Add environment variables for the predictive unit ID, the predictor ID and the deployment ID
final String ENV_PREDICTIVE_UNIT_ID = "PREDICTIVE_UNIT_ID";
final String ENV_PREDICTOR_ID = "PREDICTOR_ID";
final String ENV_SELDON_DEPLOYMENT_ID = "SELDON_DEPLOYMENT_ID";
if (!envNames.contains(ENV_PREDICTIVE_UNIT_ID))
c2Builder.addEnv(EnvVar.newBuilder().setName(ENV_PREDICTIVE_UNIT_ID).setValue(c.getName()));
if (!envNames.contains(ENV_PREDICTOR_ID))
c2Builder.addEnv(EnvVar.newBuilder().setName(ENV_PREDICTOR_ID).setValue(predictorName));
if (!envNames.contains(ENV_SELDON_DEPLOYMENT_ID))
c2Builder.addEnv(EnvVar.newBuilder().setName(ENV_SELDON_DEPLOYMENT_ID).setValue(deploymentName));

// Add a default lifecycle pre-stop if non exists
if (!c.hasLifecycle())
{
@@ -301,16 +313,19 @@ public SeldonDeployment defaulting(SeldonDeployment mlDep) {
SeldonDeployment.Builder mlBuilder = SeldonDeployment.newBuilder(mlDep);
int idx = 0;
String serviceName = mlDep.getSpec().getName();
String deploymentName = mlDep.getMetadata().getName();

for(PredictorSpec p : mlDep.getSpec().getPredictorsList())
{
ObjectMeta.Builder metaBuilder = ObjectMeta.newBuilder(p.getComponentSpec().getMetadata())
.putLabels(LABEL_SELDON_APP, serviceName);
mlBuilder.getSpecBuilder().getPredictorsBuilder(idx).getComponentSpecBuilder().setMetadata(metaBuilder);
int cIdx = 0;
mlBuilder.getSpecBuilder().getPredictorsBuilder(idx).getComponentSpecBuilder().getSpecBuilder().clearContainers();
String predictorName = p.getName();
for(V1.Container c : p.getComponentSpec().getSpec().getContainersList())
{
V1.Container c2 = this.updateContainer(c, findPredictiveUnitForContainer(mlDep.getSpec().getPredictors(idx).getGraph(),c.getName()),cIdx);
V1.Container c2 = this.updateContainer(c, findPredictiveUnitForContainer(mlDep.getSpec().getPredictors(idx).getGraph(),c.getName()),cIdx,deploymentName,predictorName);
mlBuilder.getSpecBuilder().getPredictorsBuilder(idx).getComponentSpecBuilder().getSpecBuilder().addContainers(cIdx, c2);
updatePredictiveUnitBuilderByName(mlBuilder.getSpecBuilder().getPredictorsBuilder(idx).getGraphBuilder(),c2);
cIdx++;
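The three identifiers added above are injected as ordinary container environment variables, so anything running in the predictor pod can read them at request time. A minimal, hypothetical sketch of a wrapped Python model picking them up (the class name and fallback values are illustrative assumptions, not part of this change):

```python
import os

# Injected by the cluster manager as of this commit; the fallbacks are illustrative only.
PREDICTIVE_UNIT_ID = os.environ.get("PREDICTIVE_UNIT_ID", "unknown-unit")
PREDICTOR_ID = os.environ.get("PREDICTOR_ID", "unknown-predictor")
SELDON_DEPLOYMENT_ID = os.environ.get("SELDON_DEPLOYMENT_ID", "unknown-deployment")

class MyModel(object):
    """Hypothetical wrapped model that logs where each request was served."""

    def predict(self, X, feature_names):
        print("Served by deployment={} predictor={} unit={}".format(
            SELDON_DEPLOYMENT_ID, PREDICTOR_ID, PREDICTIVE_UNIT_ID))
        return X
```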
@@ -38,7 +38,7 @@ public void testDefaulting() throws IOException
Assert.assertTrue(mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).hasLivenessProbe());
Assert.assertTrue(mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).hasReadinessProbe());
Assert.assertTrue(mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).hasLifecycle());
Assert.assertEquals(2,mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getEnvCount());
Assert.assertEquals("Incorrect number of environment variables in container",5,mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getEnvCount());
Assert.assertEquals(1,mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getPortsCount());
Assert.assertEquals("http",mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getPorts(0).getName());
Assert.assertEquals(Endpoint.EndpointType.REST_VALUE,mlDep2.getSpec().getPredictors(0).getGraph().getEndpoint().getType().getNumber());
@@ -56,7 +56,7 @@ public void testDefaultingGrpc() throws IOException
Assert.assertTrue(mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).hasLivenessProbe());
Assert.assertTrue(mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).hasReadinessProbe());
Assert.assertTrue(mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).hasLifecycle());
Assert.assertEquals(2,mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getEnvCount());
Assert.assertEquals("Incorrect number of environment variables in container",5,mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getEnvCount());
Assert.assertEquals(1,mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getPortsCount());
Assert.assertEquals("grpc",mlDep2.getSpec().getPredictors(0).getComponentSpec().getSpec().getContainers(0).getPorts(0).getName());
Assert.assertEquals(Endpoint.EndpointType.GRPC_VALUE,mlDep2.getSpec().getPredictors(0).getGraph().getEndpoint().getType().getNumber());
2 changes: 1 addition & 1 deletion docs/getting_started/minikube.md
@@ -72,7 +72,7 @@ In this session, we show how to wrap the sklearn iris classifier in the [seldon-
3. Wrap your saved model using the core-python-wrapper docker image:
```bash
docker run -v $(pwd):/model seldonio/core-python-wrapper:0.5 /model IrisClassifier 0.1 seldonio --force
docker run -v $(pwd):/model seldonio/core-python-wrapper:0.6 /model IrisClassifier 0.1 seldonio --force
```
4. Build the docker image locally
4 changes: 2 additions & 2 deletions docs/wrappers/h2o.md
@@ -44,7 +44,7 @@ Detailed steps:
3. Run the python wrapping scripts, with the additional ````--base-image``` argument:
```bash
docker run -v /path/to/your/model/folder:/model seldonio/core-python-wrapper:0.5 /model H2OModel 0.1 myrepo --base-image=H2OBase:1.0
docker run -v /path/to/your/model/folder:/model seldonio/core-python-wrapper:0.6 /model H2OModel 0.1 myrepo --base-image=H2OBase:1.0
```
"0.1" is the version of the docker image that will be created. "myrepo" is the name of your dockerhub repository.
@@ -88,7 +88,7 @@ Here we give a step by step example in which we will train and save a [H2O model
```bash
cd ../../
docker run -v models/h2o_example:my_model seldonio/core-python-wrapper:0.5 my_model H2OModel 0.1 myrepo --base-image=H2OBase:1.0
docker run -v models/h2o_example:my_model seldonio/core-python-wrapper:0.6 my_model H2OModel 0.1 myrepo --base-image=H2OBase:1.0
```
This will create a docker image "seldonio/h2omodel:0.1", which is ready to be deployed in seldon-core.
8 changes: 4 additions & 4 deletions docs/wrappers/python.md
@@ -61,13 +61,13 @@ After you have copied the required files in your model folder, you run the Seldo
In order to make things even simpler (and because we love Docker!) we have dockerised the wrapper script so that you don't need to install anything on your machine to run it - except Docker.
```
docker run -v /path/to/model/dir:/my_model seldonio/core-python-wrapper:0.5 /my_model MnistClassifier 0.1 seldonio
docker run -v /path/to/model/dir:/my_model seldonio/core-python-wrapper:0.6 /my_model MnistClassifier 0.1 seldonio
```
Let's explain each piece of this command in more details.
``` docker run seldonio/core-python-wrapper:0.5 ``` : run the core-python-wrapper container (version 0.5)
``` docker run seldonio/core-python-wrapper:0.6 ``` : run the core-python-wrapper container (version 0.6)
``` -v /path/to/model/dir:/my_model ``` : Tells docker to mount your local folder to /my_model in the container. This is used to access your files and generate the wrapped model files.
@@ -76,7 +76,7 @@ Let's explain each piece of this command in more details.
For reference, here is the complete list of arguments that can be passed to the script.
```
docker run -v /path:<model_path> seldonio/core-python-wrapper:0.5
docker run -v /path:<model_path> seldonio/core-python-wrapper:0.6
<model_path>
<model_name>
<image_version>
@@ -108,7 +108,7 @@ Optional:
Note that you can access the command line help of the script by using the -h or --help argument as follows:
```
docker run seldonio/core-python-wrapper:0.5 -h
docker run seldonio/core-python-wrapper:0.6 -h
```
Note also that you could use the python script directly if you feel so enclined, but you would have to check out seldon-core and install some python libraries on your local machine - by using the docker image you don't have to care about these dependencies.
62 changes: 62 additions & 0 deletions examples/routers/epsilon_greedy/EpsilonGreedy.py
@@ -0,0 +1,62 @@
import random
import numpy as np

__version__ = "v1.1"

def n_success_failures(features,reward):
    n_predictions = features.shape[0]
    n_success = int(reward*n_predictions)
    n_failures = n_predictions - n_success
    return n_success, n_failures

class EpsilonGreedy(object):

    def __init__(self,n_branches=None,epsilon=0.1,verbose=False):
        print "Starting Epsilon Greedy Microservice, version {}".format(__version__)
        if n_branches is None:
            raise Exception("n_branches parameter must be given")
        self.verbose = verbose
        self.epsilon = epsilon
        self.best_branch = 0
        self.branches_success = [0 for _ in range(n_branches)]
        self.branches_tries = [0 for _ in range(n_branches)]
        self.n_branches = n_branches
        if self.verbose:
            print "Router initialised"
            print "# branches:",self.n_branches
            print "Epsilon:",self.epsilon
            print

    def route(self,features,feature_names):
        # With probability 1-epsilon exploit the current best branch,
        # otherwise explore a branch chosen uniformly among the others.
        x = random.random()
        best_branch = self.best_branch
        other_branches = [i for i in range(self.n_branches) if i!=best_branch]
        selected_branch = best_branch if x>self.epsilon else random.choice(other_branches)
        if self.verbose:
            print "Routing"
            print "Current best branch:",best_branch
            print "Selected branch:",selected_branch
            print
        return selected_branch

    def send_feedback(self,features,feature_names,routing,reward,truth):
        if self.verbose:
            print "Training"
            print "Prev success #", self.branches_success
            print "Prev tries #", self.branches_tries
            print "Prev best branch:", self.best_branch
        # The reward is interpreted as the fraction of successful predictions in this batch.
        n_success, n_failures = n_success_failures(features,reward)
        self.branches_success[routing] += n_success
        self.branches_tries[routing] += n_success + n_failures
        # Add-one smoothed success rate per branch; route to the argmax from now on.
        perfs = [
            (self.branches_success[i]+1)/float(self.branches_tries[i]+1)
            for i
            in range(self.n_branches)
        ]
        self.best_branch = np.argmax(perfs)
        if self.verbose:
            print "New success #", self.branches_success
            print "New tries #", self.branches_tries
            print "New best branch:",self.best_branch
            print

1 change: 1 addition & 0 deletions examples/routers/epsilon_greedy/requirements.txt
@@ -0,0 +1 @@
numpy==1.11.2
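With EpsilonGreedy.py and its numpy requirement in place, the router logic can be exercised locally with a short script like the one below (Python 2, to match the module above; the dummy feature matrix and reward values are made up for illustration):

```python
import numpy as np
from EpsilonGreedy import EpsilonGreedy

router = EpsilonGreedy(n_branches=3, epsilon=0.1, verbose=True)

features = np.ones((10, 4))  # a batch of 10 dummy requests with 4 features each
names = ["f1", "f2", "f3", "f4"]

branch = router.route(features, names)

# Pretend branch 2 succeeds 90% of the time and the others only 10%.
reward = 0.9 if branch == 2 else 0.1
router.send_feedback(features, names, branch, reward, None)
```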
2 changes: 1 addition & 1 deletion helm-charts/seldon-core/values.yaml
@@ -7,7 +7,7 @@ cluster_manager:
  enabled: true
  image:
    pull_policy: IfNotPresent
    tag: 0.1.4-SNAPSHOT
    tag: SNAPSHOT-MF-0
  java_opts: ''
  rbac: false
  spring_opts: ''
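The cluster manager tag above comes from the chart's values, so it can also be overridden at install time rather than edited in place. A hedged example using Helm 2 syntax (the chart path and release name are assumptions):

```bash
helm install ./helm-charts/seldon-core --name seldon-core \
    --set cluster_manager.image.tag=SNAPSHOT-MF-0
```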