#!/usr/bin/env python
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Examples of using the Cloud ML Engine's online prediction service."""
import base64
import json
import pickle

# [START import_libraries]
import googleapiclient.discovery
from oauth2client.client import GoogleCredentials
# [END import_libraries]
import six

# scikit-learn must be installed so that the CountVectorizer pickled in
# 'vectorizer.pickle' can be loaded by tweet_to_instance() below.
# from sklearn.feature_extraction.text import CountVectorizer


# [START predict_json]
def predict_json(project, model, instances, version=None):
"""Send json data to a deployed model for prediction.
Args:
project (str): project where the Cloud ML Engine Model is deployed.
model (str): model name.
instances ([Mapping[str: Any]]): Keys should be the names of Tensors
your deployed model expects as inputs. Values should be datatypes
convertible to Tensors, or (potentially nested) lists of datatypes
convertible to tensors.
version: str, version of the model to target.
Returns:
Mapping[str: any]: dictionary of prediction results defined by the
model.
"""
# Create the ML Engine service object.
# To authenticate set the environment variable
# GOOGLE_APPLICATION_CREDENTIALS=<path_to_service_account_file>
service = googleapiclient.discovery.build('ml', 'v1')
name = 'projects/{}/models/{}'.format(project, model)
if version is not None:
name += '/versions/{}'.format(version)
response = service.projects().predict(
name=name,
body={'instances': instances}
).execute()
if 'error' in response:
raise RuntimeError(response['error'])
return response['predictions']
# [END predict_json]
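
# A hypothetical usage sketch for predict_json(). 'my-project' and 'census'
# are placeholder names, and the instance keys below must be replaced with
# the input Tensors your own deployed model actually expects.
def _example_predict_json():
    instances = [{'age': 25, 'workclass': ' Private'}]
    return predict_json('my-project', 'census', instances)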


# [START predict_tf_records]
def predict_examples(project,
model,
example_bytes_list,
version=None):
"""Send protocol buffer data to a deployed model for prediction.
Args:
project (str): project where the Cloud ML Engine Model is deployed.
model (str): model name.
example_bytes_list ([str]): A list of bytestrings representing
serialized tf.train.Example protocol buffers. The contents of this
protocol buffer will change depending on the signature of your
deployed model.
version: str, version of the model to target.
Returns:
Mapping[str: any]: dictionary of prediction results defined by the
model.
"""
service = googleapiclient.discovery.build('ml', 'v1')
name = 'projects/{}/models/{}'.format(project, model)
if version is not None:
name += '/versions/{}'.format(version)
response = service.projects().predict(
name=name,
body={'instances': [
{'b64': base64.b64encode(example_bytes).decode('utf-8')}
for example_bytes in example_bytes_list
]}
).execute()
if 'error' in response:
raise RuntimeError(response['error'])
return response['predictions']
# [END predict_tf_records]
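
# Note: the Cloud ML Engine REST API expects binary payloads to be sent as
# {'b64': <base64 string>}, which is why predict_examples() wraps each
# serialized tf.train.Example this way.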


# [START census_to_example_bytes]
def census_to_example_bytes(json_instance):
"""Serialize a JSON example to the bytes of a tf.train.Example.
This method is specific to the signature of the Census example.
See: https://cloud.google.com/ml-engine/docs/concepts/prediction-overview
for details.
Args:
json_instance (Mapping[str: Any]): Keys should be the names of Tensors
your deployed model expects to parse using it's tf.FeatureSpec.
Values should be datatypes convertible to Tensors, or (potentially
nested) lists of datatypes convertible to tensors.
Returns:
str: A string as a container for the serialized bytes of
tf.train.Example protocol buffer.
"""
import tensorflow as tf
feature_dict = {}
for key, data in six.iteritems(json_instance):
if isinstance(data, six.string_types):
feature_dict[key] = tf.train.Feature(
bytes_list=tf.train.BytesList(value=[data.encode('utf-8')]))
elif isinstance(data, float):
feature_dict[key] = tf.train.Feature(
float_list=tf.train.FloatList(value=[data]))
elif isinstance(data, int):
feature_dict[key] = tf.train.Feature(
int64_list=tf.train.Int64List(value=[data]))
return tf.train.Example(
features=tf.train.Features(
feature=feature_dict
)
).SerializeToString()
# [END census_to_example_bytes]
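
# A hypothetical end-to-end sketch: serialize JSON instances with
# census_to_example_bytes() and send them via predict_examples().
# 'my-project' and 'census' are placeholder names.
def _example_predict_census_examples():
    json_instances = [{'age': 25, 'workclass': ' Private'}]
    example_bytes_list = [census_to_example_bytes(e) for e in json_instances]
    return predict_examples('my-project', 'census', example_bytes_list)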


def tweet_to_instance(tweet):
    """Vectorize a list of tweet strings with the pickled CountVectorizer."""
    with open('vectorizer.pickle', 'rb') as f:
        vec = pickle.load(f)
    # Convert the sparse matrix to nested lists so the result is
    # JSON-serializable for predict_json().
    return vec.transform(tweet).toarray().tolist()


def predict_json_with_tweet(project, model, tweet, version=None):
    return predict_json(project, model, tweet_to_instance(tweet), version)
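
# A hypothetical call; "RuTroll" and "testModel" are placeholder project and
# model names for a deployed tweet classifier:
#
#     predict_json_with_tweet("RuTroll", "testModel", ["Hello, world"])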


def main(project, model, version=None, force_tfrecord=False):
"""Send user input to the prediction service."""
while True:
try:
            user_input = json.loads(input("Valid JSON >>> "))
except KeyboardInterrupt:
return
if not isinstance(user_input, list):
user_input = [user_input]
try:
if force_tfrecord:
example_bytes_list = [
census_to_example_bytes(e)
for e in user_input
]
result = predict_examples(
project, model, example_bytes_list, version=version)
else:
result = predict_json(
project, model, user_input, version=version)
except RuntimeError as err:
print(str(err))
else:
print(result)
if __name__ == "__main__":
# print "Hello world"
# predict_json_with_tweet("RuTroll", "testModel", "Hello, world")
vec_tweet = tweet_to_instance(["Hello world!"])