# Copyright 2017-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from __future__ import absolute_import

import botocore.exceptions
import pytest

import sagemaker
import sagemaker.predictor
import sagemaker.utils
import tests.integ
import tests.integ.timeout
from sagemaker.tensorflow.serving import Model, Predictor


@pytest.fixture(scope='session', params=[
    'ml.c5.xlarge',
    pytest.param('ml.p3.2xlarge',
                 marks=pytest.mark.skipif(
                     tests.integ.test_region() in tests.integ.HOSTING_NO_P3_REGIONS,
                     reason='no ml.p3 instances in this region'))])
def instance_type(request):
    return request.param


@pytest.fixture(scope='module')
def tfs_predictor(instance_type, sagemaker_session, tf_full_version):
    # Upload the bundled TensorFlow Serving test model and deploy it to a real endpoint.
    # The endpoint is torn down when the timeout context exits.
    endpoint_name = sagemaker.utils.unique_name_from_base('sagemaker-tensorflow-serving')
    model_data = sagemaker_session.upload_data(
        path='tests/data/tensorflow-serving-test-model.tar.gz',
        key_prefix='tensorflow-serving/models')

    with tests.integ.timeout.timeout_and_delete_endpoint_by_name(endpoint_name, sagemaker_session):
        model = Model(model_data=model_data, role='SageMakerRole',
                      framework_version=tf_full_version,
                      sagemaker_session=sagemaker_session)
        predictor = model.deploy(1, instance_type, endpoint_name=endpoint_name)
        yield predictor


@pytest.mark.canary_quick
def test_predict(tfs_predictor, instance_type):  # pylint: disable=W0613
    input_data = {'instances': [1.0, 2.0, 5.0]}
    expected_result = {'predictions': [3.5, 4.0, 5.5]}

    result = tfs_predictor.predict(input_data)
    assert expected_result == result


def test_predict_generic_json(tfs_predictor):
    input_data = [[1.0, 2.0, 5.0], [1.0, 2.0, 5.0]]
    expected_result = {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}

    result = tfs_predictor.predict(input_data)
    assert expected_result == result


def test_predict_jsons_json_content_type(tfs_predictor):
    input_data = '[1.0, 2.0, 5.0]\n[1.0, 2.0, 5.0]'
    expected_result = {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
    predictor = sagemaker.RealTimePredictor(tfs_predictor.endpoint,
                                            tfs_predictor.sagemaker_session, serializer=None,
                                            deserializer=sagemaker.predictor.json_deserializer,
                                            content_type='application/json',
                                            accept='application/json')

    result = predictor.predict(input_data)
    assert expected_result == result


def test_predict_jsons(tfs_predictor):
    input_data = '[1.0, 2.0, 5.0]\n[1.0, 2.0, 5.0]'
    expected_result = {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
    predictor = sagemaker.RealTimePredictor(tfs_predictor.endpoint,
                                            tfs_predictor.sagemaker_session, serializer=None,
                                            deserializer=sagemaker.predictor.json_deserializer,
                                            content_type='application/jsons',
                                            accept='application/jsons')

    result = predictor.predict(input_data)
    assert expected_result == result


def test_predict_jsonlines(tfs_predictor):
    input_data = '[1.0, 2.0, 5.0]\n[1.0, 2.0, 5.0]'
    expected_result = {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}
    predictor = sagemaker.RealTimePredictor(tfs_predictor.endpoint,
                                            tfs_predictor.sagemaker_session, serializer=None,
                                            deserializer=sagemaker.predictor.json_deserializer,
                                            content_type='application/jsonlines',
                                            accept='application/jsonlines')

    result = predictor.predict(input_data)
    assert expected_result == result


def test_predict_csv(tfs_predictor):
    input_data = '1.0,2.0,5.0\n1.0,2.0,5.0'
    expected_result = {'predictions': [[3.5, 4.0, 5.5], [3.5, 4.0, 5.5]]}

    predictor = Predictor(tfs_predictor.endpoint, tfs_predictor.sagemaker_session,
                          serializer=sagemaker.predictor.csv_serializer)

    result = predictor.predict(input_data)
    assert expected_result == result


def test_predict_bad_input(tfs_predictor):
    input_data = {'junk': 'data'}
    with pytest.raises(botocore.exceptions.ClientError):
        tfs_predictor.predict(input_data)