Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Make debug easier #854

Merged
merged 5 commits into from
Sep 13, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
116 changes: 66 additions & 50 deletions python/seldon_core/seldon_methods.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,10 +34,11 @@ def predict(
logger.warning("predict_grpc is deprecated. Please use predict_raw")
return user_model.predict_grpc(request)
else:
try:
return user_model.predict_raw(request)
except (NotImplementedError, AttributeError):
pass
if hasattr(user_model, "predict_raw"):
try:
return user_model.predict_raw(request)
except SeldonNotImplementedError:
pass

if is_proto:
(features, meta, datadef, data_type) = extract_request_parts(request)
Expand Down Expand Up @@ -76,18 +77,21 @@ def send_feedback(user_model: Any, request: prediction_pb2.Feedback,
response_json = user_model.send_feedback_grpc(request)
return json_to_seldon_message(response_json)
else:
try:
return user_model.send_feedback_raw(request)
except (NotImplementedError, AttributeError):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

How are AttributeErrors handled now? Can we add a test?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do we need to catch AttributeError? send_feedback_raw originally raises NotImplementedError.

Copy link
Contributor Author

@dtaniwaki dtaniwaki Sep 11, 2019

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I got your point. I'll add a hasattr check. Can I also include the error class change for #700?

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yes, I added "Fix" to your top-level comment. So yes, it would be great if you could extend this to cover #700.

(datadef_request, features, truth, reward) = extract_feedback_request_parts(request)
routing = request.response.meta.routing.get(predictive_unit_id)
client_response = client_send_feedback(user_model, features, datadef_request.names, reward, truth, routing)

if client_response is None:
client_response = np.array([])
else:
client_response = np.array(client_response)
return construct_response(user_model, False, request.request, client_response)
if hasattr(user_model, "send_feedback_raw"):
try:
return user_model.send_feedback_raw(request)
except SeldonNotImplementedError:
pass

(datadef_request, features, truth, reward) = extract_feedback_request_parts(request)
routing = request.response.meta.routing.get(predictive_unit_id)
client_response = client_send_feedback(user_model, features, datadef_request.names, reward, truth, routing)

if client_response is None:
client_response = np.array([])
else:
client_response = np.array(client_response)
return construct_response(user_model, False, request.request, client_response)


def transform_input(user_model: Any, request: prediction_pb2.SeldonMessage) -> prediction_pb2.SeldonMessage:
Expand All @@ -114,13 +118,16 @@ def transform_input(user_model: Any, request: prediction_pb2.SeldonMessage) -> p
logger.warning("transform_input_grpc is deprecated. Please use transform_input_raw")
return user_model.transform_input_grpc(request)
else:
try:
return user_model.transform_input_raw(request)
except (NotImplementedError, AttributeError):
(features, meta, datadef, data_type) = extract_request_parts(request)
client_response = client_transform_input(user_model, features, datadef.names, meta=meta)
if hasattr(user_model, "transform_input_raw"):
try:
return user_model.transform_input_raw(request)
except SeldonNotImplementedError:
pass

(features, meta, datadef, data_type) = extract_request_parts(request)
client_response = client_transform_input(user_model, features, datadef.names, meta=meta)

return construct_response(user_model, True, request, client_response)
return construct_response(user_model, True, request, client_response)


def transform_output(user_model: Any,
Expand Down Expand Up @@ -148,12 +155,15 @@ def transform_output(user_model: Any,
logger.warning("transform_input_grpc is deprecated. Please use transform_input_raw")
return user_model.transform_output_grpc(request)
else:
try:
return user_model.transform_output_raw(request)
except (NotImplementedError, AttributeError):
(features, meta, datadef, data_type) = extract_request_parts(request)
client_response = client_transform_output(user_model, features, datadef.names, meta=meta)
return construct_response(user_model, False, request, client_response)
if hasattr(user_model, "transform_output_raw"):
try:
return user_model.transform_output_raw(request)
except SeldonNotImplementedError:
pass

(features, meta, datadef, data_type) = extract_request_parts(request)
client_response = client_transform_output(user_model, features, datadef.names, meta=meta)
return construct_response(user_model, False, request, client_response)


def route(user_model: Any, request: prediction_pb2.SeldonMessage) -> prediction_pb2.SeldonMessage:
Expand All @@ -178,15 +188,18 @@ def route(user_model: Any, request: prediction_pb2.SeldonMessage) -> prediction_
logger.warning("route_grpc is deprecated. Please use route_raw")
return user_model.route_grpc(request)
else:
try:
return user_model.route_raw(request)
except (NotImplementedError, AttributeError):
(features, meta, datadef, _) = extract_request_parts(request)
client_response = client_route(user_model, features, datadef.names)
if not isinstance(client_response, int):
raise SeldonMicroserviceException("Routing response must be int but got " + str(client_response))
client_response_arr = np.array([[client_response]])
return construct_response(user_model, True, request, client_response_arr)
if hasattr(user_model, "route_raw"):
try:
return user_model.route_raw(request)
except SeldonNotImplementedError:
pass

(features, meta, datadef, _) = extract_request_parts(request)
client_response = client_route(user_model, features, datadef.names)
if not isinstance(client_response, int):
raise SeldonMicroserviceException("Routing response must be int but got " + str(client_response))
client_response_arr = np.array([[client_response]])
return construct_response(user_model, True, request, client_response_arr)


def aggregate(user_model: Any, request: prediction_pb2.SeldonMessageList) -> prediction_pb2.SeldonMessage:
Expand Down Expand Up @@ -214,16 +227,19 @@ def aggregate(user_model: Any, request: prediction_pb2.SeldonMessageList) -> pre
logger.warning("aggregate_grpc is deprecated. Please use aggregate_raw")
return user_model.aggregate_grpc(request)
else:
try:
return user_model.aggregate_raw(request)
except (NotImplementedError, AttributeError):
features_list = []
names_list = []

for msg in request.seldonMessages:
(features, meta, datadef, data_type) = extract_request_parts(msg)
features_list.append(features)
names_list.append(datadef.names)

client_response = client_aggregate(user_model, features_list, names_list)
return construct_response(user_model, False, request.seldonMessages[0], client_response)
if hasattr(user_model, "aggregate_raw"):
try:
return user_model.aggregate_raw(request)
except SeldonNotImplementedError:
pass

features_list = []
names_list = []

for msg in request.seldonMessages:
(features, meta, datadef, data_type) = extract_request_parts(msg)
features_list.append(features)
names_list.append(datadef.names)

client_response = client_aggregate(user_model, features_list, names_list)
return construct_response(user_model, False, request.seldonMessages[0], client_response)
Loading