Skip to content

Commit

Permalink
[C-Api/Service] add new API (ML-API ext)
Browse files Browse the repository at this point in the history
Add new API set for ML service, these functions support constructing new handle from json configuration.
- type for ml-service: single, pipeline

Signed-off-by: Jaeyun Jung <jy1210.jung@samsung.com>
  • Loading branch information
jaeyun-jung committed Mar 8, 2024
1 parent 237fdcf commit ddbd595
Show file tree
Hide file tree
Showing 23 changed files with 3,297 additions and 29 deletions.
207 changes: 201 additions & 6 deletions c/include/ml-api-service.h
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,7 @@
#define __ML_API_SERVICE_H__

#include <nnstreamer.h>
#include <nnstreamer-single.h>

#ifdef __cplusplus
extern "C" {
Expand All @@ -46,15 +47,209 @@ extern "C" {
typedef void *ml_service_h;

/**
* @brief Callbacks for the events from ml-service.
* @since_tizen 9.0
*/
typedef struct {
void (*new_data) (ml_service_h handle, const char *name, const ml_tensors_data_h data, void *user_data); /**< Called when new data is processed from ml-service. */
void (*event) (ml_service_h handle, int event, void *event_data, void *user_data); /**< Called when a new event occurs from ml-service. */
} ml_service_callbacks_s;

/**
* @brief Creates a handle for machine learning service using a configuration file.
* @since_tizen 9.0
* @remarks %http://tizen.org/privilege/mediastorage is needed if the configuration is relevant to media storage.
* @remarks %http://tizen.org/privilege/externalstorage is needed if the configuration is relevant to external storage.
* @remarks The @a handle should be released using ml_service_destroy().
* @param[in] config The absolute path to configuration file.
* @param[out] handle The handle of ml-service.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_PERMISSION_DENIED The application does not have the privilege to access to the media storage or external storage.
* @retval #ML_ERROR_INVALID_PARAMETER The parameter is invalid.
* @retval #ML_ERROR_IO_ERROR Failed to parse the configuration file.
* @retval #ML_ERROR_STREAMS_PIPE Failed to open the model.
* @retval #ML_ERROR_OUT_OF_MEMORY Failed to allocate required memory.
*
* Here is an example of the usage:
* @code
*
* // Callback function for ml-service.
* // Note that this callback is invoked synchronously and the tensors-data handle is deallocated once the callback returns.
* // Thus, if you need the data afterwards, copy it to another buffer and return quickly.
* // Do not spend too much time in the callback.
* static void
* _ml_service_cb_new_data (ml_service_h handle, const char *name, const ml_tensors_data_h data, void *user_data)
* {
* void *_data;
* size_t _size;
*
* ml_tensors_data_get_tensor_data (data, 0, &_data, &_size);
* // Handle output data.
* }
*
* // The path to the configuration file.
* const char config_path[] = "/path/to/application/configuration/my_application_config.conf";
*
* // Create ml-service for model inference from configuration.
* ml_service_h handle;
* ml_service_callbacks_s cb = { 0 };
*
* cb.new_data = _ml_service_cb_new_data;
*
* ml_service_new (config_path, &handle);
* ml_service_set_event_cb (handle, &cb, NULL);
*
* // Get input information and allocate input buffer.
* ml_tensors_info_h input_info;
* void *input_buffer;
* size_t input_size;
* ml_service_get_input_information (handle, NULL, &input_info);
*
* ml_tensors_info_get_tensor_size (input_info, 0, &input_size);
* input_buffer = malloc (input_size);
*
* // Create input data handle.
* ml_tensors_data_h input;
*
* ml_tensors_data_create (input_info, &input);
* ml_tensors_data_set_tensor_data (input, 0, input_buffer, input_size);
*
* // Push input data into ml-service and process the output in the callback.
* ml_service_request (handle, NULL, input);
*
* // Finally, release all handles and allocated memories.
* ml_tensors_info_destroy (input_info);
* ml_tensors_data_destroy (input);
* ml_service_destroy (handle);
* free (input_buffer);
*
* @endcode
*/
int ml_service_new (const char *config, ml_service_h *handle);

/**
* @brief Sets the callbacks which will be invoked when a new event occurs from ml-service.
* @since_tizen 9.0
* @param[in] handle The handle of ml-service.
* @param[in] cb The callbacks to handle the events from ml-service.
* @param[in] user_data Private data for the callback. This value is passed to the callback when it's invoked.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER Given parameter is invalid.
*/
int ml_service_set_event_cb (ml_service_h handle, ml_service_callbacks_s *cb, void *user_data);

/**
* @brief Starts the process of ml-service.
* @since_tizen 9.0
* @param[in] handle The handle of ml-service.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER Given parameter is invalid.
* @retval #ML_ERROR_STREAMS_PIPE Failed to start the process.
*/
int ml_service_start (ml_service_h handle);

/**
* @brief Stops the process of ml-service.
* @since_tizen 9.0
* @param[in] handle The handle of ml-service.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER Given parameter is invalid.
* @retval #ML_ERROR_STREAMS_PIPE Failed to stop the process.
*/
int ml_service_stop (ml_service_h handle);

/**
* @brief Gets the information of required input data.
* @details Note that a model may not have such information if its input type is not determined statically.
* @since_tizen 9.0
* @remarks The @a info should be released using ml_tensors_info_destroy().
* @param[in] handle The handle of ml-service.
* @param[in] name The name of input node in the pipeline. You can set NULL if ml-service is constructed from model configuration.
* @param[out] info The handle of input tensors information.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER The parameter is invalid.
*/
int ml_service_get_input_information (ml_service_h handle, const char *name, ml_tensors_info_h *info);

/**
* @brief Gets the information of output data.
* @details Note that a model may not have such information if its output is not determined statically.
* @since_tizen 9.0
* @remarks The @a info should be released using ml_tensors_info_destroy().
* @param[in] handle The handle of ml-service.
* @param[in] name The name of output node in the pipeline. You can set NULL if ml-service is constructed from model configuration.
* @param[out] info The handle of output tensors information.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER The parameter is invalid.
*/
int ml_service_get_output_information (ml_service_h handle, const char *name, ml_tensors_info_h *info);

/**
* @brief Sets the information for ml-service.
* @since_tizen 9.0
* @param[in] handle The handle of ml-service.
* @param[in] name The name to set the corresponding value.
* @param[in] value The value of the name.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER The parameter is invalid.
*/
int ml_service_set_information (ml_service_h handle, const char *name, const char *value);

/**
* @brief Gets the information from ml-service.
* @details Note that a configuration file may not have such information field.
* @since_tizen 9.0
* @remarks The @a value should be released using free().
* @param[in] handle The handle of ml-service.
* @param[in] name The name to get the corresponding value.
* @param[out] value The value of the name.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER The parameter is invalid.
*/
int ml_service_get_information (ml_service_h handle, const char *name, char **value);

/**
* @brief Adds an input data to process the model in ml-service handle.
* @since_tizen 9.0
* @param[in] handle The handle of ml-service.
* @param[in] name The name of input node in the pipeline. You can set NULL if ml-service is constructed from model configuration.
* @param[in] data The handle of tensors data to be processed.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER The parameter is invalid.
* @retval #ML_ERROR_STREAMS_PIPE Failed to process the input data.
* @retval #ML_ERROR_OUT_OF_MEMORY Failed to allocate required memory.
*/
int ml_service_request (ml_service_h handle, const char *name, const ml_tensors_data_h data);

/**
* @brief Destroys the handle for machine learning service.
* @details If given service handle is created by ml_service_launch_pipeline(), this requests machine learning agent to destroy the pipeline.
* @since_tizen 7.0
* @param[in] handle The handle of ml-service.
* @return @c 0 on success. Otherwise a negative error value.
* @retval #ML_ERROR_NONE Successful.
* @retval #ML_ERROR_NOT_SUPPORTED Not supported.
* @retval #ML_ERROR_INVALID_PARAMETER The parameter is invalid.
* @retval #ML_ERROR_STREAMS_PIPE Failed to stop the process.
*/
int ml_service_destroy (ml_service_h handle);

Expand Down
11 changes: 5 additions & 6 deletions c/include/nnstreamer-tizen-internal.h
Original file line number Diff line number Diff line change
Expand Up @@ -39,16 +39,15 @@ typedef struct {
char *fw_name; /**< The explicit framework name given by user */
} ml_single_preset;

typedef void *ml_service_event_h;

/**
 * @brief Enumeration for the event types of ml-service.
 * @since_tizen 9.0
 * @todo TBU, need ACR later (update enum for ml-service event, see ml_service_callbacks_s)
 */
typedef enum {
  ML_SERVICE_EVENT_MODEL_REGISTERED = 0, /**< TBU */
  ML_SERVICE_EVENT_PIPELINE_REGISTERED = 1, /**< TBU */
  ML_SERVICE_EVENT_UNKNOWN /**< Unknown or invalid event type. */
} ml_service_event_e;

/**
Expand Down
2 changes: 1 addition & 1 deletion c/src/meson.build
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
# Source lists for the ML-API common, single, pipeline and service libraries.
nns_capi_common_srcs = files('ml-api-common.c', 'ml-api-inference-internal.c')
nns_capi_single_srcs = files('ml-api-inference-single.c')
nns_capi_pipeline_srcs = files('ml-api-inference-pipeline.c')
nns_capi_service_srcs = files('ml-api-service-common.c', 'ml-api-service-extension.c', 'ml-api-service-agent-client.c', 'ml-api-service-query-client.c')
if support_remote_service
  nns_capi_service_srcs += files('ml-api-service-remote.c')
endif
Expand Down
2 changes: 0 additions & 2 deletions c/src/ml-api-service-agent-client.c
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,6 @@
#include <glib/gstdio.h>
#include <json-glib/json-glib.h>

#include <ml/mlops-agent-interface.h>

#include "ml-api-internal.h"
#include "ml-api-service-private.h"
#include "ml-api-service.h"
Expand Down
Loading

0 comments on commit ddbd595

Please sign in to comment.