[C API] NP_GetType implementation (#419)
### Summary:
Continue the basic implementation of a C API for Neuropod: add NP_GetType and a generic tensor allocator (NP_GetGenericAllocator). Issue #407

### Test Plan:
Added type checks for the input and output tensors in the existing inference test, plus a new TestTensorGetters case covering several tensor types.
vkuzmin-uber authored Aug 19, 2020
1 parent 6c3d499 commit fd4dae6
Showing 4 changed files with 46 additions and 0 deletions.
8 changes: 8 additions & 0 deletions source/neuropod/bindings/c/c_api.cc
@@ -21,6 +21,7 @@ limitations under the License.
#include "neuropod/bindings/c/np_status_internal.h"
#include "neuropod/bindings/c/np_tensor_allocator_internal.h"
#include "neuropod/bindings/c/np_valuemap_internal.h"
#include "neuropod/core/generic_tensor.hh"

#include <exception>
#include <string>
@@ -137,3 +138,10 @@ NP_TensorAllocator *NP_GetAllocator(NP_Neuropod *model)
out->allocator = model->model->get_tensor_allocator();
return out;
}

NP_TensorAllocator *NP_GetGenericAllocator()
{
auto out = new NP_TensorAllocator();
out->allocator = neuropod::get_generic_tensor_allocator();
return out;
}
4 changes: 4 additions & 0 deletions source/neuropod/bindings/c/c_api.h
@@ -122,6 +122,10 @@ NP_TensorSpec *NP_GetOutputSpec(NP_Neuropod *model, size_t index);
// Note: The caller is responsible for freeing the returned TensorAllocator
NP_TensorAllocator *NP_GetAllocator(NP_Neuropod *model);

// Get a generic allocator for allocating generic tensors; useful for tests.
// Note: The caller is responsible for freeing the returned TensorAllocator
NP_TensorAllocator *NP_GetGenericAllocator();

#ifdef __cplusplus
}
#endif
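
For orientation, here is a minimal sketch of how the declarations above might be exercised from plain C. The include path is an assumption based on the header's location in the repository; the allocator, tensor, and type functions and the FLOAT_TENSOR constant are the ones that appear elsewhere in this commit.

// Sketch only: allocate a tensor with the generic allocator and query its type.
#include "neuropod/bindings/c/c_api.h" // assumed include path

#include <assert.h>
#include <stdint.h>

int main(void)
{
    NP_TensorAllocator *allocator = NP_GetGenericAllocator();
    assert(allocator != NULL);

    int64_t dims[] = {2, 3};
    NP_NeuropodTensor *t =
        NP_AllocateTensor(allocator, sizeof(dims) / sizeof(int64_t), dims, FLOAT_TENSOR);

    // NP_GetType reports the element type the tensor was allocated with.
    assert(NP_GetType(t) == FLOAT_TENSOR);

    NP_FreeTensor(t);
    NP_FreeAllocator(allocator);
    return 0;
}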
5 changes: 5 additions & 0 deletions source/neuropod/bindings/c/np_tensor.cc
@@ -18,6 +18,11 @@ limitations under the License.
#include "neuropod/bindings/c/np_tensor_internal.h"
#include "neuropod/internal/neuropod_tensor_raw_data_access.hh"

NP_TensorType NP_GetType(const NP_NeuropodTensor *tensor)
{
return static_cast<NP_TensorType>(tensor->tensor->as_tensor()->get_tensor_type());
}

void *NP_GetData(NP_NeuropodTensor *tensor)
{
return neuropod::internal::NeuropodTensorRawDataAccess::get_untyped_data_ptr(*tensor->tensor->as_tensor());
29 changes: 29 additions & 0 deletions source/neuropod/tests/test_c_api.c
@@ -105,6 +105,9 @@ static void TestLoadAndInference(void)
NP_InsertTensor(inputs, "x", x);
NP_InsertTensor(inputs, "y", y);

ASSERT_EQ(NP_GetType(x), FLOAT_TENSOR);
ASSERT_EQ(NP_GetType(y), FLOAT_TENSOR);

// Free the input tensors
NP_FreeTensor(x);
NP_FreeTensor(y);
@@ -124,6 +127,9 @@ static void TestLoadAndInference(void)
NP_NeuropodTensor *out = NP_GetTensor(outputs, "out");
float * out_data = (float *) NP_GetData(out);
size_t nout_data = NP_GetNumElements(out);

ASSERT_EQ(NP_GetType(out), FLOAT_TENSOR);

for (size_t i = 0; i < nout_data; ++i)
{
ASSERT_EQ(out_data[i], target[i]);
@@ -231,10 +237,33 @@ static void TestLoadAndInferenceWithOptions(void)
NP_FreeNeuropod(model);
}

static void TestTensorGetters(void)
{
NP_TensorAllocator *allocator = NP_GetGenericAllocator();
ASSERT_NE(allocator, NULL);

// Create tensors of different types and verify NP_GetType for each.
int64_t dims[] = {2, 2};
NP_NeuropodTensor *x = NP_AllocateTensor(allocator, sizeof(dims) / sizeof(int64_t), dims, FLOAT_TENSOR);
NP_NeuropodTensor *y = NP_AllocateTensor(allocator, sizeof(dims) / sizeof(int64_t), dims, DOUBLE_TENSOR);
NP_NeuropodTensor *z = NP_AllocateTensor(allocator, sizeof(dims) / sizeof(int64_t), dims, STRING_TENSOR);

ASSERT_EQ(NP_GetType(x), FLOAT_TENSOR);
ASSERT_EQ(NP_GetType(y), DOUBLE_TENSOR);
ASSERT_EQ(NP_GetType(z), STRING_TENSOR);

NP_FreeTensor(x);
NP_FreeTensor(y);
NP_FreeTensor(z);

NP_FreeAllocator(allocator);
}

static void RunTests(void)
{
TestLoadAndInference();
TestLoadAndInferenceWithOptions();
TestTensorGetters();
}

int main(void)