RSDK-9338: add optional parameters in data client (viamrobotics#4595)
purplenicole730 authored and vijayvuyyuru committed Dec 4, 2024
1 parent 1c43a7e commit 5ab540c
Showing 2 changed files with 141 additions and 97 deletions.
175 changes: 106 additions & 69 deletions app/data_client.go
@@ -274,26 +274,41 @@ type FileData struct {
Data []byte
}

// BinaryOptions represents optional parameters for the BinaryDataCaptureUpload method.
type BinaryOptions struct {
// DataByFilterOptions contains optional parameters for TabularDataByFilter and BinaryDataByFilter.
type DataByFilterOptions struct {
// No Filter implies all data.
Filter *Filter
// Limit is the maximum number of entries to include in a page. Limit defaults to 50 if unspecified.
Limit int
// Last indicates the object identifier of the Last-returned data.
// This is returned by calls to TabularDataByFilter and BinaryDataByFilter as the `Last` value.
// If provided, the server will return the next data entries after the last object identifier.
Last string
SortOrder Order
CountOnly bool
IncludeInternalData bool
}
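
// Usage sketch (not part of this commit): a minimal caller of the new DataByFilterOptions
// with the updated TabularDataByFilter signature shown below. It assumes an existing
// *DataClient named dataClient, that the package is imported as "go.viam.com/rdk/app",
// that the hypothetical component name "camera-1" exists, and that TabularDataReturn
// exposes the Last cursor described in the field comments above.

package example

import (
	"context"
	"fmt"

	"go.viam.com/rdk/app"
)

func listTabularPage(ctx context.Context, dataClient *app.DataClient) error {
	// Pass nil instead of opts to query all data with the server defaults
	// (no filter, limit of 50).
	opts := &app.DataByFilterOptions{
		Filter: &app.Filter{ComponentName: "camera-1"}, // hypothetical component name
		Limit:  25,
	}
	page, err := dataClient.TabularDataByFilter(ctx, opts) // deprecated endpoint, per the comment below
	if err != nil {
		return err
	}
	fmt.Println("cursor for the next page:", page.Last)
	return nil
}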

// BinaryDataCaptureUploadOptions represents optional parameters for the BinaryDataCaptureUpload method.
type BinaryDataCaptureUploadOptions struct {
Type *DataType
FileName *string
MethodParameters map[string]interface{}
Tags []string
DataRequestTimes *[2]time.Time
}
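
// Hedged sketch (not part of this commit) of a binary capture upload with the new options
// type. The hunk for BinaryDataCaptureUpload later in this diff only shows the trailing
// parameters (componentName, methodName, fileExtension, options), so the leading argument
// order used here (raw bytes, part ID, component type) is an assumption, as are the
// component and method names. Same imports as the earlier sketch, plus "time".

func uploadJPEG(ctx context.Context, dataClient *app.DataClient, partID string, img []byte) (string, error) {
	captured := time.Now()
	opts := &app.BinaryDataCaptureUploadOptions{
		Tags:             []string{"training"},
		DataRequestTimes: &[2]time.Time{captured, captured}, // [request time, response time]
	}
	// Arguments before "camera-1" (bytes, part ID, component type) are assumed;
	// only the trailing parameters are visible in this diff.
	return dataClient.BinaryDataCaptureUpload(ctx, img, partID, "rdk:component:camera", "camera-1", "ReadImage", ".jpg", opts)
}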

// TabularOptions represents optional parameters for the TabularDataCaptureUpload method.
type TabularOptions struct {
// TabularDataCaptureUploadOptions represents optional parameters for the TabularDataCaptureUpload method.
type TabularDataCaptureUploadOptions struct {
Type *DataType
FileName *string
MethodParameters map[string]interface{}
FileExtension *string
Tags []string
}
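
// A similar hedged sketch (not part of this commit) for tabular capture upload. As above,
// only the trailing parameters of TabularDataCaptureUpload are visible in this diff, so the
// leading arguments (readings, part ID, component type) and the names used are assumptions.

func uploadReadings(ctx context.Context, dataClient *app.DataClient, partID string) (string, error) {
	now := time.Now()
	readings := []map[string]interface{}{{"temperature_c": 21.5}}
	// dataRequestTimes needs one [request, response] pair per reading;
	// the implementation below checks that the two lengths match.
	times := [][2]time.Time{{now, now}}
	opts := &app.TabularDataCaptureUploadOptions{Tags: []string{"lab"}}
	return dataClient.TabularDataCaptureUpload(ctx, readings, partID, "rdk:component:sensor", "temp-sensor", "Readings", times, opts)
}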

// StreamingOptions represents optional parameters for the StreamingDataCaptureUpload method.
type StreamingOptions struct {
// StreamingDataCaptureUploadOptions represents optional parameters for the StreamingDataCaptureUpload method.
type StreamingDataCaptureUploadOptions struct {
ComponentType *string
ComponentName *string
MethodName *string
@@ -315,6 +330,17 @@ type FileUploadOptions struct {
Tags []string
}

// UpdateBoundingBoxOptions contains optional parameters for UpdateBoundingBox.
type UpdateBoundingBoxOptions struct {
Label *string

// Normalized coordinates where all coordinates must be in the range [0, 1].
XMinNormalized *float64
YMinNormalized *float64
XMaxNormalized *float64
YMaxNormalized *float64
}
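
// Sketch (not part of this commit) of the new UpdateBoundingBox call, whose signature
// appears later in this diff. The BinaryID field values are placeholders.

func relabelBox(ctx context.Context, dataClient *app.DataClient, bboxID string) error {
	binaryID := &app.BinaryID{
		FileID:         "<file-id>",
		OrganizationID: "<org-id>",
		LocationID:     "<location-id>",
	}
	newLabel := "person"
	xMin := 0.1
	opts := &app.UpdateBoundingBoxOptions{
		Label:          &newLabel,
		XMinNormalized: &xMin, // coordinates must stay within [0, 1]
		// Fields left nil are presumably left unchanged on the server.
	}
	return dataClient.UpdateBoundingBox(ctx, binaryID, bboxID, opts)
}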

// Dataset contains the information of a dataset.
type Dataset struct {
ID string
@@ -360,23 +386,24 @@ func BsonToGo(rawData [][]byte) ([]map[string]interface{}, error) {

// TabularDataByFilter queries tabular data and metadata based on given filters.
// Deprecated: This endpoint will be removed in a future version.
func (d *DataClient) TabularDataByFilter(
ctx context.Context,
filter Filter,
limit int,
last string,
sortOrder Order,
countOnly bool,
includeInternalData bool,
) (TabularDataReturn, error) {
func (d *DataClient) TabularDataByFilter(ctx context.Context, opts *DataByFilterOptions) (TabularDataReturn, error) {
dataReq := pb.DataRequest{}
var countOnly, includeInternalData bool
if opts != nil {
dataReq.Filter = filterToProto(opts.Filter)
if opts.Limit != 0 {
dataReq.Limit = uint64(opts.Limit)
}
if opts.Last != "" {
dataReq.Last = opts.Last
}
dataReq.SortOrder = orderToProto(opts.SortOrder)
countOnly = opts.CountOnly
includeInternalData = opts.IncludeInternalData
}
//nolint:deprecated,staticcheck
resp, err := d.dataClient.TabularDataByFilter(ctx, &pb.TabularDataByFilterRequest{
DataRequest: &pb.DataRequest{
Filter: filterToProto(filter),
Limit: uint64(limit),
Last: last,
SortOrder: orderToProto(sortOrder),
},
DataRequest: &dataReq,
CountOnly: countOnly,
IncludeInternalData: includeInternalData,
})
@@ -459,22 +486,24 @@ func (d *DataClient) GetLatestTabularData(ctx context.Context, partID, resourceN

// BinaryDataByFilter queries binary data and metadata based on given filters.
func (d *DataClient) BinaryDataByFilter(
ctx context.Context,
filter Filter,
limit int,
sortOrder Order,
last string,
includeBinary bool,
countOnly bool,
includeInternalData bool,
ctx context.Context, includeBinary bool, opts *DataByFilterOptions,
) (BinaryDataReturn, error) {
dataReq := pb.DataRequest{}
var countOnly, includeInternalData bool
if opts != nil {
dataReq.Filter = filterToProto(opts.Filter)
if opts.Limit != 0 {
dataReq.Limit = uint64(opts.Limit)
}
if opts.Last != "" {
dataReq.Last = opts.Last
}
dataReq.SortOrder = orderToProto(opts.SortOrder)
countOnly = opts.CountOnly
includeInternalData = opts.IncludeInternalData
}
resp, err := d.dataClient.BinaryDataByFilter(ctx, &pb.BinaryDataByFilterRequest{
DataRequest: &pb.DataRequest{
Filter: filterToProto(filter),
Limit: uint64(limit),
Last: last,
SortOrder: orderToProto(sortOrder),
},
DataRequest: &dataReq,
IncludeBinary: includeBinary,
CountOnly: countOnly,
IncludeInternalData: includeInternalData,
@@ -494,7 +523,7 @@ func (d *DataClient) BinaryDataByFilter(
}
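
// Hedged paging sketch (not part of this commit) against the new BinaryDataByFilter
// signature above. It assumes BinaryDataReturn exposes BinaryData and Last fields;
// the Last cursor semantics follow the DataByFilterOptions comments earlier in this diff.

func countMatchingBinaries(ctx context.Context, dataClient *app.DataClient) (int, error) {
	opts := &app.DataByFilterOptions{
		Filter: &app.Filter{ComponentName: "camera-1"}, // hypothetical component name
		Limit:  50,
	}
	total := 0
	for {
		// includeBinary=false: fetch metadata only, skip the binary payloads.
		page, err := dataClient.BinaryDataByFilter(ctx, false, opts)
		if err != nil {
			return 0, err
		}
		if len(page.BinaryData) == 0 { // assumed field name for the returned entries
			return total, nil
		}
		total += len(page.BinaryData)
		opts.Last = page.Last // resume after the last-returned object on the next call
	}
}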

// BinaryDataByIDs queries binary data and metadata based on given IDs.
func (d *DataClient) BinaryDataByIDs(ctx context.Context, binaryIDs []BinaryID) ([]BinaryData, error) {
func (d *DataClient) BinaryDataByIDs(ctx context.Context, binaryIDs []*BinaryID) ([]BinaryData, error) {
resp, err := d.dataClient.BinaryDataByIDs(ctx, &pb.BinaryDataByIDsRequest{
IncludeBinary: true,
BinaryIds: binaryIDsToProto(binaryIDs),
@@ -522,9 +551,9 @@ func (d *DataClient) DeleteTabularData(ctx context.Context, organizationID strin
return int(resp.DeletedCount), nil
}

// DeleteBinaryDataByFilter deletes binary data based on given filters.
// DeleteBinaryDataByFilter deletes binary data based on given filters. If filter is empty, delete all data.
// It returns the number of binary datapoints deleted.
func (d *DataClient) DeleteBinaryDataByFilter(ctx context.Context, filter Filter) (int, error) {
func (d *DataClient) DeleteBinaryDataByFilter(ctx context.Context, filter *Filter) (int, error) {
resp, err := d.dataClient.DeleteBinaryDataByFilter(ctx, &pb.DeleteBinaryDataByFilterRequest{
Filter: filterToProto(filter),
IncludeInternalData: true,
@@ -537,7 +566,7 @@ func (d *DataClient) DeleteBinaryDataByFilter(ctx context.Context, filter Filter

// DeleteBinaryDataByIDs deletes binary data based on given IDs.
// It returns the number of binary datapoints deleted.
func (d *DataClient) DeleteBinaryDataByIDs(ctx context.Context, binaryIDs []BinaryID) (int, error) {
func (d *DataClient) DeleteBinaryDataByIDs(ctx context.Context, binaryIDs []*BinaryID) (int, error) {
resp, err := d.dataClient.DeleteBinaryDataByIDs(ctx, &pb.DeleteBinaryDataByIDsRequest{
BinaryIds: binaryIDsToProto(binaryIDs),
})
@@ -548,7 +577,7 @@ func (d *DataClient) DeleteBinaryDataByIDs(ctx context.Context, binaryIDs []Bina
}

// AddTagsToBinaryDataByIDs adds string tags, unless the tags are already present, to binary data based on given IDs.
func (d *DataClient) AddTagsToBinaryDataByIDs(ctx context.Context, tags []string, binaryIDs []BinaryID) error {
func (d *DataClient) AddTagsToBinaryDataByIDs(ctx context.Context, tags []string, binaryIDs []*BinaryID) error {
_, err := d.dataClient.AddTagsToBinaryDataByIDs(ctx, &pb.AddTagsToBinaryDataByIDsRequest{
BinaryIds: binaryIDsToProto(binaryIDs),
Tags: tags,
@@ -557,7 +586,8 @@ func (d *DataClient) AddTagsToBinaryDataByIDs(ctx context.Context, tags []string
}

// AddTagsToBinaryDataByFilter adds string tags, unless the tags are already present, to binary data based on the given filter.
func (d *DataClient) AddTagsToBinaryDataByFilter(ctx context.Context, tags []string, filter Filter) error {
// If no filter is given, all data will be tagged.
func (d *DataClient) AddTagsToBinaryDataByFilter(ctx context.Context, tags []string, filter *Filter) error {
_, err := d.dataClient.AddTagsToBinaryDataByFilter(ctx, &pb.AddTagsToBinaryDataByFilterRequest{
Filter: filterToProto(filter),
Tags: tags,
@@ -568,7 +598,7 @@ func (d *DataClient) AddTagsToBinaryDataByFilter(ctx context.Context, tags []str
// RemoveTagsFromBinaryDataByIDs removes string tags from binary data based on given IDs.
// It returns the number of binary files which had tags removed.
func (d *DataClient) RemoveTagsFromBinaryDataByIDs(ctx context.Context,
tags []string, binaryIDs []BinaryID,
tags []string, binaryIDs []*BinaryID,
) (int, error) {
resp, err := d.dataClient.RemoveTagsFromBinaryDataByIDs(ctx, &pb.RemoveTagsFromBinaryDataByIDsRequest{
BinaryIds: binaryIDsToProto(binaryIDs),
@@ -581,9 +611,10 @@ func (d *DataClient) RemoveTagsFromBinaryDataByIDs(ctx context.Context,
}

// RemoveTagsFromBinaryDataByFilter removes the specified string tags from binary data that match the given filter.
// If no filter is given, all data will be untagged.
// It returns the number of binary files from which tags were removed.
func (d *DataClient) RemoveTagsFromBinaryDataByFilter(ctx context.Context,
tags []string, filter Filter,
tags []string, filter *Filter,
) (int, error) {
resp, err := d.dataClient.RemoveTagsFromBinaryDataByFilter(ctx, &pb.RemoveTagsFromBinaryDataByFilterRequest{
Filter: filterToProto(filter),
@@ -596,8 +627,8 @@ func (d *DataClient) RemoveTagsFromBinaryDataByFilter(ctx context.Context,
}

// TagsByFilter retrieves all unique tags associated with the data that match the specified filter.
// It returns the list of these unique tags.
func (d *DataClient) TagsByFilter(ctx context.Context, filter Filter) ([]string, error) {
// It returns the list of these unique tags. If no filter is given, all data tags are returned.
func (d *DataClient) TagsByFilter(ctx context.Context, filter *Filter) ([]string, error) {
resp, err := d.dataClient.TagsByFilter(ctx, &pb.TagsByFilterRequest{
Filter: filterToProto(filter),
})
@@ -612,7 +643,7 @@ func (d *DataClient) TagsByFilter(ctx context.Context, filter Filter) ([]string,
// All normalized coordinates (xMin, yMin, xMax, yMax) must be float values in the range [0, 1].
func (d *DataClient) AddBoundingBoxToImageByID(
ctx context.Context,
binaryID BinaryID,
binaryID *BinaryID,
label string,
xMinNormalized float64,
yMinNormalized float64,
@@ -637,7 +668,7 @@ func (d *DataClient) AddBoundingBoxToImageByID(
func (d *DataClient) RemoveBoundingBoxFromImageByID(
ctx context.Context,
bboxID string,
binaryID BinaryID,
binaryID *BinaryID,
) error {
_, err := d.dataClient.RemoveBoundingBoxFromImageByID(ctx, &pb.RemoveBoundingBoxFromImageByIDRequest{
BinaryId: binaryIDToProto(binaryID),
@@ -647,8 +678,8 @@ func (d *DataClient) RemoveBoundingBoxFromImageByID(
}

// BoundingBoxLabelsByFilter retrieves all unique string labels for bounding boxes that match the specified filter.
// It returns a list of these labels.
func (d *DataClient) BoundingBoxLabelsByFilter(ctx context.Context, filter Filter) ([]string, error) {
// It returns a list of these labels. If no filter is given, all labels are returned.
func (d *DataClient) BoundingBoxLabelsByFilter(ctx context.Context, filter *Filter) ([]string, error) {
resp, err := d.dataClient.BoundingBoxLabelsByFilter(ctx, &pb.BoundingBoxLabelsByFilterRequest{
Filter: filterToProto(filter),
})
@@ -658,18 +689,18 @@ func (d *DataClient) BoundingBoxLabelsByFilter(ctx context.Context, filter Filte
return resp.Labels, nil
}

// UpdateBoundingBox updates the bounding box for a given bbox ID for the file represented by the binary ID,
// modifying its label and position using optional normalized coordinates (xMin, yMin, xMax, yMax),
// where all coordinates must be in the range [0, 1].
func (d *DataClient) UpdateBoundingBox(ctx context.Context,
binaryID BinaryID,
bboxID string,
label *string, // optional
xMinNormalized *float64, // optional
yMinNormalized *float64, // optional
xMaxNormalized *float64, // optional
yMaxNormalized *float64, // optional
) error {
// UpdateBoundingBox updates the bounding box for a given bbox ID for the file represented by the binary ID.
func (d *DataClient) UpdateBoundingBox(ctx context.Context, binaryID *BinaryID, bboxID string, opts *UpdateBoundingBoxOptions) error {
var label *string
var xMinNormalized, yMinNormalized, xMaxNormalized, yMaxNormalized *float64
if opts != nil {
label = opts.Label
xMinNormalized = opts.XMinNormalized
yMinNormalized = opts.YMinNormalized
xMaxNormalized = opts.XMaxNormalized
yMaxNormalized = opts.YMaxNormalized
}

_, err := d.dataClient.UpdateBoundingBox(ctx, &pb.UpdateBoundingBoxRequest{
BinaryId: binaryIDToProto(binaryID),
BboxId: bboxID,
Expand Down Expand Up @@ -740,7 +771,7 @@ func (d *DataClient) ExportTabularData(
// AddBinaryDataToDatasetByIDs adds the binary data with the given binary IDs to the dataset.
func (d *DataClient) AddBinaryDataToDatasetByIDs(
ctx context.Context,
binaryIDs []BinaryID,
binaryIDs []*BinaryID,
datasetID string,
) error {
_, err := d.dataClient.AddBinaryDataToDatasetByIDs(ctx, &pb.AddBinaryDataToDatasetByIDsRequest{
@@ -753,7 +784,7 @@ func (d *DataClient) RemoveBinaryDataFromDatasetByIDs(
// RemoveBinaryDataFromDatasetByIDs removes the binary data with the given binary IDs from the dataset.
func (d *DataClient) RemoveBinaryDataFromDatasetByIDs(
ctx context.Context,
binaryIDs []BinaryID,
binaryIDs []*BinaryID,
datasetID string,
) error {
_, err := d.dataClient.RemoveBinaryDataFromDatasetByIDs(ctx, &pb.RemoveBinaryDataFromDatasetByIDsRequest{
@@ -772,7 +803,7 @@ func (d *DataClient) BinaryDataCaptureUpload(
componentName string,
methodName string,
fileExtension string,
options *BinaryOptions,
options *BinaryDataCaptureUploadOptions,
) (string, error) {
var sensorMetadata SensorMetadata
if options.DataRequestTimes != nil && len(options.DataRequestTimes) == 2 {
Expand Down Expand Up @@ -820,7 +851,7 @@ func (d *DataClient) TabularDataCaptureUpload(
componentName string,
methodName string,
dataRequestTimes [][2]time.Time,
options *TabularOptions,
options *TabularDataCaptureUploadOptions,
) (string, error) {
if len(dataRequestTimes) != len(tabularData) {
return "", errors.New("dataRequestTimes and tabularData lengths must be equal")
Expand Down Expand Up @@ -890,7 +921,7 @@ func (d *DataClient) StreamingDataCaptureUpload(
data []byte,
partID string,
fileExt string,
options *StreamingOptions,
options *StreamingDataCaptureUploadOptions,
) (string, error) {
uploadMetadata := UploadMetadata{
PartID: partID,
Expand Down Expand Up @@ -1265,23 +1296,29 @@ func exportTabularDataReturnFromProto(proto *pb.ExportTabularDataResponse) *Expo
}
}

func binaryIDToProto(binaryID BinaryID) *pb.BinaryID {
func binaryIDToProto(binaryID *BinaryID) *pb.BinaryID {
if binaryID == nil {
return nil
}
return &pb.BinaryID{
FileId: binaryID.FileID,
OrganizationId: binaryID.OrganizationID,
LocationId: binaryID.LocationID,
}
}

func binaryIDsToProto(binaryIDs []BinaryID) []*pb.BinaryID {
func binaryIDsToProto(binaryIDs []*BinaryID) []*pb.BinaryID {
var protoBinaryIDs []*pb.BinaryID
for _, binaryID := range binaryIDs {
protoBinaryIDs = append(protoBinaryIDs, binaryIDToProto(binaryID))
}
return protoBinaryIDs
}

func filterToProto(filter Filter) *pb.Filter {
func filterToProto(filter *Filter) *pb.Filter {
if filter == nil {
return nil
}
return &pb.Filter{
ComponentName: filter.ComponentName,
ComponentType: filter.ComponentType,