Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

RSDK-9338: add optional parameters in data client #4595

Merged
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
175 changes: 106 additions & 69 deletions app/data_client.go
Original file line number Diff line number Diff line change
Expand Up @@ -235,26 +235,41 @@ type FileData struct {
Data []byte
}

// BinaryOptions represents optional parameters for the BinaryDataCaptureUpload method.
type BinaryOptions struct {
// DataByFilterOptions contains optional parameters for TabularDataByFilter and BinaryDataByFilter.
type DataByFilterOptions struct {
	// No Filter implies all data.
	Filter *Filter
	// Limit defaults to 50 if unspecified.
	Limit int
purplenicole730 marked this conversation as resolved.
Show resolved Hide resolved
	// Last indicates the object identifier of the Last-returned data.
	// This is returned by calls to TabularDataByFilter and BinaryDataByFilter as the `Last` value.
	// If provided, the server will return the next data entries after the last object identifier.
	Last string
	// SortOrder specifies the sort order applied to the returned data.
	SortOrder Order
	// CountOnly is forwarded on the request; when set, it asks the server for a
	// count of matching data rather than the data itself — confirm against API docs.
	CountOnly bool
	// IncludeInternalData is forwarded on the request; presumably it includes
	// internal/system-generated data in results — confirm against API docs.
	IncludeInternalData bool
}

// BinaryDataCaptureUploadOptions represents optional parameters for the BinaryDataCaptureUpload method.
// All fields are optional; zero/nil values leave the corresponding request field unset.
type BinaryDataCaptureUploadOptions struct {
	// Type, if non-nil, is the data type to record for the upload.
	Type *DataType
	// FileName, if non-nil, is the name to associate with the uploaded file.
	FileName *string
	// MethodParameters holds additional parameters of the capture method.
	MethodParameters map[string]interface{}
	// Tags to attach to the uploaded data.
	Tags []string
	// DataRequestTimes is a pair of request times; when provided it is used to
	// build the sensor metadata for the upload.
	DataRequestTimes *[2]time.Time
}

// TabularOptions represents optional parameters for the TabularDataCaptureUpload method.
type TabularOptions struct {
// TabularDataCaptureUploadOptions represents optional parameters for the TabularDataCaptureUpload method.
// All fields are optional; zero/nil values leave the corresponding request field unset.
type TabularDataCaptureUploadOptions struct {
	// Type, if non-nil, is the data type to record for the upload.
	Type *DataType
	// FileName, if non-nil, is the name to associate with the uploaded data.
	FileName *string
	// MethodParameters holds additional parameters of the capture method.
	MethodParameters map[string]interface{}
	// FileExtension, if non-nil, is the file extension to record for the upload.
	FileExtension *string
	// Tags to attach to the uploaded data.
	Tags []string
}

// StreamingOptions represents optional parameters for the StreamingDataCaptureUpload method.
type StreamingOptions struct {
// StreamingDataCaptureUploadOptions represents optional parameters for the StreamingDataCaptureUpload method.
type StreamingDataCaptureUploadOptions struct {
ComponentType *string
ComponentName *string
MethodName *string
Expand All @@ -276,6 +291,17 @@ type FileUploadOptions struct {
Tags []string
}

// UpdateBoundingBoxOptions contains optional parameters for UpdateBoundingBox.
// Nil fields are left unset on the update request, leaving the corresponding
// bounding-box attribute unchanged.
type UpdateBoundingBoxOptions struct {
	// Label, if non-nil, is the new label for the bounding box.
	Label *string

	// Normalized coordinates where all coordinates must be in the range [0, 1].
	XMinNormalized *float64
	YMinNormalized *float64
	XMaxNormalized *float64
	YMaxNormalized *float64
}

// Dataset contains the information of a dataset.
type Dataset struct {
ID string
Expand Down Expand Up @@ -320,22 +346,23 @@ func BsonToGo(rawData [][]byte) ([]map[string]interface{}, error) {
}

// TabularDataByFilter queries tabular data and metadata based on given filters.
func (d *DataClient) TabularDataByFilter(
ctx context.Context,
filter Filter,
limit int,
last string,
sortOrder Order,
countOnly bool,
includeInternalData bool,
) (TabularDataReturn, error) {
func (d *DataClient) TabularDataByFilter(ctx context.Context, opts *DataByFilterOptions) (TabularDataReturn, error) {
dataReq := pb.DataRequest{}
var countOnly, includeInternalData bool
if opts != nil {
dataReq.Filter = filterToProto(opts.Filter)
if opts.Limit != 0 {
purplenicole730 marked this conversation as resolved.
Show resolved Hide resolved
dataReq.Limit = uint64(opts.Limit)
}
if opts.Last != "" {
dataReq.Last = opts.Last
}
dataReq.SortOrder = orderToProto(opts.SortOrder)
countOnly = opts.CountOnly
includeInternalData = opts.IncludeInternalData
}
resp, err := d.dataClient.TabularDataByFilter(ctx, &pb.TabularDataByFilterRequest{
DataRequest: &pb.DataRequest{
Filter: filterToProto(filter),
Limit: uint64(limit),
Last: last,
SortOrder: orderToProto(sortOrder),
},
DataRequest: &dataReq,
CountOnly: countOnly,
IncludeInternalData: includeInternalData,
})
Expand Down Expand Up @@ -396,22 +423,24 @@ func (d *DataClient) TabularDataByMQL(ctx context.Context, organizationID string

// BinaryDataByFilter queries binary data and metadata based on given filters.
func (d *DataClient) BinaryDataByFilter(
ctx context.Context,
filter Filter,
limit int,
sortOrder Order,
last string,
includeBinary bool,
countOnly bool,
includeInternalData bool,
ctx context.Context, includeBinary bool, opts *DataByFilterOptions,
) (BinaryDataReturn, error) {
dataReq := pb.DataRequest{}
var countOnly, includeInternalData bool
if opts != nil {
dataReq.Filter = filterToProto(opts.Filter)
if opts.Limit != 0 {
dataReq.Limit = uint64(opts.Limit)
}
if opts.Last != "" {
dataReq.Last = opts.Last
}
dataReq.SortOrder = orderToProto(opts.SortOrder)
countOnly = opts.CountOnly
includeInternalData = opts.IncludeInternalData
}
resp, err := d.dataClient.BinaryDataByFilter(ctx, &pb.BinaryDataByFilterRequest{
DataRequest: &pb.DataRequest{
Filter: filterToProto(filter),
Limit: uint64(limit),
Last: last,
SortOrder: orderToProto(sortOrder),
},
DataRequest: &dataReq,
IncludeBinary: includeBinary,
CountOnly: countOnly,
IncludeInternalData: includeInternalData,
Expand All @@ -431,7 +460,7 @@ func (d *DataClient) BinaryDataByFilter(
}

// BinaryDataByIDs queries binary data and metadata based on given IDs.
func (d *DataClient) BinaryDataByIDs(ctx context.Context, binaryIDs []BinaryID) ([]BinaryData, error) {
func (d *DataClient) BinaryDataByIDs(ctx context.Context, binaryIDs []*BinaryID) ([]BinaryData, error) {
resp, err := d.dataClient.BinaryDataByIDs(ctx, &pb.BinaryDataByIDsRequest{
IncludeBinary: true,
BinaryIds: binaryIDsToProto(binaryIDs),
Expand Down Expand Up @@ -459,9 +488,9 @@ func (d *DataClient) DeleteTabularData(ctx context.Context, organizationID strin
return int(resp.DeletedCount), nil
}

// DeleteBinaryDataByFilter deletes binary data based on given filters.
// DeleteBinaryDataByFilter deletes binary data based on given filters. If filter is empty, delete all data.
// It returns the number of binary datapoints deleted.
func (d *DataClient) DeleteBinaryDataByFilter(ctx context.Context, filter Filter) (int, error) {
func (d *DataClient) DeleteBinaryDataByFilter(ctx context.Context, filter *Filter) (int, error) {
resp, err := d.dataClient.DeleteBinaryDataByFilter(ctx, &pb.DeleteBinaryDataByFilterRequest{
Filter: filterToProto(filter),
IncludeInternalData: true,
Expand All @@ -474,7 +503,7 @@ func (d *DataClient) DeleteBinaryDataByFilter(ctx context.Context, filter Filter

// DeleteBinaryDataByIDs deletes binary data based on given IDs.
// It returns the number of binary datapoints deleted.
func (d *DataClient) DeleteBinaryDataByIDs(ctx context.Context, binaryIDs []BinaryID) (int, error) {
func (d *DataClient) DeleteBinaryDataByIDs(ctx context.Context, binaryIDs []*BinaryID) (int, error) {
resp, err := d.dataClient.DeleteBinaryDataByIDs(ctx, &pb.DeleteBinaryDataByIDsRequest{
BinaryIds: binaryIDsToProto(binaryIDs),
})
Expand All @@ -485,7 +514,7 @@ func (d *DataClient) DeleteBinaryDataByIDs(ctx context.Context, binaryIDs []Bina
}

// AddTagsToBinaryDataByIDs adds string tags, unless the tags are already present, to binary data based on given IDs.
func (d *DataClient) AddTagsToBinaryDataByIDs(ctx context.Context, tags []string, binaryIDs []BinaryID) error {
func (d *DataClient) AddTagsToBinaryDataByIDs(ctx context.Context, tags []string, binaryIDs []*BinaryID) error {
_, err := d.dataClient.AddTagsToBinaryDataByIDs(ctx, &pb.AddTagsToBinaryDataByIDsRequest{
BinaryIds: binaryIDsToProto(binaryIDs),
Tags: tags,
Expand All @@ -494,7 +523,8 @@ func (d *DataClient) AddTagsToBinaryDataByIDs(ctx context.Context, tags []string
}

// AddTagsToBinaryDataByFilter adds string tags, unless the tags are already present, to binary data based on the given filter.
func (d *DataClient) AddTagsToBinaryDataByFilter(ctx context.Context, tags []string, filter Filter) error {
// If no filter is given, all data will be tagged.
func (d *DataClient) AddTagsToBinaryDataByFilter(ctx context.Context, tags []string, filter *Filter) error {
_, err := d.dataClient.AddTagsToBinaryDataByFilter(ctx, &pb.AddTagsToBinaryDataByFilterRequest{
Filter: filterToProto(filter),
Tags: tags,
Expand All @@ -505,7 +535,7 @@ func (d *DataClient) AddTagsToBinaryDataByFilter(ctx context.Context, tags []str
// RemoveTagsFromBinaryDataByIDs removes string tags from binary data based on given IDs.
// It returns the number of binary files which had tags removed.
func (d *DataClient) RemoveTagsFromBinaryDataByIDs(ctx context.Context,
tags []string, binaryIDs []BinaryID,
tags []string, binaryIDs []*BinaryID,
) (int, error) {
resp, err := d.dataClient.RemoveTagsFromBinaryDataByIDs(ctx, &pb.RemoveTagsFromBinaryDataByIDsRequest{
BinaryIds: binaryIDsToProto(binaryIDs),
Expand All @@ -518,9 +548,10 @@ func (d *DataClient) RemoveTagsFromBinaryDataByIDs(ctx context.Context,
}

// RemoveTagsFromBinaryDataByFilter removes the specified string tags from binary data that match the given filter.
// If no filter is given, all data will be untagged.
// It returns the number of binary files from which tags were removed.
func (d *DataClient) RemoveTagsFromBinaryDataByFilter(ctx context.Context,
tags []string, filter Filter,
tags []string, filter *Filter,
) (int, error) {
resp, err := d.dataClient.RemoveTagsFromBinaryDataByFilter(ctx, &pb.RemoveTagsFromBinaryDataByFilterRequest{
Filter: filterToProto(filter),
Expand All @@ -533,8 +564,8 @@ func (d *DataClient) RemoveTagsFromBinaryDataByFilter(ctx context.Context,
}

// TagsByFilter retrieves all unique tags associated with the data that match the specified filter.
// It returns the list of these unique tags.
func (d *DataClient) TagsByFilter(ctx context.Context, filter Filter) ([]string, error) {
// It returns the list of these unique tags. If no filter is given, all data tags are returned.
func (d *DataClient) TagsByFilter(ctx context.Context, filter *Filter) ([]string, error) {
resp, err := d.dataClient.TagsByFilter(ctx, &pb.TagsByFilterRequest{
Filter: filterToProto(filter),
})
Expand All @@ -549,7 +580,7 @@ func (d *DataClient) TagsByFilter(ctx context.Context, filter Filter) ([]string,
// All normalized coordinates (xMin, yMin, xMax, yMax) must be float values in the range [0, 1].
func (d *DataClient) AddBoundingBoxToImageByID(
ctx context.Context,
binaryID BinaryID,
binaryID *BinaryID,
label string,
xMinNormalized float64,
yMinNormalized float64,
Expand All @@ -574,7 +605,7 @@ func (d *DataClient) AddBoundingBoxToImageByID(
func (d *DataClient) RemoveBoundingBoxFromImageByID(
ctx context.Context,
bboxID string,
binaryID BinaryID,
binaryID *BinaryID,
) error {
_, err := d.dataClient.RemoveBoundingBoxFromImageByID(ctx, &pb.RemoveBoundingBoxFromImageByIDRequest{
BinaryId: binaryIDToProto(binaryID),
Expand All @@ -584,8 +615,8 @@ func (d *DataClient) RemoveBoundingBoxFromImageByID(
}

// BoundingBoxLabelsByFilter retrieves all unique string labels for bounding boxes that match the specified filter.
// It returns a list of these labels.
func (d *DataClient) BoundingBoxLabelsByFilter(ctx context.Context, filter Filter) ([]string, error) {
// It returns a list of these labels. If no filter is given, all labels are returned.
func (d *DataClient) BoundingBoxLabelsByFilter(ctx context.Context, filter *Filter) ([]string, error) {
resp, err := d.dataClient.BoundingBoxLabelsByFilter(ctx, &pb.BoundingBoxLabelsByFilterRequest{
Filter: filterToProto(filter),
})
Expand All @@ -595,18 +626,18 @@ func (d *DataClient) BoundingBoxLabelsByFilter(ctx context.Context, filter Filte
return resp.Labels, nil
}

// UpdateBoundingBox updates the bounding box for a given bbox ID for the file represented by the binary ID,
// modifying its label and position using optional normalized coordinates (xMin, yMin, xMax, yMax),
// where all coordinates must be in the range [0, 1].
func (d *DataClient) UpdateBoundingBox(ctx context.Context,
binaryID BinaryID,
bboxID string,
label *string, // optional
xMinNormalized *float64, // optional
yMinNormalized *float64, // optional
xMaxNormalized *float64, // optional
yMaxNormalized *float64, // optional
) error {
// UpdateBoundingBox updates the bounding box for a given bbox ID for the file represented by the binary ID.
func (d *DataClient) UpdateBoundingBox(ctx context.Context, binaryID *BinaryID, bboxID string, opts *UpdateBoundingBoxOptions) error {
var label *string
var xMinNormalized, yMinNormalized, xMaxNormalized, yMaxNormalized *float64
if opts != nil {
label = opts.Label
xMinNormalized = opts.XMinNormalized
yMinNormalized = opts.YMinNormalized
xMaxNormalized = opts.XMaxNormalized
yMaxNormalized = opts.YMaxNormalized
}

_, err := d.dataClient.UpdateBoundingBox(ctx, &pb.UpdateBoundingBoxRequest{
BinaryId: binaryIDToProto(binaryID),
BboxId: bboxID,
Expand Down Expand Up @@ -652,7 +683,7 @@ func (d *DataClient) ConfigureDatabaseUser(
// AddBinaryDataToDatasetByIDs adds the binary data with the given binary IDs to the dataset.
func (d *DataClient) AddBinaryDataToDatasetByIDs(
ctx context.Context,
binaryIDs []BinaryID,
binaryIDs []*BinaryID,
datasetID string,
) error {
_, err := d.dataClient.AddBinaryDataToDatasetByIDs(ctx, &pb.AddBinaryDataToDatasetByIDsRequest{
Expand All @@ -665,7 +696,7 @@ func (d *DataClient) AddBinaryDataToDatasetByIDs(
// RemoveBinaryDataFromDatasetByIDs removes the binary data with the given binary IDs from the dataset.
func (d *DataClient) RemoveBinaryDataFromDatasetByIDs(
ctx context.Context,
binaryIDs []BinaryID,
binaryIDs []*BinaryID,
datasetID string,
) error {
_, err := d.dataClient.RemoveBinaryDataFromDatasetByIDs(ctx, &pb.RemoveBinaryDataFromDatasetByIDsRequest{
Expand All @@ -684,7 +715,7 @@ func (d *DataClient) BinaryDataCaptureUpload(
componentName string,
methodName string,
fileExtension string,
options *BinaryOptions,
options *BinaryDataCaptureUploadOptions,
) (string, error) {
var sensorMetadata SensorMetadata
if options.DataRequestTimes != nil && len(options.DataRequestTimes) == 2 {
Expand Down Expand Up @@ -732,7 +763,7 @@ func (d *DataClient) TabularDataCaptureUpload(
componentName string,
methodName string,
dataRequestTimes [][2]time.Time,
options *TabularOptions,
options *TabularDataCaptureUploadOptions,
) (string, error) {
if len(dataRequestTimes) != len(tabularData) {
return "", errors.New("dataRequestTimes and tabularData lengths must be equal")
Expand Down Expand Up @@ -802,7 +833,7 @@ func (d *DataClient) StreamingDataCaptureUpload(
data []byte,
partID string,
fileExt string,
options *StreamingOptions,
options *StreamingDataCaptureUploadOptions,
) (string, error) {
uploadMetadata := UploadMetadata{
PartID: partID,
Expand Down Expand Up @@ -1158,23 +1189,29 @@ func tabularDataFromProto(proto *pb.TabularData, metadata *pb.CaptureMetadata) T
}
}

func binaryIDToProto(binaryID BinaryID) *pb.BinaryID {
// binaryIDToProto converts a BinaryID into its protobuf representation.
// A nil input maps to a nil proto, preserving "unset" semantics on the wire.
func binaryIDToProto(binaryID *BinaryID) *pb.BinaryID {
	if binaryID == nil {
		return nil
	}
	proto := pb.BinaryID{
		FileId:         binaryID.FileID,
		OrganizationId: binaryID.OrganizationID,
		LocationId:     binaryID.LocationID,
	}
	return &proto
}

func binaryIDsToProto(binaryIDs []BinaryID) []*pb.BinaryID {
// binaryIDsToProto converts a slice of BinaryIDs into their protobuf
// representations, preserving order. Nil elements are converted to nil protos
// by binaryIDToProto. A nil or empty input returns nil, matching the previous
// append-based behavior.
func binaryIDsToProto(binaryIDs []*BinaryID) []*pb.BinaryID {
	if len(binaryIDs) == 0 {
		return nil
	}
	// Pre-size to the known length to avoid repeated slice growth.
	protoBinaryIDs := make([]*pb.BinaryID, 0, len(binaryIDs))
	for _, binaryID := range binaryIDs {
		protoBinaryIDs = append(protoBinaryIDs, binaryIDToProto(binaryID))
	}
	return protoBinaryIDs
}

func filterToProto(filter Filter) *pb.Filter {
func filterToProto(filter *Filter) *pb.Filter {
if filter == nil {
return nil
}
return &pb.Filter{
ComponentName: filter.ComponentName,
ComponentType: filter.ComponentType,
Expand Down
Loading
Loading