Merge pull request #325 from 0chain/bug/ref-json-tag
Fix json tag issue
service-0chain authored Sep 3, 2021
2 parents 7370c1e + 59fd498 commit 6d0abe2
Showing 5 changed files with 91 additions and 57 deletions.
10 changes: 5 additions & 5 deletions code/go/0chain.net/blobbercore/blobberhttp/response.go
@@ -35,11 +35,11 @@ type ReferencePathResult struct {
}

type RefResult struct {
TotalPages int `json:"total_pages"`
NewOffsetPath string `json:"offsetPath,omitempty"`
NewOffsetDate string `json:"offsetDate,omitempty"`
Refs *[]reference.Ref `json:"refs"`
LatestWM *writemarker.WriteMarker `json:"latest_write_marker"`
TotalPages int `json:"total_pages"`
OffsetPath string `json:"offset_path,omitempty"` //used for pagination; index for path is created in database
OffsetDate string `json:"offset_date,omitempty"` //used for pagination; index for updated_at is created in database
Refs *[]reference.PaginatedRef `json:"refs"`
LatestWM *writemarker.WriteMarker `json:"latest_write_marker"`
}

type ObjectPathResult struct {
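The renamed tags change the wire format: clients now receive offset_path and offset_date (snake_case) instead of the previous offsetPath/offsetDate. A minimal sketch of the resulting JSON, using a simplified stand-in struct rather than the real blobberhttp.RefResult:

package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-in for blobberhttp.RefResult with the corrected tags.
type refResult struct {
	TotalPages int    `json:"total_pages"`
	OffsetPath string `json:"offset_path,omitempty"`
	OffsetDate string `json:"offset_date,omitempty"`
}

func main() {
	out, _ := json.Marshal(refResult{TotalPages: 3, OffsetPath: "/docs/a.txt"})
	fmt.Println(string(out)) // {"total_pages":3,"offset_path":"/docs/a.txt"}
}
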
4 changes: 2 additions & 2 deletions code/go/0chain.net/blobbercore/handler/helper.go
@@ -6,9 +6,9 @@ import (
"github.com/0chain/blobber/code/go/0chain.net/core/common"
)

func checkValidDate(s string) error {
func checkValidDate(s, dateLayOut string) error {
if s != "" {
_, err := time.Parse("2006-01-02 15:04:05.999999999", s)
_, err := time.Parse(dateLayOut, s)
if err != nil {
return common.NewError("invalid_parameters", err.Error())
}
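Passing the layout explicitly lets the handler validate offsetDate and updatedDate against the same format it later uses to build offsets. A standalone sketch of the new behaviour; it mirrors checkValidDate rather than importing the blobber packages, and copies the OffsetDateLayout value from storage_handler.go:

package main

import (
	"fmt"
	"time"
)

const offsetDateLayout = "2006-01-02T15:04:05.99999Z07:00"

// Mirrors the updated helper: empty strings pass, non-empty strings must match the layout.
func checkValidDate(s, layout string) error {
	if s == "" {
		return nil
	}
	if _, err := time.Parse(layout, s); err != nil {
		return fmt.Errorf("invalid_parameters: %v", err)
	}
	return nil
}

func main() {
	fmt.Println(checkValidDate("2021-09-03T10:15:04.12345Z", offsetDateLayout)) // <nil>
	fmt.Println(checkValidDate("2021-09-03 10:15:04", offsetDateLayout))        // parse error
}
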
24 changes: 12 additions & 12 deletions code/go/0chain.net/blobbercore/handler/storage_handler.go
@@ -23,10 +23,10 @@ import (

const (
FormFileParseMaxMemory = 10 * 1024 * 1024

DownloadCcontentFull = "full"
DownloadContentThumb = "thumbnail"
PageLimit = 100 //100 rows will make up to 100 KB
OffsetDateLayout = "2006-01-02T15:04:05.99999Z07:00"
DownloadCcontentFull = "full"
DownloadContentThumb = "thumbnail"
PageLimit = 100 //100 rows will make up to 100 KB
)

type StorageHandler struct{}
@@ -671,6 +671,7 @@ func (fsh *StorageHandler) GetObjectTree(ctx context.Context, r *http.Request) (

//Retrieves file refs. One can use three ref types: regular, updated and deleted. Regular gives all undeleted rows.
//Updated gives rows that are updated compared to the date given, and deleted gives refs deleted compared to the date given.
//The date-time format should be as declared in the OffsetDateLayout constant above.
func (fsh *StorageHandler) GetRefs(ctx context.Context, r *http.Request) (*blobberhttp.RefResult, error) {
allocationTx := ctx.Value(constants.ALLOCATION_CONTEXT_KEY).(string)
allocationObj, err := fsh.verifyAllocation(ctx, allocationTx, false)
@@ -715,11 +716,11 @@ func (fsh *StorageHandler) GetRefs(ctx context.Context, r *http.Request) (*blobb
offsetPath := r.FormValue("offsetPath")
offsetDate := r.FormValue("offsetDate")
updatedDate := r.FormValue("updatedDate")
err = checkValidDate(offsetDate)
err = checkValidDate(offsetDate, OffsetDateLayout)
if err != nil {
return nil, err
}
err = checkValidDate(updatedDate)
err = checkValidDate(updatedDate, OffsetDateLayout)
if err != nil {
return nil, err
}
@@ -737,7 +738,7 @@ func (fsh *StorageHandler) GetRefs(ctx context.Context, r *http.Request) (*blobb
}

refType := r.FormValue("refType")
var refs *[]reference.Ref
var refs *[]reference.PaginatedRef
var totalPages int
var newOffsetPath string
var newOffsetDate string
@@ -747,10 +748,10 @@ func (fsh *StorageHandler) GetRefs(ctx context.Context, r *http.Request) (*blobb
refs, totalPages, newOffsetPath, err = reference.GetRefs(ctx, allocationID, path, offsetPath, fileType, level, pageLimit)

case refType == "updated":
refs, totalPages, newOffsetPath, newOffsetDate, err = reference.GetUpdatedRefs(ctx, allocationID, path, offsetPath, fileType, updatedDate, offsetDate, level, pageLimit)
refs, totalPages, newOffsetPath, newOffsetDate, err = reference.GetUpdatedRefs(ctx, allocationID, path, offsetPath, fileType, updatedDate, offsetDate, level, pageLimit, OffsetDateLayout)

case refType == "deleted":
refs, totalPages, newOffsetPath, newOffsetDate, err = reference.GetDeletedRefs(ctx, allocationID, updatedDate, offsetPath, offsetDate, pageLimit)
refs, totalPages, newOffsetPath, newOffsetDate, err = reference.GetDeletedRefs(ctx, allocationID, updatedDate, offsetPath, offsetDate, pageLimit, OffsetDateLayout)

default:
return nil, common.NewError("invalid_parameters", "refType param should have value regular/updated/deleted")
@@ -772,8 +773,8 @@ func (fsh *StorageHandler) GetRefs(ctx context.Context, r *http.Request) (*blobb
var refResult blobberhttp.RefResult
refResult.Refs = refs
refResult.TotalPages = totalPages
refResult.NewOffsetPath = newOffsetPath
refResult.NewOffsetDate = newOffsetDate
refResult.OffsetPath = newOffsetPath
refResult.OffsetDate = newOffsetDate
if latestWM != nil {
refResult.LatestWM = &latestWM.WM
}
@@ -867,4 +868,3 @@ func pathHashFromReq(r *http.Request, allocationID string) (string, error) {

return pathHash, nil
}
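A hedged client-side sketch of how the query parameters read by GetRefs fit together; only the parameter names come from the r.FormValue calls above, while the host, URL path and allocation value are placeholders:

package main

import (
	"fmt"
	"net/url"
)

func main() {
	q := url.Values{}
	q.Set("refType", "updated")                      // regular | updated | deleted
	q.Set("updatedDate", "2021-09-01T00:00:00Z")     // validated against OffsetDateLayout
	q.Set("offsetPath", "/docs/file-100.txt")        // resume after this path
	q.Set("offsetDate", "2021-09-02T11:22:33.1234Z") // resume after this updated_at

	u := url.URL{
		Scheme:   "https",
		Host:     "blobber.example.com",           // placeholder
		Path:     "/v1/file/refs/ALLOCATION_TX",   // placeholder
		RawQuery: q.Encode(),
	}
	fmt.Println(u.String())
	// Each page of the response carries offset_path/offset_date; feed them back as
	// offsetPath/offsetDate to fetch the next page.
}
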

91 changes: 62 additions & 29 deletions code/go/0chain.net/blobbercore/reference/ref.go
@@ -56,39 +56,72 @@ func (a *Attributes) Validate() (err error) {
}

type Ref struct {
ID int64 `gorm:"column:id;primary_key" json:"id,omitempty"`
Type string `gorm:"column:type" dirlist:"type" filelist:"type" json:"type,omitempty"`
ID int64 `gorm:"column:id;primary_key"`
Type string `gorm:"column:type" dirlist:"type" filelist:"type"`
AllocationID string `gorm:"column:allocation_id"`
LookupHash string `gorm:"column:lookup_hash" dirlist:"lookup_hash" filelist:"lookup_hash"`
Name string `gorm:"column:name" dirlist:"name" filelist:"name"`
Path string `gorm:"column:path" dirlist:"path" filelist:"path"`
Hash string `gorm:"column:hash" dirlist:"hash" filelist:"hash"`
NumBlocks int64 `gorm:"column:num_of_blocks" dirlist:"num_of_blocks" filelist:"num_of_blocks"`
PathHash string `gorm:"column:path_hash" dirlist:"path_hash" filelist:"path_hash"`
ParentPath string `gorm:"column:parent_path"`
PathLevel int `gorm:"column:level"`
CustomMeta string `gorm:"column:custom_meta" filelist:"custom_meta"`
ContentHash string `gorm:"column:content_hash" filelist:"content_hash"`
Size int64 `gorm:"column:size" dirlist:"size" filelist:"size"`
MerkleRoot string `gorm:"column:merkle_root" filelist:"merkle_root"`
ActualFileSize int64 `gorm:"column:actual_file_size" filelist:"actual_file_size"`
ActualFileHash string `gorm:"column:actual_file_hash" filelist:"actual_file_hash"`
MimeType string `gorm:"column:mimetype" filelist:"mimetype"`
WriteMarker string `gorm:"column:write_marker"`
ThumbnailSize int64 `gorm:"column:thumbnail_size" filelist:"thumbnail_size"`
ThumbnailHash string `gorm:"column:thumbnail_hash" filelist:"thumbnail_hash"`
ActualThumbnailSize int64 `gorm:"column:actual_thumbnail_size" filelist:"actual_thumbnail_size"`
ActualThumbnailHash string `gorm:"column:actual_thumbnail_hash" filelist:"actual_thumbnail_hash"`
EncryptedKey string `gorm:"column:encrypted_key" filelist:"encrypted_key"`
Attributes datatypes.JSON `gorm:"column:attributes" filelist:"attributes"`
Children []*Ref `gorm:"-"`
childrenLoaded bool

OnCloud bool `gorm:"column:on_cloud" filelist:"on_cloud"`
CommitMetaTxns []CommitMetaTxn `gorm:"foreignkey:ref_id" filelist:"commit_meta_txns"`
CreatedAt time.Time `gorm:"column:created_at" dirlist:"created_at" filelist:"created_at"`
UpdatedAt time.Time `gorm:"column:updated_at" dirlist:"updated_at" filelist:"updated_at"`

DeletedAt gorm.DeletedAt `gorm:"column:deleted_at"` // soft deletion
}

type PaginatedRef struct { //Gorm smart select fields.
ID int64 `gorm:"column:id" json:"id,omitempty"`
Type string `gorm:"column:type" json:"type,omitempty"`
AllocationID string `gorm:"column:allocation_id" json:"allocation_id,omitempty"`
LookupHash string `gorm:"column:lookup_hash" dirlist:"lookup_hash" filelist:"lookup_hash" json:"lookup_hash,omitempty"`
Name string `gorm:"column:name" dirlist:"name" filelist:"name" json:"name,omitempty"`
Path string `gorm:"column:path" dirlist:"path" filelist:"path" json:"path,omitempty"`
Hash string `gorm:"column:hash" dirlist:"hash" filelist:"hash" json:"hash,omitempty"`
NumBlocks int64 `gorm:"column:num_of_blocks" dirlist:"num_of_blocks" filelist:"num_of_blocks" json:"num_blocks,omitempty"`
PathHash string `gorm:"column:path_hash" dirlist:"path_hash" filelist:"path_hash" json:"path_hash,omitempty"`
LookupHash string `gorm:"column:lookup_hash" json:"lookup_hash,omitempty"`
Name string `gorm:"column:name" json:"name,omitempty"`
Path string `gorm:"column:path" json:"path,omitempty"`
Hash string `gorm:"column:hash" json:"hash,omitempty"`
NumBlocks int64 `gorm:"column:num_of_blocks" json:"num_blocks,omitempty"`
PathHash string `gorm:"column:path_hash" json:"path_hash,omitempty"`
ParentPath string `gorm:"column:parent_path" json:"parent_path,omitempty"`
PathLevel int `gorm:"column:level" json:"level,omitempty"`
CustomMeta string `gorm:"column:custom_meta" filelist:"custom_meta" json:"custom_meta,omitempty"`
ContentHash string `gorm:"column:content_hash" filelist:"content_hash" json:"content_hash,omitempty"`
Size int64 `gorm:"column:size" dirlist:"size" filelist:"size" json:"size,omitempty"`
MerkleRoot string `gorm:"column:merkle_root" filelist:"merkle_root" json:"merkle_root,omitempty"`
ActualFileSize int64 `gorm:"column:actual_file_size" filelist:"actual_file_size" json:"actual_file_size,omitempty"`
ActualFileHash string `gorm:"column:actual_file_hash" filelist:"actual_file_hash" json:"actual_file_hash,omitempty"`
MimeType string `gorm:"column:mimetype" filelist:"mimetype" json:"mimetype,omitempty"`
CustomMeta string `gorm:"column:custom_meta" json:"custom_meta,omitempty"`
ContentHash string `gorm:"column:content_hash" json:"content_hash,omitempty"`
Size int64 `gorm:"column:size" json:"size,omitempty"`
MerkleRoot string `gorm:"column:merkle_root" json:"merkle_root,omitempty"`
ActualFileSize int64 `gorm:"column:actual_file_size" json:"actual_file_size,omitempty"`
ActualFileHash string `gorm:"column:actual_file_hash" json:"actual_file_hash,omitempty"`
MimeType string `gorm:"column:mimetype" json:"mimetype,omitempty"`
WriteMarker string `gorm:"column:write_marker" json:"write_marker,omitempty"`
ThumbnailSize int64 `gorm:"column:thumbnail_size" filelist:"thumbnail_size" json:"thumbnail_size,omitempty"`
ThumbnailHash string `gorm:"column:thumbnail_hash" filelist:"thumbnail_hash" json:"thumbnail_hash,omitempty"`
ActualThumbnailSize int64 `gorm:"column:actual_thumbnail_size" filelist:"actual_thumbnail_size" json:"actual_thumbnail_size,omitempty"`
ActualThumbnailHash string `gorm:"column:actual_thumbnail_hash" filelist:"actual_thumbnail_hash" json:"actual_thumbnail_hash,omitempty"`
EncryptedKey string `gorm:"column:encrypted_key" filelist:"encrypted_key" json:"encrypted_key,omitempty"`
Attributes datatypes.JSON `gorm:"column:attributes" filelist:"attributes" json:"attributes,omitempty"`
Children []*Ref `gorm:"-" json:"-"`
childrenLoaded bool `json:"-"`

OnCloud bool `gorm:"column:on_cloud" filelist:"on_cloud" json:"on_cloud,omitempty"`
CommitMetaTxns []CommitMetaTxn `gorm:"foreignkey:ref_id" filelist:"commit_meta_txns" json:"-"`
CreatedAt time.Time `gorm:"column:created_at" dirlist:"created_at" filelist:"created_at" json:"created_at,omitempty"`
UpdatedAt time.Time `gorm:"column:updated_at" dirlist:"updated_at" filelist:"updated_at" json:"updated_at,omitempty"`

ThumbnailSize int64 `gorm:"column:thumbnail_size" json:"thumbnail_size,omitempty"`
ThumbnailHash string `gorm:"column:thumbnail_hash" json:"thumbnail_hash,omitempty"`
ActualThumbnailSize int64 `gorm:"column:actual_thumbnail_size" json:"actual_thumbnail_size,omitempty"`
ActualThumbnailHash string `gorm:"column:actual_thumbnail_hash" json:"actual_thumbnail_hash,omitempty"`
EncryptedKey string `gorm:"column:encrypted_key" json:"encrypted_key,omitempty"`
Attributes datatypes.JSON `gorm:"column:attributes" json:"attributes,omitempty"`

OnCloud bool `gorm:"column:on_cloud" json:"on_cloud,omitempty"`
CreatedAt time.Time `gorm:"column:created_at" json:"created_at,omitempty"`
UpdatedAt time.Time `gorm:"column:updated_at" json:"updated_at,omitempty"`
DeletedAt gorm.DeletedAt `gorm:"column:deleted_at" json:"-"` // soft deletion
}

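The new PaginatedRef leans on GORM's "smart select fields": when a query on the full model scans into a struct with fewer fields, only those columns are selected, and the json tags now live on this smaller struct instead of on Ref. A hedged, self-contained sketch of that mechanism; the sqlite driver and the trimmed-down local types are illustrative only:

package main

import (
	"fmt"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
)

// Trimmed-down stand-ins for reference.Ref and reference.PaginatedRef.
type Ref struct {
	ID   int64  `gorm:"column:id;primary_key"`
	Path string `gorm:"column:path"`
	Hash string `gorm:"column:hash"`
	Size int64  `gorm:"column:size"`
}

type PaginatedRef struct {
	ID   int64  `gorm:"column:id" json:"id,omitempty"`
	Path string `gorm:"column:path" json:"path,omitempty"`
	Size int64  `gorm:"column:size" json:"size,omitempty"`
}

func main() {
	db, err := gorm.Open(sqlite.Open("file::memory:"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	db.AutoMigrate(&Ref{})
	db.Create(&Ref{Path: "/a.txt", Hash: "h1", Size: 10})

	// Scanning into PaginatedRef makes GORM select only id, path and size,
	// roughly: SELECT id, path, size FROM refs WHERE path > '' ORDER BY path LIMIT 100
	var page []PaginatedRef
	db.Model(&Ref{}).Where("path > ?", "").Order("path").Limit(100).Find(&page)
	fmt.Printf("%+v\n", page)
}
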
19 changes: 10 additions & 9 deletions code/go/0chain.net/blobbercore/reference/referencepath.go
@@ -105,9 +105,11 @@ func GetObjectTree(ctx context.Context, allocationID string, path string) (*Ref,

//This function retrieves reference_objects table rows with pagination. Check for issue https://github.com/0chain/gosdk/issues/117
//Might need to consider covering index for efficient search https://blog.crunchydata.com/blog/why-covering-indexes-are-incredibly-helpful
func GetRefs(ctx context.Context, allocationID, path, offsetPath, _type string, level, pageLimit int) (refs *[]Ref, totalPages int, newOffsetPath string, err error) {
//To retrieve refs efficiently for pagination, an index is created in PostgreSQL on the path column so it can be used to paginate refs
//easily and effectively; the same applies for offsetDate.
func GetRefs(ctx context.Context, allocationID, path, offsetPath, _type string, level, pageLimit int) (refs *[]PaginatedRef, totalPages int, newOffsetPath string, err error) {
var totalRows int64
var pRefs []Ref
var pRefs []PaginatedRef
path = filepath.Clean(path)

db := datastore.GetStore().GetDB()
@@ -125,7 +127,6 @@ func GetRefs(ctx context.Context, allocationID, path, offsetPath, _type string,
if level != 0 {
db1 = db1.Where("level >= ?", level)
}
db1 = db1.Count(&totalRows)

db1 = db1.Where("path > ?", offsetPath)

@@ -161,9 +162,9 @@ func GetRefs(ctx context.Context, allocationID, path, offsetPath, _type string,
}

//Retrieves updated refs compared to some updated_at value. Useful for localCache
func GetUpdatedRefs(ctx context.Context, allocationID, path, offsetPath, _type, updatedDate, offsetDate string, level, pageLimit int) (refs *[]Ref, totalPages int, newOffsetPath, newOffsetDate string, err error) {
func GetUpdatedRefs(ctx context.Context, allocationID, path, offsetPath, _type, updatedDate, offsetDate string, level, pageLimit int, dateLayOut string) (refs *[]PaginatedRef, totalPages int, newOffsetPath, newOffsetDate string, err error) {
var totalRows int64
var pRefs []Ref
var pRefs []PaginatedRef
db := datastore.GetStore().GetDB()
db1 := db.Session(&gorm.Session{}) //TODO Might need to use transaction from db1/db2 to avoid injection attack
db2 := db.Session(&gorm.Session{})
@@ -215,7 +216,7 @@ func GetUpdatedRefs(ctx context.Context, allocationID, path, offsetPath, _type,

if len(pRefs) != 0 {
lastIdx := len(pRefs) - 1
newOffsetDate = pRefs[lastIdx].UpdatedAt.String()
newOffsetDate = pRefs[lastIdx].UpdatedAt.Format(dateLayOut)
newOffsetPath = pRefs[lastIdx].Path
}
refs = &pRefs
@@ -224,9 +225,9 @@ func GetUpdatedRefs(ctx context.Context, allocationID, path, offsetPath, _type,
}

//Retrieves deleted refs compared to some updated_at value. Useful for localCache.
func GetDeletedRefs(ctx context.Context, allocationID, updatedDate, offsetPath, offsetDate string, pageLimit int) (refs *[]Ref, totalPages int, newOffsetPath, newOffsetDate string, err error) {
func GetDeletedRefs(ctx context.Context, allocationID, updatedDate, offsetPath, offsetDate string, pageLimit int, dateLayOut string) (refs *[]PaginatedRef, totalPages int, newOffsetPath, newOffsetDate string, err error) {
var totalRows int64
var pRefs []Ref
var pRefs []PaginatedRef
db := datastore.GetStore().GetDB()

db1 := db.Session(&gorm.Session{})
@@ -269,7 +270,7 @@ func GetDeletedRefs(ctx context.Context, allocationID, updatedDate, offsetPath,
wg.Wait()
if len(pRefs) != 0 {
lastIdx := len(pRefs) - 1
newOffsetDate = pRefs[lastIdx].DeletedAt.Time.String()
newOffsetDate = pRefs[lastIdx].DeletedAt.Time.Format(dateLayOut)
newOffsetPath = pRefs[lastIdx].Path

}
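Formatting the new offsets with the passed layout (instead of UpdatedAt.String() or DeletedAt.Time.String()) matters because the value is handed back to the client and must round-trip through the same layout the handler validates and queries with. A minimal sketch, assuming the OffsetDateLayout value from storage_handler.go:

package main

import (
	"fmt"
	"time"
)

const offsetDateLayout = "2006-01-02T15:04:05.99999Z07:00"

func main() {
	updatedAt := time.Date(2021, 9, 3, 10, 15, 4, 123450000, time.UTC)

	asString := updatedAt.String()                 // "2021-09-03 10:15:04.12345 +0000 UTC"
	asLayout := updatedAt.Format(offsetDateLayout) // "2021-09-03T10:15:04.12345Z"

	_, errString := time.Parse(offsetDateLayout, asString)
	_, errLayout := time.Parse(offsetDateLayout, asLayout)
	fmt.Println(errString != nil, errLayout == nil) // true true
}
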
