diff --git a/base/fill_struct.go b/base/fill_struct.go
new file mode 100644
index 000000000..58e943024
--- /dev/null
+++ b/base/fill_struct.go
@@ -0,0 +1,240 @@
+package base
+
+import (
+	"fmt"
+	"reflect"
+	"strings"
+	"time"
+)
+
+// FillStruct fills in the values of an arbitrary structure using an already deserialized
+// map of nested data. Field names are case-insensitive. Unknown fields are treated as an
+// error, *unless* the output structure implements the ArbitrarySetter interface.
+func FillStruct(fields map[string]interface{}, output interface{}) error {
+	target := reflect.ValueOf(output)
+	if target.Kind() == reflect.Ptr {
+		target = target.Elem()
+	}
+	return putFieldsToTargetStruct(fields, target)
+}
+
+// ArbitrarySetter should be implemented by structs that can store arbitrary fields in a private map.
+type ArbitrarySetter interface {
+	SetArbitrary(string, interface{}) error
+}
+
+// timeObj and ifaceObj are used for reflect.TypeOf
+var timeObj time.Time
+var ifaceObj interface{}
+
+// putFieldsToTargetStruct iterates over the fields in the target struct, and assigns each
+// field the value from the `fields` map. Recursively calls itself for any sub structures.
+// Field names are treated as case-insensitive. Returns any errors found during this process,
+// or nil if there are no errors.
+func putFieldsToTargetStruct(fields map[string]interface{}, target reflect.Value) error {
+	if target.Kind() != reflect.Struct {
+		return fmt.Errorf("can only put fields to a struct")
+	}
+
+	// Collect errors that occur in this single call; at the end of the function, join them
+	// using newlines, if any exist.
+	errs := make([]string, 0)
+
+	// Collect real key names used by the `fields` map.
+	realKeys := make([]string, 0)
+	for k := range fields {
+		realKeys = append(realKeys, k)
+	}
+	// Handle case-insensitivity by building a map from lowercase keys to real keys.
+	caseMap := make(map[string]string)
+	for i := 0; i < len(realKeys); i++ {
+		realKey := realKeys[i]
+		lowerKey := strings.ToLower(realKey)
+		caseMap[lowerKey] = realKey
+	}
+
+	// Keep track of which keys have been used from the `fields` map.
+	usedKeys := make(map[string]bool)
+
+	for i := 0; i < target.NumField(); i++ {
+		// Lowercase the key in order to make matching case-insensitive.
+		fieldName := target.Type().Field(i).Name
+		lowerName := strings.ToLower(fieldName)
+
+		val := fields[caseMap[lowerName]]
+		if val == nil {
+			// Nothing to assign to this field, go to next.
+			continue
+		}
+		usedKeys[caseMap[lowerName]] = true
+
+		err := putValueToPlace(val, target.Field(i))
+		if err != nil {
+			errs = append(errs, fmt.Sprintf("field %s: %s", fieldName, err.Error()))
+		}
+	}
+
+	// If the target struct is able, assign unknown keys to it.
+	arbitrarySetter := getArbitrarySetter(target)
+
+	// Iterate over keys in the `fields` data to see if there were any keys that were not
+	// stored in the target struct.
+	for i := 0; i < len(realKeys); i++ {
+		k := realKeys[i]
+		if _, ok := usedKeys[k]; !ok {
+			// If target struct allows storing unknown keys to a map of arbitrary data.
+			if arbitrarySetter != nil {
+				arbitrarySetter.SetArbitrary(k, fields[k])
+				continue
+			}
+			// Otherwise, unknown fields are an error.
+			errs = append(errs, fmt.Sprintf("field \"%s\" not found in target", k))
+		}
+	}
+
+	if len(errs) == 0 {
+		return nil
+	}
+	return fmt.Errorf("%s", strings.Join(errs, "\n"))
+}
+
+func putValueToPlace(val interface{}, place reflect.Value) error {
+	switch place.Kind() {
+	case reflect.Int:
+		num, ok := val.(int)
+		if ok {
+			place.SetInt(int64(num))
+			return nil
+		}
+		numFloat, ok := val.(float64)
+		if ok {
+			place.SetInt(int64(numFloat))
+			return nil
+		}
+		return fmt.Errorf("need type int, value %s", val)
+	case reflect.Float64:
+		num, ok := val.(int)
+		if ok {
+			place.SetFloat(float64(num))
+			return nil
+		}
+		numFloat, ok := val.(float64)
+		if ok {
+			place.SetFloat(numFloat)
+			return nil
+		}
+		return fmt.Errorf("need type float, value %s", val)
+	case reflect.String:
+		text, ok := val.(string)
+		if ok {
+			place.SetString(text)
+			return nil
+		}
+		return fmt.Errorf("need type string, value %s", val)
+	case reflect.Bool:
+		b, ok := val.(bool)
+		if ok {
+			place.SetBool(b)
+			return nil
+		}
+		return fmt.Errorf("need type bool, value %s", val)
+	case reflect.Struct:
+		// Specially handle time.Time, represented as a string, which needs to be parsed.
+		if place.Type() == reflect.TypeOf(timeObj) {
+			timeText, ok := val.(string)
+			if ok {
+				ts, err := time.Parse(time.RFC3339, timeText)
+				if err == nil {
+					place.Set(reflect.ValueOf(ts))
+					return nil
+				}
+				return err
+			}
+			return fmt.Errorf("need type time, value %s", val)
+		}
+		// Other struct types are not handled currently. Should probably do the same thing
+		// as what's done for `pointer` below.
+		return fmt.Errorf("unknown struct %s", place.Type())
+	case reflect.Map:
+		m, ok := val.(map[string]interface{})
+		if ok {
+			place.Set(reflect.ValueOf(m))
+			return nil
+		}
+		return fmt.Errorf("need type map, value %s", val)
+	case reflect.Slice:
+		slice, ok := val.([]interface{})
+		if !ok {
+			return fmt.Errorf("need type slice, value %s", val)
+		}
+		// Get the size and element type of the slice to deserialize.
+		size := len(slice)
+		sliceType := reflect.TypeOf(ifaceObj)
+		if size > 0 {
+			sliceType = place.Type().Elem()
+		}
+		// Construct a new, empty slice of the same size.
+		create := reflect.MakeSlice(reflect.SliceOf(sliceType), size, size)
+		// Fill in each element.
+		for i := 0; i < size; i++ {
+			elem := reflect.Indirect(reflect.New(sliceType))
+			err := putValueToPlace(slice[i], elem)
+			if err != nil {
+				return err
+			}
+			create.Index(i).Set(elem)
+		}
+		place.Set(create)
+		return nil
+	case reflect.Ptr:
+		// Allocate a new pointer for the sub-component to be filled in.
+		alloc := reflect.New(place.Type().Elem())
+		place.Set(alloc)
+		inner := alloc.Elem()
+		// For now, can only point to a struct.
+		if inner.Kind() != reflect.Struct {
+			return fmt.Errorf("can only assign to *struct")
+		}
+		// Struct must be assigned from a map.
+		component, err := toStringMap(val)
+		if err != nil {
+			return err
+		}
+		// Recursion to handle sub-component.
+		return putFieldsToTargetStruct(component, inner)
+	default:
+		return fmt.Errorf("unknown kind %s", place.Kind())
+	}
+}
+
+// toStringMap converts the input to a map[string] if able. This is needed because, while JSON
+// correctly deserializes sub structures to map[string], YAML instead deserializes to
+// map[interface{}]interface{}, so we need to manually convert this case to map[string].
+func toStringMap(val interface{}) (map[string]interface{}, error) {
+	m, ok := val.(map[string]interface{})
+	if ok {
+		return m, nil
+	}
+	imap, ok := val.(map[interface{}]interface{})
+	if ok {
+		convert := make(map[string]interface{})
+		for k, v := range imap {
+			convert[fmt.Sprintf("%v", k)] = v
+		}
+		return convert, nil
+	}
+	return nil, fmt.Errorf("could not convert to map[string]")
+}
+
+// getArbitrarySetter returns an ArbitrarySetter if the target implements it.
+func getArbitrarySetter(target reflect.Value) ArbitrarySetter {
+	if !target.CanAddr() {
+		return nil
+	}
+	ptr := target.Addr()
+	iface := ptr.Interface()
+	if setter, ok := iface.(ArbitrarySetter); ok {
+		return setter
+	}
+	return nil
+}
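
A minimal usage sketch (illustrative only, not part of the patch), mirroring the Collection type in the tests below: deserialize JSON into a map, fill a struct with base.FillStruct, and let unknown keys flow through the ArbitrarySetter hook. The Person type and field names here are hypothetical.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/qri-io/qri/base"
)

// Person is a hypothetical target struct; More holds keys that don't match any field.
type Person struct {
	More map[string]interface{}
	Name string
	Age  int
}

// SetArbitrary lets FillStruct store unknown keys instead of returning an error.
func (p *Person) SetArbitrary(key string, val interface{}) error {
	if p.More == nil {
		p.More = make(map[string]interface{})
	}
	p.More[key] = val
	return nil
}

func main() {
	data := make(map[string]interface{})
	if err := json.Unmarshal([]byte(`{"name": "Alice", "age": 42, "nickname": "Al"}`), &data); err != nil {
		panic(err)
	}
	var p Person
	// Matching is case-insensitive, so "name" and "age" land in Name and Age;
	// "nickname" has no field and is routed to SetArbitrary.
	if err := base.FillStruct(data, &p); err != nil {
		panic(err)
	}
	fmt.Println(p.Name, p.Age, p.More["nickname"]) // Alice 42 Al
}
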
diff --git a/base/fill_struct_test.go b/base/fill_struct_test.go
new file mode 100644
index 000000000..73563cbe9
--- /dev/null
+++ b/base/fill_struct_test.go
@@ -0,0 +1,319 @@
+package base
+
+import (
+	"encoding/json"
+	"reflect"
+	"testing"
+	"time"
+
+	"github.com/qri-io/dataset"
+	"gopkg.in/yaml.v2"
+)
+
+func TestFillStruct(t *testing.T) {
+	jsonData := `{
+  "Name": "test_name",
+  "ProfileID": "test_profile_id",
+  "Qri": "qri:0"
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var ds dataset.Dataset
+	err = FillStruct(data, &ds)
+	if err != nil {
+		panic(err)
+	}
+
+	if ds.Name != "test_name" {
+		t.Errorf("expected: ds.Name should be \"test_name\", got: %s", ds.Name)
+	}
+	if ds.ProfileID != "test_profile_id" {
+		t.Errorf("expected: ds.ProfileID should be \"test_profile_id\", got: %s", ds.ProfileID)
+	}
+	if ds.Qri != "qri:0" {
+		t.Errorf("expected: ds.Qri should be \"qri:0\", got: %s", ds.Qri)
+	}
+}
+
+func TestFillCommitTimestamp(t *testing.T) {
+	jsonData := `{
+  "Name": "test_commit_timestamp",
+  "Commit": {
+    "Timestamp": "1999-03-31T19:30:00.000Z"
+  }
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var ds dataset.Dataset
+	err = FillStruct(data, &ds)
+	if err != nil {
+		panic(err)
+	}
+
+	loc := time.FixedZone("UTC", 0)
+	expect := time.Date(1999, 03, 31, 19, 30, 0, 0, loc)
+
+	if ds.Name != "test_commit_timestamp" {
+		t.Errorf("expected: ds.Name should be \"test_commit_timestamp\", got: %s", ds.Name)
+	}
+	if !ds.Commit.Timestamp.Equal(expect) {
+		t.Errorf("expected: timestamp %s, got: %s", expect, ds.Commit.Timestamp)
+	}
+}
+
+func TestFillStructInsensitive(t *testing.T) {
+	jsonData := `{
+  "name": "test_name",
+  "pRoFiLeId": "test_profile_id",
+  "QRI": "qri:0"
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var ds dataset.Dataset
+	err = FillStruct(data, &ds)
+	if err != nil {
+		panic(err)
+	}
+
+	if ds.Name != "test_name" {
+		t.Errorf("expected: ds.Name should be \"test_name\", got: %s", ds.Name)
+	}
+	if ds.ProfileID != "test_profile_id" {
+		t.Errorf("expected: ds.ProfileID should be \"test_profile_id\", got: %s", ds.ProfileID)
+	}
+	if ds.Qri != "qri:0" {
+		t.Errorf("expected: ds.Qri should be \"qri:0\", got: %s", ds.Qri)
+	}
+}
+
+func TestFillStructUnknownFields(t *testing.T) {
+	jsonData := `{
+  "Name": "test_name",
+  "ProfileID": "test_profile_id",
+  "Qri": "qri:0",
+  "Unknown": "value"
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var ds dataset.Dataset
+	err = FillStruct(data, &ds)
+	if err == nil {
+		t.Errorf("expected: error for unknown field, but no error returned")
+	}
+
+	expect := "field \"Unknown\" not found in target"
+	if err.Error() != expect {
+		t.Errorf("expected: \"%s\", got: \"%s\"", expect, err.Error())
+	}
+}
+
+func TestFillStructYaml(t *testing.T) {
+	yamlData := `name: test_name
+profileID: test_profile_id
+qri: qri:0
+`
+
+	data := make(map[string]interface{})
+	err := yaml.Unmarshal([]byte(yamlData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var ds dataset.Dataset
+	err = FillStruct(data, &ds)
+	if err != nil {
+		panic(err)
+	}
+
+	if ds.Name != "test_name" {
+		t.Errorf("expected: ds.Name should be \"test_name\", got: %s", ds.Name)
+	}
+	if ds.ProfileID != "test_profile_id" {
+		t.Errorf("expected: ds.ProfileID should be \"test_profile_id\", got: %s", ds.ProfileID)
+	}
+	if ds.Qri != "qri:0" {
+		t.Errorf("expected: ds.Qri should be \"qri:0\", got: %s", ds.Qri)
+	}
+}
+
+type Collection struct {
+	More map[string]interface{}
+	Name string
+	Age  int
+	IsOn bool
+	Xpos float64
+}
+
+func (c *Collection) SetArbitrary(key string, val interface{}) error {
+	if c.More == nil {
+		c.More = make(map[string]interface{})
+	}
+	c.More[key] = val
+	return nil
+}
+
+func TestFillArbitrarySetter(t *testing.T) {
+	jsonData := `{
+  "Name": "Alice",
+  "Age": 42,
+  "Unknown": "value"
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var c Collection
+	err = FillStruct(data, &c)
+	if err != nil {
+		panic(err)
+	}
+
+	if c.Name != "Alice" {
+		t.Errorf("expected: c.Name should be \"Alice\", got: %s", c.Name)
+	}
+	if c.Age != 42 {
+		t.Errorf("expected: c.Age should be 42, got: %d", c.Age)
+	}
+	if c.More["Unknown"] != "value" {
+		t.Errorf("expected: c.More[\"Unknown\"] should be \"value\", got: %s", c.More["Unknown"])
+	}
+}
+
+func TestFillBoolean(t *testing.T) {
+	jsonData := `{
+  "Name": "Bob",
+  "IsOn": true
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var c Collection
+	err = FillStruct(data, &c)
+	if err != nil {
+		panic(err)
+	}
+
+	if c.Name != "Bob" {
+		t.Errorf("expected: c.Name should be \"Bob\", got: %s", c.Name)
+	}
+	if c.IsOn != true {
+		t.Errorf("expected: c.IsOn should be true, got: %v", c.IsOn)
+	}
+}
+
+func TestFillFloatingPoint(t *testing.T) {
+	jsonData := `{
+  "Name": "Carol",
+  "Xpos": 6.283
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var c Collection
+	err = FillStruct(data, &c)
+	if err != nil {
+		panic(err)
+	}
+
+	if c.Name != "Carol" {
+		t.Errorf("expected: c.Name should be \"Carol\", got: %s", c.Name)
+	}
+	if c.Xpos != 6.283 {
+		t.Errorf("expected: c.Xpos should be 6.283, got: %v", c.Xpos)
+	}
+}
+
+func TestFillMetaKeywords(t *testing.T) {
+	jsonData := `{
+  "Keywords": [
+    "Test0",
+    "Test1",
+    "Test2"
+  ]
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var meta dataset.Meta
+	err = FillStruct(data, &meta)
+	if err != nil {
+		panic(err)
+	}
+
+	expect := []string{"Test0", "Test1", "Test2"}
+	if !reflect.DeepEqual(meta.Keywords, expect) {
+		t.Errorf("expected: meta.Keywords should be %s, got: %s", expect, meta.Keywords)
+	}
+}
+
+func TestFillMetaCitations(t *testing.T) {
+	jsonData := `{
+  "Citations": [
+    {
+      "Name": "A website",
+      "URL": "http://example.com",
+      "Email": "me@example.com"
+    }
+  ]
+}`
+
+	data := make(map[string]interface{})
+	err := json.Unmarshal([]byte(jsonData), &data)
+	if err != nil {
+		panic(err)
+	}
+
+	var meta dataset.Meta
+	err = FillStruct(data, &meta)
+	if err != nil {
+		panic(err)
+	}
+
+	expect := dataset.Meta{
+		Citations: []*dataset.Citation{
+			&dataset.Citation{
+				Name:  "A website",
+				URL:   "http://example.com",
+				Email: "me@example.com",
+			},
+		},
+	}
+	if !reflect.DeepEqual(meta, expect) {
+		t.Errorf("expected: meta should equal %v, got: %v", expect, meta)
+	}
+}
diff --git a/cmd/cmd_test.go b/cmd/cmd_test.go
index c501edba3..3da25a34b 100644
--- a/cmd/cmd_test.go
+++ b/cmd/cmd_test.go
@@ -12,6 +12,7 @@ import (
 	"testing"
 	"time"
 
+	"github.com/qri-io/dataset/dsfs"
 	"github.com/qri-io/ioes"
 	"github.com/qri-io/qfs/cafs/ipfs"
 	"github.com/qri-io/qri/config"
@@ -373,6 +374,44 @@ func TestRemoveAllRevisionsShortForm(t *testing.T) {
 	}
 }
 
+// Test that save can override a single component, meta in this case.
+func TestSaveThenOverrideMetaComponent(t *testing.T) {
+	if err := confirmQriNotRunning(); err != nil {
+		t.Skip(err.Error())
+	}
+
+	// To keep hashes consistent, artificially specify the timestamp by overriding
+	// the dsfs.Timestamp func
+	prev := dsfs.Timestamp
+	defer func() { dsfs.Timestamp = prev }()
+	dsfs.Timestamp = func() time.Time { return time.Date(2001, 01, 01, 01, 01, 01, 01, time.UTC) }
+
+	r := NewTestRepoRoot(t, "qri_test_save_then_override_meta")
+	defer r.Delete()
+
+	cmdR := r.CreateCommandRunner()
+	_, err := executeCommand(cmdR, "qri save --file=testdata/movies/ds_ten.yaml me/test_ds")
+	if err != nil {
+		t.Fatalf(err.Error())
+	}
+
+	cmdR = r.CreateCommandRunner()
+	_, err = executeCommand(cmdR, "qri save --file=testdata/movies/meta_override.yaml me/test_ds")
+	if err != nil {
+		t.Fatalf(err.Error())
+	}
+
+	// Read head from the dataset that was saved, as json string.
+	dsPath := r.GetPathForDataset(0)
+	actual := r.DatasetMarshalJSON(dsPath)
+
+	// This dataset is ds_ten.yaml, with the meta replaced by meta_override.yaml.
+	expect := `{"bodyPath":"/ipfs/QmXhsUK6vGZrqarhw9Z8RCXqhmEpvtVByKtaYVarbDZ5zn","commit":{"author":{"id":"QmeL2mdVka1eahKENjehK6tBxkkpk5dNQ1qMcgWi7Hrb4B"},"message":"\t- modified title\n","path":"/ipfs/QmfEiBVM5MLdw2zBgFVMcCRjavFmna8cLnPQiRzkjmWPBu","qri":"cm:0","signature":"I/nrDkgwt1IPtdFKvgMQAIRYvOqKfqm6x0qfpuJ14rEtO3+uPnY3K5pVDMWJ7K+pYJz6fyguYWgXHKkbo5wZl0ICVyoIiPa9zIVbqc1d6j1v13WqtRb0bn1CXQvuI6HcBhb7+VqkSW1m+ALpxhNQuI4ZfRv8Nm8MbEpL6Ct55fJpWX1zszJ2rQP1LcH2AlEZ8bl0qpcFMk03LENUHSt1DjlaApxrEJzDgAs5drfndxXgGKYjPpkjdF+qGhn2ALV2tC64I5aIn1SJPAQnVwprUr1FmVZjZcF9m9r8WnzQ6ldj29eZIciiFlT4n2Cbw+dgPo/hNRsgzn7Our2a6r5INw==","timestamp":"2001-01-01T01:01:01.000000001Z","title":"Meta: 1 change"},"meta":{"qri":"md:0","title":"different title"},"path":"/ipfs/QmXHG7tsnNWcztDB6Dqde3qGEh7BFvgho8F138A9ho5yMM/dataset.json","peername":"me","previousPath":"/ipfs/QmdFjgWLL5nGdXLb9383x4dNskxQX84iqm7hPJiBCHij1p","qri":"ds:0","structure":{"checksum":"QmcXDEGeWdyzfFRYyPsQVab5qszZfKqxTMEoXRDSZMyrhf","depth":2,"errCount":1,"entries":8,"format":"csv","formatConfig":{"headerRow":true,"lazyQuotes":true},"length":224,"qri":"st:0","schema":{"items":{"items":[{"title":"movie_title","type":"string"},{"title":"duration","type":"integer"}],"type":"array"},"type":"array"}}}`
+	if actual != expect {
+		t.Errorf("error, dataset actual:\n%s\nexpect:\n%s\n", actual, expect)
+	}
+}
+
 // TODO: Perhaps this utility should move to a lower package, and be used as a way to validate the
 // bodies of dataset in more of our test case. That would require extracting some parts out, like
 // pathFactory, which would probably necessitate the pathFactory taking the testRepoRoot as a
@@ -498,3 +537,23 @@ func (r *TestRepoRoot) ReadBodyFromIPFS(keyPath string) string {
 
 	return string(bodyBytes)
 }
+
+// DatasetMarshalJSON reads the dataset head and marshals it as json.
+func (r *TestRepoRoot) DatasetMarshalJSON(ref string) string {
+	fs, err := ipfs_filestore.NewFilestore(func(cfg *ipfs_filestore.StoreCfg) {
+		cfg.Online = false
+		cfg.FsRepoPath = r.ipfsPath
+	})
+	if err != nil {
+		r.t.Fatal(err)
+	}
+	ds, err := dsfs.LoadDataset(fs, ref)
+	if err != nil {
+		r.t.Fatal(err)
+	}
+	bytes, err := json.Marshal(ds)
+	if err != nil {
+		r.t.Fatal(err)
+	}
+	return string(bytes)
+}
diff --git a/cmd/list.go b/cmd/list.go
index 48da07a41..0f474ac5a 100644
--- a/cmd/list.go
+++ b/cmd/list.go
@@ -107,9 +107,9 @@ func (o *ListOptions) Run() (err error) {
 	// TODO: It would be a bit more efficient to pass dsName to the ListParams
 	// and only retrieve information about that one dataset.
 	p := &lib.ListParams{
-		Peername: peername,
-		Limit:    o.Limit,
-		Offset:   o.Offset,
+		Peername:        peername,
+		Limit:           o.Limit,
+		Offset:          o.Offset,
 		ShowNumVersions: o.ShowNumVersions,
 	}
 	if err = o.DatasetRequests.List(p, &refs); err != nil {
diff --git a/cmd/save.go b/cmd/save.go
index 888c1329a..e5682ff51 100644
--- a/cmd/save.go
+++ b/cmd/save.go
@@ -141,7 +141,7 @@ func (o *SaveOptions) Run() (err error) {
 
 	p := &lib.SaveParams{
 		Dataset:     dsp,
-		DatasetPath: o.FilePath,
+		FilePath:    o.FilePath,
 		Private:     false,
 		Publish:     o.Publish,
 		DryRun:      o.DryRun,
diff --git a/cmd/save_test.go b/cmd/save_test.go
index b9d024012..5d9e637b9 100644
--- a/cmd/save_test.go
+++ b/cmd/save_test.go
@@ -155,9 +155,9 @@ func TestSaveRun(t *testing.T) {
 		{"no changes detected", "me/movies", "testdata/movies/dataset.json", "testdata/movies/body_twenty.csv",
 			"trying to add again", "hopefully this errors", false, false, "", "error saving: no changes detected", ""},
 
-		{"add viz", "me/movies", "testdata/movies/dataset_with_viz.json", "", "", "", false, false, "dataset saved: peer/movies@QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt/map/QmPauqpHsrRKmg1TbT6aBi7Axse9uPETDgMGxZShv3c79F\nthis dataset has 1 validation errors\n", "", ""},
+		{"add viz", "me/movies", "testdata/movies/dataset_with_viz.json", "", "", "", false, false, "dataset saved: peer/movies@QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt/map/Qmcmjcw1onuvWCvM2NZzqDztfp3djpCErcD2zSirmAw65p\nthis dataset has 1 validation errors\n", "", ""},
 
-		{"add transform", "me/movies", "testdata/movies/dataset_with_tf.json", "", "", "", false, false, "dataset saved: peer/movies@QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt/map/QmSHGct4PB15jvW6N5xVdWmJL8MPiK3jDwcTAt9uAzpz5f\nthis dataset has 1 validation errors\n", "", ""},
+		{"add transform", "me/movies", "testdata/movies/dataset_with_tf.json", "", "", "", false, false, "dataset saved: peer/movies@QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt/map/QmTwkFwddz8pFHj4z7JCxNEydzZbMY8L77hAH3aMokZzy2\nthis dataset has 1 validation errors\n", "", ""},
 	}
 
 	for _, c := range cases {
diff --git a/cmd/testdata/movies/meta_override.yaml b/cmd/testdata/movies/meta_override.yaml
new file mode 100644
index 000000000..75d242b05
--- /dev/null
+++ b/cmd/testdata/movies/meta_override.yaml
@@ -0,0 +1,2 @@
+qri: md:0
+title: different title
diff --git a/lib/datasets.go b/lib/datasets.go
index 6d2499ab8..c67b76462 100644
--- a/lib/datasets.go
+++ b/lib/datasets.go
@@ -183,11 +183,11 @@ func (r *DatasetRequests) Get(p *GetParams, res *GetResult) (err error) {
 
 // SaveParams encapsulates arguments to Save
 type SaveParams struct {
-	// dataset to create. If both Dataset and DatasetPath are provided
-	// dataset values will override any values in the document at DatasetPath
+	// dataset to create. If both Dataset and FilePath are provided
+	// dataset values will override any values in the document at FilePath
 	Dataset *dataset.Dataset
-	// absolute path or URL to a dataset file to load dataset from
-	DatasetPath string
+	// absolute path or URL to a dataset file or component to load
+	FilePath string
 	// secrets for transform execution
 	Secrets map[string]string
 	// option to make dataset private. private data is not currently implimented,
@@ -222,8 +222,8 @@ func (r *DatasetRequests) Save(p *SaveParams, res *repo.DatasetRef) (err error)
 	}
 
 	ds := p.Dataset
-	if ds == nil && p.DatasetPath == "" {
-		return fmt.Errorf("at least one of Dataset, DatasetPath is required")
+	if ds == nil && p.FilePath == "" {
+		return fmt.Errorf("at least one of Dataset, FilePath is required")
 	}
 
 	if p.Recall != "" {
@@ -243,9 +243,9 @@ func (r *DatasetRequests) Save(p *SaveParams, res *repo.DatasetRef) (err error)
 		ds = recall
 	}
 
-	if p.DatasetPath != "" {
+	if p.FilePath != "" {
 		// TODO (b5): handle this with a fs.Filesystem
-		dsf, err := ReadDatasetFile(p.DatasetPath)
+		dsf, err := ReadDatasetFile(p.FilePath)
 		if err != nil {
 			return err
 		}
diff --git a/lib/datasets_test.go b/lib/datasets_test.go
index 315599844..8c6c1cbee 100644
--- a/lib/datasets_test.go
+++ b/lib/datasets_test.go
@@ -208,7 +208,7 @@ func TestDatasetRequestsSaveZip(t *testing.T) {
 
 	dsp := &dataset.Dataset{Peername: "me"}
 	res := repo.DatasetRef{}
-	err = req.Save(&SaveParams{Dataset: dsp, DatasetPath: "testdata/import.zip"}, &res)
+	err = req.Save(&SaveParams{Dataset: dsp, FilePath: "testdata/import.zip"}, &res)
 	if err != nil {
 		t.Fatal(err.Error())
 	}
diff --git a/lib/file.go b/lib/file.go
index abd84ea15..fe7ad88ba 100644
--- a/lib/file.go
+++ b/lib/file.go
@@ -11,6 +11,8 @@ import (
 
 	"github.com/qri-io/dataset"
 	"github.com/qri-io/dataset/dsutil"
+	"github.com/qri-io/qri/base"
+	"gopkg.in/yaml.v2"
 )
 
 // AbsPath adjusts the provided string to a path lib functions can work with
@@ -96,16 +98,19 @@ func ReadDatasetFile(path string) (ds *dataset.Dataset, err error) {
 		if err != nil {
 			return
 		}
-		if err = dsutil.UnmarshalYAMLDataset(data, ds); err != nil {
+
+		fields := make(map[string]interface{})
+		if err = yaml.Unmarshal(data, fields); err != nil {
 			return
 		}
-		absDatasetPaths(path, ds)
+		err = fillDatasetOrComponent(fields, path, ds)
 
 	case ".json":
-		if err = json.NewDecoder(f).Decode(ds); err != nil {
+		fields := make(map[string]interface{})
+		if err = json.NewDecoder(f).Decode(&fields); err != nil {
 			return
 		}
-		absDatasetPaths(path, ds)
+		err = fillDatasetOrComponent(fields, path, ds)
 
 	case ".zip":
 		data, err = ioutil.ReadAll(f)
@@ -122,6 +127,32 @@ func ReadDatasetFile(path string) (ds *dataset.Dataset, err error) {
 	return
 }
 
+// fillDatasetOrComponent fills the deserialized fields into either the whole dataset, or a
+// single component of it, depending on the prefix of the "qri" kind field.
+func fillDatasetOrComponent(fields map[string]interface{}, path string, ds *dataset.Dataset) (err error) {
+	var fill interface{}
+	fill = ds
+
+	if kindStr, ok := fields["qri"].(string); ok && len(kindStr) > 3 {
+		switch kindStr[:2] {
+		case "md":
+			ds.Meta = &dataset.Meta{}
+			fill = ds.Meta
+		case "cm":
+			ds.Commit = &dataset.Commit{}
+			fill = ds.Commit
+		case "st":
+			ds.Structure = &dataset.Structure{}
+			fill = ds.Structure
+		}
+	}
+
+	if err = base.FillStruct(fields, fill); err != nil {
+		return err
+	}
+	absDatasetPaths(path, ds)
+	return nil
+}
+
 // absDatasetPaths converts any relative filepath references in a Dataset to
 // their absolute counterpart
 func absDatasetPaths(path string, dsp *dataset.Dataset) {
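
A minimal sketch (illustrative only, not part of the patch) of the component-override flow these changes enable, assuming the lib.ReadDatasetFile signature above: a file whose "qri" field begins with "md" is routed into ds.Meta by fillDatasetOrComponent. The path below refers to the meta_override.yaml fixture added in this change.

package main

import (
	"fmt"

	"github.com/qri-io/qri/lib"
)

func main() {
	// meta_override.yaml declares "qri: md:0", so only the Meta component is filled in;
	// the rest of the returned dataset stays empty.
	ds, err := lib.ReadDatasetFile("cmd/testdata/movies/meta_override.yaml")
	if err != nil {
		panic(err)
	}
	// Expected to print "different title", per the fixture above.
	fmt.Println(ds.Meta.Title)
}
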