diff --git a/api/fsi_test.go b/api/fsi_test.go
index 7b83236cd..35aa34af9 100644
--- a/api/fsi_test.go
+++ b/api/fsi_test.go
@@ -103,7 +103,7 @@ func TestNoHistory(t *testing.T) {
 		t.Fatal(err)
 	}
-	if ref != "peer/test_ds" {
+	if ref.Human() != "peer/test_ds" {
 		t.Errorf("expected ref to be \"peer/test_ds\", got \"%s\"", ref)
 	}
diff --git a/api/testdata/api.snapshot b/api/testdata/api.snapshot
index 01c5387a2..85bb0e12c 100755
Binary files a/api/testdata/api.snapshot and b/api/testdata/api.snapshot differ
diff --git a/cmd/fsi_integration_test.go b/cmd/fsi_integration_test.go
index c103fa6fa..6208d2e90 100644
--- a/cmd/fsi_integration_test.go
+++ b/cmd/fsi_integration_test.go
@@ -16,6 +16,7 @@ import (
 	"github.com/qri-io/qri/base/component"
 	"github.com/qri-io/qri/dsref"
 	"github.com/qri-io/qri/fsi"
+	"github.com/qri-io/qri/lib"
 )
 // FSITestRunner holds test info for fsi integration tests, for convenient cleanup.
@@ -66,15 +67,24 @@ func (run *FSITestRunner) GetCommandOutput() string {
 	if buffer, ok := run.Streams.Out.(*bytes.Buffer); ok {
 		outputText = buffer.String()
 	}
+	return run.niceifyTempDirs(outputText)
+}
+
+// niceifyTempDirs replaces temporary directories with nice replacements
+func (run *FSITestRunner) niceifyTempDirs(text string) string {
 	realRoot, err := filepath.EvalSymlinks(run.RepoRoot.RootPath)
 	if err == nil {
-		outputText = strings.Replace(outputText, realRoot, "/root", -1)
+		text = strings.Replace(text, realRoot, "/root", -1)
 	}
 	realTmp, err := filepath.EvalSymlinks(run.RootPath)
 	if err == nil {
-		outputText = strings.Replace(outputText, realTmp, "/tmp", -1)
+		text = strings.Replace(text, realTmp, "/tmp", -1)
+	}
+	workPath, err := filepath.EvalSymlinks(run.WorkPath)
+	if err == nil {
+		text = strings.Replace(text, workPath, "/work", -1)
 	}
-	return outputText
+	return text
 }
 // NewFSITestRunnerWithMockRemoteClient returns a new FSITestRunner.
@@ -272,14 +282,21 @@ func TestInitDscache(t *testing.T) {
 	}
 	// Access the dscache
-	repo, err := run.RepoRoot.Repo()
+	// TODO(dustmop): A hack in place for now. The instance does not have an accessor for the
+	// dscache, and the dscache on the repo is not correct to use here.
+	instCopy, err := lib.NewInstance(
+		run.Context,
+		run.RepoRoot.QriPath,
+		lib.OptStdIOStreams(),
+		lib.OptSetIPFSPath(run.RepoRoot.IPFSPath),
+	)
 	if err != nil {
 		t.Fatal(err)
 	}
-	cache := repo.Dscache()
+	cache := instCopy.Dscache()
 	// Dscache should have one reference. It has topIndex 0 because there there is only "init".
-	actual := cache.VerboseString(false)
+	actual := run.niceifyTempDirs(cache.VerboseString(false))
 	expect := `Dscache:
 Dscache.Users:
  0) user=test_peer profileID=QmeL2mdVka1eahKENjehK6tBxkkpk5dNQ1qMcgWi7Hrb4B
@@ -290,6 +307,7 @@ func TestInitDscache(t *testing.T) {
      cursorIndex = 0
      prettyName = init_dscache
      commitTime = -62135596800
+     fsiPath = /tmp/init_dscache
 `
 	if diff := cmp.Diff(expect, actual); diff != "" {
 		t.Errorf("result mismatch (-want +got):%s\n", diff)
 	}
diff --git a/cmd/save_test.go b/cmd/save_test.go
index aacb48226..96b80be58 100644
--- a/cmd/save_test.go
+++ b/cmd/save_test.go
@@ -548,7 +548,7 @@ func TestSaveDscacheFirstCommit(t *testing.T) {
 	cacheFilename := cache.Filename
 	ctx := context.Background()
 	// TODO(dustmop): Do we need to pass a book?
-	cache = dscache.NewDscache(ctx, fs, nil, cacheFilename)
+	cache = dscache.NewDscache(ctx, fs, nil, run.Username(), cacheFilename)
 	// Dscache should have two entries now. They are alphabetized by pretty name, and have all
 	// the expected data.
@@ -628,7 +628,7 @@ func TestSaveDscacheExistingDataset(t *testing.T) {
 	fs := localfs.NewFS()
 	cacheFilename := cache.Filename
 	ctx := context.Background()
-	cache = dscache.NewDscache(ctx, fs, nil, cacheFilename)
+	cache = dscache.NewDscache(ctx, fs, nil, run.Username(), cacheFilename)
 	// Dscache should now have one reference. Now topIndex is 2 because there is another "commit".
 	actual = cache.VerboseString(false)
@@ -710,7 +710,7 @@ func TestSaveDscacheThenRemoveAll(t *testing.T) {
 	fs := localfs.NewFS()
 	cacheFilename := cache.Filename
 	ctx := context.Background()
-	cache = dscache.NewDscache(ctx, fs, nil, cacheFilename)
+	cache = dscache.NewDscache(ctx, fs, nil, run.Username(), cacheFilename)
 	// Dscache should now have one reference.
 	actual = cache.VerboseString(false)
@@ -786,7 +786,7 @@ func TestSaveDscacheThenRemoveVersions(t *testing.T) {
 	fs := localfs.NewFS()
 	cacheFilename := cache.Filename
 	ctx := context.Background()
-	cache = dscache.NewDscache(ctx, fs, nil, cacheFilename)
+	cache = dscache.NewDscache(ctx, fs, nil, run.Username(), cacheFilename)
 	// Dscache should now have one reference.
 	actual = cache.VerboseString(false)
diff --git a/cmd/test_runner_test.go b/cmd/test_runner_test.go
index 2046abc12..4864f52f8 100644
--- a/cmd/test_runner_test.go
+++ b/cmd/test_runner_test.go
@@ -249,6 +249,11 @@ func (run *TestRunner) newCommandRunner(ctx context.Context, combineOutErr bool)
 	return cmd
 }
+// Username returns the test username from the config's profile
+func (run *TestRunner) Username() string {
+	return run.RepoRoot.GetConfig().Profile.Peername
+}
+
 // IOReset resets the io streams
 func (run *TestRunner) IOReset() {
 	run.InStream.Reset()
@@ -409,6 +414,7 @@ func (run *TestRunner) GetCommandErrOutput() string {
 }
 func (run *TestRunner) niceifyTempDirs(text string) string {
+	text = strings.Replace(text, run.RepoRoot.RootPath, "/root", -1)
 	realRoot, err := filepath.EvalSymlinks(run.RepoRoot.RootPath)
 	if err == nil {
 		text = strings.Replace(text, realRoot, "/root", -1)
 	}
diff --git a/dscache/dscache.go b/dscache/dscache.go
index 7d7091e49..cf4a3f9d5 100644
--- a/dscache/dscache.go
+++ b/dscache/dscache.go
@@ -13,7 +13,7 @@ import (
 	"github.com/qri-io/qfs"
 	"github.com/qri-io/qri/dscache/dscachefb"
 	"github.com/qri-io/qri/dsref"
-	"github.com/qri-io/qri/logbook"
+	"github.com/qri-io/qri/event/hook"
 	"github.com/qri-io/qri/repo/profile"
 	reporef "github.com/qri-io/qri/repo/ref"
 )
@@ -40,7 +40,7 @@ type Dscache struct {
 // NewDscache will construct a dscache from the given filename, or will construct an empty dscache
 // that will save to the given filename. Using an empty filename will disable loading and saving
-func NewDscache(ctx context.Context, fsys qfs.Filesystem, book *logbook.Book, filename string) *Dscache {
+func NewDscache(ctx context.Context, fsys qfs.Filesystem, hooks []hook.ChangeNotifier, username, filename string) *Dscache {
 	cache := Dscache{Filename: filename}
 	f, err := fsys.Get(ctx, filename)
 	if err == nil {
@@ -54,9 +54,9 @@ func NewDscache(ctx context.Context, fsys qfs.Filesystem, book *logbook.Book, fi
 			cache = Dscache{Filename: filename, Root: root, Buffer: buffer}
 		}
 	}
-	if book != nil {
-		book.Observe(cache.update)
-		cache.DefaultUsername = book.AuthorName()
+	cache.DefaultUsername = username
+	for _, h := range hooks {
+		h.SetChangeHook(cache.update)
 	}
 	return &cache
 }
@@ -211,26 +211,30 @@ func (d *Dscache) validateProfileID(profileID string) bool {
 	return len(profileID) == lengthOfProfileID
 }
-func (d *Dscache) update(act *logbook.Action) {
+func (d *Dscache) update(act hook.DsChange) {
 	switch act.Type {
-	case logbook.ActionDatasetNameInit:
+	case hook.DatasetNameInit:
 		if err := d.updateInitDataset(act); err != nil && err != ErrNoDscache {
 			log.Error(err)
 		}
-	case logbook.ActionDatasetCommitChange:
+	case hook.DatasetCommitChange:
 		if err := d.updateChangeCursor(act); err != nil && err != ErrNoDscache {
 			log.Error(err)
 		}
-	case logbook.ActionDatasetDeleteAll:
+	case hook.DatasetDeleteAll:
 		if err := d.updateDeleteDataset(act); err != nil && err != ErrNoDscache {
 			log.Error(err)
 		}
-	case logbook.ActionDatasetRename:
+	case hook.DatasetRename:
 		// TODO(dustmop): Handle renames
+	case hook.DatasetCreateLink:
+		if err := d.updateCreateLink(act); err != nil && err != ErrNoDscache {
+			log.Error(err)
+		}
 	}
 }
-func (d *Dscache) updateInitDataset(act *logbook.Action) error {
+func (d *Dscache) updateInitDataset(act hook.DsChange) error {
 	if d.IsEmpty() {
 		// Only create a new dscache if that feature is enabled. This way no one is forced to
 		// use dscache without opting in.
@@ -278,7 +282,7 @@ func (d *Dscache) updateInitDataset(act *logbook.Action) error {
 }
 // Copy the entire dscache, except for the matching entry, rebuild that one to modify it
-func (d *Dscache) updateChangeCursor(act *logbook.Action) error {
+func (d *Dscache) updateChangeCursor(act hook.DsChange) error {
 	if d.IsEmpty() {
 		return ErrNoDscache
 	}
@@ -322,7 +326,7 @@ func (d *Dscache) updateChangeCursor(act *logbook.Action) error {
 }
 // Copy the entire dscache, except leave out the matching entry.
-func (d *Dscache) updateDeleteDataset(act *logbook.Action) error {
+func (d *Dscache) updateDeleteDataset(act hook.DsChange) error {
 	if d.IsEmpty() {
 		return ErrNoDscache
 	}
@@ -344,6 +348,40 @@ func (d *Dscache) updateDeleteDataset(act *logbook.Action) error {
 	return d.save()
 }
+// Copy the entire dscache, except for the matching entry, which is copied then assigned an fsiPath
+func (d *Dscache) updateCreateLink(act hook.DsChange) error {
+	if d.IsEmpty() {
+		return ErrNoDscache
+	}
+	// Flatbuffers for go do not allow mutation (for complex types like strings). So we construct
+	// a new flatbuffer entirely, copying the old one while replacing the entry we care to change.
+	builder := flatbuffers.NewBuilder(0)
+	users := d.copyUserAssociationList(builder)
+	refs := d.copyReferenceListWithReplacement(
+		builder,
+		// Function to match the entry we're looking to replace
+		func(r *dscachefb.RefEntryInfo) bool {
+			if act.InitID != "" {
+				return string(r.InitID()) == act.InitID
+			}
+			return d.DefaultUsername == act.Username && string(r.PrettyName()) == act.PrettyName
+		},
+		// Function to replace the matching entry
+		func(refStartMutationFunc func(builder *flatbuffers.Builder)) {
+			fsiDir := builder.CreateString(string(act.Dir))
+			// Start building a ref object, by mutating an existing ref object.
+			refStartMutationFunc(builder)
+			// For this kind of update, only the fsiDir is modified
+			dscachefb.RefEntryInfoAddFsiPath(builder, fsiDir)
+			// Don't call RefEntryInfoEnd, that is handled by copyReferenceListWithReplacement
+		},
+	)
+	root, serialized := d.finishBuilding(builder, users, refs)
+	d.Root = root
+	d.Buffer = serialized
+	return d.save()
+}
+
 func convertEntryToVersionInfo(r *dscachefb.RefEntryInfo) dsref.VersionInfo {
 	return dsref.VersionInfo{
 		InitID: string(r.InitID()),
diff --git a/dscache/dscache_test.go b/dscache/dscache_test.go
index 7b6f2acc9..9de70aa85 100644
--- a/dscache/dscache_test.go
+++ b/dscache/dscache_test.go
@@ -47,21 +47,22 @@ func TestDscacheAssignSaveAndLoad(t *testing.T) {
 	fs := localfs.NewFS()
 	peerInfo := testPeers.GetTestPeerInfo(0)
+	peername := "test_user"
 	// Construct a dscache, will not save without a filename
 	builder := NewBuilder()
-	builder.AddUser("test_user", profile.IDFromPeerID(peerInfo.PeerID).String())
+	builder.AddUser(peername, profile.IDFromPeerID(peerInfo.PeerID).String())
 	builder.AddDsVersionInfo(dsref.VersionInfo{InitID: "abcd1"})
 	builder.AddDsVersionInfo(dsref.VersionInfo{InitID: "efgh2"})
 	constructed := builder.Build()
 	// A dscache that will save when it is assigned
 	dscacheFile := filepath.Join(tmpdir, "dscache.qfb")
-	saveable := NewDscache(ctx, fs, nil, dscacheFile)
+	saveable := NewDscache(ctx, fs, nil, peername, dscacheFile)
 	saveable.Assign(constructed)
 	// Load the dscache from its serialized file, verify it has correct data
-	loadable := NewDscache(ctx, fs, nil, dscacheFile)
+	loadable := NewDscache(ctx, fs, nil, peername, dscacheFile)
 	if loadable.Root.UsersLength() != 1 {
 		t.Errorf("expected, 1 user, got %d users", loadable.Root.UsersLength())
 	}
diff --git a/event/hook/change.go b/event/hook/change.go
new file mode 100644
index 000000000..669140a2b
--- /dev/null
+++ b/event/hook/change.go
@@ -0,0 +1,39 @@
+package hook
+
+import (
+	"github.com/qri-io/qri/dsref"
+)
+
+// ChangeType is the type of change that has happened to a dataset
+type ChangeType byte
+
+const (
+	// DatasetNameInit is when a dataset is initialized
+	DatasetNameInit ChangeType = iota
+	// DatasetCommitChange is when a dataset changes its newest commit
+	DatasetCommitChange
+	// DatasetDeleteAll is when a dataset is entirely deleted
+	DatasetDeleteAll
+	// DatasetRename is when a dataset is renamed
+	DatasetRename
+	// DatasetCreateLink is when a dataset is linked to a working directory
+	DatasetCreateLink
+)
+
+// DsChange represents the result of a change to a dataset
+type DsChange struct {
+	Type       ChangeType
+	InitID     string
+	TopIndex   int
+	ProfileID  string
+	Username   string
+	PrettyName string
+	HeadRef    string
+	Info       *dsref.VersionInfo
+	Dir        string
+}
+
+// ChangeNotifier is something that provides a hook which will be called when a dataset changes
+type ChangeNotifier interface {
+	SetChangeHook(func(change DsChange))
+}
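The new hook package above defines a deliberately small contract: a ChangeNotifier holds a single callback and invokes it with a DsChange describing what happened. A minimal sketch of an implementer follows; the toyNotifier type and its rename method are hypothetical, only the hook identifiers come from this patch:

    package example // hypothetical package, not part of this change

    import "github.com/qri-io/qri/event/hook"

    // toyNotifier is a hypothetical change source: it stores the hook it is
    // given and fires a DsChange whenever a rename it tracks occurs.
    type toyNotifier struct {
    	onChange func(hook.DsChange)
    }

    // SetChangeHook satisfies hook.ChangeNotifier
    func (n *toyNotifier) SetChangeHook(changeHook func(change hook.DsChange)) {
    	n.onChange = changeHook
    }

    // rename fires the hook, guarding against the unset case, the same
    // nil-check pattern logbook.Book and fsi.FSI use in this patch
    func (n *toyNotifier) rename(initID, newName string) {
    	if n.onChange != nil {
    		n.onChange(hook.DsChange{
    			Type:       hook.DatasetRename,
    			InitID:     initID,
    			PrettyName: newName,
    		})
    	}
    }

    // compile-time assertion that toyNotifier implements the interface
    var _ hook.ChangeNotifier = (*toyNotifier)(nil)

NewDscache passes the dscache's unexported update method to each notifier's SetChangeHook, so a single hook per notifier is the implicit assumption here.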
diff --git a/fsi/fsi.go b/fsi/fsi.go
index d423e7ebf..1bfc70eff 100644
--- a/fsi/fsi.go
+++ b/fsi/fsi.go
@@ -22,6 +22,7 @@ import (
 	"github.com/qri-io/qri/base/component"
 	"github.com/qri-io/qri/dsref"
 	"github.com/qri-io/qri/event"
+	"github.com/qri-io/qri/event/hook"
 	"github.com/qri-io/qri/fsi/linkfile"
 	"github.com/qri-io/qri/repo"
 	reporef "github.com/qri-io/qri/repo/ref"
@@ -52,8 +53,9 @@ func RepoPath(repoPath string) string {
 // FSI is a repo-side struct for coordinating file system integration
 type FSI struct {
 	// repository for resolving dataset names
-	repo repo.Repo
-	pub  event.Publisher
+	repo         repo.Repo
+	pub          event.Publisher
+	onChangeHook func(hook.DsChange)
 }
 // NewFSI creates an FSI instance from a path to a links flatbuffer file
@@ -157,6 +159,15 @@ func (fsi *FSI) CreateLink(dirPath, refStr string) (alias string, rollback func(
 		Dsname:   datasetRef.Name,
 	})
+	if fsi.onChangeHook != nil {
+		fsi.onChangeHook(hook.DsChange{
+			Type:       hook.DatasetCreateLink,
+			Username:   datasetRef.Peername,
+			PrettyName: datasetRef.Name,
+			Dir:        dirPath,
+		})
+	}
+
 	return datasetRef.AliasString(), removeLinkAndRemoveRefFunc, err
 }
@@ -252,6 +263,11 @@ func (fsi *FSI) RemoveAll(dirPath string) error {
 	return nil
 }
+// SetChangeHook assigns a hook that will be called when a dataset changes
+func (fsi *FSI) SetChangeHook(changeHook func(hook.DsChange)) {
+	fsi.onChangeHook = changeHook
+}
+
 func (fsi *FSI) getRepoRef(refStr string) (ref reporef.DatasetRef, err error) {
 	ref, err = repo.ParseDatasetRef(refStr)
 	if err != nil {
diff --git a/fsi/init.go b/fsi/init.go
index e6768f310..c3200f43f 100644
--- a/fsi/init.go
+++ b/fsi/init.go
@@ -10,6 +10,7 @@ import (
 	"github.com/qri-io/dataset"
 	"github.com/qri-io/qri/base/component"
 	"github.com/qri-io/qri/dsref"
+	"github.com/qri-io/qri/event/hook"
 	"github.com/qri-io/qri/fsi/linkfile"
 	"github.com/qri-io/qri/logbook"
 	"github.com/qri-io/qri/repo"
@@ -39,7 +40,7 @@ var PrepareToWrite = func(comp component.Component) {
 }
 // InitDataset creates a new dataset
-func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
+func (fsi *FSI) InitDataset(p InitParams) (ref dsref.Ref, err error) {
 	// Create a rollback handler
 	rollback := func() {
 		log.Debug("did rollback InitDataset due to error")
@@ -52,16 +53,16 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 	}()
 	if !dsref.IsValidName(p.Name) {
-		return "", dsref.ErrDescribeValidName
+		return ref, dsref.ErrDescribeValidName
 	}
 	if p.Dir == "" {
-		return "", fmt.Errorf("directory is required to initialize a dataset")
+		return ref, fmt.Errorf("directory is required to initialize a dataset")
 	}
 	if fi, err := os.Stat(p.Dir); err != nil {
-		return "", err
+		return ref, err
 	} else if !fi.IsDir() {
-		return "", fmt.Errorf("invalid path to initialize. '%s' is not a directory", p.Dir)
+		return ref, fmt.Errorf("invalid path to initialize. '%s' is not a directory", p.Dir)
 	}
 	// Either use an existing directory, or create one at the given directory.
@@ -78,7 +79,7 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 			// Not an error if directory already exists
 			err = nil
 		} else {
-			return "", err
+			return ref, err
 		}
 	} else {
 		// If directory was successfully created, add a step to the rollback in case future
@@ -97,7 +98,7 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 	// Pass the sourceBodyPath, because it's okay if this file already exists, as long as its
 	// being used to create the body.
 	if err = fsi.CanInitDatasetWorkDir(targetPath, p.SourceBodyPath); err != nil {
-		return "", err
+		return ref, err
 	}
 	datasetRef := reporef.DatasetRef{Peername: "me", Name: p.Name}
@@ -106,7 +107,7 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 	// Make sure a dataset with this name does not exist in your repo.
 	if err = repo.CanonicalizeDatasetRef(fsi.repo, &datasetRef); err == nil {
 		// TODO(dlong): Tell user to use `checkout` if the dataset already exists in their repo?
-		return "", fmt.Errorf("a dataset with the name %s already exists in your repo", datasetRef)
+		return ref, fmt.Errorf("a dataset with the name %s already exists in your repo", datasetRef)
 	}
 	// Derive format from --source-body-path if provided.
@@ -119,13 +120,13 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 	// Validate dataset format
 	if p.Format != "csv" && p.Format != "json" {
-		return "", fmt.Errorf(`invalid format "%s", only "csv" and "json" accepted`, p.Format)
+		return ref, fmt.Errorf(`invalid format "%s", only "csv" and "json" accepted`, p.Format)
 	}
 	// Create the link file, containing the dataset reference.
 	var undo func()
-	if refstr, undo, err = fsi.CreateLink(targetPath, datasetRef.AliasString()); err != nil {
-		return refstr, err
+	if _, undo, err = fsi.CreateLink(targetPath, datasetRef.AliasString()); err != nil {
+		return ref, err
 	}
 	// If future steps fail, rollback the link creation.
 	rollback = concatFunc(undo, rollback)
@@ -143,14 +144,14 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 		// Create structure by detecting it from the body.
 		file, err := os.Open(p.SourceBodyPath)
 		if err != nil {
-			return "", err
+			return ref, err
 		}
 		defer file.Close()
 		// TODO(dlong): This should move into `dsio` package.
 		entries, err := component.OpenEntryReader(file, p.Format)
 		if err != nil {
 			log.Errorf("opening entry reader: %s", err)
-			return "", err
+			return ref, err
 		}
 		initDs.Structure = entries.Structure()
 	} else if p.Format == "csv" {
@@ -184,7 +185,7 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 	wroteFile, err := aComp.WriteTo(targetPath)
 	if err != nil {
 		log.Errorf("writing component file %s: %s", compName, err)
-		return "", err
+		return ref, err
 	}
 	// If future steps fail, rollback the components that have been written
 	rollback = concatFunc(func() {
@@ -202,18 +203,28 @@ func (fsi *FSI) InitDataset(p InitParams) (refstr string, err error) {
 	if err != nil {
 		if err == logbook.ErrNoLogbook {
 			rollback = func() {}
-			return refstr, nil
+			return ref, nil
 		}
-		return refstr, err
+		return ref, err
 	}
-	// TODO(dustmop): Add initID to dsref.Ref, change the return value of this function to be
-	// a dsref.Ref instead.
-	_ = initID
+	if fsi.onChangeHook != nil {
+		fsi.onChangeHook(hook.DsChange{
+			Type:   hook.DatasetCreateLink,
+			InitID: initID,
+			Dir:    p.Dir,
+		})
+	}
+
+	ref = dsref.Ref{
+		InitID:   initID,
+		Username: datasetRef.Peername,
+		Name:     datasetRef.Name,
+	}
 	// Success, no need to rollback.
 	rollback = nil
-	return refstr, nil
+	return ref, nil
 }
 // CanInitDatasetWorkDir returns nil if the directory can init a dataset, or an error if not
diff --git a/lib/diff_test.go b/lib/diff_test.go
index 888c180b6..bb21dd670 100644
--- a/lib/diff_test.go
+++ b/lib/diff_test.go
@@ -46,13 +46,13 @@ func TestDatasetRequestsDiff(t *testing.T) {
 		{"two fully qualified references",
 			dsRef1.String(), dsRef2.String(),
 			"",
-			&DiffStat{Left: 40, Right: 41, LeftWeight: 2724, RightWeight: 2682, Inserts: 8, Updates: 0, Deletes: 8},
+			&DiffStat{Left: 40, Right: 41, LeftWeight: 2754, RightWeight: 2712, Inserts: 9, Updates: 0, Deletes: 9},
 			8,
 		},
 		{"fill left path from history",
 			dsRef2.AliasString(), dsRef2.AliasString(),
 			"",
-			&DiffStat{Left: 40, Right: 41, LeftWeight: 2724, RightWeight: 2682, Inserts: 8, Updates: 0, Deletes: 8},
+			&DiffStat{Left: 40, Right: 41, LeftWeight: 2754, RightWeight: 2712, Inserts: 9, Updates: 0, Deletes: 9},
 			8,
 		},
 		{"two local file paths",
@@ -180,9 +180,9 @@ func TestDiffPrevRevision(t *testing.T) {
 	defer run.Delete()
 	// Save three versions, then diff the last head against its previous version
-	run.SaveDatasetFromBody(t, "test_cities", "testdata/cities_2/body.csv")
-	run.SaveDatasetFromBody(t, "test_cities", "testdata/cities_2/body_more.csv")
-	run.SaveDatasetFromBody(t, "test_cities", "testdata/cities_2/body_even_more.csv")
+	run.MustSaveFromBody(t, "test_cities", "testdata/cities_2/body.csv")
+	run.MustSaveFromBody(t, "test_cities", "testdata/cities_2/body_more.csv")
+	run.MustSaveFromBody(t, "test_cities", "testdata/cities_2/body_even_more.csv")
 	output, err := run.Diff("me/test_cities", "", "body")
 	if err != nil {
@@ -204,10 +204,10 @@ func TestDiff(t *testing.T) {
 	defer run.Delete()
 	// Save a dataset with one version
-	run.SaveDatasetFromBody(t, "test_cities", "testdata/cities_2/body.csv")
+	run.MustSaveFromBody(t, "test_cities", "testdata/cities_2/body.csv")
 	// Save a different dataset with one version
-	run.SaveDatasetFromBody(t, "test_more", "testdata/cities_2/body_more.csv")
+	run.MustSaveFromBody(t, "test_more", "testdata/cities_2/body_more.csv")
 	// Diff the heads
 	output, err := run.Diff("me/test_cities", "me/test_more", "")
@@ -219,7 +219,7 @@ func TestDiff(t *testing.T) {
 	// compared with cmp.Diff.
 	// TODO(dustmop): Would be better if Diff only returned the changes, instead of things that
 	// stay the same, since the delta in this case is pretty small.
-	expect := `{"stat":{"leftNodes":40,"rightNodes":40,"leftWeight":2447,"rightWeight":2477,"inserts":9,"deletes":9},"diff":[["-","bodyPath","/map/Qmc7AoCfFVW5xe8qhyjNYewSgBHFubp6yLM3mfBzQp7iTr"],["+","bodyPath","/map/QmYuVj1JvALB9Au5DNcVxGLMcWCDBUfbKCN3QbpvissSC4"],[" ","commit",null,[[" ","author",{"id":"QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt"}],["-","message","created dataset from body.csv"],["+","message","created dataset from body_more.csv"],["-","path","/map/Qmdn6131L4BhNPcfsobZdwzoucKGvwcQiH6bn9LSs8Tiq3"],["+","path","/map/Qmd1EtMQTYe5HRNZbfcgfp8rAos8eRuQQXQ8VsyZfRBEGN"],[" ","qri","cm:0"],["-","signature","i2h0nvSCjsXu5iTter4l/Ax3dVC3yvFR1Nff1VkSsI55jAvglOiN4Zr+n7vIe2dvyiWJv0TyTE2K8ZXKNY3lBYeA7P+mS36IxBORxk4FSBuxbuTjRZkEPp+wpCXehO+lEKAlIwzAnKgPtocRr1aVKiVU8osB+FULRSK2A4obscpVmZ00z1E4t7wSvZi/FnvSeNwh3gIrgsbuqwmRArqvyudGAiFzf75Gs7sQEVO/q6oHDld7cMhBvqTChLADOELbM5AlvlvGdOH3XAdzUF/XbIqnMKFZAxBlq/c2lyit4oCUV9+Yy1j8K5B/PhYBWmFjcUwX9vTatjcdGFbAkMb0IQ=="],["+","signature","mylW8R29Bu3fm9KoF0ElW7f/Jm9Lm7EYcyqwKvVWcwcxk2vFJjgOuuK8fSLgfYn8j+nT0hFRz/fimF5YF8nw333GXXXJUH+83hZUKWh9biTdDIRkldZ1wS15ZP821VXRLmGInK8xsJt1xyl7IleNe2s4dcNo4oSWO6LoW1DyVsy4aB9iAaUpJGXgQAiFumLYeCJtU/O7muP6h/JTQEw5O5QRV7ctZAc1rQCjTJ+zDQAy2Kz31nh38xYatoLPjK3WWn1CAa7+9O0cROtbXPcGiVYx+XIxIefpSgr9uGaVCJSRuabfN5j8MGl5x6HR7XjaRFW8APd4wNAhF9GKkjb1gw=="],[" ","timestamp","0001-01-01T00:00:00Z"],["-","title","created dataset from body.csv"],["+","title","created dataset from body_more.csv"]]],["-","path","/map/QmVwjEtrevyHb5JDcsE6Fm9kFJ6D6ewZEAQY4wx52WASzP"],["+","path","/map/QmbpvARqbLEMzuUi5VPyoWPRAFB2CRFYhVCHaEk3BzbYEZ"],[" ","qri","ds:0"],[" ","structure",null,[["-","checksum","Qmc7AoCfFVW5xe8qhyjNYewSgBHFubp6yLM3mfBzQp7iTr"],["+","checksum","QmYuVj1JvALB9Au5DNcVxGLMcWCDBUfbKCN3QbpvissSC4"],[" ","depth",2],["-","entries",5],["+","entries",7],[" ","format","csv"],[" ","formatConfig",{"headerRow":true,"lazyQuotes":true}],["-","length",155],["+","length",217],[" ","qri","st:0"],[" ","schema",{"items":{"items":[{"title":"city","type":"string"},{"title":"pop","type":"integer"},{"title":"avg_age","type":"number"},{"title":"in_usa","type":"boolean"}],"type":"array"},"type":"array"}]]]]}`
+	expect := `{"stat":{"leftNodes":40,"rightNodes":40,"leftWeight":2477,"rightWeight":2507,"inserts":10,"deletes":10},"diff":[["-","bodyPath","/map/Qmc7AoCfFVW5xe8qhyjNYewSgBHFubp6yLM3mfBzQp7iTr"],["+","bodyPath","/map/QmYuVj1JvALB9Au5DNcVxGLMcWCDBUfbKCN3QbpvissSC4"],[" ","commit",null,[[" ","author",{"id":"QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt"}],["-","message","created dataset from body.csv"],["+","message","created dataset from body_more.csv"],["-","path","/map/QmdPogFjnw6sSyeCZ72waNmEtggG3YgJwDL9n8qktD4f3d"],["+","path","/map/QmRGWDwpR9TN4dRiB9AUEJURb6t8ZET1EuHryrj9FGo7aa"],[" ","qri","cm:0"],["-","signature","lWv0B2quffDZ/PuyH7Mddn0LILORrklUTuV7EtAIN7GYLkN9ES2oxpmIlLVznX44zAGJ3K9NEQ73y+B8grCl52goteTsoGdYwu7e0X6mY4fVH7+1277AOd8172OZI5aS0u8o6/8C/1H9XwJRbKYSaUD1Klffmq3FTcP7yRB3au4WumpN6GjEd9Dgk4pao5iWmw+OpEUf7X9Dk62ZKgk4ohjAMaMgYydi6CxhV9+QnC32QKxQaPZ/yF46Lj4w20DvfONZmoUd4l3ZL+Z4V29o6OGWZ02SAzhAYMf1meJ+Qzy5UHTXiFuHk+wSAGTH9C9Jzr+SV3Qg2uzHg8PK5DOCJg=="],["+","signature","j7MHjITIGpQmlg+4YxISYJ/hxOqfTLvrXiR8ZRQWJF/QhXNWOwalbGXIvv83sMNgTR1qHBif+8D/Y8YS76zaLK7qXKz39AjpCNfEoqZXqkyd8lp1OBNBiuXVYCCPLh2w0tpV0kOMwIZPmKjDRNQwwXxZe3QsHMvFPblEUiivUzYzm1yNmXZagsyD08ZFAG9PyV4O2iMVhNr/u6RuUEi/CRyptUJPYT5BQgSBe3M8Lt2qDjFyFrcblrgNmoHjV+80ZtR5ucUSLbbcWV0uXoqm1CGp/AAZQBDgAmsGxs8j9E1cH7D73iq2Vnw2lrqvPWFjoo6ij6Ve6ylBT4HRk47koQ=="],["-","timestamp","2001-01-01T01:01:01.000000001Z"],["+","timestamp","2001-01-01T01:02:01.000000001Z"],["-","title","created dataset from body.csv"],["+","title","created dataset from body_more.csv"]]],["-","path","/map/QmPd5jh8ZTFwgbpHaNSu6u2277BKWaiBsqX4uFw7rGWNLu"],["+","path","/map/QmRhY6vfQzGpU64A1RUaUj4CVWaHE2FDskNF9sNDn6MRH9"],[" ","qri","ds:0"],[" ","structure",null,[["-","checksum","Qmc7AoCfFVW5xe8qhyjNYewSgBHFubp6yLM3mfBzQp7iTr"],["+","checksum","QmYuVj1JvALB9Au5DNcVxGLMcWCDBUfbKCN3QbpvissSC4"],[" ","depth",2],["-","entries",5],["+","entries",7],[" ","format","csv"],[" ","formatConfig",{"headerRow":true,"lazyQuotes":true}],["-","length",155],["+","length",217],[" ","qri","st:0"],[" ","schema",{"items":{"items":[{"title":"city","type":"string"},{"title":"pop","type":"integer"},{"title":"avg_age","type":"number"},{"title":"in_usa","type":"boolean"}],"type":"array"},"type":"array"}]]]]}`
 	if diff := cmp.Diff(expect, output); diff != "" {
 		t.Errorf("output mismatch (-want +got):\n%s", diff)
 	}
@@ -241,7 +241,7 @@ func TestDiffOnlyOneRevision(t *testing.T) {
 	run := newTestRunner(t)
 	defer run.Delete()
-	run.SaveDatasetFromBody(t, "test_cities", "testdata/cities_2/body.csv")
+	run.MustSaveFromBody(t, "test_cities", "testdata/cities_2/body.csv")
 	_, err := run.Diff("me/test_cities", "", "body")
 	if err == nil {
 		t.Fatal("expected error, did not get one")
 	}
@@ -287,10 +287,10 @@ func TestDiffErrors(t *testing.T) {
 	defer run.Delete()
 	// Save a dataset with one version
-	run.SaveDatasetFromBody(t, "test_cities", "testdata/cities_2/body.csv")
+	run.MustSaveFromBody(t, "test_cities", "testdata/cities_2/body.csv")
 	// Save a different dataset with one version
-	run.SaveDatasetFromBody(t, "test_more", "testdata/cities_2/body_more.csv")
+	run.MustSaveFromBody(t, "test_more", "testdata/cities_2/body_more.csv")
 	// Error to compare a dataset ref to a file
 	_, err := run.Diff("me/test_cities", "testdata/cities_2/body_even_more.csv", "")
diff --git a/lib/fsi.go b/lib/fsi.go
index 2d47507a0..4b928d2f2 100644
--- a/lib/fsi.go
+++ b/lib/fsi.go
@@ -159,10 +159,12 @@ func (m *FSIMethods) Checkout(p *CheckoutParams, out *string) (err error) {
 	}
 	ctx := context.TODO()
-	log.Debugf("Checkout started, stat'ing %q", p.Dir)
+	// Require a non-empty, absolute path for the checkout
+	if p.Dir == "" || !filepath.IsAbs(p.Dir) {
+		return fmt.Errorf("need Dir to be a non-empty, absolute path")
+	}
-	// TODO(dlong): Fail if Dir is "", should be required to specify a location. Should probably
-	// only allow absolute paths. Add tests.
+	log.Debugf("Checkout started, stat'ing %q", p.Dir)
 	// If directory exists, error.
 	if _, err = os.Stat(p.Dir); !os.IsNotExist(err) {
@@ -359,11 +361,18 @@ func (m *FSIMethods) InitDataset(p *InitFSIDatasetParams, refstr *string) (err e
 	// If the dscache doesn't exist yet, it will only be created if the appropriate flag enables it.
 	if p.UseDscache {
+		// TODO(dustmop): Dscache exists on both repo and instance, and tests and production code
+		// use these fields differently. It should be removed from repo and only be used from
+		// the instance.
 		c := m.inst.Repo().Dscache()
+		if c == nil {
+			c = m.inst.Dscache()
+		}
 		c.CreateNewEnabled = true
 	}
-	*refstr, err = m.inst.fsi.InitDataset(*p)
+	ref, err := m.inst.fsi.InitDataset(*p)
+	*refstr = ref.Human()
 	return err
 }
diff --git a/lib/fsi_test.go b/lib/fsi_test.go
index ec71c1c41..81349c10c 100644
--- a/lib/fsi_test.go
+++ b/lib/fsi_test.go
@@ -137,3 +137,142 @@ func TestFSIMethodsWrite(t *testing.T) {
 		})
 	}
 }
+
+// Test that checkout requires a valid directory
+func TestCheckoutInvalidDirs(t *testing.T) {
+	run := newTestRunner(t)
+	defer run.Delete()
+
+	// Save a dataset with one version.
+	run.MustSaveFromBody(t, "movie_ds", "testdata/cities_2/body.csv")
+
+	run.ChdirToRoot()
+
+	// Checkout fails with a blank directory
+	err := run.Checkout("me/movie_ds", "")
+	if err == nil {
+		t.Fatal("expected error from checkout, did not get one")
+	}
+	expectErr := `need Dir to be a non-empty, absolute path`
+	if diff := cmp.Diff(expectErr, err.Error()); diff != "" {
+		t.Errorf("error mismatch (-want +got):\n%s", diff)
+	}
+
+	// Checkout fails with a relative directory
+	err = run.Checkout("me/movie_ds", "relative/dir/")
+	if err == nil {
+		t.Fatal("expected error from checkout, did not get one")
+	}
+	expectErr = `need Dir to be a non-empty, absolute path`
+	if diff := cmp.Diff(expectErr, err.Error()); diff != "" {
+		t.Errorf("error mismatch (-want +got):\n%s", diff)
+	}
+
+	// Checkout with an absolute path succeeds
+	checkoutPath := filepath.Join(run.TmpDir, "movie_ds")
+	err = run.Checkout("me/movie_ds", checkoutPath)
+	if err != nil {
+		t.Errorf("checkout err: %s", err)
+	}
+}
+
+// Test that FSI checkout modifies dscache if it exists
+func TestDscacheCheckout(t *testing.T) {
+	run := newTestRunner(t)
+	defer run.Delete()
+
+	// Save a dataset with one version.
+	_, err := run.SaveWithParams(&SaveParams{
+		Ref:        "me/cities_ds",
+		BodyPath:   "testdata/cities_2/body.csv",
+		UseDscache: true,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	run.ChdirToRoot()
+
+	// Checkout the dataset, which should update the dscache
+	checkoutPath := filepath.Join(run.TmpDir, "cities_ds")
+	run.Checkout("me/cities_ds", checkoutPath)
+
+	// Access the dscache
+	cache := run.Instance.Dscache()
+
+	// Dscache should have one entry, with an fsiPath set
+	actual := run.NiceifyTempDirs(cache.VerboseString(false))
+	expect := `Dscache:
+ Dscache.Users:
+  0) user=peer profileID=QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt
+ Dscache.Refs:
+  0) initID = vrh4iurbzeyx42trlddzvtoiqevmy2d3mxex4ojd4mxv7cudhlwq
+     profileID = QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt
+     topIndex = 1
+     cursorIndex = 1
+     prettyName = cities_ds
+     bodySize = 155
+     bodyRows = 5
+     commitTime = 978310861
+     headRef = /map/QmPd5jh8ZTFwgbpHaNSu6u2277BKWaiBsqX4uFw7rGWNLu
+     fsiPath = /tmp/cities_ds
+`
+	if diff := cmp.Diff(expect, actual); diff != "" {
+		t.Errorf("result mismatch (-want +got):%s\n", diff)
+	}
+}
+
+// Test that FSI init modifies dscache if it exists
+func TestDscacheInit(t *testing.T) {
+	run := newTestRunner(t)
+	defer run.Delete()
+
+	// Save a dataset with one version.
+	_, err := run.SaveWithParams(&SaveParams{
+		Ref:        "me/cities_ds",
+		BodyPath:   "testdata/cities_2/body.csv",
+		UseDscache: true,
+	})
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	workDir := run.CreateAndChdirToWorkDir("json_body")
+	_ = workDir
+
+	// Init a new dataset, which should update the dscache
+	err = run.Init("me/new_ds", "csv")
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	// Access the dscache
+	cache := run.Instance.Dscache()
+
+	// Dscache should have two entries, one has a version, the other has an fsiPath
+	actual := run.NiceifyTempDirs(cache.VerboseString(false))
+	expect := `Dscache:
+ Dscache.Users:
+  0) user=peer profileID=QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt
+ Dscache.Refs:
+  0) initID = vrh4iurbzeyx42trlddzvtoiqevmy2d3mxex4ojd4mxv7cudhlwq
+     profileID = QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt
+     topIndex = 1
+     cursorIndex = 1
+     prettyName = cities_ds
+     bodySize = 155
+     bodyRows = 5
+     commitTime = 978310861
+     headRef = /map/QmPd5jh8ZTFwgbpHaNSu6u2277BKWaiBsqX4uFw7rGWNLu
+  1) initID = ekwzgcu4s4o4xchsoip3oa3j45ko5n7pybtizgvsbudojbhxuita
+     profileID = QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt
+     topIndex = 0
+     cursorIndex = 0
+     prettyName = new_ds
+     commitTime = -62135596800
+     fsiPath = /tmp/json_body
+`
+	if diff := cmp.Diff(expect, actual); diff != "" {
+		t.Errorf("result mismatch (-want +got):%s\n", diff)
+	}
+}
diff --git a/lib/lib.go b/lib/lib.go
index 00172ed97..89066c3c4 100644
--- a/lib/lib.go
+++ b/lib/lib.go
@@ -25,6 +25,7 @@ import (
 	"github.com/qri-io/qri/dscache"
 	qrierr "github.com/qri-io/qri/errors"
 	"github.com/qri-io/qri/event"
+	"github.com/qri-io/qri/event/hook"
 	"github.com/qri-io/qri/fsi"
 	"github.com/qri-io/qri/fsi/hiddenfile"
 	"github.com/qri-io/qri/logbook"
@@ -357,17 +358,15 @@ func NewInstance(ctx context.Context, repoPath string, opts ...Option) (qri *Ins
 		}
 	}
-	if inst.logbook == nil {
-		inst.logbook, err = newLogbook(inst.qfs, cfg, inst.repoPath)
-		if err != nil {
-			return nil, fmt.Errorf("newLogbook: %w", err)
-		}
+	var pro *profile.Profile
+	if pro, err = profile.NewProfile(cfg.Profile); err != nil {
+		return nil, fmt.Errorf("newProfile: %s", err)
 	}
-	if inst.dscache == nil {
-		inst.dscache, err = newDscache(ctx, inst.qfs, inst.logbook, cfg, inst.repoPath)
+	if inst.logbook == nil {
+		inst.logbook, err = newLogbook(inst.qfs, cfg, pro, inst.repoPath)
 		if err != nil {
-			return nil, fmt.Errorf("newDsache: %w", err)
+			return nil, fmt.Errorf("newLogbook: %w", err)
 		}
 	}
@@ -396,6 +395,13 @@ func NewInstance(ctx context.Context, repoPath string, opts ...Option) (qri *Ins
 		inst.fsi = fsi.NewFSI(inst.repo, inst.bus)
 	}
+	if inst.dscache == nil {
+		inst.dscache, err = newDscache(ctx, inst.qfs, []hook.ChangeNotifier{inst.logbook, inst.fsi}, pro.Peername, inst.repoPath)
+		if err != nil {
+			return nil, fmt.Errorf("newDsache: %w", err)
+		}
+	}
+
 	if inst.node == nil {
 		if inst.node, err = p2p.NewQriNode(inst.repo, cfg.P2P); err != nil {
 			log.Error("intializing p2p:", err.Error())
@@ -461,20 +467,14 @@ func newRegClient(ctx context.Context, cfg *config.Config) (rc *regclient.Client
 	return nil
 }
-func newLogbook(fs qfs.Filesystem, cfg *config.Config, repoPath string) (book *logbook.Book, err error) {
-	var pro *profile.Profile
-	if pro, err = profile.NewProfile(cfg.Profile); err != nil {
-		return
-	}
-
+func newLogbook(fs qfs.Filesystem, cfg *config.Config, pro *profile.Profile, repoPath string) (book *logbook.Book, err error) {
 	logbookPath := filepath.Join(repoPath, "logbook.qfb")
-
 	return logbook.NewJournal(pro.PrivKey, pro.Peername, fs, logbookPath)
 }
-func newDscache(ctx context.Context, fs qfs.Filesystem, book *logbook.Book, cfg *config.Config, repoPath string) (*dscache.Dscache, error) {
+func newDscache(ctx context.Context, fs qfs.Filesystem, hooks []hook.ChangeNotifier, username, repoPath string) (*dscache.Dscache, error) {
 	dscachePath := filepath.Join(repoPath, "dscache.qfb")
-	return dscache.NewDscache(ctx, fs, book, dscachePath), nil
+	return dscache.NewDscache(ctx, fs, hooks, username, dscachePath), nil
 }
 func newEventBus(ctx context.Context) event.Bus {
@@ -533,26 +533,36 @@ func newStats(repoPath string, cfg *config.Config) *stats.Stats {
 // and options that can be fed to NewInstance
 func NewInstanceFromConfigAndNode(cfg *config.Config, node *p2p.QriNode) *Instance {
 	ctx, teardown := context.WithCancel(context.Background())
+
+	r := node.Repo
+	pro, err := r.Profile()
+	if err != nil {
+		panic(err)
+	}
+
+	bus := event.NewBus(ctx)
+	fsint := fsi.NewFSI(r, bus)
+	dc := dscache.NewDscache(ctx, r.Filesystem(), []hook.ChangeNotifier{r.Logbook(), fsint}, pro.Peername, "")
+
 	inst := &Instance{
 		ctx:      ctx,
 		teardown: teardown,
 		cfg:      cfg,
 		node:     node,
+		dscache:  dc,
 		stats:    stats.New(nil),
 	}
-	var err error
 	inst.remoteClient, err = remote.NewClient(node)
 	if err != nil {
 		panic(err)
 	}
-	if node != nil && node.Repo != nil {
-		inst.repo = node.Repo
-		inst.store = node.Repo.Store()
-		inst.qfs = node.Repo.Filesystem()
-		inst.bus = event.NewBus(ctx)
-		inst.fsi = fsi.NewFSI(inst.repo, inst.bus)
+	if node != nil && r != nil {
+		inst.repo = r
+		inst.store = r.Store()
+		inst.qfs = r.Filesystem()
+		inst.bus = bus
+		inst.fsi = fsint
 	}
 	return inst
@@ -673,6 +683,14 @@ func (inst *Instance) RepoPath() string {
 	return inst.repoPath
 }
+// Dscache returns the dscache that the instance has
+func (inst *Instance) Dscache() *dscache.Dscache {
+	if inst == nil {
+		return nil
+	}
+	return inst.dscache
+}
+
 // RPC accesses the instance RPC client if one exists
 func (inst *Instance) RPC() *rpc.Client {
 	if inst == nil {
diff --git a/lib/test_runner_test.go b/lib/test_runner_test.go
index da73018bf..22919002e 100644
--- a/lib/test_runner_test.go
+++ b/lib/test_runner_test.go
@@ -7,12 +7,14 @@ import (
 	"io/ioutil"
 	"os"
 	"path/filepath"
+	"strings"
 	"testing"
 	"time"
 	"github.com/qri-io/qri/base/dsfs"
 	"github.com/qri-io/qri/config"
 	"github.com/qri-io/qri/dsref"
+	"github.com/qri-io/qri/logbook"
 	"github.com/qri-io/qri/p2p"
 	"github.com/qri-io/qri/repo/profile"
 	reporef "github.com/qri-io/qri/repo/ref"
@@ -23,20 +25,40 @@ type testRunner struct {
 	Ctx      context.Context
 	Profile  *profile.Profile
 	Instance *Instance
+	Pwd      string
 	TmpDir   string
-	PrevTs   func() time.Time
+	WorkDir  string
+	dsfsTs   func() time.Time
+	bookTs   func() int64
 }
 func newTestRunner(t *testing.T) *testRunner {
-	prevTs := dsfs.Timestamp
-	dsfs.Timestamp = func() time.Time { return time.Time{} }
+	dsfsCounter := 0
+	dsfsTsFunc := dsfs.Timestamp
+	dsfs.Timestamp = func() time.Time {
+		dsfsCounter++
+		return time.Date(2001, 01, 01, 01, dsfsCounter, 01, 01, time.UTC)
+	}
+
+	bookCounter := 0
+	bookTsFunc := logbook.NewTimestamp
+	logbook.NewTimestamp = func() int64 {
+		bookCounter++
+		return time.Date(2001, 01, 01, 01, bookCounter, 01, 01, time.UTC).Unix()
+	}
+
+	pwd, err := os.Getwd()
+	if err != nil {
+		t.Fatal(err)
+	}
+
 	// A temporary directory for doing filesystem work.
 	tmpDir, err := ioutil.TempDir("", "lib_test_runner")
 	if err != nil {
 		t.Fatal(err)
 	}
-	mr, err := testrepo.NewTestRepo()
+	mr, err := testrepo.NewEmptyTestRepo()
 	if err != nil {
 		t.Fatalf("error allocating test repo: %s", err.Error())
 	}
@@ -51,12 +73,16 @@ func newTestRunner(t *testing.T) *testRunner {
 		Profile:  testPeerProfile,
 		Instance: NewInstanceFromConfigAndNode(config.DefaultConfigForTesting(), node),
 		TmpDir:   tmpDir,
-		PrevTs:   prevTs,
+		Pwd:      pwd,
+		dsfsTs:   dsfsTsFunc,
+		bookTs:   bookTsFunc,
 	}
 }
 func (tr *testRunner) Delete() {
-	dsfs.Timestamp = tr.PrevTs
+	dsfs.Timestamp = tr.dsfsTs
+	logbook.NewTimestamp = tr.bookTs
+	os.Chdir(tr.Pwd)
 	os.RemoveAll(tr.TmpDir)
 }
@@ -76,7 +102,38 @@ func (tr *testRunner) MakeTmpFilename(filename string) (path string) {
 	return filepath.Join(tr.TmpDir, filename)
 }
-func (tr *testRunner) SaveDatasetFromBody(t *testing.T, dsName, bodyFilename string) dsref.Ref {
+func (tr *testRunner) NiceifyTempDirs(text string) string {
+	// Replace the temporary directory
+	text = strings.Replace(text, tr.TmpDir, "/tmp", -1)
+	// Replace that same directory with symlinks resolved
+	realTmp, err := filepath.EvalSymlinks(tr.TmpDir)
+	if err == nil {
+		text = strings.Replace(text, realTmp, "/tmp", -1)
+	}
+	return text
+}
+
+func (tr *testRunner) ChdirToRoot() {
+	os.Chdir(tr.TmpDir)
+}
+
+func (tr *testRunner) CreateAndChdirToWorkDir(subdir string) string {
+	tr.WorkDir = filepath.Join(tr.TmpDir, subdir)
+	err := os.Mkdir(tr.WorkDir, 0755)
+	if err != nil {
+		panic(err)
+	}
+	err = os.Chdir(tr.WorkDir)
+	if err != nil {
+		panic(err)
+	}
+	return tr.WorkDir
+}
+
+func (tr *testRunner) MustSaveFromBody(t *testing.T, dsName, bodyFilename string) dsref.Ref {
+	if !dsref.IsValidName(dsName) {
+		t.Fatalf("invalid dataset name: %q", dsName)
+	}
 	m := NewDatasetMethods(tr.Instance)
 	p := SaveParams{
 		Ref:      fmt.Sprintf("peer/%s", dsName),
@@ -89,6 +146,15 @@ func (tr *testRunner) SaveDatasetFromBody(t *testing.T, dsName, bodyFilename str
 	return reporef.ConvertToDsref(r)
 }
+func (tr *testRunner) SaveWithParams(p *SaveParams) (dsref.Ref, error) {
+	m := NewDatasetMethods(tr.Instance)
+	r := reporef.DatasetRef{}
+	if err := m.Save(p, &r); err != nil {
+		return dsref.Ref{}, err
+	}
+	return reporef.ConvertToDsref(r), nil
+}
+
 func (tr *testRunner) Diff(left, right, selector string) (string, error) {
 	m := NewDatasetMethods(tr.Instance)
 	p := DiffParams{
@@ -122,3 +188,28 @@ func (tr *testRunner) DiffWithParams(p *DiffParams) (string, error) {
 	}
 	return string(data), nil
 }
+
+func (tr *testRunner) Init(refstr, format string) error {
+	ref, err := dsref.Parse(refstr)
+	if err != nil {
+		return err
+	}
+	m := NewFSIMethods(tr.Instance)
+	out := ""
+	p := InitFSIDatasetParams{
+		Name:   ref.Name,
+		Dir:    tr.WorkDir,
+		Format: format,
+	}
+	return m.InitDataset(&p, &out)
+}
+
+func (tr *testRunner) Checkout(refstr, dir string) error {
+	m := NewFSIMethods(tr.Instance)
+	out := ""
+	p := CheckoutParams{
+		Ref: refstr,
+		Dir: dir,
+	}
+	return m.Checkout(&p, &out)
+}
diff --git a/logbook/action.go b/logbook/action.go
deleted file mode 100644
index 9f491a654..000000000
--- a/logbook/action.go
+++ /dev/null
@@ -1,31 +0,0 @@
-package logbook
-
-import (
-	"github.com/qri-io/qri/dsref"
-)
-
-// ActionType is the type of action that a logbook just completed
-type ActionType byte
-
-const (
-	// ActionDatasetNameInit is an action that inits a dataset name
-	ActionDatasetNameInit ActionType = iota
-	// ActionDatasetCommitChange is an action for when a dataset changes its newest commit
-	ActionDatasetCommitChange
-	// ActionDatasetDeleteAll is an action for when a dataset is entirely deleted
-	ActionDatasetDeleteAll
-	// ActionDatasetRename is when a dataset is renamed
-	ActionDatasetRename
-)
-
-// Action represents the result of an action that logbook just completed
-type Action struct {
-	Type       ActionType
-	InitID     string
-	TopIndex   int
-	ProfileID  string
-	Username   string
-	PrettyName string
-	HeadRef    string
-	Info       *dsref.VersionInfo
-}
diff --git a/logbook/logbook.go b/logbook/logbook.go
index 2d3dd9406..3dd610d27 100644
--- a/logbook/logbook.go
+++ b/logbook/logbook.go
@@ -19,6 +19,7 @@ import (
 	"github.com/qri-io/dataset"
 	"github.com/qri-io/qfs"
 	"github.com/qri-io/qri/dsref"
+	"github.com/qri-io/qri/event/hook"
 	"github.com/qri-io/qri/identity"
 	"github.com/qri-io/qri/logbook/oplog"
 )
@@ -92,7 +93,7 @@ type Book struct {
 	fsLocation string
 	fs         qfs.Filesystem
-	listener func(*Action)
+	onChangeHook func(hook.DsChange)
 }
 // NewBook creates a book with a user-provided logstore
@@ -320,11 +321,11 @@ func (book *Book) WriteDatasetInit(ctx context.Context, dsName string) (string,
 	initID := dsLog.ID()
-	if book.listener != nil {
+	if book.onChangeHook != nil {
 		// TODO(dlong): Perhaps in the future, pass the authorID (hash of the author creation
 		// block) to the dscache, use that instead-of or in-addition-to the profileID.
-		book.listener(&Action{
-			Type:       ActionDatasetNameInit,
+		book.onChangeHook(hook.DsChange{
+			Type:       hook.DatasetNameInit,
 			InitID:     initID,
 			Username:   book.AuthorName(),
 			ProfileID:  profileID,
@@ -357,9 +358,9 @@ func (book *Book) WriteDatasetRename(ctx context.Context, initID string, newName
 		Name:      newName,
 		Timestamp: NewTimestamp(),
 	})
-	if book.listener != nil {
-		book.listener(&Action{
-			Type:       ActionDatasetRename,
+	if book.onChangeHook != nil {
+		book.onChangeHook(hook.DsChange{
+			Type:       hook.DatasetRename,
 			InitID:     initID,
 			PrettyName: newName,
 		})
@@ -445,9 +446,9 @@ func (book *Book) WriteDatasetDelete(ctx context.Context, initID string) error {
 		Model:     DatasetModel,
 		Timestamp: NewTimestamp(),
 	})
-	if book.listener != nil {
-		book.listener(&Action{
-			Type:   ActionDatasetDeleteAll,
+	if book.onChangeHook != nil {
+		book.onChangeHook(hook.DsChange{
+			Type:   hook.DatasetDeleteAll,
 			InitID: initID,
 		})
 	}
@@ -477,9 +478,9 @@ func (book *Book) WriteVersionSave(ctx context.Context, initID string, ds *datas
 	info := dsref.ConvertDatasetToVersionInfo(ds)
-	if book.listener != nil {
-		book.listener(&Action{
-			Type:     ActionDatasetCommitChange,
+	if book.onChangeHook != nil {
+		book.onChangeHook(hook.DsChange{
+			Type:     hook.DatasetCommitChange,
 			InitID:   initID,
 			TopIndex: topIndex,
 			HeadRef:  info.Path,
@@ -561,9 +562,9 @@ func (book *Book) WriteVersionDelete(ctx context.Context, initID string, revisio
 	if len(items) > 0 {
 		lastItem := items[len(items)-1]
-		if book.listener != nil {
-			book.listener(&Action{
-				Type:     ActionDatasetCommitChange,
+		if book.onChangeHook != nil {
+			book.onChangeHook(hook.DsChange{
+				Type:     hook.DatasetCommitChange,
 				InitID:   initID,
 				TopIndex: len(items),
 				HeadRef:  lastItem.Path,
@@ -624,9 +625,9 @@ func (book *Book) WriteUnpublish(ctx context.Context, initID string, revisions i
 	return book.save(ctx)
 }
-// Observe saves a function which listens for changes
-func (book *Book) Observe(listener func(*Action)) {
-	book.listener = listener
+// SetChangeHook assigns a hook that will be called when a dataset changes
+func (book *Book) SetChangeHook(changeHook func(hook.DsChange)) {
+	book.onChangeHook = changeHook
 }
 // ListAllLogs lists all of the logs in the logbook
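With logbook.Observe and logbook.Action gone, call sites that used to hand NewDscache a *logbook.Book now list every change source explicitly and pass the username that previously came from book.AuthorName(). Both *logbook.Book and *fsi.FSI satisfy hook.ChangeNotifier through their SetChangeHook methods. A sketch of the migration, assuming ctx, fs, book, fsint, pro and dscachePath are already in scope (the variable names are illustrative):

    // before:
    //   cache := dscache.NewDscache(ctx, fs, book, dscachePath)
    // after: register each notifier and pass the username explicitly
    notifiers := []hook.ChangeNotifier{book, fsint}
    cache := dscache.NewDscache(ctx, fs, notifiers, pro.Peername, dscachePath)

This is the shape used in lib.NewInstance and repo/buildrepo below; callers that have no FSI instance (such as NewMemRepo) simply pass the book alone.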
diff --git a/repo/buildrepo/build.go b/repo/buildrepo/build.go
index da96e822f..c552543ee 100644
--- a/repo/buildrepo/build.go
+++ b/repo/buildrepo/build.go
@@ -18,6 +18,7 @@ import (
 	"github.com/qri-io/qfs/localfs"
 	"github.com/qri-io/qri/config"
 	"github.com/qri-io/qri/dscache"
+	"github.com/qri-io/qri/event/hook"
 	"github.com/qri-io/qri/logbook"
 	"github.com/qri-io/qri/repo"
 	"github.com/qri-io/qri/repo/fs"
@@ -71,7 +72,7 @@ func New(ctx context.Context, path string, cfg *config.Config) (repo.Repo, error
 		return nil, err
 	}
-	cache, err := newDscache(ctx, fs, book, path)
+	cache, err := newDscache(ctx, fs, book, pro.Peername, path)
 	if err != nil {
 		return nil, err
 	}
@@ -153,11 +154,11 @@ func newLogbook(fs qfs.Filesystem, pro *profile.Profile, repoPath string) (book
 	return logbook.NewJournal(pro.PrivKey, pro.Peername, fs, logbookPath)
 }
-func newDscache(ctx context.Context, fs qfs.Filesystem, book *logbook.Book, repoPath string) (*dscache.Dscache, error) {
+func newDscache(ctx context.Context, fs qfs.Filesystem, book *logbook.Book, username, repoPath string) (*dscache.Dscache, error) {
 	// This seems to be a bug, the repoPath does not end in "qri" in some tests.
 	if !strings.HasSuffix(repoPath, "qri") {
 		return nil, fmt.Errorf("invalid repo path")
 	}
 	dscachePath := filepath.Join(repoPath, "dscache.qfb")
-	return dscache.NewDscache(ctx, fs, book, dscachePath), nil
+	return dscache.NewDscache(ctx, fs, []hook.ChangeNotifier{book}, username, dscachePath), nil
 }
diff --git a/repo/fs/fs_test.go b/repo/fs/fs_test.go
index 08349000c..83809d7e5 100644
--- a/repo/fs/fs_test.go
+++ b/repo/fs/fs_test.go
@@ -36,7 +36,7 @@ func TestRepo(t *testing.T) {
 	}
 	ctx := context.Background()
-	cache := dscache.NewDscache(ctx, fs, nil, "")
+	cache := dscache.NewDscache(ctx, fs, nil, pro.Peername, "")
 	store := cafs.NewMapstore()
 	r, err := NewRepo(store, fs, book, cache, pro, path)
diff --git a/repo/mem_repo.go b/repo/mem_repo.go
index 30d50e101..850b6ab95 100644
--- a/repo/mem_repo.go
+++ b/repo/mem_repo.go
@@ -8,6 +8,7 @@ import (
 	"github.com/qri-io/qfs"
 	"github.com/qri-io/qfs/cafs"
 	"github.com/qri-io/qri/dscache"
+	"github.com/qri-io/qri/event/hook"
 	"github.com/qri-io/qri/logbook"
 	"github.com/qri-io/qri/repo/profile"
 )
@@ -36,13 +37,18 @@ func NewMemRepo(p *profile.Profile, store cafs.Filestore, fsys qfs.Filesystem, p
 		return nil, err
 	}
 	ctx := context.Background()
+
+	// NOTE: This dscache won't get change notifications from FSI, because it's not constructed
+	// with the hook for FSI.
+	cache := dscache.NewDscache(ctx, fsys, []hook.ChangeNotifier{book}, p.Peername, "")
+
 	return &MemRepo{
 		store:       store,
 		filesystem:  fsys,
 		MemRefstore: &MemRefstore{},
 		refCache:    &MemRefstore{},
 		logbook:     book,
-		dscache:     dscache.NewDscache(ctx, fsys, book, ""),
+		dscache:     cache,
 		profile:     p,
 		profiles:    ps,
 	}, nil
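On the receiving side a hook is simply a func(hook.DsChange); the dscache registers its own update method, but the same mechanism could drive any observer. Note that each notifier stores a single hook, so attaching a custom observer to a Book or FSI that is already wired to a dscache would displace the dscache's subscription. A hypothetical logging observer, assuming book and fsint are in scope, to show the shape of the dispatch:

    logChange := func(change hook.DsChange) {
    	switch change.Type {
    	case hook.DatasetNameInit:
    		fmt.Printf("init %q (initID %s)\n", change.PrettyName, change.InitID)
    	case hook.DatasetCommitChange:
    		fmt.Printf("new head %s at index %d\n", change.HeadRef, change.TopIndex)
    	case hook.DatasetDeleteAll:
    		fmt.Printf("deleted %s\n", change.InitID)
    	case hook.DatasetRename:
    		fmt.Printf("renamed %s to %q\n", change.InitID, change.PrettyName)
    	case hook.DatasetCreateLink:
    		fmt.Printf("linked %s to %s\n", change.InitID, change.Dir)
    	}
    }
    book.SetChangeHook(logChange)
    fsint.SetChangeHook(logChange)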