diff --git a/core/datasets.go b/core/datasets.go
index f622fae77..5332da947 100644
--- a/core/datasets.go
+++ b/core/datasets.go
@@ -23,6 +23,7 @@ import (
 	"github.com/qri-io/jsonschema"
 	"github.com/qri-io/qri/p2p"
 	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/actions"
 	"github.com/qri-io/qri/repo/profile"
 	"github.com/qri-io/varName"
 )
@@ -30,7 +31,7 @@ import (
 // DatasetRequests encapsulates business logic for this node's
 // user profile
 type DatasetRequests struct {
-	repo repo.Repo
+	repo actions.Dataset
 	cli  *rpc.Client
 	Node *p2p.QriNode
 }
@@ -53,7 +54,7 @@ func NewDatasetRequests(r repo.Repo, cli *rpc.Client) *DatasetRequests {
 	}
 
 	return &DatasetRequests{
-		repo: r,
+		repo: actions.Dataset{r},
 		cli:  cli,
 	}
 }
diff --git a/core/datasets_test.go b/core/datasets_test.go
index 77306d96f..5b908ba49 100644
--- a/core/datasets_test.go
+++ b/core/datasets_test.go
@@ -13,6 +13,7 @@ import (
 	"github.com/qri-io/dsdiff"
 	"github.com/qri-io/jsonschema"
 	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/actions"
 	testrepo "github.com/qri-io/qri/repo/test"
 )
@@ -456,17 +457,14 @@ func TestDataRequestsDiff(t *testing.T) {
 		t.Errorf("couldn't load file 1: %s", err.Error())
 		return
 	}
-	if err := mr.ReadDataset(dsRef1); err != nil {
+	act := actions.Dataset{mr}
+	if err := act.ReadDataset(dsRef1); err != nil {
 		t.Errorf("error reading dataset 1: %s", err.Error())
 		return
 	}
 	dsBase := dsRef1.Dataset
-	// dsBase, err := dsfs.LoadDataset(mr.Store(), datastore.NewKey(dsRef1.Path))
-	// if err != nil {
-	// 	return
-	// }
 
 	// File 2
 	dsRef2 := &repo.DatasetRef{}
 	initParams = &InitParams{
diff --git a/core/history.go b/core/history.go
index a2c8c0a43..f09fdaa28 100644
--- a/core/history.go
+++ b/core/history.go
@@ -6,12 +6,13 @@ import (
 
 	"github.com/qri-io/qri/p2p"
 	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/actions"
 )
 
 // HistoryRequests encapsulates business logic for the log
 // of changes to datasets, think "git log"
 type HistoryRequests struct {
-	repo repo.Repo
+	repo actions.Dataset
 	cli  *rpc.Client
 	Node *p2p.QriNode
 }
@@ -26,7 +27,7 @@ func NewHistoryRequests(r repo.Repo, cli *rpc.Client) *HistoryRequests {
 		panic(fmt.Errorf("both repo and client supplied to NewHistoryRequests"))
 	}
 	return &HistoryRequests{
-		repo: r,
+		repo: actions.Dataset{r},
 		cli:  cli,
 	}
 }
diff --git a/p2p/dataset.go b/p2p/dataset.go
index cd203e03b..f2a31fafa 100644
--- a/p2p/dataset.go
+++ b/p2p/dataset.go
@@ -5,6 +5,7 @@ import (
 	"fmt"
 
 	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/actions"
 	peer "gx/ipfs/QmXYjuNuxVzXKJCfWasQk1RqkhVLDM9jtUKhqc2WPQmFSB/go-libp2p-peer"
 )
@@ -23,12 +24,14 @@ func (n *QriNode) RequestDataset(ref *repo.DatasetRef) (err error) {
 		return fmt.Errorf("path is required")
 	}
 
+	act := actions.Dataset{n.Repo}
+
 	// if peer ID is *our* peer.ID check for local dataset
 	// note that data may be on another machine, so this can still fail back to a
 	// network request
 	if ref.PeerID != "" {
 		if pro, err := n.Repo.Profile(); err == nil && pro.ID == ref.PeerID {
-			if err := n.Repo.ReadDataset(ref); err == nil {
+			if err := act.ReadDataset(ref); err == nil {
 				return nil
 			}
 		}
@@ -88,11 +91,12 @@ func (n *QriNode) handleDataset(ws *WrappedStream, msg Message) (hangup bool) {
 			return
 		}
 		res := msg
+		act := actions.Dataset{n.Repo}
 
 		if err := repo.CanonicalizeDatasetRef(n.Repo, &dsr); err == nil {
 			if ref, err := n.Repo.GetRef(dsr); err == nil {
 
-				if err := n.Repo.ReadDataset(&ref); err != nil {
+				if err := act.ReadDataset(&ref); err != nil {
 					log.Debug(err.Error())
 				}
diff --git a/p2p/events.go b/p2p/events.go
index ec087101f..4ba750fca 100644
--- a/p2p/events.go
+++ b/p2p/events.go
@@ -2,12 +2,8 @@ package p2p
 
 import (
 	"encoding/json"
-	// "strings"
 	"time"
 
-	// "github.com/ipfs/go-datastore"
-	// "github.com/qri-io/cafs"
-	// ipfs "github.com/qri-io/cafs/ipfs"
-	// "github.com/qri-io/dataset/dsfs"
+	"github.com/qri-io/qri/repo"
 	peer "gx/ipfs/QmXYjuNuxVzXKJCfWasQk1RqkhVLDM9jtUKhqc2WPQmFSB/go-libp2p-peer"
diff --git a/repo/actions/actions.go b/repo/actions/actions.go
new file mode 100644
index 000000000..56508c794
--- /dev/null
+++ b/repo/actions/actions.go
@@ -0,0 +1,10 @@
+// Package actions provides canonical business logic that operates on Repos
+// to get higher-order functionality. Actions use only Repo methods
+// to do their work, allowing them to be used across any repo.Repo implementation
+package actions
+
+import (
+	golog "github.com/ipfs/go-log"
+)
+
+var log = golog.Logger("actions")
diff --git a/repo/actions/dataset.go b/repo/actions/dataset.go
new file mode 100644
index 000000000..229981997
--- /dev/null
+++ b/repo/actions/dataset.go
@@ -0,0 +1,120 @@
+package actions
+
+import (
+	"github.com/ipfs/go-datastore"
+	"github.com/qri-io/cafs"
+	"github.com/qri-io/dataset"
+	"github.com/qri-io/dataset/dsfs"
+	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/profile"
+)
+
+// Dataset wraps a repo.Repo, adding actions related to working
+// with datasets
+type Dataset struct {
+	repo.Repo
+}
+
+// CreateDataset initializes a dataset from a dataset pointer and data file
+func (act Dataset) CreateDataset(name string, ds *dataset.Dataset, data cafs.File, pin bool) (ref repo.DatasetRef, err error) {
+	var (
+		path datastore.Key
+		pro  *profile.Profile
+	)
+	pro, err = act.Profile()
+	if err != nil {
+		return
+	}
+
+	path, err = dsfs.CreateDataset(act.Store(), ds, data, act.PrivateKey(), pin)
+	if err != nil {
+		return
+	}
+
+	if ds.PreviousPath != "" && ds.PreviousPath != "/" {
+		prev := repo.DatasetRef{
+			PeerID:   pro.ID,
+			Peername: pro.Peername,
+			Name:     name,
+			Path:     ds.PreviousPath,
+		}
+		if err = act.DeleteRef(prev); err != nil {
+			log.Error(err.Error())
+			err = nil
+		}
+	}
+
+	ref = repo.DatasetRef{
+		PeerID:   pro.ID,
+		Peername: pro.Peername,
+		Name:     name,
+		Path:     path.String(),
+	}
+
+	if err = act.PutRef(ref); err != nil {
+		log.Error(err.Error())
+		return
+	}
+
+	if err = act.LogEvent(repo.ETDsCreated, ref); err != nil {
+		return
+	}
+
+	_, storeIsPinner := act.Store().(cafs.Pinner)
+	if pin && storeIsPinner {
+		act.LogEvent(repo.ETDsPinned, ref)
+	}
+	return
+}
+
+// ReadDataset grabs a dataset from the store
+func (act Dataset) ReadDataset(ref *repo.DatasetRef) (err error) {
+	if act.Repo.Store() != nil {
+		ref.Dataset, err = dsfs.LoadDataset(act.Store(), datastore.NewKey(ref.Path))
+		return
+	}
+
+	return datastore.ErrNotFound
+}
+
+// RenameDataset alters a dataset name
+func (act Dataset) RenameDataset(a, b repo.DatasetRef) (err error) {
+	if err = act.DeleteRef(a); err != nil {
+		return err
+	}
+	if err = act.PutRef(b); err != nil {
+		return err
+	}
+
+	return act.LogEvent(repo.ETDsRenamed, b)
+}
+
+// PinDataset marks a dataset for retention in a store
+func (act Dataset) PinDataset(ref repo.DatasetRef) error {
+	if pinner, ok := act.Store().(cafs.Pinner); ok {
+		pinner.Pin(datastore.NewKey(ref.Path), true)
+		return act.LogEvent(repo.ETDsPinned, ref)
+	}
+	return repo.ErrNotPinner
+}
+
+// UnpinDataset unmarks a dataset for retention in a store
+func (act Dataset) UnpinDataset(ref repo.DatasetRef) error {
+	if pinner, ok := act.Store().(cafs.Pinner); ok {
+		pinner.Unpin(datastore.NewKey(ref.Path), true)
+		return act.LogEvent(repo.ETDsUnpinned, ref)
+	}
+	return repo.ErrNotPinner
+}
+
+// DeleteDataset removes a dataset from the store
+func (act Dataset) DeleteDataset(ref repo.DatasetRef) error {
+	if err := act.DeleteRef(ref); err != nil {
+		return err
+	}
+	if err := act.UnpinDataset(ref); err != nil && err != repo.ErrNotPinner {
+		return err
+	}
+
+	return act.LogEvent(repo.ETDsDeleted, ref)
+}
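For orientation (not part of the patch above): a minimal sketch of how any repo.Repo can be wrapped in the new actions.Dataset type. The profile values and dataset path below are placeholders; only the embedding pattern and the ReadDataset signature come from the code introduced in repo/actions/dataset.go.

```go
package main

import (
	"fmt"

	"github.com/qri-io/cafs"
	"github.com/qri-io/qri/repo"
	"github.com/qri-io/qri/repo/actions"
	"github.com/qri-io/qri/repo/profile"
)

func main() {
	// any repo.Repo works here; MemRepo is used because the tests in this patch construct one the same way
	pro := &profile.Profile{Peername: "example", ID: "examplePeerID"}
	mr, err := repo.NewMemRepo(pro, cafs.NewMapstore(), repo.MemProfiles{})
	if err != nil {
		panic(err)
	}

	// wrapping the repo is all that's needed to get dataset-level actions
	act := actions.Dataset{mr}

	// ReadDataset resolves ref.Dataset from the repo's store; the path here is a stand-in value
	ref := &repo.DatasetRef{Peername: "example", Name: "cities", Path: "/map/exampleDatasetPath"}
	if err := act.ReadDataset(ref); err != nil {
		fmt.Println("read failed:", err)
	}
}
```

Because actions.Dataset embeds the repo, callers such as core.DatasetRequests can keep calling plain Repo methods on the same field while gaining the dataset actions.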
diff --git a/repo/actions/dataset_test.go b/repo/actions/dataset_test.go
new file mode 100644
index 000000000..5fa2f8b78
--- /dev/null
+++ b/repo/actions/dataset_test.go
@@ -0,0 +1,250 @@
+package actions
+
+import (
+	"encoding/base64"
+	"fmt"
+	"os"
+	"path/filepath"
+	"testing"
+
+	"github.com/libp2p/go-libp2p-crypto"
+	"github.com/qri-io/cafs"
+	"github.com/qri-io/dataset/dstest"
+	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/profile"
+)
+
+// base64-encoded Test Private Key, decoded in init
+// peerId: QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt
+var (
+	testPk  = []byte(`CAASpgkwggSiAgEAAoIBAQC/7Q7fILQ8hc9g07a4HAiDKE4FahzL2eO8OlB1K99Ad4L1zc2dCg+gDVuGwdbOC29IngMA7O3UXijycckOSChgFyW3PafXoBF8Zg9MRBDIBo0lXRhW4TrVytm4Etzp4pQMyTeRYyWR8e2hGXeHArXM1R/A/SjzZUbjJYHhgvEE4OZy7WpcYcW6K3qqBGOU5GDMPuCcJWac2NgXzw6JeNsZuTimfVCJHupqG/dLPMnBOypR22dO7yJIaQ3d0PFLxiDG84X9YupF914RzJlopfdcuipI+6gFAgBw3vi6gbECEzcohjKf/4nqBOEvCDD6SXfl5F/MxoHurbGBYB2CJp+FAgMBAAECggEAaVOxe6Y5A5XzrxHBDtzjlwcBels3nm/fWScvjH4dMQXlavwcwPgKhy2NczDhr4X69oEw6Msd4hQiqJrlWd8juUg6vIsrl1wS/JAOCS65fuyJfV3Pw64rWbTPMwO3FOvxj+rFghZFQgjg/i45uHA2UUkM+h504M5Nzs6Arr/rgV7uPGR5e5OBw3lfiS9ZaA7QZiOq7sMy1L0qD49YO1ojqWu3b7UaMaBQx1Dty7b5IVOSYG+Y3U/dLjhTj4Hg1VtCHWRm3nMOE9cVpMJRhRzKhkq6gnZmni8obz2BBDF02X34oQLcHC/Wn8F3E8RiBjZDI66g+iZeCCUXvYz0vxWAQQKBgQDEJu6flyHPvyBPAC4EOxZAw0zh6SF/r8VgjbKO3n/8d+kZJeVmYnbsLodIEEyXQnr35o2CLqhCvR2kstsRSfRz79nMIt6aPWuwYkXNHQGE8rnCxxyJmxV4S63GczLk7SIn4KmqPlCI08AU0TXJS3zwh7O6e6kBljjPt1mnMgvr3QKBgQD6fAkdI0FRZSXwzygx4uSg47Co6X6ESZ9FDf6ph63lvSK5/eue/ugX6p/olMYq5CHXbLpgM4EJYdRfrH6pwqtBwUJhlh1xI6C48nonnw+oh8YPlFCDLxNG4tq6JVo071qH6CFXCIank3ThZeW5a3ZSe5pBZ8h4bUZ9H8pJL4C7yQKBgFb8SN/+/qCJSoOeOcnohhLMSSD56MAeK7KIxAF1jF5isr1TP+rqiYBtldKQX9bIRY3/8QslM7r88NNj+aAuIrjzSausXvkZedMrkXbHgS/7EAPflrkzTA8fyH10AsLgoj/68mKr5bz34nuY13hgAJUOKNbvFeC9RI5g6eIqYH0FAoGAVqFTXZp12rrK1nAvDKHWRLa6wJCQyxvTU8S1UNi2EgDJ492oAgNTLgJdb8kUiH0CH0lhZCgr9py5IKW94OSM6l72oF2UrS6PRafHC7D9b2IV5Al9lwFO/3MyBrMocapeeyaTcVBnkclz4Qim3OwHrhtFjF1ifhP9DwVRpuIg+dECgYANwlHxLe//tr6BM31PUUrOxP5Y/cj+ydxqM/z6papZFkK6Mvi/vMQQNQkh95GH9zqyC5Z/yLxur4ry1eNYty/9FnuZRAkEmlUSZ/DobhU0Pmj8Hep6JsTuMutref6vCk2n02jc9qYmJuD7iXkdXDSawbEG6f5C4MUkJ38z1t1OjA==`)
+	privKey crypto.PrivKey
+
+	testPeerProfile = &profile.Profile{
+		Peername: "peer",
+		ID:       "QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt",
+	}
+)
+
+func testdataPath(path string) string {
+	return filepath.Join(os.Getenv("GOPATH"), "/src/github.com/qri-io/qri/repo/test/testdata", path)
+}
+
+func init() {
+	data, err := base64.StdEncoding.DecodeString(string(testPk))
+	if err != nil {
+		panic(err)
+	}
+	testPk = data
+
+	privKey, err = crypto.UnmarshalPrivateKey(testPk)
+	if err != nil {
+		panic(fmt.Errorf("error unmarshaling private key: %s", err.Error()))
+		return
+	}
+}
+
+func TestDataset(t *testing.T) {
+	rmf := func(t *testing.T) repo.Repo {
+		mr, err := repo.NewMemRepo(testPeerProfile, cafs.NewMapstore(), repo.MemProfiles{})
+		if err != nil {
+			panic(err)
+		}
+		mr.SetPrivateKey(privKey)
+		return mr
+	}
+	DatasetTests(t, rmf)
+}
+
+type RepoMakerFunc func(t *testing.T) repo.Repo
+type RepoTestFunc func(t *testing.T, rmf RepoMakerFunc)
+
+func DatasetTests(t *testing.T, rmf RepoMakerFunc) {
+	for _, test := range []RepoTestFunc{
+		testCreateDataset,
+		testReadDataset,
+		testRenameDataset,
+		testDatasetPinning,
+		testDeleteDataset,
+		testEventsLog,
+	} {
+		test(t, rmf)
+	}
+}
+
+func testCreateDataset(t *testing.T, rmf RepoMakerFunc) {
+	createDataset(t, rmf)
+}
+
+func createDataset(t *testing.T, rmf RepoMakerFunc) (repo.Repo, repo.DatasetRef) {
+	r := rmf(t)
+	r.SaveProfile(testPeerProfile)
+	r.SetPrivateKey(privKey)
+	act := Dataset{r}
+
+	tc, err := dstest.NewTestCaseFromDir(testdataPath("cities"))
+	if err != nil {
+		t.Error(err.Error())
+		return r, repo.DatasetRef{}
+	}
+
+	ref, err := act.CreateDataset(tc.Name, tc.Input, tc.DataFile(), true)
+	if err != nil {
+		t.Error(err.Error())
+	}
+
+	return r, ref
+}
+
+func testReadDataset(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := Dataset{r}
+
+	if err := act.ReadDataset(&ref); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if ref.Dataset == nil {
+		t.Error("expected dataset to not equal nil")
+		return
+	}
+}
+
+func testRenameDataset(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := Dataset{r}
+
+	b := repo.DatasetRef{
+		Name:     "cities2",
+		Path:     ref.Path,
+		Peername: ref.Peername,
+		PeerID:   ref.PeerID,
+	}
+
+	if err := act.RenameDataset(ref, b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.ReadDataset(&b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if b.Dataset == nil {
+		t.Error("expected dataset to not equal nil")
+		return
+	}
+}
+
+func testDatasetPinning(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := Dataset{r}
+
+	if err := act.PinDataset(ref); err != nil {
+		if err == repo.ErrNotPinner {
+			t.Log("repo store doesn't support pinning")
+		} else {
+			t.Error(err.Error())
+			return
+		}
+	}
+
+	tc, err := dstest.NewTestCaseFromDir(testdataPath("counter"))
+	if err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	ref2, err := act.CreateDataset(tc.Name, tc.Input, tc.DataFile(), false)
+	if err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.PinDataset(ref2); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.UnpinDataset(ref); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.UnpinDataset(ref2); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+}
+
+func testDeleteDataset(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := Dataset{r}
+
+	if err := act.DeleteDataset(ref); err != nil {
+		t.Error(err.Error())
+		return
+	}
+}
+
+func testEventsLog(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := Dataset{r}
+	pinner := true
+
+	b := repo.DatasetRef{
+		Name:     "cities2",
+		Path:     ref.Path,
+		Peername: ref.Peername,
+		PeerID:   ref.PeerID,
+	}
+
+	if err := act.RenameDataset(ref, b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.PinDataset(b); err != nil {
+		if err == repo.ErrNotPinner {
+			pinner = false
+		} else {
+			t.Error(err.Error())
+			return
+		}
+	}
+
+	if err := act.UnpinDataset(b); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.DeleteDataset(b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	events, err := r.Events(10, 0)
+	if err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	ets := []repo.EventType{repo.ETDsDeleted, repo.ETDsUnpinned, repo.ETDsPinned, repo.ETDsRenamed, repo.ETDsPinned, repo.ETDsCreated}
+
+	if !pinner {
+		ets = []repo.EventType{repo.ETDsDeleted, repo.ETDsRenamed, repo.ETDsCreated}
+	}
+
+	if len(events) != len(ets) {
+		t.Errorf("event log length mismatch. expected: %d, got: %d", len(ets), len(events))
+		return
+	}
+
+	for i, et := range ets {
+		if events[i].Type != et {
+			t.Errorf("case %d eventType mismatch. expected: %s, got: %s", i, et, events[i].Type)
+		}
+	}
+}
diff --git a/repo/events.go b/repo/events.go
index 8aacec784..37e412f35 100644
--- a/repo/events.go
+++ b/repo/events.go
@@ -49,7 +49,7 @@ func (log *MemEventLog) LogEvent(t EventType, ref DatasetRef) error {
 		Ref:  ref,
 	}
 	logs := append([]*Event{e}, *log...)
-	sort.Slice(logs, func(i, j int) bool { return logs[i].Time.Before(logs[j].Time) })
+	sort.Slice(logs, func(i, j int) bool { return logs[i].Time.After(logs[j].Time) })
 	*log = logs
 	return nil
 }
diff --git a/repo/fs/datasets.go b/repo/fs/datasets.go
deleted file mode 100644
index e98215e5c..000000000
--- a/repo/fs/datasets.go
+++ /dev/null
@@ -1,114 +0,0 @@
-package fsrepo
-
-import (
-	"github.com/ipfs/go-datastore"
-	"github.com/qri-io/cafs"
-	"github.com/qri-io/dataset"
-	"github.com/qri-io/dataset/dsfs"
-	"github.com/qri-io/qri/repo"
-	"github.com/qri-io/qri/repo/profile"
-)
-
-// CreateDataset initializes a dataset from a dataset pointer and data file
-func (r *Repo) CreateDataset(name string, ds *dataset.Dataset, data cafs.File, pin bool) (ref repo.DatasetRef, err error) {
-	var (
-		path datastore.Key
-		pro  *profile.Profile
-	)
-	pro, err = r.Profile()
-	if err != nil {
-		return
-	}
-
-	path, err = dsfs.CreateDataset(r.store, ds, data, r.pk, pin)
-	if err != nil {
-		return
-	}
-
-	if ds.PreviousPath != "" && ds.PreviousPath != "/" {
-		prev := repo.DatasetRef{
-			PeerID:   pro.ID,
-			Peername: pro.Peername,
-			Name:     name,
-			Path:     ds.PreviousPath,
-		}
-		if err = r.Refstore.DeleteRef(prev); err != nil {
-			log.Error(err.Error())
-			err = nil
-		}
-	}
-
-	ref = repo.DatasetRef{
-		PeerID:   pro.ID,
-		Peername: pro.Peername,
-		Name:     name,
-		Path:     path.String(),
-	}
-
-	if err = r.PutRef(ref); err != nil {
-		log.Error(err.Error())
-		return
-	}
-
-	if err = r.LogEvent(repo.ETDsCreated, ref); err != nil {
-		return
-	}
-
-	_, storeIsPinner := r.Store().(cafs.Pinner)
-	if pin && storeIsPinner {
-		r.LogEvent(repo.ETDsPinned, ref)
-	}
-	return
-}
-
-// ReadDataset grabs a dataset from the store
-func (r *Repo) ReadDataset(ref *repo.DatasetRef) (err error) {
-	if r.store != nil {
-		ref.Dataset, err = dsfs.LoadDataset(r.store, datastore.NewKey(ref.Path))
-		return
-	}
-
-	return datastore.ErrNotFound
-}
-
-// RenameDataset alters a dataset name
-func (r *Repo) RenameDataset(a, b repo.DatasetRef) (err error) {
-	if err = r.Refstore.DeleteRef(a); err != nil {
-		return err
-	}
-	if err = r.Refstore.PutRef(b); err != nil {
-		return err
-	}
-
-	return r.LogEvent(repo.ETDsRenamed, b)
-}
-
-// PinDataset marks a dataset for retention in a store
-func (r *Repo) PinDataset(ref repo.DatasetRef) error {
-	if pinner, ok := r.store.(cafs.Pinner); ok {
-		pinner.Pin(datastore.NewKey(ref.Path), true)
-		return r.LogEvent(repo.ETDsPinned, ref)
-	}
-	return repo.ErrNotPinner
-}
-
-// UnpinDataset unmarks a dataset for retention in a store
-func (r *Repo) UnpinDataset(ref repo.DatasetRef) error {
-	if pinner, ok := r.store.(cafs.Pinner); ok {
-		pinner.Unpin(datastore.NewKey(ref.Path), true)
-		return r.LogEvent(repo.ETDsUnpinned, ref)
-	}
-	return repo.ErrNotPinner
-}
-
-// DeleteDataset removes a dataset from the store
-func (r *Repo) DeleteDataset(ref repo.DatasetRef) error {
-	if err := r.Refstore.DeleteRef(ref); err != nil {
-		return err
-	}
-	if err := r.UnpinDataset(ref); err != nil && err != repo.ErrNotPinner {
-		return err
-	}
-
-	return r.LogEvent(repo.ETDsDeleted, ref)
-}
diff --git a/repo/fs/fs.go b/repo/fs/fs.go
index 02bdeff48..b30e8d466 100644
--- a/repo/fs/fs.go
+++ b/repo/fs/fs.go
@@ -3,6 +3,7 @@ package fsrepo
 import (
 	"encoding/json"
 	"fmt"
+	"github.com/qri-io/qri/repo/actions"
 	"io/ioutil"
 	"os"
 
@@ -161,6 +162,11 @@ func (r *Repo) SetPrivateKey(pk crypto.PrivKey) error {
 	return nil
 }
 
+// PrivateKey returns this repo's private key
+func (r *Repo) PrivateKey() crypto.PrivKey {
+	return r.pk
+}
+
 // Search this repo for dataset references
 func (r *Repo) Search(p repo.SearchParams) ([]repo.DatasetRef, error) {
 	if r.index == nil {
@@ -179,7 +185,8 @@ func (r *Repo) Search(p repo.SearchParams) ([]repo.DatasetRef, error) {
 			}
 		}
 
-		if err := r.ReadDataset(&ref); err != nil {
+		act := actions.Dataset{r}
+		if err := act.ReadDataset(&ref); err != nil {
 			log.Debug(err.Error())
 		}
 	}
diff --git a/repo/graph_test.go b/repo/graph_test.go
index cbb16ff86..9660eee14 100644
--- a/repo/graph_test.go
+++ b/repo/graph_test.go
@@ -3,6 +3,7 @@ package repo
 import (
 	"encoding/base64"
 	"fmt"
+	"github.com/qri-io/dataset/dsfs"
 	"testing"
 
 	"github.com/ipfs/go-datastore"
@@ -159,26 +160,19 @@ func makeTestRepo() (Repo, error) {
 	})
 	data1f := cafs.NewMemfileBytes("data1", []byte("dataset_1"))
-	// ds1p, err := dsfs.WriteDataset(store, ds1, data1f, true)
-	// if err != nil {
-	// 	return nil, fmt.Errorf("error putting dataset: %s", err.Error())
-	// }
-	// r.PutDataset(ds1p, ds1)
-	// r.PutRef(DatasetRef{Peername: "peer", Name: "ds1", Path: ds1p.String()})
-	if _, err := r.CreateDataset("ds1", ds1, data1f, true); err != nil {
-		return nil, fmt.Errorf("error creating dataset: %s", err.Error())
+
+	ds1p, err := dsfs.WriteDataset(store, ds1, data1f, true)
+	if err != nil {
+		return nil, fmt.Errorf("error putting dataset: %s", err.Error())
 	}
+	r.PutRef(DatasetRef{Peername: "peer", Name: "ds1", Path: ds1p.String()})
 
 	data2f := cafs.NewMemfileBytes("data2", []byte("dataset_2"))
-	// ds2p, err := dsfs.WriteDataset(store, ds2, data2f, true)
-	// if err != nil {
-	// 	return nil, fmt.Errorf("error putting dataset: %s", err.Error())
-	// }
-	// r.PutDataset(ds2p, ds2)
-	// r.PutRef(DatasetRef{Peername: "peer", Name: "ds2", Path: ds2p.String()})
-	if _, err := r.CreateDataset("ds2", ds2, data2f, true); err != nil {
-		return nil, fmt.Errorf("error creating dataset: %s", err.Error())
+	ds2p, err := dsfs.WriteDataset(store, ds2, data2f, true)
+	if err != nil {
+		return nil, fmt.Errorf("error putting dataset: %s", err.Error())
 	}
+	r.PutRef(DatasetRef{Peername: "peer", Name: "ds2", Path: ds2p.String()})
 
 	return r, nil
 }
diff --git a/repo/mem_datasets.go b/repo/mem_datasets.go
deleted file mode 100644
index 76e011c54..000000000
--- a/repo/mem_datasets.go
+++ /dev/null
@@ -1,121 +0,0 @@
-package repo
-
-import (
-	"github.com/ipfs/go-datastore"
-	"github.com/qri-io/cafs"
-	"github.com/qri-io/dataset"
-	"github.com/qri-io/dataset/dsfs"
-)
-
-// // MemDatasets is an in-memory implementation of the DatasetStore interface
-// type MemDatasets struct {
-// 	datasets map[string]*dataset.Dataset
-// 	store    cafs.Filestore
-// }
-
-// // NewMemDatasets creates a datasets instance from a cafs.Filstore
-// func NewMemDatasets(store cafs.Filestore) MemDatasets {
-// 	return MemDatasets{
-// 		datasets: map[string]*dataset.Dataset{},
-// 		store:    store,
-// 	}
-// }
-
-// CreateDataset initializes a dataset from a dataset pointer and data file
-func (r *MemRepo) CreateDataset(name string, ds *dataset.Dataset, data cafs.File, pin bool) (ref DatasetRef, err error) {
-	var (
-		path datastore.Key
-		pro  = r.profile
-	)
-
-	path, err = dsfs.CreateDataset(r.store, ds, data, r.pk, pin)
-	if err != nil {
-		return
-	}
-
-	if ds.PreviousPath != "" && ds.PreviousPath != "/" {
-		prev := DatasetRef{
-			PeerID:   pro.ID,
-			Peername: pro.Peername,
-			Name:     name,
-			Path:     ds.PreviousPath,
-		}
-		if err = r.DeleteRef(prev); err != nil {
-			// log.Error(err.Error())
-			err = nil
-		}
-	}
-
-	ref = DatasetRef{
-		Peername: r.profile.Peername,
-		Name:     name,
-		PeerID:   r.profile.ID,
-		Path:     path.String(),
-	}
-
-	if err = r.PutRef(ref); err != nil {
-		return ref, err
-	}
-
-	if err = r.LogEvent(ETDsCreated, ref); err != nil {
-		return ref, err
-	}
-
-	if pin {
-		if err = r.LogEvent(ETDsPinned, ref); err != nil {
-			return ref, err
-		}
-	}
-	return ref, nil
-}
-
-// ReadDataset fetches a dataset from the store
-func (r *MemRepo) ReadDataset(ref *DatasetRef) error {
-	ds, err := dsfs.LoadDataset(r.store, datastore.NewKey(ref.Path))
-	if err != nil {
-		return err
-	}
-	ref.Dataset = ds
-	return nil
-}
-
-// RenameDataset alters a dataset name
-func (r *MemRepo) RenameDataset(a, b DatasetRef) (err error) {
-	if err = r.DeleteRef(a); err != nil {
-		return err
-	}
-	if err = r.PutRef(b); err != nil {
-		return err
-	}
-	return r.LogEvent(ETDsRenamed, b)
-}
-
-// PinDataset marks a dataset for retention in a store
-func (r *MemRepo) PinDataset(ref DatasetRef) (err error) {
-	if pinner, ok := r.store.(cafs.Pinner); ok {
-		pinner.Pin(datastore.NewKey(ref.Path), true)
-		return r.LogEvent(ETDsUnpinned, ref)
-	}
-	return ErrNotPinner
-}
-
-// UnpinDataset unmarks a dataset for retention in a store
-func (r *MemRepo) UnpinDataset(ref DatasetRef) error {
-	if pinner, ok := r.store.(cafs.Pinner); ok {
-		pinner.Unpin(datastore.NewKey(ref.Path), true)
-		return r.LogEvent(ETDsUnpinned, ref)
-	}
-	return ErrNotPinner
-}
-
-// DeleteDataset removes a dataset from the store
-func (r *MemRepo) DeleteDataset(ref DatasetRef) error {
-	if err := r.DeleteRef(ref); err != nil {
-		return err
-	}
-	if err := r.UnpinDataset(ref); err != nil && err != ErrNotPinner {
-		return err
-	}
-
-	return r.LogEvent(ETDsDeleted, ref)
-}
diff --git a/repo/mem_repo.go b/repo/mem_repo.go
index 7ed478cdb..bf73d4654 100644
--- a/repo/mem_repo.go
+++ b/repo/mem_repo.go
@@ -42,6 +42,11 @@ func (r *MemRepo) SetPrivateKey(pk crypto.PrivKey) error {
 	return nil
 }
 
+// PrivateKey returns this repo's private key
+func (r *MemRepo) PrivateKey() crypto.PrivKey {
+	return r.pk
+}
+
 // RefCache gives access to the ephemeral Refstore
 func (r *MemRepo) RefCache() Refstore {
 	return r.refCache
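One small but load-bearing change above is the one-word edit in repo/events.go (Before becomes After), which flips MemEventLog to newest-first order; the expected event slices in the tests (ETDsDeleted first, ETDsCreated last) rely on it. A self-contained sketch of that ordering rule, using placeholder event values rather than the real repo types:

```go
package main

import (
	"fmt"
	"sort"
	"time"
)

// event mirrors only the fields of repo.Event needed to show the sort;
// it is illustrative, not the real type
type event struct {
	Time time.Time
	Type string
}

func main() {
	now := time.Now()
	logs := []event{
		{Time: now.Add(-3 * time.Second), Type: "ETDsCreated"},
		{Time: now.Add(-2 * time.Second), Type: "ETDsRenamed"},
		{Time: now.Add(-1 * time.Second), Type: "ETDsDeleted"},
	}

	// same comparison as the patched MemEventLog.LogEvent: newest entries sort to the front
	sort.Slice(logs, func(i, j int) bool { return logs[i].Time.After(logs[j].Time) })

	for _, e := range logs {
		fmt.Println(e.Type) // prints ETDsDeleted, ETDsRenamed, ETDsCreated
	}
}
```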
diff --git a/repo/repo.go b/repo/repo.go
index 614d509b8..bad233278 100644
--- a/repo/repo.go
+++ b/repo/repo.go
@@ -10,7 +10,6 @@ import (
 	"github.com/libp2p/go-libp2p-crypto"
 	"github.com/qri-io/cafs"
-	"github.com/qri-io/dataset"
 	"github.com/qri-io/dataset/dsgraph"
 	"github.com/qri-io/qri/repo/profile"
 )
@@ -51,18 +50,8 @@ type Repo interface {
 	// to work properly, but the whole notion of transforms needs a rethink first.
 	Graph() (map[string]*dsgraph.Node, error)
 
-	// All Repos must keep a Refstore, defining a given peer's datasets
+	// All Repos must keep a Refstore, defining a store of known datasets
 	Refstore
-	// Caches of dataset references
-	RefCache() Refstore
-
-	// Repos also serve as a store of dataset information.
-	// It's important that this store maintain sync with any underlying filestore.
-	// (which is why we might want to kill this in favor of just having a cache?)
-	// The behaviour of the embedded DatasetStore will typically differ from the cache,
-	// by only returning saved/pinned/permanent datasets.
-	Datasets
-
 	// EventLog keeps a log of Profile activity for this repo
 	EventLog
@@ -75,6 +64,10 @@ type Repo interface {
 	// PrivateKey is used to tie peer actions to this profile. Repo implementations must
 	// never expose this private key once set.
 	SetPrivateKey(pk crypto.PrivKey) error
+	// PrivateKey hands over this repo's private key
+	// TODO - this is needed to create action structs, any way we can make this
+	// privately-negotiated or created at init?
+	PrivateKey() crypto.PrivKey
 	// A repository must maintain profile information about encountered peers.
 	// Decsisions regarding retentaion of peers is left to the the implementation
 	// TODO - should rename this to "profiles" to separate from the networking
 	Profiles() Profiles
 }
 
-// Datasets is the minimum interface to act as a store of datasets.
-// Datasets stored here should be reasonably dereferenced to avoid
-// additional lookups.
-// All fields here work only with paths (which are datastore.Key's)
-type Datasets interface {
-	// CreateDataset initializes a dataset from a dataset pointer and data file
-	// It's not part of the Datasets interface because creating a dataset requires
-	// access to this repos store & private key
-	CreateDataset(name string, ds *dataset.Dataset, data cafs.File, pin bool) (ref DatasetRef, err error)
-	ReadDataset(ref *DatasetRef) error
-	RenameDataset(a, b DatasetRef) error
-	DeleteDataset(ref DatasetRef) (err error)
-	PinDataset(ref DatasetRef) error
-	UnpinDataset(ref DatasetRef) error
-}
-
 // SearchParams encapsulates parameters provided to Searchable.Search
 type SearchParams struct {
 	Q string
diff --git a/repo/test/test.go b/repo/test/test.go
index 1152fa1f3..e95559fc5 100644
--- a/repo/test/test.go
+++ b/repo/test/test.go
@@ -5,7 +5,6 @@ import (
 	"path/filepath"
 	"testing"
 
-	"github.com/qri-io/dataset/dstest"
 	"github.com/qri-io/qri/repo"
 )
@@ -24,13 +23,8 @@ func testdataPath(path string) string {
 func RunRepoTests(t *testing.T, rmf RepoMakerFunc) {
 	tests := []repoTestFunc{
 		testProfile,
-		testCreateDataset,
-		testReadDataset,
-		testRenameDataset,
-		testDatasetPinning,
-		testDeleteDataset,
-		testEventsLog,
 		testRefstore,
+		DatasetActions,
 	}
 
 	for _, test := range tests {
@@ -53,174 +47,3 @@ func testProfile(t *testing.T, rmf RepoMakerFunc) {
 	}
 }
-
-func testCreateDataset(t *testing.T, rmf RepoMakerFunc) {
-	createDataset(t, rmf)
-}
-
-func createDataset(t *testing.T, rmf RepoMakerFunc) (repo.Repo, repo.DatasetRef) {
-	r := rmf(t)
-	r.SaveProfile(testPeerProfile)
-	r.SetPrivateKey(privKey)
-
-	tc, err := dstest.NewTestCaseFromDir(testdataPath("cities"))
-	if err != nil {
-		t.Error(err.Error())
-		return r, repo.DatasetRef{}
-	}
-
-	ref, err := r.CreateDataset(tc.Name, tc.Input, tc.DataFile(), true)
-	if err != nil {
-		t.Error(err.Error())
-	}
-
-	return r, ref
-}
-
-func testReadDataset(t *testing.T, rmf RepoMakerFunc) {
-	r, ref := createDataset(t, rmf)
-
-	if err := r.ReadDataset(&ref); err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	if ref.Dataset == nil {
-		t.Error("expected dataset to not equal nil")
-		return
-	}
-}
-
-func testRenameDataset(t *testing.T, rmf RepoMakerFunc) {
-	r, ref := createDataset(t, rmf)
-
-	b := repo.DatasetRef{
-		Name:     "cities2",
-		Path:     ref.Path,
-		Peername: ref.Peername,
-		PeerID:   ref.PeerID,
-	}
-
-	if err := r.RenameDataset(ref, b); err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	if err := r.ReadDataset(&b); err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	if b.Dataset == nil {
-		t.Error("expected dataset to not equal nil")
-		return
-	}
-}
-
-func testDatasetPinning(t *testing.T, rmf RepoMakerFunc) {
-	r, ref := createDataset(t, rmf)
-
-	if err := r.PinDataset(ref); err != nil {
-		if err == repo.ErrNotPinner {
-			t.Log("repo store doesn't support pinning")
-		} else {
-			t.Error(err.Error())
-			return
-		}
-	}
-
-	tc, err := dstest.NewTestCaseFromDir(testdataPath("counter"))
-	if err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	ref2, err := r.CreateDataset(tc.Name, tc.Input, tc.DataFile(), false)
-	if err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	if err := r.PinDataset(ref2); err != nil && err != repo.ErrNotPinner {
-		t.Error(err.Error())
-		return
-	}
-
-	if err := r.UnpinDataset(ref); err != nil && err != repo.ErrNotPinner {
-		t.Error(err.Error())
-		return
-	}
-
-	if err := r.UnpinDataset(ref2); err != nil && err != repo.ErrNotPinner {
-		t.Error(err.Error())
-		return
-	}
-}
-
-func testDeleteDataset(t *testing.T, rmf RepoMakerFunc) {
-	r, ref := createDataset(t, rmf)
-
-	if err := r.DeleteDataset(ref); err != nil {
-		t.Error(err.Error())
-		return
-	}
-}
-
-func testEventsLog(t *testing.T, rmf RepoMakerFunc) {
-	r, ref := createDataset(t, rmf)
-	pinner := true
-
-	b := repo.DatasetRef{
-		Name:     "cities2",
-		Path:     ref.Path,
-		Peername: ref.Peername,
-		PeerID:   ref.PeerID,
-	}
-
-	if err := r.RenameDataset(ref, b); err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	if err := r.PinDataset(b); err != nil {
-		if err == repo.ErrNotPinner {
-			pinner = false
-		} else {
-			t.Error(err.Error())
-			return
-		}
-	}
-
-	if err := r.UnpinDataset(b); err != nil && err != repo.ErrNotPinner {
-		t.Error(err.Error())
-		return
-	}
-
-	if err := r.DeleteDataset(b); err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	events, err := r.Events(10, 0)
-	if err != nil {
-		t.Error(err.Error())
-		return
-	}
-
-	ets := []repo.EventType{repo.ETDsDeleted, repo.ETDsUnpinned, repo.ETDsPinned, repo.ETDsRenamed, repo.ETDsPinned, repo.ETDsCreated}
-
-	if !pinner {
-		ets = []repo.EventType{repo.ETDsDeleted, repo.ETDsRenamed, repo.ETDsCreated}
-	}
-
-	if len(events) != len(ets) {
-		t.Errorf("event log length mismatch. expected: %d, got: %d", len(ets), len(events))
-		return
-	}
-
-	for i, et := range ets {
-		if events[i].Type != et {
-			t.Errorf("case %d eventType mismatch. expected: %s, got: %s", i, et, events[i].Type)
-		}
-	}
-}
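With the dataset tests folded into RunRepoTests (via the DatasetActions entry above), a third-party repo.Repo implementation gets the whole suite from one call. A hedged sketch, assuming test.RepoMakerFunc keeps the func(*testing.T) repo.Repo shape used throughout this patch; MemRepo stands in for a custom implementation and the profile value is a placeholder:

```go
package custom_test

import (
	"testing"

	"github.com/qri-io/cafs"
	"github.com/qri-io/qri/repo"
	"github.com/qri-io/qri/repo/profile"
	"github.com/qri-io/qri/repo/test"
)

func TestCustomRepo(t *testing.T) {
	// the maker builds a fresh repo per test; the suite sets its own profile and private key
	rmf := func(t *testing.T) repo.Repo {
		mr, err := repo.NewMemRepo(&profile.Profile{Peername: "tester"}, cafs.NewMapstore(), repo.MemProfiles{})
		if err != nil {
			t.Fatal(err)
		}
		return mr
	}
	test.RunRepoTests(t, rmf)
}
```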
diff --git a/repo/test/test_dataset_actions.go b/repo/test/test_dataset_actions.go
new file mode 100644
index 000000000..fdea5c554
--- /dev/null
+++ b/repo/test/test_dataset_actions.go
@@ -0,0 +1,202 @@
+package test
+
+import (
+	"testing"
+
+	"github.com/qri-io/dataset/dstest"
+	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/actions"
+)
+
+// DatasetActions runs actions.Dataset tests against a given repo
+// TODO - actions should only use public repo methods.
+// remove this test in favor of a lower-level test suite
+func DatasetActions(t *testing.T, rmf RepoMakerFunc) {
+	for _, test := range []repoTestFunc{
+		testCreateDataset,
+		testReadDataset,
+		testRenameDataset,
+		testDatasetPinning,
+		testDeleteDataset,
+		testEventsLog,
+	} {
+		test(t, rmf)
+	}
+}
+
+func testCreateDataset(t *testing.T, rmf RepoMakerFunc) {
+	createDataset(t, rmf)
+}
+
+func createDataset(t *testing.T, rmf RepoMakerFunc) (repo.Repo, repo.DatasetRef) {
+	r := rmf(t)
+	r.SaveProfile(testPeerProfile)
+	r.SetPrivateKey(privKey)
+	act := actions.Dataset{r}
+
+	tc, err := dstest.NewTestCaseFromDir(testdataPath("cities"))
+	if err != nil {
+		t.Error(err.Error())
+		return r, repo.DatasetRef{}
+	}
+
+	ref, err := act.CreateDataset(tc.Name, tc.Input, tc.DataFile(), true)
+	if err != nil {
+		t.Error(err.Error())
+	}
+
+	return r, ref
+}
+
+func testReadDataset(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := actions.Dataset{r}
+
+	if err := act.ReadDataset(&ref); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if ref.Dataset == nil {
+		t.Error("expected dataset to not equal nil")
+		return
+	}
+}
+
+func testRenameDataset(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := actions.Dataset{r}
+
+	b := repo.DatasetRef{
+		Name:     "cities2",
+		Path:     ref.Path,
+		Peername: ref.Peername,
+		PeerID:   ref.PeerID,
+	}
+
+	if err := act.RenameDataset(ref, b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.ReadDataset(&b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if b.Dataset == nil {
+		t.Error("expected dataset to not equal nil")
+		return
+	}
+}
+
+func testDatasetPinning(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := actions.Dataset{r}
+
+	if err := act.PinDataset(ref); err != nil {
+		if err == repo.ErrNotPinner {
+			t.Log("repo store doesn't support pinning")
+		} else {
+			t.Error(err.Error())
+			return
+		}
+	}
+
+	tc, err := dstest.NewTestCaseFromDir(testdataPath("counter"))
+	if err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	ref2, err := act.CreateDataset(tc.Name, tc.Input, tc.DataFile(), false)
+	if err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.PinDataset(ref2); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.UnpinDataset(ref); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.UnpinDataset(ref2); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+}
+
+func testDeleteDataset(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := actions.Dataset{r}
+
+	if err := act.DeleteDataset(ref); err != nil {
+		t.Error(err.Error())
+		return
+	}
+}
+
+func testEventsLog(t *testing.T, rmf RepoMakerFunc) {
+	r, ref := createDataset(t, rmf)
+	act := actions.Dataset{r}
+	pinner := true
+
+	b := repo.DatasetRef{
+		Name:     "cities2",
+		Path:     ref.Path,
+		Peername: ref.Peername,
+		PeerID:   ref.PeerID,
+	}
+
+	if err := act.RenameDataset(ref, b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.PinDataset(b); err != nil {
+		if err == repo.ErrNotPinner {
+			pinner = false
+		} else {
+			t.Error(err.Error())
+			return
+		}
+	}
+
+	if err := act.UnpinDataset(b); err != nil && err != repo.ErrNotPinner {
+		t.Error(err.Error())
+		return
+	}
+
+	if err := act.DeleteDataset(b); err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	events, err := r.Events(10, 0)
+	if err != nil {
+		t.Error(err.Error())
+		return
+	}
+
+	ets := []repo.EventType{repo.ETDsDeleted, repo.ETDsUnpinned, repo.ETDsPinned, repo.ETDsRenamed, repo.ETDsPinned, repo.ETDsCreated}
+
+	if !pinner {
+		ets = []repo.EventType{repo.ETDsDeleted, repo.ETDsRenamed, repo.ETDsCreated}
+	}
+
+	if len(events) != len(ets) {
+		t.Errorf("event log length mismatch. expected: %d, got: %d", len(ets), len(events))
+		return
+	}
+
+	for i, et := range ets {
+		if events[i].Type != et {
+			t.Errorf("case %d eventType mismatch. expected: %s, got: %s", i, et, events[i].Type)
+		}
+	}
+}
diff --git a/repo/test/test_repo.go b/repo/test/test_repo.go
index 0f8787369..98ddcecf7 100644
--- a/repo/test/test_repo.go
+++ b/repo/test/test_repo.go
@@ -12,6 +12,7 @@ import (
 	"github.com/qri-io/cafs"
 	"github.com/qri-io/dataset/dstest"
 	"github.com/qri-io/qri/repo"
+	"github.com/qri-io/qri/repo/actions"
 	"github.com/qri-io/qri/repo/profile"
 )
@@ -52,10 +53,8 @@ func NewTestRepo() (mr repo.Repo, err error) {
 	}
 	mr.SetPrivateKey(privKey)
+	act := actions.Dataset{mr}
 
-	// var (
-	// 	dskey datastore.Key
-	// )
 	gopath := os.Getenv("GOPATH")
 
 	for _, k := range datasets {
@@ -66,13 +65,9 @@ func NewTestRepo() (mr repo.Repo, err error) {
 		datafile := cafs.NewMemfileBytes(tc.DataFilename, tc.Data)
 
-		if _, err = mr.CreateDataset(tc.Name, tc.Input, datafile, true); err != nil {
+		if _, err = act.CreateDataset(tc.Name, tc.Input, datafile, true); err != nil {
 			return nil, fmt.Errorf("%s error creating dataset: %s", k, err.Error())
 		}
-
-		// if err = mr.PutRef(repo.DatasetRef{PeerID: "QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt", Peername: "peer", Name: k, Path: dskey.String()}); err != nil {
-		// 	return nil, err
-		// }
 	}
 
 	return
@@ -98,6 +93,7 @@ func NewMemRepoFromDir(path string) (repo.Repo, crypto.PrivKey, error) {
 		return mr, pk, err
 	}
 	mr.SetPrivateKey(pk)
+	act := actions.Dataset{mr}
 
 	tc, err := dstest.LoadTestCases(path)
 	if err != nil {
@@ -105,7 +101,7 @@ func NewMemRepoFromDir(path string) (repo.Repo, crypto.PrivKey, error) {
 
 	for _, c := range tc {
-		if _, err := mr.CreateDataset(c.Name, c.Input, c.DataFile(), true); err != nil {
+		if _, err := act.CreateDataset(c.Name, c.Input, c.DataFile(), true); err != nil {
 			return mr, pk, err
 		}
 	}
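A closing sketch (again not part of the patch) of the caller-side contract the tests above encode: pinning is best-effort, and a store that doesn't implement cafs.Pinner reports repo.ErrNotPinner, which callers treat as a no-op. The helper name here is invented for illustration:

```go
package example

import (
	"github.com/qri-io/qri/repo"
	"github.com/qri-io/qri/repo/actions"
)

// pinIfPossible is a hypothetical helper: it pins ref when the underlying
// store supports it and silently skips stores that can't pin, mirroring
// how the tests in this patch tolerate repo.ErrNotPinner.
func pinIfPossible(act actions.Dataset, ref repo.DatasetRef) error {
	err := act.PinDataset(ref)
	if err == repo.ErrNotPinner {
		// not a pinning store; nothing to do
		return nil
	}
	return err
}
```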