Skip to content

Commit 1a7cd42

Browse files
committed
feat(base): defining base package
For a while now we've wanted to carve a "local" package out of actions that both p2p and actions can use. (Okay, we might get rid of actions entirely, I'm not sure yet). This starts that process, and already it's feelin' like a great move. I want to call it "base" instead of "core" b/c IPFS already has very developed terminology around the word "core".
1 parent af3f854 commit 1a7cd42

33 files changed

+721
-356
lines changed

actions/dataset.go

Lines changed: 8 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@ import (
1313
"github.com/qri-io/dataset/detect"
1414
"github.com/qri-io/dataset/dsfs"
1515
"github.com/qri-io/dataset/validate"
16+
"github.com/qri-io/qri/base"
1617
"github.com/qri-io/qri/p2p"
1718
"github.com/qri-io/qri/repo"
1819
"github.com/qri-io/qri/repo/profile"
@@ -52,7 +53,7 @@ func NewDataset(dsp *dataset.DatasetPod) (ds *dataset.Dataset, body cafs.File, s
5253
}
5354

5455
// open a data file if we can
55-
if body, err = repo.DatasetPodBodyFile(dsp); err == nil {
56+
if body, err = base.DatasetPodBodyFile(dsp); err == nil {
5657
// defer body.Close()
5758

5859
// validate / generate dataset name
@@ -146,7 +147,7 @@ func UpdateDataset(node *p2p.QriNode, dsp *dataset.DatasetPod) (ds *dataset.Data
146147
}
147148

148149
if dsp.BodyBytes != nil || dsp.BodyPath != "" {
149-
if body, err = repo.DatasetPodBodyFile(dsp); err != nil {
150+
if body, err = base.DatasetPodBodyFile(dsp); err != nil {
150151
return
151152
}
152153
} else {
@@ -277,7 +278,7 @@ func SaveDataset(node *p2p.QriNode, name string, ds *dataset.Dataset, data cafs.
277278
ds.Assign(userSet)
278279
}
279280

280-
if err = PrepareViz(ds); err != nil {
281+
if err = base.PrepareViz(ds); err != nil {
281282
return
282283
}
283284

@@ -290,7 +291,7 @@ func SaveDataset(node *p2p.QriNode, name string, ds *dataset.Dataset, data cafs.
290291
// memRepo should be able to wrap another repo & check that before returning not found
291292
}
292293

293-
if ref, err = repo.CreateDataset(r, name, ds, data, pin); err != nil {
294+
if ref, err = base.CreateDataset(r, name, ds, data, pin); err != nil {
294295
return
295296
}
296297

@@ -303,7 +304,7 @@ func SaveDataset(node *p2p.QriNode, name string, ds *dataset.Dataset, data cafs.
303304
r.LogEvent(repo.ETDsPinned, ref)
304305
}
305306

306-
if err = ReadDataset(r, &ref); err != nil {
307+
if err = base.ReadDataset(r, &ref); err != nil {
307308
return
308309
}
309310

@@ -347,7 +348,7 @@ func AddDataset(node *p2p.QriNode, ref *repo.DatasetRef) (err error) {
347348
return fmt.Errorf("error fetching file: %s", err.Error())
348349
}
349350

350-
if err = PinDataset(r, *ref); err != nil {
351+
if err = base.PinDataset(r, *ref); err != nil {
351352
log.Debug(err.Error())
352353
return fmt.Errorf("error pinning root key: %s", err.Error())
353354
}
@@ -367,20 +368,6 @@ func AddDataset(node *p2p.QriNode, ref *repo.DatasetRef) (err error) {
367368
return
368369
}
369370

370-
// ReadDataset grabs a dataset from the store
371-
func ReadDataset(r repo.Repo, ref *repo.DatasetRef) (err error) {
372-
if store := r.Store(); store != nil {
373-
ds, e := dsfs.LoadDataset(store, datastore.NewKey(ref.Path))
374-
if e != nil {
375-
return e
376-
}
377-
ref.Dataset = ds.Encode()
378-
return
379-
}
380-
381-
return datastore.ErrNotFound
382-
}
383-
384371
// RenameDataset alters a dataset name
385372
func RenameDataset(node *p2p.QriNode, current, new *repo.DatasetRef) (err error) {
386373
r := node.Repo
@@ -410,24 +397,6 @@ func RenameDataset(node *p2p.QriNode, current, new *repo.DatasetRef) (err error)
410397
return r.LogEvent(repo.ETDsRenamed, *new)
411398
}
412399

413-
// PinDataset marks a dataset for retention in a store
414-
func PinDataset(r repo.Repo, ref repo.DatasetRef) error {
415-
if pinner, ok := r.Store().(cafs.Pinner); ok {
416-
pinner.Pin(datastore.NewKey(ref.Path), true)
417-
return r.LogEvent(repo.ETDsPinned, ref)
418-
}
419-
return repo.ErrNotPinner
420-
}
421-
422-
// UnpinDataset unmarks a dataset for retention in a store
423-
func UnpinDataset(r repo.Repo, ref repo.DatasetRef) error {
424-
if pinner, ok := r.Store().(cafs.Pinner); ok {
425-
pinner.Unpin(datastore.NewKey(ref.Path), true)
426-
return r.LogEvent(repo.ETDsUnpinned, ref)
427-
}
428-
return repo.ErrNotPinner
429-
}
430-
431400
// DeleteDataset removes a dataset from the store
432401
func DeleteDataset(node *p2p.QriNode, ref *repo.DatasetRef) (err error) {
433402
r := node.Repo
@@ -455,7 +424,7 @@ func DeleteDataset(node *p2p.QriNode, ref *repo.DatasetRef) (err error) {
455424
return err
456425
}
457426

458-
if err = UnpinDataset(r, *ref); err != nil && err != repo.ErrNotPinner {
427+
if err = base.UnpinDataset(r, *ref); err != nil && err != repo.ErrNotPinner {
459428
return err
460429
}
461430

actions/dataset_head.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@ package actions
33
import (
44
"fmt"
55

6+
"github.com/qri-io/qri/base"
67
"github.com/qri-io/qri/p2p"
78
"github.com/qri-io/qri/repo"
89
)
@@ -22,5 +23,5 @@ func DatasetHead(node *p2p.QriNode, ds *repo.DatasetRef) error {
2223
return node.RequestDataset(ds)
2324
}
2425

25-
return ReadDataset(node.Repo, ds)
26+
return base.ReadDataset(node.Repo, ds)
2627
}

actions/dataset_test.go

Lines changed: 4 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ import (
88

99
"github.com/qri-io/cafs"
1010
"github.com/qri-io/dataset/dstest"
11+
"github.com/qri-io/qri/base"
1112
"github.com/qri-io/qri/config"
1213
"github.com/qri-io/qri/p2p"
1314
"github.com/qri-io/qri/p2p/test"
@@ -132,7 +133,6 @@ func DatasetTests(t *testing.T, rmf RepoMakerFunc) {
132133
testSaveDataset,
133134
testReadDataset,
134135
testRenameDataset,
135-
testDatasetPinning,
136136
testDeleteDataset,
137137
testEventsLog,
138138
} {
@@ -170,7 +170,7 @@ func createDataset(t *testing.T, rmf RepoMakerFunc) (*p2p.QriNode, repo.DatasetR
170170
func testReadDataset(t *testing.T, rmf RepoMakerFunc) {
171171
n, ref := createDataset(t, rmf)
172172

173-
if err := ReadDataset(n.Repo, &ref); err != nil {
173+
if err := base.ReadDataset(n.Repo, &ref); err != nil {
174174
t.Error(err.Error())
175175
return
176176
}
@@ -194,7 +194,7 @@ func testRenameDataset(t *testing.T, rmf RepoMakerFunc) {
194194
return
195195
}
196196

197-
if err := ReadDataset(node.Repo, b); err != nil {
197+
if err := base.ReadDataset(node.Repo, b); err != nil {
198198
t.Error(err.Error())
199199
return
200200
}
@@ -205,46 +205,6 @@ func testRenameDataset(t *testing.T, rmf RepoMakerFunc) {
205205
}
206206
}
207207

208-
func testDatasetPinning(t *testing.T, rmf RepoMakerFunc) {
209-
node, ref := createDataset(t, rmf)
210-
211-
if err := PinDataset(node.Repo, ref); err != nil {
212-
if err == repo.ErrNotPinner {
213-
t.Log("repo store doesn't support pinning")
214-
} else {
215-
t.Error(err.Error())
216-
return
217-
}
218-
}
219-
220-
tc, err := dstest.NewTestCaseFromDir(testdataPath("counter"))
221-
if err != nil {
222-
t.Error(err.Error())
223-
return
224-
}
225-
226-
ref2, _, err := SaveDataset(node, tc.Name, tc.Input, tc.BodyFile(), nil, false, false)
227-
if err != nil {
228-
t.Error(err.Error())
229-
return
230-
}
231-
232-
if err := PinDataset(node.Repo, ref2); err != nil && err != repo.ErrNotPinner {
233-
t.Error(err.Error())
234-
return
235-
}
236-
237-
if err := UnpinDataset(node.Repo, ref); err != nil && err != repo.ErrNotPinner {
238-
t.Error(err.Error())
239-
return
240-
}
241-
242-
if err := UnpinDataset(node.Repo, ref2); err != nil && err != repo.ErrNotPinner {
243-
t.Error(err.Error())
244-
return
245-
}
246-
}
247-
248208
func testDeleteDataset(t *testing.T, rmf RepoMakerFunc) {
249209
node, ref := createDataset(t, rmf)
250210

@@ -268,7 +228,7 @@ func testEventsLog(t *testing.T, rmf RepoMakerFunc) {
268228
return
269229
}
270230

271-
if err := PinDataset(node.Repo, *b); err != nil {
231+
if err := base.PinDataset(node.Repo, *b); err != nil {
272232
if err == repo.ErrNotPinner {
273233
pinner = false
274234
} else {

actions/list_datasets.go

Lines changed: 2 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@ package actions
33
import (
44
"fmt"
55

6-
"github.com/ipfs/go-datastore"
7-
"github.com/qri-io/dataset/dsfs"
6+
"github.com/qri-io/qri/base"
87
"github.com/qri-io/qri/p2p"
98
"github.com/qri-io/qri/repo"
109
"github.com/qri-io/qri/repo/profile"
@@ -72,29 +71,5 @@ func ListDatasets(node *p2p.QriNode, ds *repo.DatasetRef, limit, offset int, RPC
7271
return
7372
}
7473

75-
store := r.Store()
76-
res, err = r.References(limit, offset)
77-
if err != nil {
78-
log.Debug(err.Error())
79-
return nil, fmt.Errorf("error getting dataset list: %s", err.Error())
80-
}
81-
82-
renames := repo.NewNeedPeernameRenames()
83-
for i, ref := range res {
84-
// May need to change peername.
85-
if err := repo.CanonicalizeProfile(r, &res[i], &renames); err != nil {
86-
return nil, fmt.Errorf("error canonicalizing dataset peername: %s", err.Error())
87-
}
88-
89-
ds, err := dsfs.LoadDataset(store, datastore.NewKey(ref.Path))
90-
if err != nil {
91-
return nil, fmt.Errorf("error loading path: %s, err: %s", ref.Path, err.Error())
92-
}
93-
res[i].Dataset = ds.Encode()
94-
if RPC {
95-
res[i].Dataset.Structure.Schema = nil
96-
}
97-
}
98-
// TODO: If renames.Renames is non-empty, apply it to r
99-
return
74+
return base.ListDatasets(node.Repo, limit, offset, RPC)
10075
}

actions/log.go

Lines changed: 2 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,7 @@
11
package actions
22

33
import (
4-
"github.com/ipfs/go-datastore"
5-
"github.com/qri-io/dataset/dsfs"
4+
"github.com/qri-io/qri/base"
65
"github.com/qri-io/qri/p2p"
76
"github.com/qri-io/qri/repo"
87
)
@@ -18,26 +17,5 @@ func DatasetLog(node *p2p.QriNode, ref repo.DatasetRef, limit, offset int) (rlog
1817
return node.RequestDatasetLog(ref, limit, offset)
1918
}
2019

21-
for {
22-
ds, e := dsfs.LoadDataset(node.Repo.Store(), datastore.NewKey(ref.Path))
23-
if e != nil {
24-
return nil, e
25-
}
26-
ref.Dataset = ds.Encode()
27-
28-
offset--
29-
if offset > 0 {
30-
continue
31-
}
32-
33-
rlog = append(rlog, ref)
34-
35-
limit--
36-
if limit == 0 || ref.Dataset.PreviousPath == "" {
37-
break
38-
}
39-
ref.Path = ref.Dataset.PreviousPath
40-
}
41-
42-
return rlog, nil
20+
return base.DatasetLog(node.Repo, ref, limit, offset)
4321
}

actions/render_test.go

Lines changed: 0 additions & 16 deletions
This file was deleted.

base/base.go

Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
// Package base defines business that operates on local data
2+
// its main job is composing APIs from the lower half of our tech stack,
3+
// providing uniform functions for higher up packages, mainly p2p and actions.
4+
// p2p and actions should use base as the only way of operating on the local repo.
5+
// Here's some ascii art to clarify the stack:
6+
//
7+
// ┌───────────────┐ ┌───────────────┐
8+
// │ cmd │ │ api │
9+
// └───────────────┘ └───────────────┘
10+
// ┌─────────────────────────────────┐
11+
// │ lib │
12+
// └─────────────────────────────────┘
13+
// ┌─────────────────────────────────┐
14+
// │ actions │
15+
// └─────────────────────────────────┘
16+
// ┌───────────────────────┐
17+
// │ p2p │
18+
// └───────────────────────┘
19+
// ┌─────────────────────────────────┐
20+
// │ base │ <-- you are here
21+
// └─────────────────────────────────┘
22+
// ┌──────┐ ┌──────┐ ┌──────┐ ┌──────┐
23+
// │ repo │ │ dsfs │ │ cafs │ │ ... │
24+
// └──────┘ └──────┘ └──────┘ └──────┘
25+
//
26+
// There are packages omitted from this diagram, but these are the vitals.
27+
// base functions mainly work with repo.Repo instances, using repo interface methods
28+
// and related packages to do their work. This is part of a larger pattern of
29+
// having actions rely on lower level interfaces wherever possible to enhance
30+
// configurability
31+
package base
32+
33+
import (
34+
golog "github.com/ipfs/go-log"
35+
)
36+
37+
var log = golog.Logger("base")

0 commit comments

Comments
 (0)