Skip to content

Commit

Permalink
refactor: remove CanonicalizeDatasetRef from parts of base and all of FSI packages
Browse files Browse the repository at this point in the history

Merge pull request #1359 from qri-io/refactor_remove_canonicalize_dsref
  • Loading branch information
b5 authored May 25, 2020
2 parents f2af23c + 324a428 commit 321e0d3
Show file tree
Hide file tree
Showing 42 changed files with 682 additions and 607 deletions.
32 changes: 6 additions & 26 deletions api/datasets.go
Original file line number Diff line number Diff line change
Expand Up @@ -522,39 +522,19 @@ func (h *DatasetHandlers) removeHandler(w http.ResponseWriter, r *http.Request)
util.WriteResponse(w, res)
}

// RenameReqParams is an encoding struct
// its intent is to be a more user-friendly structure for the api endpoint
// that will map to and from the lib.RenameParams struct
type RenameReqParams struct {
Current string
New string
}

func (h DatasetHandlers) renameHandler(w http.ResponseWriter, r *http.Request) {
reqParams := &RenameReqParams{}
p := &lib.RenameParams{}
if r.Header.Get("Content-Type") == "application/json" {
if err := json.NewDecoder(r.Body).Decode(reqParams); err != nil {
if err := json.NewDecoder(r.Body).Decode(p); err != nil {
util.WriteErrResponse(w, http.StatusBadRequest, err)
return
}
} else {
reqParams.Current = r.URL.Query().Get("current")
reqParams.New = r.URL.Query().Get("new")
}
current, err := repo.ParseDatasetRef(reqParams.Current)
if err != nil {
util.WriteErrResponse(w, http.StatusBadRequest, fmt.Errorf("error parsing current param: %s", err.Error()))
return
}
next, err := repo.ParseDatasetRef(reqParams.New)
if err != nil {
util.WriteErrResponse(w, http.StatusBadRequest, fmt.Errorf("error parsing new param: %s", err.Error()))
return
}
p = &lib.RenameParams{
Current: reporef.ConvertToDsref(current),
Next: reporef.ConvertToDsref(next),
p.Current = r.URL.Query().Get("current")
p.Next = r.URL.Query().Get("new")
if p.Next == "" {
p.Next = r.URL.Query().Get("next")
}
}

res := &dsref.VersionInfo{}
Expand Down
Binary file modified api/testdata/api.snapshot
Binary file not shown.
5 changes: 4 additions & 1 deletion base/body_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,10 +20,13 @@ func TestReadBody(t *testing.T) {
r := newTestRepo(t)
ref := addCitiesDataset(t, r)

ds, err := ReadDatasetPath(ctx, r, ref.String())
ds, err := ReadDataset(ctx, r, ref.Path)
if err != nil {
t.Fatal(err)
}
if err = OpenDataset(ctx, r.Filesystem(), ds); err != nil {
t.Fatal(err)
}

data, err := ReadBody(ds, dataset.JSONDataFormat, nil, 1, 1, false)
if err != nil {
Expand Down
48 changes: 8 additions & 40 deletions base/dataset.go
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ func ListDatasets(ctx context.Context, r repo.Repo, term string, limit, offset i
}

if showVersions {
dsVersions, err := DatasetLog(ctx, r, ref, 1000000, 0, false)
dsVersions, err := DatasetLog(ctx, r, reporef.ConvertToDsref(ref), 1000000, 0, false)
if err != nil {
return nil, err
}
Expand Down Expand Up @@ -273,48 +273,16 @@ func FetchDataset(ctx context.Context, r repo.Repo, ref *reporef.DatasetRef, pin
return
}

// ReadDatasetPath takes a path string, parses, canonicalizes, loads a dataset pointer, and opens the file
// The medium-term goal here is to obfuscate use of reporef.DatasetRef, which we're hoping to deprecate
func ReadDatasetPath(ctx context.Context, r repo.Repo, path string) (ds *dataset.Dataset, err error) {
ref, err := repo.ParseDatasetRef(path)
if err != nil {
return nil, fmt.Errorf("'%s' is not a valid dataset reference", path)
}

if err = repo.CanonicalizeDatasetRef(r, &ref); err != nil {
return
}

loaded, err := dsfs.LoadDataset(ctx, r.Store(), ref.Path)
if err != nil {
return nil, fmt.Errorf("error loading dataset")
}
loaded.Name = ref.Name
loaded.Peername = ref.Peername
ds = loaded

err = OpenDataset(ctx, r.Filesystem(), ds)
return
}

// ReadDataset grabs a dataset from the store
func ReadDataset(ctx context.Context, r repo.Repo, ref *reporef.DatasetRef) (err error) {
if err = repo.CanonicalizeDatasetRef(r, ref); err != nil {
return
}

if store := r.Store(); store != nil {
ds, e := dsfs.LoadDataset(ctx, store, ref.Path)
if e != nil {
return e
}
ds.Name = ref.Name
ds.Peername = ref.Peername
ref.Dataset = ds
return
//
// Deprecated - use LoadDataset instead
func ReadDataset(ctx context.Context, r repo.Repo, path string) (ds *dataset.Dataset, err error) {
store := r.Store()
if store == nil {
return nil, cafs.ErrNotFound
}

return cafs.ErrNotFound
return dsfs.LoadDataset(ctx, store, path)
}

// PinDataset marks a dataset for retention in a store
Expand Down
66 changes: 34 additions & 32 deletions base/log.go
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ import (
"github.com/qri-io/qri/dsref"
"github.com/qri-io/qri/logbook"
"github.com/qri-io/qri/repo"
"github.com/qri-io/qri/repo/profile"
reporef "github.com/qri-io/qri/repo/ref"
)

Expand All @@ -23,9 +24,9 @@ const TimeoutDuration = 100 * time.Millisecond
var ErrDatasetLogTimeout = fmt.Errorf("datasetLog: timeout")

// DatasetLog fetches the change version history of a dataset
func DatasetLog(ctx context.Context, r repo.Repo, ref reporef.DatasetRef, limit, offset int, loadDatasets bool) ([]DatasetLogItem, error) {
func DatasetLog(ctx context.Context, r repo.Repo, ref dsref.Ref, limit, offset int, loadDatasets bool) ([]DatasetLogItem, error) {
if book := r.Logbook(); book != nil {
if items, err := book.Items(ctx, reporef.ConvertToDsref(ref), offset, limit); err == nil {
if items, err := book.Items(ctx, ref, offset, limit); err == nil {
// logs are ok with history not existing. This keeps FSI interaction behaviour consistent
// TODO (b5) - we should consider having "empty history" be an ok state, instead of marking as an error
if len(items) == 0 {
Expand Down Expand Up @@ -54,19 +55,30 @@ func DatasetLog(ctx context.Context, r repo.Repo, ref reporef.DatasetRef, limit,
}
}

rlog, err := DatasetLogFromHistory(ctx, r, ref, offset, limit, loadDatasets)
if ref.Path == "" {
return nil, fmt.Errorf("cannot build history: %w", dsref.ErrPathRequired)
}

datasets, err := StoredHistoricalDatasets(ctx, r, ref.Path, offset, limit, loadDatasets)
if err != nil {
return nil, err
}
items := make([]DatasetLogItem, len(rlog))
for i, dref := range rlog {
items[i] = reporef.ConvertToDatasetLogItem(&dref)
items := make([]DatasetLogItem, len(datasets))
for i, ds := range datasets {
ref := &reporef.DatasetRef{
// TODO(b5): using the ref.Username & ref.ProfileID here is a hack that assumes single-author histories
Peername: ref.Username,
ProfileID: profile.IDB58DecodeOrEmpty(ref.ProfileID),
Name: ref.Name,
Dataset: ds,
}
items[i] = reporef.ConvertToDatasetLogItem(ref)
}

// add a history entry b/c we didn't have one, but repo didn't error
if pro, err := r.Profile(); err == nil && ref.Peername == pro.Peername {
if pro, err := r.Profile(); err == nil && ref.Username == pro.Peername {
go func() {
if err := constructDatasetLogFromHistory(context.Background(), r, reporef.ConvertToDsref(ref)); err != nil {
if err := constructDatasetLogFromHistory(context.Background(), r, ref); err != nil {
log.Errorf("constructDatasetLogFromHistory: %s", err)
}
}()
Expand All @@ -75,45 +87,40 @@ func DatasetLog(ctx context.Context, r repo.Repo, ref reporef.DatasetRef, limit,
return items, err
}

// DatasetLogFromHistory fetches the history of changes to a dataset by walking
// StoredHistoricalDatasets fetches the history of changes to a dataset by walking
// backwards through dataset commits. if loadDatasets is true, dataset
// information will be populated
// TODO(dlong): Convert to use dsref.Ref (for input) and dsref.VersionInfo (for output)
func DatasetLogFromHistory(ctx context.Context, r repo.Repo, ref reporef.DatasetRef, offset, limit int, loadDatasets bool) (rlog []reporef.DatasetRef, err error) {
if err := repo.CanonicalizeDatasetRef(r, &ref); err != nil {
return nil, err
}

func StoredHistoricalDatasets(ctx context.Context, r repo.Repo, headPath string, offset, limit int, loadDatasets bool) (log []*dataset.Dataset, err error) {
timeoutCtx, cancel := context.WithTimeout(ctx, TimeoutDuration)
defer cancel()

versions := make(chan reporef.DatasetRef)
versions := make(chan *dataset.Dataset)
done := make(chan struct{})
path := headPath
go func() {
for {
var ds *dataset.Dataset
if loadDatasets {
if ds, err = dsfs.LoadDataset(timeoutCtx, r.Store(), ref.Path); err != nil {
if ds, err = dsfs.LoadDataset(timeoutCtx, r.Store(), path); err != nil {
return
}
} else {
if ds, err = dsfs.LoadDatasetRefs(timeoutCtx, r.Store(), ref.Path); err != nil {
if ds, err = dsfs.LoadDatasetRefs(timeoutCtx, r.Store(), path); err != nil {
return
}
}
ref.Dataset = ds

if offset <= 0 {
versions <- ref
versions <- ds
limit--
if limit == 0 {
break
}
}
if ref.Dataset.PreviousPath == "" {
if ds.PreviousPath == "" {
break
}
ref.Path = ref.Dataset.PreviousPath
path = ds.PreviousPath
offset--
}
done <- struct{}{}
Expand All @@ -122,29 +129,24 @@ func DatasetLogFromHistory(ctx context.Context, r repo.Repo, ref reporef.Dataset
for {
select {
case ref := <-versions:
rlog = append(rlog, ref)
log = append(log, ref)
case <-done:
return rlog, nil
return log, nil
case <-timeoutCtx.Done():
return rlog, ErrDatasetLogTimeout
return log, ErrDatasetLogTimeout
case <-ctx.Done():
return rlog, ErrDatasetLogTimeout
return log, ErrDatasetLogTimeout
}
}
}

// constructDatasetLogFromHistory constructs a log for a name if one doesn't
// exist.
func constructDatasetLogFromHistory(ctx context.Context, r repo.Repo, ref dsref.Ref) error {
repoRef := reporef.DatasetRef{Peername: ref.Username, Name: ref.Name, Path: ref.Path}
refs, err := DatasetLogFromHistory(ctx, r, repoRef, 0, 1000000, true)
history, err := StoredHistoricalDatasets(ctx, r, ref.Path, 0, 1000000, true)
if err != nil {
return err
}
history := make([]*dataset.Dataset, len(refs))
for i, ref := range refs {
history[i] = ref.Dataset
}

book := r.Logbook()
return book.ConstructDatasetLog(ctx, ref, history)
Expand Down
50 changes: 18 additions & 32 deletions base/log_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -22,16 +22,15 @@ func TestDatasetLog(t *testing.T) {
ctx := context.Background()
mr := newTestRepo(t)
addCitiesDataset(t, mr)
updateCitiesDataset(t, mr, "")
cities := updateCitiesDataset(t, mr, "")

ref := repo.MustParseDatasetRef("me/not_a_dataset")
ref := dsref.MustParse("me/not_a_dataset")
log, err := DatasetLog(ctx, mr, ref, -1, 0, true)
if err == nil {
t.Errorf("expected lookup for nonexistent log to fail")
}

ref = repo.MustParseDatasetRef("me/cities")
if log, err = DatasetLog(ctx, mr, ref, 1, 0, true); err != nil {
if log, err = DatasetLog(ctx, mr, reporef.ConvertToDsref(cities), 1, 0, true); err != nil {
t.Error(err.Error())
}
if len(log) != 1 {
Expand Down Expand Up @@ -86,7 +85,8 @@ func TestDatasetLogForeign(t *testing.T) {
initID := foreignBuilder.DatasetInit(ctx, t, ref.Name)
// NOTE: Need to assign ProfileID because nothing is resolving the username
ref.ProfileID = otherPeerInfo.EncodedPeerID
foreignBuilder.Commit(ctx, t, initID, "their commit", "QmExample")
ref.Path = "QmExample"
foreignBuilder.Commit(ctx, t, initID, "their commit", ref.Path)
foreignBook := foreignBuilder.Logbook()
foreignLog, err := foreignBook.UserDatasetRef(ctx, ref)
if err != nil {
Expand All @@ -102,14 +102,7 @@ func TestDatasetLogForeign(t *testing.T) {
book := builder.Logbook()
mr.SetLogbook(book)

// Get the log, test against expectation
r := reporef.DatasetRef{
Peername: ref.Username,
ProfileID: profile.IDB58DecodeOrEmpty(ref.ProfileID),
Name: ref.Name,
Path: ref.Path,
}
log, err := DatasetLog(ctx, mr, r, 1, 0, true)
log, err := DatasetLog(ctx, mr, ref, 1, 0, true)
if err != nil {
t.Error(err)
}
Expand Down Expand Up @@ -156,7 +149,7 @@ func TestDatasetLogForeignTimeout(t *testing.T) {
}

// Get a dataset log, which should timeout with an error
_, err = DatasetLog(ctx, mr, datasetRef, -1, 0, true)
_, err = DatasetLog(ctx, mr, reporef.ConvertToDsref(datasetRef), -1, 0, true)
if err == nil {
t.Fatal("expected lookup for foreign log to fail")
}
Expand All @@ -166,34 +159,34 @@ func TestDatasetLogForeignTimeout(t *testing.T) {
}
}

func TestDatasetLogFromHistory(t *testing.T) {
func TestStoredHistoricalDatasets(t *testing.T) {
ctx := context.Background()
r := newTestRepo(t)
addCitiesDataset(t, r)
head := updateCitiesDataset(t, r, "")
expectLen := 2

dlog, err := DatasetLogFromHistory(ctx, r, head, 0, 100, true)
datasets, err := StoredHistoricalDatasets(ctx, r, head.Path, 0, 100, true)
if err != nil {
t.Error(err)
}
if len(dlog) != expectLen {
t.Fatalf("log length mismatch. expected: %d, got: %d", expectLen, len(dlog))
if len(datasets) != expectLen {
t.Fatalf("log length mismatch. expected: %d, got: %d", expectLen, len(datasets))
}
if dlog[0].Dataset.Meta.Title != head.Dataset.Meta.Title {
if datasets[0].Meta.Title != head.Dataset.Meta.Title {
t.Errorf("expected log with loadDataset == true to populate datasets")
}

dlog, err = DatasetLogFromHistory(ctx, r, head, 0, 100, false)
datasets, err = StoredHistoricalDatasets(ctx, r, head.Path, 0, 100, false)
if err != nil {
t.Error(err)
}

if len(dlog) != expectLen {
t.Errorf("log length mismatch. expected: %d, got: %d", expectLen, len(dlog))
if len(datasets) != expectLen {
t.Errorf("log length mismatch. expected: %d, got: %d", expectLen, len(datasets))
}
if dlog[0].Dataset.Meta.Title != "" {
t.Errorf("expected log with loadDataset == false to not load a dataset. got: %v", dlog[0].Dataset)
if datasets[0].Meta.Title != "" {
t.Errorf("expected log with loadDataset == false to not load a dataset. got: %v", datasets[0])
}
}

Expand Down Expand Up @@ -254,15 +247,8 @@ func TestConstructDatasetLogFromHistory(t *testing.T) {
},
}

r := reporef.DatasetRef{
Peername: "peer",
ProfileID: profile.IDB58DecodeOrEmpty("QmZePf5LeXow3RW5U1AgEiNbW46YnRGhZ7HPvm1UmPFPwt"),
Name: "cities",
Path: "/map/QmaTfAQNUKqtPe2EUcCELJNprRLJWswsVPHHNhiKgZoTMR",
}

// confirm history exists:
log, err := DatasetLog(ctx, mr, r, 100, 0, true)
log, err := DatasetLog(ctx, mr, cities, 100, 0, true)
if err != nil {
t.Error(err)
}
Expand Down
Loading

0 comments on commit 321e0d3

Please sign in to comment.