
refactor(dsfs): harmonize errors & ensure use of %w in fmt.Errorf
Declare dsfs.ErrNoChanges & make sure it's properly wrapped up the call stack.
We'll use this in lib for proper no-change detection.
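
For context on why each fmt.Errorf call in this change switches from %s to %w, here is a minimal standalone sketch (not code from this commit): %w keeps the original error in the chain so callers can match it with errors.Is, while %s flattens it to text and breaks that check.

package main

import (
	"errors"
	"fmt"
)

// errNoChanges stands in for dsfs.ErrNoChanges in this sketch.
var errNoChanges = errors.New("no changes")

func main() {
	// %s formats the error as a string; the original value is lost.
	flat := fmt.Errorf("saving dataset: %s", errNoChanges.Error())
	fmt.Println(errors.Is(flat, errNoChanges)) // false

	// %w wraps the error, so errors.Is can still find the sentinel.
	wrapped := fmt.Errorf("saving dataset: %w", errNoChanges)
	fmt.Println(errors.Is(wrapped, errNoChanges)) // true
}
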
b5 committed Feb 17, 2021
1 parent 8e69e5e commit 32dfe7d
Showing 9 changed files with 35 additions and 39 deletions.
6 changes: 3 additions & 3 deletions base/dsfs/commit.go
@@ -97,7 +97,7 @@ func commitFileAddFunc(privKey crypto.PrivKey, pub event.Publisher) addWriteFile
signedBytes, err := privKey.Sign(ds.SigningBytes())
if err != nil {
log.Debug(err.Error())
- return nil, fmt.Errorf("error signing commit title: %s", err.Error())
+ return nil, fmt.Errorf("error signing commit title: %w", err)
}
ds.Commit.Signature = base64.StdEncoding.EncodeToString(signedBytes)
return JSONFile(PackageFileCommit.Filename(), ds.Commit)
@@ -173,7 +173,7 @@ func confirmByteChangesExist(ds, prev *dataset.Dataset, added map[string]string,
}
}

- return fmt.Errorf("no changes")
+ return ErrNoChanges
}

// ensureCommitTitleAndMessage creates the commit and title, message, skipping
@@ -423,7 +423,7 @@ func generateCommitDescriptions(ctx context.Context, fs qfs.Filesystem, ds, prev
if forceIfNoChanges {
return "forced update", "forced update", nil
}
- return "", "", fmt.Errorf("no changes")
+ return "", "", ErrNoChanges
}

log.Debugf("set friendly diff descriptions. shortTitle=%q message=%q", shortTitle, longMessage)
4 changes: 2 additions & 2 deletions base/dsfs/compute_fields.go
@@ -329,7 +329,7 @@ func (cff *computeFieldsFile) flushBatch(ctx context.Context, buf *dsio.EntryBuf

if e := buf.Close(); e != nil {
log.Debugf("closing batch buffer: %s", e)
- return 0, fmt.Errorf("error closing buffer: %s", e.Error())
+ return 0, fmt.Errorf("error closing buffer: %w", e)
}

if len(buf.Bytes()) == 0 {
@@ -339,7 +339,7 @@ func (cff *computeFieldsFile) flushBatch(ctx context.Context, buf *dsio.EntryBuf

var doc interface{}
if err := json.Unmarshal(buf.Bytes(), &doc); err != nil {
- return 0, fmt.Errorf("error parsing JSON bytes: %s", err.Error())
+ return 0, fmt.Errorf("error parsing JSON bytes: %w", err)
}
validationState := jsch.Validate(ctx, doc)

8 changes: 0 additions & 8 deletions base/dsfs/doc.go

This file was deleted.

27 changes: 27 additions & 0 deletions base/dsfs/dsfs.go
@@ -0,0 +1,27 @@
// Package dsfs glues datsets to cafs (content-addressed-file-system)
package dsfs

import (
"fmt"

logger "github.com/ipfs/go-log"
)

var (
log = logger.Logger("dsfs")
// ErrNoChanges indicates a save failed because no values changed, and
// force-saving was disabled
ErrNoChanges = fmt.Errorf("no changes")
// ErrNoReadme is the error for asking a dataset without a readme component
// for readme info
ErrNoReadme = fmt.Errorf("this dataset has no readme component")
// ErrNoTransform is the error for asking a dataset without a tranform
// component for transform info
ErrNoTransform = fmt.Errorf("this dataset has no transform component")
// ErrNoViz is the error for asking a dataset without a viz component for
// viz info
ErrNoViz = fmt.Errorf("this dataset has no viz component")
// ErrStrictMode indicates a dataset failed validation when it is required to
// pass (Structure.Strict == true)
ErrStrictMode = fmt.Errorf("dataset body did not validate against schema in strict-mode")
)
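
With the sentinels gathered in one place, code further up the stack (the lib-level no-change detection mentioned above) can recognize ErrNoChanges through any number of %w wrapping layers. A hypothetical sketch; the layer names below are placeholders, not the actual qri call chain:

package main

import (
	"errors"
	"fmt"
)

// errNoChanges stands in for dsfs.ErrNoChanges.
var errNoChanges = errors.New("no changes")

// Placeholder layers: each wraps the error it receives with %w,
// mirroring how a dsfs error could travel up through base and lib.
func generateDescriptions() error { return errNoChanges }

func createDataset() error {
	if err := generateDescriptions(); err != nil {
		return fmt.Errorf("generating commit descriptions: %w", err)
	}
	return nil
}

func saveDataset() error {
	if err := createDataset(); err != nil {
		return fmt.Errorf("saving dataset: %w", err)
	}
	return nil
}

func main() {
	err := saveDataset()
	// errors.Is unwraps every %w layer, so a caller can treat a
	// no-change save as a benign outcome instead of a failure.
	if errors.Is(err, errNoChanges) {
		fmt.Println("no changes to save")
	}
}
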
7 changes: 2 additions & 5 deletions base/dsfs/readme.go
@@ -15,7 +15,7 @@ func DerefReadme(ctx context.Context, store qfs.Filesystem, ds *dataset.Dataset)
rm, err := loadReadme(ctx, store, ds.Readme.Path)
if err != nil {
log.Debug(err.Error())
- return fmt.Errorf("loading dataset readme: %s", err)
+ return fmt.Errorf("loading dataset readme: %w", err)
}
rm.Path = ds.Readme.Path
ds.Readme = rm
@@ -28,14 +28,11 @@ func loadReadme(ctx context.Context, fs qfs.Filesystem, path string) (st *datase
data, err := fileBytes(fs.Get(ctx, path))
if err != nil {
log.Debug(err.Error())
- return nil, fmt.Errorf("error loading readme file: %s", err.Error())
+ return nil, fmt.Errorf("error loading readme file: %w", err)
}
return dataset.UnmarshalReadme(data)
}

- // ErrNoReadme is the error for asking a dataset without a readme component for readme info
- var ErrNoReadme = fmt.Errorf("this dataset has no readme component")

// LoadReadmeScript loads script data from a dataset path if the given dataset has a readme script is specified
// the returned qfs.File will be the value of dataset.Readme.ScriptPath
func LoadReadmeScript(ctx context.Context, fs qfs.Filesystem, dspath string) (qfs.File, error) {
4 changes: 0 additions & 4 deletions base/dsfs/structure.go
@@ -9,10 +9,6 @@ import (
"github.com/qri-io/qfs"
)

- // ErrStrictMode indicates a dataset failed validation when it is required to
- // pass (Structure.Strict == true)
- var ErrStrictMode = fmt.Errorf("dataset body did not validate against schema in strict-mode")

// DerefStructure derferences a dataset's structure element if required
// should be a no-op if ds.Structure is nil or isn't a reference
func DerefStructure(ctx context.Context, store qfs.Filesystem, ds *dataset.Dataset) error {
3 changes: 0 additions & 3 deletions base/dsfs/transform.go
@@ -35,9 +35,6 @@ func loadTransform(ctx context.Context, fs qfs.Filesystem, path string) (q *data
return dataset.UnmarshalTransform(data)
}

- // ErrNoTransform is the error for asking a dataset without a tranform component for viz info
- var ErrNoTransform = fmt.Errorf("this dataset has no transform component")

// LoadTransformScript loads transform script data from a dataset path if the given dataset has a transform script specified
// the returned qfs.File will be the value of dataset.Transform.ScriptPath
// TODO - this is broken, assumes file is JSON. fix & test or depricate
3 changes: 0 additions & 3 deletions base/dsfs/viz.go
@@ -35,9 +35,6 @@ func loadViz(ctx context.Context, fs qfs.Filesystem, path string) (st *dataset.V
return dataset.UnmarshalViz(data)
}

- // ErrNoViz is the error for asking a dataset without a viz component for viz info
- var ErrNoViz = fmt.Errorf("this dataset has no viz component")

func vizFilesAddFunc(fs qfs.Filesystem, sw SaveSwitches) addWriteFileFunc {
return func(ds *dataset.Dataset, wfs *writeFiles) error {
if ds.Viz == nil {
12 changes: 1 addition & 11 deletions base/save.go
@@ -8,7 +8,6 @@ import (
"github.com/qri-io/qfs"
"github.com/qri-io/qri/base/dsfs"
"github.com/qri-io/qri/dsref"
- "github.com/qri-io/qri/logbook"
"github.com/qri-io/qri/profile"
"github.com/qri-io/qri/repo"
)
@@ -53,11 +52,7 @@ func SaveDataset(ctx context.Context, r repo.Repo, writeDest qfs.Filesystem, ini
return nil, err
}

- // TODO(dustmop): Stop removing the transform once we move to apply, and untangle the
- // save command from applying a transform.
- // remove the Transform & commit
- // transform & commit must be created from scratch with each new version
- mutable.Transform = nil
+ // remove the commit. commit must be created from scratch with each new version
mutable.Commit = nil
}

@@ -108,11 +103,6 @@ func SaveDataset(ctx context.Context, r repo.Repo, writeDest qfs.Filesystem, ini
return nil, err
}

- // Write the save to logbook
- err = r.Logbook().WriteVersionSave(ctx, initID, ds)
- if err != nil && err != logbook.ErrNoLogbook {
- return ds, err
- }
return ds, nil
}

