Skip to content

Commit

Permalink
feat(cmd.Export): added --blank flag to export dataset template
Browse files Browse the repository at this point in the history
In prepping for tutorials I think it's well worth it to have users be able to do the following:
```
$ qri export --blank
blank dataset file saved to dataset.yaml

$ qri add --file=dataset.yaml
```
Feels easier.

cleaned up some docs, removed dead code in print commands.

closes #409
  • Loading branch information
b5 committed May 23, 2018
1 parent e63694f commit 27d13a0
Show file tree
Hide file tree
Showing 6 changed files with 69 additions and 96 deletions.
17 changes: 4 additions & 13 deletions cmd/add.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@ import (
var (
addDsFile string
addDsDataPath string
addDsMetaFilepath string
addDsStructureFilepath string
addDsName string
addDsTitle string
addDsMessage string
Expand Down Expand Up @@ -52,14 +50,14 @@ changes to qri.`,
Example: ` add a new dataset named annual_pop:
$ qri add --data data.csv me/annual_pop
create a dataset with a metadata and data file:
$ qri add --meta meta.json --data comics.csv me/comic_characters`,
create a dataset with a dataset data file:
$ qri add --file dataset.yaml --data comics.csv me/comic_characters`,
PreRun: func(cmd *cobra.Command, args []string) {
loadConfig()
},
Run: func(cmd *cobra.Command, args []string) {

ingest := (addDsFile != "" || addDsDataPath != "" || addDsMetaFilepath != "" || addDsStructureFilepath != "")
ingest := (addDsFile != "" || addDsDataPath != "")

if ingest {
ref, err := repo.ParseDatasetRef(args[0])
Expand Down Expand Up @@ -117,11 +115,6 @@ func initDataset(name repo.DatasetRef, cmd *cobra.Command) {
dsp.DataPath = addDsDataPath
}

// metaFile, err = loadFileIfPath(addDsMetaFilepath)
// ExitIfErr(err)
// structureFile, err = loadFileIfPath(addDsStructureFilepath)
// ExitIfErr(err)

p := &core.SaveParams{
Dataset: dsp,
Private: addDsPrivate,
Expand Down Expand Up @@ -157,12 +150,10 @@ func initDataset(name repo.DatasetRef, cmd *cobra.Command) {
}

func init() {
datasetAddCmd.Flags().StringVarP(&addDsFile, "file", "f", "", "dataset data file in either (.yaml or .json) file")
datasetAddCmd.Flags().StringVarP(&addDsFile, "file", "f", "", "dataset data file in either yaml or json format")
datasetAddCmd.Flags().StringVarP(&addDsDataPath, "data", "d", "", "path to file or url to initialize from")
datasetAddCmd.Flags().StringVarP(&addDsTitle, "title", "t", "", "commit title")
datasetAddCmd.Flags().StringVarP(&addDsMessage, "messsage", "m", "", "commit message")
// datasetAddCmd.Flags().StringVarP(&addDsStructureFilepath, "structure", "", "", "dataset structure JSON file")
// datasetAddCmd.Flags().StringVarP(&addDsMetaFilepath, "meta", "", "", "dataset metadata JSON file")
datasetAddCmd.Flags().BoolVarP(&addDsPrivate, "private", "", false, "make dataset private. WARNING: not yet implimented. Please refer to https://github.com/qri-io/qri/issues/291 for updates")
// datasetAddCmd.Flags().BoolVarP(&addDsShowValidation, "show-validation", "s", false, "display a list of validation errors upon adding")
RootCmd.AddCommand(datasetAddCmd)
Expand Down
1 change: 1 addition & 0 deletions cmd/cmd_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -165,6 +165,7 @@ func TestCommandsIntegration(t *testing.T) {
{"data", "--limit=1", "--data-format=cbor", "me/movie"},
{"validate", "me/movie"},
{"remove", "me/movie"},
{"export", "--blank", "-o"+path+"/blank_dataset.yaml"},
{"setup", "--remove"},
}

Expand Down
64 changes: 60 additions & 4 deletions cmd/export.go
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,23 @@ To export everything about a dataset, use the --dataset flag.`,
loadConfig()
},
Run: func(cmd *cobra.Command, args []string) {
if len(args) == 0 {
fmt.Println("please specify a dataset name to export")
return
}
requireNotRPC(cmd.Name())
path := cmd.Flag("output").Value.String()

if blank, err := cmd.Flags().GetBool("blank"); err == nil && blank {
if path == "" {
path = "dataset.yaml"
}
if _, err := os.Stat(path); os.IsNotExist(err) {
err := ioutil.WriteFile(path, []byte(blankYamlDataset), os.ModePerm)
ExitIfErr(err)
printSuccess("blank dataset file saved to %s", path)
} else {
ErrExit(fmt.Errorf("'%s' already exists", path))
}
return
}

r := getRepo(false)
req := core.NewDatasetRequests(r, nil)

Expand Down Expand Up @@ -194,6 +204,7 @@ To export everything about a dataset, use the --dataset flag.`,

func init() {
RootCmd.AddCommand(exportCmd)
exportCmd.Flags().BoolP("blank", "", false, "export a blank dataset YAML file, overrides all other flags except output")
exportCmd.Flags().StringP("output", "o", "", "path to write to, default is current directory")
exportCmd.Flags().BoolVarP(&exportCmdZipped, "zip", "z", false, "compress export as zip archive")
exportCmd.Flags().BoolVarP(&exportCmdAll, "all", "a", false, "export full dataset package")
Expand All @@ -208,3 +219,48 @@ func init() {
// TODO - get format conversion up & running
// exportCmd.Flags().StringP("format", "f", "csv", "set output format [csv,json,cbor]")
}


// blankYamlDataset is the skeleton template written by `qri export --blank`.
// It is a heavily commented YAML file users fill in and feed back to qri with
// `qri add --file=dataset.yaml`.
// NOTE(review): nested-key indentation restored to valid YAML — confirm
// against the template shipped with the release.
const blankYamlDataset = `# This file defines a qri dataset. Change this file, save it, then from a terminal run:
# $ qri add --file=dataset.yaml
# For more info check out https://qri.io/docs
# Name is a short name for working with this dataset without spaces for example:
# "my_dataset" or "number_of_cows_that_have_jumped_the_moon"
# name is required
name:
# Commit contains notes about this dataset at the time it was saved
# commit is optional
commit:
  title:
  message:
# Meta stores descriptive information about a dataset.
# all meta info is optional, but you should at least add a title.
# detailed, accurate metadata helps you & others find your data later.
meta:
  title:
  # description:
  # category:
  # tags:
# Structure contains the info a computer needs to interpret this dataset
# qri will figure structure out for you if you don't provide one
# and later you can change structure to do neat stuff like validate your
# data and make your data work with other datasets.
# Below is an example structure
structure:
  # Syntax in JSON format:
  # format: json
  # Schema defines the "shape" data should take, here we're saying
  # data should be an array of strings, like this: ["foo", "bar", "baz"]
  # schema:
  #   type: array
  #   items:
  #     type: string
# data itself is either a path to a file on your computer,
# or a URL that leads to the raw data
# dataPath:
`
65 changes: 2 additions & 63 deletions cmd/print.go
Original file line number Diff line number Diff line change
Expand Up @@ -46,15 +46,6 @@ func printNotYetFinished(cmd *cobra.Command) {
color.Yellow("%s command is not yet implemented", cmd.Name())
}

// func PrintValidationErrors(errs map[string][]*history.ValidationError) {
// for key, es := range errs {
// color.Yellow("%s:", key)
// for _, e := range es {
// color.Yellow("\t%s", e.String())
// }
// }
// }

func printDatasetRefInfo(i int, ref repo.DatasetRef) {
white := color.New(color.FgWhite).SprintFunc()
cyan := color.New(color.FgCyan).SprintFunc()
Expand Down Expand Up @@ -101,48 +92,19 @@ func printDatasetRefInfo(i int, ref repo.DatasetRef) {

// printPeerInfo writes a short profile summary for peer p to stdout:
// peername (with an "online" marker when connected), ID, twitter handle,
// and description, followed by a blank separator line.
// The index argument i is currently unused; kept for signature parity with
// the other print helpers.
// NOTE(review): reconstructed from a diff view — the scrape interleaved the
// removed grey-colored ID line with its blue replacement; this is the
// post-commit state (ID printed in blue).
func printPeerInfo(i int, p *config.ProfilePod) {
	white := color.New(color.FgWhite).SprintFunc()
	yellow := color.New(color.FgYellow).SprintFunc()
	blue := color.New(color.FgBlue).SprintFunc()
	if p.Online {
		fmt.Printf("%s | %s\n", white(p.Peername), yellow("online"))
	} else {
		fmt.Printf("%s\n", white(p.Peername))
	}
	fmt.Printf("%s\n", blue(p.ID))
	fmt.Printf("%s\n", p.Twitter)
	fmt.Printf("%s\n", p.Description)
	fmt.Println("")
}

// func PrintDatasetDetailedInfo(ds *dataset.Dataset) {
// fmt.Println("")
// white := color.New(color.FgWhite).SprintFunc()
// cyan := color.New(color.FgCyan).SprintFunc()
// blue := color.New(color.FgBlue).SprintFunc()
// fmt.Printf("\taddress: %s\n", white(ds.Address))
// fmt.Printf("\tname: %s\n", white(ds.Name))
// if ds.Description != "" {
// fmt.Printf("\tdescription: %s\n", white(ds.Description))
// }

// fmt.Println("\tfields:")
// for _, f := range ds.Fields {
// fmt.Printf("\t%s", cyan(f.Name))
// }
// fmt.Printf("\n")
// for _, f := range ds.Fields {
// fmt.Printf("\t%s", blue(f.Type.String()))
// }
// fmt.Printf("\n")
// }

// printQuery prints one numbered query entry to stdout: the index in cyan,
// the transform data in white, and the dataset path in blue on a second line.
func printQuery(i int, r *repo.DatasetRef) {
	paintIndex := color.New(color.FgCyan).SprintFunc()
	paintData := color.New(color.FgWhite).SprintFunc()
	paintPath := color.New(color.FgBlue).SprintFunc()
	fmt.Printf("%s:\t%s\n\t%s\n", paintIndex(i), paintData(r.Dataset.Transform.Data), paintPath(r.Path))
}

func printResults(r *dataset.Structure, data []byte, format dataset.DataFormat) {
switch format {
Expand Down Expand Up @@ -200,18 +162,6 @@ func terribleHackToGetHeaderRow(st *dataset.Structure) ([]string, error) {
return nil, fmt.Errorf("nope")
}

// func PrintTree(ds *dataset.Dataset, indent int) {
// fmt.Println(strings.Repeat(" ", indent), ds.Address.String())
// for i, d := range ds.Datasets {
// if i < len(ds.Datasets)-1 {
// fmt.Println(strings.Repeat(" ", indent), "├──", d.Address.String())
// } else {
// fmt.Println(strings.Repeat(" ", indent), "└──", d.Address.String())

// }
// }
// }

func prompt(msg string) string {
var input string
printPrompt(msg)
Expand Down Expand Up @@ -248,15 +198,4 @@ func printDiffs(diffText string) {
fmt.Printf("%s\n", line)
}
}
// output := ""
// for _, line := range lines {
// if len(line) >= 3 && (line[:2] == "+ " || line[:2] == "++") {
// output += fmt.Sprintf("🎾%s\n", green(line))
// } else if len(line) >= 3 && (line[:2] == "- " || line[:2] == "--") {
// output += fmt.Sprintf("🏓%s\n", red(line))
// } else {
// output += fmt.Sprintf("%s\n", line)
// }
// }
// fmt.Printf("%s", output)
}
15 changes: 2 additions & 13 deletions cmd/save.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,6 @@ import (
var (
saveFilePath string
saveDataPath string
saveMetaFile string
saveStructureFile string
saveTitle string
saveMessage string
savePassive bool
Expand All @@ -30,7 +28,7 @@ var (
// saveCmd represents the save command
var saveCmd = &cobra.Command{
Use: "save",
Aliases: []string{"update"},
Aliases: []string{"update", "commit"},
Short: "save changes to a dataset",
Long: `
Save is how you change a dataset, updating one or more of data, metadata, and
Expand All @@ -50,7 +48,7 @@ collaboration are in the works. Sit tight sportsfans.`,
},
Run: func(cmd *cobra.Command, args []string) {
if len(args) < 1 && saveFilePath == "" {
ErrExit(fmt.Errorf("please provide the name of an existing dataset to save updates to"))
ErrExit(fmt.Errorf("please provide the name of an existing dataset to save updates to, or specify a dataset --file with name and peername"))
}

ref, err := repo.ParseDatasetRef(args[0])
Expand Down Expand Up @@ -93,13 +91,6 @@ collaboration are in the works. Sit tight sportsfans.`,
if saveDataPath != "" {
dsp.DataPath = saveDataPath
}
// if saveMetaFile == "" && saveDataPath == "" && saveStructureFile == "" {
// ErrExit(fmt.Errorf("one of --structure, --meta or --data or --url is required"))
// }
// metaFile, err = loadFileIfPath(saveMetaFile)
// ExitIfErr(err)
// structureFile, err = loadFileIfPath(saveStructureFile)
// ExitIfErr(err)

p := &core.SaveParams{
Dataset: dsp,
Expand Down Expand Up @@ -139,8 +130,6 @@ func init() {
saveCmd.Flags().StringVarP(&saveTitle, "title", "t", "", "title of commit message for save")
saveCmd.Flags().StringVarP(&saveMessage, "message", "m", "", "commit message for save")
saveCmd.Flags().StringVarP(&saveDataPath, "data", "", "", "path to file or url to initialize from")
// saveCmd.Flags().StringVarP(&saveMetaFile, "meta", "", "", "metadata.json file")
// saveCmd.Flags().StringVarP(&saveStructureFile, "structure", "", "", "structure.json file")
saveCmd.Flags().BoolVarP(&saveShowValidation, "show-validation", "s", false, "display a list of validation errors upon adding")
RootCmd.AddCommand(saveCmd)
}
3 changes: 0 additions & 3 deletions core/datasets.go
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,6 @@ func (r *DatasetRequests) List(p *ListParams, res *[]repo.DatasetRef) error {
p.RPC = true
return r.cli.Call("DatasetRequests.List", p, res)
}
log.Debugf("list datasets: %s %d/%d", p.Peername, p.Limit, p.Offset)

ds := &repo.DatasetRef{
Peername: p.Peername,
Expand Down Expand Up @@ -149,15 +148,13 @@ func (r *DatasetRequests) List(p *ListParams, res *[]repo.DatasetRef) error {
log.Debug(err.Error())
return fmt.Errorf("error getting dataset list: %s", err.Error())
}
log.Debugf("found %d references", len(replies))

for i, ref := range replies {
if err := repo.CanonicalizeProfile(r.repo, &replies[i]); err != nil {
log.Debug(err.Error())
return fmt.Errorf("error canonicalizing dataset peername: %s", err.Error())
}

log.Debugf("loading %s", ref.Path)
ds, err := dsfs.LoadDataset(store, datastore.NewKey(ref.Path))
if err != nil {
return fmt.Errorf("error loading path: %s, err: %s", ref.Path, err.Error())
Expand Down

0 comments on commit 27d13a0

Please sign in to comment.