feat(core.QueryRequests.DatasetQueries): first implementation
Merge pull request #155 from qri-io/feat/dataset_queries
b5 authored Nov 29, 2017
2 parents e18fe27 + a5776b3 commit 2cfa5f1
Showing 11 changed files with 334 additions and 5 deletions.
51 changes: 51 additions & 0 deletions api/handlers/datasets.go
@@ -92,6 +92,15 @@ func (h *DatasetHandlers) AddDatasetHandler(w http.ResponseWriter, r *http.Reque
}
}

func (h *DatasetHandlers) RenameDatasetHandler(w http.ResponseWriter, r *http.Request) {
switch r.Method {
case "POST", "PUT":
h.renameDatasetHandler(w, r)
default:
util.NotFoundHandler(w, r)
}
}

func (h *DatasetHandlers) ZipDatasetHandler(w http.ResponseWriter, r *http.Request) {
res := &repo.DatasetRef{}
args := &core.GetDatasetParams{
@@ -274,3 +283,45 @@ func (h *DatasetHandlers) addDatasetHandler(w http.ResponseWriter, r *http.Reque

util.WriteResponse(w, res)
}

func (h DatasetHandlers) renameDatasetHandler(w http.ResponseWriter, r *http.Request) {
p := &core.RenameParams{}
if r.Header.Get("Content-Type") == "application/json" {
if err := json.NewDecoder(r.Body).Decode(p); err != nil {
util.WriteErrResponse(w, http.StatusBadRequest, err)
return
}
} else {
p = &core.RenameParams{
Current: r.URL.Query().Get("current"),
New: r.URL.Query().Get("new"),
}
}

res := ""
if err := h.Rename(p, &res); err != nil {
h.log.Infof("error renaming dataset: %s", err.Error())
util.WriteErrResponse(w, http.StatusBadRequest, err)
return
}

path, err := h.repo.GetPath(res)
if err != nil {
h.log.Infof("error getting renamed dataset: %s", err.Error())
util.WriteErrResponse(w, http.StatusInternalServerError, err)
return
}

ds, err := h.repo.GetDataset(path)
if err != nil {
h.log.Infof("error reading dataset: %s", err.Error())
util.WriteErrResponse(w, http.StatusInternalServerError, err)
return
}

util.WriteResponse(w, &repo.DatasetRef{
Name: res,
Path: path,
Dataset: ds,
})
}
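
For reference, a minimal client-side sketch of exercising the new rename endpoint. The localhost:2503 address is an assumption for illustration (it is not part of this commit), and the JSON keys simply mirror core.RenameParams, which the handler above decodes:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Body mirrors core.RenameParams; field matching is case-insensitive for the JSON decoder.
	body, err := json.Marshal(map[string]string{"current": "movies", "new": "new_movies"})
	if err != nil {
		panic(err)
	}
	// localhost:2503 is an assumed address for a locally running API server; adjust as needed.
	res, err := http.Post("http://localhost:2503/rename", "application/json", bytes.NewReader(body))
	if err != nil {
		fmt.Println("request error:", err)
		return
	}
	defer res.Body.Close()
	fmt.Println("status:", res.Status)
}

Per the else branch above, a query-string form (?current=...&new=...) is accepted as well when the Content-Type is not application/json.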
24 changes: 24 additions & 0 deletions api/handlers/queries.go
@@ -114,3 +114,27 @@ func (h *QueryHandlers) runHandler(w http.ResponseWriter, r *http.Request) {

util.WriteResponse(w, res)
}

func (h *QueryHandlers) DatasetQueriesHandler(w http.ResponseWriter, r *http.Request) {
switch r.Method {
case "GET":
h.datasetQueriesHandler(w, r)
default:
util.NotFoundHandler(w, r)
}
}

func (h *QueryHandlers) datasetQueriesHandler(w http.ResponseWriter, r *http.Request) {
p := &core.DatasetQueriesParams{
Path: r.URL.Path[len("/queries"):],
}

res := []*repo.DatasetRef{}
if err := h.DatasetQueries(p, &res); err != nil {
h.log.Infof("error listing dataset queries: %s", err.Error())
util.WriteErrResponse(w, http.StatusInternalServerError, err)
return
}

util.WriteResponse(w, res)
}
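
A similarly minimal sketch for the new dataset-queries endpoint. The handler strips the "/queries" prefix, so everything after it becomes DatasetQueriesParams.Path; both localhost:2503 and the /ipfs/QmExample path segment below are placeholders:

package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// /ipfs/QmExample is a placeholder dataset path; localhost:2503 is an assumed server address.
	res, err := http.Get("http://localhost:2503/queries/ipfs/QmExample")
	if err != nil {
		fmt.Println("request error:", err)
		return
	}
	defer res.Body.Close()
	data, err := io.ReadAll(res.Body)
	if err != nil {
		fmt.Println("read error:", err)
		return
	}
	fmt.Println(string(data))
}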
2 changes: 2 additions & 0 deletions api/server.go
@@ -101,6 +101,7 @@ func NewServerRoutes(s *Server) *http.ServeMux {
m.Handle("/datasets/", s.middleware(dsh.DatasetHandler))
m.Handle("/add/", s.middleware(dsh.AddDatasetHandler))
m.Handle("/init/", s.middleware(dsh.InitDatasetHandler))
m.Handle("/rename", s.middleware(dsh.RenameDatasetHandler))
m.Handle("/data/ipfs/", s.middleware(dsh.StructuredDataHandler))
m.Handle("/download/", s.middleware(dsh.ZipDatasetHandler))

@@ -109,6 +110,7 @@ func NewServerRoutes(s *Server) *http.ServeMux {

qh := handlers.NewQueryHandlers(s.log, s.qriNode.Repo)
m.Handle("/queries", s.middleware(qh.ListHandler))
m.Handle("/queries/", s.middleware(qh.DatasetQueriesHandler))
m.Handle("/run", s.middleware(qh.RunHandler))

return m
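
Routing note: http.ServeMux treats "/rename" (no trailing slash) as an exact-match pattern, while "/queries/" is a subtree pattern, so "/queries" still resolves to ListHandler and anything under "/queries/..." falls through to the new DatasetQueriesHandler.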
2 changes: 1 addition & 1 deletion cmd/add.go
@@ -45,7 +45,7 @@ var datasetAddCmd = &cobra.Command{
err := req.AddDataset(p, res)
ExitIfErr(err)

PrintInfo("Successfully added dataset %s : %s", addDsName, res.Path.String())
PrintInfo("Successfully added dataset %s: %s", addDsName, res.Path.String())
} else {
initDataset()
}
35 changes: 35 additions & 0 deletions cmd/rename.go
@@ -0,0 +1,35 @@
package cmd

import (
"fmt"

"github.com/qri-io/qri/core"
"github.com/spf13/cobra"
)

var datasetRenameCmd = &cobra.Command{
Use: "rename",
Aliases: []string{"mv"},
Short: "rename a dataset in your local namespace",
Long: ``,
Run: func(cmd *cobra.Command, args []string) {
if len(args) != 2 {
ErrExit(fmt.Errorf("please provide current & new dataset names"))
}

req := core.NewDatasetRequests(GetRepo(false))
p := &core.RenameParams{
Current: args[0],
New: args[1],
}
newName := ""
err := req.Rename(p, &newName)
ExitIfErr(err)

PrintSuccess("renamed dataset %s", newName)
},
}

func init() {
RootCmd.AddCommand(datasetRenameCmd)
}
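
Illustrative CLI usage (the qri binary name and invocation are assumptions based on the repository layout; the two positional arguments map to RenameParams.Current and RenameParams.New):

qri rename movies new_movies
qri mv movies new_movies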
36 changes: 33 additions & 3 deletions core/datasets.go
@@ -158,13 +158,13 @@ func (r *DatasetRequests) InitDataset(p *InitDatasetParams, res *repo.DatasetRef
// return fmt.Errorf("data is invalid")
// }

datakey, err := store.Put(memfs.NewMemfileBytes("data."+st.Format.String(), data), true)
datakey, err := store.Put(memfs.NewMemfileBytes("data."+st.Format.String(), data), false)
if err != nil {
return fmt.Errorf("error putting data file in store: %s", err.Error())
}

dataexists, err := repo.HasPath(r.repo, datakey)
if err != nil {
if err != nil && !strings.Contains(err.Error(), repo.ErrRepoEmpty.Error()) {
return fmt.Errorf("error checking repo for already-existing data: %s", err.Error())
}
if dataexists {
@@ -213,6 +213,7 @@ func (r *DatasetRequests) InitDataset(p *InitDatasetParams, res *repo.DatasetRef
return fmt.Errorf("error putting dataset in repo: %s", err.Error())
}

fmt.Println(dskey.String())
if err = r.repo.PutName(name, dskey); err != nil {
return fmt.Errorf("error adding dataset name to repo: %s", err.Error())
}
@@ -319,6 +320,34 @@ func (r *DatasetRequests) Update(p *UpdateParams, res *repo.DatasetRef) (err err
return nil
}

type RenameParams struct {
Current, New string
}

func (r *DatasetRequests) Rename(p *RenameParams, res *string) (err error) {
if p.Current == "" {
return fmt.Errorf("current name is required to rename a dataset")
}

path, err := r.repo.GetPath(p.Current)
if err != nil {
return fmt.Errorf("error getting dataset: %s", err.Error())
}

if err := validate.ValidName(p.New); err != nil {
return err
}
if err := r.repo.DeleteName(p.Current); err != nil {
return err
}
if err := r.repo.PutName(p.New, path); err != nil {
return err
}

*res = p.New
return nil
}
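
For illustration, a sketch of calling Rename through the request object, mirroring cmd/rename.go above. The repo.Repo interface name and the import paths are assumptions about the surrounding package layout rather than part of this diff:

package example

import (
	"github.com/qri-io/qri/core"
	"github.com/qri-io/qri/repo"
)

// renameDataset mirrors the flow in cmd/rename.go: r is any configured repo.Repo
// (the CLI gets one from GetRepo, the tests from testrepo.NewTestRepo).
func renameDataset(r repo.Repo, current, next string) (string, error) {
	req := core.NewDatasetRequests(r)
	newName := ""
	err := req.Rename(&core.RenameParams{Current: current, New: next}, &newName)
	return newName, err
}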

type DeleteParams struct {
Path datastore.Key
Name string
@@ -342,7 +371,8 @@ func (r *DatasetRequests) Delete(p *DeleteParams, ok *bool) (err error) {
}

if pinner, ok := r.repo.Store().(cafs.Pinner); ok {
if err = pinner.Unpin(p.Path, true); err != nil {
path := datastore.NewKey(strings.TrimSuffix(p.Path.String(), "/"+dsfs.PackageFileDataset.String()))
if err = pinner.Unpin(path, true); err != nil {
return
}
}
34 changes: 34 additions & 0 deletions core/datasets_test.go
@@ -209,6 +209,40 @@ func TestDatasetRequestsUpdate(t *testing.T) {
}
}

func TestDatasetRequestsRename(t *testing.T) {
mr, err := testrepo.NewTestRepo()
if err != nil {
t.Errorf("error allocating test repo: %s", err.Error())
return
}

cases := []struct {
p *RenameParams
res string
err string
}{
{&RenameParams{}, "", "current name is required to rename a dataset"},
{&RenameParams{Current: "movies", New: "new movies"}, "", "error: illegal name 'new movies', names must start with a letter and consist of only a-z,0-9, and _. max length 144 characters"},
{&RenameParams{Current: "movies", New: "new_movies"}, "new_movies", ""},
}

req := NewDatasetRequests(mr)
for i, c := range cases {
got := ""
err := req.Rename(c.p, &got)

if !(err == nil && c.err == "" || err != nil && err.Error() == c.err) {
t.Errorf("case %d error mismatch: expected: %s, got: %s", i, c.err, err)
continue
}

if got != c.res {
t.Errorf("case %d response name mismatch. expected: '%s', got: '%s'", i, c.res, got)
continue
}
}
}
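
The new cases can be run on their own with the standard tooling, e.g. go test ./core -run TestDatasetRequestsRename.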

func TestDatasetRequestsDelete(t *testing.T) {
mr, err := testrepo.NewTestRepo()
if err != nil {
56 changes: 55 additions & 1 deletion core/queries.go
@@ -33,7 +33,17 @@ func (d *QueryRequests) List(p *ListParams, res *[]*repo.DatasetRef) error {
ref.Dataset = ds
}
}
*res = results

// TODO - clean this up, this is a hack to prevent null datasets from
// being sent back as responses.
// Warning - this could throw off pagination :/
final := []*repo.DatasetRef{}
for _, ref := range results {
if ref.Dataset != nil {
final = append(final, ref)
}
}
*res = final
return nil
}

@@ -185,3 +195,47 @@ func (r *QueryRequests) Run(p *RunParams, res *repo.DatasetRef) error {
*res = *ref
return nil
}

type DatasetQueriesParams struct {
Path string
Orderby string
Limit int
Offset int
}

func (r *QueryRequests) DatasetQueries(p *DatasetQueriesParams, res *[]*repo.DatasetRef) error {
if p.Path == "" {
return fmt.Errorf("path is required")
}

store := r.repo.Store()
_, err := dsfs.LoadDataset(store, datastore.NewKey(p.Path))
if err != nil {
return fmt.Errorf("error loading dataset: %s", err.Error())
}

nodes, err := r.repo.Graph()
if err != nil {
return fmt.Errorf("error loading graph: %s", err.Error())
}

dsq := repo.DatasetQueries(nodes)
list := []*repo.DatasetRef{}

for dshash, qKey := range dsq {
if dshash == p.Path {
ds, err := dsfs.LoadDataset(store, datastore.NewKey(dshash))
if err != nil {
return fmt.Errorf("error loading dataset: %s", err.Error())
}

list = append(list, &repo.DatasetRef{
Path: qKey,
Dataset: ds,
})
}
}

*res = list
return nil
}
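
And a sketch of driving DatasetQueries from Go. core.NewQueryRequests is assumed to exist by analogy with NewDatasetRequests (no constructor appears in this diff), and the import paths are likewise assumptions:

package example

import (
	"github.com/qri-io/qri/core"
	"github.com/qri-io/qri/repo"
)

// queriesForDataset lists the queries recorded against the dataset at path,
// e.g. an "/ipfs/Qm..." key, using the new DatasetQueries method.
func queriesForDataset(r repo.Repo, path string) ([]*repo.DatasetRef, error) {
	req := core.NewQueryRequests(r) // assumed constructor, analogous to NewDatasetRequests
	res := []*repo.DatasetRef{}
	err := req.DatasetQueries(&core.DatasetQueriesParams{Path: path}, &res)
	return res, err
}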
(The diff for the remaining changed files is not shown.)
