Remove thirdparty/tar/extractor
License: MIT
Signed-off-by: rht <rhtbot@gmail.com>
rht committed Aug 11, 2015
1 parent d1366cd commit bb69b77
Showing 5 changed files with 150 additions and 227 deletions.
99 changes: 36 additions & 63 deletions core/commands/get.go
@@ -1,7 +1,6 @@
package commands

import (
"bufio"
"compress/gzip"
"errors"
"fmt"
@@ -11,13 +10,11 @@ import (
"strings"

"github.com/ipfs/go-ipfs/Godeps/_workspace/src/github.com/cheggaaa/pb"
context "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"
mdag "github.com/ipfs/go-ipfs/merkledag"

cmds "github.com/ipfs/go-ipfs/commands"
core "github.com/ipfs/go-ipfs/core"
path "github.com/ipfs/go-ipfs/path"
tar "github.com/ipfs/go-ipfs/thirdparty/tar"
uio "github.com/ipfs/go-ipfs/unixfs/io"
utar "github.com/ipfs/go-ipfs/unixfs/tar"
)

@@ -64,28 +61,30 @@ may also specify the level of compression by specifying '-l=<1-9>'.
res.SetError(err, cmds.ErrNormal)
return
}

p := path.Path(req.Arguments()[0])
ctx := req.Context()
dn, err := core.Resolve(ctx, node, p)
if err != nil {
res.SetError(err, cmds.ErrNormal)
return
}

var reader io.Reader
if archive, _, _ := req.Option("archive").Bool(); !archive && cmplvl != gzip.NoCompression {
// only use this when the flag is '-C' without '-a'
reader, err = getZip(req.Context(), node, p, cmplvl)
} else {
reader, err = get(req.Context(), node, p, cmplvl)
archive, _, _ := req.Option("archive").Bool()
if !archive && cmplvl == gzip.NoCompression {
if err := get(req, dn, node.DAG, p.String()); err != nil {
res.SetError(err, cmds.ErrNormal)
return
}
}
reader, err = utar.DagArchive(ctx, dn, p.String(), node.DAG, archive, cmplvl)
if err != nil {
res.SetError(err, cmds.ErrNormal)
return
}
res.SetOutput(reader)
},
PostRun: func(req cmds.Request, res cmds.Response) {
if res.Output() == nil {
return
}
outReader := res.Output().(io.Reader)
res.SetOutput(nil)

outPath, _, _ := req.Option("output").String()
if len(outPath) == 0 {
_, outPath = gopath.Split(req.Arguments()[0])
@@ -99,6 +98,17 @@ may also specify the level of compression by specifying '-l=<1-9>'.
}

archive, _, _ := req.Option("archive").Bool()
if !archive && cmplvl == gzip.NoCompression {
fmt.Fprintf(os.Stdout, "Saving file(s) to %s\n", outPath)
return
}

if res.Output() == nil {
return
}

outReader := res.Output().(io.Reader)
res.SetOutput(nil)

gw := getWriter{
Out: os.Stdout,
@@ -135,7 +145,10 @@ func (gw *getWriter) Write(r io.Reader, fpath string) error {
if gw.Archive || gw.Compression != gzip.NoCompression {
return gw.writeArchive(r, fpath)
}
return gw.writeExtracted(r, fpath)
//bar, barR := progressBarForReader(gw.Err, r)
//bar.Start()
//defer bar.Finish()
return nil
}

func (gw *getWriter) writeArchive(r io.Reader, fpath string) error {
@@ -169,16 +182,6 @@ func (gw *getWriter) writeArchive(r io.Reader, fpath string) error {
return err
}

func (gw *getWriter) writeExtracted(r io.Reader, fpath string) error {
fmt.Fprintf(gw.Out, "Saving file(s) to %s\n", fpath)
bar, barR := progressBarForReader(gw.Err, r)
bar.Start()
defer bar.Finish()

extractor := &tar.Extractor{fpath}
return extractor.Extract(barR)
}

func getCompressOptions(req cmds.Request) (int, error) {
cmprs, _, _ := req.Option("compress").Bool()
cmplvl, cmplvlFound, _ := req.Option("compression-level").Int()
@@ -193,41 +196,11 @@ func getCompressOptions(req cmds.Request) (int, error) {
return gzip.NoCompression, nil
}

func get(ctx context.Context, node *core.IpfsNode, p path.Path, compression int) (io.Reader, error) {
dn, err := core.Resolve(ctx, node, p)
if err != nil {
return nil, err
func get(req cmds.Request, dn *mdag.Node, dag mdag.DAGService, p string) error {
outPath, _, _ := req.Option("output").String()
if len(outPath) == 0 {
_, outPath = gopath.Split(p)
outPath = gopath.Clean(outPath)
}

return utar.DagArchive(ctx, dn, p.String(), node.DAG, compression)
}

// getZip is equivalent to `ipfs getdag $hash | gzip`
func getZip(ctx context.Context, node *core.IpfsNode, p path.Path, compression int) (io.Reader, error) {
dagnode, err := core.Resolve(ctx, node, p)
if err != nil {
return nil, err
}

reader, err := uio.NewDagReader(ctx, dagnode, node.DAG)
if err != nil {
return nil, err
}

pr, pw := io.Pipe()
gw, err := gzip.NewWriterLevel(pw, compression)
if err != nil {
return nil, err
}
bufin := bufio.NewReader(reader)
go func() {
_, err := bufin.WriteTo(gw)
if err != nil {
log.Error("Fail to compress the stream")
}
gw.Close()
pw.Close()
}()

return pr, nil
return utar.DagFS(req.Context(), dn, dag, outPath)
}
2 changes: 1 addition & 1 deletion fuse/readonly/readonly_unix.go
@@ -180,7 +180,7 @@ func (s *Node) Read(ctx context.Context, req *fuse.ReadRequest, resp *fuse.ReadR
return err
}

buf := resp.Data[:min(req.Size, int(r.Size()-req.Offset))]
buf := resp.Data[:min(req.Size, int(int64(r.Size())-req.Offset))]
n, err := io.ReadFull(r, buf)
if err != nil && err != io.EOF {
return err
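The one-line change in the fuse read path follows from the DagReader.Size signature change later in this diff (int64 to uint64): Go will not mix uint64 and int64 operands in one expression, so the size is explicitly converted before the request offset is subtracted. A minimal standalone sketch of the same arithmetic, using made-up values rather than the real fuse request fields:

package main

import "fmt"

func main() {
	var size uint64 = 4096  // stand-in for what DagReader.Size() now returns
	var offset int64 = 1024 // stand-in for req.Offset
	reqSize := 2048         // stand-in for req.Size

	// size - offset alone would not compile: Go forbids mixing uint64 and
	// int64 operands, so the size is converted to int64 first.
	remaining := int64(size) - offset

	// Cap the read buffer at whichever is smaller: the requested size or the
	// bytes remaining in the file, mirroring the min(...) call in the diff.
	n := reqSize
	if int(remaining) < n {
		n = int(remaining)
	}
	fmt.Println(n) // prints 2048
}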
109 changes: 0 additions & 109 deletions thirdparty/tar/extractor.go

This file was deleted.
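For context, the deleted file implemented the small tar extractor that `ipfs get` used to unpack the generated archive onto disk. The sketch below is not the removed code; it is a deliberately simplified illustration of what such an extractor does, assuming only the standard archive/tar package and skipping symlinks and path sanitisation:

// extract.go: minimal tar extraction sketch (illustrative only).
package extractsketch

import (
	"archive/tar"
	"io"
	"os"
	"path/filepath"
)

// Extract reads a tar stream from r and writes its directories and regular
// files under dest. Other entry types are ignored in this sketch.
func Extract(r io.Reader, dest string) error {
	tr := tar.NewReader(r)
	for {
		hdr, err := tr.Next()
		if err == io.EOF {
			return nil // end of archive
		}
		if err != nil {
			return err
		}
		target := filepath.Join(dest, hdr.Name)
		switch hdr.Typeflag {
		case tar.TypeDir:
			if err := os.MkdirAll(target, 0755); err != nil {
				return err
			}
		case tar.TypeReg:
			f, err := os.Create(target)
			if err != nil {
				return err
			}
			if _, err := io.Copy(f, tr); err != nil {
				f.Close()
				return err
			}
			f.Close()
		}
	}
}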

4 changes: 2 additions & 2 deletions unixfs/io/dagreader.go
@@ -137,8 +137,8 @@ func (dr *DagReader) precalcNextBuf(ctx context.Context) error {
}

// Size return the total length of the data from the DAG structured file.
func (dr *DagReader) Size() int64 {
return int64(dr.pbdata.GetFilesize())
func (dr *DagReader) Size() uint64 {
return dr.pbdata.GetFilesize()
}

// Read reads data from the DAG structured file
(diff for the fifth changed file did not load)
