Decompose DagArchive from unixfs tar
License: MIT
Signed-off-by: rht <rhtbot@gmail.com>
rht committed Aug 20, 2015
1 parent dfa0351 commit 9f0c813
Showing 3 changed files with 85 additions and 71 deletions.
core/commands/get.go: 2 additions & 2 deletions
@@ -15,7 +15,7 @@ import (
 	core "github.com/ipfs/go-ipfs/core"
 	path "github.com/ipfs/go-ipfs/path"
 	tar "github.com/ipfs/go-ipfs/thirdparty/tar"
-	utar "github.com/ipfs/go-ipfs/unixfs/tar"
+	uarchive "github.com/ipfs/go-ipfs/unixfs/archive"
 )
 
 var ErrInvalidCompressionLevel = errors.New("Compression level must be between 1 and 9")
@@ -70,7 +70,7 @@ may also specify the level of compression by specifying '-l=<1-9>'.
 		}
 
 		archive, _, _ := req.Option("archive").Bool()
-		reader, err := utar.DagArchive(ctx, dn, p.String(), node.DAG, archive, cmplvl)
+		reader, err := uarchive.DagArchive(ctx, dn, p.String(), node.DAG, archive, cmplvl)
 		if err != nil {
 			res.SetError(err, cmds.ErrNormal)
 			return
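For context, a minimal sketch (not part of this commit; the package and helper names are illustrative, only the DagArchive signature is taken from the diff) of how a caller outside the command machinery might use the relocated uarchive.DagArchive and stream the result to disk:

package getexample

import (
	"compress/gzip"
	"io"
	"os"

	cxt "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"

	mdag "github.com/ipfs/go-ipfs/merkledag"
	uarchive "github.com/ipfs/go-ipfs/unixfs/archive"
)

// SaveArchive is a hypothetical helper mirroring the get.go call site:
// archive=true plus a gzip level of 1-9 corresponds to `ipfs get -a -C -l=<1-9>`.
func SaveArchive(ctx cxt.Context, nd *mdag.Node, name string, dag mdag.DAGService, out string) error {
	r, err := uarchive.DagArchive(ctx, nd, name, dag, true, gzip.BestSpeed)
	if err != nil {
		return err
	}
	f, err := os.Create(out)
	if err != nil {
		return err
	}
	defer f.Close()
	// DagArchive hands back the read end of a pipe; copying from it drives the
	// writer goroutine that serializes the DAG in the background.
	_, err = io.Copy(f, r)
	return err
}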
unixfs/archive/archive.go: 83 additions & 0 deletions (new file)
@@ -0,0 +1,83 @@
package archive

import (
	"bufio"
	"compress/gzip"
	"io"
	"path"

	cxt "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"

	mdag "github.com/ipfs/go-ipfs/merkledag"
	tar "github.com/ipfs/go-ipfs/unixfs/archive/tar"
	uio "github.com/ipfs/go-ipfs/unixfs/io"
)

// DefaultBufSize is the buffer size for gets. for now, 1MB, which is ~4 blocks.
// TODO: does this need to be configurable?
var DefaultBufSize = 1048576

// DagArchive is equivalent to `ipfs getdag $hash | maybe_tar | maybe_gzip`
func DagArchive(ctx cxt.Context, nd *mdag.Node, name string, dag mdag.DAGService, archive bool, compression int) (io.Reader, error) {

	_, filename := path.Split(name)

	// need to connect a writer to a reader
	piper, pipew := io.Pipe()

	// use a buffered writer to parallelize task
	bufw := bufio.NewWriterSize(pipew, DefaultBufSize)

	// compression determines whether to use gzip compression.
	var maybeGzw io.Writer
	if compression != gzip.NoCompression {
		var err error
		maybeGzw, err = gzip.NewWriterLevel(bufw, compression)
		if err != nil {
			return nil, err
		}
	} else {
		maybeGzw = bufw
	}

	if !archive && compression != gzip.NoCompression {
		// the case when the node is a file
		dagr, err := uio.NewDagReader(ctx, nd, dag)
		if err != nil {
			pipew.CloseWithError(err)
			return nil, err
		}

		go func() {
			if _, err := dagr.WriteTo(maybeGzw); err != nil {
				pipew.CloseWithError(err)
				return
			}
			pipew.Close() // everything seems to be ok.
		}()
	} else {
		// the case for 1. archive, and 2. not archived and not compressed, in which tar is used anyway as a transport format

		// construct the tar writer
		w, err := tar.NewWriter(ctx, dag, archive, compression, maybeGzw)
		if err != nil {
			return nil, err
		}

		go func() {
			// write all the nodes recursively
			if err := w.WriteNode(nd, filename); err != nil {
				pipew.CloseWithError(err)
				return
			}
			if err := bufw.Flush(); err != nil {
				pipew.CloseWithError(err)
				return
			}
			w.Close()
			pipew.Close() // everything seems to be ok.
		}()
	}

	return piper, nil
}
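DagArchive returns the read end of an io.Pipe and does the actual traversal in a goroutine behind a 1MB bufio writer and an optional gzip writer. A self-contained sketch of that producer/consumer pattern (illustrative only, not taken from the repository; package and function names are made up):

package pipedemo

import (
	"bufio"
	"compress/gzip"
	"fmt"
	"io"
	"io/ioutil"
)

// Demo wires up the same plumbing DagArchive uses, pipe -> buffered writer ->
// gzip writer, with the producer in its own goroutine so the consumer can
// stream the result without the whole payload ever being held in memory.
func Demo() error {
	piper, pipew := io.Pipe()

	go func() {
		bufw := bufio.NewWriterSize(pipew, 1048576) // same 1MB as DefaultBufSize
		gzw, err := gzip.NewWriterLevel(bufw, gzip.BestCompression)
		if err != nil {
			pipew.CloseWithError(err)
			return
		}
		fmt.Fprintln(gzw, "payload produced on the writer side")
		gzw.Close()  // terminate the gzip stream
		bufw.Flush() // push buffered bytes into the pipe
		pipew.Close()
	}()

	gzr, err := gzip.NewReader(piper)
	if err != nil {
		return err
	}
	_, err = io.Copy(ioutil.Discard, gzr) // consumer side: decompress and drain
	return err
}

Note that DagArchive uses compression == gzip.NoCompression (0) to mean "skip the gzip layer entirely"; levels 1-9 are passed straight to gzip.NewWriterLevel.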
unixfs/tar/writer.go → unixfs/archive/tar/writer.go: 0 additions & 69 deletions (renamed)
@@ -2,8 +2,6 @@ package tar
 
 import (
 	"archive/tar"
-	"bufio"
-	"compress/gzip"
 	"io"
 	"path"
 	"time"
@@ -17,73 +15,6 @@ import (
 	upb "github.com/ipfs/go-ipfs/unixfs/pb"
 )
 
-// DefaultBufSize is the buffer size for gets. for now, 1MB, which is ~4 blocks.
-// TODO: does this need to be configurable?
-var DefaultBufSize = 1048576
-
-// DagArchive is equivalent to `ipfs getdag $hash | maybe_tar | maybe_gzip`
-func DagArchive(ctx cxt.Context, nd *mdag.Node, name string, dag mdag.DAGService, archive bool, compression int) (io.Reader, error) {
-
-	_, filename := path.Split(name)
-
-	// need to connect a writer to a reader
-	piper, pipew := io.Pipe()
-
-	// use a buffered writer to parallelize task
-	bufw := bufio.NewWriterSize(pipew, DefaultBufSize)
-
-	// compression determines whether to use gzip compression.
-	var maybeGzw io.Writer
-	if compression != gzip.NoCompression {
-		var err error
-		maybeGzw, err = gzip.NewWriterLevel(bufw, compression)
-		if err != nil {
-			return nil, err
-		}
-	} else {
-		maybeGzw = bufw
-	}
-
-	// construct the tar writer
-	w, err := NewWriter(ctx, dag, archive, compression, maybeGzw)
-	if err != nil {
-		return nil, err
-	}
-
-	// write all the nodes recursively
-	go func() {
-		if !archive && compression != gzip.NoCompression {
-			// the case when the node is a file
-			dagr, err := uio.NewDagReader(w.ctx, nd, w.Dag)
-			if err != nil {
-				pipew.CloseWithError(err)
-				return
-			}
-
-			if _, err := dagr.WriteTo(maybeGzw); err != nil {
-				pipew.CloseWithError(err)
-				return
-			}
-		} else {
-			// the case for 1. archive, and 2. not archived and not compressed, in which tar is used anyway as a transport format
-			if err := w.WriteNode(nd, filename); err != nil {
-				pipew.CloseWithError(err)
-				return
-			}
-		}
-
-		if err := bufw.Flush(); err != nil {
-			pipew.CloseWithError(err)
-			return
-		}
-
-		w.Close()
-		pipew.Close() // everything seems to be ok.
-	}()
-
-	return piper, nil
-}
-
 // Writer is a utility structure that helps to write
 // unixfs merkledag nodes as a tar archive format.
 // It wraps any io.Writer.
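With DagArchive gone from this package, tar.Writer is the lower-level piece that archive.go now drives. A hypothetical direct use of the relocated writer (uncompressed tar straight to a file; only NewWriter, WriteNode, and Close come from the diff, everything else here is illustrative):

package tarexample

import (
	"compress/gzip"
	"os"

	cxt "github.com/ipfs/go-ipfs/Godeps/_workspace/src/golang.org/x/net/context"

	mdag "github.com/ipfs/go-ipfs/merkledag"
	tar "github.com/ipfs/go-ipfs/unixfs/archive/tar"
)

// WriteTar streams a unixfs DAG as a plain (uncompressed) tar archive straight
// to a file, mirroring the tar branch of DagArchive without the pipe plumbing.
func WriteTar(ctx cxt.Context, nd *mdag.Node, dag mdag.DAGService, filename, out string) error {
	f, err := os.Create(out)
	if err != nil {
		return err
	}
	defer f.Close()

	w, err := tar.NewWriter(ctx, dag, true, gzip.NoCompression, f)
	if err != nil {
		return err
	}
	if err := w.WriteNode(nd, filename); err != nil {
		return err
	}
	w.Close()
	return nil
}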
