'use strict'

const debug = require('debug')
const log = debug('importer')
log.err = debug('importer:error')
const fs = require('fs')
const mDAG = require('ipfs-merkle-dag')
const FixedSizeChunker = require('./chunker-fixed-size')
const through2 = require('through2')
const UnixFS = require('ipfs-unixfs')
const async = require('async')

exports = module.exports

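// 256 KiB, matching the default chunk size used by go-ipfs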
const CHUNK_SIZE = 262144

// Use a layout + chunkers to convert a directory (or file) to the layout format
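//
// Example usage (a sketch; assumes this module is required as `importer`
// and `dagService` is an ipfs-merkle-dag DAGService backed by a block
// service):
//
//   importer.import('/path/to/dir', dagService, { recursive: true },
//     (err, res) => {
//       if (err) { return console.error(err) }
//       console.log(res) // => { Hash: <multihash>, Size: <n>, Name: 'dir' }
//     })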
exports.import = (target, dagService, options, callback) => {
  if (typeof options === 'function') { callback = options; options = {} }

  if (!target) { return callback(new Error('must specify target')) }
  if (!dagService) { return callback(new Error('must specify dag service')) }

  // options.recursive : follow dirs
  // options.chunkers : obj with chunkers for each type of data, { default: dumb-chunker }

  options = options || {}

  if (Buffer.isBuffer(target)) {
    bufferImporter(target, callback)
  } else if (typeof target.on === 'function') {
    // TODO Create Stream Importer
    // streamImporter(options.stream, callback)
    return callback(new Error('stream importer has not been built yet'))
  } else if (typeof target === 'string') {
    var stats
    try {
      stats = fs.statSync(target)
    } catch (e) {
      return callback(e)
    }
    if (stats.isFile()) {
      fileImporter(target, callback)
    } else if (stats.isDirectory() && options.recursive) {
      dirImporter(target, callback)
    } else {
      return callback(new Error('recursive must be true to add a directory'))
    }
  } else {
    // fail loudly on unsupported target types instead of returning silently
    return callback(new Error('unsupported target type'))
  }

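  // fileImporter: if the file fits in a single chunk, store it as one
  // UnixFS 'file' node; otherwise chunk it, store each chunk as a leaf
  // node, and link the leaves from a parent node that records the block sizes.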
  function fileImporter (path, callback) {
    const stats = fs.statSync(path)
    if (stats.size > CHUNK_SIZE) {
      const links = [] // { Hash: , Size: , Name: }
      fs.createReadStream(path)
        .pipe(new FixedSizeChunker(CHUNK_SIZE))
        .pipe(through2((chunk, enc, cb) => {
          // TODO: check if this is right (I believe it should be type 'raw')
          // https://github.com/ipfs/go-ipfs/issues/2331
          const raw = new UnixFS('file', chunk)

          const node = new mDAG.DAGNode(raw.marshal())

          dagService.add(node, (err) => {
            if (err) {
              log.err(err)
              // report the failure to the caller instead of only logging it
              return callback(err)
            }
            links.push({
              Hash: node.multihash(),
              Size: node.size(),
              leafSize: raw.fileSize(),
              Name: ''
            })
            cb()
          })
        }, (cb) => {
          const file = new UnixFS('file')
          const parentNode = new mDAG.DAGNode()
          links.forEach((l) => {
            file.addBlockSize(l.leafSize)
            const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash)
            parentNode.addRawLink(link)
          })

          parentNode.data = file.marshal()
          dagService.add(parentNode, (err) => {
            if (err) {
              log.err(err)
              return callback(err)
            }

            const pathSplit = path.split('/')
            const fileName = pathSplit[pathSplit.length - 1]

            callback(null, {
              Hash: parentNode.multihash(),
              Size: parentNode.size(),
              Name: fileName
            })
            cb()
          })
        }))
    } else {
      // create just one file node with the data directly
      var buf = fs.readFileSync(path)
      const fileUnixFS = new UnixFS('file', buf)
      const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())

      dagService.add(fileNode, (err) => {
        if (err) {
          log.err(err)
          return callback(err)
        }

        const split = path.split('/')
        const fileName = split[split.length - 1]

        callback(null, {
          Hash: fileNode.multihash(),
          Size: fileNode.size(),
          Name: fileName
        })
      })
    }
  }

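  // dirImporter: store an empty directory as a bare UnixFS 'directory'
  // node; otherwise import each entry (recursing into subdirectories) and
  // link the results from the directory node.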
  function dirImporter (path, callback) {
    const files = fs.readdirSync(path)
    const dirUnixFS = new UnixFS('directory')
    const dirNode = new mDAG.DAGNode()

    if (files.length === 0) {
      dirNode.data = dirUnixFS.marshal()
      dagService.add(dirNode, (err) => {
        if (err) {
          return callback(err)
        }

        const split = path.split('/')
        const dirName = split[split.length - 1]

        callback(null, {
          Hash: dirNode.multihash(),
          Size: dirNode.size(),
          Name: dirName
        })
      })
      return
    }

    async.map(
      files,
      (file, cb) => {
        const filePath = path + '/' + file
        const stats = fs.statSync(filePath)
        if (stats.isFile()) {
          return fileImporter(filePath, cb)
        } else if (stats.isDirectory()) {
          return dirImporter(filePath, cb)
        } else {
          return cb(new Error('Found a weird file: ' + filePath))
        }
      },
      (err, results) => {
        if (err) {
          return callback(err)
        }
        results.forEach((result) => {
          dirNode.addRawLink(new mDAG.DAGLink(result.Name, result.Size, result.Hash))
        })

        dirNode.data = dirUnixFS.marshal()

        dagService.add(dirNode, (err) => {
          if (err) {
            return callback(err)
          }

          const split = path.split('/')
          const dirName = split[split.length - 1]

          callback(null, {
            Hash: dirNode.multihash(),
            Size: dirNode.size(),
            Name: dirName
          })
        })
      })
  }
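
  // bufferImporter: same strategy as fileImporter, but reads from an
  // in-memory buffer instead of a file on disk.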
  function bufferImporter (buffer, callback) {
    const links = [] // { Hash: , Size: , Name: }
    if (buffer.length > CHUNK_SIZE) {
      var fsc = new FixedSizeChunker(CHUNK_SIZE)
      fsc.write(buffer)
      fsc.end()
      fsc.pipe(through2((chunk, enc, cb) => {
        // TODO: check if this is right (I believe it should be type 'raw')
        // https://github.com/ipfs/go-ipfs/issues/2331
        const raw = new UnixFS('file', chunk)
        const node = new mDAG.DAGNode(raw.marshal())

        dagService.add(node, (err) => {
          if (err) {
            log.err(err)
            // report the failure to the caller instead of only logging it
            return callback(err)
          }
          links.push({
            Hash: node.multihash(),
            Size: node.size(),
            leafSize: raw.fileSize(),
            Name: ''
          })
          cb()
        })
      }, (cb) => {
        const file = new UnixFS('file')
        const parentNode = new mDAG.DAGNode()
        links.forEach((l) => {
          file.addBlockSize(l.leafSize)
          const link = new mDAG.DAGLink(l.Name, l.Size, l.Hash)
          parentNode.addRawLink(link)
        })
        parentNode.data = file.marshal()
        dagService.add(parentNode, (err) => {
          if (err) {
            log.err(err)
            return callback(err)
          }

          callback(null, {
            Hash: parentNode.multihash(),
            Size: parentNode.size()
          })
          cb()
        })
      }))
    } else {
      // create just one file node with the data directly
      const fileUnixFS = new UnixFS('file', buffer)
      const fileNode = new mDAG.DAGNode(fileUnixFS.marshal())

      dagService.add(fileNode, (err) => {
        if (err) {
          log.err(err)
          return callback(err)
        }

        callback(null, {
          Hash: fileNode.multihash(),
          Size: fileNode.size()
        })
      })
    }
  }
  // function streamImporter (stream, callback) {}
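  // A possible sketch (an assumption, not part of this commit): pipe the
  // incoming stream through FixedSizeChunker and reuse the same through2
  // transform/flush logic as bufferImporter above.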
}