 'use strict'
 
 const Block = require('ipfs-block')
-const pull = require('pull-stream')
 const Lock = require('lock')
 const base32 = require('base32.js')
 const path = require('path')
-const write = require('pull-write')
 const parallel = require('run-parallel')
-const defer = require('pull-defer/source')
+const pull = require('pull-stream')
+const pullWrite = require('pull-write')
+const pullDefer = require('pull-defer/source')
 
 const PREFIX_LENGTH = 5
+const EXTENSION = 'data'
 
 exports = module.exports
 
-function multihashToPath (multihash, extension) {
-  extension = extension || 'data'
+function multihashToPath (multihash) {
   const encoder = new base32.Encoder()
   const hash = encoder.write(multihash).finalize()
-  const filename = `${hash}.${extension}`
+  const filename = `${hash}.${EXTENSION}`
   const folder = filename.slice(0, PREFIX_LENGTH)
 
   return path.join(folder, filename)
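After this change every block is written as `<base32(multihash)>.data` inside a folder named after the first five characters of that filename. A minimal standalone sketch of the same derivation, assuming `multihash` is a Buffer (the helper name `blockPathFor` is illustrative and not part of the patch):

const path = require('path')
const base32 = require('base32.js')

// Same derivation as the simplified multihashToPath: base32-encode the
// multihash, append the fixed 'data' extension, shard by the first 5 chars.
function blockPathFor (multihash) {
  const hash = new base32.Encoder().write(multihash).finalize()
  const filename = `${hash}.data`
  return path.join(filename.slice(0, 5), filename)
}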
@@ -27,82 +27,103 @@ exports.setUp = (basePath, BlobStore, locks) => {
   const store = new BlobStore(basePath + '/blocks')
   const lock = new Lock()
 
-  function writeBlock (block, cb) {
-    if (!block || !block.data) {
-      return cb(new Error('Invalid block'))
+  // blockBlob is an object with:
+  // { data: <>, key: <> }
+  function writeBlock (blockBlob, callback) {
+    if (!blockBlob || !blockBlob.data) {
+      return callback(new Error('Invalid block'))
     }
 
-    const key = multihashToPath(block.key, block.extension)
-
-    lock(key, (release) => pull(
-      pull.values([block.data]),
-      store.write(key, release((err) => {
-        if (err) {
-          return cb(err)
-        }
-        cb(null, { key })
-      }))
-    ))
+    const key = multihashToPath(blockBlob.key)
+
+    lock(key, (release) => {
+      pull(
+        pull.values([
+          blockBlob.data
+        ]),
+        store.write(key, release(released))
+      )
+    })
+
+    // called once the lock is released
+    function released (err) {
+      if (err) {
+        return callback(err)
+      }
+      callback(null, { key: key })
+    }
   }
 
   return {
-    getStream (key, extension) {
+    // returns a pull-stream of one block being read
+    getStream (key) {
       if (!key) {
         return pull.error(new Error('Invalid key'))
       }
 
-      const p = multihashToPath(key, extension)
-      const deferred = defer()
+      const blockPath = multihashToPath(key)
+      const deferred = pullDefer()
 
-      lock(p, (release) => {
-        const ext = extension === 'data' ? 'protobuf' : extension
+      lock(blockPath, (release) => {
         pull(
-          store.read(p),
-          pull.collect(release((err, data) => {
-            if (err) {
-              return deferred.abort(err)
-            }
-
-            deferred.resolve(pull.values([
-              new Block(Buffer.concat(data), ext)
-            ]))
-          }))
+          store.read(blockPath),
+          pull.collect(release(released))
         )
       })
 
+      function released (err, data) {
+        if (err) {
+          return deferred.abort(err)
+        }
+
+        deferred.resolve(
+          pull.values([
+            new Block(Buffer.concat(data))
+          ])
+        )
+      }
+
       return deferred
     },
 
+    /*
+     * putStream - write multiple blocks
+     *
+     * returns a pull-stream that expects blockBlobs
+     *
+     * NOTE: a blockBlob is a { data: <>, key: <> } and not an
+     * ipfs-block instance. This is because Block instances support
+     * several types of hashing and it is up to the BlockService
+     * to understand the right one to use (given the CID)
+     */
+    // TODO
+    // consider using a more explicit name; this can cause some confusion,
+    // since the natural association is
+    //   getStream - createReadStream  - read one
+    //   putStream - createWriteStream - write one
+    // where in fact it is:
+    //   getStream - createReadStream       - read one (the same)
+    //   putStream - createFilesWriteStream - write several
+    //
     putStream () {
       let ended = false
       let written = []
       let push = null
 
-      const sink = write((blocks, cb) => {
-        parallel(blocks.map((block) => (cb) => {
-          writeBlock(block, (err, meta) => {
-            if (err) {
-              return cb(err)
-            }
-
-            if (push) {
-              const read = push
-              push = null
-              read(null, meta)
-              return cb()
-            }
-
-            written.push(meta)
-            cb()
-          })
-        }), cb)
+      const sink = pullWrite((blockBlobs, cb) => {
+        const tasks = writeTasks(blockBlobs)
+        parallel(tasks, cb)
       }, null, 100, (err) => {
         ended = err || true
-        if (push) push(ended)
+        if (push) {
+          push(ended)
+        }
       })
 
       const source = (end, cb) => {
-        if (end) ended = end
+        if (end) {
+          ended = end
+        }
         if (ended) {
           return cb(ended)
         }
@@ -114,35 +135,54 @@ exports.setUp = (basePath, BlobStore, locks) => {
         push = cb
       }
 
-      return { source, sink }
-    },
+      /*
+       * Creates individual tasks to write each block blob that can be
+       * executed in parallel
+       */
+      function writeTasks (blockBlobs) {
+        return blockBlobs.map((blockBlob) => {
+          return (cb) => {
+            writeBlock(blockBlob, (err, meta) => {
+              if (err) {
+                return cb(err)
+              }
+
+              if (push) {
+                const read = push
+                push = null
+                read(null, meta)
+                return cb()
+              }
+
+              written.push(meta)
+              cb()
+            })
+          }
+        })
+      }
 
-    has (key, extension, cb) {
-      if (typeof extension === 'function') {
-        cb = extension
-        extension = undefined
+      return {
+        source: source,
+        sink: sink
       }
+    },
 
+    has (key, callback) {
       if (!key) {
-        return cb(new Error('Invalid key'))
+        return callback(new Error('Invalid key'))
       }
 
-      const p = multihashToPath(key, extension)
-      store.exists(p, cb)
+      const blockPath = multihashToPath(key)
+      store.exists(blockPath, callback)
     },
 
-    delete (key, extension, cb) {
-      if (typeof extension === 'function') {
-        cb = extension
-        extension = undefined
-      }
-
+    delete (key, callback) {
       if (!key) {
-        return cb(new Error('Invalid key'))
+        return callback(new Error('Invalid key'))
       }
 
-      const p = multihashToPath(key, extension)
-      store.remove(p, cb)
+      const blockPath = multihashToPath(key)
+      store.remove(blockPath, callback)
     }
   }
 }
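Taken together, the refactor leaves the store with a stream-only, extension-free surface. Below is a hedged usage sketch under the following assumptions: `blockstore` stands for the object returned by `exports.setUp`, `blob` is a `{ data: <Buffer>, key: <multihash> }` blockBlob, and `key` is a block's multihash; none of these names appear in the patch itself.

const pull = require('pull-stream')

// putStream() returns separate source/sink ends: feed blockBlobs into the
// sink, and the source emits one { key: <path> } per block once written.
const ws = blockstore.putStream()
pull(pull.values([blob]), ws.sink)
pull(ws.source, pull.collect((err, metas) => {
  if (err) throw err
  console.log('wrote', metas)   // e.g. [{ key: '<folder>/<filename>.data' }]
}))

// getStream(key) returns a pull-stream yielding a single ipfs-block Block
// reassembled from the data on disk.
pull(
  blockstore.getStream(key),
  pull.collect((err, blocks) => {
    if (err) throw err
    console.log(blocks[0].data)   // the block's contents as a Buffer
  })
)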