@@ -20,6 +20,7 @@ const $details = document.querySelector('#details')
20
20
// Cached DOM lookups: controls that start out disabled until the IPFS
// node is ready, and the container the added-file list renders into.
const $allDisabledButtons = document.querySelectorAll('button:disabled')
const $allDisabledInputs = document.querySelectorAll('input:disabled')
const $filesList = document.querySelector('.file-list')

// stream-buffers supplies ReadableStreamBuffer, used to feed large
// dropped files through node.files.createAddStream in chunks.
const streamBuffers = require('stream-buffers')

// Assigned later, once the IPFS node has been created and started.
let node
let peerInfo
@@ -137,16 +138,71 @@ function onDrop (event) {
137
138
// Threshold below which a dropped file is added in one shot; larger
// files are streamed through createAddStream so the renderer is not
// asked to hold a single huge Buffer in the add call.
const SMALL_FILE_LIMIT = 50000000 // bytes (~50 MB)
const STREAM_CHUNK_SIZE = 32048 // bytes per chunk fed to the add stream

// NOTE: forEach, not map — the per-file promise chains are consumed by
// their own .catch(onError); the array of results was never used.
filesArray.forEach((file) => {
  readFileContents(file)
    .then((buffer) => {
      const fileSize = buffer.byteLength

      if (fileSize < SMALL_FILE_LIMIT) {
        // Small file: a single files.add call; the next .then renders
        // the returned entries.
        return node.files.add([{
          path: file.name,
          content: new node.types.Buffer(buffer)
        }])
      } else {
        // Large file: chunk it through createAddStream. Progress is
        // tracked by counting bytes drained from the buffer stream.
        let progress = 0

        const myReadableStreamBuffer = new streamBuffers.ReadableStreamBuffer({
          // frequency: 10, // in milliseconds.
          chunkSize: STREAM_CHUNK_SIZE // in bytes.
        })

        node.files.createAddStream((err, stream) => {
          if (err) {
            // Route to the shared handler instead of throwing inside
            // an async callback (a throw here is uncatchable by the
            // surrounding promise chain).
            return onError(err)
          }

          // Declared before any handler that reads it, so the
          // stream 'data' callback can never hit a TDZ ReferenceError.
          let progressbar = null

          // addedFile: renamed to avoid shadowing the outer `file`.
          stream.on('data', (addedFile) => {
            $multihashInput.value = addedFile.hash
            $filesStatus.innerHTML = `Added ${addedFile.path} as ${addedFile.hash}`

            if (progressbar) {
              clearInterval(progressbar)
              progress = 0
            }
          })

          myReadableStreamBuffer.on('data', (chunk) => {
            progress += chunk.byteLength
          })

          // Older stream-buffers versions lack destroy(); the add
          // stream calls it on cleanup, so stub it if missing.
          if (!myReadableStreamBuffer.destroy) {
            myReadableStreamBuffer.destroy = () => {}
          }

          stream.write({
            path: file.name,
            content: myReadableStreamBuffer
          })

          myReadableStreamBuffer.put(Buffer.from(buffer))
          myReadableStreamBuffer.stop()

          myReadableStreamBuffer.on('end', () => {
            stream.end()
          })

          myReadableStreamBuffer.resume()

          // Periodic progress log while the chunks drain.
          progressbar = setInterval(() => {
            console.log('progress: ', progress, '/', fileSize, ' = ', Math.floor((progress / fileSize) * 100), '%')
          }, 5000)
        })
        // Deliberately returns undefined: the streaming path reports
        // results via the stream 'data' handler, and the next .then
        // guards against the missing files array.
      }
    })
    .then((files) => {
      if (files && files.length) {
        $multihashInput.value = files[0].hash
        $filesStatus.innerHTML = files
          .map((e) => `Added ${e.path} as ${e.hash}`)
          .join('<br>')
      }
    })
    .catch(onError)
})
@@ -178,14 +234,15 @@ function refreshPeerList () {
178
234
if ( err ) {
179
235
return onError ( err )
180
236
}
181
-
182
237
const peersAsHtml = peers
183
238
. map ( ( peer ) => {
184
- const addr = peer . addr . toString ( )
185
- if ( addr . indexOf ( 'ipfs' ) >= 0 ) {
186
- return addr
187
- } else {
188
- return addr + peer . peer . id . toB58String ( )
239
+ if ( peer . addr ) {
240
+ const addr = peer . addr . toString ( )
241
+ if ( addr . indexOf ( 'ipfs' ) >= 0 ) {
242
+ return addr
243
+ } else {
244
+ return addr + peer . peer . id . toB58String ( )
245
+ }
189
246
}
190
247
} )
191
248
. map ( ( addr ) => {
0 commit comments