
Commit 1ad6767

dignifiedquire authored and daviddias committed
docs(example): improve exchange example
Always stream incoming data through pull-streams and use browserify-aes directly from master. With this I was able to add a 300 MB file quite quickly.
1 parent: bc022a1

File tree: 3 files changed, +40 −85 lines
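The gist of the change: instead of reading each dropped file fully into memory with FileReader and feeding it to a stream-buffers ReadableStreamBuffer, app.js now pipes the files through a single pull-stream pipeline. A minimal sketch of that pattern, built from the same modules the diff below adds (the addFiles helper name is ours, for illustration; createAddPullStream is the pull-stream variant of files.add used in the diff):

// Minimal sketch, assuming a started js-ipfs `node` and an array of browser
// File objects. pull-filereader reads each File in chunks, so nothing is
// buffered whole in memory.
const pull = require('pull-stream')
const pullFilereader = require('pull-filereader')

function addFiles (node, files, done) {
  pull(
    pull.values(files),                  // source: one File at a time
    pull.asyncMap((file, cb) => pull(    // add each file, one after another
      pull.values([{ path: file.name, content: pullFilereader(file) }]),
      node.files.createAddPullStream(),  // chunks and hashes as it reads
      pull.collect((err, res) => cb(err, err ? null : res[0]))
    )),
    pull.collect(done)                   // done(err, addedEntries)
  )
}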

examples/exchange-files-in-browser/package.json (+3)

@@ -12,6 +12,9 @@
     "http-server": "^0.10.0"
   },
   "dependencies": {
+    "browserify-aes": "crypto-browserify/browserify-aes#master",
+    "pull-filereader": "^1.0.1",
+    "pull-stream": "^3.6.0",
     "stream-buffers": "^3.0.1"
   }
 }

examples/exchange-files-in-browser/public/index.html (+1 −1)

@@ -62,7 +62,7 @@ <h2>Peers</h2>
     </div>

     <!-- The IPFS node module -->
-    <script src="//unpkg.com/ipfs/dist/index.min.js"></script>
+    <!-- <script src="//unpkg.com/ipfs/dist/index.min.js"></script> -->
     <!-- <script src="js/app.js"></script> -->
     <script src="js/bundle.js"></script>
   </body>

examples/exchange-files-in-browser/public/js/app.js (+36 −84)

@@ -1,5 +1,4 @@
 'use strict'
-/* global self */

 const $startButton = document.querySelector('#start')
 const $stopButton = document.querySelector('#stop')
@@ -20,7 +19,9 @@ const $details = document.querySelector('#details')
 const $allDisabledButtons = document.querySelectorAll('button:disabled')
 const $allDisabledInputs = document.querySelectorAll('input:disabled')
 const $filesList = document.querySelector('.file-list')
-const streamBuffers = require('stream-buffers')
+const Ipfs = require('../../../../src/core')
+const pullFilereader = require('pull-filereader')
+const pull = require('pull-stream')

 let node
 let peerInfo
@@ -33,7 +34,7 @@ function start () {
   if (!node) {
     updateView('starting', node)

-    node = new self.Ipfs({repo: 'ipfs-' + Math.random()})
+    node = new Ipfs({repo: 'ipfs-' + Math.random()})

     node.on('start', () => {
       node.id().then((id) => {
@@ -119,93 +120,44 @@ function onDrop (event) {
     onError('IPFS must be started before files can be added')
     return
   }
-  const dt = event.dataTransfer
-  const files = dt.files
-
-  function readFileContents (file) {
-    return new Promise((resolve) => {
-      const reader = new window.FileReader()
-      reader.onload = (event) => resolve(event.target.result)
-      reader.readAsArrayBuffer(file)
-    })
-  }

-  let filesArray = []
-  for (let i = 0; i < files.length; i++) {
-    filesArray.push(files[i])
+  let files = []
+  for (let i = 0; i < event.dataTransfer.files.length; i++) {
+    files.push(event.dataTransfer.files[i])
   }

-  filesArray.map((file) => {
-    readFileContents(file)
-      .then((buffer) => {
-        let fileSize = buffer.byteLength
-
-        if (fileSize < 50000000) {
-          return node.files.add([{
-            path: file.name,
-            content: new node.types.Buffer(buffer)
-          }])
-        } else {
-          // use createAddStream and chunk the file.
-          let progress = 0
-
-          let myReadableStreamBuffer = new streamBuffers.ReadableStreamBuffer({
-            // frequency: 10, // in milliseconds.
-            chunkSize: 32048 // in bytes.
-          })
-
-          node.files.createAddStream((err, stream) => {
-            if (err) throw err
-
-            stream.on('data', (file) => {
-              $multihashInput.value = file.hash
-              $filesStatus.innerHTML = `Added ${file.path} as ${file.hash}`
-
-              if (progressbar) {
-                clearInterval(progressbar)
-                progress = 0
-              }
-            })
-
-            myReadableStreamBuffer.on('data', (chunk) => {
-              progress += chunk.byteLength
-            })
-
-            if (!myReadableStreamBuffer.destroy) {
-              myReadableStreamBuffer.destroy = () => {}
-            }
-
-            stream.write({
-              path: file.name,
-              content: myReadableStreamBuffer
-            })
-
-            myReadableStreamBuffer.put(Buffer.from(buffer))
-            myReadableStreamBuffer.stop()
-
-            myReadableStreamBuffer.on('end', () => {
-              stream.end()
-            })
-
-            myReadableStreamBuffer.resume()
-
-            // progress.
-            let progressbar = setInterval(() => {
-              console.log('progress: ', progress, '/', fileSize, ' = ', Math.floor((progress / fileSize) * 100), '%')
-            }, 5000)
-          })
+  pull(
+    pull.values(files),
+    pull.through((file) => console.log('Adding %s', file)),
+    pull.asyncMap((file, cb) => pull(
+      pull.values([{
+        path: file.name,
+        content: pullFilereader(file)
+      }]),
+      node.files.createAddPullStream(),
+      pull.collect((err, res) => {
+        if (err) {
+          return cb(err)
         }
-      })
-      .then((files) => {
-        if (files && files.length) {
-          $multihashInput.value = files[0].hash
-          $filesStatus.innerHTML = files
+        const file = res[0]
+        console.log('Adding %s finished', file.path)
+
+        $multihashInput.value = file.hash
+        $filesStatus.innerHTML = `Added ${file.path} as ${file.hash}`
+        cb(null, file)
+      }))),
+    pull.collect((err, files) => {
+      if (err) {
+        return onError(err)
+      }
+      if (files && files.length) {
+        $multihashInput.value = files[0].hash
+        $filesStatus.innerHTML = files
          .map((e) => `Added ${e.path} as ${e.hash}`)
          .join('<br>')
-        }
-      })
-      .catch(onError)
-  })
+      }
+    })
+  )
 }

 /*
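What makes the 300 MB case work is that pullFilereader(file) yields the file as a sequence of Buffer chunks read on demand, so the whole file never sits in memory at once. A standalone sketch of just that piece (the measure helper is hypothetical, for illustration only):

// Count the bytes of a dropped File without buffering it whole.
// `file` is a browser File object; chunking is pull-filereader's default.
const pull = require('pull-stream')
const pullFilereader = require('pull-filereader')

function measure (file) {
  let total = 0
  pull(
    pullFilereader(file),                    // source of Buffer chunks
    pull.drain(
      (chunk) => { total += chunk.length },  // per-chunk progress
      (err) => console.log(err || `read ${total} bytes of ${file.name}`)
    )
  )
}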
