Skip to content
This repository was archived by the owner on Apr 29, 2020. It is now read-only.

Commit f964eee

Browse files
committed
refactor: update to new IPLD API
This is part of the Awesome Endeavour: Async Iterators: ipfs/js-ipfs#1670
1 parent bf8bad2 commit f964eee

File tree

6 files changed

+103
-93
lines changed

6 files changed

+103
-93
lines changed

package.json

+3-3
Original file line numberDiff line numberDiff line change
@@ -40,12 +40,12 @@
4040
"chai": "^4.2.0",
4141
"detect-node": "^2.0.4",
4242
"dirty-chai": "^2.0.1",
43-
"ipld": "~0.21.1",
43+
"ipld": "~0.22.0",
4444
"ipld-dag-pb": "~0.15.2",
4545
"ipld-in-memory": "^2.0.0",
46+
"multicodec": "~0.5.0",
4647
"pull-pushable": "^2.2.0",
4748
"pull-stream-to-stream": "^1.3.4",
48-
"pull-zip": "^2.0.1",
4949
"sinon": "^7.1.0",
5050
"stream-to-pull-stream": "^1.7.2"
5151
},
@@ -54,7 +54,7 @@
5454
"cids": "~0.5.5",
5555
"hamt-sharding": "0.0.2",
5656
"ipfs-unixfs": "~0.1.16",
57-
"ipfs-unixfs-importer": "~0.38.0",
57+
"ipfs-unixfs-importer": "git+https://github.com/ipfs/js-ipfs-unixfs-importer.git#new-ipld-api",
5858
"pull-cat": "^1.1.11",
5959
"pull-defer": "~0.2.3",
6060
"pull-paramap": "^1.2.2",

src/file.js

+19-17
Original file line numberDiff line numberDiff line change
@@ -150,23 +150,25 @@ function getChildren (dag, offset, end) {
150150

151151
return pull(
152152
once(filteredLinks),
153-
paramap((children, cb) => {
154-
dag.getMany(children.map(child => child.link.cid), (err, results) => {
155-
if (err) {
156-
return cb(err)
157-
}
158-
159-
cb(null, results.map((result, index) => {
160-
const child = children[index]
161-
162-
return {
163-
start: child.start,
164-
end: child.end,
165-
node: result,
166-
size: child.size
167-
}
168-
}))
169-
})
153+
paramap(async (children, cb) => {
154+
const results = dag.getMany(children.map(child => child.link.cid))
155+
const final = []
156+
for (
157+
let index = 0, result = await results.next();
158+
!result.done;
159+
index++, result = await results.next()
160+
) {
161+
const child = children[index]
162+
const node = result.value
163+
164+
final.push({
165+
start: child.start,
166+
end: child.end,
167+
node: node,
168+
size: child.size
169+
})
170+
}
171+
cb(null, final)
170172
}),
171173
flatten()
172174
)

src/resolve.js

+5-2
Original file line numberDiff line numberDiff line change
@@ -42,8 +42,11 @@ function createResolver (dag, options, depth, parent) {
4242
}
4343

4444
waterfall([
45-
(done) => dag.get(item.cid, done),
46-
(node, done) => done(null, resolveItem(item.cid, node.value, item, options))
45+
(done) => dag.get(item.cid).then(
46+
(node) => done(null, node),
47+
(error) => done(error)
48+
),
49+
(node, done) => done(null, resolveItem(item.cid, node, item, options))
4750
], cb)
4851
}),
4952
flatten(),

test/exporter-sharded.spec.js

+21-13
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ const {
2020
DAGLink,
2121
DAGNode
2222
} = require('ipld-dag-pb')
23+
const multicodec = require('multicodec')
2324

2425
const SHARD_SPLIT_THRESHOLD = 10
2526

@@ -96,10 +97,13 @@ describe('exporter sharded', function () {
9697
files[imported.path].cid = new CID(imported.multihash)
9798
})
9899

99-
ipld.get(directory, cb)
100+
ipld.get(directory).then(
101+
(data) => cb(null, data),
102+
(error) => cb(error)
103+
)
100104
},
101-
({ value, cid }, cb) => {
102-
const dir = UnixFS.unmarshal(value.data)
105+
({ data }, cb) => {
106+
const dir = UnixFS.unmarshal(data)
103107

104108
expect(dir.type).to.equal('hamt-sharded-directory')
105109

@@ -375,23 +379,27 @@ describe('exporter sharded', function () {
375379
], cb)
376380
},
377381
(node, cb) => {
378-
ipld.put(node, {
379-
version: 0,
380-
format: 'dag-pb',
381-
hashAlg: 'sha2-256'
382-
}, cb)
382+
ipld.put(node, multicodec.DAG_PB, {
383+
cidVersion: 0,
384+
hashAlg: multicodec.SHA2_256
385+
}).then(
386+
(cid) => cb(null, cid),
387+
(error) => cb(error)
388+
)
383389
},
384390
(cid, cb) => {
385391
DAGNode.create(new UnixFS('hamt-sharded-directory').marshal(), [
386392
new DAGLink('75normal-dir', 5, cid)
387393
], cb)
388394
},
389395
(node, cb) => {
390-
ipld.put(node, {
391-
version: 1,
392-
format: 'dag-pb',
393-
hashAlg: 'sha2-256'
394-
}, cb)
396+
ipld.put(node, multicodec.DAG_PB, {
397+
cidVersion: 1,
398+
hashAlg: multicodec.SHA2_256
399+
}).then(
400+
(cid) => cb(null, cid),
401+
(error) => cb(error)
402+
)
395403
},
396404
(dir, cb) => {
397405
pull(

test/exporter-subtree.spec.js

+9-1
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ const pull = require('pull-stream')
1111
const randomBytes = require('./helpers/random-bytes')
1212
const waterfall = require('async/waterfall')
1313
const importer = require('ipfs-unixfs-importer')
14+
const multicodec = require('multicodec')
1415

1516
const ONE_MEG = Math.pow(1024, 2)
1617

@@ -132,7 +133,14 @@ describe('exporter subtree', () => {
132133
),
133134
(files, cb) => cb(null, files.pop().multihash),
134135
(buf, cb) => cb(null, new CID(buf)),
135-
(cid, cb) => ipld.put({ a: { file: cid } }, { format: 'dag-cbor' }, cb),
136+
(cid, cb) => {
137+
ipld.put(
138+
{ a: { file: cid } }, multicodec.DAG_CBOR
139+
).then(
140+
(cborNodeCid) => cb(null, cborNodeCid),
141+
(error) => cb(error)
142+
)
143+
},
136144
(cborNodeCid, cb) => pull(
137145
exporter(`${cborNodeCid.toBaseEncodedString()}/a/file/level-1/200Bytes.txt`, ipld),
138146
pull.collect(cb)

test/exporter.spec.js

+46-57
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@ const IPLD = require('ipld')
88
const inMemory = require('ipld-in-memory')
99
const UnixFS = require('ipfs-unixfs')
1010
const pull = require('pull-stream')
11-
const zip = require('pull-zip')
1211
const CID = require('cids')
1312
const doUntil = require('async/doUntil')
1413
const waterfall = require('async/waterfall')
@@ -25,6 +24,7 @@ const {
2524
} = require('ipld-dag-pb')
2625
const isNode = require('detect-node')
2726
const randomBytes = require('./helpers/random-bytes')
27+
const multicodec = require('multicodec')
2828

2929
const exporter = require('../src')
3030
const importer = require('ipfs-unixfs-importer')
@@ -51,13 +51,11 @@ describe('exporter', () => {
5151
DAGNode.create(file.marshal(), options.links, (err, node) => {
5252
expect(err).to.not.exist()
5353

54-
ipld.put(node, {
55-
version: 0,
56-
hashAlg: 'sha2-256',
57-
format: 'dag-pb'
58-
}, (err, cid) => {
59-
cb(err, { file: file, node: node, cid: cid })
60-
})
54+
ipld.put(node, multicodec.DAG_PB, {
55+
cidVersion: 0,
56+
hashAlg: multicodec.SHA2_256
57+
}).then((cid) => cb(null, { file: file, node: node, cid: cid }))
58+
.catch((error) => cb(error))
6159
})
6260
}
6361

@@ -182,47 +180,41 @@ describe('exporter', () => {
182180
})
183181

184182
it('ensure hash inputs are sanitized', (done) => {
185-
dagPut((err, result) => {
183+
dagPut(async (err, result) => {
186184
expect(err).to.not.exist()
187185

188-
ipld.get(result.cid, (err, res) => {
189-
expect(err).to.not.exist()
190-
const unmarsh = UnixFS.unmarshal(result.node.data)
186+
const node = await ipld.get(result.cid)
187+
const unmarsh = UnixFS.unmarshal(node.data)
191188

192-
expect(unmarsh.data).to.deep.equal(result.file.data)
189+
expect(unmarsh.data).to.deep.equal(result.file.data)
193190

194-
pull(
195-
exporter(result.cid, ipld),
196-
pull.collect(onFiles)
197-
)
191+
pull(
192+
exporter(result.cid, ipld),
193+
pull.collect(onFiles)
194+
)
198195

199-
function onFiles (err, files) {
200-
expect(err).to.equal(null)
201-
expect(files).to.have.length(1)
202-
expect(files[0]).to.have.property('cid')
203-
expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
204-
fileEql(files[0], unmarsh.data, done)
205-
}
206-
})
196+
function onFiles (err, files) {
197+
expect(err).to.equal(null)
198+
expect(files).to.have.length(1)
199+
expect(files[0]).to.have.property('cid')
200+
expect(files[0]).to.have.property('path', result.cid.toBaseEncodedString())
201+
fileEql(files[0], unmarsh.data, done)
202+
}
207203
})
208204
})
209205

210206
it('exports a file with no links', (done) => {
211-
dagPut((err, result) => {
207+
dagPut(async (err, result) => {
212208
expect(err).to.not.exist()
213209

210+
const node = await ipld.get(result.cid)
211+
const unmarsh = UnixFS.unmarshal(node.data)
212+
214213
pull(
215-
zip(
216-
pull(
217-
ipld.getStream(result.cid),
218-
pull.map((res) => UnixFS.unmarshal(res.value.data))
219-
),
220-
exporter(result.cid, ipld)
221-
),
214+
exporter(result.cid, ipld),
222215
pull.collect((err, values) => {
223216
expect(err).to.not.exist()
224-
const unmarsh = values[0][0]
225-
const file = values[0][1]
217+
const file = values[0]
226218

227219
fileEql(file, unmarsh.data, done)
228220
})
@@ -292,25 +284,20 @@ describe('exporter', () => {
292284

293285
dagPut({
294286
content: randomBytes(100)
295-
}, (err, result) => {
287+
}, async (err, result) => {
296288
expect(err).to.not.exist()
297289

290+
const node = await ipld.get(result.cid)
291+
const unmarsh = UnixFS.unmarshal(node.data)
292+
298293
pull(
299-
zip(
300-
pull(
301-
ipld.getStream(result.cid),
302-
pull.map((res) => UnixFS.unmarshal(res.value.data))
303-
),
304-
exporter(result.cid, ipld, {
305-
offset,
306-
length
307-
})
308-
),
294+
exporter(result.cid, ipld, {
295+
offset,
296+
length
297+
}),
309298
pull.collect((err, values) => {
310299
expect(err).to.not.exist()
311-
312-
const unmarsh = values[0][0]
313-
const file = values[0][1]
300+
const file = values[0]
314301

315302
fileEql(file, unmarsh.data.slice(offset, offset + length), done)
316303
})
@@ -1153,13 +1140,15 @@ function createAndPersistNode (ipld, type, data, children, callback) {
11531140
return callback(error)
11541141
}
11551142

1156-
ipld.put(node, {
1157-
version: 1,
1158-
hashAlg: 'sha2-256',
1159-
format: 'dag-pb'
1160-
}, (error, cid) => callback(error, {
1161-
node,
1162-
cid
1163-
}))
1143+
ipld.put(node, multicodec.DAG_PB, {
1144+
cidVersion: 1,
1145+
hashAlg: multicodec.SHA2_256
1146+
}).then(
1147+
(cid) => callback(null, {
1148+
cid,
1149+
node
1150+
}),
1151+
(error) => callback(error)
1152+
)
11641153
})
11651154
}

0 commit comments

Comments
 (0)