Thanks to visit codestin.com
Credit goes to github.com

Skip to content
This repository was archived by the owner on Aug 12, 2020. It is now read-only.

Commit aa61cce

Browse files
committed
fix: updates ipld-dag-pb dep to version without .cid properties
Follows on from ipld/js-ipld-dag-pb#99 and updates this module to not rely on DAGNodes having knowledge of their CIDs. Bonus: also removes the use of js-ipfs from this module, breaking another circular dependency in the project.
1 parent c5c0720 commit aa61cce

16 files changed

+201
-220
lines changed

package.json

+2-3
Original file line numberDiff line numberDiff line change
@@ -40,10 +40,9 @@
4040
"aegir": "^17.0.0",
4141
"chai": "^4.2.0",
4242
"dirty-chai": "^2.0.1",
43-
"ipfs": "~0.32.3",
4443
"ipfs-block-service": "~0.15.1",
4544
"ipfs-repo": "~0.25.0",
46-
"ipld": "~0.19.1",
45+
"ipld": "~0.20.0",
4746
"mkdirp": "~0.5.1",
4847
"multihashes": "~0.4.14",
4948
"ncp": "^2.0.0",
@@ -58,7 +57,7 @@
5857
"cids": "~0.5.5",
5958
"deep-extend": "~0.6.0",
6059
"ipfs-unixfs": "~0.1.16",
61-
"ipld-dag-pb": "~0.14.11",
60+
"ipld-dag-pb": "~0.15.0",
6261
"left-pad": "^1.3.0",
6362
"multihashing-async": "~0.5.1",
6463
"pull-batch": "^1.0.0",

src/builder/builder.js

+28-65
Original file line numberDiff line numberDiff line change
@@ -6,13 +6,11 @@ const pull = require('pull-stream')
66
const through = require('pull-through')
77
const parallel = require('async/parallel')
88
const waterfall = require('async/waterfall')
9-
const dagPB = require('ipld-dag-pb')
10-
const CID = require('cids')
11-
const multihash = require('multihashing-async')
12-
9+
const persist = require('../utils/persist')
1310
const reduce = require('./reduce')
14-
15-
const DAGNode = dagPB.DAGNode
11+
const {
12+
DAGNode
13+
} = require('ipld-dag-pb')
1614

1715
const defaultOptions = {
1816
chunkerOptions: {
@@ -27,12 +25,6 @@ const defaultOptions = {
2725

2826
module.exports = function builder (createChunker, ipld, createReducer, _options) {
2927
const options = extend({}, defaultOptions, _options)
30-
options.cidVersion = options.cidVersion || options.cidVersion
31-
options.hashAlg = options.hashAlg || defaultOptions.hashAlg
32-
33-
if (options.hashAlg !== 'sha2-256') {
34-
options.cidVersion = 1
35-
}
3628

3729
return function (source) {
3830
return function (items, cb) {
@@ -71,33 +63,17 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
7163
const d = new UnixFS('directory')
7264

7365
waterfall([
74-
(cb) => DAGNode.create(d.marshal(), [], options.hashAlg, cb),
75-
(node, cb) => {
76-
if (options.onlyHash) {
77-
return cb(null, node)
78-
}
79-
80-
const cid = new CID(options.cidVersion, 'dag-pb', node.multihash)
81-
82-
node = new DAGNode(
83-
node.data,
84-
node.links,
85-
node.serialized,
86-
cid
87-
)
88-
89-
ipld.put(node, {
90-
cid
91-
}, (err) => cb(err, node))
92-
}
93-
], (err, node) => {
66+
(cb) => DAGNode.create(d.marshal(), [], cb),
67+
(node, cb) => persist(node, ipld, options, cb)
68+
], (err, result) => {
9469
if (err) {
9570
return callback(err)
9671
}
72+
9773
callback(null, {
9874
path: item.path,
99-
multihash: node.multihash,
100-
size: node.size
75+
multihash: result.cid.buffer,
76+
size: result.node.size
10177
})
10278
})
10379
}
@@ -134,55 +110,42 @@ module.exports = function builder (createChunker, ipld, createReducer, _options)
134110
}),
135111
pull.asyncMap((buffer, callback) => {
136112
if (options.rawLeaves) {
137-
return multihash(buffer, options.hashAlg, (error, hash) => {
138-
if (error) {
139-
return callback(error)
140-
}
141-
142-
return callback(null, {
143-
multihash: hash,
144-
size: buffer.length,
145-
leafSize: buffer.length,
146-
cid: new CID(1, 'raw', hash),
147-
data: buffer
148-
})
113+
return callback(null, {
114+
size: buffer.length,
115+
leafSize: buffer.length,
116+
data: buffer
149117
})
150118
}
151119

152120
const file = new UnixFS(options.leafType, buffer)
153121

154-
DAGNode.create(file.marshal(), [], options.hashAlg, (err, node) => {
122+
DAGNode.create(file.marshal(), [], (err, node) => {
155123
if (err) {
156124
return callback(err)
157125
}
158126

159127
callback(null, {
160-
multihash: node.multihash,
161128
size: node.size,
162129
leafSize: file.fileSize(),
163-
cid: new CID(options.cidVersion, 'dag-pb', node.multihash),
164130
data: node
165131
})
166132
})
167133
}),
168134
pull.asyncMap((leaf, callback) => {
169-
if (options.onlyHash) {
170-
return callback(null, leaf)
171-
}
135+
persist(leaf.data, ipld, options, (error, results) => {
136+
if (error) {
137+
return callback(error)
138+
}
172139

173-
ipld.put(leaf.data, {
174-
cid: leaf.cid
175-
}, (error) => callback(error, leaf))
176-
}),
177-
pull.map((leaf) => {
178-
return {
179-
path: file.path,
180-
multihash: leaf.cid.buffer,
181-
size: leaf.size,
182-
leafSize: leaf.leafSize,
183-
name: '',
184-
cid: leaf.cid
185-
}
140+
callback(null, {
141+
size: leaf.size,
142+
leafSize: leaf.leafSize,
143+
data: results.node,
144+
multihash: results.cid.buffer,
145+
path: leaf.path,
146+
name: ''
147+
})
148+
})
186149
}),
187150
through( // mark as single node if only one single node
188151
function onData (data) {

src/builder/reduce.js

+10-31
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
const waterfall = require('async/waterfall')
44
const dagPB = require('ipld-dag-pb')
55
const UnixFS = require('ipfs-unixfs')
6-
const CID = require('cids')
6+
const persist = require('../utils/persist')
77

88
const DAGLink = dagPB.DAGLink
99
const DAGNode = dagPB.DAGNode
@@ -14,10 +14,10 @@ module.exports = function reduce (file, ipld, options) {
1414
const leaf = leaves[0]
1515

1616
return callback(null, {
17-
path: file.path,
18-
multihash: leaf.multihash,
1917
size: leaf.size,
2018
leafSize: leaf.leafSize,
19+
multihash: leaf.multihash,
20+
path: file.path,
2121
name: leaf.name
2222
})
2323
}
@@ -28,44 +28,23 @@ module.exports = function reduce (file, ipld, options) {
2828
const links = leaves.map((leaf) => {
2929
f.addBlockSize(leaf.leafSize)
3030

31-
let cid = leaf.cid
32-
33-
if (!cid) {
34-
// we are an intermediate node
35-
cid = new CID(options.cidVersion, 'dag-pb', leaf.multihash)
36-
}
37-
38-
return new DAGLink(leaf.name, leaf.size, cid.buffer)
31+
return new DAGLink(leaf.name, leaf.size, leaf.multihash)
3932
})
4033

4134
waterfall([
42-
(cb) => DAGNode.create(f.marshal(), links, options.hashAlg, cb),
43-
(node, cb) => {
44-
const cid = new CID(options.cidVersion, 'dag-pb', node.multihash)
45-
46-
if (options.onlyHash) {
47-
return cb(null, {
48-
node, cid
49-
})
50-
}
51-
52-
ipld.put(node, {
53-
cid
54-
}, (error) => cb(error, {
55-
node, cid
56-
}))
57-
}
35+
(cb) => DAGNode.create(f.marshal(), links, cb),
36+
(node, cb) => persist(node, ipld, options, cb)
5837
], (error, result) => {
5938
if (error) {
6039
return callback(error)
6140
}
6241

6342
callback(null, {
64-
name: '',
65-
path: file.path,
66-
multihash: result.cid.buffer,
6743
size: result.node.size,
68-
leafSize: f.fileSize()
44+
leafSize: f.fileSize(),
45+
multihash: result.cid.buffer,
46+
path: file.path,
47+
name: ''
6948
})
7049
})
7150
}

src/exporter/dir-flat.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
1313
name: name,
1414
depth: depth,
1515
path: path,
16-
hash: cid,
16+
multihash: cid.buffer,
1717
size: node.size,
1818
type: 'dir'
1919
}
@@ -26,7 +26,7 @@ function dirExporter (cid, node, name, path, pathRest, resolve, size, dag, paren
2626
size: link.size,
2727
name: link.name,
2828
path: path + '/' + link.name,
29-
multihash: link.multihash,
29+
multihash: link.cid.buffer,
3030
linkName: link.name,
3131
pathRest: pathRest.slice(1),
3232
type: 'dir'

src/exporter/dir-hamt-sharded.js

+2-2
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
1313
name: name,
1414
depth: depth,
1515
path: path,
16-
hash: cid,
16+
multihash: cid.buffer,
1717
size: node.size,
1818
type: 'dir'
1919
}
@@ -36,7 +36,7 @@ function shardedDirExporter (cid, node, name, path, pathRest, resolve, size, dag
3636
depth: depth + 1,
3737
name: p,
3838
path: pp,
39-
multihash: link.multihash,
39+
multihash: link.cid.buffer,
4040
pathRest: p ? pathRest.slice(1) : pathRest,
4141
parent: dir || parent
4242
}

src/exporter/file.js

+3-4
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
const traverse = require('pull-traverse')
44
const UnixFS = require('ipfs-unixfs')
5-
const CID = require('cids')
65
const pull = require('pull-stream')
76
const paramap = require('pull-paramap')
87
const extractDataFromBlock = require('./extract-data-from-block')
@@ -43,7 +42,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
4342
content: pull.once(Buffer.alloc(0)),
4443
name: name,
4544
path: path,
46-
hash: cid,
45+
multihash: cid.buffer,
4746
size: fileSize,
4847
type: 'file'
4948
})
@@ -64,7 +63,7 @@ module.exports = (cid, node, name, path, pathRest, resolve, size, dag, parent, d
6463
content: content,
6564
name: name,
6665
path: path,
67-
hash: cid,
66+
multihash: cid.buffer,
6867
size: fileSize,
6968
type: 'file'
7069
}])
@@ -142,7 +141,7 @@ function getChildren (dag, offset, end) {
142141
return pull(
143142
pull.values(filteredLinks),
144143
paramap((child, cb) => {
145-
dag.get(new CID(child.link.multihash), (error, result) => cb(error, {
144+
dag.get(child.link.cid, (error, result) => cb(error, {
146145
start: child.start,
147146
end: child.end,
148147
node: result && result.value,

src/exporter/index.js

+4-2
Original file line numberDiff line numberDiff line change
@@ -54,9 +54,11 @@ module.exports = (path, dag, options) => {
5454
const pathLengthToCut = join(
5555
[dPath.base].concat(dPath.rest.slice(0, dPath.rest.length - 1))).length
5656

57+
const cid = new CID(dPath.base)
58+
5759
return pull(
5860
pull.values([{
59-
multihash: new CID(dPath.base),
61+
multihash: cid.buffer,
6062
name: dPath.base,
6163
path: dPath.base,
6264
pathRest: dPath.rest,
@@ -70,7 +72,7 @@ module.exports = (path, dag, options) => {
7072
name: node.name,
7173
path: finalPathFor(node),
7274
size: node.size,
73-
hash: node.hash || node.multihash,
75+
hash: node.multihash,
7476
content: node.content,
7577
type: node.type
7678
}

src/importer/dir-flat.js

+6-17
Original file line numberDiff line numberDiff line change
@@ -2,12 +2,12 @@
22

33
const asyncEachSeries = require('async/eachSeries')
44
const waterfall = require('async/waterfall')
5-
const CID = require('cids')
65
const dagPB = require('ipld-dag-pb')
76
const UnixFS = require('ipfs-unixfs')
87
const DAGLink = dagPB.DAGLink
98
const DAGNode = dagPB.DAGNode
109
const Dir = require('./dir')
10+
const persist = require('../utils/persist')
1111

1212
class DirFlat extends Dir {
1313
constructor (props, _options) {
@@ -56,28 +56,17 @@ class DirFlat extends Dir {
5656
})
5757

5858
const dir = new UnixFS('directory')
59-
const options = this._options
6059

6160
waterfall(
6261
[
63-
(callback) => DAGNode.create(dir.marshal(), links, options.hashAlg, callback),
64-
(node, callback) => {
65-
if (options.onlyHash) return callback(null, node)
66-
67-
let cid = new CID(node.multihash)
68-
69-
if (options.cidVersion === 1) {
70-
cid = cid.toV1()
71-
}
72-
73-
ipld.put(node, { cid }, (err) => callback(err, node))
74-
},
75-
(node, callback) => {
76-
this.multihash = node.multihash
62+
(callback) => DAGNode.create(dir.marshal(), links, callback),
63+
(node, callback) => persist(node, ipld, this._options, callback),
64+
({cid, node}, callback) => {
65+
this.multihash = cid.buffer
7766
this.size = node.size
7867
const pushable = {
7968
path: path,
80-
multihash: node.multihash,
69+
multihash: cid.buffer,
8170
size: node.size
8271
}
8372
source.push(pushable)

0 commit comments

Comments (0)