![](https://img.shields.io/badge/npm-%3E%3D3.0.0-orange.svg?style=flat-square)
![](https://img.shields.io/badge/Node.js-%3E%3D4.0.0-orange.svg?style=flat-square)

> JavaScript implementation of the layout and chunking mechanisms used by IPFS

## Table of Contents

- [Install](#install)
- [Usage](#usage)
- [Contribute](#contribute)
- [License](#license)

## Install

```
> npm install ipfs-unixfs-engine
```

## Usage

### Example Importer

Let's create a little directory to import:
```sh
> cd /tmp
> mkdir foo
> echo 'hello' > foo/bar
> echo 'world' > foo/quux
```
And write the importing logic:

```js
const fs = require('fs')
const Importer = require('ipfs-unixfs-engine').Importer
const filesAddStream = new Importer(<dag or ipld-resolver instance>)

// An array to hold the nested file/dir info returned by the importer.
// A root DAG Node is received upon completion.
const res = []

// Import paths /tmp/foo/bar and /tmp/foo/quux
const rs = fs.createReadStream('/tmp/foo/bar')
const rs2 = fs.createReadStream('/tmp/foo/quux')
const input = { path: '/tmp/foo/bar', content: rs }
const input2 = { path: '/tmp/foo/quux', content: rs2 }

// Listen for the data event from the importer stream
filesAddStream.on('data', (info) => res.push(info))

// The end event of the stream signals that the importer is done
filesAddStream.on('end', () => console.log('Finished adding files!'))

// Call write on the importer to add the file/object tuples
filesAddStream.write(input)
filesAddStream.write(input2)
filesAddStream.end()
```

When run, the stat of each file's DAG Node is emitted on the `data` event, until the root node arrives last.

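The example assumes you already have a dag or ipld-resolver instance at hand. An earlier revision of this example built one from an in-memory repo; below is a sketch along those lines, assuming the `ipld-resolver` constructor accepts a block service (check [IPLD Resolver][] for the current signatures):

```js
// Sketch only: wire up an in-memory repo, a block service and an IPLD
// resolver. Module names follow an earlier revision of this example;
// constructor signatures may differ between versions of these packages.
const memStore = require('abstract-blob-store')
const Repo = require('ipfs-repo')
const BlockService = require('ipfs-block-service')
const IPLDResolver = require('ipld-resolver')

const repo = new Repo('', { stores: memStore })
const blockService = new BlockService(repo)
const ipldResolver = new IPLDResolver(blockService)
```

An instance like `ipldResolver` is what the snippets in this README pass as `<dag or ipld-resolver instance>`.
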
### Importer API

```js
const Importer = require('ipfs-unixfs-engine').Importer
```
#### const add = new Importer(dag)

### Example Exporter

```js
const Exporter = require('ipfs-unixfs-engine').Exporter

// Create an export readable object stream with the hash you want to export
// and a dag or ipld-resolver instance
const filesStream = Exporter(<multihash>, <dag or ipld-resolver instance>)

// Pipe each file's content to the console
filesStream.on('data', (file) => file.content.pipe(process.stdout))
```
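
To round-trip what was just imported, the importer's root node can be handed to the exporter. A hypothetical sketch, assuming the `res` array and `ipldResolver` from the snippets above, and that each emitted entry exposes a `multihash` field (the root arrives last):

```js
// Hypothetical: export the directory the importer example produced.
// `res`, `ipldResolver` and the `multihash` field are assumptions from above.
const root = res[res.length - 1] // the root DAG Node is received upon completion
const exportStream = Exporter(root.multihash, ipldResolver)
exportStream.on('data', (file) => file.content.pipe(process.stdout))
```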

### Exporter API

```js
const Exporter = require('ipfs-unixfs-engine').Exporter
```

### new Exporter(<hash>, <dag or ipld-resolver>)

Uses the given dag API or [IPLD Resolver][] instance to fetch IPFS [UnixFS][] objects by their multihash.
Creates a new readable stream in object mode that outputs objects of the form:
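
A sketch; only the `path` and `content` fields relied on elsewhere in this README are shown:

```js
{
  path: '/tmp/foo/bar',       // full path of the exported file
  content: <readable stream>  // stream of the file's contents
}
```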
Errors are received as with any other stream: listen for the `'error'` event.
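
For example, to log any failure during an export:

```js
filesStream.on('error', (err) => {
  console.error(err)
})
```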

[IPLD Resolver]: https://github.com/ipld/js-ipld-resolver
[UnixFS]: https://github.com/ipfs/specs/tree/master/unixfs
## Contribute