You can encode a file as follows

```js
import * as UnixFS from "@ipld/unixfs";

// Create readable & writable streams with an internal queue that can
// hold around 32 blocks
const { readable, writable } = new TransformStream(
  {},
  UnixFS.withCapacity(1048576 * 32)
);
// Next we create a writer with a filesystem-like API for encoding files and
// directories into IPLD blocks that will come out on the `readable` end.
const writer = UnixFS.createWriter({ writable });

// Create a file writer that can be used to encode a UnixFS file.
const file = UnixFS.createFileWriter(writer);
// Write some content
file.write(new TextEncoder().encode("hello world"));
// Finalize the file by closing it.
const { cid } = await file.close();

// Close the writer to close the underlying block stream.
writer.close();

// We could encode all of this as a CAR file
encodeCAR({ roots: [cid], blocks: readable });
```
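
The `encodeCAR` helper above is left undefined by the example. A minimal sketch of what it might look like, assuming the separate `@ipld/car` package (it is not part of this library):

```js
import { CarWriter } from "@ipld/car";

// Hypothetical helper: drains the block stream produced on the `readable`
// end into a CAR writer and returns an AsyncIterable<Uint8Array> of CAR bytes.
const encodeCAR = ({ roots, blocks }) => {
  const { writer, out } = CarWriter.create(roots);
  const reader = blocks.getReader();
  (async () => {
    while (true) {
      const { done, value: block } = await reader.read();
      if (done) break;
      // Each block is a { cid, bytes } pair, which is what the CAR writer expects.
      await writer.put(block);
    }
    await writer.close();
  })();
  return out;
};
```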

You can encode (non-sharded) directories with the provided API as well

```ts
import * as UnixFS from "@ipld/unixfs";

export const demo = async () => {
  const { readable, writable } = new TransformStream();
  const writer = UnixFS.createWriter({ writable });

  // write a file
  const file = UnixFS.createFileWriter(writer);
  file.write(new TextEncoder().encode("hello world"));
  const fileLink = await file.close();

  // create a directory and add the file we encoded above
  const dir = UnixFS.createDirectoryWriter(writer);
  dir.set("intro.md", fileLink);
  const dirLink = await dir.close();

  // now wrap the directory above with another one and also add the same
  // file there
  const root = UnixFS.createDirectoryWriter(writer);
  root.set("user", dirLink);
  root.set("hello.md", fileLink);

  // Creates the following UnixFS structure, where intro.md and hello.md
  // link to the same IPFS file.
  // ./
  // ./user/intro.md
  // ./hello.md
  const rootLink = await root.close();
  // ...
  writer.close();
};
```
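
Note that nothing in `demo` consumes the `readable` end, so with the default `TransformStream` queue block writes can stall once it fills up. A minimal sketch of a concurrent consumer (the `collectBlocks` name is illustrative) that you would start before writing and await after `writer.close()`:

```js
// Drain the readable end so that block writes never stall on a full queue.
const collectBlocks = async (readable) => {
  const blocks = [];
  const reader = readable.getReader();
  while (true) {
    const { done, value } = await reader.read();
    if (done) return blocks;
    blocks.push(value);
  }
};
```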

### Configuration

You can configure the DAG layout, chunking, and a number of other things by providing API-compatible components. The library provides several of them, but you can also bring your own.

```js
import * as UnixFS from "@ipld/unixfs";
import * as Rabin from "@ipld/unixfs/file/chunker/rabin";
import * as Trickle from "@ipld/unixfs/file/layout/trickle";
import * as RawLeaf from "multiformats/codecs/raw";
import { sha256 } from "multiformats/hashes/sha2";

const demo = async (blob) => {
  const { readable, writable } = new TransformStream();
  const writer = UnixFS.createWriter({
    writable,
    // you can pass only things you want to override
    settings: {
      chunker: await Rabin.create(),
      fileChunkEncoder: RawLeaf,
      smallFileEncoder: RawLeaf,
      fileLayout: Trickle,
      fileEncoder: UnixFS,
      hasher: sha256,
    },
  });

  const file = UnixFS.createFileWriter(writer);
  // ...
};
```
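
For example, a sketch that instead swaps in a fixed-size chunker and balanced layout (assuming the `@ipld/unixfs/file/chunker/fixed` and `@ipld/unixfs/file/layout/balanced` module paths mirror the fork used in the next section):

```js
import * as UnixFS from "@ipld/unixfs";
import { withMaxChunkSize } from "@ipld/unixfs/file/chunker/fixed";
import { withWidth } from "@ipld/unixfs/file/layout/balanced";

// Fixed-size 1MiB chunks arranged into a balanced tree that is
// up to 174 links wide per node.
const settings = UnixFS.configure({
  chunker: withMaxChunkSize(1024 * 1024),
  fileLayout: withWidth(174),
});

const { readable, writable } = new TransformStream();
const writer = UnixFS.createWriter({ writable, settings });
```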

### Collecting UnixFS FileLinks

You can optionally pass a `unixFsFileLinkWriter` stream to capture metadata for each link (useful for indexing or tracking layout information).

```js
import {
  createWriter,
  createFileWriter,
  configure,
} from "@vascosantos/unixfs";
import * as raw from "multiformats/codecs/raw";
import { withMaxChunkSize } from "@vascosantos/unixfs/file/chunker/fixed";
import { withWidth } from "@vascosantos/unixfs/file/layout/balanced";

const defaultSettings = configure({
  fileChunkEncoder: raw,
  smallFileEncoder: raw,
  chunker: withMaxChunkSize(1024 * 1024),
  fileLayout: withWidth(1024),
});

/**
 * @param {Blob} blob
 * @returns {Promise<import('@vascosantos/unixfs').FileLink[]>}
 */
async function collectUnixFsFileLinks(blob) {
  const fileLinks = [];

  // Create a stream to collect metadata (FileLinks)
  const { readable, writable } = new TransformStream();

  // Set up the main UnixFS writer (data goes nowhere here)
  const unixfsWriter = createWriter({
    writable: new WritableStream(), // Discard actual DAG output
    settings: defaultSettings,
  });

  // Set up the file writer with the link metadata writer
  const unixFsFileLinkWriter = writable.getWriter();

  const fileWriter = createFileWriter({
    ...unixfsWriter,
    initOptions: {
      unixFsFileLinkWriter,
    },
  });

  // Start concurrent reading of the metadata stream
  const fileLinkReader = readable.getReader();
  const readLinks = (async () => {
    while (true) {
      const { done, value } = await fileLinkReader.read();
      if (done) break;
      fileLinks.push(value);
    }
  })();

  // Pipe the blob to the file writer
  await blob.stream().pipeTo(
    new WritableStream({
      async write(chunk) {
        await fileWriter.write(chunk);
      },
    })
  );

  // Finalize everything
  await fileWriter.close();
  await unixfsWriter.close();
  await unixFsFileLinkWriter.close();

  // Wait for all links to be read
  await readLinks;

  return fileLinks;
}

// Usage
const blob = new Blob(["Hello UnixFS links"]);
const links = await collectUnixFsFileLinks(blob);
console.log(links);
```
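
The exact shape of a `FileLink` depends on the library version; assuming it carries `cid` and `contentByteLength` fields (as the upstream `@ipld/unixfs` types do), one hypothetical use of the collected links is deriving a byte-range index over the file:

```js
// Hypothetical post-processing: map each collected link to the byte range
// it covers within the original file (assumes a `contentByteLength` field).
let offset = 0;
const index = links.map((link) => {
  const entry = { cid: link.cid, offset, length: link.contentByteLength };
  offset += link.contentByteLength;
  return entry;
});
console.log(index);
```
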
## License