
Commit a6023f4

chore: add readme
1 parent a33ec24 commit a6023f4

1 file changed: README.md (+112 −33 lines)
@@ -31,81 +31,81 @@ This library provides functionality similar to [ipfs-unixfs-importer][], but it

You can encode a file as follows:

```js
import * as UnixFS from "@ipld/unixfs";

// Create readable & writable streams with an internal queue that can
// hold around 32 blocks.
const { readable, writable } = new TransformStream(
  {},
  UnixFS.withCapacity(1048576 * 32)
);
// Next we create a writer with a filesystem-like API for encoding files and
// directories into IPLD blocks that will come out on the `readable` end.
const writer = UnixFS.createWriter({ writable });

// Create a file writer that can be used to encode a UnixFS file.
const file = UnixFS.createFileWriter(writer);
// Write some content.
file.write(new TextEncoder().encode("hello world"));
// Finalize the file by closing it.
const { cid } = await file.close();

// Close the writer to close the underlying block stream.
writer.close();

// We could encode all of this as a CAR file (see the `encodeCAR` sketch below).
encodeCAR({ roots: [cid], blocks: readable });
```
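
Note that `encodeCAR` is not provided by this library. A minimal sketch of such a helper, assuming the `CarWriter` API from the separate `@ipld/car` package and that each block arriving on `readable` is a `{ cid, bytes }` pair:

```js
import { CarWriter } from "@ipld/car";

// Hypothetical helper: drain UnixFS blocks into a CAR byte stream.
const encodeCAR = ({ roots, blocks }) => {
  const { writer, out } = CarWriter.create(roots);
  // Pump every { cid, bytes } block from the readable into the CAR
  // writer, closing it once the block stream ends.
  (async () => {
    const reader = blocks.getReader();
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      await writer.put(value);
    }
    await writer.close();
  })();
  return out; // AsyncIterable<Uint8Array> of CAR-encoded bytes
};
```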

You can encode (non-sharded) directories with the provided API as well (a sharded variant is sketched after this example):

```ts
import * as UnixFS from "@ipld/unixfs";

export const demo = async () => {
  const { readable, writable } = new TransformStream();
  const writer = UnixFS.createWriter({ writable });

  // Write a file.
  const file = UnixFS.createFileWriter(writer);
  file.write(new TextEncoder().encode("hello world"));
  const fileLink = await file.close();

  // Create a directory and add the file we encoded above.
  const dir = UnixFS.createDirectoryWriter(writer);
  dir.set("intro.md", fileLink);
  const dirLink = await dir.close();

  // Now wrap the above directory with another one, and also add the same
  // file there.
  const root = UnixFS.createDirectoryWriter(writer);
  root.set("user", dirLink);
  root.set("hello.md", fileLink);

  // Creates the following UnixFS structure, where intro.md and hello.md
  // link to the same IPFS file:
  // ./
  // ./user/intro.md
  // ./hello.md
  const rootLink = await root.close();
  // ...
  writer.close();
};
```
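
For directories too large to fit in a single block, the library also ships a HAMT-sharded directory writer. A minimal sketch, assuming a `createShardedDirectoryWriter` export that mirrors the plain directory writer's `set`/`close` API:

```js
import * as UnixFS from "@ipld/unixfs";

const { readable, writable } = new TransformStream();
const writer = UnixFS.createWriter({ writable });

const file = UnixFS.createFileWriter(writer);
file.write(new TextEncoder().encode("hello world"));
const fileLink = await file.close();

// Entries get spread across a HAMT shard rather than a single
// directory node, so very large directories stay within block limits.
const shard = UnixFS.createShardedDirectoryWriter(writer);
shard.set("intro.md", fileLink);
const shardLink = await shard.close();

writer.close();
```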

### Configuration

You can configure the DAG layout, chunking, and a number of other things by providing API-compatible components. The library ships with several, but you can also bring your own.

```js
import * as UnixFS from "@ipld/unixfs";
import * as Rabin from "@ipld/unixfs/file/chunker/rabin";
import * as Trickle from "@ipld/unixfs/file/layout/trickle";
import * as RawLeaf from "multiformats/codecs/raw";
import { sha256 } from "multiformats/hashes/sha2";

const demo = async (blob) => {
  const { readable, writable } = new TransformStream();
  const writer = UnixFS.createWriter({
    writable,
    // you can pass only things you want to override
    settings: {
      // ... (remaining settings elided in this diff hunk; see the sketch below)
      fileEncoder: UnixFS,
      hasher: sha256,
    },
  });

  const file = UnixFS.createFileWriter(writer);
  // ...
};
```
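
The elided lines presumably wire the imported Rabin chunker and Trickle layout into that `settings` object. As a grounded illustration using components this README documents elsewhere (`withMaxChunkSize` and `withWidth`), an override could look like the following; the field names follow the `UnixFS.configure` call in the next section, and the specific sizes are arbitrary:

```js
import * as UnixFS from "@ipld/unixfs";
import { withMaxChunkSize } from "@ipld/unixfs/file/chunker/fixed";
import { withWidth } from "@ipld/unixfs/file/layout/balanced";
import * as RawLeaf from "multiformats/codecs/raw";
import { sha256 } from "multiformats/hashes/sha2";

const { readable, writable } = new TransformStream();
const writer = UnixFS.createWriter({
  writable,
  settings: {
    chunker: withMaxChunkSize(256 * 1024), // 256 KiB fixed-size chunks
    fileChunkEncoder: RawLeaf,             // encode leaf chunks as raw blocks
    smallFileEncoder: RawLeaf,
    fileLayout: withWidth(174),            // balanced DAG, 174 links per node
    fileEncoder: UnixFS,
    hasher: sha256,
  },
});
```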

### Collecting UnixFS FileLinks

You can optionally pass a `unixFsFileLinkWriter` stream to capture metadata for each link (useful for indexing or tracking layout information).

```js
import * as UnixFS from "@vascosantos/unixfs";
import { createWriter, createFileWriter } from "@vascosantos/unixfs";
import { withMaxChunkSize } from "@vascosantos/unixfs/file/chunker/fixed";
import { withWidth } from "@vascosantos/unixfs/file/layout/balanced";
import * as raw from "multiformats/codecs/raw";

const defaultSettings = UnixFS.configure({
  fileChunkEncoder: raw,
  smallFileEncoder: raw,
  chunker: withMaxChunkSize(1024 * 1024),
  fileLayout: withWidth(1024),
});

/**
 * @param {Blob} blob
 * @returns {Promise<import('@vascosantos/unixfs').FileLink[]>}
 */
async function collectUnixFsFileLinks(blob) {
  const fileLinks = [];

  // Create a stream to collect metadata (FileLinks).
  const { readable, writable } = new TransformStream();

  // Set up the main UnixFS writer (data goes nowhere here).
  const unixfsWriter = createWriter({
    writable: new WritableStream(), // Discard actual DAG output
    settings: defaultSettings,
  });

  // Set up the file writer with the link metadata writer.
  const unixFsFileLinkWriter = writable.getWriter();

  const fileWriter = createFileWriter({
    ...unixfsWriter,
    initOptions: {
      unixFsFileLinkWriter,
    },
  });

  // Start concurrent reading of the metadata stream.
  const fileLinkReader = readable.getReader();
  const readLinks = (async () => {
    while (true) {
      const { done, value } = await fileLinkReader.read();
      if (done) break;
      fileLinks.push(value);
    }
  })();

  // Pipe the blob to the file writer.
  await blob.stream().pipeTo(
    new WritableStream({
      async write(chunk) {
        await fileWriter.write(chunk);
      },
    })
  );

  // Finalize everything.
  await fileWriter.close();
  await unixfsWriter.close();
  await unixFsFileLinkWriter.close();

  // Wait for all links to be read.
  await readLinks;

  return fileLinks;
}

// Usage
const blob = new Blob(["Hello UnixFS links"]);
const links = await collectUnixFsFileLinks(blob);
console.log(links);
```

## License
