diff --git a/README.md b/README.md index 27c0b1c..68e2348 100644 --- a/README.md +++ b/README.md @@ -129,6 +129,88 @@ const demo = async blob => { } ``` +### Collecting UnixFS FileLinks + +You can optionally pass a unixFsFileLinkWriter stream to capture metadata for each link (useful for indexing or tracking layout information). + +```js +import { + createWriter, + createFileWriter, +} from '@ipld/unixfs' + +import { withMaxChunkSize } from '@ipld/unixfs/file/chunker/fixed' +import { withWidth } from '@ipld/unixfs/file/layout/balanced' + +const defaultSettings = UnixFS.configure({ + fileChunkEncoder: raw, + smallFileEncoder: raw, + chunker: withMaxChunkSize(1024 * 1024), + fileLayout: withWidth(1024), +}) + +/** + * @param {Blob} blob + * @returns {Promise} + */ +async function collectUnixFsFileLinks(blob) { + const fileLinks = [] + + // Create a stream to collect metadata (FileLinks) + const { readable, writable } = new TransformStream() + + // Set up the main UnixFS writer (data goes nowhere here) + const unixfsWriter = createWriter({ + writable: new WritableStream(), // Discard actual DAG output + settings: defaultSettings, + }) + + // Set up the file writer with link metadata writer + const unixFsFileLinkWriter = writable.getWriter() + + const fileWriter = createFileWriter({ + ...unixfsWriter, + initOptions: { + unixFsFileLinkWriter, + }, + }) + + // Start concurrent reading of the metadata stream + const fileLinkReader = readable.getReader() + const readLinks = (async () => { + while (true) { + const { done, value } = await fileLinkReader.read() + if (done) break + fileLinks.push(value) + } + })() + + // Pipe the blob to the file writer + await blob.stream().pipeTo( + new WritableStream({ + async write(chunk) { + await fileWriter.write(chunk) + }, + }) + ) + + // Finalize everything + await fileWriter.close() + await unixfsWriter.close() + await unixFsFileLinkWriter.close() + + // Wait for all links to be read + await readLinks + + return fileLinks +} + +// 
Usage +const blob = new Blob(['Hello UnixFS links']) +const links = await collectUnixFsFileLinks(blob) +console.log(links) +``` + ## License Licensed under either of @@ -144,4 +226,4 @@ Unless you explicitly state otherwise, any contribution intentionally submitted [readablestream]: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream [car]: https://ipld.io/specs/transport/car/carv1/ [`transformstream`]: https://developer.mozilla.org/en-US/docs/Web/API/TransformStream -[`writablestream`]: https://developer.mozilla.org/en-US/docs/Web/API/WritableStream +[`writablestream`]: https://developer.mozilla.org/en-US/docs/Web/API/WritableStream \ No newline at end of file diff --git a/src/api.ts b/src/api.ts index fec8a76..8f9b4b0 100644 --- a/src/api.ts +++ b/src/api.ts @@ -23,7 +23,7 @@ import type { Options as DirectoryWriterOptions, State as DirectoryWriterState, } from "./directory.js" -import { Metadata } from "./unixfs.js" +import { Metadata, FileLink } from "./unixfs.js" export type { WriterOptions, @@ -47,6 +47,7 @@ export type { MultihashHasher, MultihashDigest, Metadata, + FileLink, } /** diff --git a/src/file.js b/src/file.js index 2d72632..5fa4e4b 100644 --- a/src/file.js +++ b/src/file.js @@ -28,7 +28,7 @@ export const defaults = () => ({ * @param {Partial>} config * @returns {API.EncoderSettings} */ -export const configure = config => ({ +export const configure = (config) => ({ ...defaults(), ...config, }) @@ -50,8 +50,15 @@ export const UnixFSRawLeaf = { * @param {API.Options} options * @returns {API.View} */ -export const create = ({ writer, metadata = {}, settings = defaults() }) => - new FileWriterView(Writer.init(writer, metadata, configure(settings))) +export const create = ({ + writer, + metadata = {}, + settings = defaults(), + initOptions = {}, +}) => + new FileWriterView( + Writer.init(writer, metadata, configure(settings), initOptions) + ) /** * @template T @@ -98,7 +105,7 @@ export const close = async ( */ const perform = (view, effect) 
=> Task.fork( - Task.loop(effect, message => { + Task.loop(effect, (message) => { const { state, effect } = Writer.update(message, view.state) view.state = state return effect diff --git a/src/file/api.ts b/src/file/api.ts index 1b3d78b..b86eeb4 100644 --- a/src/file/api.ts +++ b/src/file/api.ts @@ -72,10 +72,17 @@ export interface EncoderSettings { linker: Linker } +export interface InitOptions { + unixFsFileLinkWriter?: UnixFsFileLinkWriter +} + +export interface UnixFsFileLinkWriter extends StreamWriter {} + export interface Options { writer: BlockWriter metadata?: UnixFS.Metadata settings?: EncoderSettings + initOptions?: InitOptions } export interface CloseOptions { diff --git a/src/file/writer.js b/src/file/writer.js index 1f3c225..4cf2ec7 100644 --- a/src/file/writer.js +++ b/src/file/writer.js @@ -13,6 +13,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter + * readonly unixFsFileLinkWriter?: API.UnixFsFileLinkWriter * chunker: Chunker.Chunker * layout: Layout * nodeQueue: Queue.Queue @@ -25,6 +26,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter + * readonly unixFsFileLinkWriter?: API.UnixFsFileLinkWriter * readonly rootID: Layout.NodeID * readonly end?: Task.Fork * chunker?: null @@ -39,6 +41,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter + * readonly unixFsFileLinkWriter?: API.UnixFsFileLinkWriter * readonly link: Layout.Link * chunker?: null * layout?: null @@ -63,6 +66,7 @@ import * as Queue from "./layout/queue.js" * |{type:"write", bytes:Uint8Array} * |{type:"link", link:API.EncodedFile} * |{type:"block"} + * |{type:"fileLink"} * |{type: "close"} * |{type: "end"} * } Message @@ -82,6 +86,9 @@ export const update = (message, 
state) => { /* c8 ignore next 2 */ case "block": return { state, effect: Task.none() } + /* c8 ignore next 2 */ + case "fileLink": + return { state, effect: Task.none() } case "close": return close(state) case "end": @@ -96,9 +103,10 @@ export const update = (message, state) => { * @param {API.BlockWriter} writer * @param {UnixFS.Metadata} metadata * @param {API.EncoderSettings} config + * @param {API.InitOptions} [options] * @returns {State} */ -export const init = (writer, metadata, config) => { +export const init = (writer, metadata, config, options = {}) => { return { status: "open", metadata, @@ -116,6 +124,7 @@ export const init = (writer, metadata, config) => { // overhead. // @see https://github.com/Gozala/vectrie nodeQueue: Queue.mutable(), + unixFsFileLinkWriter: options.unixFsFileLinkWriter, } } /** @@ -188,11 +197,23 @@ export const link = (state, { id, link, block }) => { ? state.end.resume() : Task.none() + if (!state.unixFsFileLinkWriter) { + return { + state: newState, + effect: Task.listen({ + link: Task.effects(tasks), + block: writeBlock(state.writer, block), + end, + }), + } + } + return { state: newState, effect: Task.listen({ link: Task.effects(tasks), block: writeBlock(state.writer, block), + fileLink: writeFileLink(state.unixFsFileLinkWriter, link), end, }), } @@ -203,7 +224,7 @@ export const link = (state, { id, link, block }) => { * @param {State} state * @returns {Update} */ -export const close = state => { +export const close = (state) => { if (state.status === "open") { const { chunks } = Chunker.close(state.chunker) const { layout, ...write } = state.config.fileLayout.write( @@ -269,7 +290,7 @@ export const close = state => { * @param {API.EncoderSettings} config */ const encodeLeaves = (leaves, config) => - leaves.map(leaf => encodeLeaf(config, leaf, config.fileChunkEncoder)) + leaves.map((leaf) => encodeLeaf(config, leaf, config.fileChunkEncoder)) /** * @param {API.EncoderSettings} config @@ -286,6 +307,7 @@ const encodeLeaf = 
function* ({ hasher, linker }, { id, content }, encoder) { const link = /** @type {UnixFS.FileLink} */ ({ cid, contentByteLength: content ? content.byteLength : 0, + contentByteOffset: content ? content.byteOffset : 0, dagByteLength: bytes.byteLength, }) @@ -297,7 +319,7 @@ const encodeLeaf = function* ({ hasher, linker }, { id, content }, encoder) { * @param {API.EncoderSettings} config */ const encodeBranches = (nodes, config) => - nodes.map(node => encodeBranch(config, node)) + nodes.map((node) => encodeBranch(config, node)) /** * @template Layout @@ -338,13 +360,30 @@ export const writeBlock = function* (writer, block) { writer.write(block) } +/** + * @param {API.UnixFsFileLinkWriter} writer + * @param {Layout.Link} link + * @returns {Task.Task} + */ + +export const writeFileLink = function* (writer, link) { + /* c8 ignore next 3 */ + if (!writer) { + return + } + if ((writer.desiredSize || 0) <= 0) { + yield* Task.wait(writer.ready) + } + writer.write(link) +} + /** * * @param {Uint8Array|Chunker.Chunk} buffer * @returns */ -const asUint8Array = buffer => +const asUint8Array = (buffer) => buffer instanceof Uint8Array ? 
buffer : buffer.copyTo(new Uint8Array(buffer.byteLength), 0) @@ -353,4 +392,4 @@ const asUint8Array = buffer => * @param {Layout.Node} node * @returns {node is Layout.Leaf} */ -const isLeafNode = node => node.children == null +const isLeafNode = (node) => node.children == null diff --git a/src/unixfs.ts b/src/unixfs.ts index a4e88b7..dcc0e15 100644 --- a/src/unixfs.ts +++ b/src/unixfs.ts @@ -6,10 +6,15 @@ import type { Link as IPLDLink, Version as LinkVersion, Block as IPLDBlock, - BlockView as IPLDBlockView + BlockView as IPLDBlockView, } from "multiformats" import { Data, type IData } from "../gen/unixfs.js" -export type { MultihashHasher, MultibaseEncoder, MultihashDigest, BlockEncoder } +export type { + MultihashHasher, + MultibaseEncoder, + MultihashDigest, + BlockEncoder, +} export * as Layout from "./file/layout/api" import NodeType = Data.DataType @@ -161,6 +166,11 @@ export interface ContentDAGLink extends DAGLink { * Total number of bytes in the file */ readonly contentByteLength: number + + /** + * Offset bytes in the file + */ + readonly contentByteOffset?: number } /** diff --git a/test/directory.spec.js b/test/directory.spec.js index ce466d3..012a6b1 100644 --- a/test/directory.spec.js +++ b/test/directory.spec.js @@ -21,7 +21,7 @@ describe("test directory", () => { const output = await collect(readable) assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" @@ -47,6 +47,7 @@ describe("test directory", () => { ), dagByteLength: 45, contentByteLength: 37, + contentByteOffset: 0, }) root.set("file.txt", fileLink) @@ -65,7 +66,7 @@ describe("test directory", () => { const output = await blocks assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" @@ -142,6 +143,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), 
contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -152,6 +154,7 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta" ), dagByteLength: 11, + contentByteOffset: 0, contentByteLength: 3, }) @@ -173,7 +176,7 @@ describe("test directory", () => { writer.close() const items = await blocks assert.deepEqual( - items.map(item => item.cid.toString()), + items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -196,6 +199,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -207,6 +211,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -223,7 +228,7 @@ describe("test directory", () => { writer.close() const items = await blocks assert.deepEqual( - items.map(item => item.cid.toString()), + items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -246,6 +251,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -263,7 +269,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354", @@ -300,6 +306,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -311,6 +318,7 @@ describe("test 
directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -330,7 +338,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -353,6 +361,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -364,6 +373,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -388,7 +398,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -417,7 +427,7 @@ describe("test directory", () => { const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", @@ -439,6 +449,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -450,6 +461,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -478,7 +490,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ 
"bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -489,7 +501,7 @@ describe("test directory", () => { patchWriter.close() const delta = await patchReader assert.deepEqual( - delta.map(block => block.cid.toString()), + delta.map((block) => block.cid.toString()), ["bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44"] ) }) diff --git a/test/file.spec.js b/test/file.spec.js index 6140f55..57e9625 100644 --- a/test/file.spec.js +++ b/test/file.spec.js @@ -46,6 +46,11 @@ describe("test file", () => { }) it("splits into 3 chunks", async function () { + const rawFiles = [ + new Uint8Array(CHUNK_SIZE).fill(1), + new Uint8Array(CHUNK_SIZE).fill(2), + new Uint8Array(CHUNK_SIZE).fill(3), + ] const { readable, writable } = new TransformStream( {}, {}, @@ -54,15 +59,37 @@ describe("test file", () => { } ) const writer = writable.getWriter() - const file = UnixFS.createFileWriter({ writer }) - file.write(new Uint8Array(CHUNK_SIZE).fill(1)) - file.write(new Uint8Array(CHUNK_SIZE).fill(2)) - file.write(new Uint8Array(CHUNK_SIZE).fill(3)) + + // Capture links metadata + /** @type {import('../src/unixfs.js').FileLink[]} */ + const fileLinkItems = [] + const { readable: fileLinkReadable, writable: fileLinkWritable } = + new TransformStream() + // Start consuming links stream asynchronously + void (async () => { + const reader = fileLinkReadable.getReader() + while (true) { + const { done, value } = await reader.read() + if (done) break + fileLinkItems.push(value) + } + })() + + const file = UnixFS.createFileWriter({ + writer, + initOptions: { + unixFsFileLinkWriter: fileLinkWritable.getWriter(), + }, + }) + for (const rawFile of rawFiles) { + file.write(rawFile) + } const link = await file.close() + // Check the root CID // TODO: So go-ipfs sets CIDv0 links which casuse a mismatch assert.deepEqual(link, { - contentByteLength: 786432, + contentByteLength: CHUNK_SIZE * 3, dagByteLength: 786632, /** 
@type {Link.Link} */ cid: Link.parse( @@ -71,16 +98,23 @@ }) const blocks = readable.getReader() + + // Check the first block const r1 = await blocks.read() if (r1.done) { assert.fail("expected to get a block") } - assert.deepEqual( r1.value.cid, Link.parse("bafybeihhsdoupgd3fnl3e3367ymsanmikafpllldsdt37jzyoh6nuatowe") ) + const l1 = fileLinkItems.find((l) => l.cid.equals(r1.value.cid)) + assert.isTrue(l1 !== undefined) + assert.equal(l1?.contentByteLength, CHUNK_SIZE) + assert.equal(l1?.dagByteLength, CHUNK_SIZE + 14) + assert.equal(l1?.contentByteOffset, 0) + // Check the second block const r2 = await blocks.read() if (r2.done) { assert.fail("expected to get a block") } @@ -89,7 +123,13 @@ r2.value.cid, Link.parse("bafybeief3dmadxfymhhhrflqytqmlhlz47w6glaxvyzmm6s6tpfb6izzee") ) + const l2 = fileLinkItems.find((l) => l.cid.equals(r2.value.cid)) + assert.isTrue(l2 !== undefined) + assert.equal(l2?.contentByteLength, CHUNK_SIZE) + assert.equal(l2?.dagByteLength, CHUNK_SIZE + 14) + assert.equal(l2?.contentByteOffset, CHUNK_SIZE) + + // Check the third block const r3 = await blocks.read() if (r3.done) { assert.fail("expected to get a block") } @@ -98,8 +138,19 @@ r3.value.cid, Link.parse("bafybeihznihf5g5ibdyoawn7uu3inlyqrxjv63lt6lop6h3w6rzwrp67a4") ) + const l3 = fileLinkItems.find((l) => l.cid.equals(r3.value.cid)) + assert.isTrue(l3 !== undefined) + assert.equal(l3?.contentByteLength, CHUNK_SIZE) + assert.equal(l3?.dagByteLength, CHUNK_SIZE + 14) + assert.equal(l3?.contentByteOffset, CHUNK_SIZE * 2) await writer.close() + + // Check root + assert.isTrue( + fileLinkItems.find((l) => l.cid.equals(link.cid)) !== undefined + ) + assert.equal(fileLinkItems.length, 4) }) it("--chunker=size-65535 --trickle=false --raw-leaves=false --cid-version=1", async () => { @@ -130,6 +181,7 @@ } const link = await file.close() +
assert.deepEqual(link, { /** @type {Link.Link} */ cid: Link.parse( @@ -292,6 +344,7 @@ describe("test file", () => { "bafybeif7ztnhq65lumvvtr4ekcwd2ifwgm3awq4zfr3srh462rwyinlb4y" ), contentByteLength: 0, + contentByteOffset: 0, dagByteLength: 6, }) }) @@ -312,6 +365,7 @@ describe("test file", () => { "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" ), contentByteLength: 37, + contentByteOffset: 0, dagByteLength: 45, }) }) @@ -335,6 +389,7 @@ describe("test file", () => { "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" ), contentByteLength: 37, + contentByteOffset: 0, dagByteLength: 45, }) }) diff --git a/test/lib.spec.js b/test/lib.spec.js index f82a3f3..aafc7fc 100644 --- a/test/lib.spec.js +++ b/test/lib.spec.js @@ -15,13 +15,14 @@ describe("UnixFS.createWriter", () => { "bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa" ), dagByteLength: 19, + contentByteOffset: 0, contentByteLength: 11, }) writer.close() const blocks = await reader assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), ["bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa"] ) }) @@ -39,13 +40,14 @@ describe("UnixFS.createWriter", () => { ), dagByteLength: 19, contentByteLength: 11, + contentByteOffset: 0, }) writer.close() const blocks = await reader assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), ["bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa"] ) }) @@ -69,7 +71,7 @@ describe("UnixFS.createWriter", () => { const blocks = await reader assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", @@ -96,7 +98,7 @@ describe("UnixFS.createWriter", () => { const blocks = await reader assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), [ 
"bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", @@ -129,7 +131,7 @@ describe("UnixFS.createWriter", () => { const blocks = await reader assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", @@ -154,6 +156,7 @@ describe("UnixFS.withCapacity", async () => { ), dagByteLength: 19, contentByteLength: 11, + contentByteOffset: 0, }) assert.equal(fs.writer.desiredSize, 128 - 19) @@ -168,6 +171,7 @@ describe("UnixFS.withCapacity", async () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) assert.equal(fs.writer.desiredSize, 128 - 19 - 11) diff --git a/test/sharded-directory.spec.js b/test/sharded-directory.spec.js index e5a5bad..e3c9cd3 100644 --- a/test/sharded-directory.spec.js +++ b/test/sharded-directory.spec.js @@ -21,7 +21,7 @@ describe("test directory", () => { const output = await collect(readable) assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m" @@ -47,6 +47,7 @@ describe("test directory", () => { ), dagByteLength: 45, contentByteLength: 37, + contentByteOffset: 0, }) root.set("file.txt", fileLink) @@ -65,7 +66,7 @@ describe("test directory", () => { const output = await blocks assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" @@ -94,6 +95,7 @@ describe("test directory", () => { ), dagByteLength: 45, contentByteLength: 37, + contentByteOffset: 0, }) for (let i = 0; i < 100; i++) { @@ -115,28 +117,68 @@ describe("test directory", () => { const output = await blocks assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ - 
Link.parse("bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4"), - Link.parse("bafybeic66itcox6c3pozwsktz552f3pd3eanqr74jpvjezchwrkpqemjru"), - Link.parse("bafybeigyad752jkaj6qrlgtvovw5dzvhcj7pfvo5pjxkdlzec3kn3qqcoy"), - Link.parse("bafybeiflrsirdjonnavtsdg7vb63z7mcnzuymuv6eiwxw2wxqkezhludjm"), - Link.parse("bafybeigw2ilsvwhg3uglrmryyuk7dtu4yudr5naerrzb5e7ibmk7rscu3y"), - Link.parse("bafybeicprkb6dv56v3ezgj4yffbsueamhkkodfsxvwyaty3okfu6tgq3rm"), - Link.parse("bafybeienx5re7fb3s2crypbkkyp5l5zo5xb5bqfxh67ieq2aivgtaw5bqq"), - Link.parse("bafybeiewng4vb4elq23cjybjhehg2z3lshskzstxzgrhllyb7jsz2dckdq"), - Link.parse("bafybeifz4lbafvzkj7njb3cdr7r3ngl5643jhtghl2ntbvoyx5hocvepvy"), - Link.parse("bafybeibperpo4gxoi7x3g7entslorxizzy3imr44hujjqrus4hfs4ekqge"), - Link.parse("bafybeiamtplq4n5kdlhorxmougus3y54r52frrvotkduzy7kfgyrepvylu"), - Link.parse("bafybeieqvwd6ditluxwzrbvq3ffusuykxbljlqyf7gbf7esi6ake4xh27a"), - Link.parse("bafybeigkk3fanqwihj5qautj4yzluxnh3okblouotd2qkreijejdic2fui"), - Link.parse("bafybeiafn56xmx6hqgs4ig4yc24cdnbzyghjml6yhg3hmmemkrwl4irluu"), - Link.parse("bafybeieu5uzq5jbtuhnaazl36pjygv57virwr3tbdgqujhpya5w7dfosz4"), - Link.parse("bafybeid57gn3655jtgnnocwnjznifyltepqoiu3chbawyy2f263hm3qylm"), - Link.parse("bafybeig3iwqy4v44nvgyabirtbel6sbk6pzfuwdpzj4z26vczda2nycyrq"), - Link.parse("bafybeigrpoorhusehwpw2caoe7mw65xaundu227vcxqv6mqfeo65tcwxqm"), - Link.parse("bafybeif3iq6dnq2qixkoqnmyvijplu6x5depgmfgpfncpxkcx5ytajrxxy"), - Link.parse("bafybeidzpkzefoys5ani6qfvrpxyjiolmy6ng445uceov2a33r5bw43qwe"), + Link.parse( + "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" + ), + Link.parse( + "bafybeic66itcox6c3pozwsktz552f3pd3eanqr74jpvjezchwrkpqemjru" + ), + Link.parse( + "bafybeigyad752jkaj6qrlgtvovw5dzvhcj7pfvo5pjxkdlzec3kn3qqcoy" + ), + Link.parse( + "bafybeiflrsirdjonnavtsdg7vb63z7mcnzuymuv6eiwxw2wxqkezhludjm" + ), + Link.parse( + "bafybeigw2ilsvwhg3uglrmryyuk7dtu4yudr5naerrzb5e7ibmk7rscu3y" + ), + Link.parse( + 
"bafybeicprkb6dv56v3ezgj4yffbsueamhkkodfsxvwyaty3okfu6tgq3rm" + ), + Link.parse( + "bafybeienx5re7fb3s2crypbkkyp5l5zo5xb5bqfxh67ieq2aivgtaw5bqq" + ), + Link.parse( + "bafybeiewng4vb4elq23cjybjhehg2z3lshskzstxzgrhllyb7jsz2dckdq" + ), + Link.parse( + "bafybeifz4lbafvzkj7njb3cdr7r3ngl5643jhtghl2ntbvoyx5hocvepvy" + ), + Link.parse( + "bafybeibperpo4gxoi7x3g7entslorxizzy3imr44hujjqrus4hfs4ekqge" + ), + Link.parse( + "bafybeiamtplq4n5kdlhorxmougus3y54r52frrvotkduzy7kfgyrepvylu" + ), + Link.parse( + "bafybeieqvwd6ditluxwzrbvq3ffusuykxbljlqyf7gbf7esi6ake4xh27a" + ), + Link.parse( + "bafybeigkk3fanqwihj5qautj4yzluxnh3okblouotd2qkreijejdic2fui" + ), + Link.parse( + "bafybeiafn56xmx6hqgs4ig4yc24cdnbzyghjml6yhg3hmmemkrwl4irluu" + ), + Link.parse( + "bafybeieu5uzq5jbtuhnaazl36pjygv57virwr3tbdgqujhpya5w7dfosz4" + ), + Link.parse( + "bafybeid57gn3655jtgnnocwnjznifyltepqoiu3chbawyy2f263hm3qylm" + ), + Link.parse( + "bafybeig3iwqy4v44nvgyabirtbel6sbk6pzfuwdpzj4z26vczda2nycyrq" + ), + Link.parse( + "bafybeigrpoorhusehwpw2caoe7mw65xaundu227vcxqv6mqfeo65tcwxqm" + ), + Link.parse( + "bafybeif3iq6dnq2qixkoqnmyvijplu6x5depgmfgpfncpxkcx5ytajrxxy" + ), + Link.parse( + "bafybeidzpkzefoys5ani6qfvrpxyjiolmy6ng445uceov2a33r5bw43qwe" + ), ] ) }) @@ -206,6 +248,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -217,6 +260,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -237,7 +281,7 @@ describe("test directory", () => { writer.close() const items = await blocks assert.deepEqual( - items.map(item => item.cid.toString()), + items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -260,6 +304,7 @@ describe("test directory", () => { 
"bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -271,6 +316,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -287,7 +333,7 @@ describe("test directory", () => { writer.close() const items = await blocks assert.deepEqual( - items.map(item => item.cid.toString()), + items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -310,6 +356,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -327,7 +374,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m", @@ -364,6 +411,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -375,6 +423,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -394,7 +443,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -406,7 +455,10 @@ describe("test directory", () => { await root.close() assert.fail() } catch (/** @type {any} */ err) { - assert.equal(err.message, "Can not change written HAMT 
directory, but you can .fork() and make changes to it") + assert.equal( + err.message, + "Can not change written HAMT directory, but you can .fork() and make changes to it" + ) } }) @@ -424,6 +476,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -435,6 +488,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -459,7 +513,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -488,7 +542,7 @@ describe("test directory", () => { const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", @@ -510,6 +564,7 @@ describe("test directory", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, }) @@ -521,6 +576,7 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, + contentByteOffset: 0, }) root.set("hello", hello) @@ -549,7 +605,7 @@ describe("test directory", () => { writer.close() const blocks = await reader assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", @@ -560,7 +616,7 @@ describe("test directory", () => { patchWriter.close() const delta = await patchReader assert.deepEqual( - delta.map(block => 
block.cid.toString()), + delta.map((block) => block.cid.toString()), ["bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa"] ) }) @@ -714,7 +770,7 @@ describe("test directory", () => { } root.set("file.txt", fileLink) assert.equal(root.size, 1) - root.state.entries.forEach(entry => assert.deepEqual(entry, fileLink)) + root.state.entries.forEach((entry) => assert.deepEqual(entry, fileLink)) }) it("writer state .get", async function () {