From 9b29bd1215384e50a0cd67b5e17449c7f5555be1 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 13 May 2025 13:27:41 +0200 Subject: [PATCH 1/6] feat: expose link metadata writer in create file --- package.json | 3 +- src/file.js | 81 ++-- src/file/api.ts | 132 +++--- src/file/writer.js | 165 +++++--- src/unixfs.ts | 160 +++---- test/directory.spec.js | 498 +++++++++++----------- test/file.spec.js | 393 +++++++++-------- test/lib.spec.js | 154 +++---- test/sharded-directory.spec.js | 744 ++++++++++++++++++--------------- yarn.lock | 7 + 10 files changed, 1268 insertions(+), 1069 deletions(-) diff --git a/package.json b/package.json index d6f4423..f9bffae 100644 --- a/package.json +++ b/package.json @@ -133,5 +133,6 @@ "tsv": "^0.2.0", "typescript": "^4.8.4" }, - "license": "Apache-2.0 OR MIT" + "license": "Apache-2.0 OR MIT", + "packageManager": "yarn@1.22.19+sha1.4ba7fc5c6e704fce2066ecbfb0b0d8976fe62447" } diff --git a/src/file.js b/src/file.js index 2d72632..b29031d 100644 --- a/src/file.js +++ b/src/file.js @@ -1,14 +1,14 @@ -import * as API from "./file/api.js" -import * as UnixFS from "./codec.js" -import * as Writer from "./file/writer.js" -import * as Task from "actor" -import { panic } from "./writer/util.js" -import * as FixedSize from "./file/chunker/fixed.js" -import { sha256 } from "multiformats/hashes/sha2" -import { CID } from "multiformats/cid" -import * as Balanced from "./file/layout/balanced.js" +import * as API from "./file/api.js"; +import * as UnixFS from "./codec.js"; +import * as Writer from "./file/writer.js"; +import * as Task from "actor"; +import { panic } from "./writer/util.js"; +import * as FixedSize from "./file/chunker/fixed.js"; +import { sha256 } from "multiformats/hashes/sha2"; +import { CID } from "multiformats/cid"; +import * as Balanced from "./file/layout/balanced.js"; -export * from "./file/api.js" +export * from "./file/api.js"; /** * @returns {API.EncoderSettings} @@ -21,37 +21,44 @@ export const defaults = () => ({ 
fileLayout: Balanced.withWidth(174), hasher: sha256, linker: { createLink: CID.createV1 }, -}) +}); /** * @template {unknown} Layout * @param {Partial>} config * @returns {API.EncoderSettings} */ -export const configure = config => ({ +export const configure = (config) => ({ ...defaults(), ...config, -}) +}); export const UnixFSLeaf = { code: UnixFS.code, name: UnixFS.name, encode: UnixFS.encodeFileChunk, -} +}; export const UnixFSRawLeaf = { code: UnixFS.code, name: UnixFS.name, encode: UnixFS.encodeRaw, -} +}; /** * @template Layout * @param {API.Options} options * @returns {API.View} */ -export const create = ({ writer, metadata = {}, settings = defaults() }) => - new FileWriterView(Writer.init(writer, metadata, configure(settings))) +export const create = ({ + writer, + metadata = {}, + settings = defaults(), + initOptions = {}, +}) => + new FileWriterView( + Writer.init(writer, metadata, configure(settings), initOptions) + ); /** * @template T @@ -61,9 +68,9 @@ export const create = ({ writer, metadata = {}, settings = defaults() }) => */ export const write = async (view, bytes) => { - await perform(view, Task.send({ type: "write", bytes })) - return view -} + await perform(view, Task.send({ type: "write", bytes })); + return view; +}; /** * @template T @@ -74,22 +81,22 @@ export const close = async ( view, { releaseLock = false, closeWriter = false } = {} ) => { - await perform(view, Task.send({ type: "close" })) - const { state } = view + await perform(view, Task.send({ type: "close" })); + const { state } = view; if (state.status === "linked") { if (closeWriter) { - await view.state.writer.close() + await view.state.writer.close(); } else if (releaseLock) { - view.state.writer.releaseLock() + view.state.writer.releaseLock(); } - return state.link + return state.link; /* c8 ignore next 5 */ } else { panic( `Expected writer to be in 'linked' state after close, but it is in "${state.status}" instead` - ) + ); } -} +}; /** * @template T @@ -98,12 +105,12 @@ 
export const close = async ( */ const perform = (view, effect) => Task.fork( - Task.loop(effect, message => { - const { state, effect } = Writer.update(message, view.state) - view.state = state - return effect + Task.loop(effect, (message) => { + const { state, effect } = Writer.update(message, view.state); + view.state = state; + return effect; }) - ) + ); /** * @template Layout @@ -114,26 +121,26 @@ class FileWriterView { * @param {Writer.State} state */ constructor(state) { - this.state = state + this.state = state; } get writer() { - return this.state.writer + return this.state.writer; } get settings() { - return this.state.config + return this.state.config; } /** * @param {Uint8Array} bytes * @returns {Promise>} */ write(bytes) { - return write(this, bytes) + return write(this, bytes); } /** * @param {API.CloseOptions} [options] * @returns {Promise} */ close(options) { - return close(this, options) + return close(this, options); } } diff --git a/src/file/api.ts b/src/file/api.ts index 1b3d78b..cde6504 100644 --- a/src/file/api.ts +++ b/src/file/api.ts @@ -1,7 +1,7 @@ -import type { Chunker } from "./chunker/api.js" -import type { Writer as StreamWriter } from "../writer/api.js" -import type { LayoutEngine, NodeID } from "./layout/api.js" -import * as UnixFS from "../unixfs.js" +import type { Chunker } from "./chunker/api.js"; +import type { Writer as StreamWriter } from "../writer/api.js"; +import type { LayoutEngine, NodeID } from "./layout/api.js"; +import * as UnixFS from "../unixfs.js"; import type { Block, BlockEncoder, @@ -9,11 +9,12 @@ import type { MultihashDigest, Link, LinkVersion, -} from "../unixfs.js" -import type { State } from "./writer.js" +} from "../unixfs.js"; +import type { State } from "./writer.js"; -export * from "../writer/api.js" -import * as ChunkerService from "./chunker.js" +export * from "../writer/api.js"; +import * as ChunkerService from "./chunker.js"; +import init from "rabin-rs/gen/wasm.js"; export type { Chunker, @@ -22,97 
+23,104 @@ export type { MultihashDigest, Block, State, -} +}; export interface FileWriterService extends EncoderSettings { - writer: BlockWriter + writer: BlockWriter; } export interface WriterOptions { - readonly settings?: EncoderSettings - readonly metadata?: UnixFS.Metadata + readonly settings?: EncoderSettings; + readonly metadata?: UnixFS.Metadata; } export interface EncoderSettings { /** * Chunker which will be used to split file content into chunks. */ - chunker: Chunker + chunker: Chunker; /** * If provided leaves will be encoded as raw blocks, unless file has a * metadata. This is what `rawLeaves` options used to be except instead * of boolean you pass an encoder that will be used. */ - fileChunkEncoder: FileChunkEncoder + fileChunkEncoder: FileChunkEncoder; /** * If provided and file contains single chunk it will be encoded with this * encoder. This is what `reduceSingleLeafToSelf` option used to be except * instead of boolean you pass an encoder that will be used. */ - smallFileEncoder: FileChunkEncoder + smallFileEncoder: FileChunkEncoder; - fileEncoder: FileEncoder + fileEncoder: FileEncoder; /** * Builder that will be used to build file DAG from the leaf nodes. */ - fileLayout: LayoutEngine + fileLayout: LayoutEngine; /** * Hasher used to compute multihash for each block in the file. */ - hasher: MultihashHasher + hasher: MultihashHasher; /** * This function is used to create CIDs from multihashes. This is similar * to `cidVersion` option except you give it CID creator to use. 
*/ - linker: Linker + linker: Linker; +} + +export interface InitOptions { + linkMetadataWriter?: LinkMetadataWriter; } +export interface LinkMetadataWriter extends StreamWriter {} + export interface Options { - writer: BlockWriter - metadata?: UnixFS.Metadata - settings?: EncoderSettings + writer: BlockWriter; + metadata?: UnixFS.Metadata; + settings?: EncoderSettings; + initOptions?: InitOptions; } export interface CloseOptions { - releaseLock?: boolean - closeWriter?: boolean + releaseLock?: boolean; + closeWriter?: boolean; } export interface BlockWriter extends StreamWriter {} export interface WritableBlockStream { - getWriter(): BlockWriter + getWriter(): BlockWriter; } export type FileChunkEncoder = | BlockEncoder - | BlockEncoder + | BlockEncoder; export interface FileEncoder { - code: PB - encode(node: UnixFS.File): Uint8Array + code: PB; + encode(node: UnixFS.File): Uint8Array; } export interface Linker { createLink( code: Code, hash: MultihashDigest - ): Link + ): Link; } export interface EncodedFile { - id: NodeID - block: Block - link: UnixFS.FileLink + id: NodeID; + block: Block; + link: UnixFS.FileLink; } -export type PB = 0x70 -export type RAW = 0x55 +export type PB = 0x70; +export type RAW = 0x55; /** * Interface defines API for importable content that is just a subset of `Blob` @@ -122,9 +130,9 @@ export type RAW = 0x55 * with optional metadata. */ export interface BlobContent extends BlobMetadata { - readonly size: number + readonly size: number; - stream(): ReadableStream + stream(): ReadableStream; // text(): Promise // arrayBuffer(): Promise // slice(start?: number, end?: number, contentType?: string): Blob @@ -134,7 +142,7 @@ export interface BlobContent extends BlobMetadata { * Optional unixfs metadata. 
*/ export interface BlobMetadata extends UnixFS.Metadata { - readonly type: string + readonly type: string; } /** @@ -151,54 +159,54 @@ export interface FileContent extends BlobContent { * **Note:** File name is actually used as a file path which is to imply it * can contain contains `/` delimiters. */ - readonly name: string + readonly name: string; } export type FileState = | OpenFile | ClosedFile - | LinkedFile + | LinkedFile; export interface FileView { - state: State + state: State; } export interface OpenFile { - readonly type: "file" - readonly status: "open" - readonly metadata: UnixFS.Metadata - readonly service: FileWriterService + readonly type: "file"; + readonly status: "open"; + readonly metadata: UnixFS.Metadata; + readonly service: FileWriterService; - writing: boolean + writing: boolean; - chunker: ChunkerService.Chunker - layout: Layout + chunker: ChunkerService.Chunker; + layout: Layout; } export interface ClosedFile { - readonly type: "file" - readonly status: "closed" - readonly service: FileWriterService - readonly metadata: UnixFS.Metadata - writing: boolean - chunker: ChunkerService.Chunker - layout: Layout + readonly type: "file"; + readonly status: "closed"; + readonly service: FileWriterService; + readonly metadata: UnixFS.Metadata; + writing: boolean; + chunker: ChunkerService.Chunker; + layout: Layout; } export interface LinkedFile { - readonly type: "file" - readonly status: "linked" + readonly type: "file"; + readonly status: "linked"; - state: UnixFS.FileLink + state: UnixFS.FileLink; } export interface Writer { - write(bytes: Uint8Array): Promise> - close(options?: CloseOptions): Promise + write(bytes: Uint8Array): Promise>; + close(options?: CloseOptions): Promise; } export interface View extends Writer { - readonly writer: BlockWriter - readonly settings: EncoderSettings - state: State + readonly writer: BlockWriter; + readonly settings: EncoderSettings; + state: State; } diff --git a/src/file/writer.js b/src/file/writer.js index 
1f3c225..76dab01 100644 --- a/src/file/writer.js +++ b/src/file/writer.js @@ -1,10 +1,10 @@ -import * as Task from "actor" -import * as API from "./api.js" -import * as Layout from "./layout/api.js" -import * as UnixFS from "../codec.js" -import * as Chunker from "./chunker.js" -import { EMPTY_BUFFER, panic, unreachable } from "../writer/util.js" -import * as Queue from "./layout/queue.js" +import * as Task from "actor"; +import * as API from "./api.js"; +import * as Layout from "./layout/api.js"; +import * as UnixFS from "../codec.js"; +import * as Chunker from "./chunker.js"; +import { EMPTY_BUFFER, panic, unreachable } from "../writer/util.js"; +import * as Queue from "./layout/queue.js"; /** * @template Layout @@ -13,6 +13,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter + * readonly linkMetadataWriter?: API.LinkMetadataWriter * chunker: Chunker.Chunker * layout: Layout * nodeQueue: Queue.Queue @@ -25,6 +26,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter + * readonly linkMetadataWriter?: API.LinkMetadataWriter * readonly rootID: Layout.NodeID * readonly end?: Task.Fork * chunker?: null @@ -39,6 +41,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter + * readonly linkMetadataWriter?: API.LinkMetadataWriter * readonly link: Layout.Link * chunker?: null * layout?: null @@ -63,6 +66,7 @@ import * as Queue from "./layout/queue.js" * |{type:"write", bytes:Uint8Array} * |{type:"link", link:API.EncodedFile} * |{type:"block"} + * |{type:"linkMetadata"} * |{type: "close"} * |{type: "end"} * } Message @@ -76,29 +80,33 @@ import * as Queue from "./layout/queue.js" export const update = (message, state) => { switch (message.type) { case "write": - 
return write(state, message.bytes) + return write(state, message.bytes); case "link": - return link(state, message.link) + return link(state, message.link); /* c8 ignore next 2 */ case "block": - return { state, effect: Task.none() } + return { state, effect: Task.none() }; + /* c8 ignore next 2 */ + case "linkMetadata": + return { state, effect: Task.none() }; case "close": - return close(state) + return close(state); case "end": - return { state, effect: Task.none() } + return { state, effect: Task.none() }; default: - return unreachable`File Writer got unknown message ${message}` + return unreachable`File Writer got unknown message ${message}`; } -} +}; /** * @template Layout * @param {API.BlockWriter} writer * @param {UnixFS.Metadata} metadata * @param {API.EncoderSettings} config + * @param {API.InitOptions} [options] * @returns {State} */ -export const init = (writer, metadata, config) => { +export const init = (writer, metadata, config, options = {}) => { return { status: "open", metadata, @@ -116,8 +124,9 @@ export const init = (writer, metadata, config) => { // overhead. 
// @see https://github.com/Gozala/vectrie nodeQueue: Queue.mutable(), - } -} + linkMetadataWriter: options.linkMetadataWriter, + }; +}; /** * @template Layout * @param {State} state @@ -127,21 +136,21 @@ export const init = (writer, metadata, config) => { export const write = (state, bytes) => { if (state.status === "open") { // Chunk up provided bytes - const { chunks, ...chunker } = Chunker.write(state.chunker, bytes) + const { chunks, ...chunker } = Chunker.write(state.chunker, bytes); // Pass chunks to layout engine to produce nodes const { nodes, leaves, layout } = state.config.fileLayout.write( state.layout, chunks - ) + ); - const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue) + const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue); // Create leaf encode tasks for all new leaves const tasks = [ ...encodeLeaves(leaves, state.config), ...encodeBranches(linked, state.config), - ] + ]; return { state: { @@ -153,11 +162,11 @@ export const write = (state, bytes) => { effect: Task.listen({ link: Task.effects(tasks), }), - } + }; } else { - return panic("Unable to perform write on closed file") + return panic("Unable to perform write on closed file"); } -} +}; /** * @template Layout @@ -166,9 +175,9 @@ export const write = (state, bytes) => { * @returns {Update} */ export const link = (state, { id, link, block }) => { - let { linked, ...nodeQueue } = Queue.addLink(id, link, state.nodeQueue) + let { linked, ...nodeQueue } = Queue.addLink(id, link, state.nodeQueue); - const tasks = encodeBranches(linked, state.config) + const tasks = encodeBranches(linked, state.config); /** @type {State} */ const newState = @@ -179,42 +188,54 @@ export const link = (state, { id, link, block }) => { link, nodeQueue, } - : { ...state, nodeQueue } + : { ...state, nodeQueue }; // If we just linked a root and there is a **suspended** "end" task we create // a task to resume it. const end = state.status === "closed" && id === state.rootID && state.end ? 
state.end.resume() - : Task.none() + : Task.none(); + + if (!state.linkMetadataWriter) { + return { + state: newState, + effect: Task.listen({ + link: Task.effects(tasks), + block: writeBlock(state.writer, block), + end, + }), + }; + } return { state: newState, effect: Task.listen({ link: Task.effects(tasks), block: writeBlock(state.writer, block), + linkMetadata: writeLinkMetadata(state.linkMetadataWriter, link), end, }), - } -} + }; +}; /** * @template Layout * @param {State} state * @returns {Update} */ -export const close = state => { +export const close = (state) => { if (state.status === "open") { - const { chunks } = Chunker.close(state.chunker) + const { chunks } = Chunker.close(state.chunker); const { layout, ...write } = state.config.fileLayout.write( state.layout, chunks - ) + ); const { root, ...close } = state.config.fileLayout.close( layout, state.metadata - ) + ); const [nodes, leaves] = isLeafNode(root) ? [ @@ -224,14 +245,14 @@ export const close = state => { : [ [...write.nodes, ...close.nodes, root], [...write.leaves, ...close.leaves], - ] + ]; - const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue) + const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue); const tasks = [ ...encodeLeaves(leaves, state.config), ...encodeBranches(linked, state.config), - ] + ]; // We want to keep run loop around until root node is linked. To // accomplish this we fork a task that suspends itself, which we will @@ -239,7 +260,7 @@ export const close = state => { // Below we join this forked task in our effect, this way effect is not // complete until task forked task is, which will do once we link the // root. 
- const fork = Task.fork(Task.suspend()) + const fork = Task.fork(Task.suspend()); return { state: { @@ -255,11 +276,11 @@ export const close = state => { link: Task.effects(tasks), end: Task.join(fork), }), - } + }; } else { - return { state, effect: Task.none() } + return { state, effect: Task.none() }; } -} +}; /** * Creates concurrent leaf encode tasks. Each one will have an ID corresponding @@ -269,7 +290,7 @@ export const close = state => { * @param {API.EncoderSettings} config */ const encodeLeaves = (leaves, config) => - leaves.map(leaf => encodeLeaf(config, leaf, config.fileChunkEncoder)) + leaves.map((leaf) => encodeLeaf(config, leaf, config.fileChunkEncoder)); /** * @param {API.EncoderSettings} config @@ -278,26 +299,27 @@ const encodeLeaves = (leaves, config) => * @returns {Task.Task} */ const encodeLeaf = function* ({ hasher, linker }, { id, content }, encoder) { - const bytes = encoder.encode(content ? asUint8Array(content) : EMPTY_BUFFER) - const hash = yield* Task.wait(hasher.digest(bytes)) - const cid = linker.createLink(encoder.code, hash) + const bytes = encoder.encode(content ? asUint8Array(content) : EMPTY_BUFFER); + const hash = yield* Task.wait(hasher.digest(bytes)); + const cid = linker.createLink(encoder.code, hash); - const block = { cid, bytes } + const block = { cid, bytes }; const link = /** @type {UnixFS.FileLink} */ ({ cid, contentByteLength: content ? content.byteLength : 0, + contentByteOffset: content ? 
content.byteOffset : 0, dagByteLength: bytes.byteLength, - }) + }); - return { id, block, link } -} + return { id, block, link }; +}; /** * @param {Queue.LinkedNode[]} nodes * @param {API.EncoderSettings} config */ const encodeBranches = (nodes, config) => - nodes.map(node => encodeBranch(config, node)) + nodes.map((node) => encodeBranch(config, node)); /** * @template Layout @@ -312,18 +334,18 @@ export const encodeBranch = function* (config, { id, links }, metadata) { layout: "advanced", parts: links, metadata, - }) - const hash = yield* Task.wait(Promise.resolve(config.hasher.digest(bytes))) - const cid = config.linker.createLink(config.fileEncoder.code, hash) - const block = { bytes, cid } + }); + const hash = yield* Task.wait(Promise.resolve(config.hasher.digest(bytes))); + const cid = config.linker.createLink(config.fileEncoder.code, hash); + const block = { bytes, cid }; const link = /** @type {UnixFS.FileLink} */ ({ cid, contentByteLength: UnixFS.cumulativeContentByteLength(links), dagByteLength: UnixFS.cumulativeDagByteLength(bytes, links), - }) + }); - return { id, block, link } -} + return { id, block, link }; +}; /** * @param {API.BlockWriter} writer @@ -333,10 +355,27 @@ export const encodeBranch = function* (config, { id, links }, metadata) { export const writeBlock = function* (writer, block) { if ((writer.desiredSize || 0) <= 0) { - yield* Task.wait(writer.ready) + yield* Task.wait(writer.ready); + } + writer.write(block); +}; + +/** + * @param {API.LinkMetadataWriter} writer + * @param {Layout.Link} link + * @returns {Task.Task} + */ + +export const writeLinkMetadata = function* (writer, link) { + /* c8 ignore next 3 */ + if (!writer) { + return; + } + if ((writer.desiredSize || 0) <= 0) { + yield* Task.wait(writer.ready); } - writer.write(block) -} + writer.write(link); +}; /** * @@ -344,13 +383,13 @@ export const writeBlock = function* (writer, block) { * @returns */ -const asUint8Array = buffer => +const asUint8Array = (buffer) => buffer 
instanceof Uint8Array ? buffer - : buffer.copyTo(new Uint8Array(buffer.byteLength), 0) + : buffer.copyTo(new Uint8Array(buffer.byteLength), 0); /** * @param {Layout.Node} node * @returns {node is Layout.Leaf} */ -const isLeafNode = node => node.children == null +const isLeafNode = (node) => node.children == null; diff --git a/src/unixfs.ts b/src/unixfs.ts index a4e88b7..d6980bc 100644 --- a/src/unixfs.ts +++ b/src/unixfs.ts @@ -6,16 +6,21 @@ import type { Link as IPLDLink, Version as LinkVersion, Block as IPLDBlock, - BlockView as IPLDBlockView -} from "multiformats" -import { Data, type IData } from "../gen/unixfs.js" -export type { MultihashHasher, MultibaseEncoder, MultihashDigest, BlockEncoder } -export * as Layout from "./file/layout/api" + BlockView as IPLDBlockView, +} from "multiformats"; +import { Data, type IData } from "../gen/unixfs.js"; +export type { + MultihashHasher, + MultibaseEncoder, + MultihashDigest, + BlockEncoder, +}; +export * as Layout from "./file/layout/api"; -import NodeType = Data.DataType +import NodeType = Data.DataType; -export { NodeType } -export type { IData, LinkVersion } +export { NodeType }; +export type { IData, LinkVersion }; /** * Type representing any UnixFS node. @@ -28,9 +33,9 @@ export type Node = | Directory | DirectoryShard | ShardedDirectory - | Symlink + | Symlink; -export type File = SimpleFile | AdvancedFile | ComplexFile +export type File = SimpleFile | AdvancedFile | ComplexFile; /** * Logical representation of a file that fits a single block. Note this is only @@ -38,16 +43,16 @@ export type File = SimpleFile | AdvancedFile | ComplexFile * depending on where you encounter the node (In root of the DAG or not). 
*/ export interface SimpleFile { - readonly metadata?: Metadata + readonly metadata?: Metadata; - readonly type: NodeType.File - readonly layout: "simple" - readonly content: Uint8Array + readonly type: NodeType.File; + readonly layout: "simple"; + readonly content: Uint8Array; } export interface Metadata { - readonly mode?: Mode - readonly mtime?: MTime + readonly mode?: Mode; + readonly mtime?: MTime; } /** @@ -58,14 +63,14 @@ export interface Metadata { */ export interface AdvancedFile { - readonly metadata?: Metadata + readonly metadata?: Metadata; - readonly type: NodeType.File - readonly layout: "advanced" - readonly parts: ReadonlyArray + readonly type: NodeType.File; + readonly layout: "advanced"; + readonly parts: ReadonlyArray; } -export type Chunk = Raw | FileChunk +export type Chunk = Raw | FileChunk; /** * Encodes UnixFS Raw node (a leaf node of the file DAG layout). This @@ -88,12 +93,12 @@ export type Chunk = Raw | FileChunk * @deprecated */ export interface Raw { - readonly type: NodeType.Raw + readonly type: NodeType.Raw; /** * Raw bytes of the content */ - readonly content: Uint8Array + readonly content: Uint8Array; } /** @@ -121,11 +126,11 @@ export interface Raw { * take `mode` and `mtime` fields into account. */ export interface FileChunk { - readonly type: NodeType.File - readonly layout: "simple" - readonly content: Uint8Array + readonly type: NodeType.File; + readonly layout: "simple"; + readonly content: Uint8Array; - readonly metadata?: Metadata + readonly metadata?: Metadata; } /** @@ -146,21 +151,26 @@ export interface FileChunk { * in any other position (that is ignore `mode`, `mtime` fileds). 
*/ export interface FileShard { - readonly type: NodeType.File - readonly layout: "advanced" - readonly parts: ReadonlyArray + readonly type: NodeType.File; + readonly layout: "advanced"; + readonly parts: ReadonlyArray; } export type FileLink = | ContentDAGLink | ContentDAGLink - | ContentDAGLink + | ContentDAGLink; export interface ContentDAGLink extends DAGLink { /** * Total number of bytes in the file */ - readonly contentByteLength: number + readonly contentByteLength: number; + + /** + * Offset bytes in the file + */ + readonly contentByteOffset?: number; } /** @@ -170,13 +180,13 @@ export interface DAGLink extends Phantom { /** * *C*ontent *Id*entifier of the target DAG. */ - readonly cid: Link + readonly cid: Link; /** * Cumulative number of bytes in the target DAG, that is number of bytes in * the block and all the blocks it links to. */ - readonly dagByteLength: number + readonly dagByteLength: number; } /** * These type of nodes are not produces by referenece IPFS implementations, yet @@ -192,13 +202,13 @@ export interface DAGLink extends Phantom { * @deprecated */ export interface ComplexFile { - readonly type: NodeType.File - readonly layout: "complex" - readonly content: Uint8Array + readonly type: NodeType.File; + readonly layout: "complex"; + readonly content: Uint8Array; - readonly parts: ReadonlyArray + readonly parts: ReadonlyArray; - readonly metadata?: Metadata + readonly metadata?: Metadata; } /** @@ -207,38 +217,38 @@ export interface ComplexFile { * the other definitions. */ export interface UnknownFile { - readonly type: NodeType.File + readonly type: NodeType.File; - readonly content?: Uint8Array - readonly parts?: ReadonlyArray + readonly content?: Uint8Array; + readonly parts?: ReadonlyArray; - readonly metadata?: Metadata + readonly metadata?: Metadata; } /** * Type for either UnixFS directory representation. 
*/ -export type Directory = FlatDirectory | ShardedDirectory +export type Directory = FlatDirectory | ShardedDirectory; /** * Logacal representation of a directory that fits single block. */ export interface FlatDirectory { - readonly type: NodeType.Directory - readonly entries: ReadonlyArray + readonly type: NodeType.Directory; + readonly entries: ReadonlyArray; - readonly metadata?: Metadata + readonly metadata?: Metadata; } export type DirectoryEntryLink = | NamedDAGLink | NamedDAGLink - | NamedDAGLink + | NamedDAGLink; -export type DirectoryLink = DAGLink +export type DirectoryLink = DAGLink; export interface NamedDAGLink extends DAGLink { - readonly name: string + readonly name: string; } /** @@ -259,43 +269,43 @@ export interface ShardedDirectory extends DirectoryShard {} * `mtime` and `mode` field to be ignored. */ export interface DirectoryShard { - readonly type: NodeType.HAMTShard + readonly type: NodeType.HAMTShard; - readonly bitfield: Uint8Array + readonly bitfield: Uint8Array; /* * HAMT table width (In IPFS it's usually 256) */ - readonly fanout: uint64 + readonly fanout: uint64; /** * Multihash code for the hashing function used (In IPFS it's [murmur3-64][]) * * [murmur3-64]:https://github.com/multiformats/multicodec/blob/master/table.csv#L24 */ - readonly hashType: uint64 + readonly hashType: uint64; - readonly entries: ReadonlyArray + readonly entries: ReadonlyArray; - readonly metadata?: Metadata + readonly metadata?: Metadata; } export type ShardedDirectoryLink = | NamedDAGLink | NamedDAGLink | NamedDAGLink - | NamedDAGLink + | NamedDAGLink; /** * Logical representation of a [symbolic link][]. * * [symbolic link]:https://en.wikipedia.org/wiki/Symbolic_link */ export interface Symlink { - readonly type: NodeType.Symlink + readonly type: NodeType.Symlink; /** * UTF-8 encoded path to the symlink target. 
*/ - readonly content: ByteView + readonly content: ByteView; - readonly metadata?: Metadata + readonly metadata?: Metadata; } /** @@ -307,14 +317,14 @@ export interface UnixTime { * (signed 64bit integer): represents the amount of seconds after or before * the epoch. */ - readonly Seconds: int64 + readonly Seconds: int64; /** * (optional, 32bit unsigned integer ): when specified represents the * fractional part of the mtime as the amount of nanoseconds. The valid * range for this value are the integers [1, 999999999]. */ - readonly FractionalNanoseconds?: fixed32 + readonly FractionalNanoseconds?: fixed32; } /** @@ -340,15 +350,15 @@ export interface UnixTime { * * @see https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/sys_stat.h.html */ -export type Mode = uint32 +export type Mode = uint32; /** * representing the modification time in seconds relative to the unix epoch * 1970-01-01T00:00:00Z. */ export interface MTime { - readonly secs: number - readonly nsecs?: number + readonly secs: number; + readonly nsecs?: number; } /** @@ -360,15 +370,15 @@ export interface ByteView extends Uint8Array, Phantom {} /** * @see https://github.com/ipfs/go-bitfield */ -export type Bitfield = Uint8Array +export type Bitfield = Uint8Array; // TS does not really have these, create aliases so it's aligned closer // to protobuf spec -export type int64 = number -export type fixed32 = number -export type uint64 = number +export type int64 = number; +export type fixed32 = number; +export type uint64 = number; -export type uint32 = number +export type uint32 = number; /** * This is an utility type to retain unused type parameter `T`. It can be used @@ -378,10 +388,10 @@ export interface Phantom { // This field can not be represented because field name is non-existings // unique symbol. But given that field is optional any object will valid // type contstraint. 
- [PhantomKey]?: T + [PhantomKey]?: T; } -declare const PhantomKey: unique symbol +declare const PhantomKey: unique symbol; export interface Link< Data extends unknown = unknown, @@ -391,9 +401,9 @@ export interface Link< > extends IPLDLink {} export interface PBLink { - Name?: string - Tsize?: number - Hash: Link + Name?: string; + Tsize?: number; + Hash: Link; } export interface Block< diff --git a/test/directory.spec.js b/test/directory.spec.js index ce466d3..161d772 100644 --- a/test/directory.spec.js +++ b/test/directory.spec.js @@ -1,15 +1,15 @@ -import * as UnixFS from "../src/lib.js" -import { assert } from "chai" -import { encodeUTF8, Link, collect, importFile } from "./util.js" +import * as UnixFS from "../src/lib.js"; +import { assert } from "chai"; +import { encodeUTF8, Link, collect, importFile } from "./util.js"; -const createChannel = () => new TransformStream() +const createChannel = () => new TransformStream(); describe("test directory", () => { it("empty dir", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createDirectoryWriter({ writer }) - const link = await root.close() - writer.close() + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createDirectoryWriter({ writer }); + const link = await root.close(); + writer.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -17,28 +17,28 @@ describe("test directory", () => { "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" ), dagByteLength: 4, - }) - const output = await collect(readable) + }); + const output = await collect(readable); assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" ), ] - ) - }) + ); + }); it("basic file in directory", async () => { - const { readable, writable } = new TransformStream() - const writer = 
writable.getWriter() - const blocks = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) - const content = encodeUTF8("this file does not have much content\n") - file.write(content) - const fileLink = await file.close() + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const blocks = collect(readable); + const root = UnixFS.createDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); + const content = encodeUTF8("this file does not have much content\n"); + file.write(content); + const fileLink = await file.close(); assert.deepEqual(fileLink, { /** @type {Link.Link} */ @@ -47,10 +47,11 @@ describe("test directory", () => { ), dagByteLength: 45, contentByteLength: 37, - }) + contentByteOffset: 0, + }); - root.set("file.txt", fileLink) - const rootLink = await root.close() + root.set("file.txt", fileLink); + const rootLink = await root.close(); assert.deepEqual(rootLink, { dagByteLength: 101, @@ -58,14 +59,14 @@ describe("test directory", () => { cid: Link.parse( "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), - }) + }); - writer.close() + writer.close(); - const output = await blocks + const output = await blocks; assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" @@ -74,50 +75,50 @@ describe("test directory", () => { "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), ] - ) - }) + ); + }); it("nested directory", async () => { - const { readable, writable } = new TransformStream() - const blocks = collect(readable) - const writer = writable.getWriter() - const root = UnixFS.createDirectoryWriter({ writer }) - const nested = UnixFS.createDirectoryWriter(root) + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); + const root = 
UnixFS.createDirectoryWriter({ writer }); + const nested = UnixFS.createDirectoryWriter(root); - root.set("nested", await nested.close()) + root.set("nested", await nested.close()); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeibjme43s5mbvupa25dl3xpbkmuqeje7hefvavy6k7cuhm3nxz2m3q" ), dagByteLength: 58, - }) - writer.close() + }); + writer.close(); - const items = await blocks + const items = await blocks; assert.deepEqual( items.map(({ cid }) => cid.toString()), [ "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354", "bafybeibjme43s5mbvupa25dl3xpbkmuqeje7hefvavy6k7cuhm3nxz2m3q", ] - ) - }) + ); + }); it("double nested directory", async () => { - const { readable, writable } = new TransformStream() - const blocks = collect(readable) - const writer = writable.getWriter() - - const root = UnixFS.createDirectoryWriter({ writer }) - const nested = UnixFS.createDirectoryWriter(root) - - root.set("nested", await nested.close()) - const main = UnixFS.createDirectoryWriter({ writer }) - main.set("root", await root.close()) - const link = await main.close() - writer.close() - const items = await blocks + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); + + const root = UnixFS.createDirectoryWriter({ writer }); + const nested = UnixFS.createDirectoryWriter(root); + + root.set("nested", await nested.close()); + const main = UnixFS.createDirectoryWriter({ writer }); + main.set("root", await root.close()); + const link = await main.close(); + writer.close(); + const items = await blocks; assert.deepEqual( items.map(({ cid }) => cid.toString()), [ @@ -125,43 +126,45 @@ describe("test directory", () => { "bafybeibjme43s5mbvupa25dl3xpbkmuqeje7hefvavy6k7cuhm3nxz2m3q", "bafybeifr5xx3ihkbvvodn6xgejnkeuzyak3pwgrbqahb2afazqfes6opla", ] - ) - }) + ); + }); it("throws if file already exists", async () => { - const { readable, writable } = new 
TransformStream() - const blocks = collect(readable) - const writer = writable.getWriter() + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); - const root = UnixFS.createDirectoryWriter({ writer }) + const root = UnixFS.createDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta" ), dagByteLength: 11, + contentByteOffset: 0, contentByteLength: 3, - }) + }); - root.set("hello", hello) + root.set("hello", hello); assert.throws( () => root.set("hello", bye), /Directory already contains entry with name "hello"/ - ) - root.set("bye", bye) - const link = await root.close() + ); + root.set("bye", bye); + const link = await root.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -169,37 +172,38 @@ describe("test directory", () => { "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44" ), dagByteLength: 124, - }) - writer.close() - const items = await blocks + }); + writer.close(); + const items = await blocks; assert.deepEqual( - items.map(item => item.cid.toString()), + items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44", ] - ) - }) + ); + }); it("can overwrite existing", async () => { - const { readable, writable } = new TransformStream() - const blocks = collect(readable) - const writer = 
writable.getWriter() + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); - const root = UnixFS.createDirectoryWriter({ writer }) + const root = UnixFS.createDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -207,11 +211,12 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) - root.set("hello", bye, { overwrite: true }) - const link = await root.close() + root.set("hello", hello); + root.set("hello", bye, { overwrite: true }); + const link = await root.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -219,39 +224,40 @@ describe("test directory", () => { "bafybeid6gy6b24lpyqtdmch7chsef4wykmxsh3ysuj2ou3wlz3cevdcc4a" ), dagByteLength: 64, - }) - writer.close() - const items = await blocks + }); + writer.close(); + const items = await blocks; assert.deepEqual( - items.map(item => item.cid.toString()), + items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeid6gy6b24lpyqtdmch7chsef4wykmxsh3ysuj2ou3wlz3cevdcc4a", ] - ) - }) + ); + }); it("can delete entries", async () => { - const { readable, writable } = createChannel() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = createChannel(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root 
= UnixFS.createDirectoryWriter({ writer }) + const root = UnixFS.createDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - root.set("hello", hello) - root.remove("hello") - const link = await root.close() + root.set("hello", hello); + root.remove("hello"); + const link = await root.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -259,51 +265,52 @@ describe("test directory", () => { "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" ), dagByteLength: 4, - }) - writer.close() - const blocks = await reader + }); + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354", ] - ) - }) + ); + }); it("throws on invalid filenames", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]) + const root = UnixFS.createDirectoryWriter({ writer }); + const hello = await importFile(root, ["hello"]); assert.throws( () => root.set("hello/world", hello), /Directory entry name "hello\/world" contains forbidden "\/" character/ - ) - writer.close() - }) + ); + writer.close(); + }); it("can not change after close", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = 
collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createDirectoryWriter({ writer }) + const root = UnixFS.createDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -311,52 +318,54 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) + root.set("hello", hello); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }) + }); assert.throws( () => root.set("bye", bye), /Can not change written directory, but you can \.fork\(\) and make changes to it/ - ) + ); - writer.close() - const blocks = await reader + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ) - }) + ); + }); it("can fork and edit", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createDirectoryWriter({ 
writer }) + const root = UnixFS.createDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -364,85 +373,87 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) + root.set("hello", hello); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }) + }); - const fork = root.fork() - fork.set("bye", bye) + const fork = root.fork(); + fork.set("bye", bye); assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44" ), dagByteLength: 124, - }) + }); - writer.close() - const blocks = await reader + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44", ] - ) - }) + ); + }); it("can autoclose", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) - - const root = UnixFS.createDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) - file.write(new TextEncoder().encode("hello")) - root.set("hello", 
await file.close()) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); + + const root = UnixFS.createDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); + file.write(new TextEncoder().encode("hello")); + root.set("hello", await file.close()); assert.deepEqual(await root.close({ closeWriter: true }), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }) + }); - const blocks = await reader + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ) - }) + ); + }); it("fork into other stream", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createDirectoryWriter({ writer }) + const root = UnixFS.createDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -450,63 +461,64 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) + root.set("hello", hello); assert.deepEqual(await root.close(), { /** @type {Link.Link} 
*/ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }) + }); - const patch = new TransformStream() - const patchWriter = patch.writable.getWriter() - const patchReader = collect(patch.readable) + const patch = new TransformStream(); + const patchWriter = patch.writable.getWriter(); + const patchReader = collect(patch.readable); - const fork = root.fork({ writer: patchWriter }) - fork.set("bye", bye) + const fork = root.fork({ writer: patchWriter }); + fork.set("bye", bye); assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44" ), dagByteLength: 124, - }) + }); - writer.close() - const blocks = await reader + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ) + ); - patchWriter.close() - const delta = await patchReader + patchWriter.close(); + const delta = await patchReader; assert.deepEqual( - delta.map(block => block.cid.toString()), + delta.map((block) => block.cid.toString()), ["bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44"] - ) - }) + ); + }); it("can close writer", async function () { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const blocks = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const blocks = collect(readable); + const root = UnixFS.createDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); - file.write(encodeUTF8("this file does not have much content\n")) - 
assert.equal(writable.locked, true) - root.set("file.txt", await file.close()) - const link = await root.close({ releaseLock: true, closeWriter: true }) + file.write(encodeUTF8("this file does not have much content\n")); + assert.equal(writable.locked, true); + root.set("file.txt", await file.close()); + const link = await root.close({ releaseLock: true, closeWriter: true }); - await blocks + await blocks; assert.deepEqual(link, { dagByteLength: 101, @@ -514,24 +526,24 @@ describe("test directory", () => { cid: Link.parse( "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), - }) - }) + }); + }); it("can release writer lock", async function () { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const blocks = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const blocks = collect(readable); + const root = UnixFS.createDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); - file.write(encodeUTF8("this file does not have much content\n")) - assert.equal(writable.locked, true) - root.set("file.txt", await file.close()) - const link = await root.close({ releaseLock: true }) - assert.equal(writable.locked, false) + file.write(encodeUTF8("this file does not have much content\n")); + assert.equal(writable.locked, true); + root.set("file.txt", await file.close()); + const link = await root.close({ releaseLock: true }); + assert.equal(writable.locked, false); - writable.close() - await blocks + writable.close(); + await blocks; assert.deepEqual(link, { dagByteLength: 101, @@ -539,68 +551,68 @@ describe("test directory", () => { cid: Link.parse( "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), - }) - }) + }); + }); it("can enumerate entries", async function () { - const { writable } = new TransformStream() - const 
writer = writable.getWriter() - const root = UnixFS.createDirectoryWriter({ writer }) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createDirectoryWriter({ writer }); - assert.deepEqual([...root.entries()], []) + assert.deepEqual([...root.entries()], []); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } + }; - root.set("file.txt", fileLink) - assert.deepEqual([...root.entries()], [["file.txt", fileLink]]) - }) + root.set("file.txt", fileLink); + assert.deepEqual([...root.entries()], [["file.txt", fileLink]]); + }); it(".has", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createDirectoryWriter({ writer }) - assert.equal(root.has("file.txt"), false) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createDirectoryWriter({ writer }); + assert.equal(root.has("file.txt"), false); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }) - assert.equal(root.has("file.txt"), true) + }); + assert.equal(root.has("file.txt"), true); - root.remove("file.txt") - assert.equal(root.has("file.txt"), false) - }) + root.remove("file.txt"); + assert.equal(root.has("file.txt"), false); + }); it(".size", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( 
"bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }) - assert.equal(root.size, 1) + }); + assert.equal(root.size, 1); - root.remove("file.txt") - assert.equal(root.size, 0) - }) -}) + root.remove("file.txt"); + assert.equal(root.size, 0); + }); +}); diff --git a/test/file.spec.js b/test/file.spec.js index 6140f55..d583890 100644 --- a/test/file.spec.js +++ b/test/file.spec.js @@ -1,110 +1,161 @@ /* eslint-env mocha */ -import { assert } from "chai" -import { encodeUTF8, Link, hashrecur, collect } from "./util.js" -import * as UnixFS from "../src/lib.js" -import * as Trickle from "../src/file/layout/trickle.js" -import * as Balanced from "../src/file/layout/balanced.js" -import * as FixedSize from "../src/file/chunker/fixed.js" -import * as Rabin from "../src/file/chunker/rabin.js" -import { sha256 } from "multiformats/hashes/sha2" - -const CHUNK_SIZE = 262144 +import { assert } from "chai"; +import { encodeUTF8, Link, hashrecur, collect } from "./util.js"; +import * as UnixFS from "../src/lib.js"; +import * as Trickle from "../src/file/layout/trickle.js"; +import * as Balanced from "../src/file/layout/balanced.js"; +import * as FixedSize from "../src/file/chunker/fixed.js"; +import * as Rabin from "../src/file/chunker/rabin.js"; +import { sha256 } from "multiformats/hashes/sha2"; + +const CHUNK_SIZE = 262144; describe("test file", () => { it("basic file", async function () { - this.timeout(30000) - const content = encodeUTF8("this file does not have much content\n") - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - - const file = UnixFS.createFileWriter({ writer }) - await file.write(content) - const link = await file.close() - writer.close() - - assert.equal(link.contentByteLength, 37) - assert.equal(link.dagByteLength, 45) + this.timeout(30000); + const content = encodeUTF8("this file does not have much content\n"); + const { 
readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + + const file = UnixFS.createFileWriter({ writer }); + await file.write(content); + const link = await file.close(); + writer.close(); + + assert.equal(link.contentByteLength, 37); + assert.equal(link.dagByteLength, 45); assert.equal( link.cid.toString(), "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); - const blocks = readable.getReader() - const read = await blocks.read() + const blocks = readable.getReader(); + const read = await blocks.read(); if (read.done) { - assert.fail("expected to get a block") + assert.fail("expected to get a block"); } - const block = read.value + const block = read.value; assert.deepEqual( block.cid.toString(), "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); - const end = await blocks.read() - assert.deepEqual(end, { done: true, value: undefined }) - }) + const end = await blocks.read(); + assert.deepEqual(end, { done: true, value: undefined }); + }); it("splits into 3 chunks", async function () { + const rawFiles = [ + new Uint8Array(CHUNK_SIZE).fill(1), + new Uint8Array(CHUNK_SIZE).fill(2), + new Uint8Array(CHUNK_SIZE).fill(3), + ]; const { readable, writable } = new TransformStream( {}, {}, { highWaterMark: 5, } - ) - const writer = writable.getWriter() - const file = UnixFS.createFileWriter({ writer }) - file.write(new Uint8Array(CHUNK_SIZE).fill(1)) - file.write(new Uint8Array(CHUNK_SIZE).fill(2)) - file.write(new Uint8Array(CHUNK_SIZE).fill(3)) - const link = await file.close() + ); + const writer = writable.getWriter(); + + // Capture links metadata + /** @type {import('../src/unixfs.js').FileLink[]} */ + const linkMetadataItems = []; + const { readable: linkMetadataReadable, writable: linkMetadataWritable } = + new TransformStream(); + // Start consuming links stream asynchronously + void (async () => { + const reader = linkMetadataReadable.getReader(); + while (true) { + const { done, value } = 
await reader.read(); + if (done) break; + linkMetadataItems.push(value); + } + })(); + + const file = UnixFS.createFileWriter({ + writer, + initOptions: { + linkMetadataWriter: linkMetadataWritable.getWriter(), + }, + }); + for (const rawFile of rawFiles) { + file.write(rawFile); + } + const link = await file.close(); + // Check the root CID // TODO: So go-ipfs sets CIDv0 links which casuse a mismatch assert.deepEqual(link, { - contentByteLength: 786432, + contentByteLength: CHUNK_SIZE * 3, dagByteLength: 786632, /** @type {Link.Link} */ cid: Link.parse( "bafybeiegda62p2cdi5sono3h3hqjnxwc56z4nocynrj44rz7rtc2p246cy" ), - }) + }); + + const blocks = readable.getReader(); - const blocks = readable.getReader() - const r1 = await blocks.read() + // Check the first block + const r1 = await blocks.read(); if (r1.done) { - assert.fail("expected to get a block") + assert.fail("expected to get a block"); } - assert.deepEqual( r1.value.cid, Link.parse("bafybeihhsdoupgd3fnl3e3367ymsanmikafpllldsdt37jzyoh6nuatowe") - ) - - const r2 = await blocks.read() + ); + const l1 = linkMetadataItems.find((l) => l.cid.equals(r1.value.cid)); + assert.isTrue(l1 !== undefined); + assert.equal(l1?.contentByteLength, CHUNK_SIZE); + assert.equal(l1?.dagByteLength, CHUNK_SIZE + 14); + assert.equal(l1?.contentByteOffset, 0); + + // Check the second block + const r2 = await blocks.read(); if (r2.done) { - assert.fail("expected to get a block") + assert.fail("expected to get a block"); } assert.deepEqual( r2.value.cid, Link.parse("bafybeief3dmadxfymhhhrflqytqmlhlz47w6glaxvyzmm6s6tpfb6izzee") - ) - - const r3 = await blocks.read() + ); + const l2 = linkMetadataItems.find((l) => l.cid.equals(r2.value.cid)); + assert.isTrue(l2 !== undefined); + assert.equal(l2?.contentByteLength, CHUNK_SIZE); + assert.equal(l2?.dagByteLength, CHUNK_SIZE + 14); + assert.equal(l2?.contentByteOffset, CHUNK_SIZE); + + // Check the third block + const r3 = await blocks.read(); if (r3.done) { - assert.fail("expected to get a 
block") + assert.fail("expected to get a block"); } assert.deepEqual( r3.value.cid, Link.parse("bafybeihznihf5g5ibdyoawn7uu3inlyqrxjv63lt6lop6h3w6rzwrp67a4") - ) - - await writer.close() - }) + ); + const l3 = linkMetadataItems.find((l) => l.cid.equals(r3.value.cid)); + assert.isTrue(l3 !== undefined); + assert.equal(l3?.contentByteLength, CHUNK_SIZE); + assert.equal(l3?.dagByteLength, CHUNK_SIZE + 14); + assert.equal(l3?.contentByteOffset, CHUNK_SIZE * 2); + + await writer.close(); + + // Check root + assert.isTrue( + linkMetadataItems.find((l) => l.cid.equals(link.cid)) !== undefined + ); + assert.equal(linkMetadataItems.length, 4); + }); it("--chunker=size-65535 --trickle=false --raw-leaves=false --cid-version=1", async () => { - const chunkSize = 65535 - const { readable, writable } = new TransformStream() + const chunkSize = 65535; + const { readable, writable } = new TransformStream(); const settings = { chunker: FixedSize.withMaxChunkSize(chunkSize), fileChunkEncoder: UnixFS.UnixFSLeaf, @@ -113,23 +164,24 @@ describe("test file", () => { linker: { createLink: Link.create }, hasher: sha256, fileEncoder: UnixFS, - } - const writer = writable.getWriter() - collect(readable) + }; + const writer = writable.getWriter(); + collect(readable); - const file = UnixFS.createFileWriter({ writer, settings }) + const file = UnixFS.createFileWriter({ writer, settings }); - const size = Math.round(chunkSize * 2.2) - const FRAME = Math.round(size / 10) - let offset = 0 - let n = 0 + const size = Math.round(chunkSize * 2.2); + const FRAME = Math.round(size / 10); + let offset = 0; + let n = 0; while (offset < size) { - const slice = new Uint8Array(Math.min(FRAME, size - offset)).fill(++n) - file.write(slice) - offset += FRAME + const slice = new Uint8Array(Math.min(FRAME, size - offset)).fill(++n); + file.write(slice); + offset += FRAME; } - const link = await file.close() + const link = await file.close(); + console.log("link", link); assert.deepEqual(link, { /** @type 
{Link.Link} */ cid: Link.parse( @@ -137,63 +189,63 @@ describe("test file", () => { ), contentByteLength: 144177, dagByteLength: 144372, - }) + }); - await writer.close() - }) + await writer.close(); + }); it("chunks with rabin chunker", async function () { - this.timeout(30000) + this.timeout(30000); const content = hashrecur({ byteLength: CHUNK_SIZE * 2, - }) - const chunker = await Rabin.create() + }); + const chunker = await Rabin.create(); - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); - const settings = UnixFS.configure({ chunker }) + const settings = UnixFS.configure({ chunker }); - const collector = collect(readable) - const file = UnixFS.createFileWriter({ writer, settings }) + const collector = collect(readable); + const file = UnixFS.createFileWriter({ writer, settings }); for await (const slice of content) { - file.write(slice) + file.write(slice); } - const link = await file.close() - writer.close() - const blocks = await collector + const link = await file.close(); + writer.close(); + const blocks = await collector; assert.deepEqual( link.cid, Link.parse("bafybeicj5kf4mohavbbh4j5izwy3k23cysewxfhgtmlaoxq6sewx2tsr7u") - ) + ); - assert.deepEqual((await blocks).length, 4) - }) + assert.deepEqual((await blocks).length, 4); + }); it("trickle layout", async function () { - this.timeout(30000) + this.timeout(30000); const content = hashrecur({ byteLength: CHUNK_SIZE * 2, - }) - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() + }); + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(1300), fileLayout: Trickle, fileChunkEncoder: UnixFS.UnixFSRawLeaf, - }) + }); - const file = UnixFS.createFileWriter({ writer, settings }) - const collector = collect(readable) + const 
file = UnixFS.createFileWriter({ writer, settings }); + const collector = collect(readable); for await (const slice of content) { - file.write(slice) + file.write(slice); } - const link = await file.close() - writer.close() - const blocks = await collector + const link = await file.close(); + writer.close(); + const blocks = await collector; assert.deepEqual(link, { /** @type {Link.Link} */ @@ -202,32 +254,32 @@ describe("test file", () => { ), contentByteLength: 524288, dagByteLength: 548251, - }) - }) + }); + }); it("trickle layout with overflow", async function () { - this.timeout(30000) + this.timeout(30000); const content = hashrecur({ byteLength: CHUNK_SIZE * 2, - }) - const { readable, writable } = new TransformStream() + }); + const { readable, writable } = new TransformStream(); - const writer = writable.getWriter() + const writer = writable.getWriter(); const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(100000), fileLayout: Trickle.configure({ maxDirectLeaves: 5 }), fileChunkEncoder: UnixFS.UnixFSRawLeaf, - }) + }); - const blocks = collect(readable) - const file = UnixFS.createFileWriter({ writer, settings }) + const blocks = collect(readable); + const file = UnixFS.createFileWriter({ writer, settings }); for await (const slice of content) { - file.write(slice) + file.write(slice); } - const link = await file.close() - writer.close() - await blocks + const link = await file.close(); + writer.close(); + await blocks; assert.deepEqual(link, { /** @type {Link.Link} */ @@ -236,34 +288,34 @@ describe("test file", () => { ), contentByteLength: 524288, dagByteLength: 524738, - }) - }) + }); + }); it("trickle with several levels deep", async function () { - this.timeout(30000) - const chunkSize = 128 - const maxLeaves = 4 - const leafCount = 42 + this.timeout(30000); + const chunkSize = 128; + const maxLeaves = 4; + const leafCount = 42; - const content = hashrecur({ byteLength: chunkSize * leafCount }) - const { readable, writable } = new 
TransformStream() - const writer = writable.getWriter() + const content = hashrecur({ byteLength: chunkSize * leafCount }); + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(chunkSize), fileLayout: Trickle.configure({ maxDirectLeaves: maxLeaves }), fileChunkEncoder: UnixFS.UnixFSRawLeaf, - }) + }); - const blocks = collect(readable) - const file = UnixFS.createFileWriter({ writer, settings }) + const blocks = collect(readable); + const file = UnixFS.createFileWriter({ writer, settings }); for await (const slice of content) { - file.write(slice) + file.write(slice); } - const link = await file.close() - writer.close() - await blocks + const link = await file.close(); + writer.close(); + await blocks; assert.deepEqual(link, { /** @type {Link.Link} */ @@ -272,19 +324,19 @@ describe("test file", () => { ), contentByteLength: chunkSize * leafCount, dagByteLength: 8411, - }) - }) + }); + }); it("write empty with defaults", async function () { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const file = UnixFS.createFileWriter({ writer }) - const blocks = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const file = UnixFS.createFileWriter({ writer }); + const blocks = collect(readable); - file.write(new Uint8Array()) - const link = await file.close() - writer.close() - await blocks + file.write(new Uint8Array()); + const link = await file.close(); + writer.close(); + await blocks; assert.deepEqual(link, { /** @type {Link.Link} */ @@ -292,19 +344,20 @@ describe("test file", () => { "bafybeif7ztnhq65lumvvtr4ekcwd2ifwgm3awq4zfr3srh462rwyinlb4y" ), contentByteLength: 0, + contentByteOffset: 0, dagByteLength: 6, - }) - }) + }); + }); it("can close writer", async function () { - const { readable, writable } = new TransformStream() - const writer = 
writable.getWriter() - const file = UnixFS.createFileWriter({ writer }) - const blocks = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const file = UnixFS.createFileWriter({ writer }); + const blocks = collect(readable); - file.write(encodeUTF8("this file does not have much content\n")) - const link = await file.close({ closeWriter: true }) - await blocks + file.write(encodeUTF8("this file does not have much content\n")); + const link = await file.close({ closeWriter: true }); + await blocks; assert.deepEqual(link, { /** @type {Link.Link} */ @@ -312,22 +365,23 @@ describe("test file", () => { "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" ), contentByteLength: 37, + contentByteOffset: 0, dagByteLength: 45, - }) - }) + }); + }); it("can release writer lock", async function () { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const file = UnixFS.createFileWriter({ writer }) - const blocks = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const file = UnixFS.createFileWriter({ writer }); + const blocks = collect(readable); - file.write(encodeUTF8("this file does not have much content\n")) - const link = await file.close({ releaseLock: true }) - assert.equal(writable.locked, false) + file.write(encodeUTF8("this file does not have much content\n")); + const link = await file.close({ releaseLock: true }); + assert.equal(writable.locked, false); - writable.close() - await blocks + writable.close(); + await blocks; assert.deepEqual(link, { /** @type {Link.Link} */ @@ -335,24 +389,25 @@ describe("test file", () => { "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" ), contentByteLength: 37, + contentByteOffset: 0, dagByteLength: 45, - }) - }) + }); + }); it("can create writer from writer", async function () { - const { readable, writable } = new TransformStream() - 
const writer = writable.getWriter() + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(18), - }) + }); const file = UnixFS.createFileWriter({ writer, settings, - }) + }); - const file2 = UnixFS.createFileWriter(file) - assert.equal(file2.writer, writer) - assert.deepEqual(file2.settings, settings) - }) -}) + const file2 = UnixFS.createFileWriter(file); + assert.equal(file2.writer, writer); + assert.deepEqual(file2.settings, settings); + }); +}); diff --git a/test/lib.spec.js b/test/lib.spec.js index f82a3f3..ed4f818 100644 --- a/test/lib.spec.js +++ b/test/lib.spec.js @@ -1,37 +1,38 @@ -import * as UnixFS from "../src/lib.js" -import { assert } from "chai" -import { encodeUTF8, Link, collect, importFile } from "./util.js" +import * as UnixFS from "../src/lib.js"; +import { assert } from "chai"; +import { encodeUTF8, Link, collect, importFile } from "./util.js"; describe("UnixFS.createWriter", () => { it("UnixFS.createFileWriter", async () => { - const { readable, writable } = new TransformStream() - const reader = collect(readable) - const writer = UnixFS.createWriter({ writable }) - const file = UnixFS.createFileWriter(writer) - file.write(new TextEncoder().encode("hello world")) + const { readable, writable } = new TransformStream(); + const reader = collect(readable); + const writer = UnixFS.createWriter({ writable }); + const file = UnixFS.createFileWriter(writer); + file.write(new TextEncoder().encode("hello world")); assert.deepEqual(await file.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa" ), dagByteLength: 19, + contentByteOffset: 0, contentByteLength: 11, - }) - writer.close() + }); + writer.close(); - const blocks = await reader + const blocks = await reader; assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), 
["bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa"] - ) - }) + ); + }); it("fs.createFileWriter", async () => { - const { readable, writable } = new TransformStream() - const reader = collect(readable) - const writer = UnixFS.createWriter({ writable }) - const file = writer.createFileWriter() - file.write(encodeUTF8("hello world")) + const { readable, writable } = new TransformStream(); + const reader = collect(readable); + const writer = UnixFS.createWriter({ writable }); + const file = writer.createFileWriter(); + file.write(encodeUTF8("hello world")); assert.deepEqual(await file.close(), { /** @type {Link.Link} */ cid: Link.parse( @@ -39,114 +40,115 @@ describe("UnixFS.createWriter", () => { ), dagByteLength: 19, contentByteLength: 11, - }) + contentByteOffset: 0, + }); - writer.close() + writer.close(); - const blocks = await reader + const blocks = await reader; assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), ["bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa"] - ) - }) + ); + }); it("UnixFS.createDirectoryWriter", async () => { - const { readable, writable } = new TransformStream() - const reader = collect(readable) - const writer = UnixFS.createWriter({ writable }) - const root = UnixFS.createDirectoryWriter(writer) + const { readable, writable } = new TransformStream(); + const reader = collect(readable); + const writer = UnixFS.createWriter({ writable }); + const root = UnixFS.createDirectoryWriter(writer); - root.set("hello", await importFile(root, ["hello"])) + root.set("hello", await importFile(root, ["hello"])); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }) - writer.close() + }); + writer.close(); - const blocks = await reader + const blocks = await reader; assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), [ 
"bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ) - }) + ); + }); it("fs.createDirectoryWriter", async () => { - const { readable, writable } = new TransformStream() - const reader = collect(readable) - const writer = UnixFS.createWriter({ writable }) - const root = writer.createDirectoryWriter() + const { readable, writable } = new TransformStream(); + const reader = collect(readable); + const writer = UnixFS.createWriter({ writable }); + const root = writer.createDirectoryWriter(); - root.set("hello", await importFile(root, ["hello"])) + root.set("hello", await importFile(root, ["hello"])); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }) - writer.close() + }); + writer.close(); - const blocks = await reader + const blocks = await reader; assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ) - }) + ); + }); it("can release lock", async () => { - const { readable, writable } = new TransformStream() - const reader = collect(readable) - const writer = UnixFS.createWriter({ writable }) - const root = UnixFS.createDirectoryWriter(writer) + const { readable, writable } = new TransformStream(); + const reader = collect(readable); + const writer = UnixFS.createWriter({ writable }); + const root = UnixFS.createDirectoryWriter(writer); - root.set("hello", await importFile(root, ["hello"])) + root.set("hello", await importFile(root, ["hello"])); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }) - writer.close({ closeWriter: false }) - assert.equal(writable.locked, false) + }); + 
writer.close({ closeWriter: false }); + assert.equal(writable.locked, false); - const wr = writable.getWriter() - assert.equal(writable.locked, true) + const wr = writable.getWriter(); + assert.equal(writable.locked, true); - wr.close() + wr.close(); - const blocks = await reader + const blocks = await reader; assert.deepEqual( - blocks.map($ => $.cid.toString()), + blocks.map(($) => $.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ) - }) -}) + ); + }); +}); describe("UnixFS.withCapacity", async () => { const { readable, writable } = new TransformStream( {}, UnixFS.withCapacity(128) - ) + ); - const fs = UnixFS.createWriter({ writable }) - const file = UnixFS.createFileWriter(fs) - file.write(new TextEncoder().encode("hello world")) + const fs = UnixFS.createWriter({ writable }); + const file = UnixFS.createFileWriter(fs); + file.write(new TextEncoder().encode("hello world")); assert.deepEqual(await file.close(), { /** @type {Link.Link} */ cid: Link.parse( @@ -154,12 +156,13 @@ describe("UnixFS.withCapacity", async () => { ), dagByteLength: 19, contentByteLength: 11, - }) + contentByteOffset: 0, + }); - assert.equal(fs.writer.desiredSize, 128 - 19) + assert.equal(fs.writer.desiredSize, 128 - 19); - const bye = UnixFS.createFileWriter(fs) - bye.write(new TextEncoder().encode("bye")) + const bye = UnixFS.createFileWriter(fs); + bye.write(new TextEncoder().encode("bye")); assert.deepEqual(await bye.close(), { /** @type {Link.Link} */ @@ -168,8 +171,9 @@ describe("UnixFS.withCapacity", async () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - assert.equal(fs.writer.desiredSize, 128 - 19 - 11) - fs.close() -}) + assert.equal(fs.writer.desiredSize, 128 - 19 - 11); + fs.close(); +}); diff --git a/test/sharded-directory.spec.js b/test/sharded-directory.spec.js index e5a5bad..57219cf 100644 --- a/test/sharded-directory.spec.js +++ 
b/test/sharded-directory.spec.js @@ -1,15 +1,15 @@ -import * as UnixFS from "../src/lib.js" -import { assert } from "chai" -import { encodeUTF8, Link, collect, importFile } from "./util.js" +import * as UnixFS from "../src/lib.js"; +import { assert } from "chai"; +import { encodeUTF8, Link, collect, importFile } from "./util.js"; -const createChannel = () => new TransformStream() +const createChannel = () => new TransformStream(); describe("test directory", () => { it("empty dir", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const link = await root.close() - writer.close() + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const link = await root.close(); + writer.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -17,28 +17,28 @@ describe("test directory", () => { "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m" ), dagByteLength: 9, - }) - const output = await collect(readable) + }); + const output = await collect(readable); assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m" ), ] - ) - }) + ); + }); it("basic file in directory", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const blocks = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) - const content = encodeUTF8("this file does not have much content\n") - file.write(content) - const fileLink = await file.close() + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const blocks = collect(readable); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const file = 
UnixFS.createFileWriter(root); + const content = encodeUTF8("this file does not have much content\n"); + file.write(content); + const fileLink = await file.close(); assert.deepEqual(fileLink, { /** @type {Link.Link} */ @@ -47,10 +47,11 @@ describe("test directory", () => { ), dagByteLength: 45, contentByteLength: 37, - }) + contentByteOffset: 0, + }); - root.set("file.txt", fileLink) - const rootLink = await root.close() + root.set("file.txt", fileLink); + const rootLink = await root.close(); assert.deepEqual(rootLink, { dagByteLength: 133, @@ -58,14 +59,14 @@ describe("test directory", () => { cid: Link.parse( "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), - }) + }); - writer.close() + writer.close(); - const output = await blocks + const output = await blocks; assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" @@ -74,18 +75,18 @@ describe("test directory", () => { "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), ] - ) - }) + ); + }); it("many files in directory", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const blocks = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) - const content = encodeUTF8("this file does not have much content\n") - file.write(content) - const fileLink = await file.close() + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const blocks = collect(readable); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); + const content = encodeUTF8("this file does not have much content\n"); + file.write(content); + const fileLink = await file.close(); assert.deepEqual(fileLink, { /** @type {Link.Link} */ @@ -94,13 +95,14 @@ describe("test directory", () => { ), dagByteLength: 45, 
contentByteLength: 37, - }) + contentByteOffset: 0, + }); for (let i = 0; i < 100; i++) { - root.set(`file${i}.txt`, fileLink) + root.set(`file${i}.txt`, fileLink); } - const rootLink = await root.close() + const rootLink = await root.close(); assert.deepEqual(rootLink, { dagByteLength: 11591, @@ -108,80 +110,120 @@ describe("test directory", () => { cid: Link.parse( "bafybeidzpkzefoys5ani6qfvrpxyjiolmy6ng445uceov2a33r5bw43qwe" ), - }) + }); - writer.close() + writer.close(); - const output = await blocks + const output = await blocks; assert.deepEqual( - output.map($ => $.cid), + output.map(($) => $.cid), [ - Link.parse("bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4"), - Link.parse("bafybeic66itcox6c3pozwsktz552f3pd3eanqr74jpvjezchwrkpqemjru"), - Link.parse("bafybeigyad752jkaj6qrlgtvovw5dzvhcj7pfvo5pjxkdlzec3kn3qqcoy"), - Link.parse("bafybeiflrsirdjonnavtsdg7vb63z7mcnzuymuv6eiwxw2wxqkezhludjm"), - Link.parse("bafybeigw2ilsvwhg3uglrmryyuk7dtu4yudr5naerrzb5e7ibmk7rscu3y"), - Link.parse("bafybeicprkb6dv56v3ezgj4yffbsueamhkkodfsxvwyaty3okfu6tgq3rm"), - Link.parse("bafybeienx5re7fb3s2crypbkkyp5l5zo5xb5bqfxh67ieq2aivgtaw5bqq"), - Link.parse("bafybeiewng4vb4elq23cjybjhehg2z3lshskzstxzgrhllyb7jsz2dckdq"), - Link.parse("bafybeifz4lbafvzkj7njb3cdr7r3ngl5643jhtghl2ntbvoyx5hocvepvy"), - Link.parse("bafybeibperpo4gxoi7x3g7entslorxizzy3imr44hujjqrus4hfs4ekqge"), - Link.parse("bafybeiamtplq4n5kdlhorxmougus3y54r52frrvotkduzy7kfgyrepvylu"), - Link.parse("bafybeieqvwd6ditluxwzrbvq3ffusuykxbljlqyf7gbf7esi6ake4xh27a"), - Link.parse("bafybeigkk3fanqwihj5qautj4yzluxnh3okblouotd2qkreijejdic2fui"), - Link.parse("bafybeiafn56xmx6hqgs4ig4yc24cdnbzyghjml6yhg3hmmemkrwl4irluu"), - Link.parse("bafybeieu5uzq5jbtuhnaazl36pjygv57virwr3tbdgqujhpya5w7dfosz4"), - Link.parse("bafybeid57gn3655jtgnnocwnjznifyltepqoiu3chbawyy2f263hm3qylm"), - Link.parse("bafybeig3iwqy4v44nvgyabirtbel6sbk6pzfuwdpzj4z26vczda2nycyrq"), - 
Link.parse("bafybeigrpoorhusehwpw2caoe7mw65xaundu227vcxqv6mqfeo65tcwxqm"), - Link.parse("bafybeif3iq6dnq2qixkoqnmyvijplu6x5depgmfgpfncpxkcx5ytajrxxy"), - Link.parse("bafybeidzpkzefoys5ani6qfvrpxyjiolmy6ng445uceov2a33r5bw43qwe"), + Link.parse( + "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" + ), + Link.parse( + "bafybeic66itcox6c3pozwsktz552f3pd3eanqr74jpvjezchwrkpqemjru" + ), + Link.parse( + "bafybeigyad752jkaj6qrlgtvovw5dzvhcj7pfvo5pjxkdlzec3kn3qqcoy" + ), + Link.parse( + "bafybeiflrsirdjonnavtsdg7vb63z7mcnzuymuv6eiwxw2wxqkezhludjm" + ), + Link.parse( + "bafybeigw2ilsvwhg3uglrmryyuk7dtu4yudr5naerrzb5e7ibmk7rscu3y" + ), + Link.parse( + "bafybeicprkb6dv56v3ezgj4yffbsueamhkkodfsxvwyaty3okfu6tgq3rm" + ), + Link.parse( + "bafybeienx5re7fb3s2crypbkkyp5l5zo5xb5bqfxh67ieq2aivgtaw5bqq" + ), + Link.parse( + "bafybeiewng4vb4elq23cjybjhehg2z3lshskzstxzgrhllyb7jsz2dckdq" + ), + Link.parse( + "bafybeifz4lbafvzkj7njb3cdr7r3ngl5643jhtghl2ntbvoyx5hocvepvy" + ), + Link.parse( + "bafybeibperpo4gxoi7x3g7entslorxizzy3imr44hujjqrus4hfs4ekqge" + ), + Link.parse( + "bafybeiamtplq4n5kdlhorxmougus3y54r52frrvotkduzy7kfgyrepvylu" + ), + Link.parse( + "bafybeieqvwd6ditluxwzrbvq3ffusuykxbljlqyf7gbf7esi6ake4xh27a" + ), + Link.parse( + "bafybeigkk3fanqwihj5qautj4yzluxnh3okblouotd2qkreijejdic2fui" + ), + Link.parse( + "bafybeiafn56xmx6hqgs4ig4yc24cdnbzyghjml6yhg3hmmemkrwl4irluu" + ), + Link.parse( + "bafybeieu5uzq5jbtuhnaazl36pjygv57virwr3tbdgqujhpya5w7dfosz4" + ), + Link.parse( + "bafybeid57gn3655jtgnnocwnjznifyltepqoiu3chbawyy2f263hm3qylm" + ), + Link.parse( + "bafybeig3iwqy4v44nvgyabirtbel6sbk6pzfuwdpzj4z26vczda2nycyrq" + ), + Link.parse( + "bafybeigrpoorhusehwpw2caoe7mw65xaundu227vcxqv6mqfeo65tcwxqm" + ), + Link.parse( + "bafybeif3iq6dnq2qixkoqnmyvijplu6x5depgmfgpfncpxkcx5ytajrxxy" + ), + Link.parse( + "bafybeidzpkzefoys5ani6qfvrpxyjiolmy6ng445uceov2a33r5bw43qwe" + ), ] - ) - }) + ); + }); it("nested directory", async () => { - const { readable, writable } = new 
TransformStream() - const blocks = collect(readable) - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const nested = UnixFS.createShardedDirectoryWriter(root) + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const nested = UnixFS.createShardedDirectoryWriter(root); - root.set("nested", await nested.close()) + root.set("nested", await nested.close()); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeiesoparpjbe5rwoo6liouikyw2nypo6v3d3n36vb334oddrmp52mq" ), dagByteLength: 102, - }) - writer.close() + }); + writer.close(); - const items = await blocks + const items = await blocks; assert.deepEqual( items.map(({ cid }) => cid.toString()), [ "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m", "bafybeiesoparpjbe5rwoo6liouikyw2nypo6v3d3n36vb334oddrmp52mq", ] - ) - }) + ); + }); it("double nested directory", async () => { - const { readable, writable } = new TransformStream() - const blocks = collect(readable) - const writer = writable.getWriter() - - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const nested = UnixFS.createShardedDirectoryWriter(root) - - root.set("nested", await nested.close()) - const main = UnixFS.createShardedDirectoryWriter({ writer }) - main.set("root", await root.close()) - const link = await main.close() - writer.close() - const items = await blocks + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); + + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const nested = UnixFS.createShardedDirectoryWriter(root); + + root.set("nested", await nested.close()); + const main = UnixFS.createShardedDirectoryWriter({ writer }); + main.set("root", await root.close()); + const link = await main.close(); 
+ writer.close(); + const items = await blocks; assert.deepEqual( items.map(({ cid }) => cid.toString()), [ @@ -189,27 +231,28 @@ describe("test directory", () => { "bafybeiesoparpjbe5rwoo6liouikyw2nypo6v3d3n36vb334oddrmp52mq", "bafybeifni4qs2xfgtzhk2xw7emp5j7h5ayyw73xizcba2qxry6dc4vqaom", ] - ) - }) + ); + }); it("throws if file already exists", async () => { - const { readable, writable } = new TransformStream() - const blocks = collect(readable) - const writer = writable.getWriter() + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }) + const root = UnixFS.createShardedDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -217,15 +260,16 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) + root.set("hello", hello); assert.throws( () => root.set("hello", bye), /Directory already contains entry with name "hello"/ - ) - root.set("bye", bye) - const link = await root.close() + ); + root.set("bye", bye); + const link = await root.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -233,37 +277,38 @@ describe("test directory", () => { "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa" ), dagByteLength: 164, - }) - writer.close() - const items = await blocks + }); + writer.close(); + const items = await blocks; assert.deepEqual( - items.map(item => item.cid.toString()), + 
items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa", ] - ) - }) + ); + }); it("can overwrite existing", async () => { - const { readable, writable } = new TransformStream() - const blocks = collect(readable) - const writer = writable.getWriter() + const { readable, writable } = new TransformStream(); + const blocks = collect(readable); + const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }) + const root = UnixFS.createShardedDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -271,11 +316,12 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) - root.set("hello", bye, { overwrite: true }) - const link = await root.close() + root.set("hello", hello); + root.set("hello", bye, { overwrite: true }); + const link = await root.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -283,39 +329,40 @@ describe("test directory", () => { "bafybeibzscho4rtevqlxvlen7te535kvrawffcdry42iol2kr5nr3itjgy" ), dagByteLength: 99, - }) - writer.close() - const items = await blocks + }); + writer.close(); + const items = await blocks; assert.deepEqual( - items.map(item => item.cid.toString()), + items.map((item) => item.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", 
"bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeibzscho4rtevqlxvlen7te535kvrawffcdry42iol2kr5nr3itjgy", ] - ) - }) + ); + }); it("can delete entries", async () => { - const { readable, writable } = createChannel() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = createChannel(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }) + const root = UnixFS.createShardedDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - root.set("hello", hello) - root.remove("hello") - const link = await root.close() + root.set("hello", hello); + root.remove("hello"); + const link = await root.close(); assert.deepEqual(link, { /** @type {Link.Link} */ @@ -323,51 +370,52 @@ describe("test directory", () => { "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m" ), dagByteLength: 9, - }) - writer.close() - const blocks = await reader + }); + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m", ] - ) - }) + ); + }); it("throws on invalid filenames", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const hello = await 
importFile(root, ["hello"]) + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const hello = await importFile(root, ["hello"]); assert.throws( () => root.set("hello/world", hello), /Directory entry name "hello\/world" contains forbidden "\/" character/ - ) - writer.close() - }) + ); + writer.close(); + }); it("can not change after close", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }) + const root = UnixFS.createShardedDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -375,59 +423,64 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) + root.set("hello", hello); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }) + }); assert.throws( () => root.set("bye", bye), /Can not change written directory, but you can \.fork\(\) and make changes to it/ - ) + ); - writer.close() - const blocks = await reader + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", 
"bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", ] - ) + ); try { - await root.close() - assert.fail() + await root.close(); + assert.fail(); } catch (/** @type {any} */ err) { - assert.equal(err.message, "Can not change written HAMT directory, but you can .fork() and make changes to it") + assert.equal( + err.message, + "Can not change written HAMT directory, but you can .fork() and make changes to it" + ); } - }) + }); it("can fork and edit", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }) + const root = UnixFS.createShardedDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -435,85 +488,87 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) + root.set("hello", hello); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }) + }); - const fork = root.fork() - fork.set("bye", bye) + const fork = root.fork(); + fork.set("bye", bye); assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: Link.parse( 
"bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa" ), dagByteLength: 164, - }) + }); - writer.close() - const blocks = await reader + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa", ] - ) - }) + ); + }); it("can autoclose", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) - - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) - file.write(new TextEncoder().encode("hello")) - root.set("hello", await file.close()) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); + + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); + file.write(new TextEncoder().encode("hello")); + root.set("hello", await file.close()); assert.deepEqual(await root.close({ closeWriter: true }), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }) + }); - const blocks = await reader + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", ] - ) - }) + ); + }); it("fork into other stream", async () => { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const reader = collect(readable) + const { readable, writable } = new 
TransformStream(); + const writer = writable.getWriter(); + const reader = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }) + const root = UnixFS.createShardedDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]) + const hello = await importFile(root, ["hello"]); assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq" ), contentByteLength: 5, + contentByteOffset: 0, dagByteLength: 13, - }) + }); - const bye = await importFile(root, ["bye"]) + const bye = await importFile(root, ["bye"]); assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -521,63 +576,64 @@ describe("test directory", () => { ), dagByteLength: 11, contentByteLength: 3, - }) + contentByteOffset: 0, + }); - root.set("hello", hello) + root.set("hello", hello); assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }) + }); - const patch = new TransformStream() - const patchWriter = patch.writable.getWriter() - const patchReader = collect(patch.readable) + const patch = new TransformStream(); + const patchWriter = patch.writable.getWriter(); + const patchReader = collect(patch.readable); - const fork = root.fork({ writer: patchWriter }) - fork.set("bye", bye) + const fork = root.fork({ writer: patchWriter }); + fork.set("bye", bye); assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa" ), dagByteLength: 164, - }) + }); - writer.close() - const blocks = await reader + writer.close(); + const blocks = await reader; assert.deepEqual( - blocks.map(block => block.cid.toString()), + blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", 
"bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", ] - ) + ); - patchWriter.close() - const delta = await patchReader + patchWriter.close(); + const delta = await patchReader; assert.deepEqual( - delta.map(block => block.cid.toString()), + delta.map((block) => block.cid.toString()), ["bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa"] - ) - }) + ); + }); it("can close writer", async function () { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const blocks = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const blocks = collect(readable); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); - file.write(encodeUTF8("this file does not have much content\n")) - assert.equal(writable.locked, true) - root.set("file.txt", await file.close()) - const link = await root.close({ releaseLock: true, closeWriter: true }) + file.write(encodeUTF8("this file does not have much content\n")); + assert.equal(writable.locked, true); + root.set("file.txt", await file.close()); + const link = await root.close({ releaseLock: true, closeWriter: true }); - await blocks + await blocks; assert.deepEqual(link, { dagByteLength: 133, @@ -585,24 +641,24 @@ describe("test directory", () => { cid: Link.parse( "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), - }) - }) + }); + }); it("can release writer lock", async function () { - const { readable, writable } = new TransformStream() - const writer = writable.getWriter() - const blocks = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }) - const file = UnixFS.createFileWriter(root) + const { readable, writable } = new TransformStream(); + const writer = writable.getWriter(); + const blocks = 
collect(readable); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + const file = UnixFS.createFileWriter(root); - file.write(encodeUTF8("this file does not have much content\n")) - assert.equal(writable.locked, true) - root.set("file.txt", await file.close()) - const link = await root.close({ releaseLock: true }) - assert.equal(writable.locked, false) + file.write(encodeUTF8("this file does not have much content\n")); + assert.equal(writable.locked, true); + root.set("file.txt", await file.close()); + const link = await root.close({ releaseLock: true }); + assert.equal(writable.locked, false); - writable.close() - await blocks + writable.close(); + await blocks; assert.deepEqual(link, { dagByteLength: 133, @@ -610,190 +666,190 @@ describe("test directory", () => { cid: Link.parse( "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), - }) - }) + }); + }); it("can enumerate entries", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.deepEqual([...root.entries()], []) + assert.deepEqual([...root.entries()], []); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } + }; - root.set("file.txt", fileLink) - assert.deepEqual([...root.entries()], [["file.txt", fileLink]]) - }) + root.set("file.txt", fileLink); + assert.deepEqual([...root.entries()], [["file.txt", fileLink]]); + }); it(".has", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.has("file.txt"), false) + const { writable } = new 
TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.has("file.txt"), false); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }) - assert.equal(root.has("file.txt"), true) + }); + assert.equal(root.has("file.txt"), true); - root.remove("file.txt") - assert.equal(root.has("file.txt"), false) - }) + root.remove("file.txt"); + assert.equal(root.has("file.txt"), false); + }); it(".size", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }) - assert.equal(root.size, 1) + }); + assert.equal(root.size, 1); - root.remove("file.txt") - assert.equal(root.size, 0) - }) + root.remove("file.txt"); + assert.equal(root.size, 0); + }); it("writer state .clear", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } - root.set("file.txt", fileLink) 
- assert.equal(root.size, 1) + }; + root.set("file.txt", fileLink); + assert.equal(root.size, 1); - root.state.entries.clear() - assert.equal(root.size, 0) - }) + root.state.entries.clear(); + assert.equal(root.size, 0); + }); it("writer state .forEach", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } - root.set("file.txt", fileLink) - assert.equal(root.size, 1) - root.state.entries.forEach(entry => assert.deepEqual(entry, fileLink)) - }) + }; + root.set("file.txt", fileLink); + assert.equal(root.size, 1); + root.state.entries.forEach((entry) => assert.deepEqual(entry, fileLink)); + }); it("writer state .get", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } - root.set("file.txt", fileLink) - assert.equal(root.size, 1) - assert.deepEqual(root.state.entries.get("file.txt"), fileLink) - }) + }; + root.set("file.txt", fileLink); + assert.equal(root.size, 1); + assert.deepEqual(root.state.entries.get("file.txt"), fileLink); + }); it("writer state 
.[Symbol.iterator]", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } - root.set("file.txt", fileLink) - assert.equal(root.size, 1) - assert.deepEqual([...root.state.entries], [["file.txt", fileLink]]) - }) + }; + root.set("file.txt", fileLink); + assert.equal(root.size, 1); + assert.deepEqual([...root.state.entries], [["file.txt", fileLink]]); + }); it("writer state .keys", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } - root.set("file.txt", fileLink) - assert.equal(root.size, 1) - assert.deepEqual([...root.state.entries.keys()], ["file.txt"]) - }) + }; + root.set("file.txt", fileLink); + assert.equal(root.size, 1); + assert.deepEqual([...root.state.entries.keys()], ["file.txt"]); + }); it("writer state .values", async function () { - const { writable } = new TransformStream() - const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.equal(root.size, 0) + const { writable } = new TransformStream(); + 
const writer = writable.getWriter(); + const root = UnixFS.createShardedDirectoryWriter({ writer }); + assert.equal(root.size, 0); /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ) + ); const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - } - root.set("file.txt", fileLink) - assert.equal(root.size, 1) - assert.deepEqual([...root.state.entries.values()], [fileLink]) - }) -}) + }; + root.set("file.txt", fileLink); + assert.equal(root.size, 1); + assert.deepEqual([...root.state.entries.values()], [fileLink]); + }); +}); diff --git a/yarn.lock b/yarn.lock index 3729eb7..ec2ae9a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2925,6 +2925,13 @@ typescript@^4.8.4: resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== +uint8arrays@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/uint8arrays/-/uint8arrays-5.1.0.tgz#14047c9bdf825d025b7391299436e5e50e7270f1" + integrity sha512-vA6nFepEmlSKkMBnLBaUMVvAC4G3CTmO58C12y4sq6WPDOR7mOFYOi7GlrQ4djeSbP6JG9Pv9tJDM97PedRSww== + dependencies: + multiformats "^13.0.0" + unbox-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" From a6e23b14f30c89281e4f0418ebb4d0ec8bfcca59 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Tue, 13 May 2025 18:51:04 +0200 Subject: [PATCH 2/6] fix: export file link type --- src/api.ts | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/src/api.ts b/src/api.ts index fec8a76..b91a0a7 100644 --- a/src/api.ts +++ b/src/api.ts @@ -14,7 +14,7 @@ import type { MultihashHasher, MultihashDigest, EncodedFile, -} from "./file.js" +} from "./file.js"; import type { DirectoryEntry, @@ -22,8 +22,8 @@ import type { View as 
DirectoryWriterView, Options as DirectoryWriterOptions, State as DirectoryWriterState, -} from "./directory.js" -import { Metadata } from "./unixfs.js" +} from "./directory.js"; +import { Metadata, FileLink } from "./unixfs.js"; export type { WriterOptions, @@ -47,7 +47,8 @@ export type { MultihashHasher, MultihashDigest, Metadata, -} + FileLink, +}; /** * @@ -56,7 +57,7 @@ export interface Writer { /** * Closes this writer and corresponding */ - close(options?: CloseOptions): Promise + close(options?: CloseOptions): Promise; } /** @@ -70,12 +71,12 @@ export interface View extends Writer { /** * Underlaying stream where [UnixFS][] blocks will be written into. */ - readonly writer: BlockWriter + readonly writer: BlockWriter; /** * Encoder configuration of this writer. */ - readonly settings: EncoderSettings + readonly settings: EncoderSettings; /** * Creates new file writer that will write blocks into the same underlying @@ -84,7 +85,7 @@ export interface View extends Writer { */ createFileWriter( settings?: WriterOptions - ): FileWriterView + ): FileWriterView; /** * Creates new directory writer that will write blocks into the same @@ -94,10 +95,10 @@ export interface View extends Writer { */ createDirectoryWriter( settings?: WriterOptions - ): DirectoryWriterView + ): DirectoryWriterView; } export interface Options { - writable: WritableBlockStream - settings?: EncoderSettings + writable: WritableBlockStream; + settings?: EncoderSettings; } From 215052117a1c5731ca05a11cbc5a9e7ec1000616 Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 16 May 2025 11:53:53 +0200 Subject: [PATCH 3/6] fix: remove semi colons commited --- package.json | 3 +- src/api.ts | 22 +- src/file.js | 68 ++-- src/file/api.ts | 130 +++---- src/file/writer.js | 132 +++---- src/unixfs.ts | 150 ++++---- test/directory.spec.js | 466 ++++++++++++------------ test/file.spec.js | 376 ++++++++++---------- test/lib.spec.js | 140 ++++---- test/sharded-directory.spec.js | 628 
++++++++++++++++----------------- yarn.lock | 7 - 11 files changed, 1057 insertions(+), 1065 deletions(-) diff --git a/package.json b/package.json index f9bffae..d6f4423 100644 --- a/package.json +++ b/package.json @@ -133,6 +133,5 @@ "tsv": "^0.2.0", "typescript": "^4.8.4" }, - "license": "Apache-2.0 OR MIT", - "packageManager": "yarn@1.22.19+sha1.4ba7fc5c6e704fce2066ecbfb0b0d8976fe62447" + "license": "Apache-2.0 OR MIT" } diff --git a/src/api.ts b/src/api.ts index b91a0a7..8f9b4b0 100644 --- a/src/api.ts +++ b/src/api.ts @@ -14,7 +14,7 @@ import type { MultihashHasher, MultihashDigest, EncodedFile, -} from "./file.js"; +} from "./file.js" import type { DirectoryEntry, @@ -22,8 +22,8 @@ import type { View as DirectoryWriterView, Options as DirectoryWriterOptions, State as DirectoryWriterState, -} from "./directory.js"; -import { Metadata, FileLink } from "./unixfs.js"; +} from "./directory.js" +import { Metadata, FileLink } from "./unixfs.js" export type { WriterOptions, @@ -48,7 +48,7 @@ export type { MultihashDigest, Metadata, FileLink, -}; +} /** * @@ -57,7 +57,7 @@ export interface Writer { /** * Closes this writer and corresponding */ - close(options?: CloseOptions): Promise; + close(options?: CloseOptions): Promise } /** @@ -71,12 +71,12 @@ export interface View extends Writer { /** * Underlaying stream where [UnixFS][] blocks will be written into. */ - readonly writer: BlockWriter; + readonly writer: BlockWriter /** * Encoder configuration of this writer. 
*/ - readonly settings: EncoderSettings; + readonly settings: EncoderSettings /** * Creates new file writer that will write blocks into the same underlying @@ -85,7 +85,7 @@ export interface View extends Writer { */ createFileWriter( settings?: WriterOptions - ): FileWriterView; + ): FileWriterView /** * Creates new directory writer that will write blocks into the same @@ -95,10 +95,10 @@ export interface View extends Writer { */ createDirectoryWriter( settings?: WriterOptions - ): DirectoryWriterView; + ): DirectoryWriterView } export interface Options { - writable: WritableBlockStream; - settings?: EncoderSettings; + writable: WritableBlockStream + settings?: EncoderSettings } diff --git a/src/file.js b/src/file.js index b29031d..5fa4e4b 100644 --- a/src/file.js +++ b/src/file.js @@ -1,14 +1,14 @@ -import * as API from "./file/api.js"; -import * as UnixFS from "./codec.js"; -import * as Writer from "./file/writer.js"; -import * as Task from "actor"; -import { panic } from "./writer/util.js"; -import * as FixedSize from "./file/chunker/fixed.js"; -import { sha256 } from "multiformats/hashes/sha2"; -import { CID } from "multiformats/cid"; -import * as Balanced from "./file/layout/balanced.js"; +import * as API from "./file/api.js" +import * as UnixFS from "./codec.js" +import * as Writer from "./file/writer.js" +import * as Task from "actor" +import { panic } from "./writer/util.js" +import * as FixedSize from "./file/chunker/fixed.js" +import { sha256 } from "multiformats/hashes/sha2" +import { CID } from "multiformats/cid" +import * as Balanced from "./file/layout/balanced.js" -export * from "./file/api.js"; +export * from "./file/api.js" /** * @returns {API.EncoderSettings} @@ -21,7 +21,7 @@ export const defaults = () => ({ fileLayout: Balanced.withWidth(174), hasher: sha256, linker: { createLink: CID.createV1 }, -}); +}) /** * @template {unknown} Layout @@ -31,19 +31,19 @@ export const defaults = () => ({ export const configure = (config) => ({ ...defaults(), 
...config, -}); +}) export const UnixFSLeaf = { code: UnixFS.code, name: UnixFS.name, encode: UnixFS.encodeFileChunk, -}; +} export const UnixFSRawLeaf = { code: UnixFS.code, name: UnixFS.name, encode: UnixFS.encodeRaw, -}; +} /** * @template Layout @@ -58,7 +58,7 @@ export const create = ({ }) => new FileWriterView( Writer.init(writer, metadata, configure(settings), initOptions) - ); + ) /** * @template T @@ -68,9 +68,9 @@ export const create = ({ */ export const write = async (view, bytes) => { - await perform(view, Task.send({ type: "write", bytes })); - return view; -}; + await perform(view, Task.send({ type: "write", bytes })) + return view +} /** * @template T @@ -81,22 +81,22 @@ export const close = async ( view, { releaseLock = false, closeWriter = false } = {} ) => { - await perform(view, Task.send({ type: "close" })); - const { state } = view; + await perform(view, Task.send({ type: "close" })) + const { state } = view if (state.status === "linked") { if (closeWriter) { - await view.state.writer.close(); + await view.state.writer.close() } else if (releaseLock) { - view.state.writer.releaseLock(); + view.state.writer.releaseLock() } - return state.link; + return state.link /* c8 ignore next 5 */ } else { panic( `Expected writer to be in 'linked' state after close, but it is in "${state.status}" instead` - ); + ) } -}; +} /** * @template T @@ -106,11 +106,11 @@ export const close = async ( const perform = (view, effect) => Task.fork( Task.loop(effect, (message) => { - const { state, effect } = Writer.update(message, view.state); - view.state = state; - return effect; + const { state, effect } = Writer.update(message, view.state) + view.state = state + return effect }) - ); + ) /** * @template Layout @@ -121,26 +121,26 @@ class FileWriterView { * @param {Writer.State} state */ constructor(state) { - this.state = state; + this.state = state } get writer() { - return this.state.writer; + return this.state.writer } get settings() { - return this.state.config; 
+ return this.state.config } /** * @param {Uint8Array} bytes * @returns {Promise>} */ write(bytes) { - return write(this, bytes); + return write(this, bytes) } /** * @param {API.CloseOptions} [options] * @returns {Promise} */ close(options) { - return close(this, options); + return close(this, options) } } diff --git a/src/file/api.ts b/src/file/api.ts index cde6504..59d62e3 100644 --- a/src/file/api.ts +++ b/src/file/api.ts @@ -1,7 +1,7 @@ -import type { Chunker } from "./chunker/api.js"; -import type { Writer as StreamWriter } from "../writer/api.js"; -import type { LayoutEngine, NodeID } from "./layout/api.js"; -import * as UnixFS from "../unixfs.js"; +import type { Chunker } from "./chunker/api.js" +import type { Writer as StreamWriter } from "../writer/api.js" +import type { LayoutEngine, NodeID } from "./layout/api.js" +import * as UnixFS from "../unixfs.js" import type { Block, BlockEncoder, @@ -9,12 +9,12 @@ import type { MultihashDigest, Link, LinkVersion, -} from "../unixfs.js"; -import type { State } from "./writer.js"; +} from "../unixfs.js" +import type { State } from "./writer.js" -export * from "../writer/api.js"; -import * as ChunkerService from "./chunker.js"; -import init from "rabin-rs/gen/wasm.js"; +export * from "../writer/api.js" +import * as ChunkerService from "./chunker.js" +import init from "rabin-rs/gen/wasm.js" export type { Chunker, @@ -23,104 +23,104 @@ export type { MultihashDigest, Block, State, -}; +} export interface FileWriterService extends EncoderSettings { - writer: BlockWriter; + writer: BlockWriter } export interface WriterOptions { - readonly settings?: EncoderSettings; - readonly metadata?: UnixFS.Metadata; + readonly settings?: EncoderSettings + readonly metadata?: UnixFS.Metadata } export interface EncoderSettings { /** * Chunker which will be used to split file content into chunks. */ - chunker: Chunker; + chunker: Chunker /** * If provided leaves will be encoded as raw blocks, unless file has a * metadata. 
This is what `rawLeaves` options used to be except instead * of boolean you pass an encoder that will be used. */ - fileChunkEncoder: FileChunkEncoder; + fileChunkEncoder: FileChunkEncoder /** * If provided and file contains single chunk it will be encoded with this * encoder. This is what `reduceSingleLeafToSelf` option used to be except * instead of boolean you pass an encoder that will be used. */ - smallFileEncoder: FileChunkEncoder; + smallFileEncoder: FileChunkEncoder - fileEncoder: FileEncoder; + fileEncoder: FileEncoder /** * Builder that will be used to build file DAG from the leaf nodes. */ - fileLayout: LayoutEngine; + fileLayout: LayoutEngine /** * Hasher used to compute multihash for each block in the file. */ - hasher: MultihashHasher; + hasher: MultihashHasher /** * This function is used to create CIDs from multihashes. This is similar * to `cidVersion` option except you give it CID creator to use. */ - linker: Linker; + linker: Linker } export interface InitOptions { - linkMetadataWriter?: LinkMetadataWriter; + linkMetadataWriter?: LinkMetadataWriter } export interface LinkMetadataWriter extends StreamWriter {} export interface Options { - writer: BlockWriter; - metadata?: UnixFS.Metadata; - settings?: EncoderSettings; - initOptions?: InitOptions; + writer: BlockWriter + metadata?: UnixFS.Metadata + settings?: EncoderSettings + initOptions?: InitOptions } export interface CloseOptions { - releaseLock?: boolean; - closeWriter?: boolean; + releaseLock?: boolean + closeWriter?: boolean } export interface BlockWriter extends StreamWriter {} export interface WritableBlockStream { - getWriter(): BlockWriter; + getWriter(): BlockWriter } export type FileChunkEncoder = | BlockEncoder - | BlockEncoder; + | BlockEncoder export interface FileEncoder { - code: PB; - encode(node: UnixFS.File): Uint8Array; + code: PB + encode(node: UnixFS.File): Uint8Array } export interface Linker { createLink( code: Code, hash: MultihashDigest - ): Link; + ): Link } export 
interface EncodedFile { - id: NodeID; - block: Block; - link: UnixFS.FileLink; + id: NodeID + block: Block + link: UnixFS.FileLink } -export type PB = 0x70; -export type RAW = 0x55; +export type PB = 0x70 +export type RAW = 0x55 /** * Interface defines API for importable content that is just a subset of `Blob` @@ -130,9 +130,9 @@ export type RAW = 0x55; * with optional metadata. */ export interface BlobContent extends BlobMetadata { - readonly size: number; + readonly size: number - stream(): ReadableStream; + stream(): ReadableStream // text(): Promise // arrayBuffer(): Promise // slice(start?: number, end?: number, contentType?: string): Blob @@ -142,7 +142,7 @@ export interface BlobContent extends BlobMetadata { * Optional unixfs metadata. */ export interface BlobMetadata extends UnixFS.Metadata { - readonly type: string; + readonly type: string } /** @@ -159,54 +159,54 @@ export interface FileContent extends BlobContent { * **Note:** File name is actually used as a file path which is to imply it * can contain contains `/` delimiters. 
*/ - readonly name: string; + readonly name: string } export type FileState = | OpenFile | ClosedFile - | LinkedFile; + | LinkedFile export interface FileView { - state: State; + state: State } export interface OpenFile { - readonly type: "file"; - readonly status: "open"; - readonly metadata: UnixFS.Metadata; - readonly service: FileWriterService; + readonly type: "file" + readonly status: "open" + readonly metadata: UnixFS.Metadata + readonly service: FileWriterService - writing: boolean; + writing: boolean - chunker: ChunkerService.Chunker; - layout: Layout; + chunker: ChunkerService.Chunker + layout: Layout } export interface ClosedFile { - readonly type: "file"; - readonly status: "closed"; - readonly service: FileWriterService; - readonly metadata: UnixFS.Metadata; - writing: boolean; - chunker: ChunkerService.Chunker; - layout: Layout; + readonly type: "file" + readonly status: "closed" + readonly service: FileWriterService + readonly metadata: UnixFS.Metadata + writing: boolean + chunker: ChunkerService.Chunker + layout: Layout } export interface LinkedFile { - readonly type: "file"; - readonly status: "linked"; + readonly type: "file" + readonly status: "linked" - state: UnixFS.FileLink; + state: UnixFS.FileLink } export interface Writer { - write(bytes: Uint8Array): Promise>; - close(options?: CloseOptions): Promise; + write(bytes: Uint8Array): Promise> + close(options?: CloseOptions): Promise } export interface View extends Writer { - readonly writer: BlockWriter; - readonly settings: EncoderSettings; - state: State; + readonly writer: BlockWriter + readonly settings: EncoderSettings + state: State } diff --git a/src/file/writer.js b/src/file/writer.js index 76dab01..95f8e35 100644 --- a/src/file/writer.js +++ b/src/file/writer.js @@ -1,10 +1,10 @@ -import * as Task from "actor"; -import * as API from "./api.js"; -import * as Layout from "./layout/api.js"; -import * as UnixFS from "../codec.js"; -import * as Chunker from "./chunker.js"; -import { 
EMPTY_BUFFER, panic, unreachable } from "../writer/util.js"; -import * as Queue from "./layout/queue.js"; +import * as Task from "actor" +import * as API from "./api.js" +import * as Layout from "./layout/api.js" +import * as UnixFS from "../codec.js" +import * as Chunker from "./chunker.js" +import { EMPTY_BUFFER, panic, unreachable } from "../writer/util.js" +import * as Queue from "./layout/queue.js" /** * @template Layout @@ -80,23 +80,23 @@ import * as Queue from "./layout/queue.js"; export const update = (message, state) => { switch (message.type) { case "write": - return write(state, message.bytes); + return write(state, message.bytes) case "link": - return link(state, message.link); + return link(state, message.link) /* c8 ignore next 2 */ case "block": - return { state, effect: Task.none() }; + return { state, effect: Task.none() } /* c8 ignore next 2 */ case "linkMetadata": - return { state, effect: Task.none() }; + return { state, effect: Task.none() } case "close": - return close(state); + return close(state) case "end": - return { state, effect: Task.none() }; + return { state, effect: Task.none() } default: - return unreachable`File Writer got unknown message ${message}`; + return unreachable`File Writer got unknown message ${message}` } -}; +} /** * @template Layout @@ -125,8 +125,8 @@ export const init = (writer, metadata, config, options = {}) => { // @see https://github.com/Gozala/vectrie nodeQueue: Queue.mutable(), linkMetadataWriter: options.linkMetadataWriter, - }; -}; + } +} /** * @template Layout * @param {State} state @@ -136,21 +136,21 @@ export const init = (writer, metadata, config, options = {}) => { export const write = (state, bytes) => { if (state.status === "open") { // Chunk up provided bytes - const { chunks, ...chunker } = Chunker.write(state.chunker, bytes); + const { chunks, ...chunker } = Chunker.write(state.chunker, bytes) // Pass chunks to layout engine to produce nodes const { nodes, leaves, layout } = 
state.config.fileLayout.write( state.layout, chunks - ); + ) - const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue); + const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue) // Create leaf encode tasks for all new leaves const tasks = [ ...encodeLeaves(leaves, state.config), ...encodeBranches(linked, state.config), - ]; + ] return { state: { @@ -162,11 +162,11 @@ export const write = (state, bytes) => { effect: Task.listen({ link: Task.effects(tasks), }), - }; + } } else { - return panic("Unable to perform write on closed file"); + return panic("Unable to perform write on closed file") } -}; +} /** * @template Layout @@ -175,9 +175,9 @@ export const write = (state, bytes) => { * @returns {Update} */ export const link = (state, { id, link, block }) => { - let { linked, ...nodeQueue } = Queue.addLink(id, link, state.nodeQueue); + let { linked, ...nodeQueue } = Queue.addLink(id, link, state.nodeQueue) - const tasks = encodeBranches(linked, state.config); + const tasks = encodeBranches(linked, state.config) /** @type {State} */ const newState = @@ -188,14 +188,14 @@ export const link = (state, { id, link, block }) => { link, nodeQueue, } - : { ...state, nodeQueue }; + : { ...state, nodeQueue } // If we just linked a root and there is a **suspended** "end" task we create // a task to resume it. const end = state.status === "closed" && id === state.rootID && state.end ? 
state.end.resume() - : Task.none(); + : Task.none() if (!state.linkMetadataWriter) { return { @@ -205,7 +205,7 @@ export const link = (state, { id, link, block }) => { block: writeBlock(state.writer, block), end, }), - }; + } } return { @@ -216,8 +216,8 @@ export const link = (state, { id, link, block }) => { linkMetadata: writeLinkMetadata(state.linkMetadataWriter, link), end, }), - }; -}; + } +} /** * @template Layout @@ -226,16 +226,16 @@ export const link = (state, { id, link, block }) => { */ export const close = (state) => { if (state.status === "open") { - const { chunks } = Chunker.close(state.chunker); + const { chunks } = Chunker.close(state.chunker) const { layout, ...write } = state.config.fileLayout.write( state.layout, chunks - ); + ) const { root, ...close } = state.config.fileLayout.close( layout, state.metadata - ); + ) const [nodes, leaves] = isLeafNode(root) ? [ @@ -245,14 +245,14 @@ export const close = (state) => { : [ [...write.nodes, ...close.nodes, root], [...write.leaves, ...close.leaves], - ]; + ] - const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue); + const { linked, ...nodeQueue } = Queue.addNodes(nodes, state.nodeQueue) const tasks = [ ...encodeLeaves(leaves, state.config), ...encodeBranches(linked, state.config), - ]; + ] // We want to keep run loop around until root node is linked. To // accomplish this we fork a task that suspends itself, which we will @@ -260,7 +260,7 @@ export const close = (state) => { // Below we join this forked task in our effect, this way effect is not // complete until task forked task is, which will do once we link the // root. - const fork = Task.fork(Task.suspend()); + const fork = Task.fork(Task.suspend()) return { state: { @@ -276,11 +276,11 @@ export const close = (state) => { link: Task.effects(tasks), end: Task.join(fork), }), - }; + } } else { - return { state, effect: Task.none() }; + return { state, effect: Task.none() } } -}; +} /** * Creates concurrent leaf encode tasks. 
Each one will have an ID corresponding @@ -290,7 +290,7 @@ export const close = (state) => { * @param {API.EncoderSettings} config */ const encodeLeaves = (leaves, config) => - leaves.map((leaf) => encodeLeaf(config, leaf, config.fileChunkEncoder)); + leaves.map((leaf) => encodeLeaf(config, leaf, config.fileChunkEncoder)) /** * @param {API.EncoderSettings} config @@ -299,27 +299,27 @@ const encodeLeaves = (leaves, config) => * @returns {Task.Task} */ const encodeLeaf = function* ({ hasher, linker }, { id, content }, encoder) { - const bytes = encoder.encode(content ? asUint8Array(content) : EMPTY_BUFFER); - const hash = yield* Task.wait(hasher.digest(bytes)); - const cid = linker.createLink(encoder.code, hash); + const bytes = encoder.encode(content ? asUint8Array(content) : EMPTY_BUFFER) + const hash = yield* Task.wait(hasher.digest(bytes)) + const cid = linker.createLink(encoder.code, hash) - const block = { cid, bytes }; + const block = { cid, bytes } const link = /** @type {UnixFS.FileLink} */ ({ cid, contentByteLength: content ? content.byteLength : 0, contentByteOffset: content ? 
content.byteOffset : 0, dagByteLength: bytes.byteLength, - }); + }) - return { id, block, link }; -}; + return { id, block, link } +} /** * @param {Queue.LinkedNode[]} nodes * @param {API.EncoderSettings} config */ const encodeBranches = (nodes, config) => - nodes.map((node) => encodeBranch(config, node)); + nodes.map((node) => encodeBranch(config, node)) /** * @template Layout @@ -334,18 +334,18 @@ export const encodeBranch = function* (config, { id, links }, metadata) { layout: "advanced", parts: links, metadata, - }); - const hash = yield* Task.wait(Promise.resolve(config.hasher.digest(bytes))); - const cid = config.linker.createLink(config.fileEncoder.code, hash); - const block = { bytes, cid }; + }) + const hash = yield* Task.wait(Promise.resolve(config.hasher.digest(bytes))) + const cid = config.linker.createLink(config.fileEncoder.code, hash) + const block = { bytes, cid } const link = /** @type {UnixFS.FileLink} */ ({ cid, contentByteLength: UnixFS.cumulativeContentByteLength(links), dagByteLength: UnixFS.cumulativeDagByteLength(bytes, links), - }); + }) - return { id, block, link }; -}; + return { id, block, link } +} /** * @param {API.BlockWriter} writer @@ -355,10 +355,10 @@ export const encodeBranch = function* (config, { id, links }, metadata) { export const writeBlock = function* (writer, block) { if ((writer.desiredSize || 0) <= 0) { - yield* Task.wait(writer.ready); + yield* Task.wait(writer.ready) } - writer.write(block); -}; + writer.write(block) +} /** * @param {API.LinkMetadataWriter} writer @@ -369,13 +369,13 @@ export const writeBlock = function* (writer, block) { export const writeLinkMetadata = function* (writer, link) { /* c8 ignore next 3 */ if (!writer) { - return; + return } if ((writer.desiredSize || 0) <= 0) { - yield* Task.wait(writer.ready); + yield* Task.wait(writer.ready) } - writer.write(link); -}; + writer.write(link) +} /** * @@ -386,10 +386,10 @@ export const writeLinkMetadata = function* (writer, link) { const asUint8Array = 
(buffer) => buffer instanceof Uint8Array ? buffer - : buffer.copyTo(new Uint8Array(buffer.byteLength), 0); + : buffer.copyTo(new Uint8Array(buffer.byteLength), 0) /** * @param {Layout.Node} node * @returns {node is Layout.Leaf} */ -const isLeafNode = (node) => node.children == null; +const isLeafNode = (node) => node.children == null diff --git a/src/unixfs.ts b/src/unixfs.ts index d6980bc..dcc0e15 100644 --- a/src/unixfs.ts +++ b/src/unixfs.ts @@ -7,20 +7,20 @@ import type { Version as LinkVersion, Block as IPLDBlock, BlockView as IPLDBlockView, -} from "multiformats"; -import { Data, type IData } from "../gen/unixfs.js"; +} from "multiformats" +import { Data, type IData } from "../gen/unixfs.js" export type { MultihashHasher, MultibaseEncoder, MultihashDigest, BlockEncoder, -}; -export * as Layout from "./file/layout/api"; +} +export * as Layout from "./file/layout/api" -import NodeType = Data.DataType; +import NodeType = Data.DataType -export { NodeType }; -export type { IData, LinkVersion }; +export { NodeType } +export type { IData, LinkVersion } /** * Type representing any UnixFS node. @@ -33,9 +33,9 @@ export type Node = | Directory | DirectoryShard | ShardedDirectory - | Symlink; + | Symlink -export type File = SimpleFile | AdvancedFile | ComplexFile; +export type File = SimpleFile | AdvancedFile | ComplexFile /** * Logical representation of a file that fits a single block. Note this is only @@ -43,16 +43,16 @@ export type File = SimpleFile | AdvancedFile | ComplexFile; * depending on where you encounter the node (In root of the DAG or not). 
*/ export interface SimpleFile { - readonly metadata?: Metadata; + readonly metadata?: Metadata - readonly type: NodeType.File; - readonly layout: "simple"; - readonly content: Uint8Array; + readonly type: NodeType.File + readonly layout: "simple" + readonly content: Uint8Array } export interface Metadata { - readonly mode?: Mode; - readonly mtime?: MTime; + readonly mode?: Mode + readonly mtime?: MTime } /** @@ -63,14 +63,14 @@ export interface Metadata { */ export interface AdvancedFile { - readonly metadata?: Metadata; + readonly metadata?: Metadata - readonly type: NodeType.File; - readonly layout: "advanced"; - readonly parts: ReadonlyArray; + readonly type: NodeType.File + readonly layout: "advanced" + readonly parts: ReadonlyArray } -export type Chunk = Raw | FileChunk; +export type Chunk = Raw | FileChunk /** * Encodes UnixFS Raw node (a leaf node of the file DAG layout). This @@ -93,12 +93,12 @@ export type Chunk = Raw | FileChunk; * @deprecated */ export interface Raw { - readonly type: NodeType.Raw; + readonly type: NodeType.Raw /** * Raw bytes of the content */ - readonly content: Uint8Array; + readonly content: Uint8Array } /** @@ -126,11 +126,11 @@ export interface Raw { * take `mode` and `mtime` fields into account. */ export interface FileChunk { - readonly type: NodeType.File; - readonly layout: "simple"; - readonly content: Uint8Array; + readonly type: NodeType.File + readonly layout: "simple" + readonly content: Uint8Array - readonly metadata?: Metadata; + readonly metadata?: Metadata } /** @@ -151,26 +151,26 @@ export interface FileChunk { * in any other position (that is ignore `mode`, `mtime` fileds). 
*/ export interface FileShard { - readonly type: NodeType.File; - readonly layout: "advanced"; - readonly parts: ReadonlyArray; + readonly type: NodeType.File + readonly layout: "advanced" + readonly parts: ReadonlyArray } export type FileLink = | ContentDAGLink | ContentDAGLink - | ContentDAGLink; + | ContentDAGLink export interface ContentDAGLink extends DAGLink { /** * Total number of bytes in the file */ - readonly contentByteLength: number; + readonly contentByteLength: number /** * Offset bytes in the file */ - readonly contentByteOffset?: number; + readonly contentByteOffset?: number } /** @@ -180,13 +180,13 @@ export interface DAGLink extends Phantom { /** * *C*ontent *Id*entifier of the target DAG. */ - readonly cid: Link; + readonly cid: Link /** * Cumulative number of bytes in the target DAG, that is number of bytes in * the block and all the blocks it links to. */ - readonly dagByteLength: number; + readonly dagByteLength: number } /** * These type of nodes are not produces by referenece IPFS implementations, yet @@ -202,13 +202,13 @@ export interface DAGLink extends Phantom { * @deprecated */ export interface ComplexFile { - readonly type: NodeType.File; - readonly layout: "complex"; - readonly content: Uint8Array; + readonly type: NodeType.File + readonly layout: "complex" + readonly content: Uint8Array - readonly parts: ReadonlyArray; + readonly parts: ReadonlyArray - readonly metadata?: Metadata; + readonly metadata?: Metadata } /** @@ -217,38 +217,38 @@ export interface ComplexFile { * the other definitions. */ export interface UnknownFile { - readonly type: NodeType.File; + readonly type: NodeType.File - readonly content?: Uint8Array; - readonly parts?: ReadonlyArray; + readonly content?: Uint8Array + readonly parts?: ReadonlyArray - readonly metadata?: Metadata; + readonly metadata?: Metadata } /** * Type for either UnixFS directory representation. 
*/ -export type Directory = FlatDirectory | ShardedDirectory; +export type Directory = FlatDirectory | ShardedDirectory /** * Logacal representation of a directory that fits single block. */ export interface FlatDirectory { - readonly type: NodeType.Directory; - readonly entries: ReadonlyArray; + readonly type: NodeType.Directory + readonly entries: ReadonlyArray - readonly metadata?: Metadata; + readonly metadata?: Metadata } export type DirectoryEntryLink = | NamedDAGLink | NamedDAGLink - | NamedDAGLink; + | NamedDAGLink -export type DirectoryLink = DAGLink; +export type DirectoryLink = DAGLink export interface NamedDAGLink extends DAGLink { - readonly name: string; + readonly name: string } /** @@ -269,43 +269,43 @@ export interface ShardedDirectory extends DirectoryShard {} * `mtime` and `mode` field to be ignored. */ export interface DirectoryShard { - readonly type: NodeType.HAMTShard; + readonly type: NodeType.HAMTShard - readonly bitfield: Uint8Array; + readonly bitfield: Uint8Array /* * HAMT table width (In IPFS it's usually 256) */ - readonly fanout: uint64; + readonly fanout: uint64 /** * Multihash code for the hashing function used (In IPFS it's [murmur3-64][]) * * [murmur3-64]:https://github.com/multiformats/multicodec/blob/master/table.csv#L24 */ - readonly hashType: uint64; + readonly hashType: uint64 - readonly entries: ReadonlyArray; + readonly entries: ReadonlyArray - readonly metadata?: Metadata; + readonly metadata?: Metadata } export type ShardedDirectoryLink = | NamedDAGLink | NamedDAGLink | NamedDAGLink - | NamedDAGLink; + | NamedDAGLink /** * Logical representation of a [symbolic link][]. * * [symbolic link]:https://en.wikipedia.org/wiki/Symbolic_link */ export interface Symlink { - readonly type: NodeType.Symlink; + readonly type: NodeType.Symlink /** * UTF-8 encoded path to the symlink target. 
*/ - readonly content: ByteView; + readonly content: ByteView - readonly metadata?: Metadata; + readonly metadata?: Metadata } /** @@ -317,14 +317,14 @@ export interface UnixTime { * (signed 64bit integer): represents the amount of seconds after or before * the epoch. */ - readonly Seconds: int64; + readonly Seconds: int64 /** * (optional, 32bit unsigned integer ): when specified represents the * fractional part of the mtime as the amount of nanoseconds. The valid * range for this value are the integers [1, 999999999]. */ - readonly FractionalNanoseconds?: fixed32; + readonly FractionalNanoseconds?: fixed32 } /** @@ -350,15 +350,15 @@ export interface UnixTime { * * @see https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/sys_stat.h.html */ -export type Mode = uint32; +export type Mode = uint32 /** * representing the modification time in seconds relative to the unix epoch * 1970-01-01T00:00:00Z. */ export interface MTime { - readonly secs: number; - readonly nsecs?: number; + readonly secs: number + readonly nsecs?: number } /** @@ -370,15 +370,15 @@ export interface ByteView extends Uint8Array, Phantom {} /** * @see https://github.com/ipfs/go-bitfield */ -export type Bitfield = Uint8Array; +export type Bitfield = Uint8Array // TS does not really have these, create aliases so it's aligned closer // to protobuf spec -export type int64 = number; -export type fixed32 = number; -export type uint64 = number; +export type int64 = number +export type fixed32 = number +export type uint64 = number -export type uint32 = number; +export type uint32 = number /** * This is an utility type to retain unused type parameter `T`. It can be used @@ -388,10 +388,10 @@ export interface Phantom { // This field can not be represented because field name is non-existings // unique symbol. But given that field is optional any object will valid // type contstraint. 
- [PhantomKey]?: T; + [PhantomKey]?: T } -declare const PhantomKey: unique symbol; +declare const PhantomKey: unique symbol export interface Link< Data extends unknown = unknown, @@ -401,9 +401,9 @@ export interface Link< > extends IPLDLink {} export interface PBLink { - Name?: string; - Tsize?: number; - Hash: Link; + Name?: string + Tsize?: number + Hash: Link } export interface Block< diff --git a/test/directory.spec.js b/test/directory.spec.js index 161d772..012a6b1 100644 --- a/test/directory.spec.js +++ b/test/directory.spec.js @@ -1,15 +1,15 @@ -import * as UnixFS from "../src/lib.js"; -import { assert } from "chai"; -import { encodeUTF8, Link, collect, importFile } from "./util.js"; +import * as UnixFS from "../src/lib.js" +import { assert } from "chai" +import { encodeUTF8, Link, collect, importFile } from "./util.js" -const createChannel = () => new TransformStream(); +const createChannel = () => new TransformStream() describe("test directory", () => { it("empty dir", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createDirectoryWriter({ writer }); - const link = await root.close(); - writer.close(); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createDirectoryWriter({ writer }) + const link = await root.close() + writer.close() assert.deepEqual(link, { /** @type {Link.Link} */ @@ -17,8 +17,8 @@ describe("test directory", () => { "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" ), dagByteLength: 4, - }); - const output = await collect(readable); + }) + const output = await collect(readable) assert.deepEqual( output.map(($) => $.cid), @@ -27,18 +27,18 @@ describe("test directory", () => { "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" ), ] - ); - }); + ) + }) it("basic file in directory", async () => { - const { readable, writable } = new TransformStream(); - const writer = 
writable.getWriter(); - const blocks = collect(readable); - const root = UnixFS.createDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); - const content = encodeUTF8("this file does not have much content\n"); - file.write(content); - const fileLink = await file.close(); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const blocks = collect(readable) + const root = UnixFS.createDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) + const content = encodeUTF8("this file does not have much content\n") + file.write(content) + const fileLink = await file.close() assert.deepEqual(fileLink, { /** @type {Link.Link} */ @@ -48,10 +48,10 @@ describe("test directory", () => { dagByteLength: 45, contentByteLength: 37, contentByteOffset: 0, - }); + }) - root.set("file.txt", fileLink); - const rootLink = await root.close(); + root.set("file.txt", fileLink) + const rootLink = await root.close() assert.deepEqual(rootLink, { dagByteLength: 101, @@ -59,11 +59,11 @@ describe("test directory", () => { cid: Link.parse( "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), - }); + }) - writer.close(); + writer.close() - const output = await blocks; + const output = await blocks assert.deepEqual( output.map(($) => $.cid), @@ -75,50 +75,50 @@ describe("test directory", () => { "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), ] - ); - }); + ) + }) it("nested directory", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); - const root = UnixFS.createDirectoryWriter({ writer }); - const nested = UnixFS.createDirectoryWriter(root); + const { readable, writable } = new TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() + const root = UnixFS.createDirectoryWriter({ writer }) + const nested = UnixFS.createDirectoryWriter(root) - root.set("nested", 
await nested.close()); + root.set("nested", await nested.close()) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeibjme43s5mbvupa25dl3xpbkmuqeje7hefvavy6k7cuhm3nxz2m3q" ), dagByteLength: 58, - }); - writer.close(); + }) + writer.close() - const items = await blocks; + const items = await blocks assert.deepEqual( items.map(({ cid }) => cid.toString()), [ "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354", "bafybeibjme43s5mbvupa25dl3xpbkmuqeje7hefvavy6k7cuhm3nxz2m3q", ] - ); - }); + ) + }) it("double nested directory", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); - - const root = UnixFS.createDirectoryWriter({ writer }); - const nested = UnixFS.createDirectoryWriter(root); - - root.set("nested", await nested.close()); - const main = UnixFS.createDirectoryWriter({ writer }); - main.set("root", await root.close()); - const link = await main.close(); - writer.close(); - const items = await blocks; + const { readable, writable } = new TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() + + const root = UnixFS.createDirectoryWriter({ writer }) + const nested = UnixFS.createDirectoryWriter(root) + + root.set("nested", await nested.close()) + const main = UnixFS.createDirectoryWriter({ writer }) + main.set("root", await root.close()) + const link = await main.close() + writer.close() + const items = await blocks assert.deepEqual( items.map(({ cid }) => cid.toString()), [ @@ -126,17 +126,17 @@ describe("test directory", () => { "bafybeibjme43s5mbvupa25dl3xpbkmuqeje7hefvavy6k7cuhm3nxz2m3q", "bafybeifr5xx3ihkbvvodn6xgejnkeuzyak3pwgrbqahb2afazqfes6opla", ] - ); - }); + ) + }) it("throws if file already exists", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); + const { readable, writable } = new 
TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() - const root = UnixFS.createDirectoryWriter({ writer }); + const root = UnixFS.createDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -145,9 +145,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -156,15 +156,15 @@ describe("test directory", () => { dagByteLength: 11, contentByteOffset: 0, contentByteLength: 3, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.throws( () => root.set("hello", bye), /Directory already contains entry with name "hello"/ - ); - root.set("bye", bye); - const link = await root.close(); + ) + root.set("bye", bye) + const link = await root.close() assert.deepEqual(link, { /** @type {Link.Link} */ @@ -172,9 +172,9 @@ describe("test directory", () => { "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44" ), dagByteLength: 124, - }); - writer.close(); - const items = await blocks; + }) + writer.close() + const items = await blocks assert.deepEqual( items.map((item) => item.cid.toString()), [ @@ -182,17 +182,17 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44", ] - ); - }); + ) + }) it("can overwrite existing", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); + const { readable, writable } = new TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() - const root = UnixFS.createDirectoryWriter({ writer }); + const 
root = UnixFS.createDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -201,9 +201,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -212,11 +212,11 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); - root.set("hello", bye, { overwrite: true }); - const link = await root.close(); + root.set("hello", hello) + root.set("hello", bye, { overwrite: true }) + const link = await root.close() assert.deepEqual(link, { /** @type {Link.Link} */ @@ -224,9 +224,9 @@ describe("test directory", () => { "bafybeid6gy6b24lpyqtdmch7chsef4wykmxsh3ysuj2ou3wlz3cevdcc4a" ), dagByteLength: 64, - }); - writer.close(); - const items = await blocks; + }) + writer.close() + const items = await blocks assert.deepEqual( items.map((item) => item.cid.toString()), [ @@ -234,17 +234,17 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeid6gy6b24lpyqtdmch7chsef4wykmxsh3ysuj2ou3wlz3cevdcc4a", ] - ); - }); + ) + }) it("can delete entries", async () => { - const { readable, writable } = createChannel(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = createChannel() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }); + const root = UnixFS.createDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -253,11 
+253,11 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - root.set("hello", hello); - root.remove("hello"); - const link = await root.close(); + root.set("hello", hello) + root.remove("hello") + const link = await root.close() assert.deepEqual(link, { /** @type {Link.Link} */ @@ -265,41 +265,41 @@ describe("test directory", () => { "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354" ), dagByteLength: 4, - }); - writer.close(); - const blocks = await reader; + }) + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeiczsscdsbs7ffqz55asqdf3smv6klcw3gofszvwlyarci47bgf354", ] - ); - }); + ) + }) it("throws on invalid filenames", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]); + const root = UnixFS.createDirectoryWriter({ writer }) + const hello = await importFile(root, ["hello"]) assert.throws( () => root.set("hello/world", hello), /Directory entry name "hello\/world" contains forbidden "\/" character/ - ); - writer.close(); - }); + ) + writer.close() + }) it("can not change after close", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }); + const root = UnixFS.createDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await 
importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -308,9 +308,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -319,24 +319,24 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }); + }) assert.throws( () => root.set("bye", bye), /Can not change written directory, but you can \.fork\(\) and make changes to it/ - ); + ) - writer.close(); - const blocks = await reader; + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ @@ -344,17 +344,17 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ); - }); + ) + }) it("can fork and edit", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }); + const root = UnixFS.createDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -363,9 +363,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, 
["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -374,29 +374,29 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }); + }) - const fork = root.fork(); - fork.set("bye", bye); + const fork = root.fork() + fork.set("bye", bye) assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44" ), dagByteLength: 124, - }); + }) - writer.close(); - const blocks = await reader; + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ @@ -405,44 +405,44 @@ describe("test directory", () => { "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44", ] - ); - }); + ) + }) it("can autoclose", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); - - const root = UnixFS.createDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); - file.write(new TextEncoder().encode("hello")); - root.set("hello", await file.close()); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) + + const root = UnixFS.createDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) + file.write(new TextEncoder().encode("hello")) + root.set("hello", await file.close()) assert.deepEqual(await root.close({ closeWriter: true }), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }); 
+ }) - const blocks = await reader; + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ); - }); + ) + }) it("fork into other stream", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createDirectoryWriter({ writer }); + const root = UnixFS.createDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -451,9 +451,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -462,33 +462,33 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }); + }) - const patch = new TransformStream(); - const patchWriter = patch.writable.getWriter(); - const patchReader = collect(patch.readable); + const patch = new TransformStream() + const patchWriter = patch.writable.getWriter() + const patchReader = collect(patch.readable) - const fork = root.fork({ writer: patchWriter }); - fork.set("bye", bye); + const fork = root.fork({ writer: patchWriter }) + fork.set("bye", bye) assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: 
Link.parse( "bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44" ), dagByteLength: 124, - }); + }) - writer.close(); - const blocks = await reader; + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ @@ -496,29 +496,29 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ); + ) - patchWriter.close(); - const delta = await patchReader; + patchWriter.close() + const delta = await patchReader assert.deepEqual( delta.map((block) => block.cid.toString()), ["bafybeibpefc2sgzngxttfwrawvaiewk4hj5yxdp5kik52jpds5ujg3ij44"] - ); - }); + ) + }) it("can close writer", async function () { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const blocks = collect(readable); - const root = UnixFS.createDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const blocks = collect(readable) + const root = UnixFS.createDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) - file.write(encodeUTF8("this file does not have much content\n")); - assert.equal(writable.locked, true); - root.set("file.txt", await file.close()); - const link = await root.close({ releaseLock: true, closeWriter: true }); + file.write(encodeUTF8("this file does not have much content\n")) + assert.equal(writable.locked, true) + root.set("file.txt", await file.close()) + const link = await root.close({ releaseLock: true, closeWriter: true }) - await blocks; + await blocks assert.deepEqual(link, { dagByteLength: 101, @@ -526,24 +526,24 @@ describe("test directory", () => { cid: Link.parse( "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), - }); - }); + }) + }) it("can release writer lock", async function () { - const { readable, writable } = 
new TransformStream(); - const writer = writable.getWriter(); - const blocks = collect(readable); - const root = UnixFS.createDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const blocks = collect(readable) + const root = UnixFS.createDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) - file.write(encodeUTF8("this file does not have much content\n")); - assert.equal(writable.locked, true); - root.set("file.txt", await file.close()); - const link = await root.close({ releaseLock: true }); - assert.equal(writable.locked, false); + file.write(encodeUTF8("this file does not have much content\n")) + assert.equal(writable.locked, true) + root.set("file.txt", await file.close()) + const link = await root.close({ releaseLock: true }) + assert.equal(writable.locked, false) - writable.close(); - await blocks; + writable.close() + await blocks assert.deepEqual(link, { dagByteLength: 101, @@ -551,68 +551,68 @@ describe("test directory", () => { cid: Link.parse( "bafybeic7trkgurgp22uhxq5rnii5e75v4m4hf2ovohyxwntm4ymp7myh5i" ), - }); - }); + }) + }) it("can enumerate entries", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createDirectoryWriter({ writer }); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createDirectoryWriter({ writer }) - assert.deepEqual([...root.entries()], []); + assert.deepEqual([...root.entries()], []) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; + } - root.set("file.txt", fileLink); - assert.deepEqual([...root.entries()], [["file.txt", fileLink]]); - }); + root.set("file.txt", fileLink) + assert.deepEqual([...root.entries()], 
[["file.txt", fileLink]]) + }) it(".has", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createDirectoryWriter({ writer }); - assert.equal(root.has("file.txt"), false); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createDirectoryWriter({ writer }) + assert.equal(root.has("file.txt"), false) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }); - assert.equal(root.has("file.txt"), true); + }) + assert.equal(root.has("file.txt"), true) - root.remove("file.txt"); - assert.equal(root.has("file.txt"), false); - }); + root.remove("file.txt") + assert.equal(root.has("file.txt"), false) + }) it(".size", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createDirectoryWriter({ writer }); - assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }); - assert.equal(root.size, 1); + }) + assert.equal(root.size, 1) - root.remove("file.txt"); - assert.equal(root.size, 0); - }); -}); + root.remove("file.txt") + assert.equal(root.size, 0) + }) +}) diff --git a/test/file.spec.js b/test/file.spec.js index d583890..c6faefe 100644 --- a/test/file.spec.js +++ b/test/file.spec.js @@ -1,90 +1,90 @@ /* eslint-env mocha */ -import { assert } from "chai"; -import { encodeUTF8, Link, hashrecur, collect } from "./util.js"; -import * as UnixFS from "../src/lib.js"; -import * as Trickle from 
"../src/file/layout/trickle.js"; -import * as Balanced from "../src/file/layout/balanced.js"; -import * as FixedSize from "../src/file/chunker/fixed.js"; -import * as Rabin from "../src/file/chunker/rabin.js"; -import { sha256 } from "multiformats/hashes/sha2"; - -const CHUNK_SIZE = 262144; +import { assert } from "chai" +import { encodeUTF8, Link, hashrecur, collect } from "./util.js" +import * as UnixFS from "../src/lib.js" +import * as Trickle from "../src/file/layout/trickle.js" +import * as Balanced from "../src/file/layout/balanced.js" +import * as FixedSize from "../src/file/chunker/fixed.js" +import * as Rabin from "../src/file/chunker/rabin.js" +import { sha256 } from "multiformats/hashes/sha2" + +const CHUNK_SIZE = 262144 describe("test file", () => { it("basic file", async function () { - this.timeout(30000); - const content = encodeUTF8("this file does not have much content\n"); - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - - const file = UnixFS.createFileWriter({ writer }); - await file.write(content); - const link = await file.close(); - writer.close(); - - assert.equal(link.contentByteLength, 37); - assert.equal(link.dagByteLength, 45); + this.timeout(30000) + const content = encodeUTF8("this file does not have much content\n") + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + + const file = UnixFS.createFileWriter({ writer }) + await file.write(content) + const link = await file.close() + writer.close() + + assert.equal(link.contentByteLength, 37) + assert.equal(link.dagByteLength, 45) assert.equal( link.cid.toString(), "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) - const blocks = readable.getReader(); - const read = await blocks.read(); + const blocks = readable.getReader() + const read = await blocks.read() if (read.done) { - assert.fail("expected to get a block"); + assert.fail("expected to get a block") } - const block = 
read.value; + const block = read.value assert.deepEqual( block.cid.toString(), "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) - const end = await blocks.read(); - assert.deepEqual(end, { done: true, value: undefined }); - }); + const end = await blocks.read() + assert.deepEqual(end, { done: true, value: undefined }) + }) it("splits into 3 chunks", async function () { const rawFiles = [ new Uint8Array(CHUNK_SIZE).fill(1), new Uint8Array(CHUNK_SIZE).fill(2), new Uint8Array(CHUNK_SIZE).fill(3), - ]; + ] const { readable, writable } = new TransformStream( {}, {}, { highWaterMark: 5, } - ); - const writer = writable.getWriter(); + ) + const writer = writable.getWriter() // Capture links metadata /** @type {import('../src/unixfs.js').FileLink[]} */ - const linkMetadataItems = []; + const linkMetadataItems = [] const { readable: linkMetadataReadable, writable: linkMetadataWritable } = - new TransformStream(); + new TransformStream() // Start consuming links stream asynchronously void (async () => { - const reader = linkMetadataReadable.getReader(); + const reader = linkMetadataReadable.getReader() while (true) { - const { done, value } = await reader.read(); - if (done) break; - linkMetadataItems.push(value); + const { done, value } = await reader.read() + if (done) break + linkMetadataItems.push(value) } - })(); + })() const file = UnixFS.createFileWriter({ writer, initOptions: { linkMetadataWriter: linkMetadataWritable.getWriter(), }, - }); + }) for (const rawFile of rawFiles) { - file.write(rawFile); + file.write(rawFile) } - const link = await file.close(); + const link = await file.close() // Check the root CID // TODO: So go-ipfs sets CIDv0 links which casuse a mismatch @@ -95,67 +95,67 @@ describe("test file", () => { cid: Link.parse( "bafybeiegda62p2cdi5sono3h3hqjnxwc56z4nocynrj44rz7rtc2p246cy" ), - }); + }) - const blocks = readable.getReader(); + const blocks = readable.getReader() // Check the first block - const r1 = await 
blocks.read(); + const r1 = await blocks.read() if (r1.done) { - assert.fail("expected to get a block"); + assert.fail("expected to get a block") } assert.deepEqual( r1.value.cid, Link.parse("bafybeihhsdoupgd3fnl3e3367ymsanmikafpllldsdt37jzyoh6nuatowe") - ); - const l1 = linkMetadataItems.find((l) => l.cid.equals(r1.value.cid)); - assert.isTrue(l1 !== undefined); - assert.equal(l1?.contentByteLength, CHUNK_SIZE); - assert.equal(l1?.dagByteLength, CHUNK_SIZE + 14); - assert.equal(l1?.contentByteOffset, 0); + ) + const l1 = linkMetadataItems.find((l) => l.cid.equals(r1.value.cid)) + assert.isTrue(l1 !== undefined) + assert.equal(l1?.contentByteLength, CHUNK_SIZE) + assert.equal(l1?.dagByteLength, CHUNK_SIZE + 14) + assert.equal(l1?.contentByteOffset, 0) // Check the second block - const r2 = await blocks.read(); + const r2 = await blocks.read() if (r2.done) { - assert.fail("expected to get a block"); + assert.fail("expected to get a block") } assert.deepEqual( r2.value.cid, Link.parse("bafybeief3dmadxfymhhhrflqytqmlhlz47w6glaxvyzmm6s6tpfb6izzee") - ); - const l2 = linkMetadataItems.find((l) => l.cid.equals(r2.value.cid)); - assert.isTrue(l2 !== undefined); - assert.equal(l2?.contentByteLength, CHUNK_SIZE); - assert.equal(l2?.dagByteLength, CHUNK_SIZE + 14); - assert.equal(l2?.contentByteOffset, CHUNK_SIZE); + ) + const l2 = linkMetadataItems.find((l) => l.cid.equals(r2.value.cid)) + assert.isTrue(l2 !== undefined) + assert.equal(l2?.contentByteLength, CHUNK_SIZE) + assert.equal(l2?.dagByteLength, CHUNK_SIZE + 14) + assert.equal(l2?.contentByteOffset, CHUNK_SIZE) // Check the third block - const r3 = await blocks.read(); + const r3 = await blocks.read() if (r3.done) { - assert.fail("expected to get a block"); + assert.fail("expected to get a block") } assert.deepEqual( r3.value.cid, Link.parse("bafybeihznihf5g5ibdyoawn7uu3inlyqrxjv63lt6lop6h3w6rzwrp67a4") - ); - const l3 = linkMetadataItems.find((l) => l.cid.equals(r3.value.cid)); - assert.isTrue(l3 !== undefined); - 
assert.equal(l3?.contentByteLength, CHUNK_SIZE); - assert.equal(l3?.dagByteLength, CHUNK_SIZE + 14); - assert.equal(l3?.contentByteOffset, CHUNK_SIZE * 2); + ) + const l3 = linkMetadataItems.find((l) => l.cid.equals(r3.value.cid)) + assert.isTrue(l3 !== undefined) + assert.equal(l3?.contentByteLength, CHUNK_SIZE) + assert.equal(l3?.dagByteLength, CHUNK_SIZE + 14) + assert.equal(l3?.contentByteOffset, CHUNK_SIZE * 2) - await writer.close(); + await writer.close() // Check root assert.isTrue( linkMetadataItems.find((l) => l.cid.equals(link.cid)) !== undefined - ); - assert.equal(linkMetadataItems.length, 4); - }); + ) + assert.equal(linkMetadataItems.length, 4) + }) it("--chunker=size-65535 --trickle=false --raw-leaves=false --cid-version=1", async () => { - const chunkSize = 65535; - const { readable, writable } = new TransformStream(); + const chunkSize = 65535 + const { readable, writable } = new TransformStream() const settings = { chunker: FixedSize.withMaxChunkSize(chunkSize), fileChunkEncoder: UnixFS.UnixFSLeaf, @@ -164,24 +164,24 @@ describe("test file", () => { linker: { createLink: Link.create }, hasher: sha256, fileEncoder: UnixFS, - }; - const writer = writable.getWriter(); - collect(readable); + } + const writer = writable.getWriter() + collect(readable) - const file = UnixFS.createFileWriter({ writer, settings }); + const file = UnixFS.createFileWriter({ writer, settings }) - const size = Math.round(chunkSize * 2.2); - const FRAME = Math.round(size / 10); - let offset = 0; - let n = 0; + const size = Math.round(chunkSize * 2.2) + const FRAME = Math.round(size / 10) + let offset = 0 + let n = 0 while (offset < size) { - const slice = new Uint8Array(Math.min(FRAME, size - offset)).fill(++n); - file.write(slice); - offset += FRAME; + const slice = new Uint8Array(Math.min(FRAME, size - offset)).fill(++n) + file.write(slice) + offset += FRAME } - const link = await file.close(); - console.log("link", link); + const link = await file.close() + 
console.log("link", link) assert.deepEqual(link, { /** @type {Link.Link} */ cid: Link.parse( @@ -189,63 +189,63 @@ describe("test file", () => { ), contentByteLength: 144177, dagByteLength: 144372, - }); + }) - await writer.close(); - }); + await writer.close() + }) it("chunks with rabin chunker", async function () { - this.timeout(30000); + this.timeout(30000) const content = hashrecur({ byteLength: CHUNK_SIZE * 2, - }); - const chunker = await Rabin.create(); + }) + const chunker = await Rabin.create() - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() - const settings = UnixFS.configure({ chunker }); + const settings = UnixFS.configure({ chunker }) - const collector = collect(readable); - const file = UnixFS.createFileWriter({ writer, settings }); + const collector = collect(readable) + const file = UnixFS.createFileWriter({ writer, settings }) for await (const slice of content) { - file.write(slice); + file.write(slice) } - const link = await file.close(); - writer.close(); - const blocks = await collector; + const link = await file.close() + writer.close() + const blocks = await collector assert.deepEqual( link.cid, Link.parse("bafybeicj5kf4mohavbbh4j5izwy3k23cysewxfhgtmlaoxq6sewx2tsr7u") - ); + ) - assert.deepEqual((await blocks).length, 4); - }); + assert.deepEqual((await blocks).length, 4) + }) it("trickle layout", async function () { - this.timeout(30000); + this.timeout(30000) const content = hashrecur({ byteLength: CHUNK_SIZE * 2, - }); - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); + }) + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(1300), fileLayout: Trickle, fileChunkEncoder: UnixFS.UnixFSRawLeaf, - }); + }) - const file = UnixFS.createFileWriter({ 
writer, settings }); - const collector = collect(readable); + const file = UnixFS.createFileWriter({ writer, settings }) + const collector = collect(readable) for await (const slice of content) { - file.write(slice); + file.write(slice) } - const link = await file.close(); - writer.close(); - const blocks = await collector; + const link = await file.close() + writer.close() + const blocks = await collector assert.deepEqual(link, { /** @type {Link.Link} */ @@ -254,32 +254,32 @@ describe("test file", () => { ), contentByteLength: 524288, dagByteLength: 548251, - }); - }); + }) + }) it("trickle layout with overflow", async function () { - this.timeout(30000); + this.timeout(30000) const content = hashrecur({ byteLength: CHUNK_SIZE * 2, - }); - const { readable, writable } = new TransformStream(); + }) + const { readable, writable } = new TransformStream() - const writer = writable.getWriter(); + const writer = writable.getWriter() const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(100000), fileLayout: Trickle.configure({ maxDirectLeaves: 5 }), fileChunkEncoder: UnixFS.UnixFSRawLeaf, - }); + }) - const blocks = collect(readable); - const file = UnixFS.createFileWriter({ writer, settings }); + const blocks = collect(readable) + const file = UnixFS.createFileWriter({ writer, settings }) for await (const slice of content) { - file.write(slice); + file.write(slice) } - const link = await file.close(); - writer.close(); - await blocks; + const link = await file.close() + writer.close() + await blocks assert.deepEqual(link, { /** @type {Link.Link} */ @@ -288,34 +288,34 @@ describe("test file", () => { ), contentByteLength: 524288, dagByteLength: 524738, - }); - }); + }) + }) it("trickle with several levels deep", async function () { - this.timeout(30000); - const chunkSize = 128; - const maxLeaves = 4; - const leafCount = 42; + this.timeout(30000) + const chunkSize = 128 + const maxLeaves = 4 + const leafCount = 42 - const content = hashrecur({ 
byteLength: chunkSize * leafCount }); - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); + const content = hashrecur({ byteLength: chunkSize * leafCount }) + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(chunkSize), fileLayout: Trickle.configure({ maxDirectLeaves: maxLeaves }), fileChunkEncoder: UnixFS.UnixFSRawLeaf, - }); + }) - const blocks = collect(readable); - const file = UnixFS.createFileWriter({ writer, settings }); + const blocks = collect(readable) + const file = UnixFS.createFileWriter({ writer, settings }) for await (const slice of content) { - file.write(slice); + file.write(slice) } - const link = await file.close(); - writer.close(); - await blocks; + const link = await file.close() + writer.close() + await blocks assert.deepEqual(link, { /** @type {Link.Link} */ @@ -324,19 +324,19 @@ describe("test file", () => { ), contentByteLength: chunkSize * leafCount, dagByteLength: 8411, - }); - }); + }) + }) it("write empty with defaults", async function () { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const file = UnixFS.createFileWriter({ writer }); - const blocks = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const file = UnixFS.createFileWriter({ writer }) + const blocks = collect(readable) - file.write(new Uint8Array()); - const link = await file.close(); - writer.close(); - await blocks; + file.write(new Uint8Array()) + const link = await file.close() + writer.close() + await blocks assert.deepEqual(link, { /** @type {Link.Link} */ @@ -346,18 +346,18 @@ describe("test file", () => { contentByteLength: 0, contentByteOffset: 0, dagByteLength: 6, - }); - }); + }) + }) it("can close writer", async function () { - const { readable, writable } = new TransformStream(); - const 
writer = writable.getWriter(); - const file = UnixFS.createFileWriter({ writer }); - const blocks = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const file = UnixFS.createFileWriter({ writer }) + const blocks = collect(readable) - file.write(encodeUTF8("this file does not have much content\n")); - const link = await file.close({ closeWriter: true }); - await blocks; + file.write(encodeUTF8("this file does not have much content\n")) + const link = await file.close({ closeWriter: true }) + await blocks assert.deepEqual(link, { /** @type {Link.Link} */ @@ -367,21 +367,21 @@ describe("test file", () => { contentByteLength: 37, contentByteOffset: 0, dagByteLength: 45, - }); - }); + }) + }) it("can release writer lock", async function () { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const file = UnixFS.createFileWriter({ writer }); - const blocks = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const file = UnixFS.createFileWriter({ writer }) + const blocks = collect(readable) - file.write(encodeUTF8("this file does not have much content\n")); - const link = await file.close({ releaseLock: true }); - assert.equal(writable.locked, false); + file.write(encodeUTF8("this file does not have much content\n")) + const link = await file.close({ releaseLock: true }) + assert.equal(writable.locked, false) - writable.close(); - await blocks; + writable.close() + await blocks assert.deepEqual(link, { /** @type {Link.Link} */ @@ -391,23 +391,23 @@ describe("test file", () => { contentByteLength: 37, contentByteOffset: 0, dagByteLength: 45, - }); - }); + }) + }) it("can create writer from writer", async function () { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); + const { readable, writable } = new TransformStream() + const writer = 
writable.getWriter() const settings = UnixFS.configure({ chunker: FixedSize.withMaxChunkSize(18), - }); + }) const file = UnixFS.createFileWriter({ writer, settings, - }); + }) - const file2 = UnixFS.createFileWriter(file); - assert.equal(file2.writer, writer); - assert.deepEqual(file2.settings, settings); - }); -}); + const file2 = UnixFS.createFileWriter(file) + assert.equal(file2.writer, writer) + assert.deepEqual(file2.settings, settings) + }) +}) diff --git a/test/lib.spec.js b/test/lib.spec.js index ed4f818..aafc7fc 100644 --- a/test/lib.spec.js +++ b/test/lib.spec.js @@ -1,14 +1,14 @@ -import * as UnixFS from "../src/lib.js"; -import { assert } from "chai"; -import { encodeUTF8, Link, collect, importFile } from "./util.js"; +import * as UnixFS from "../src/lib.js" +import { assert } from "chai" +import { encodeUTF8, Link, collect, importFile } from "./util.js" describe("UnixFS.createWriter", () => { it("UnixFS.createFileWriter", async () => { - const { readable, writable } = new TransformStream(); - const reader = collect(readable); - const writer = UnixFS.createWriter({ writable }); - const file = UnixFS.createFileWriter(writer); - file.write(new TextEncoder().encode("hello world")); + const { readable, writable } = new TransformStream() + const reader = collect(readable) + const writer = UnixFS.createWriter({ writable }) + const file = UnixFS.createFileWriter(writer) + file.write(new TextEncoder().encode("hello world")) assert.deepEqual(await file.close(), { /** @type {Link.Link} */ cid: Link.parse( @@ -17,22 +17,22 @@ describe("UnixFS.createWriter", () => { dagByteLength: 19, contentByteOffset: 0, contentByteLength: 11, - }); - writer.close(); + }) + writer.close() - const blocks = await reader; + const blocks = await reader assert.deepEqual( blocks.map(($) => $.cid.toString()), ["bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa"] - ); - }); + ) + }) it("fs.createFileWriter", async () => { - const { readable, writable } = new TransformStream(); 
- const reader = collect(readable); - const writer = UnixFS.createWriter({ writable }); - const file = writer.createFileWriter(); - file.write(encodeUTF8("hello world")); + const { readable, writable } = new TransformStream() + const reader = collect(readable) + const writer = UnixFS.createWriter({ writable }) + const file = writer.createFileWriter() + file.write(encodeUTF8("hello world")) assert.deepEqual(await file.close(), { /** @type {Link.Link} */ cid: Link.parse( @@ -41,34 +41,34 @@ describe("UnixFS.createWriter", () => { dagByteLength: 19, contentByteLength: 11, contentByteOffset: 0, - }); + }) - writer.close(); + writer.close() - const blocks = await reader; + const blocks = await reader assert.deepEqual( blocks.map(($) => $.cid.toString()), ["bafybeihykld7uyxzogax6vgyvag42y7464eywpf55gxi5qpoisibh3c5wa"] - ); - }); + ) + }) it("UnixFS.createDirectoryWriter", async () => { - const { readable, writable } = new TransformStream(); - const reader = collect(readable); - const writer = UnixFS.createWriter({ writable }); - const root = UnixFS.createDirectoryWriter(writer); + const { readable, writable } = new TransformStream() + const reader = collect(readable) + const writer = UnixFS.createWriter({ writable }) + const root = UnixFS.createDirectoryWriter(writer) - root.set("hello", await importFile(root, ["hello"])); + root.set("hello", await importFile(root, ["hello"])) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }); - writer.close(); + }) + writer.close() - const blocks = await reader; + const blocks = await reader assert.deepEqual( blocks.map(($) => $.cid.toString()), @@ -76,26 +76,26 @@ describe("UnixFS.createWriter", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ); - }); + ) + }) it("fs.createDirectoryWriter", async () => { - const { readable, 
writable } = new TransformStream(); - const reader = collect(readable); - const writer = UnixFS.createWriter({ writable }); - const root = writer.createDirectoryWriter(); + const { readable, writable } = new TransformStream() + const reader = collect(readable) + const writer = UnixFS.createWriter({ writable }) + const root = writer.createDirectoryWriter() - root.set("hello", await importFile(root, ["hello"])); + root.set("hello", await importFile(root, ["hello"])) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }); - writer.close(); + }) + writer.close() - const blocks = await reader; + const blocks = await reader assert.deepEqual( blocks.map(($) => $.cid.toString()), @@ -103,32 +103,32 @@ describe("UnixFS.createWriter", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ); - }); + ) + }) it("can release lock", async () => { - const { readable, writable } = new TransformStream(); - const reader = collect(readable); - const writer = UnixFS.createWriter({ writable }); - const root = UnixFS.createDirectoryWriter(writer); + const { readable, writable } = new TransformStream() + const reader = collect(readable) + const writer = UnixFS.createWriter({ writable }) + const root = UnixFS.createDirectoryWriter(writer) - root.set("hello", await importFile(root, ["hello"])); + root.set("hello", await importFile(root, ["hello"])) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq" ), dagByteLength: 66, - }); - writer.close({ closeWriter: false }); - assert.equal(writable.locked, false); + }) + writer.close({ closeWriter: false }) + assert.equal(writable.locked, false) - const wr = writable.getWriter(); - assert.equal(writable.locked, true); + const wr = writable.getWriter() + 
assert.equal(writable.locked, true) - wr.close(); + wr.close() - const blocks = await reader; + const blocks = await reader assert.deepEqual( blocks.map(($) => $.cid.toString()), @@ -136,19 +136,19 @@ describe("UnixFS.createWriter", () => { "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeieuo4clbaujw35wxt7s4jlorbgztvufvdrcxxb6hik5mzfqku2tbq", ] - ); - }); -}); + ) + }) +}) describe("UnixFS.withCapacity", async () => { const { readable, writable } = new TransformStream( {}, UnixFS.withCapacity(128) - ); + ) - const fs = UnixFS.createWriter({ writable }); - const file = UnixFS.createFileWriter(fs); - file.write(new TextEncoder().encode("hello world")); + const fs = UnixFS.createWriter({ writable }) + const file = UnixFS.createFileWriter(fs) + file.write(new TextEncoder().encode("hello world")) assert.deepEqual(await file.close(), { /** @type {Link.Link} */ cid: Link.parse( @@ -157,12 +157,12 @@ describe("UnixFS.withCapacity", async () => { dagByteLength: 19, contentByteLength: 11, contentByteOffset: 0, - }); + }) - assert.equal(fs.writer.desiredSize, 128 - 19); + assert.equal(fs.writer.desiredSize, 128 - 19) - const bye = UnixFS.createFileWriter(fs); - bye.write(new TextEncoder().encode("bye")); + const bye = UnixFS.createFileWriter(fs) + bye.write(new TextEncoder().encode("bye")) assert.deepEqual(await bye.close(), { /** @type {Link.Link} */ @@ -172,8 +172,8 @@ describe("UnixFS.withCapacity", async () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - assert.equal(fs.writer.desiredSize, 128 - 19 - 11); - fs.close(); -}); + assert.equal(fs.writer.desiredSize, 128 - 19 - 11) + fs.close() +}) diff --git a/test/sharded-directory.spec.js b/test/sharded-directory.spec.js index 57219cf..c840664 100644 --- a/test/sharded-directory.spec.js +++ b/test/sharded-directory.spec.js @@ -1,15 +1,15 @@ -import * as UnixFS from "../src/lib.js"; -import { assert } from "chai"; -import { encodeUTF8, Link, collect, importFile } from 
"./util.js"; +import * as UnixFS from "../src/lib.js" +import { assert } from "chai" +import { encodeUTF8, Link, collect, importFile } from "./util.js" -const createChannel = () => new TransformStream(); +const createChannel = () => new TransformStream() describe("test directory", () => { it("empty dir", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const link = await root.close(); - writer.close(); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const link = await root.close() + writer.close() assert.deepEqual(link, { /** @type {Link.Link} */ @@ -17,8 +17,8 @@ describe("test directory", () => { "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m" ), dagByteLength: 9, - }); - const output = await collect(readable); + }) + const output = await collect(readable) assert.deepEqual( output.map(($) => $.cid), @@ -27,18 +27,18 @@ describe("test directory", () => { "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m" ), ] - ); - }); + ) + }) it("basic file in directory", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const blocks = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); - const content = encodeUTF8("this file does not have much content\n"); - file.write(content); - const fileLink = await file.close(); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const blocks = collect(readable) + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) + const content = encodeUTF8("this file does not have much content\n") + file.write(content) + const fileLink = await file.close() 
assert.deepEqual(fileLink, { /** @type {Link.Link} */ @@ -48,10 +48,10 @@ describe("test directory", () => { dagByteLength: 45, contentByteLength: 37, contentByteOffset: 0, - }); + }) - root.set("file.txt", fileLink); - const rootLink = await root.close(); + root.set("file.txt", fileLink) + const rootLink = await root.close() assert.deepEqual(rootLink, { dagByteLength: 133, @@ -59,11 +59,11 @@ cid: Link.parse( "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), - }); + }) - writer.close(); + writer.close() - const output = await blocks; + const output = await blocks assert.deepEqual( output.map(($) => $.cid), @@ -75,18 +75,18 @@ "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), ] - ); - }); + ) + }) it("many files in directory", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const blocks = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); - const content = encodeUTF8("this file does not have much content\n"); - file.write(content); - const fileLink = await file.close(); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const blocks = collect(readable) + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) + const content = encodeUTF8("this file does not have much content\n") + file.write(content) + const fileLink = await file.close() assert.deepEqual(fileLink, { /** @type {Link.Link} */ @@ -96,13 +96,13 @@ cid: Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" ), dagByteLength: 45, contentByteLength: 37, contentByteOffset: 0, - }); + }) - for (let i = 0; i < 100; i++) { - root.set(`file${i}.txt`, fileLink); + for (let i = 0; i < 100; i++) { + root.set(`file${i}.txt`, fileLink) } - const rootLink = await root.close(); + const rootLink = await root.close() 
assert.deepEqual(rootLink, { dagByteLength: 11591, @@ -110,11 +110,11 @@ describe("test directory", () => { cid: Link.parse( "bafybeidzpkzefoys5ani6qfvrpxyjiolmy6ng445uceov2a33r5bw43qwe" ), - }); + }) - writer.close(); + writer.close() - const output = await blocks; + const output = await blocks assert.deepEqual( output.map(($) => $.cid), @@ -180,50 +180,50 @@ describe("test directory", () => { "bafybeidzpkzefoys5ani6qfvrpxyjiolmy6ng445uceov2a33r5bw43qwe" ), ] - ); - }); + ) + }) it("nested directory", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const nested = UnixFS.createShardedDirectoryWriter(root); + const { readable, writable } = new TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const nested = UnixFS.createShardedDirectoryWriter(root) - root.set("nested", await nested.close()); + root.set("nested", await nested.close()) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeiesoparpjbe5rwoo6liouikyw2nypo6v3d3n36vb334oddrmp52mq" ), dagByteLength: 102, - }); - writer.close(); + }) + writer.close() - const items = await blocks; + const items = await blocks assert.deepEqual( items.map(({ cid }) => cid.toString()), [ "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m", "bafybeiesoparpjbe5rwoo6liouikyw2nypo6v3d3n36vb334oddrmp52mq", ] - ); - }); + ) + }) it("double nested directory", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); - - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const nested = UnixFS.createShardedDirectoryWriter(root); - - root.set("nested", await nested.close()); - const main = UnixFS.createShardedDirectoryWriter({ writer 
}); - main.set("root", await root.close()); - const link = await main.close(); - writer.close(); - const items = await blocks; + const { readable, writable } = new TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() + + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const nested = UnixFS.createShardedDirectoryWriter(root) + + root.set("nested", await nested.close()) + const main = UnixFS.createShardedDirectoryWriter({ writer }) + main.set("root", await root.close()) + const link = await main.close() + writer.close() + const items = await blocks assert.deepEqual( items.map(({ cid }) => cid.toString()), [ @@ -231,17 +231,17 @@ describe("test directory", () => { "bafybeiesoparpjbe5rwoo6liouikyw2nypo6v3d3n36vb334oddrmp52mq", "bafybeifni4qs2xfgtzhk2xw7emp5j7h5ayyw73xizcba2qxry6dc4vqaom", ] - ); - }); + ) + }) it("throws if file already exists", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); + const { readable, writable } = new TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }); + const root = UnixFS.createShardedDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -250,9 +250,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -261,15 +261,15 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.throws( () => root.set("hello", bye), 
/Directory already contains entry with name "hello"/ - ); - root.set("bye", bye); - const link = await root.close(); + ) + root.set("bye", bye) + const link = await root.close() assert.deepEqual(link, { /** @type {Link.Link} */ @@ -277,9 +277,9 @@ describe("test directory", () => { "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa" ), dagByteLength: 164, - }); - writer.close(); - const items = await blocks; + }) + writer.close() + const items = await blocks assert.deepEqual( items.map((item) => item.cid.toString()), [ @@ -287,17 +287,17 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa", ] - ); - }); + ) + }) it("can overwrite existing", async () => { - const { readable, writable } = new TransformStream(); - const blocks = collect(readable); - const writer = writable.getWriter(); + const { readable, writable } = new TransformStream() + const blocks = collect(readable) + const writer = writable.getWriter() - const root = UnixFS.createShardedDirectoryWriter({ writer }); + const root = UnixFS.createShardedDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -306,9 +306,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -317,11 +317,11 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); - root.set("hello", bye, { overwrite: true }); - const link = await root.close(); + root.set("hello", hello) + root.set("hello", bye, { overwrite: true }) + const link = await root.close() assert.deepEqual(link, { 
/** @type {Link.Link} */ @@ -329,9 +329,9 @@ describe("test directory", () => { "bafybeibzscho4rtevqlxvlen7te535kvrawffcdry42iol2kr5nr3itjgy" ), dagByteLength: 99, - }); - writer.close(); - const items = await blocks; + }) + writer.close() + const items = await blocks assert.deepEqual( items.map((item) => item.cid.toString()), [ @@ -339,17 +339,17 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeibzscho4rtevqlxvlen7te535kvrawffcdry42iol2kr5nr3itjgy", ] - ); - }); + ) + }) it("can delete entries", async () => { - const { readable, writable } = createChannel(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = createChannel() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }); + const root = UnixFS.createShardedDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -358,11 +358,11 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - root.set("hello", hello); - root.remove("hello"); - const link = await root.close(); + root.set("hello", hello) + root.remove("hello") + const link = await root.close() assert.deepEqual(link, { /** @type {Link.Link} */ @@ -370,41 +370,41 @@ describe("test directory", () => { "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m" ), dagByteLength: 9, - }); - writer.close(); - const blocks = await reader; + }) + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeifoplefg5piy3pjhlp73q7unqx4hwecxeu7opfqfmg352pkpljt6m", ] - ); - }); + ) + }) it("throws on invalid filenames", async () => { - const { readable, 
writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const hello = await importFile(root, ["hello"]); + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const hello = await importFile(root, ["hello"]) assert.throws( () => root.set("hello/world", hello), /Directory entry name "hello\/world" contains forbidden "\/" character/ - ); - writer.close(); - }); + ) + writer.close() + }) it("can not change after close", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }); + const root = UnixFS.createShardedDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -413,9 +413,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -424,24 +424,24 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }); + }) assert.throws( () => root.set("bye", bye), /Can not change written directory, but you can 
\.fork\(\) and make changes to it/ - ); + ) - writer.close(); - const blocks = await reader; + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ @@ -449,27 +449,27 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", ] - ); + ) try { - await root.close(); - assert.fail(); + await root.close() + assert.fail() } catch (/** @type {any} */ err) { assert.equal( err.message, "Can not change written HAMT directory, but you can .fork() and make changes to it" - ); + ) } - }); + }) it("can fork and edit", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }); + const root = UnixFS.createShardedDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -478,9 +478,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -489,29 +489,29 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }); + }) - const fork = root.fork(); - fork.set("bye", bye); + const fork = root.fork() + fork.set("bye", bye) 
assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa" ), dagByteLength: 164, - }); + }) - writer.close(); - const blocks = await reader; + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ @@ -520,44 +520,44 @@ describe("test directory", () => { "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa", ] - ); - }); + ) + }) it("can autoclose", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); - - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); - file.write(new TextEncoder().encode("hello")); - root.set("hello", await file.close()); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = collect(readable) + + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) + file.write(new TextEncoder().encode("hello")) + root.set("hello", await file.close()) assert.deepEqual(await root.close({ closeWriter: true }), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }); + }) - const blocks = await reader; + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ "bafybeid3weurg3gvyoi7nisadzolomlvoxoppe2sesktnpvdve3256n5tq", "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", ] - ); - }); + ) + }) it("fork into other stream", async () => { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const reader = collect(readable); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const reader = 
collect(readable) - const root = UnixFS.createShardedDirectoryWriter({ writer }); + const root = UnixFS.createShardedDirectoryWriter({ writer }) - const hello = await importFile(root, ["hello"]); + const hello = await importFile(root, ["hello"]) assert.deepEqual(hello, { /** @type {Link.Link} */ cid: Link.parse( @@ -566,9 +566,9 @@ describe("test directory", () => { contentByteLength: 5, contentByteOffset: 0, dagByteLength: 13, - }); + }) - const bye = await importFile(root, ["bye"]); + const bye = await importFile(root, ["bye"]) assert.deepEqual(bye, { /** @type {Link.Link} */ cid: Link.parse( @@ -577,33 +577,33 @@ describe("test directory", () => { dagByteLength: 11, contentByteLength: 3, contentByteOffset: 0, - }); + }) - root.set("hello", hello); + root.set("hello", hello) assert.deepEqual(await root.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote" ), dagByteLength: 101, - }); + }) - const patch = new TransformStream(); - const patchWriter = patch.writable.getWriter(); - const patchReader = collect(patch.readable); + const patch = new TransformStream() + const patchWriter = patch.writable.getWriter() + const patchReader = collect(patch.readable) - const fork = root.fork({ writer: patchWriter }); - fork.set("bye", bye); + const fork = root.fork({ writer: patchWriter }) + fork.set("bye", bye) assert.deepEqual(await fork.close(), { /** @type {Link.Link} */ cid: Link.parse( "bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa" ), dagByteLength: 164, - }); + }) - writer.close(); - const blocks = await reader; + writer.close() + const blocks = await reader assert.deepEqual( blocks.map((block) => block.cid.toString()), [ @@ -611,29 +611,29 @@ describe("test directory", () => { "bafybeigl43jff4muiw2m6kzqhm7xpz6ti7etiujklpnc6vpblzjvvwqmta", "bafybeihccqhztoqxfi5mmnv55iofsz7slpzq4gnktf3vzycavqbms5eote", ] - ); + ) - patchWriter.close(); - const delta = await patchReader; + patchWriter.close() + 
const delta = await patchReader assert.deepEqual( delta.map((block) => block.cid.toString()), ["bafybeihxagpxz7lekn7exw6ob526d6pgvnzc3kgtpkbh7ze73e2oc7oxpa"] - ); - }); + ) + }) it("can close writer", async function () { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const blocks = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const blocks = collect(readable) + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) - file.write(encodeUTF8("this file does not have much content\n")); - assert.equal(writable.locked, true); - root.set("file.txt", await file.close()); - const link = await root.close({ releaseLock: true, closeWriter: true }); + file.write(encodeUTF8("this file does not have much content\n")) + assert.equal(writable.locked, true) + root.set("file.txt", await file.close()) + const link = await root.close({ releaseLock: true, closeWriter: true }) - await blocks; + await blocks assert.deepEqual(link, { dagByteLength: 133, @@ -641,24 +641,24 @@ describe("test directory", () => { cid: Link.parse( "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), - }); - }); + }) + }) it("can release writer lock", async function () { - const { readable, writable } = new TransformStream(); - const writer = writable.getWriter(); - const blocks = collect(readable); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - const file = UnixFS.createFileWriter(root); + const { readable, writable } = new TransformStream() + const writer = writable.getWriter() + const blocks = collect(readable) + const root = UnixFS.createShardedDirectoryWriter({ writer }) + const file = UnixFS.createFileWriter(root) - file.write(encodeUTF8("this file does not have much content\n")); - 
assert.equal(writable.locked, true); - root.set("file.txt", await file.close()); - const link = await root.close({ releaseLock: true }); - assert.equal(writable.locked, false); + file.write(encodeUTF8("this file does not have much content\n")) + assert.equal(writable.locked, true) + root.set("file.txt", await file.close()) + const link = await root.close({ releaseLock: true }) + assert.equal(writable.locked, false) - writable.close(); - await blocks; + writable.close() + await blocks assert.deepEqual(link, { dagByteLength: 133, @@ -666,190 +666,190 @@ describe("test directory", () => { cid: Link.parse( "bafybeibbyshlpvztob4mtwznmnkzoc4upgcf6ghaulujxglzgmglcdubtm" ), - }); - }); + }) + }) it("can enumerate entries", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) - assert.deepEqual([...root.entries()], []); + assert.deepEqual([...root.entries()], []) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; + } - root.set("file.txt", fileLink); - assert.deepEqual([...root.entries()], [["file.txt", fileLink]]); - }); + root.set("file.txt", fileLink) + assert.deepEqual([...root.entries()], [["file.txt", fileLink]]) + }) it(".has", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.equal(root.has("file.txt"), false); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.has("file.txt"), false) /** @type {Link.Link} */ const cid = Link.parse( 
"bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }); - assert.equal(root.has("file.txt"), true); + }) + assert.equal(root.has("file.txt"), true) - root.remove("file.txt"); - assert.equal(root.has("file.txt"), false); - }); + root.remove("file.txt") + assert.equal(root.has("file.txt"), false) + }) it(".size", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) root.set("file.txt", { cid, dagByteLength: 45, contentByteLength: 37, - }); - assert.equal(root.size, 1); + }) + assert.equal(root.size, 1) - root.remove("file.txt"); - assert.equal(root.size, 0); - }); + root.remove("file.txt") + assert.equal(root.size, 0) + }) it("writer state .clear", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; - root.set("file.txt", fileLink); - assert.equal(root.size, 1); + } + root.set("file.txt", fileLink) + assert.equal(root.size, 1) - root.state.entries.clear(); - assert.equal(root.size, 0); - }); + root.state.entries.clear() + 
assert.equal(root.size, 0) + }) it("writer state .forEach", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; - root.set("file.txt", fileLink); - assert.equal(root.size, 1); - root.state.entries.forEach((entry) => assert.deepEqual(entry, fileLink)); - }); + } + root.set("file.txt", fileLink) + assert.equal(root.size, 1) + root.state.entries.forEach((entry) => assert.deepEqual(entry, fileLink)) + }) it("writer state .get", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; - root.set("file.txt", fileLink); - assert.equal(root.size, 1); - assert.deepEqual(root.state.entries.get("file.txt"), fileLink); - }); + } + root.set("file.txt", fileLink) + assert.equal(root.size, 1) + assert.deepEqual(root.state.entries.get("file.txt"), fileLink) + }) it("writer state .[Symbol.iterator]", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - 
assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; - root.set("file.txt", fileLink); - assert.equal(root.size, 1); - assert.deepEqual([...root.state.entries], [["file.txt", fileLink]]); - }); + } + root.set("file.txt", fileLink) + assert.equal(root.size, 1) + assert.deepEqual([...root.state.entries], [["file.txt", fileLink]]) + }) it("writer state .keys", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( "bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; - root.set("file.txt", fileLink); - assert.equal(root.size, 1); - assert.deepEqual([...root.state.entries.keys()], ["file.txt"]); - }); + } + root.set("file.txt", fileLink) + assert.equal(root.size, 1) + assert.deepEqual([...root.state.entries.keys()], ["file.txt"]) + }) it("writer state .values", async function () { - const { writable } = new TransformStream(); - const writer = writable.getWriter(); - const root = UnixFS.createShardedDirectoryWriter({ writer }); - assert.equal(root.size, 0); + const { writable } = new TransformStream() + const writer = writable.getWriter() + const root = UnixFS.createShardedDirectoryWriter({ writer }) + assert.equal(root.size, 0) /** @type {Link.Link} */ const cid = Link.parse( 
"bafybeidequ5soq6smzafv4lb76i5dkvl5fzgvrxz4bmlc2k4dkikklv2j4" - ); + ) const fileLink = { cid, dagByteLength: 45, contentByteLength: 37, - }; - root.set("file.txt", fileLink); - assert.equal(root.size, 1); - assert.deepEqual([...root.state.entries.values()], [fileLink]); - }); -}); + } + root.set("file.txt", fileLink) + assert.equal(root.size, 1) + assert.deepEqual([...root.state.entries.values()], [fileLink]) + }) +}) diff --git a/yarn.lock b/yarn.lock index ec2ae9a..3729eb7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2925,13 +2925,6 @@ typescript@^4.8.4: resolved "https://registry.yarnpkg.com/typescript/-/typescript-4.9.5.tgz#095979f9bcc0d09da324d58d03ce8f8374cbe65a" integrity sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g== -uint8arrays@^5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/uint8arrays/-/uint8arrays-5.1.0.tgz#14047c9bdf825d025b7391299436e5e50e7270f1" - integrity sha512-vA6nFepEmlSKkMBnLBaUMVvAC4G3CTmO58C12y4sq6WPDOR7mOFYOi7GlrQ4djeSbP6JG9Pv9tJDM97PedRSww== - dependencies: - multiformats "^13.0.0" - unbox-primitive@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" From a33ec24ceba33917fb7bdbd042b1821ab5aa727e Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Fri, 16 May 2025 12:02:56 +0200 Subject: [PATCH 4/6] fix: better names --- src/file/api.ts | 5 ++--- src/file/writer.js | 20 ++++++++++---------- test/file.spec.js | 20 ++++++++++---------- test/sharded-directory.spec.js | 2 +- 4 files changed, 23 insertions(+), 24 deletions(-) diff --git a/src/file/api.ts b/src/file/api.ts index 59d62e3..b86eeb4 100644 --- a/src/file/api.ts +++ b/src/file/api.ts @@ -14,7 +14,6 @@ import type { State } from "./writer.js" export * from "../writer/api.js" import * as ChunkerService from "./chunker.js" -import init from "rabin-rs/gen/wasm.js" export type { Chunker, @@ -74,10 +73,10 @@ export interface 
EncoderSettings { } export interface InitOptions { - linkMetadataWriter?: LinkMetadataWriter + unixFsFileLinkWriter?: UnixFsFileLinkWriter } -export interface LinkMetadataWriter extends StreamWriter {} +export interface UnixFsFileLinkWriter extends StreamWriter {} export interface Options { writer: BlockWriter diff --git a/src/file/writer.js b/src/file/writer.js index 95f8e35..4cf2ec7 100644 --- a/src/file/writer.js +++ b/src/file/writer.js @@ -13,7 +13,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter - * readonly linkMetadataWriter?: API.LinkMetadataWriter + * readonly unixFsFileLinkWriter?: API.UnixFsFileLinkWriter * chunker: Chunker.Chunker * layout: Layout * nodeQueue: Queue.Queue @@ -26,7 +26,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter - * readonly linkMetadataWriter?: API.LinkMetadataWriter + * readonly unixFsFileLinkWriter?: API.UnixFsFileLinkWriter * readonly rootID: Layout.NodeID * readonly end?: Task.Fork * chunker?: null @@ -41,7 +41,7 @@ import * as Queue from "./layout/queue.js" * readonly metadata: UnixFS.Metadata * readonly config: API.EncoderSettings * readonly writer: API.BlockWriter - * readonly linkMetadataWriter?: API.LinkMetadataWriter + * readonly unixFsFileLinkWriter?: API.UnixFsFileLinkWriter * readonly link: Layout.Link * chunker?: null * layout?: null @@ -66,7 +66,7 @@ import * as Queue from "./layout/queue.js" * |{type:"write", bytes:Uint8Array} * |{type:"link", link:API.EncodedFile} * |{type:"block"} - * |{type:"linkMetadata"} + * |{type:"fileLink"} * |{type: "close"} * |{type: "end"} * } Message @@ -87,7 +87,7 @@ export const update = (message, state) => { case "block": return { state, effect: Task.none() } /* c8 ignore next 2 */ - case "linkMetadata": + case "fileLink": return { state, effect: Task.none() } case 
"close": return close(state) @@ -124,7 +124,7 @@ export const init = (writer, metadata, config, options = {}) => { // overhead. // @see https://github.com/Gozala/vectrie nodeQueue: Queue.mutable(), - linkMetadataWriter: options.linkMetadataWriter, + unixFsFileLinkWriter: options.unixFsFileLinkWriter, } } /** @@ -197,7 +197,7 @@ export const link = (state, { id, link, block }) => { ? state.end.resume() : Task.none() - if (!state.linkMetadataWriter) { + if (!state.unixFsFileLinkWriter) { return { state: newState, effect: Task.listen({ @@ -213,7 +213,7 @@ export const link = (state, { id, link, block }) => { effect: Task.listen({ link: Task.effects(tasks), block: writeBlock(state.writer, block), - linkMetadata: writeLinkMetadata(state.linkMetadataWriter, link), + fileLink: writeFileLink(state.unixFsFileLinkWriter, link), end, }), } @@ -361,12 +361,12 @@ export const writeBlock = function* (writer, block) { } /** - * @param {API.LinkMetadataWriter} writer + * @param {API.UnixFsFileLinkWriter} writer * @param {Layout.Link} link * @returns {Task.Task} */ -export const writeLinkMetadata = function* (writer, link) { +export const writeFileLink = function* (writer, link) { /* c8 ignore next 3 */ if (!writer) { return diff --git a/test/file.spec.js b/test/file.spec.js index c6faefe..57e9625 100644 --- a/test/file.spec.js +++ b/test/file.spec.js @@ -62,23 +62,23 @@ describe("test file", () => { // Capture links metadata /** @type {import('../src/unixfs.js').FileLink[]} */ - const linkMetadataItems = [] - const { readable: linkMetadataReadable, writable: linkMetadataWritable } = + const fileLinkItems = [] + const { readable: fileLinkReadable, writable: fileLinkWritable } = new TransformStream() // Start consuming links stream asynchronously void (async () => { - const reader = linkMetadataReadable.getReader() + const reader = fileLinkReadable.getReader() while (true) { const { done, value } = await reader.read() if (done) break - linkMetadataItems.push(value) + 
fileLinkItems.push(value) } })() const file = UnixFS.createFileWriter({ writer, initOptions: { - linkMetadataWriter: linkMetadataWritable.getWriter(), + unixFsFileLinkWriter: fileLinkWritable.getWriter(), }, }) for (const rawFile of rawFiles) { @@ -108,7 +108,7 @@ describe("test file", () => { r1.value.cid, Link.parse("bafybeihhsdoupgd3fnl3e3367ymsanmikafpllldsdt37jzyoh6nuatowe") ) - const l1 = linkMetadataItems.find((l) => l.cid.equals(r1.value.cid)) + const l1 = fileLinkItems.find((l) => l.cid.equals(r1.value.cid)) assert.isTrue(l1 !== undefined) assert.equal(l1?.contentByteLength, CHUNK_SIZE) assert.equal(l1?.dagByteLength, CHUNK_SIZE + 14) @@ -123,7 +123,7 @@ describe("test file", () => { r2.value.cid, Link.parse("bafybeief3dmadxfymhhhrflqytqmlhlz47w6glaxvyzmm6s6tpfb6izzee") ) - const l2 = linkMetadataItems.find((l) => l.cid.equals(r2.value.cid)) + const l2 = fileLinkItems.find((l) => l.cid.equals(r2.value.cid)) assert.isTrue(l2 !== undefined) assert.equal(l2?.contentByteLength, CHUNK_SIZE) assert.equal(l2?.dagByteLength, CHUNK_SIZE + 14) @@ -138,7 +138,7 @@ describe("test file", () => { r3.value.cid, Link.parse("bafybeihznihf5g5ibdyoawn7uu3inlyqrxjv63lt6lop6h3w6rzwrp67a4") ) - const l3 = linkMetadataItems.find((l) => l.cid.equals(r3.value.cid)) + const l3 = fileLinkItems.find((l) => l.cid.equals(r3.value.cid)) assert.isTrue(l3 !== undefined) assert.equal(l3?.contentByteLength, CHUNK_SIZE) assert.equal(l3?.dagByteLength, CHUNK_SIZE + 14) @@ -148,9 +148,9 @@ describe("test file", () => { // Check root assert.isTrue( - linkMetadataItems.find((l) => l.cid.equals(link.cid)) !== undefined + fileLinkItems.find((l) => l.cid.equals(link.cid)) !== undefined ) - assert.equal(linkMetadataItems.length, 4) + assert.equal(fileLinkItems.length, 4) }) it("--chunker=size-65535 --trickle=false --raw-leaves=false --cid-version=1", async () => { diff --git a/test/sharded-directory.spec.js b/test/sharded-directory.spec.js index c840664..e3c9cd3 100644 --- 
a/test/sharded-directory.spec.js +++ b/test/sharded-directory.spec.js @@ -98,7 +98,7 @@ describe("test directory", () => { contentByteOffset: 0, }) - for (let i = 0 i < 100 i++) { + for (let i = 0; i < 100; i++) { root.set(`file${i}.txt`, fileLink) } From cb996180afb77075d4cf656c2c0f76a691b39d5d Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 21 May 2025 17:26:07 +0100 Subject: [PATCH 5/6] chore: add readme --- README.md | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 83 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 27c0b1c..e6e1f52 100644 --- a/README.md +++ b/README.md @@ -129,6 +129,88 @@ const demo = async blob => { } ``` +### Collecting UnixFS FileLinks + +You can optionally pass a unixFsFileLinkWriter stream to capture metadata for each link (useful for indexing or tracking layout information). + +```js +import { + createWriter, + createFileWriter, +} from '@vascosantos/unixfs' + +import { withMaxChunkSize } from '@vascosantos/unixfs/file/chunker/fixed' +import { withWidth } from '@vascosantos/unixfs/file/layout/balanced' + +const defaultSettings = UnixFS.configure({ + fileChunkEncoder: raw, + smallFileEncoder: raw, + chunker: withMaxChunkSize(1024 * 1024), + fileLayout: withWidth(1024), +}) + +/** + * @param {Blob} blob + * @returns {Promise} + */ +async function collectUnixFsFileLinks(blob) { + const fileLinks = [] + + // Create a stream to collect metadata (FileLinks) + const { readable, writable } = new TransformStream() + + // Set up the main UnixFS writer (data goes nowhere here) + const unixfsWriter = createWriter({ + writable: new WritableStream(), // Discard actual DAG output + settings: defaultSettings, + }) + + // Set up the file writer with link metadata writer + const unixFsFileLinkWriter = writable.getWriter() + + const fileWriter = createFileWriter({ + ...unixfsWriter, + initOptions: { + unixFsFileLinkWriter, + }, + }) + + // Start concurrent reading of the metadata stream + const 
fileLinkReader = readable.getReader() + const readLinks = (async () => { + while (true) { + const { done, value } = await fileLinkReader.read() + if (done) break + fileLinks.push(value) + } + })() + + // Pipe the blob to the file writer + await blob.stream().pipeTo( + new WritableStream({ + async write(chunk) { + await fileWriter.write(chunk) + }, + }) + ) + + // Finalize everything + await fileWriter.close() + await unixfsWriter.close() + await unixFsFileLinkWriter.close() + + // Wait for all links to be read + await readLinks + + return fileLinks +} + +// Usage +const blob = new Blob(['Hello UnixFS links']) +const links = await collectUnixFsFileLinks(blob) +console.log(links) +``` + ## License Licensed under either of @@ -144,4 +226,4 @@ Unless you explicitly state otherwise, any contribution intentionally submitted [readablestream]: https://developer.mozilla.org/en-US/docs/Web/API/ReadableStream [car]: https://ipld.io/specs/transport/car/carv1/ [`transformstream`]: https://developer.mozilla.org/en-US/docs/Web/API/TransformStream -[`writablestream`]: https://developer.mozilla.org/en-US/docs/Web/API/WritableStream +[`writablestream`]: https://developer.mozilla.org/en-US/docs/Web/API/WritableStream \ No newline at end of file From 0d339b0e5b3016341e38f18f29127e13b2cc009b Mon Sep 17 00:00:00 2001 From: Vasco Santos Date: Wed, 21 May 2025 18:35:16 +0200 Subject: [PATCH 6/6] chore: change import to ipld namespace in readme --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index e6e1f52..68e2348 100644 --- a/README.md +++ b/README.md @@ -137,10 +137,10 @@ You can optionally pass a unixFsFileLinkWriter stream to capture metadata for ea import { createWriter, createFileWriter, -} from '@vascosantos/unixfs' +} from '@ipld/unixfs' -import { withMaxChunkSize } from '@vascosantos/unixfs/file/chunker/fixed' -import { withWidth } from '@vascosantos/unixfs/file/layout/balanced' +import { withMaxChunkSize } from 
'@ipld/unixfs/file/chunker/fixed' +import { withWidth } from '@ipld/unixfs/file/layout/balanced' const defaultSettings = UnixFS.configure({ fileChunkEncoder: raw,