diff --git a/.gitignore b/.gitignore index 23f7e863..7a2dbcaf 100644 --- a/.gitignore +++ b/.gitignore @@ -109,3 +109,5 @@ dist # temporary folders **/temp/ +/web-app/tests/smallfiles +.DS_Store diff --git a/Makefile b/Makefile index 464ecf64..9c2b7bd0 100644 --- a/Makefile +++ b/Makefile @@ -27,7 +27,7 @@ remote-store/opentdf-remote-store-$(version).tgz: lib/opentdf-client-$(version). (cd remote-store && npm ci ../lib/opentdf-client-$(version).tgz && npm pack) web-app/opentdf-web-app-$(version).tgz: lib/opentdf-client-$(version).tgz $(shell find web-app -not -path '*/dist*' -and -not -path '*/coverage*' -and -not -path '*/node_modules*') - (cd web-app && npm ci ../lib/opentdf-client-$(version).tgz && npm pack) + (cd web-app && npm ci ../lib/opentdf-client-$(version).tgz && npm pack && npm run build) lib/opentdf-client-$(version).tgz: $(shell find lib -not -path '*/dist*' -and -not -path '*/coverage*' -and -not -path '*/node_modules*') (cd lib && npm ci --including=dev && npm pack) diff --git a/lib/README.md b/lib/README.md index 571e3919..18566d4a 100644 --- a/lib/README.md +++ b/lib/README.md @@ -15,7 +15,7 @@ TDF3 with JSON envelopes. oidcOrigin: keycloakUrl, } const authProvider = await AuthProviders.refreshAuthProvider(oidcCredentials); - const client = new NanoTDFClient(authProvider, access); + const client = new NanoTDFClient({authProvider, kasEndpoint}); const cipherText = await client.encrypt(plainText); const clearText = await client.decrypt(cipherText); ``` diff --git a/lib/src/index.ts b/lib/src/index.ts index 25a39f73..20173bb3 100644 --- a/lib/src/index.ts +++ b/lib/src/index.ts @@ -10,7 +10,7 @@ import { } from './nanotdf/index.js'; import { keyAgreement, extractPublicFromCertToCrypto } from './nanotdf-crypto/index.js'; import { TypedArray, createAttribute, Policy } from './tdf/index.js'; -import { type AuthProvider } from './auth/auth.js'; +import { ClientConfig } from './nanotdf/Client.js'; async function fetchKasPubKey(kasUrl: string): Promise { const kasPubKeyResponse = await fetch(`${kasUrl}/kas_public_key?algorithm=ec:secp256r1`); @@ -33,13 +33,14 @@ async function fetchKasPubKey(kasUrl: string): Promise { * const KAS_URL = 'http://localhost:65432/api/kas/'; * * const ciphertext = '...'; - * const client = new NanoTDFClient( - * await clientSecretAuthProvider({ + * const client = new NanoTDFClient({ + * authProvider: await clientSecretAuthProvider({ * clientId: 'tdf-client', * clientSecret: '123-456', * oidcOrigin: OIDC_ENDPOINT, * }), - * KAS_URL + * kasEndpoint: KAS_URL + * } * ); * client.decrypt(ciphertext) * .then(plaintext => { @@ -120,9 +121,9 @@ export class NanoTDFClient extends Client { */ async encrypt(data: string | TypedArray | ArrayBuffer): Promise { // For encrypt always generate the client ephemeralKeyPair - const ephemeralKeyPair = await this.generateEphemeralKeyPair(); - + const ephemeralKeyPair = await this.ephemeralKeyPair; const initializationVector = this.iv; + if (typeof initializationVector !== 'number') { throw new Error('NanoTDF clients are single use. 
Please generate a new client and keypair.'); } @@ -174,6 +175,10 @@ export class NanoTDFClient extends Client { } } +export type DatasetConfig = ClientConfig & { + maxKeyIterations?: number; +}; + /** * NanoTDF Dataset SDK Client * @@ -186,15 +191,15 @@ export class NanoTDFClient extends Client { * const KAS_URL = 'http://localhost:65432/api/kas/'; * * const ciphertext = '...'; - * const client = new NanoTDFDatasetClient.default( - * await clientSecretAuthProvider({ + * const client = new NanoTDFDatasetClient({ + * authProvider: await clientSecretAuthProvider({ * clientId: 'tdf-client', * clientSecret: '123-456', * exchange: 'client', * oidcOrigin: OIDC_ENDPOINT, * }), - * KAS_URL - * ); + * kasEndpoint: KAS_URL, + * }); * const plaintext = client.decrypt(ciphertext); * console.log('Plaintext', plaintext); * ``` @@ -223,19 +228,18 @@ export class NanoTDFDatasetClient extends Client { * @param ephemeralKeyPair (optional) ephemeral key pair to use * @param maxKeyIterations Max iteration to performe without a key rotation */ - constructor( - authProvider: AuthProvider, - kasUrl: string, - maxKeyIterations: number = NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS, - ephemeralKeyPair?: Required> - ) { - if (maxKeyIterations > NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS) { - throw new Error('Key iteration exceeds max iterations(8388606)'); + constructor(opts: DatasetConfig) { + if ( + opts.maxKeyIterations && + opts.maxKeyIterations > NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS + ) { + throw new Error( + `Key iteration exceeds max iterations(${NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS})` + ); } + super(opts); - super(authProvider, kasUrl, ephemeralKeyPair); - - this.maxKeyIteration = maxKeyIterations; + this.maxKeyIteration = opts.maxKeyIterations || NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS; this.keyIterationCount = 0; } @@ -250,7 +254,7 @@ export class NanoTDFDatasetClient extends Client { // Intial encrypt if (this.keyIterationCount == 0) { // For encrypt always generate the client ephemeralKeyPair - const ephemeralKeyPair = await this.generateEphemeralKeyPair(); + const ephemeralKeyPair = await this.ephemeralKeyPair; if (!this.kasPubKey) { this.kasPubKey = await fetchKasPubKey(this.kasUrl); diff --git a/lib/src/nanotdf/Client.ts b/lib/src/nanotdf/Client.ts index 794fd8db..1c9802b1 100644 --- a/lib/src/nanotdf/Client.ts +++ b/lib/src/nanotdf/Client.ts @@ -16,6 +16,59 @@ import { cryptoPublicToPem, safeUrlCheck, validateSecureUrl } from '../utils.js' const { KeyUsageType, AlgorithmName, NamedCurve } = cryptoEnums; +export interface ClientConfig { + authProvider: AuthProvider; + dpopEnabled?: boolean; + dpopKeys?: Promise; + ephemeralKeyPair?: Promise; + kasEndpoint: string; +} + +function toJWSAlg(c: CryptoKey): string { + const { algorithm } = c; + switch (algorithm.name) { + case 'RSASSA-PKCS1-v1_5': + case 'RSA-PSS': + case 'RSA-OAEP': { + const r = algorithm as RsaHashedKeyGenParams; + switch (r.modulusLength) { + case 2048: + return 'RS256'; + case 3072: + return 'RS384'; + case 3072: + return 'RS512'; + } + } + case 'ECDSA': + case 'ECDH': { + return 'ES256'; + } + } + throw new Error(`Unsupported key algorithm ${JSON.stringify(algorithm)}`); +} + +async function generateEphemeralKeyPair(): Promise { + const { publicKey, privateKey } = await generateKeyPair(); + if (!privateKey || !publicKey) { + throw Error('Key pair generation failed'); + } + return { publicKey, privateKey }; +} + +async function generateSignerKeyPair(): Promise { + const { publicKey, privateKey } = await 
generateKeyPair({ + type: AlgorithmName.ECDSA, + curve: NamedCurve.P256, + keyUsages: [KeyUsageType.Sign, KeyUsageType.Verify], + isExtractable: true, + }); + if (!privateKey || !publicKey) { + throw Error('Signer key pair generation failed'); + } + return { publicKey, privateKey }; +} + /** * A Client encapsulates sessions interacting with TDF3 and nanoTDF backends, KAS and any * plugin-based sessions like identity and further attribute control. Most importantly, it is responsible @@ -63,8 +116,8 @@ export default class Client { readonly dpopEnabled: boolean; dissems: string[] = []; dataAttributes: string[] = []; - protected ephemeralKeyPair?: Required>; - protected requestSignerKeyPair?: Required>; + protected ephemeralKeyPair: Promise; + protected requestSignerKeyPair: Promise; protected iv?: number; /** @@ -74,59 +127,32 @@ export default class Client { * cannot be changed. If a new ephemeral key is desired it a new client should be initialized. * There is no performance impact for creating a new client IFF the ephemeral key pair is provided. */ - constructor( - authProvider: AuthProvider, - kasUrl: string, - ephemeralKeyPair?: Required>, - dpopEnabled = false - ) { + constructor({ + authProvider, + ephemeralKeyPair, + kasEndpoint, + dpopEnabled, + dpopKeys, + }: ClientConfig) { this.authProvider = authProvider; // TODO Disallow http KAS. For now just log as error - validateSecureUrl(kasUrl); - this.kasUrl = kasUrl; - this.allowedKases = [kasUrl]; + validateSecureUrl(kasEndpoint); + this.kasUrl = kasEndpoint; + this.allowedKases = [kasEndpoint]; this.kasPubKey = ''; - this.dpopEnabled = dpopEnabled; + this.dpopEnabled = !!dpopEnabled; + if (dpopKeys) { + this.requestSignerKeyPair = dpopKeys; + } else { + this.requestSignerKeyPair = generateSignerKeyPair(); + } if (ephemeralKeyPair) { this.ephemeralKeyPair = ephemeralKeyPair; - this.iv = 1; + } else { + this.ephemeralKeyPair = generateEphemeralKeyPair(); } - } - - /** - * Get ephemeral key pair - * - * Returns the ephemeral key pair to be used in other clients or undefined if not set or generated - * - * @security allow returning ephemeral key pair has unknown security risks. - */ - getEphemeralKeyPair(): CryptoKeyPair | undefined { - return this.ephemeralKeyPair; - } - - async generateEphemeralKeyPair(): Promise>> { - const { publicKey, privateKey } = await generateKeyPair(); - if (!privateKey || !publicKey) { - throw Error('Key pair generation failed'); - } - this.ephemeralKeyPair = { publicKey, privateKey }; this.iv = 1; - return { publicKey, privateKey }; - } - - async generateSignerKeyPair(): Promise>> { - const { publicKey, privateKey } = await generateKeyPair({ - type: AlgorithmName.ECDSA, - curve: NamedCurve.P256, - keyUsages: [KeyUsageType.Sign, KeyUsageType.Verify], - isExtractable: true, - }); - if (!privateKey || !publicKey) { - throw Error('Signer key pair generation failed'); - } - this.requestSignerKeyPair = { publicKey, privateKey }; - return { publicKey, privateKey }; } /** @@ -150,18 +176,7 @@ export default class Client { * either be set on the first call or passed in the constructor. 
*/ async fetchOIDCToken(): Promise { - // Generate the ephemeral key pair if not set - const promises: Promise>>[] = []; - if (!this.ephemeralKeyPair) { - promises.push(this.generateEphemeralKeyPair()); - } - - if (!this.requestSignerKeyPair) { - promises.push(this.generateSignerKeyPair()); - } - await Promise.all(promises); - - const signer = this.requestSignerKeyPair; + const signer = await this.requestSignerKeyPair; if (!signer) { throw new Error('Unexpected state'); } @@ -190,13 +205,15 @@ export default class Client { // Ensure the ephemeral key pair has been set or generated (see createOidcServiceProvider) await this.fetchOIDCToken(); + const ephemeralKeyPair = await this.ephemeralKeyPair; + const requestSignerKeyPair = await this.requestSignerKeyPair; // Ensure the ephemeral key pair has been set or generated (see fetchEntityObject) - if (!this.ephemeralKeyPair?.privateKey) { + if (!ephemeralKeyPair?.privateKey) { throw new Error('Ephemeral key has not been set or generated'); } - if (!this.requestSignerKeyPair?.privateKey) { + if (!requestSignerKeyPair?.privateKey) { throw new Error('Signer key has not been set or generated'); } @@ -210,13 +227,13 @@ export default class Client { protocol: Client.KAS_PROTOCOL, header: base64.encodeArrayBuffer(nanoTdfHeader), }, - clientPublicKey: await cryptoPublicToPem(this.ephemeralKeyPair.publicKey), + clientPublicKey: await cryptoPublicToPem(ephemeralKeyPair.publicKey), }); const jwtPayload = { requestBody: requestBodyStr }; const requestBody = { - signedRequestToken: await reqSignature(jwtPayload, this.requestSignerKeyPair.privateKey, { - alg: AlgorithmName.ES256, + signedRequestToken: await reqSignature(jwtPayload, requestSignerKeyPair.privateKey, { + alg: toJWSAlg(requestSignerKeyPair.publicKey), }), }; @@ -239,10 +256,10 @@ export default class Client { const iv = entityWrappedKey.subarray(0, ivLength); const encryptedSharedKey = entityWrappedKey.subarray(ivLength); - let publicKey; + let kasPublicKey; try { // Get session public key as crypto key - publicKey = await pemPublicToCrypto(wrappedKey.sessionPublicKey); + kasPublicKey = await pemPublicToCrypto(wrappedKey.sessionPublicKey); } catch (cause) { throw new Error( `PEM Public Key to crypto public key failed. 
Is PEM formatted correctly?\n Caused by: ${cause.message}`, @@ -257,12 +274,13 @@ export default class Client { } catch (e) { throw new Error(`Salting hkdf failed\n Caused by: ${e.message}`); } + const { privateKey } = await this.ephemeralKeyPair; // Get the unwrapping key const unwrappingKey = await keyAgreement( // Ephemeral private key - this.ephemeralKeyPair.privateKey, - publicKey, + privateKey, + kasPublicKey, hkdfSalt ); diff --git a/lib/tdf3/src/client/builders.ts b/lib/tdf3/src/client/builders.ts index 99ee102a..f88c008c 100644 --- a/lib/tdf3/src/client/builders.ts +++ b/lib/tdf3/src/client/builders.ts @@ -7,6 +7,7 @@ import { IllegalArgumentError } from '../../../src/errors.js'; import { PemKeyPair } from '../crypto/declarations.js'; import { EntityObject } from '../../../src/tdf/EntityObject.js'; import { DecoratedReadableStream } from './DecoratedReadableStream.js'; +import { type Chunker } from '../utils/chunkers.js'; export const DEFAULT_SEGMENT_SIZE: number = 1024 * 1024; export type Scope = { @@ -470,6 +471,7 @@ export type DecryptStreamMiddleware = ( export type DecryptSource = | { type: 'buffer'; location: Uint8Array } + | { type: 'chunker'; location: Chunker } | { type: 'remote'; location: string } | { type: 'stream'; location: ReadableStream } | { type: 'file-browser'; location: Blob }; diff --git a/lib/tdf3/src/client/index.ts b/lib/tdf3/src/client/index.ts index 367b3114..fa56a955 100644 --- a/lib/tdf3/src/client/index.ts +++ b/lib/tdf3/src/client/index.ts @@ -95,14 +95,20 @@ const makeChunkable = async (source: DecryptSource) => { // we don't support streams anyways (see zipreader.js) let initialChunker: Chunker; let buf = null; - if (source.type === 'stream') { - buf = await streamToBuffer(source.location); - initialChunker = fromBuffer(buf); - } else if (source.type === 'buffer') { - buf = source.location; - initialChunker = fromBuffer(buf); - } else { - initialChunker = await fromDataSource(source); + switch (source.type) { + case 'stream': + buf = await streamToBuffer(source.location); + initialChunker = fromBuffer(buf); + break; + case 'buffer': + buf = source.location; + initialChunker = fromBuffer(buf); + break; + case 'chunker': + initialChunker = source.location; + break; + default: + initialChunker = await fromDataSource(source); } const magic: string = await getFirstTwoBytes(initialChunker); diff --git a/lib/tdf3/src/crypto/crypto-utils.ts b/lib/tdf3/src/crypto/crypto-utils.ts index 42468f59..4fab0248 100644 --- a/lib/tdf3/src/crypto/crypto-utils.ts +++ b/lib/tdf3/src/crypto/crypto-utils.ts @@ -1,4 +1,5 @@ import { base64 } from '../../../src/encodings/index.js'; +import { IllegalArgumentError } from '../../../src/errors.js'; import { type AnyKeyPair, type PemKeyPair } from './declarations.js'; import { rsaPkcs1Sha256 } from './index.js'; @@ -116,3 +117,20 @@ export const toCryptoKeyPair = async (input: AnyKeyPair): Promise ]); return { privateKey, publicKey }; }; + +export async function cryptoToPem(k: CryptoKey): Promise { + switch (k.type) { + case 'private': { + const exPrivate = await crypto.subtle.exportKey('pkcs8', k); + const privateBase64String = base64.encodeArrayBuffer(exPrivate); + return formatAsPem(privateBase64String, 'PRIVATE KEY'); + } + case 'public': { + const exPublic = await crypto.subtle.exportKey('spki', k); + const publicBase64String = base64.encodeArrayBuffer(exPublic); + return formatAsPem(publicBase64String, 'PUBLIC KEY'); + } + default: + throw new IllegalArgumentError(`unsupported key type [${k.type}]`); + } +} diff --git 
a/lib/tdf3/src/models/attribute-set.ts b/lib/tdf3/src/models/attribute-set.ts index e3ca19b8..806b6bdf 100644 --- a/lib/tdf3/src/models/attribute-set.ts +++ b/lib/tdf3/src/models/attribute-set.ts @@ -1,8 +1,6 @@ import Ajv, { JSONSchemaType } from 'ajv'; import { decodeJwt } from 'jose'; -const verbose = false; - export type AttributeObject = { attribute: string; kasUrl: string; @@ -42,6 +40,8 @@ const validator = (() => { export class AttributeSet { attributes: AttributeObject[]; + verbose: boolean = false; + defaultAttribute?: AttributeObject; constructor() { @@ -103,7 +103,7 @@ export class AttributeSet { if (!result) { // TODO: Determine if an error should be thrown // console.log("WARNING - AttributeSet.addAttribute: AttributeObject is malformed. AddAttribute failed:"); - if (verbose) console.log(attrObj); + if (this.verbose) console.log(attrObj); return null; } // Check for duplicate entries to assure idempotency. diff --git a/lib/tests/web/nano-roundtrip.test.ts b/lib/tests/web/nano-roundtrip.test.ts index 5cce2f17..c7e1cd18 100644 --- a/lib/tests/web/nano-roundtrip.test.ts +++ b/lib/tests/web/nano-roundtrip.test.ts @@ -46,14 +46,14 @@ function initSandbox() { return sandbox; } -const kasUrl = 'http://localhost:65432/api/kas'; +const kasEndpoint = 'http://localhost:65432/api/kas'; describe('Local roundtrip Tests', () => { it('roundtrip string', async () => { // const sandbox = initSandbox(); const sandbox = initSandbox(); try { - const client = new NanoTDFClient(authProvider, kasUrl); + const client = new NanoTDFClient({ authProvider, kasEndpoint }); const keyAgreementSpy = sandbox.spy(globalThis.crypto.subtle, 'deriveKey'); sandbox.stub(client, 'rewrapKey').callsFake(async () => keyAgreementSpy.lastCall.returnValue); const cipherText = await client.encrypt('hello world'); diff --git a/lib/tests/web/nanotdf/Client.test.ts b/lib/tests/web/nanotdf/Client.test.ts index 4238be3a..75a883b5 100644 --- a/lib/tests/web/nanotdf/Client.test.ts +++ b/lib/tests/web/nanotdf/Client.test.ts @@ -4,14 +4,14 @@ import Client from '../../../src/nanotdf/Client.js'; describe('nanotdf client', () => { it('Can create a client with a mock EAS', async () => { - const kasUrl = 'https://etheria.local/kas'; + const kasEndpoint = 'https://etheria.local/kas'; const authProvider = await clientAuthProvider({ clientId: 'string', oidcOrigin: 'string', exchange: 'client', clientSecret: 'password', }); - const client = new Client(authProvider, kasUrl); + const client = new Client({ authProvider, kasEndpoint }); expect(client.authProvider).to.be.ok; }); }); diff --git a/remote-store/package-lock.json b/remote-store/package-lock.json index f9843628..a248882b 100644 --- a/remote-store/package-lock.json +++ b/remote-store/package-lock.json @@ -1649,7 +1649,7 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-10yZrGA4LQBNjUX52+qLld2fTjq2OLxfEmR6kkrlLo6dpuN4p+qUI+i1ducMEcr/4fruKxfj2vMr+0Tg97oolg==", + "integrity": "sha512-nnnGQrjKyAfqAC36C8ErGTBiTJ2xPCQgG0pN93rzA68k8+M2pX8HS41H5jBg/nPNpP/kAfEdR6liOWz9LPkP6g==", "dependencies": { "ajv": "^8.12.0", "axios": "^1.6.1", diff --git a/web-app/package-lock.json b/web-app/package-lock.json index be4c9267..112d1fb3 100644 --- a/web-app/package-lock.json +++ b/web-app/package-lock.json @@ -11,7 +11,8 @@ "dependencies": { "@opentdf/client": "file:../lib/opentdf-client-2.0.0.tgz", "clsx": "^2.0.0", - "native-file-system-adapter": "^3.0.0", + "native-file-system-adapter": "^3.0.1", + "p-limit": "^5.0.0", 
"react": "^18.2.0", "react-dom": "^18.2.0" }, @@ -20,7 +21,7 @@ "@rollup/plugin-inject": "^5.0.3", "@types/react": "^18.2.17", "@types/react-dom": "^18.2.7", - "@types/wicg-file-system-access": "^2020.9.6", + "@types/wicg-file-system-access": "^2023.10.5", "@typescript-eslint/eslint-plugin": "^6.2.1", "@typescript-eslint/parser": "^6.2.1", "@vitejs/plugin-react": "^4.0.4", @@ -602,7 +603,7 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-10yZrGA4LQBNjUX52+qLld2fTjq2OLxfEmR6kkrlLo6dpuN4p+qUI+i1ducMEcr/4fruKxfj2vMr+0Tg97oolg==", + "integrity": "sha512-nnnGQrjKyAfqAC36C8ErGTBiTJ2xPCQgG0pN93rzA68k8+M2pX8HS41H5jBg/nPNpP/kAfEdR6liOWz9LPkP6g==", "dependencies": { "ajv": "^8.12.0", "axios": "^1.6.1", @@ -770,9 +771,10 @@ "license": "MIT" }, "node_modules/@types/wicg-file-system-access": { - "version": "2020.9.6", - "dev": true, - "license": "MIT" + "version": "2023.10.5", + "resolved": "https://registry.npmjs.org/@types/wicg-file-system-access/-/wicg-file-system-access-2023.10.5.tgz", + "integrity": "sha512-e9kZO9kCdLqT2h9Tw38oGv9UNzBBWaR1MzuAavxPcsV/7FJ3tWbU6RI3uB+yKIDPGLkGVbplS52ub0AcRLvrhA==", + "dev": true }, "node_modules/@typescript-eslint/eslint-plugin": { "version": "6.2.1", @@ -1013,17 +1015,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/@vitest/runner/node_modules/yocto-queue": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/@vitest/snapshot": { "version": "0.33.0", "dev": true, @@ -2563,7 +2554,9 @@ } }, "node_modules/native-file-system-adapter": { - "version": "3.0.0", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/native-file-system-adapter/-/native-file-system-adapter-3.0.1.tgz", + "integrity": "sha512-ocuhsYk2SY0906LPc3QIMW+rCV3MdhqGiy7wV5Bf0e8/5TsMjDdyIwhNiVPiKxzTJLDrLT6h8BoV9ERfJscKhw==", "funding": [ { "type": "github", @@ -2574,7 +2567,6 @@ "url": "https://paypal.me/jimmywarting" } ], - "license": "MIT", "engines": { "node": ">=14.8.0" }, @@ -2701,14 +2693,14 @@ } }, "node_modules/p-limit": { - "version": "3.1.0", - "dev": true, - "license": "MIT", + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", "dependencies": { - "yocto-queue": "^0.1.0" + "yocto-queue": "^1.0.0" }, "engines": { - "node": ">=10" + "node": ">=18" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -2728,6 +2720,33 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/p-locate/node_modules/p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate/node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "engines": { + "node": ">=10" + }, + "funding": { + "url": 
"https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parent-module": { "version": "1.0.1", "dev": true, @@ -3732,11 +3751,11 @@ "license": "ISC" }, "node_modules/yocto-queue": { - "version": "0.1.0", - "dev": true, - "license": "MIT", + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==", "engines": { - "node": ">=10" + "node": ">=12.20" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" @@ -4097,7 +4116,7 @@ }, "@opentdf/client": { "version": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-10yZrGA4LQBNjUX52+qLld2fTjq2OLxfEmR6kkrlLo6dpuN4p+qUI+i1ducMEcr/4fruKxfj2vMr+0Tg97oolg==", + "integrity": "sha512-nnnGQrjKyAfqAC36C8ErGTBiTJ2xPCQgG0pN93rzA68k8+M2pX8HS41H5jBg/nPNpP/kAfEdR6liOWz9LPkP6g==", "requires": { "ajv": "^8.12.0", "axios": "^1.6.1", @@ -4218,7 +4237,9 @@ "dev": true }, "@types/wicg-file-system-access": { - "version": "2020.9.6", + "version": "2023.10.5", + "resolved": "https://registry.npmjs.org/@types/wicg-file-system-access/-/wicg-file-system-access-2023.10.5.tgz", + "integrity": "sha512-e9kZO9kCdLqT2h9Tw38oGv9UNzBBWaR1MzuAavxPcsV/7FJ3tWbU6RI3uB+yKIDPGLkGVbplS52ub0AcRLvrhA==", "dev": true }, "@typescript-eslint/eslint-plugin": { @@ -4340,10 +4361,6 @@ "requires": { "yocto-queue": "^1.0.0" } - }, - "yocto-queue": { - "version": "1.0.0", - "dev": true } } }, @@ -5286,7 +5303,9 @@ "dev": true }, "native-file-system-adapter": { - "version": "3.0.0", + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/native-file-system-adapter/-/native-file-system-adapter-3.0.1.tgz", + "integrity": "sha512-ocuhsYk2SY0906LPc3QIMW+rCV3MdhqGiy7wV5Bf0e8/5TsMjDdyIwhNiVPiKxzTJLDrLT6h8BoV9ERfJscKhw==", "requires": { "fetch-blob": "^3.2.0" } @@ -5371,10 +5390,11 @@ } }, "p-limit": { - "version": "3.1.0", - "dev": true, + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-5.0.0.tgz", + "integrity": "sha512-/Eaoq+QyLSiXQ4lyYV23f14mZRQcXnxfHrN0vCai+ak9G0pp9iEQukIIZq5NccEvwRB8PUnZT0KsOoDCINS1qQ==", "requires": { - "yocto-queue": "^0.1.0" + "yocto-queue": "^1.0.0" } }, "p-locate": { @@ -5382,6 +5402,23 @@ "dev": true, "requires": { "p-limit": "^3.0.2" + }, + "dependencies": { + "p-limit": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "requires": { + "yocto-queue": "^0.1.0" + } + }, + "yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true + } } }, "parent-module": { @@ -5927,8 +5964,9 @@ "dev": true }, "yocto-queue": { - "version": "0.1.0", - "dev": true + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-1.0.0.tgz", + "integrity": "sha512-9bnSc/HEW2uRy67wc+T8UwauLuPJVn28jb+GtJY16iiKWyvmYJRXVT4UamsAEGQfPohgr2q4Tq0sQbQlxTfi1g==" } } } diff --git a/web-app/package.json b/web-app/package.json index abb22566..a0e57958 100644 --- a/web-app/package.json +++ b/web-app/package.json @@ -17,7 +17,8 @@ "dependencies": { "@opentdf/client": "file:../lib/opentdf-client-2.0.0.tgz", "clsx": "^2.0.0", - "native-file-system-adapter": "^3.0.0", + "native-file-system-adapter": "^3.0.1", + 
"p-limit": "^5.0.0", "react": "^18.2.0", "react-dom": "^18.2.0" }, @@ -26,7 +27,7 @@ "@rollup/plugin-inject": "^5.0.3", "@types/react": "^18.2.17", "@types/react-dom": "^18.2.7", - "@types/wicg-file-system-access": "^2020.9.6", + "@types/wicg-file-system-access": "^2023.10.5", "@typescript-eslint/eslint-plugin": "^6.2.1", "@typescript-eslint/parser": "^6.2.1", "@vitejs/plugin-react": "^4.0.4", diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index 2b59410b..a04c13dc 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -1,9 +1,13 @@ import { clsx } from 'clsx'; -import { useState, useEffect, type ChangeEvent } from 'react'; +import { useState, useEffect, type ChangeEvent, useRef } from 'react'; import { showSaveFilePicker } from 'native-file-system-adapter'; import './App.css'; -import { TDF3Client, type DecryptSource, NanoTDFClient, AuthProviders } from '@opentdf/client'; +import { type Chunker, type DecryptSource, NanoTDFClient, TDF3Client } from '@opentdf/client'; import { type SessionInformation, OidcClient } from './session.js'; +import { c } from './config.js'; +import pLimit from 'p-limit'; + +const limit = pLimit(16); function decryptedFileName(encryptedFileName: string): string { // Groups: 1 file 'name' bit @@ -29,11 +33,16 @@ function decryptedFileExtension(encryptedFileName: string): string { return m[2]; } -const oidcClient = new OidcClient( - 'http://localhost:65432/auth/realms/tdf', - 'browsertest', - 'otdf-sample-web-app' -); +function ReadableBufferStream(ab: ArrayBuffer) { + return new ReadableStream({ + start(controller) { + controller.enqueue(ab); + controller.close(); + }, + }); +} + +const oidcClient = new OidcClient(c.oidc.host, c.oidc.clientId, 'otdf-sample-web-app'); function saver(blob: Blob, name: string) { const a = document.createElement('a'); @@ -61,38 +70,51 @@ async function getNewFileHandle( ], suggestedName, }; + //@ts-expect-error //TS2739: not a complete file picker interface return showSaveFilePicker(options); } type Containers = 'html' | 'tdf' | 'nano'; -type CurrentDataController = AbortController | undefined; -type FileInputSource = { file: File }; +type CurrentDataControllers = Record; +type FileInputSource = { + type: 'file'; + file: File; +}; type UrlInputSource = { + type: 'url'; url: URL; }; -type RandomType = 'bytes'; type RandomInputSource = { - type: RandomType; + type: 'bytes'; length: number; }; -type InputSource = FileInputSource | UrlInputSource | RandomInputSource | undefined; -type SinkType = 'file' | 'fsapi' | 'none'; +type MemoryInputSource = { + type: 'memory'; + src: ArrayBuffer; + name: string; +}; -function fileNameFor(inputSource: InputSource) { +type InputSource = FileInputSource | UrlInputSource | RandomInputSource | MemoryInputSource; +type SinkType = 'file' | 'fsapi' | 'memory' | 'none'; + +function fileNameFor(inputSource?: InputSource) { if (!inputSource) { return 'undefined.bin'; } - if ('file' in inputSource) { - return inputSource.file.name; - } - if ('length' in inputSource) { - return `random-${inputSource.type}-${inputSource.length}-bytes`; + switch (inputSource.type) { + case 'file': + return inputSource.file.name; + case 'bytes': + return `random-${inputSource.type}-${inputSource.length}-bytes`; + case 'url': + const { pathname } = inputSource.url; + const i = pathname.lastIndexOf('/'); + return pathname.slice(i + 1); + case 'memory': + return inputSource.name; } - const { pathname } = inputSource.url; - const i = pathname.lastIndexOf('/'); - return pathname.slice(i + 1); } function drain() { @@ -206,9 
+228,9 @@ function App() { const [decryptContainerType, setDecryptContainerType] = useState('tdf'); const [downloadState, setDownloadState] = useState(); const [encryptContainerType, setEncryptContainerType] = useState('tdf'); - const [inputSource, setInputSource] = useState(); + const [inputSources, setInputSources] = useState([]); const [sinkType, setSinkType] = useState('file'); - const [streamController, setStreamController] = useState(); + const streamControllers = useRef({}); const handleContainerFormatRadioChange = (handler: typeof setDecryptContainerType) => (e: ChangeEvent) => { @@ -231,26 +253,27 @@ function App() { const setFileHandler = (event: ChangeEvent) => { const target = event.target as HTMLInputElement; if (target.files?.length) { - const [file] = target.files; - setInputSource({ file }); + const fileArray = Array.from(target.files); + const srcs = fileArray.map((file): FileInputSource => ({ type: 'file', file })); + setInputSources(srcs); } else { - setInputSource(undefined); + setInputSources([]); } }; const setRandomHandler = (event: ChangeEvent) => { const target = event.target as HTMLInputElement; if (target.value && target.validity.valid) { - setInputSource({ type: 'bytes', length: parseInt(target.value) }); + setInputSources([{ type: 'bytes', length: parseInt(target.value) }]); } else { - setInputSource(undefined); + setInputSources([]); } }; const setUrlHandler = (event: ChangeEvent) => { const target = event.target as HTMLInputElement; if (target.value && target.validity.valid) { - setInputSource({ url: new URL(target.value) }); + setInputSources([{ type: 'url', url: new URL(target.value) }]); } else { - setInputSource(undefined); + setInputSources([]); } }; @@ -318,7 +341,7 @@ function App() { }; const handleEncrypt = async () => { - if (!inputSource) { + if (!inputSources.length) { console.warn('No input source selected'); return false; } @@ -327,68 +350,123 @@ function App() { console.warn('PLEASE LOG IN'); return false; } - const inputFileName = fileNameFor(inputSource); - console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); - const authProvider = await AuthProviders.refreshAuthProvider({ - exchange: 'refresh', - clientId: oidcClient.clientId, - oidcOrigin: oidcClient.host, - refreshToken, - }); + const memory: MemoryInputSource[] = []; + + async function encryptNano( + nanoClient: NanoTDFClient, + inputSource: InputSource, + inputFileName: string + ) { + if ('url' in inputSource) { + throw new Error('Unsupported : fetch the url I guess?'); + } + const plainText = + 'file' == inputSource.type + ? await inputSource.file.arrayBuffer() + : 'memory' == inputSource.type + ? 
inputSource.src + : randomArrayBuffer(inputSource); + setDownloadState('Encrypting...'); + const cipherText = await nanoClient.encrypt(plainText); + switch (sinkType) { + case 'file': + saver(new Blob([cipherText]), `${inputFileName}.ntdf`); + break; + case 'fsapi': + { + const file = await getNewFileHandle('ntdf', `${inputFileName}.ntdf`); + const writable = await file.createWritable(); + try { + await writable.write(cipherText); + } catch (e) { + setDownloadState(`Encrypt Failed: ${e}`); + } finally { + await writable.close(); + } + } + break; + case 'memory': + memory.push({ type: 'memory', name: `${inputFileName}.ntdf`, src: cipherText }); + break; + case 'none': + break; + } + setDownloadState('Encrypt Complete'); + } + let promises; switch (encryptContainerType) { case 'nano': { - if ('url' in inputSource) { - throw new Error('Unsupported : fetch the url I guess?'); - } - const plainText = - 'file' in inputSource - ? await inputSource.file.arrayBuffer() - : randomArrayBuffer(inputSource); - const nanoClient = new NanoTDFClient(authProvider, 'http://localhost:65432/api/kas'); - setDownloadState('Encrypting...'); - switch (sinkType) { - case 'file': - { - const cipherText = await nanoClient.encrypt(plainText); - saver(new Blob([cipherText]), `${inputFileName}.ntdf`); - } - break; - case 'fsapi': - { - const file = await getNewFileHandle('ntdf', `${inputFileName}.ntdf`); - const cipherText = await nanoClient.encrypt(plainText); - const writable = await file.createWritable(); - try { - await writable.write(cipherText); - setDownloadState('Encrypt Complete'); - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - } finally { - await writable.close(); - } - } - break; - case 'none': - break; - } + promises = inputSources.map((inputSource): (() => Promise) => async () => { + const nanoClient = new NanoTDFClient({ + authProvider: oidcClient, + kasEndpoint: c.kas, + dpopKeys: oidcClient.getSigningKey(), + }); + const inputFileName = fileNameFor(inputSource); + console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); + await encryptNano(nanoClient, inputSource, inputFileName); + }); break; } case 'html': { const client = new TDF3Client({ - authProvider, - kasEndpoint: 'http://localhost:65432/api/kas', - readerUrl: 'https://secure.virtru.com/start?htmlProtocol=1', + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + readerUrl: c.reader, }); - let source: ReadableStream, size: number; - const sc = new AbortController(); - setStreamController(sc); - if ('file' in inputSource) { + promises = inputSources.map((inputSource): (() => Promise) => async () => { + const inputFileName = fileNameFor(inputSource); + await encryptTdfHtml(inputSource, inputFileName, client); + }); + break; + } + case 'tdf': { + const client = new TDF3Client({ + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + }); + promises = inputSources.map((inputSource): (() => Promise) => async () => { + const inputFileName = fileNameFor(inputSource); + await encryptTdf(inputSource, inputFileName, client); + }); + break; + } + } + await Promise.all(promises.map(limit)); + + if (memory.length) { + setInputSources(memory); + } + + return true; + + async function encryptTdfHtml( + inputSource: InputSource, + inputFileName: string, + client: TDF3Client + ) { + let source: ReadableStream, size: number; + const sc = new AbortController(); + streamControllers.current[inputFileName] = sc; + switch (inputSource.type) { + case 
'file': size = inputSource.file.size; source = inputSource.file.stream() as unknown as ReadableStream; - } else if ('type' in inputSource) { + break; + + case 'bytes': size = inputSource.length; source = randomStream(inputSource); - } else { + break; + + case 'memory': + size = inputSource.src.byteLength; + source = ReadableBufferStream(inputSource.src); + break; + + case 'url': // NOTE: Attaching the signal to the pipeline (in pipeTo, below) // is insufficient (at least in Chrome) to abort the fetch itself. // So aborting a sink in a pipeline does *NOT* cancel its sources @@ -405,57 +483,60 @@ function App() { } size = parseInt(fr.headers.get('Content-Length') || '-1'); source = fr.body; - } - try { - const downloadName = `${inputFileName}.tdf.html`; - let f; - if (sinkType == 'fsapi') { - f = await getNewFileHandle('html', downloadName); - } - const progressTransformers = makeProgressPair(size, 'Encrypt'); - const cipherText = await client.encrypt({ - source: source.pipeThrough(progressTransformers.reader), - offline: true, - asHtml: true, - }); - cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); - switch (sinkType) { - case 'file': - await cipherText.toFile(downloadName, { signal: sc.signal }); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await cipherText.stream.pipeTo(writable, { signal: sc.signal }); - break; - case 'none': - await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); - break; - } - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - console.error('Encrypt Failed', e); - } - setStreamController(undefined); - break; + break; } - case 'tdf': { - const client = new TDF3Client({ - authProvider, - kasEndpoint: 'http://localhost:65432/api/kas', + try { + const downloadName = `${inputFileName}.tdf.html`; + const progressTransformers = makeProgressPair(size, 'Encrypt'); + const cipherText = await client.encrypt({ + source: source.pipeThrough(progressTransformers.reader), + offline: true, + asHtml: true, }); - const sc = new AbortController(); - setStreamController(sc); - let source: ReadableStream, size: number; - if ('file' in inputSource) { + cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await cipherText.toFile(downloadName, { signal: sc.signal }); + break; + case 'fsapi': + const f = await getNewFileHandle('html', downloadName); + const writable = await f.createWritable(); + await cipherText.stream.pipeTo(writable, { signal: sc.signal }); + break; + case 'memory': + memory.push({ + type: 'memory', + name: downloadName, + src: await new Response(cipherText.stream).arrayBuffer(), + }); + break; + case 'none': + await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); + break; + } + } finally { + delete streamControllers.current[inputFileName]; + } + } + + async function encryptTdf(inputSource: InputSource, inputFileName: string, client: TDF3Client) { + const sc = new AbortController(); + streamControllers.current[inputFileName] = sc; + let source: ReadableStream, size: number; + switch (inputSource.type) { + case 'file': size = inputSource.file.size; source = inputSource.file.stream() as unknown as ReadableStream; - } else if ('type' in inputSource) { + break; + case 'bytes': size = inputSource.length; source = randomStream(inputSource); - } else { + break; + case 'memory': + size = inputSource.src.byteLength; + source = ReadableBufferStream(inputSource.src); + break; + case 'url': const fr = 
await fetch(inputSource.url, { signal: sc.signal }); if (!fr.ok) { throw Error( @@ -469,47 +550,78 @@ function App() { } size = parseInt(fr.headers.get('Content-Length') || '-1'); source = fr.body; + break; + } + try { + const downloadName = `${inputFileName}.tdf`; + const progressTransformers = makeProgressPair(size, 'Encrypt'); + const cipherText = await client.encrypt({ + source: source.pipeThrough(progressTransformers.reader), + offline: true, + }); + cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await cipherText.toFile(downloadName, { signal: sc.signal }); + break; + case 'fsapi': + const f = await getNewFileHandle('tdf', downloadName); + const writable = await f.createWritable(); + await cipherText.stream.pipeTo(writable, { signal: sc.signal }); + break; + case 'memory': + memory.push({ + type: 'memory', + name: downloadName, + src: await new Response(cipherText.stream).arrayBuffer(), + }); + break; + case 'none': + await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); + break; } + } finally { + delete streamControllers.current[inputFileName]; + } + } + }; + async function decryptNano( + nanoClient: NanoTDFClient, + inputSource: FileInputSource | RandomInputSource | MemoryInputSource, + dfn: string + ) { + const cipherText = + 'file' == inputSource.type + ? await inputSource.file.arrayBuffer() + : 'memory' == inputSource.type + ? inputSource.src + : randomArrayBuffer(inputSource); + const plainText = await nanoClient.decrypt(cipherText); + switch (sinkType) { + case 'file': + saver(new Blob([plainText]), dfn); + break; + case 'fsapi': + const f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); + const writable = await f.createWritable(); try { - let f; - const downloadName = `${inputFileName}.tdf`; - if (sinkType === 'fsapi') { - f = await getNewFileHandle('tdf', downloadName); - } - const progressTransformers = makeProgressPair(size, 'Encrypt'); - const cipherText = await client.encrypt({ - source: source.pipeThrough(progressTransformers.reader), - offline: true, - }); - cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); - switch (sinkType) { - case 'file': - await cipherText.toFile(downloadName, { signal: sc.signal }); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await cipherText.stream.pipeTo(writable, { signal: sc.signal }); - break; - case 'none': - await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); - break; - } - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - console.error('Encrypt Failed', e); + await writable.write(plainText); + } finally { + await writable.close(); } - setStreamController(undefined); break; - } + case 'memory': + memory.push({ type: 'memory', name: dfn, src: cipherText }); + break; + case 'none': + break; } - return true; - }; + } + let promises: (() => Promise)[]; + const memory: MemoryInputSource[] = []; const handleDecrypt = async () => { - if (!inputSource) { + if (!inputSources.length) { console.log('PLEASE SELECT FILE'); return false; } @@ -517,183 +629,104 @@ function App() { console.error('decrypt while logged out doesnt work'); return false; } - const dfn = decryptedFileName(fileNameFor(inputSource)); - console.log( - `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` - ); - const authProvider = await AuthProviders.refreshAuthProvider({ - exchange: 'refresh', - clientId: 
oidcClient.clientId, - oidcOrigin: oidcClient.host, - refreshToken: authState.user.refreshToken, - }); - let f; - if (sinkType === 'fsapi') { - f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); - } + switch (decryptContainerType) { case 'tdf': { const client = new TDF3Client({ - authProvider, - kasEndpoint: 'http://localhost:65432/api/kas', + authProvider: oidcClient, + dpopKeys: oidcClient.getSigningKey(), + kasEndpoint: c.kas, + }); + promises = inputSources.map((inputSource): (() => Promise) => async () => { + const dfn = decryptedFileName(fileNameFor(inputSource)); + console.log( + `Decrypting ${decryptContainerType} ${JSON.stringify( + inputSource + )} to ${sinkType} ${dfn}` + ); + await decryptTdf(client, inputSource, dfn); }); - try { - const sc = new AbortController(); - setStreamController(sc); - let source: DecryptSource; - let size: number; - if ('file' in inputSource) { - size = inputSource.file.size; - source = { type: 'file-browser', location: inputSource.file }; - } else if ('type' in inputSource) { - size = inputSource.length; - source = { type: 'chunker', location: randomChunker(inputSource) }; - } else { - const hr = await fetch(inputSource.url, { method: 'HEAD' }); - size = parseInt(hr.headers.get('Content-Length') || '-1'); - source = { type: 'remote', location: inputSource.url.toString() }; - } - const progressTransformers = makeProgressPair(size, 'Decrypt'); - // XXX chunker doesn't have an equivalent 'stream' interaface - // so we kinda fake it with percentages by tracking output, which should - // strictly be smaller than the input file. - const plainText = await client.decrypt({ source }); - plainText.stream = plainText.stream - .pipeThrough(progressTransformers.reader) - .pipeThrough(progressTransformers.writer); - switch (sinkType) { - case 'file': - await plainText.toFile(dfn, { signal: sc.signal }); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await plainText.stream.pipeTo(writable, { signal: sc.signal }); - break; - case 'none': - await plainText.stream.pipeTo(drain(), { signal: sc.signal }); - break; - } - } catch (e) { - console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); - } - setStreamController(undefined); break; } case 'nano': { - if ('url' in inputSource) { - throw new Error('Unsupported : fetch the url I guess?'); - } - const nanoClient = new NanoTDFClient(authProvider, 'http://localhost:65432/api/kas'); - try { - const cipherText = - 'file' in inputSource - ? 
await inputSource.file.arrayBuffer() - : randomArrayBuffer(inputSource); - const plainText = await nanoClient.decrypt(cipherText); - switch (sinkType) { - case 'file': - saver(new Blob([plainText]), dfn); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - try { - await writable.write(plainText); - setDownloadState('Decrypt Complete'); - } finally { - await writable.close(); - } - break; - case 'none': - break; + const nanoClient = new NanoTDFClient({ + authProvider: oidcClient, + kasEndpoint: c.kas, + dpopKeys: oidcClient.getSigningKey(), + }); + promises = inputSources.map((inputSource): (() => Promise) => async () => { + if ('url' in inputSource) { + throw new Error('Unsupported : fetch the url I guess?'); } - } catch (e) { - console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); - } + const dfn = decryptedFileName(fileNameFor(inputSource)); + await decryptNano(nanoClient, inputSource, dfn); + }); break; } } - return false; - }; - - const handleScan = async () => { - const searchTerm = 'service workers'; - // Chars to show either side of the result in the match - const contextBefore = 30; - const contextAfter = 30; - const caseInsensitive = true; - - if (!inputSource) { - console.warn('PLEASE SELECT FILE ∨ URL'); - return false; + try { + await Promise.all(promises.map(limit)); + setDownloadState('Decrypt Complete'); + } catch (e) { + console.error('Decrypt Failed', e); + setDownloadState(`Decrypt Failed: ${e}`); } - let source; - if ('file' in inputSource) { - source = inputSource.file.stream() as unknown as ReadableStream; - } else { - const sc = new AbortController(); - setStreamController(sc); - const fr = await fetch(inputSource.url, { cache: 'no-store', signal: sc.signal }); - console.log(`Received headers ${fr.headers}`); - if (!fr.ok) { - throw Error( - `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - if (!fr.body) { - throw Error( - `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - source = fr.body; - } - const reader = source.getReader(); - const decoder = new TextDecoder(); - const toMatch = caseInsensitive ? searchTerm.toLowerCase() : searchTerm; - const bufferSize = Math.max(toMatch.length - 1, contextBefore); - - let bytesReceived = 0; - let buffer = ''; - let matchFoundAt = -1; + if (memory.length) { + setInputSources(memory); + } + return false; - while (true) { - const { value: chunk, done } = await reader.read(); - if (done) { - console.log('Failed to find match'); - return; + async function decryptTdf(client: TDF3Client, inputSource: InputSource, dfn: string) { + let f; + if (sinkType === 'fsapi') { + f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); } - bytesReceived += chunk.length; - console.log(`Received ${bytesReceived.toLocaleString()} bytes of data so far`); - buffer += decoder.decode(chunk, { stream: true }); - - // already found match & just context-gathering? - if (matchFoundAt === -1) { - matchFoundAt = (caseInsensitive ? 
buffer.toLowerCase() : buffer).indexOf(toMatch); + const sc = new AbortController(); + setStreamController(sc); + let source: DecryptSource; + let size: number; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = { type: 'file-browser', location: inputSource.file }; + break; + case 'bytes': + size = inputSource.length; + source = { type: 'chunker', location: randomChunker(inputSource) }; + break; + case 'memory': + size = inputSource.src.byteLength; + source = { type: 'buffer', location: new Uint8Array(inputSource.src) }; + break; + case 'url': + const hr = await fetch(inputSource.url, { method: 'HEAD' }); + size = parseInt(hr.headers.get('Content-Length') || '-1'); + source = { type: 'remote', location: inputSource.url.toString() }; + break; } - - if (matchFoundAt === -1) { - buffer = buffer.slice(-bufferSize); - } else if (buffer.slice(matchFoundAt + toMatch.length).length >= contextAfter) { - console.log("Here's the match:"); - console.log( - buffer.slice( - Math.max(0, matchFoundAt - contextBefore), - matchFoundAt + toMatch.length + contextAfter - ) - ); - console.log('Cancelling fetch'); - reader.cancel(); - return; - } else { - console.log('Found match, but need more context…'); + const progressTransformers = makeProgressPair(size, 'Decrypt'); + // XXX chunker doesn't have an equivalent 'stream' interaface + // so we kinda fake it with percentages by tracking output, which should + // strictly be smaller than the input file. + const plainText = await client.decrypt({ source }); + plainText.stream = plainText.stream + .pipeThrough(progressTransformers.reader) + .pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await plainText.toFile(dfn, { signal: sc.signal }); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + await plainText.stream.pipeTo(writable, { signal: sc.signal }); + break; + case 'none': + await plainText.stream.pipeTo(drain(), { signal: sc.signal }); + break; } } }; @@ -716,7 +749,25 @@ function App() {
{JSON.stringify(authState?.user, null, ' ')}
); - const hasFileInput = inputSource && 'file' in inputSource; + let inputDetails; + if (inputSources.length == 1) { + const inputSource = inputSources[0]; + inputDetails = ( + <> +

{fileNameFor(inputSource)}

+ {inputSource.type == 'file' && ( + <> +
Content Type: {inputSource.file.type}
+
Last Modified: {new Date(inputSource.file.lastModified).toLocaleString()}
+
Size: {new Intl.NumberFormat().format(inputSource.file.size)} bytes
+ + )} + + ); + } else { + inputDetails =

{inputSources.length} items

; + } + return (
@@ -730,37 +781,34 @@ function App() {
Source - {hasFileInput ? ( + {inputSources.length ? (
-

- {'file' in inputSource ? inputSource.file.name : inputSource.url.toString()} -

- {'file' in inputSource && ( - <> -
Content Type: {inputSource.file.type}
-
- Last Modified: {new Date(inputSource.file.lastModified).toLocaleString()} -
-
Size: {new Intl.NumberFormat().format(inputSource.file.size)} bytes
- - )} + {inputDetails}
) : ( <> - +
OR
-
+
OR:
-
+
{' '}
+ setSinkType(e.target.value as SinkType)} + checked={sinkType === 'memory'} + />{' '} + +
)} - {inputSource && !streamController && ( + {inputSources.length && !streamController && (

Encrypt

@@ -878,9 +938,6 @@ function App() { -
diff --git a/web-app/src/config.ts b/web-app/src/config.ts new file mode 100644 index 00000000..7dc2f709 --- /dev/null +++ b/web-app/src/config.ts @@ -0,0 +1,27 @@ +export type TDFConfig = { + oidc: { + // eg 'http://localhost:65432/auth/realms/opentdf' + host: string; + // eg browsertest + clientId: string; + }; + kas: string; + reader: string; +}; + +function cfg(): TDFConfig { + const { VITE_TDF_CFG } = import.meta.env; + if (!VITE_TDF_CFG) { + return { + oidc: { + host: 'http://localhost:65432/auth/realms/tdf', + clientId: 'browsertest', + }, + kas: 'http://localhost:65432/api/kas', + reader: 'https://secure.virtru.com/start?htmlProtocol=1', + }; + } + return JSON.parse(VITE_TDF_CFG); +} + +export const c = cfg(); diff --git a/web-app/src/session.ts b/web-app/src/session.ts index b177c75b..59ebd8af 100644 --- a/web-app/src/session.ts +++ b/web-app/src/session.ts @@ -1,5 +1,7 @@ import { decodeJwt } from 'jose'; +import { default as dpopFn } from 'dpop'; import { base64 } from '@opentdf/client/encodings'; +import { AuthProvider, HttpRequest, withHeaders } from '@opentdf/client'; export type OpenidConfiguration = { issuer: string; @@ -89,12 +91,25 @@ export type Sessions = { requests: Record; /** state for most recent request */ lastRequest?: string; + /** DPoP key */ + k?: string[]; }; function getTimestampInSeconds() { return Math.floor(Date.now() / 1000); } +function rsaPkcs1Sha256(): RsaHashedKeyGenParams { + return { + name: 'RSASSA-PKCS1-v1_5', + hash: { + name: 'SHA-256', + }, + modulusLength: 2048, + publicExponent: new Uint8Array([0x01, 0x00, 0x01]), // 24 bit representation of 65537 + }; +} + const extractAuthorizationResponse = (url: string): AuthorizationResponse | null => { const queryParams = new URLSearchParams(url); console.log(`response: ${JSON.stringify(queryParams.toString())}`); @@ -152,12 +167,13 @@ async function fetchConfig(server: string): Promise { return response.json(); } -export class OidcClient { +export class OidcClient implements AuthProvider { clientId: string; host: string; scope: string; sessionIdentifier: string; _sessions?: Sessions; + signingKey?: CryptoKeyPair; constructor(host: string, clientId: string, sessionIdentifier: string) { this.clientId = clientId; @@ -189,7 +205,7 @@ export class OidcClient { return this._sessions; } - async storeSessions() { + storeSessions() { sessionStorage.setItem(this.ssk('sessions'), JSON.stringify(this._sessions)); } @@ -234,18 +250,25 @@ export class OidcClient { window.location.href = whereto; } + _cs?: Promise; + async currentSession(): Promise { - const s = await this.handleRedirect(); - if (s) { - console.log('redirected'); - return s; + if (!this._cs) { + this._cs = (async (): Promise => { + const s = await this.handleRedirect(); + if (s) { + console.log('redirected'); + return s; + } + const sessions = await this.loadSessions(); + if (!sessions?.lastRequest) { + return { sessionState: 'start' }; + } + const thisSession = sessions.requests[sessions.lastRequest]; + return thisSession; + })(); } - const sessions = await this.loadSessions(); - if (!sessions?.lastRequest) { - return { sessionState: 'start' }; - } - const thisSession = sessions.requests[sessions.lastRequest]; - return thisSession; + return this._cs; } async currentUser(): Promise { @@ -271,6 +294,8 @@ export class OidcClient { console.log('Ignoring repeated redirect code'); return; } + currentSession.usedCodes.push(response.code); + this.storeSessions(); try { currentSession.user = await this._makeAccessTokenRequest({ grantType: 
'authorization_code', @@ -288,6 +313,24 @@ export class OidcClient { } } + async getSigningKey(): Promise { + if (this.signingKey) { + return this.signingKey; + } + if (this._sessions?.k) { + const k = this._sessions?.k.map((e) => base64.decodeArrayBuffer(e)); + const algorithm = rsaPkcs1Sha256(); + const [publicKey, privateKey] = await Promise.all([ + crypto.subtle.importKey('spki', k[0], algorithm, true, ['verify']), + crypto.subtle.importKey('pkcs8', k[1], algorithm, false, ['sign']), + ]); + this.signingKey = { privateKey, publicKey }; + } else { + this.signingKey = await crypto.subtle.generateKey(rsaPkcs1Sha256(), true, ['sign']); + } + return this.signingKey; + } + private async _makeAccessTokenRequest(options: { grantType: 'authorization_code' | 'refresh_token'; codeOrRefreshToken: string; @@ -312,11 +355,26 @@ export class OidcClient { if (!config) { throw new Error('Unable to autoconfigure OIDC'); } + const headers: Record = { + 'Content-Type': 'application/x-www-form-urlencoded', + }; + const signingKey = await this.getSigningKey(); + if (this._sessions && this.signingKey) { + const k = await Promise.all([ + crypto.subtle.exportKey('spki', this.signingKey.publicKey), + crypto.subtle.exportKey('pkcs8', this.signingKey.privateKey), + ]); + this._sessions.k = k.map((e) => base64.encodeArrayBuffer(e)); + } + console.info( + `signing token request with DPoP key ${JSON.stringify( + await crypto.subtle.exportKey('jwk', signingKey.publicKey) + )}` + ); + headers.DPoP = await dpopFn(signingKey, config.token_endpoint, 'POST'); const response = await fetch(config.token_endpoint, { method: 'POST', - headers: { - 'Content-Type': 'application/x-www-form-urlencoded', - }, + headers, body: params, credentials: 'include', }); @@ -335,4 +393,36 @@ export class OidcClient { refreshToken: refresh_token, }; } + + async updateClientPublicKey(signingKey: CryptoKeyPair): Promise { + this.signingKey = signingKey; + } + + async withCreds(httpReq: HttpRequest): Promise { + const user = await this.currentUser(); + if (!user) { + console.error('Not logged in'); + return httpReq; + } + const { accessToken } = user; + const { signingKey } = this; + if (!signingKey || !signingKey.publicKey) { + console.error('missing DPoP key'); + return httpReq; + } + console.info( + `signing request for ${httpReq.url} with DPoP key ${JSON.stringify( + await crypto.subtle.exportKey('jwk', signingKey.publicKey) + )}` + ); + const dpopToken = await dpopFn( + signingKey, + httpReq.url, + httpReq.method, + /* nonce */ undefined, + accessToken + ); + // TODO: Consider: only set DPoP if cnf.jkt is present in access token? 
+ return withHeaders(httpReq, { Authorization: `Bearer ${accessToken}`, DPoP: dpopToken }); + } } diff --git a/web-app/tests/bigfile.py b/web-app/tests/bigfile.py old mode 100644 new mode 100755 index 8a79e13f..f7ec4300 --- a/web-app/tests/bigfile.py +++ b/web-app/tests/bigfile.py @@ -1,3 +1,5 @@ +#!/usr/bin/env python3 + import math import sys diff --git a/web-app/tests/smallfiles.py b/web-app/tests/smallfiles.py new file mode 100755 index 00000000..ff5317a1 --- /dev/null +++ b/web-app/tests/smallfiles.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 + +import math +import sys + + +# Generate a lot of smallish files +# For example, to make 1,024 small files: +# python smallfiles.py $(( 2 ** 10 )) +def fill(f, size): + power = math.ceil(math.log(size) / math.log(2)) + octets_per_word = max(1, power - 3) + strides = size // octets_per_word + remainder = size % octets_per_word + for x in range(strides): + f.write(f"%0{octets_per_word}x" % (x * octets_per_word)) + for x in range(remainder): + f.write(".") + + +if __name__ == "__main__": + n = int(sys.argv[1]) + digits = math.ceil(math.log(n+1) / math.log(10)) + for x in range(1, n + 1): + infix = f"{x}".rjust(digits, "0") + with open(f"s-{infix}.txt", "w") as file: + fill(file, x) diff --git a/web-app/tsconfig.node.json b/web-app/tsconfig.node.json index 7a836f70..a5843902 100644 --- a/web-app/tsconfig.node.json +++ b/web-app/tsconfig.node.json @@ -5,5 +5,7 @@ "moduleResolution": "node16", "allowSyntheticDefaultImports": true }, - "include": ["vite.config.ts"] + "include": [ "*.ts", ] }
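The lib changes above replace the positional `new NanoTDFClient(authProvider, kasUrl)` signature with a single `ClientConfig` options object (`authProvider` and `kasEndpoint`, plus optional `dpopEnabled`, `dpopKeys`, and `ephemeralKeyPair`). A minimal round-trip sketch of the new call shape, reusing the refresh-token provider and the default endpoints that already appear in this diff; the `roundTrip` wrapper itself is illustrative, not part of the change:

```ts
import { AuthProviders, NanoTDFClient } from '@opentdf/client';

// Placeholder endpoints; substitute the values for your deployment.
const kasEndpoint = 'http://localhost:65432/api/kas';
const oidcOrigin = 'http://localhost:65432/auth/realms/tdf';

// Illustrative wrapper: encrypt and decrypt one string with the options-object constructor.
async function roundTrip(refreshToken: string, plainText: string) {
  const authProvider = await AuthProviders.refreshAuthProvider({
    exchange: 'refresh',
    clientId: 'browsertest',
    oidcOrigin,
    refreshToken,
  });
  // dpopEnabled, dpopKeys, and ephemeralKeyPair remain optional ClientConfig fields.
  const client = new NanoTDFClient({ authProvider, kasEndpoint });
  const cipherText = await client.encrypt(plainText);
  return client.decrypt(cipherText);
}
```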
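The session.ts changes make `OidcClient` implement the SDK's `AuthProvider` contract, decorating each request with a Bearer token and a DPoP proof from the `dpop` package. A stripped-down provider in the same style, assuming an access token and signing key are obtained elsewhere; the `StaticDpopProvider` class and its constructor are illustrative, not from the app:

```ts
import { default as dpopFn } from 'dpop';
import { AuthProvider, HttpRequest, withHeaders } from '@opentdf/client';

// Adds Authorization and DPoP headers to outgoing SDK requests, bound to signingKey.
export class StaticDpopProvider implements AuthProvider {
  constructor(private accessToken: string, private signingKey: CryptoKeyPair) {}

  async updateClientPublicKey(signingKey: CryptoKeyPair): Promise<void> {
    this.signingKey = signingKey;
  }

  async withCreds(httpReq: HttpRequest): Promise<HttpRequest> {
    // Same dpop call shape as OidcClient.withCreds above: key pair, URL, method, nonce, access token.
    const proof = await dpopFn(
      this.signingKey,
      httpReq.url,
      httpReq.method,
      /* nonce */ undefined,
      this.accessToken
    );
    return withHeaders(httpReq, { Authorization: `Bearer ${this.accessToken}`, DPoP: proof });
  }
}
```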
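The App.tsx changes turn each selected file into its own deferred encrypt/decrypt task and push every task through one shared `p-limit` gate (`pLimit(16)`), so at most 16 streams run at once. The same pattern in isolation, with `processOne` standing in for the per-file closures the app builds:

```ts
import pLimit from 'p-limit';

// At most 16 tasks in flight at once, mirroring `const limit = pLimit(16)` in App.tsx.
const limit = pLimit(16);

// Illustrative per-file worker; App.tsx builds one closure like this per input source.
async function processOne(name: string): Promise<void> {
  console.log(`processing ${name}`);
}

export async function processAll(names: string[]): Promise<void> {
  // Defer each task, then let p-limit schedule them; Promise.all surfaces the first failure.
  const tasks = names.map((name) => () => processOne(name));
  await Promise.all(tasks.map(limit));
}
```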