diff --git a/cli/package-lock.json b/cli/package-lock.json
index 9b293726..39212cc9 100644
--- a/cli/package-lock.json
+++ b/cli/package-lock.json
@@ -360,7 +360,7 @@
     "node_modules/@opentdf/sdk": {
       "version": "0.2.0",
      "resolved": "file:../lib/opentdf-sdk-0.2.0.tgz",
-      "integrity": "sha512-Ic6Tl6tV/TI9JPyjAnfywPen0t78JSkiupDKdpSVa2ZW8B69yU0oh65aC8oxniZJ57krRIFn/HxY1lX0HTk+TQ==",
+      "integrity": "sha512-TcLdtRtY12slxtysUC031Np/DmHc2j8ya6f8aEJrmEFvjTaR5R9hMdZ7r3t6oJHJCJCL3duObTDXtxAx/RUxjA==",
       "license": "BSD-3-Clause-Clear",
       "dependencies": {
         "browser-fs-access": "^0.34.1",
diff --git a/cli/src/cli.ts b/cli/src/cli.ts
index 28f08623..27eab26d 100644
--- a/cli/src/cli.ts
+++ b/cli/src/cli.ts
@@ -1,20 +1,21 @@
 import { createWriteStream, openAsBlob } from 'node:fs';
-import { readFile, stat, writeFile } from 'node:fs/promises';
+import { stat } from 'node:fs/promises';
 import { Writable } from 'node:stream';
 import yargs from 'yargs';
 import { hideBin } from 'yargs/helpers';
 import {
   type AuthProvider,
-  type EncryptParams,
+  type CreateOptions,
+  type CreateNanoTDFOptions,
+  type CreateZTDFOptions,
   type HttpRequest,
+  type ReadOptions,
+  type Keys,
+  type Source,
   AuthProviders,
-  NanoTDFClient,
-  NanoTDFDatasetClient,
-  TDF3Client,
   version,
-  EncryptParamsBuilder,
-  DecryptParams,
-  DecryptParamsBuilder,
+  OpenTDF,
+  DecoratedStream,
 } from '@opentdf/sdk';
 import { CLIError, Level, log } from './logger.js';
 import { webcrypto } from 'crypto';
@@ -108,22 +109,8 @@ const rstrip = (str: string, suffix = ' '): string => {
   return str;
 };
 
-type AnyNanoClient = NanoTDFClient | NanoTDFDatasetClient;
-
-function addParams(client: AnyNanoClient, argv: Partial<mainArgs>) {
-  if (argv.attributes?.length) {
-    client.dataAttributes = argv.attributes.split(',');
-  }
-  if (argv.usersWithAccess?.length) {
-    client.dissems = argv.usersWithAccess.split(',');
-  }
-  log('SILLY', `Built encrypt params dissems: ${client.dissems}, attrs: ${client.dataAttributes}`);
-}
-
-async function parseAssertionVerificationKeys(
-  s: string
-): Promise<assertions.AssertionVerificationKeys> {
-  let u;
+async function parseAssertionVerificationKeys(s: string): Promise<Keys> {
+  let u: assertions.AssertionVerificationKeys;
   try {
     u = JSON.parse(s);
   } catch (err) {
@@ -145,7 +132,7 @@
   // handle both cases of "keys"
   if (!('Keys' in u && typeof u.Keys === 'object')) {
     if ('keys' in u && typeof u.keys === 'object') {
-      u.Keys = u.keys;
+      u.Keys = u.keys as Record<string, CryptoKey>;
     } else {
       throw new CLIError(
         'CRITICAL',
@@ -172,26 +159,23 @@
       throw new CLIError('CRITICAL', `Issue converting assertion key from string: ${err.message}`);
     }
   }
-  return u;
+  return u.Keys;
 }
 
-async function tdf3DecryptParamsFor(argv: Partial<mainArgs>): Promise<DecryptParams> {
-  const c = new DecryptParamsBuilder();
+async function parseReadOptions(argv: Partial<mainArgs>): Promise<ReadOptions> {
+  const r: ReadOptions = { source: await fileAsSource(argv.file as string) };
   if (argv.noVerifyAssertions) {
-    c.withNoVerifyAssertions(true);
+    r.noVerify = true;
   }
   if (argv.assertionVerificationKeys) {
-    c.withAssertionVerificationKeys(
-      await parseAssertionVerificationKeys(argv.assertionVerificationKeys)
-    );
+    r.verifiers = await parseAssertionVerificationKeys(argv.assertionVerificationKeys);
   }
   if (argv.concurrencyLimit) {
-    c.withConcurrencyLimit(argv.concurrencyLimit);
+    r.concurrencyLimit = argv.concurrencyLimit;
   } else {
-    c.withConcurrencyLimit(100);
+    r.concurrencyLimit = 100;
   }
-  c.setFileSource(await openAsBlob(argv.file as string));
-  return c.build();
+  return r;
 }
 
 async function correctAssertionKeys(
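The builder classes above give way to plain options objects. A minimal sketch of how an object like the one `parseReadOptions` assembles feeds the new client; the file names, endpoint, and `authProvider` below are placeholder assumptions, not values from this patch:

```ts
import { createWriteStream, openAsBlob } from 'node:fs';
import { Writable } from 'node:stream';
import { OpenTDF, type ReadOptions } from '@opentdf/sdk';

declare const authProvider: import('@opentdf/sdk').AuthProvider; // assumed to exist

const opts: ReadOptions = {
  source: { type: 'file-browser', location: await openAsBlob('doc.txt.tdf') },
  concurrencyLimit: 100, // the same default the CLI applies above
};
const client = new OpenTDF({ authProvider, policyEndpoint: 'https://platform.example.com' });
try {
  const plaintext = await client.read(opts);
  await plaintext.pipeTo(Writable.toWeb(createWriteStream('doc.txt')));
} finally {
  client.close();
}
```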
@@ -265,32 +249,44 @@ async function parseAssertionConfig(s: string): Promise<AssertionConfig[]> {
-async function tdf3EncryptParamsFor(argv: Partial<mainArgs>): Promise<EncryptParams> {
-  const c = new EncryptParamsBuilder();
-  if (argv.assertions?.length) {
-    c.withAssertions(await parseAssertionConfig(argv.assertions));
-  }
+async function parseCreateOptions(argv: Partial<mainArgs>): Promise<CreateOptions> {
+  const c: CreateOptions = {
+    source: await fileAsSource(argv.file as string),
+  };
   if (argv.attributes?.length) {
-    c.setAttributes(argv.attributes.split(','));
+    c.attributes = argv.attributes.split(',');
   }
-  if (argv.usersWithAccess?.length) {
-    c.setUsersWithAccess(argv.usersWithAccess.split(','));
+  c.autoconfigure = !!argv.autoconfigure;
+  return c;
+}
+
+async function parseCreateZTDFOptions(argv: Partial<mainArgs>): Promise<CreateZTDFOptions> {
+  const c: CreateZTDFOptions = await parseCreateOptions(argv);
+  if (argv.assertions?.length) {
+    c.assertionConfigs = await parseAssertionConfig(argv.assertions);
   }
   if (argv.mimeType?.length) {
-    c.setMimeType(argv.mimeType);
+    if (argv.mimeType && /^[a-z]+\/[a-z0-9-+.]+$/.test(argv.mimeType)) {
+      c.mimeType = argv.mimeType as `${string}/${string}`;
+    } else {
+      throw new CLIError('CRITICAL', 'Invalid mimeType format');
+    }
   }
-  if (argv.autoconfigure) {
-    c.withAutoconfigure();
+  return c;
+}
+
+async function parseCreateNanoTDFOptions(argv: Partial<mainArgs>): Promise<CreateNanoTDFOptions> {
+  const c: CreateNanoTDFOptions = await parseCreateOptions(argv);
+  const ecdsaBinding = argv.policyBinding?.toLowerCase() == 'ecdsa';
+  if (ecdsaBinding) {
+    c.bindingType = 'ecdsa';
   }
-  // use offline mode, we do not have upsert for v2
-  c.setOffline();
-  // FIXME TODO must call file.close() after we are done
-  const buffer = await processDataIn(argv.file as string);
-  c.setBufferSource(buffer);
-  return c.build();
+  // NOTE autoconfigure is not yet supported in nanotdf
+  delete c.autoconfigure;
+  return c;
 }
 
-async function processDataIn(file: string) {
+async function fileAsSource(file: string): Promise<Source> {
   if (!file) {
     throw new CLIError('CRITICAL', 'Must specify file or pipe');
   }
@@ -303,7 +299,7 @@ async function processDataIn(file: string) {
     throw new CLIError('CRITICAL', `File is not accessable [${file}]`);
   }
   log('DEBUG', `Using input from file [${file}]`);
-  return readFile(file);
+  return { type: 'file-browser', location: await openAsBlob(file) };
 }
 
 export const handleArgs = (args: string[]) => {
@@ -332,7 +328,6 @@ export const handleArgs = (args: string[]) => {
 
       // AUTH OPTIONS
       .option('kasEndpoint', {
-        demandOption: true,
         group: 'Server Endpoints:',
         type: 'string',
         description: 'URL to non-default KAS instance (https://mykas.net)',
@@ -467,14 +462,6 @@ export const handleArgs = (args: string[]) => {
          type: 'string',
          description: 'Owner email address',
        },
-        usersWithAccess: {
-          alias: 'users-with-access',
-          group: 'Encrypt Options:',
-          desc: 'Add users to the policy',
-          type: 'string',
-          default: '',
-          validate: (users: string) => users.split(','),
-        },
       })
 
       // COMMANDS
@@ -520,7 +507,10 @@ export const handleArgs = (args: string[]) => {
           authProvider.updateClientPublicKey(signingKey);
           log('DEBUG', `Initialized auth provider ${JSON.stringify(authProvider)}`);
 
-          const policyUrl: string = guessPolicyUrl(argv);
+          const policyUrl = guessPolicyUrl(argv);
+          if (!policyUrl) {
+            throw new CLIError('CRITICAL', 'policyEndpoint must be specified');
+          }
           const defs = await attributeFQNsAsValues(
             policyUrl,
             authProvider,
@@ -546,78 +536,55 @@ export const handleArgs = (args: string[]) => {
           const ignoreAllowList = !!argv.ignoreAllowList;
           const authProvider = await processAuth(argv);
           log('DEBUG', `Initialized auth provider ${JSON.stringify(authProvider)}`);
-
-          const kasEndpoint = argv.kasEndpoint;
-          if (argv.containerType === 'tdf3' || argv.containerType == 'ztdf') {
-            log('DEBUG', `TDF3 Client`);
-            const client = new TDF3Client({
-              allowedKases,
-              ignoreAllowList,
-              authProvider,
-              kasEndpoint,
-              dpopEnabled: argv.dpop,
-            });
-            log('SILLY', `Initialized client ${JSON.stringify(client)}`);
-            log('DEBUG', `About to decrypt [${argv.file}]`);
-            const ct = await client.decrypt(await tdf3DecryptParamsFor(argv));
-            if (argv.output) {
-              const destination = createWriteStream(argv.output);
-              await ct.stream.pipeTo(Writable.toWeb(destination));
-            } else {
-              console.log(await ct.toString());
-            }
-          } else {
-            const dpopEnabled = !!argv.dpop;
-            const client =
-              argv.containerType === 'nano'
-                ? new NanoTDFClient({
-                    allowedKases,
-                    ignoreAllowList,
-                    authProvider,
-                    kasEndpoint,
-                    dpopEnabled,
-                  })
-                : new NanoTDFDatasetClient({
-                    allowedKases,
-                    ignoreAllowList,
-                    authProvider,
-                    kasEndpoint,
-                    dpopEnabled,
-                  });
-            const buffer = await processDataIn(argv.file as string);
-
-            log('DEBUG', 'Decrypt data.');
-            const plaintext = await client.decrypt(buffer);
-
-            log('DEBUG', 'Handle output.');
-            if (argv.output) {
-              await writeFile(argv.output, new Uint8Array(plaintext));
-            } else {
-              console.log(new TextDecoder().decode(plaintext));
+          const client = new OpenTDF({
+            authProvider,
+            defaultCreateOptions: {
+              defaultKASEndpoint: argv.kasEndpoint,
+            },
+            defaultReadOptions: {
+              allowedKASEndpoints: allowedKases,
+              ignoreAllowlist: ignoreAllowList,
+              noVerify: !!argv.noVerifyAssertions,
+            },
+            disableDPoP: !argv.dpop,
+            policyEndpoint: guessPolicyUrl(argv),
+          });
+          try {
+            log('SILLY', `Initialized client`);
+            log('DEBUG', `About to TDF3 decrypt [${argv.file}]`);
+            const ct = await client.read(await parseReadOptions(argv));
+            const destination = argv.output ? createWriteStream(argv.output) : process.stdout;
+            try {
+              await ct.pipeTo(Writable.toWeb(destination));
+            } catch (e) {
+              log('ERROR', `Failed to pipe to destination stream: ${e}`);
             }
-          }
-          const lastRequest = authProvider.requestLog[authProvider.requestLog.length - 1];
-          let accessToken = null;
-          let dpopToken = null;
-          for (const h of Object.keys(lastRequest.headers)) {
-            switch (h.toLowerCase()) {
-              case 'dpop':
-                console.assert(!dpopToken, 'Multiple dpop headers found');
-                dpopToken = parseJwtComplete(lastRequest.headers[h]);
-                log('INFO', `dpop: ${JSON.stringify(dpopToken)}`);
-                break;
-              case 'authorization':
-                console.assert(!accessToken, 'Multiple authorization headers found');
-                accessToken = parseJwt(lastRequest.headers[h].split(' ')[1]);
-                log('INFO', `Access Token: ${JSON.stringify(accessToken)}`);
-                if (argv.dpop) {
-                  console.assert(accessToken.cnf?.jkt, 'Access token must have a cnf.jkt');
-                }
-                break;
+            const lastRequest = authProvider.requestLog[authProvider.requestLog.length - 1];
+            log('SILLY', `last request is ${JSON.stringify(lastRequest)}`);
+            let accessToken = null;
+            let dpopToken = null;
+            for (const h of Object.keys(lastRequest.headers)) {
+              switch (h.toLowerCase()) {
+                case 'dpop':
+                  console.assert(!dpopToken, 'Multiple dpop headers found');
+                  dpopToken = parseJwtComplete(lastRequest.headers[h]);
+                  log('INFO', `dpop: ${JSON.stringify(dpopToken)}`);
+                  break;
+                case 'authorization':
+                  console.assert(!accessToken, 'Multiple authorization headers found');
+                  accessToken = parseJwt(lastRequest.headers[h].split(' ')[1]);
+                  log('INFO', `Access Token: ${JSON.stringify(accessToken)}`);
+                  if (argv.dpop) {
+                    console.assert(accessToken.cnf?.jkt, 'Access token must have a cnf.jkt');
+                  }
+                  break;
+              }
             }
+            console.assert(accessToken, 'No access_token found');
+            console.assert(!argv.dpop || dpopToken, 'DPoP requested but absent');
+          } finally {
+            client.close();
           }
-          console.assert(accessToken, 'No access_token found');
-          console.assert(!argv.dpop || dpopToken, 'DPoP requested but absent');
         }
       )
       .command(
@@ -634,57 +601,33 @@ export const handleArgs = (args: string[]) => {
           log('DEBUG', 'Running encrypt command');
           const authProvider = await processAuth(argv);
           log('DEBUG', `Initialized auth provider ${JSON.stringify(authProvider)}`);
-          const kasEndpoint = argv.kasEndpoint;
-          const ignoreAllowList = !!argv.ignoreAllowList;
-          const allowedKases = argv.allowList?.split(',');
 
-          if ('tdf3' === argv.containerType || 'ztdf' === argv.containerType) {
-            log('DEBUG', `TDF3 Client`);
-            const policyEndpoint: string = guessPolicyUrl(argv);
-            const client = new TDF3Client({
-              allowedKases,
-              ignoreAllowList,
-              authProvider,
-              kasEndpoint,
-              policyEndpoint,
-              dpopEnabled: argv.dpop,
-            });
-            log('SILLY', `Initialized client ${JSON.stringify(client)}`);
-            const ct = await client.encrypt(await tdf3EncryptParamsFor(argv));
-            if (!ct) {
-              throw new CLIError('CRITICAL', 'Encrypt configuration error: No output?');
-            }
-            if (argv.output) {
-              const destination = createWriteStream(argv.output);
-              await ct.stream.pipeTo(Writable.toWeb(destination));
+          const client = new OpenTDF({
+            authProvider,
+            defaultCreateOptions: {
+              defaultKASEndpoint: argv.kasEndpoint,
+            },
+            disableDPoP: !argv.dpop,
+            policyEndpoint: guessPolicyUrl(argv),
+          });
+          try {
+            log('SILLY', `Initialized client`);
+
+            let ct: DecoratedStream;
+            if ('tdf3' === argv.containerType || 'ztdf' === argv.containerType) {
+              log('DEBUG', `TDF3 Create`);
+              ct = await client.createZTDF(await parseCreateZTDFOptions(argv));
             } else {
-              console.log(await ct.toString());
+              log('DEBUG', `Nano Create`);
+              ct = await client.createNanoTDF(await parseCreateNanoTDFOptions(argv));
             }
-          } else {
-            const dpopEnabled = !!argv.dpop;
-            const ecdsaBinding = argv.policyBinding.toLowerCase() == 'ecdsa';
-            const client =
-              argv.containerType === 'nano'
-                ? new NanoTDFClient({ allowedKases, authProvider, dpopEnabled, kasEndpoint })
-                : new NanoTDFDatasetClient({
-                    allowedKases,
-                    authProvider,
-                    dpopEnabled,
-                    kasEndpoint,
-                  });
-            log('SILLY', `Initialized client ${JSON.stringify(client)}`);
-
-            addParams(client, argv);
-
-            const buffer = await processDataIn(argv.file as string);
-            const cyphertext = await client.encrypt(buffer, { ecdsaBinding });
-
-            log('DEBUG', `Handle cyphertext output ${JSON.stringify(cyphertext)}`);
-            if (argv.output) {
-              await writeFile(argv.output, new Uint8Array(cyphertext));
-            } else {
-              console.log(base64.encodeArrayBuffer(cyphertext));
+            if (!ct) {
+              throw new CLIError('CRITICAL', 'Encrypt configuration error: No output?');
             }
+            const destination = argv.output ? createWriteStream(argv.output) : process.stdout;
+            await ct.pipeTo(Writable.toWeb(destination));
+          } finally {
+            client.close();
           }
         }
       )
@@ -730,15 +673,15 @@ function guessPolicyUrl({
   kasEndpoint,
   policyEndpoint,
 }: {
-  kasEndpoint: string;
+  kasEndpoint?: string;
   policyEndpoint?: string;
-}) {
-  let policyUrl: string;
+}): string | undefined {
   if (policyEndpoint) {
-    policyUrl = rstrip(policyEndpoint, '/');
-  } else {
+    return rstrip(policyEndpoint, '/');
+  }
+  if (kasEndpoint) {
     const uNoSlash = rstrip(kasEndpoint, '/');
-    policyUrl = uNoSlash.endsWith('/kas') ? uNoSlash.slice(0, -4) : uNoSlash;
+    return uNoSlash.endsWith('/kas') ? uNoSlash.slice(0, -4) : uNoSlash;
   }
-  return policyUrl;
+  return undefined;
 }
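Both commands now share one create path. A sketch of that flow outside yargs, assuming a reachable KAS and an `authProvider` (placeholders, not values from this patch):

```ts
import { createWriteStream, openAsBlob } from 'node:fs';
import { Writable } from 'node:stream';
import { OpenTDF } from '@opentdf/sdk';

declare const authProvider: import('@opentdf/sdk').AuthProvider; // assumed to exist

const client = new OpenTDF({
  authProvider,
  defaultCreateOptions: { defaultKASEndpoint: 'https://kas.example.com' },
});
try {
  const source = { type: 'file-browser', location: await openAsBlob('doc.txt') } as const;
  // ZTDF and NanoTDF share one client; the container is chosen per call.
  const ct = await client.createZTDF({ source, autoconfigure: false });
  // const ct = await client.createNanoTDF({ source, bindingType: 'ecdsa' });
  await ct.pipeTo(Writable.toWeb(createWriteStream('doc.txt.tdf')));
} finally {
  client.close();
}
```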
diff --git a/lib/package-lock.json b/lib/package-lock.json
index e7742bbb..831358c1 100644
--- a/lib/package-lock.json
+++ b/lib/package-lock.json
@@ -27,7 +27,6 @@
     "@types/node": "^20.4.5",
     "@types/send": "^0.17.1",
     "@types/sinon": "~10.0.15",
-    "@types/streamsaver": "^2.0.1",
     "@types/uuid": "~9.0.2",
     "@types/wicg-file-system-access": "^2020.9.6",
     "@typescript-eslint/eslint-plugin": "^6.2.1",
@@ -2025,11 +2024,6 @@
       "dev": true,
       "license": "MIT"
     },
-    "node_modules/@types/streamsaver": {
-      "version": "2.0.1",
-      "dev": true,
-      "license": "MIT"
-    },
     "node_modules/@types/tough-cookie": {
       "version": "4.0.4",
       "dev": true,
diff --git a/lib/package.json b/lib/package.json
index 2e62eded..87f62032 100644
--- a/lib/package.json
+++ b/lib/package.json
@@ -25,6 +25,11 @@
   "main": "./dist/cjs/tdf3/index.js",
   "exports": {
     ".": {
+      "types": "./dist/types/src/index.d.ts",
+      "require": "./dist/cjs/src/index.js",
+      "import": "./dist/web/src/index.js"
+    },
+    "./singlecontainer": {
       "types": "./dist/types/tdf3/index.d.ts",
       "require": "./dist/cjs/tdf3/index.js",
       "import": "./dist/web/tdf3/index.js"
@@ -44,9 +49,9 @@
       }
     },
     "./nano": {
-      "types": "./dist/types/src/index.d.ts",
-      "require": "./dist/cjs/src/index.js",
-      "import": "./dist/web/src/index.js"
+      "types": "./dist/types/src/nanoindex.d.ts",
+      "require": "./dist/cjs/src/nanoindex.js",
+      "import": "./dist/web/src/nanoindex.js"
     }
   },
   "scripts": {
@@ -85,7 +90,6 @@
     "@types/node": "^20.4.5",
     "@types/send": "^0.17.1",
     "@types/sinon": "~10.0.15",
-    "@types/streamsaver": "^2.0.1",
     "@types/uuid": "~9.0.2",
     "@types/wicg-file-system-access": "^2020.9.6",
     "@typescript-eslint/eslint-plugin": "^6.2.1",
diff --git a/lib/src/index.ts b/lib/src/index.ts
index d3009e77..022be994 100644
--- a/lib/src/index.ts
+++ b/lib/src/index.ts
@@ -1,4 +1,6 @@
+export { type AuthProvider, type HttpMethod, HttpRequest, withHeaders } from './auth/auth.js';
 export * as AuthProviders from './auth/providers.js';
 export { attributeFQNsAsValues } from './policy/api.js';
-export * from './nanoclients.js';
 export { version, clientType } from './version.js';
+export * from './opentdf.js';
+export * from './seekable.js';
diff --git a/lib/src/nanoclients.ts b/lib/src/nanoclients.ts
index 14fe8375..7d21b1da 100644
--- a/lib/src/nanoclients.ts
+++ b/lib/src/nanoclients.ts
@@ -71,6 +71,7 @@ export class NanoTDFClient extends Client {
   async decryptLegacyTDF(ciphertext: string | TypedArray | ArrayBuffer): Promise<ArrayBuffer> {
     // Parse ciphertext
     const nanotdf = NanoTDF.from(ciphertext, undefined, true);
+    const legacyVersion = '0.0.0';
 
     // Rewrap key on every request
     const key = await this.rewrapKey(
diff --git a/lib/src/nanoindex.ts b/lib/src/nanoindex.ts
new file mode 100644
index 00000000..d3009e77
--- /dev/null
+++ b/lib/src/nanoindex.ts
@@ -0,0 +1,4 @@
+export * as AuthProviders from './auth/providers.js';
+export { attributeFQNsAsValues } from './policy/api.js';
+export * from './nanoclients.js';
+export { version, clientType } from './version.js';
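The exports remap changes what each subpath resolves to. How imports land after this change, assuming the legacy names remain re-exported from their respective entry files:

```ts
// Default entry point is now the unified API (lib/src/index.ts):
import { OpenTDF } from '@opentdf/sdk';
// The legacy nano clients move behind './nano' (lib/src/nanoindex.ts):
import { NanoTDFClient, NanoTDFDatasetClient } from '@opentdf/sdk/nano';
// The previous default surface (lib/tdf3/index.ts) moves to './singlecontainer':
import { TDF3Client } from '@opentdf/sdk/singlecontainer';
```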
diff --git a/lib/src/opentdf.ts b/lib/src/opentdf.ts
new file mode 100644
index 00000000..a437b553
--- /dev/null
+++ b/lib/src/opentdf.ts
@@ -0,0 +1,416 @@
+import { type AuthProvider } from './auth/providers.js';
+import { ConfigurationError, InvalidFileError } from './errors.js';
+import { NanoTDFDatasetClient } from './nanoclients.js';
+export { Client as TDF3Client } from '../tdf3/src/client/index.js';
+import NanoTDF from './nanotdf/NanoTDF.js';
+import decryptNanoTDF from './nanotdf/decrypt.js';
+import Client from './nanotdf/Client.js';
+import Header from './nanotdf/models/Header.js';
+import { fromSource, sourceToStream, type Source } from './seekable.js';
+import { Client as TDF3Client } from '../tdf3/src/client/index.js';
+import { AssertionConfig, AssertionVerificationKeys } from '../tdf3/src/assertions.js';
+import { OriginAllowList } from './access.js';
+import { type Manifest } from '../tdf3/src/models/manifest.js';
+
+export type Keys = {
+  [keyID: string]: CryptoKey | CryptoKeyPair;
+};
+
+// Options when creating a new TDF object
+// that are shared between all container types.
+export type CreateOptions = {
+  // If the policy service should be used to control creation options
+  autoconfigure?: boolean;
+
+  // List of attributes that will be assigned to the object's policy
+  attributes?: string[];
+
+  // If set and positive, this represents the maximum number of bytes to read from a stream to encrypt.
+  // This is helpful for enforcing size limits and preventing DoS attacks.
+  byteLimit?: number;
+
+  // The KAS to use for creation, if none is specified by the attribute service.
+  defaultKASEndpoint?: string;
+
+  // Private (or shared) keys for signing assertions and bindings
+  signers?: Keys;
+
+  // Source of plaintext data
+  source: Source;
+};
+
+export type CreateNanoTDFOptions = CreateOptions & {
+  bindingType?: 'ecdsa' | 'gmac';
+
+  // When creating a new collection, use ECDSA binding with this key id from the signers,
+  // instead of the DEK.
+  ecdsaBindingKeyID?: string;
+
+  // When creating a new collection,
+  // use the key in the `signers` list with this id
+  // to generate a signature for each element.
+  // When absent, the nanotdf is unsigned.
+  signingKeyID?: string;
+};
+
+export type CreateNanoTDFCollectionOptions = CreateNanoTDFOptions & {
+  // The maximum number of key iterations to use for a single DEK.
+  maxKeyIterations?: number;
+};
+
+// Metadata for a TDF object.
+export type Metadata = object;
+
+// MIME type of the decrypted content.
+export type MimeType = `${string}/${string}`;
+
+// Template for a Key Access Object (KAO) to be filled in during encrypt.
+export type SplitStep = {
+  // Which KAS to use to rewrap this segment of the key
+  kas: string;
+
+  // An identifier for a key segment.
+  // Leave empty to share the key.
+  sid?: string;
+};
+
+/// Options specific to the ZTDF container format.
+export type CreateZTDFOptions = CreateOptions & {
+  // Configuration for bound metadata.
+  assertionConfigs?: AssertionConfig[];
+
+  // Unbound metadata (deprecated)
+  metadata?: Metadata;
+
+  // MIME type of the decrypted content. Used for display.
+  mimeType?: MimeType;
+
+  // How to split or share the data encryption key across multiple KASes.
+  splitPlan?: SplitStep[];
+
+  // The segment size for the content; smaller is slower, but allows faster random access.
+  // The current default is 1 MiB (2^20 bytes).
+  windowSize?: number;
+};
+
+// Settings for decrypting any variety of TDF file.
+export type ReadOptions = {
+  // ciphertext
+  source: Source;
+  // list of KASes that may be contacted for a rewrap
+  allowedKASEndpoints?: string[];
+  // Optionally disable checking the allowlist
+  ignoreAllowlist?: boolean;
+  // Public (or shared) keys for verifying assertions
+  verifiers?: Keys;
+  // Optionally disable assertion verification
+  noVerify?: boolean;
+
+  // If set, prevents more than this number of concurrent requests to the KAS.
+  concurrencyLimit?: number;
+};
+
+// Defaults and shared settings that are relevant to creating TDF objects.
+export type OpenTDFOptions = {
+  // Policy service endpoint
+  policyEndpoint?: string;
+
+  // Default settings for 'encrypt' type requests.
+  defaultCreateOptions?: Omit<CreateOptions, 'source'>;
+
+  // Default settings for 'decrypt' type requests.
+  defaultReadOptions?: Omit<ReadOptions, 'source'>;
+
+  // If we want to *not* send a DPoP token
+  disableDPoP?: boolean;
+
+  // Optional keys for DPoP requests to a server.
+  // These often must be registered via a DPoP flow with the IdP
+  // which is out of the scope of this library.
+  dpopKeys?: Promise<CryptoKeyPair>;
+
+  authProvider: AuthProvider;
+};
+
+export type DecoratedStream = ReadableStream<Uint8Array> & {
+  // If the source is a TDF3/ZTDF, and includes metadata, and it has been read.
+  metadata?: Promise<Metadata | undefined>;
+  manifest?: Promise<Manifest>;
+  // If the source is a NanoTDF, this will be set.
+  header?: Header;
+};
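A hypothetical `CreateZTDFOptions` literal, to show how these pieces compose; the attribute FQN, KAS URLs, and content are made-up values:

```ts
import type { CreateZTDFOptions } from './opentdf.js';

const opts: CreateZTDFOptions = {
  source: { type: 'buffer', location: new TextEncoder().encode('hello world') },
  attributes: ['https://example.com/attr/classification/value/open'], // made-up FQN
  mimeType: 'text/plain',
  splitPlan: [
    { kas: 'https://kas-a.example.com', sid: 's1' }, // distinct sids: the DEK is split,
    { kas: 'https://kas-b.example.com', sid: 's2' }, // so both KASes must grant access
  ],
  windowSize: 2 ** 20, // 1 MiB segments, the documented default
};
```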
+// Cache for headers of nanotdf collections.
+// Stores keys by the header.ephemeralPublicKey value.
+// Has a daemon that removes all keys that have not been accessed in the last 5 minutes.
+export class NanoHeaderCache {
+  // Keyed by the stringified ephemeral public key, since Uint8Array map keys
+  // compare by reference and would never match across separate parses.
+  private cache: Map<string, { lastAccessTime: number; value: CryptoKey }>;
+  private closer: NodeJS.Timer;
+  constructor() {
+    this.cache = new Map();
+    this.closer = setInterval(() => {
+      const now = Date.now();
+      for (const [key, value] of this.cache.entries()) {
+        if (now - value.lastAccessTime > 300000) {
+          this.cache.delete(key);
+        }
+      }
+    }, 500);
+  }
+
+  get(key: Uint8Array): CryptoKey | undefined {
+    const entry = this.cache.get(key.toString());
+    if (entry) {
+      entry.lastAccessTime = Date.now();
+      return entry.value;
+    }
+    return undefined;
+  }
+
+  set(key: Uint8Array, value: CryptoKey) {
+    this.cache.set(key.toString(), { lastAccessTime: Date.now(), value });
+  }
+
+  close() {
+    clearInterval(this.closer);
+  }
+}
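A hedged sketch of the intended behavior; `header` and `dek` are assumptions standing in for a parsed collection header and its unwrapped key:

```ts
import Header from './nanotdf/models/Header.js';
import { NanoHeaderCache } from './opentdf.js';

declare const header: Header; // a parsed nanotdf collection header (assumption)
declare const dek: CryptoKey; // its unwrapped data encryption key (assumption)

const cache = new NanoHeaderCache();
cache.set(header.ephemeralPublicKey, dek);
// Reading another item from the same collection skips the KAS round trip:
const hit = cache.get(header.ephemeralPublicKey); // defined until idle > 5 minutes
cache.close(); // stops the sweep interval so the process can exit
```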
+
+// SDK for dealing with OpenTDF data and policy services.
+export class OpenTDF {
+  // Configuration service and more is at this URL/connectRPC endpoint
+  readonly policyEndpoint: string;
+  readonly authProvider: AuthProvider;
+  readonly dpopEnabled: boolean;
+  defaultCreateOptions: Omit<CreateOptions, 'source'>;
+  defaultReadOptions: Omit<ReadOptions, 'source'>;
+  readonly dpopKeys: Promise<CryptoKeyPair>;
+
+  // Header cache for reading nanotdf collections
+  private readonly headerCache: NanoHeaderCache;
+  private tdf3Client: TDF3Client;
+
+  constructor({
+    authProvider,
+    dpopKeys,
+    defaultCreateOptions,
+    defaultReadOptions,
+    disableDPoP,
+    policyEndpoint,
+  }: OpenTDFOptions) {
+    this.authProvider = authProvider;
+    this.defaultCreateOptions = defaultCreateOptions || {};
+    this.defaultReadOptions = defaultReadOptions || {};
+    this.dpopEnabled = !disableDPoP;
+    this.policyEndpoint = policyEndpoint || '';
+    this.headerCache = new NanoHeaderCache();
+    this.tdf3Client = new TDF3Client({
+      authProvider,
+      dpopKeys,
+      kasEndpoint: 'https://disallow.all.invalid',
+      policyEndpoint,
+    });
+    this.dpopKeys =
+      dpopKeys ??
+      crypto.subtle.generateKey(
+        {
+          name: 'RSASSA-PKCS1-v1_5',
+          hash: 'SHA-256',
+          modulusLength: 2048,
+          publicExponent: new Uint8Array([0x01, 0x00, 0x01]),
+        },
+        true,
+        ['sign', 'verify']
+      );
+  }
+
+  async createNanoTDF(opts: CreateNanoTDFOptions): Promise<DecoratedStream> {
+    opts = { ...this.defaultCreateOptions, ...opts };
+    const collection = await this.createNanoTDFCollection(opts);
+    try {
+      return await collection.encrypt(opts.source);
+    } finally {
+      await collection.close();
+    }
+  }
+
+  /**
+   * Creates a new collection object, which can be used to encrypt a series of data with the same policy.
+   * @returns
+   */
+  async createNanoTDFCollection(opts: CreateNanoTDFCollectionOptions): Promise<NanoTDFCollection> {
+    opts = { ...this.defaultCreateOptions, ...opts };
+    return new Collection(this.authProvider, opts);
+  }
+
+  async createZTDF(opts: CreateZTDFOptions): Promise<DecoratedStream> {
+    opts = { ...this.defaultCreateOptions, ...opts };
+    const oldStream = await this.tdf3Client.encrypt({
+      source: await sourceToStream(opts.source),
+
+      assertionConfigs: opts.assertionConfigs,
+      autoconfigure: !!opts.autoconfigure,
+      defaultKASEndpoint: opts.defaultKASEndpoint,
+      byteLimit: opts.byteLimit,
+      mimeType: opts.mimeType,
+      scope: {
+        attributes: opts.attributes,
+      },
+      splitPlan: opts.splitPlan,
+      windowSize: opts.windowSize,
+    });
+    const stream: DecoratedStream = oldStream.stream;
+    stream.manifest = Promise.resolve(oldStream.manifest);
+    stream.metadata = Promise.resolve(oldStream.metadata);
+    return stream;
+  }
+
+  /**
+   * Decrypts a nanotdf object. Optionally, stores the collection header and its DEK.
+   * @param ciphertext
+   */
+  async read(opts: ReadOptions): Promise<DecoratedStream> {
+    opts = { ...this.defaultReadOptions, ...opts };
+    const chunker = await fromSource(opts.source);
+    const prefix = await chunker(0, 3);
+    // switch for prefix, if starts with `PK` in ascii, or `L1L` in ascii:
+    if (prefix[0] === 0x50 && prefix[1] === 0x4b) {
+      const allowList = new OriginAllowList(opts.allowedKASEndpoints ?? [], opts.ignoreAllowlist);
+      let assertionVerificationKeys: AssertionVerificationKeys | undefined;
+      if (opts.verifiers && !opts.noVerify) {
+        assertionVerificationKeys = { Keys: {} };
+        for (const [keyID, key] of Object.entries(opts.verifiers)) {
+          if ((key as CryptoKeyPair).publicKey) {
+            const pk = (key as CryptoKeyPair).publicKey;
+            const algName = pk.algorithm.name;
+            const alg = algName.startsWith('EC') ? 'ES256' : 'RS256';
+            assertionVerificationKeys.Keys[keyID] = {
+              alg,
+              key: pk,
+            };
+          } else {
+            const k = key as CryptoKey;
+            const algName = k.algorithm.name;
+            const alg = algName.startsWith('AES')
+              ? 'HS256'
+              : algName.startsWith('EC')
+                ? 'ES256'
+                : 'RS256';
+            assertionVerificationKeys.Keys[keyID] = {
+              alg,
+              key: k,
+            };
+          }
+        }
+      }
+      const oldStream = await this.tdf3Client.decrypt({
+        source: opts.source,
+        allowList,
+        assertionVerificationKeys,
+        noVerifyAssertions: opts.noVerify,
+      });
+      const stream: DecoratedStream = oldStream.stream;
+      stream.metadata = Promise.resolve(oldStream.metadata);
+      return stream;
+    } else if (prefix[0] === 0x4c && prefix[1] === 0x31 && prefix[2] === 0x4c) {
+      const ciphertext = await chunker();
+      const nanotdf = NanoTDF.from(ciphertext);
+      const cachedDEK = this.headerCache.get(nanotdf.header.ephemeralPublicKey);
+      if (cachedDEK) {
+        const r: DecoratedStream = await streamify(decryptNanoTDF(cachedDEK, nanotdf));
+        r.header = nanotdf.header;
+        return r;
+      }
+      const nc = new Client({
+        allowedKases: opts.allowedKASEndpoints,
+        authProvider: this.authProvider,
+        ignoreAllowList: opts.ignoreAllowlist,
+        dpopEnabled: this.dpopEnabled,
+        dpopKeys: this.dpopKeys,
+        kasEndpoint: opts.allowedKASEndpoints?.[0] || 'https://disallow.all.invalid',
+      });
+      // TODO: The version number should be fetched from the API
+      const version = '0.0.1';
+      // Rewrap key on every request
+      const dek = await nc.rewrapKey(
+        nanotdf.header.toBuffer(),
+        nanotdf.header.getKasRewrapUrl(),
+        nanotdf.header.magicNumberVersion,
+        version
+      );
+      if (!dek) {
+        // These should have thrown already.
+        throw new Error('internal: key rewrap failure');
+      }
+      this.headerCache.set(nanotdf.header.ephemeralPublicKey, dek);
+      const r: DecoratedStream = await streamify(decryptNanoTDF(dek, nanotdf));
+      r.header = nanotdf.header;
+      return r;
+    }
+    throw new InvalidFileError(`unsupported format; prefix not recognized ${prefix}`);
+  }
+
+  close() {
+    this.headerCache.close();
+  }
+}
+
+async function streamify(ab: Promise<ArrayBuffer>): Promise<ReadableStream<Uint8Array>> {
+  const stream = new ReadableStream({
+    start(controller) {
+      ab.then((arrayBuffer) => {
+        controller.enqueue(new Uint8Array(arrayBuffer));
+        controller.close();
+      });
+    },
+  });
+  return stream;
+}
+
+export type NanoTDFCollection = {
+  encrypt: (source: Source) => Promise<DecoratedStream>;
+  close: () => Promise<void>;
+};
+
+class Collection {
+  client?: NanoTDFDatasetClient;
+
+  constructor(authProvider: AuthProvider, opts: CreateNanoTDFCollectionOptions) {
+    if (opts.signers || opts.signingKeyID) {
+      throw new ConfigurationError('ntdf signing not implemented');
+    }
+    if (opts.autoconfigure) {
+      throw new ConfigurationError('autoconfigure not implemented');
+    }
+    if (opts.ecdsaBindingKeyID) {
+      throw new ConfigurationError('custom binding key not implemented');
+    }
+
+    this.client = new NanoTDFDatasetClient({
+      authProvider,
+      kasEndpoint: opts.defaultKASEndpoint ?? 'https://disallow.all.invalid',
+      maxKeyIterations: opts.maxKeyIterations,
+    });
+  }
+
+  async encrypt(source: Source): Promise<DecoratedStream> {
+    if (!this.client) {
+      throw new ConfigurationError('Collection is closed');
+    }
+    const chunker = await fromSource(source);
+    const cipherChunk = await this.client.encrypt(await chunker());
+    const stream: DecoratedStream = new ReadableStream({
+      start(controller) {
+        controller.enqueue(new Uint8Array(cipherChunk));
+        controller.close();
+      },
+    });
+    // TODO: client's header object is private
+    // stream.header = this.client.header;
+    return stream;
+  }
+
+  async close() {
+    delete this.client;
+  }
+}
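The format dispatch in `read` hinges on two magic-number prefixes. The same rule as a standalone function, for quick reference:

```ts
// ZIP containers (ZTDF) begin with 'PK'; nanotdf begins with 'L1L'.
function sniffContainer(prefix: Uint8Array): 'ztdf' | 'nano' | 'unknown' {
  if (prefix[0] === 0x50 && prefix[1] === 0x4b) return 'ztdf'; // 'P', 'K'
  if (prefix[0] === 0x4c && prefix[1] === 0x31 && prefix[2] === 0x4c) return 'nano'; // 'L', '1', 'L'
  return 'unknown';
}
```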
diff --git a/lib/src/utils.ts b/lib/src/utils.ts
index 1d3d9560..80830d93 100644
--- a/lib/src/utils.ts
+++ b/lib/src/utils.ts
@@ -45,8 +45,6 @@ export function isBrowser() {
   return typeof window !== 'undefined'; // eslint-disable-line
 }
 
-export const isFirefox = (): boolean => isBrowser() && 'InstallTrigger' in window;
-
 export const rstrip = (str: string, suffix = ' '): string => {
   while (str && suffix && str.endsWith(suffix)) {
     str = str.slice(0, -suffix.length);
diff --git a/lib/tdf3/index.ts b/lib/tdf3/index.ts
index edde46dd..114494b7 100644
--- a/lib/tdf3/index.ts
+++ b/lib/tdf3/index.ts
@@ -33,7 +33,7 @@ import {
   AuthProviders,
   version,
   clientType,
-} from '../src/index.js';
+} from '../src/nanoindex.js';
 
 import { Algorithms, type AlgorithmName, type AlgorithmUrn } from './src/ciphers/algorithms.js';
 import { type Chunker } from '../src/seekable.js';
@@ -82,3 +82,15 @@ export {
 };
 
 export * as WebCryptoService from './src/crypto/index.js';
+export {
+  type CreateNanoTDFCollectionOptions,
+  type CreateNanoTDFOptions,
+  type CreateOptions,
+  type CreateZTDFOptions,
+  type DecoratedStream,
+  type Keys,
+  type OpenTDFOptions,
+  type NanoTDFCollection,
+  type ReadOptions,
+  OpenTDF,
+} from '../src/opentdf.js';
diff --git a/lib/tdf3/src/assertions.ts b/lib/tdf3/src/assertions.ts
index be3aa833..b1f1b436 100644
--- a/lib/tdf3/src/assertions.ts
+++ b/lib/tdf3/src/assertions.ts
@@ -3,7 +3,7 @@ import { type KeyLike, SignJWT, jwtVerify } from 'jose';
 import { base64, hex } from '../../src/encodings/index.js';
 import { ConfigurationError, IntegrityError, InvalidFileError } from '../../src/errors.js';
 
-export type AssertionKeyAlg = 'RS256' | 'HS256';
+export type AssertionKeyAlg = 'ES256' | 'RS256' | 'HS256';
 export type AssertionType = 'handling' | 'other';
 export type Scope = 'tdo' | 'payload';
 export type AppliesToState = 'encrypted' | 'unencrypted';
diff --git a/lib/tdf3/src/client/builders.ts b/lib/tdf3/src/client/builders.ts
index cae1d5f3..006a5610 100644
--- a/lib/tdf3/src/client/builders.ts
+++ b/lib/tdf3/src/client/builders.ts
@@ -4,11 +4,12 @@ import { type Metadata } from '../tdf.js';
 import { Binary } from '../binary.js';
 
 import { ConfigurationError } from '../../../src/errors.js';
-import { type Chunker } from '../../../src/seekable.js';
 import { PemKeyPair } from '../crypto/declarations.js';
 import { DecoratedReadableStream } from './DecoratedReadableStream.js';
+import { type Chunker } from '../../../src/seekable.js';
 import { AssertionConfig, AssertionVerificationKeys } from '../assertions.js';
 import { Value } from '../../../src/policy/attributes.js';
+import { OriginAllowList } from '../../../src/access.js';
 
 export const DEFAULT_SEGMENT_SIZE: number = 1024 * 1024;
 export type Scope = {
@@ -34,6 +35,7 @@ export type SplitStep = {
 };
 
 export type EncryptParams = {
+  byteLimit?: number;
   source: ReadableStream<Uint8Array>;
   opts?: { keypair: PemKeyPair };
   autoconfigure?: boolean;
@@ -48,6 +50,7 @@ export type EncryptParams = {
   splitPlan?: SplitStep[];
   streamMiddleware?: EncryptStreamMiddleware;
   assertionConfigs?: AssertionConfig[];
+  defaultKASEndpoint?: string;
 
   // Unsupported
   asHtml?: boolean;
@@ -500,6 +503,7 @@ export type DecryptSource =
 
 export type DecryptParams = {
   source: DecryptSource;
+  allowList?: OriginAllowList;
   keyMiddleware?: DecryptKeyMiddleware;
   streamMiddleware?: DecryptStreamMiddleware;
   assertionVerificationKeys?: AssertionVerificationKeys;
diff --git a/lib/tdf3/src/client/index.ts b/lib/tdf3/src/client/index.ts
index dbdadc58..7d03d662 100644
--- a/lib/tdf3/src/client/index.ts
+++ b/lib/tdf3/src/client/index.ts
@@ -40,7 +40,6 @@ import {
 } from './builders.js';
 import { KasPublicKeyInfo, OriginAllowList } from '../../../src/access.js';
 import { ConfigurationError } from '../../../src/errors.js';
-import { type Chunker, fromBuffer, fromDataSource, fromSource } from '../../../src/seekable.js';
 import { Binary } from '../binary.js';
 import { AesGcmCipher } from '../ciphers/aes-gcm-cipher.js';
 import { toCryptoKeyPair } from '../crypto/crypto-utils.js';
@@ -49,6 +48,7 @@ import { type AttributeObject, type Policy, SplitKey } from '../models/index.js'
 import { plan } from '../../../src/policy/granter.js';
 import { attributeFQNsAsValues } from '../../../src/policy/api.js';
 import { type Value } from '../../../src/policy/attributes.js';
+import { type Chunker, fromBuffer, fromSource } from '../../../src/seekable.js';
 
 const GLOBAL_BYTE_LIMIT = 64 * 1000 * 1000 * 1000; // 64 GB, see WS-9363.
 
@@ -95,7 +95,7 @@ const makeChunkable = async (source: DecryptSource) => {
 
 export interface ClientConfig {
   cryptoService?: CryptoService;
-  organizationName?: string;
+  /// oauth client id; used to generate oauth authProvider
   clientId?: string;
   dpopEnabled?: boolean;
   dpopKeys?: Promise<CryptoKeyPair>;
@@ -333,7 +333,6 @@ export class Client {
    *
    * @param scope dissem and attributes for constructing the policy
    * @param source source object of unencrypted data
-   * @param [asHtml] If we should wrap the TDF data in a self-opening HTML wrapper. Defaults to false
    * @param [autoconfigure] If we should use scope.attributes to configure KAOs
    * @param [metadata] Additional non-secret data to store with the TDF
    * @param [opts] Test only
@@ -344,31 +343,28 @@ export class Client {
    * @param [eo] - (deprecated) entity object
    * @return a {@link https://nodejs.org/api/stream.html#stream_class_stream_readable|Readable} a new stream containing the TDF ciphertext
    */
-  async encrypt({
-    scope = { attributes: [], dissem: [] },
-    autoconfigure,
-    source,
-    asHtml,
-    metadata,
-    mimeType,
-    offline = true,
-    windowSize = DEFAULT_SEGMENT_SIZE,
-    keyMiddleware = defaultKeyMiddleware,
-    streamMiddleware = async (stream: DecoratedReadableStream) => stream,
-    splitPlan,
-    assertionConfigs = [],
-  }: EncryptParams): Promise<DecoratedReadableStream> {
-    if (!offline) {
+  async encrypt(opts: EncryptParams): Promise<DecoratedReadableStream> {
+    if (opts.offline === false) {
       throw new ConfigurationError('online mode not supported');
     }
-    if (asHtml) {
+    if (opts.asHtml) {
       throw new ConfigurationError('html mode not supported');
     }
     const dpopKeys = await this.dpopKeys;
+    const {
+      autoconfigure,
+      metadata,
+      mimeType = 'unknown',
+      windowSize = DEFAULT_SEGMENT_SIZE,
+      keyMiddleware = defaultKeyMiddleware,
+      streamMiddleware = async (stream: DecoratedReadableStream) => stream,
+    } = opts;
 
+    const scope = opts.scope ?? { attributes: [], dissem: [] };
     const policyObject = asPolicy(scope);
     validatePolicyObject(policyObject);
 
+    let splitPlan = opts.splitPlan;
     if (!splitPlan && autoconfigure) {
       let avs: Value[] = scope.attributeValues ?? [];
       const fqns: string[] = scope.attributes
@@ -427,9 +423,15 @@ export class Client {
 
     // TODO: Refactor underlying builder to remove some of this unnecessary config.
-    const byteLimit = GLOBAL_BYTE_LIMIT;
+    const maxByteLimit = GLOBAL_BYTE_LIMIT;
+    const byteLimit =
+      opts.byteLimit === undefined || opts.byteLimit <= 0 || opts.byteLimit > maxByteLimit
+        ? maxByteLimit
+        : opts.byteLimit;
     const encryptionInformation = new SplitKey(new AesGcmCipher(this.cryptoService));
-    const splits: SplitStep[] = splitPlan?.length ? splitPlan : [{ kas: this.kasEndpoint }];
+    const splits: SplitStep[] = splitPlan?.length
+      ? splitPlan
+      : [{ kas: opts.defaultKASEndpoint ?? this.kasEndpoint }];
     encryptionInformation.keyAccess = await Promise.all(
       splits.map(async ({ kas, sid }) => {
         if (!(kas in this.kasKeys)) {
@@ -437,7 +439,7 @@ export class Client {
         }
         const kasPublicKey = await this.kasKeys[kas];
         return buildKeyAccess({
-          type: offline ? 'wrapped' : 'remote',
+          type: 'wrapped',
           url: kasPublicKey.url,
           kid: kasPublicKey.kid,
           publicKey: kasPublicKey.publicKey,
@@ -456,14 +458,14 @@ export class Client {
       segmentSizeDefault: windowSize,
       integrityAlgorithm: 'HS256',
       segmentIntegrityAlgorithm: 'GMAC',
-      contentStream: source,
+      contentStream: opts.source,
       mimeType,
       policy: policyObject,
       authProvider: this.authProvider,
       progressHandler: this.clientConfig.progressHandler,
       keyForEncryption,
       keyForManifest,
-      assertionConfigs,
+      assertionConfigs: opts.assertionConfigs,
     };
 
     return (streamMiddleware as EncryptStreamMiddleware)(await writeStream(ecfg));
@@ -482,6 +484,7 @@ export class Client {
    */
   async decrypt({
     source,
+    allowList,
     keyMiddleware = async (key: Binary) => key,
     streamMiddleware = async (stream: DecoratedReadableStream) => stream,
     assertionVerificationKeys,
@@ -493,12 +496,15 @@ export class Client {
       throw new ConfigurationError('AuthProvider missing');
     }
     const chunker = await makeChunkable(source);
+    if (!allowList) {
+      allowList = this.allowedKases;
+    }
 
     // Await in order to catch any errors from this call.
     // TODO: Write error event to stream and don't await.
     return await (streamMiddleware as DecryptStreamMiddleware)(
       await readStream({
-        allowList: this.allowedKases,
+        allowList,
         authProvider: this.authProvider,
         chunker,
         concurrencyLimit,
@@ -540,12 +546,4 @@ export class Client {
 
 export type { AuthProvider };
 
-export {
-  DecryptParamsBuilder,
-  DecryptSource,
-  EncryptParamsBuilder,
-  HttpRequest,
-  fromDataSource,
-  fromSource,
-  withHeaders,
-};
+export { DecryptParamsBuilder, DecryptSource, EncryptParamsBuilder, HttpRequest, withHeaders };
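The new `byteLimit` clamp above silently falls back to the global cap for missing, non-positive, or oversized requests. The same rule restated as a standalone check, with the constant copied from the diff:

```ts
const GLOBAL_BYTE_LIMIT = 64 * 1000 * 1000 * 1000; // 64 GB

function effectiveByteLimit(requested?: number): number {
  return requested === undefined || requested <= 0 || requested > GLOBAL_BYTE_LIMIT
    ? GLOBAL_BYTE_LIMIT
    : requested;
}

console.assert(effectiveByteLimit() === GLOBAL_BYTE_LIMIT); // no limit requested
console.assert(effectiveByteLimit(-1) === GLOBAL_BYTE_LIMIT); // non-positive ignored
console.assert(effectiveByteLimit(1024) === 1024); // honored below the cap
console.assert(effectiveByteLimit(GLOBAL_BYTE_LIMIT * 2) === GLOBAL_BYTE_LIMIT); // capped
```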
diff --git a/lib/tdf3/src/tdf.ts b/lib/tdf3/src/tdf.ts
index d3452a5f..2fb059dd 100644
--- a/lib/tdf3/src/tdf.ts
+++ b/lib/tdf3/src/tdf.ts
@@ -22,7 +22,6 @@ import { base64 } from '../../src/encodings/index.js';
 import { ZipReader, ZipWriter, keyMerge, buffToString, concatUint8 } from './utils/index.js';
 import { Binary } from './binary.js';
 import { KasPublicKeyAlgorithm, KasPublicKeyInfo, OriginAllowList } from '../../src/access.js';
-import { allPool, anyPool } from '../../src/concurrency.js';
 import {
   ConfigurationError,
   DecryptError,
@@ -32,7 +31,6 @@ import {
   UnsafeUrlError,
   UnsupportedFeatureError as UnsupportedError,
 } from '../../src/errors.js';
-import { type Chunker } from '../../src/seekable.js';
 
 // configurable
 // TODO: remove dependencies from ciphers so that we can open-source instead of relying on other Virtru libs
@@ -42,6 +40,8 @@ import { PolicyObject } from '../../src/tdf/PolicyObject.js';
 import { type CryptoService, type DecryptResult } from './crypto/declarations.js';
 import { CentralDirectory } from './utils/zip-reader.js';
 import { SymmetricCipher } from './ciphers/symmetric-cipher-base.js';
+import { allPool, anyPool } from '../../src/concurrency.js';
+import { type Chunker } from '../../src/seekable.js';
 
 // TODO: input validation on manifest JSON
 const DEFAULT_SEGMENT_SIZE = 1024 * 1024;
@@ -102,7 +102,6 @@ type Chunk = {
 export type IntegrityAlgorithm = 'GMAC' | 'HS256';
 
 export type EncryptConfiguration = {
-  allowedKases?: string[];
   allowList?: OriginAllowList;
   cryptoService: CryptoService;
   dpopKeys: CryptoKeyPair;
diff --git a/lib/tdf3/src/utils/zip-reader.ts b/lib/tdf3/src/utils/zip-reader.ts
index a09aac38..d3accd6e 100644
--- a/lib/tdf3/src/utils/zip-reader.ts
+++ b/lib/tdf3/src/utils/zip-reader.ts
@@ -1,7 +1,6 @@
 import { InvalidFileError } from '../../../src/errors.js';
-import { Manifest } from '../models/index.js';
 import { type Chunker } from '../../../src/seekable.js';
-
+import { Manifest } from '../models/index.js';
 import { readUInt32LE, readUInt16LE, copyUint8Arr, buffToString } from './index.js';
 
 // TODO: Better document what these constants are
diff --git a/lib/tests/web/nano-roundtrip.test.ts b/lib/tests/web/nano-roundtrip.test.ts
index e0a55e3f..4fa96678 100644
--- a/lib/tests/web/nano-roundtrip.test.ts
+++ b/lib/tests/web/nano-roundtrip.test.ts
@@ -1,7 +1,7 @@
 import { expect } from '@esm-bundle/chai';
 import { type AuthProvider, HttpRequest, withHeaders } from '../../src/auth/auth.js';
 
-import { NanoTDFClient } from '../../src/index.js';
+import { NanoTDFClient } from '../../src/nanoclients.js';
 import NanoTDF from '../../src/nanotdf/NanoTDF.js';
 
 const authProvider = <AuthProvider>{
diff --git a/lib/tests/web/roundtrip.test.ts b/lib/tests/web/roundtrip.test.ts
new file mode 100644
index 00000000..6ca65c00
--- /dev/null
+++ b/lib/tests/web/roundtrip.test.ts
@@ -0,0 +1,83 @@
+import { expect } from '@esm-bundle/chai';
+import { type AuthProvider, HttpRequest, withHeaders } from '../../src/auth/auth.js';
+
+import { NanoTDFClient } from '../../src/nanoclients.js';
+import NanoTDF from '../../src/nanotdf/NanoTDF.js';
+import { OpenTDF } from '../../src/opentdf.js';
+import { fromString } from '../../src/seekable.js';
+
+const authProvider = <AuthProvider>{
+  updateClientPublicKey: async () => {
+    /* mocked function */
+  },
+  withCreds: async (req: HttpRequest): Promise<HttpRequest> =>
+    withHeaders(req, {
+      Authorization:
+        'Bearer dummy-auth-token eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpc3MiOiJ0ZGYiLCJzdWIiOiJKb2huIERvZSIsImlhdCI6MTUxNjIzOTAyMn0.XFu4sQxAd6n-b7urqTdQ-I9zKqKSQtC04unHsMSpJjc',
+    }),
+};
+
+const kasEndpoint = 'http://localhost:3000';
+
+describe('Local roundtrip Tests', () => {
+  it(`ztdf roundtrip string`, async () => {
+    const client = new OpenTDF({
+      authProvider,
+      defaultReadOptions: {
+        allowedKASEndpoints: [kasEndpoint],
+      },
+    });
+    const cipherTextStream = await client.createZTDF({
+      autoconfigure: false,
+      defaultKASEndpoint: kasEndpoint,
+      source: { type: 'chunker', location: fromString('hello world') },
+    });
+    const cipherManifest = await cipherTextStream.manifest;
+    expect(cipherManifest?.encryptionInformation?.keyAccess[0]?.url).to.equal(kasEndpoint);
+    const cipherTextArray = new Uint8Array(await new Response(cipherTextStream).arrayBuffer());
+
+    const nanotdfParsed = await client.read({
+      source: { type: 'buffer', location: cipherTextArray },
+    });
+    expect(await nanotdfParsed.metadata).to.contain({ hello: 'world' });
+
+    const actual = await new Response(nanotdfParsed).arrayBuffer();
+    expect(new TextDecoder().decode(actual)).to.be.equal('hello world');
+  });
+  for (const ecdsaBinding of [false, true]) {
+    const bindingType = ecdsaBinding ? 'ecdsa' : 'gmac';
+    it(`nano roundtrip string (${bindingType} policy binding)`, async () => {
+      const client = new OpenTDF({
+        authProvider,
+        defaultReadOptions: {
+          allowedKASEndpoints: [kasEndpoint],
+        },
+      });
+      const cipherText = await client.createNanoTDF({
+        bindingType,
+        defaultKASEndpoint: kasEndpoint,
+        source: { type: 'chunker', location: fromString('hello world') },
+      });
+      const nanotdfParsed = await client.read({
+        source: { type: 'stream', location: cipherText },
+      });
+      expect(nanotdfParsed.header?.kas?.url).to.equal(kasEndpoint);
+      expect(nanotdfParsed.header?.kas?.identifier).to.equal('e1');
+
+      const actual = await new Response(nanotdfParsed).arrayBuffer();
+      expect(new TextDecoder().decode(actual)).to.be.equal('hello world');
+    });
+    it(`roundtrip string (${bindingType} policy binding, deprecated API)`, async () => {
+      const client = new NanoTDFClient({ authProvider, kasEndpoint });
+      const cipherText = await client.encrypt('hello world', { ecdsaBinding });
+      const client2 = new NanoTDFClient({ authProvider, kasEndpoint });
+      const nanotdfParsed = NanoTDF.from(cipherText);
+
+      expect(nanotdfParsed.header.kas.url).to.equal(kasEndpoint);
+      expect(nanotdfParsed.header.kas.identifier).to.equal('e1');
+
+      const actual = await client2.decrypt(cipherText);
+      expect(new TextDecoder().decode(actual)).to.be.equal('hello world');
+    });
+  }
+});
diff --git a/lib/tests/web/utils.test.ts b/lib/tests/web/utils.test.ts
index ef02662a..565f1629 100644
--- a/lib/tests/web/utils.test.ts
+++ b/lib/tests/web/utils.test.ts
@@ -8,6 +8,32 @@ import {
   rstrip,
   validateSecureUrl,
 } from '../../src/utils.js';
+import { TdfError } from '../../src/errors.js';
+
+describe('errors', () => {
+  it('Avoids errors due to loops', () => {
+    const cause = new Error();
+    cause.message = 'my message';
+    (cause as unknown as Record<string, string>).extra = 'some_stuff';
+    cause.cause = cause;
+    try {
+      throw new TdfError('message', cause);
+    } catch (e) {
+      expect(() => {
+        throw e;
+      }).to.throw('message');
+      expect(e.cause.extra).to.be.undefined;
+      expect(e.cause.message).to.equal('my message');
+      expect(e.cause.stack).to.equal(cause.stack);
+      expect(e.cause.stack).to.equal(cause.stack);
+      expect(e.cause.cause.stack).to.equal(cause.stack);
+      expect(e.cause.cause.cause.stack).to.equal(cause.stack);
+      expect(e.cause.cause.cause.cause.stack).to.equal(cause.stack);
+      expect(e.cause.cause.cause.cause.cause.stack).to.equal(cause.stack);
+      expect(e.cause.cause.cause.cause.cause.cause).to.be.undefined;
+    }
+  });
+});
 
 describe('rstrip', () => {
   describe('default', () => {
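The loop test above pins down an invariant without showing its implementation (errors.ts is not part of this diff). A hedged sketch of a sanitizer that would satisfy it: copy only the safe fields from each cause and cap the chain depth so a self-referential `cause` cannot recurse forever.

```ts
// Sketch only; the real TdfError internals may differ.
function sanitizeCause(err?: Error, depth = 4): Error | undefined {
  if (!err || depth < 0) return undefined; // cap the chain; drops `extra` and loops
  const copy = new Error(err.message);
  copy.stack = err.stack;
  copy.cause = sanitizeCause(err.cause as Error | undefined, depth - 1);
  return copy;
}
```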
"version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/streamsaver/-/streamsaver-2.0.5.tgz", + "integrity": "sha512-93o0zjV8swEhR2YI57h/2ytbJF8bJh7sI9GNB02TLJHdM4fWDxZuChwfWhyD8vt2ub4kw4rsfZ0C0yAUX+3gcg==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/wicg-file-system-access": { "version": "2023.10.5", "resolved": "https://registry.npmjs.org/@types/wicg-file-system-access/-/wicg-file-system-access-2023.10.5.tgz", @@ -4846,7 +4854,7 @@ }, "@opentdf/sdk": { "version": "file:../lib/opentdf-sdk-0.2.0.tgz", - "integrity": "sha512-Ic6Tl6tV/TI9JPyjAnfywPen0t78JSkiupDKdpSVa2ZW8B69yU0oh65aC8oxniZJ57krRIFn/HxY1lX0HTk+TQ==", + "integrity": "sha512-arltrgai5s33oBWQe2/NVkspyiAkbWV/T6HWk4JM5s933934kaXdOHvClEkzYx4989QX+ghl5E+iwxzK+2Ysuw==", "requires": { "browser-fs-access": "^0.34.1", "buffer-crc32": "^0.2.13", @@ -5101,6 +5109,12 @@ "version": "7.5.0", "dev": true }, + "@types/streamsaver": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@types/streamsaver/-/streamsaver-2.0.5.tgz", + "integrity": "sha512-93o0zjV8swEhR2YI57h/2ytbJF8bJh7sI9GNB02TLJHdM4fWDxZuChwfWhyD8vt2ub4kw4rsfZ0C0yAUX+3gcg==", + "dev": true + }, "@types/wicg-file-system-access": { "version": "2023.10.5", "resolved": "https://registry.npmjs.org/@types/wicg-file-system-access/-/wicg-file-system-access-2023.10.5.tgz", diff --git a/web-app/package.json b/web-app/package.json index ed0a57ff..8c30ca54 100644 --- a/web-app/package.json +++ b/web-app/package.json @@ -27,6 +27,7 @@ "@rollup/plugin-inject": "^5.0.5", "@types/react": "^18.3.12", "@types/react-dom": "^18.3.1", + "@types/streamsaver": "^2.0.1", "@types/wicg-file-system-access": "^2023.10.5", "@typescript-eslint/eslint-plugin": "^6.2.1", "@typescript-eslint/parser": "^6.2.1", diff --git a/web-app/src/App.tsx b/web-app/src/App.tsx index cd9b3c48..f2aeb2d4 100644 --- a/web-app/src/App.tsx +++ b/web-app/src/App.tsx @@ -3,7 +3,7 @@ import { useState, useEffect, type ChangeEvent } from 'react'; import streamsaver from 'streamsaver'; import { showSaveFilePicker } from 'native-file-system-adapter'; import './App.css'; -import { type Chunker, type DecryptSource, NanoTDFClient, TDF3Client } from '@opentdf/sdk'; +import { type Chunker, type Source, OpenTDF } from '@opentdf/sdk'; import { type SessionInformation, OidcClient } from './session.js'; import { c } from './config.js'; @@ -49,17 +49,6 @@ function decryptedFileExtension(encryptedFileName: string): string { const oidcClient = new OidcClient(c.oidc.host, c.oidc.clientId, 'otdf-sample-web-app'); -function saver(blob: Blob, name: string) { - const a = document.createElement('a'); - a.download = name; - a.rel = 'noopener'; - a.href = URL.createObjectURL(blob); - setTimeout(function () { - URL.revokeObjectURL(a.href); - }, 4e4); // 40s - a.dispatchEvent(new MouseEvent('click')); -} - async function getNewFileHandle( extension: string, suggestedName: string @@ -157,18 +146,6 @@ function randomStream({ length }: RandomInputSource): ReadableStream }, }); } -function randomArrayBuffer({ length }: RandomInputSource): ArrayBuffer { - const maxSize = 16 * 2 ** 20; - if (length >= maxSize || length < 0) { - throw new Error(`Invalid size for random buffer: [${length}]`); - } - const maxChunkSize = 65536; - const value = new Uint8Array(length); - for (let i = 0; i < length; i += maxChunkSize) { - crypto.getRandomValues(value.slice(i, i + maxChunkSize)); - } - return value; -} function randomChunker({ length }: RandomInputSource): Chunker { const maxChunkSize = 2 ** 20; @@ -221,7 +198,6 @@ function 
humanReadableDurationEstimate(ms: number) { function App() { const [authState, setAuthState] = useState({ sessionState: 'start' }); - const [decryptContainerType, setDecryptContainerType] = useState('tdf'); const [downloadState, setDownloadState] = useState(); const [encryptContainerType, setEncryptContainerType] = useState('tdf'); const [inputSource, setInputSource] = useState(); @@ -229,7 +205,7 @@ function App() { const [streamController, setStreamController] = useState(); const handleContainerFormatRadioChange = - (handler: typeof setDecryptContainerType) => (e: ChangeEvent) => { + (handler: typeof setEncryptContainerType) => (e: ChangeEvent) => { handler(e.target.value as Containers); }; @@ -347,117 +323,97 @@ function App() { } const inputFileName = fileNameFor(inputSource); console.log(`Encrypting [${inputFileName}] as ${encryptContainerType} to ${sinkType}`); - switch (encryptContainerType) { - case 'nano': { - if ('url' in inputSource) { - throw new Error('Unsupported : fetch the url I guess?'); + + const sc = new AbortController(); + setStreamController(sc); + let source: ReadableStream, size: number; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = inputSource.file.stream() as unknown as ReadableStream; + break; + case 'bytes': + size = inputSource.length; + source = randomStream(inputSource); + break; + case 'url': + const fr = await fetch(inputSource.url, { signal: sc.signal }); + if (!fr.ok) { + throw Error( + `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); } - const plainText = - 'file' in inputSource - ? await inputSource.file.arrayBuffer() - : randomArrayBuffer(inputSource); - const nanoClient = new NanoTDFClient({ - authProvider: oidcClient, - kasEndpoint: c.kas, - dpopKeys: oidcClient.getSigningKey(), - }); - setDownloadState('Encrypting...'); - switch (sinkType) { - case 'file': - { - const cipherText = await nanoClient.encrypt(plainText); - saver(new Blob([cipherText]), `${inputFileName}.ntdf`); - } - break; - case 'fsapi': - { - const file = await getNewFileHandle('ntdf', `${inputFileName}.ntdf`); - const cipherText = await nanoClient.encrypt(plainText); - const writable = await file.createWritable(); - try { - await writable.write(cipherText); - setDownloadState('Encrypt Complete'); - } catch (e) { - setDownloadState(`Encrypt Failed: ${e}`); - } finally { - await writable.close(); - } - } - break; - case 'none': - break; + if (!fr.body) { + throw Error( + `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` + ); } + size = parseInt(fr.headers.get('Content-Length') || '-1'); + source = fr.body; break; - } - case 'tdf': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, + } + + const client = new OpenTDF({ + authProvider: oidcClient, + defaultCreateOptions: { + defaultKASEndpoint: c.kas, + }, + dpopKeys: oidcClient.getSigningKey(), + }); + setDownloadState('Encrypting...'); + let f: FileSystemFileHandle | undefined; + const downloadName = `${inputFileName}.tdf`; + if (sinkType === 'fsapi') { + f = await getNewFileHandle('tdf', downloadName); + } + const progressTransformers = makeProgressPair(size, 'Encrypt'); + + let cipherText: ReadableStream; + switch (encryptContainerType) { + case 'nano': + cipherText = await client.createNanoTDF({ + source: { type: 'stream', location: source }, }); - const sc = new AbortController(); - setStreamController(sc); - let source: 
ReadableStream, size: number; - switch (inputSource.type) { - case 'file': - size = inputSource.file.size; - source = inputSource.file.stream() as unknown as ReadableStream; - break; - case 'bytes': - size = inputSource.length; - source = randomStream(inputSource); - break; - case 'url': - const fr = await fetch(inputSource.url, { signal: sc.signal }); - if (!fr.ok) { - throw Error( - `Error on fetch [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - if (!fr.body) { - throw Error( - `Failed to fetch input [${inputSource.url}]: ${fr.status} code received; [${fr.statusText}]` - ); - } - size = parseInt(fr.headers.get('Content-Length') || '-1'); - source = fr.body; - break; - } + break; + case 'tdf': try { - let f; - const downloadName = `${inputFileName}.tdf`; - if (sinkType === 'fsapi') { - f = await getNewFileHandle('tdf', downloadName); - } - const progressTransformers = makeProgressPair(size, 'Encrypt'); - const cipherText = await client.encrypt({ - source: source.pipeThrough(progressTransformers.reader), - offline: true, + cipherText = await client.createZTDF({ + autoconfigure: false, + source: { type: 'stream', location: source.pipeThrough(progressTransformers.reader) }, }); - cipherText.stream = cipherText.stream.pipeThrough(progressTransformers.writer); - switch (sinkType) { - case 'file': - await toFile(cipherText.stream, downloadName, { signal: sc.signal }); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await cipherText.stream.pipeTo(writable, { signal: sc.signal }); - break; - case 'none': - await cipherText.stream.pipeTo(drain(), { signal: sc.signal }); - break; - } } catch (e) { setDownloadState(`Encrypt Failed: ${e}`); console.error('Encrypt Failed', e); + return; } - setStreamController(undefined); break; + default: + setDownloadState(`Unsupported type`); + console.error('Encrypt Failed'); + return; + } + const cipherTextWithProgress = cipherText.pipeThrough(progressTransformers.writer); + try { + switch (sinkType) { + case 'file': + await toFile(cipherTextWithProgress, downloadName, { signal: sc.signal }); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + await cipherTextWithProgress.pipeTo(writable, { signal: sc.signal }); + break; + case 'none': + await cipherTextWithProgress.pipeTo(drain(), { signal: sc.signal }); + break; } + } catch (e) { + setDownloadState(`Encrypt Failed: ${e}`); + console.error('Encrypt Failed', e); } + setStreamController(undefined); return true; }; @@ -471,111 +427,68 @@ function App() { return false; } const dfn = decryptedFileName(fileNameFor(inputSource)); - console.log( - `Decrypting ${decryptContainerType} ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}` - ); - let f; + console.log(`Decrypting ${JSON.stringify(inputSource)} to ${sinkType} ${dfn}`); + let f: FileSystemFileHandle | undefined; if (sinkType === 'fsapi') { f = await getNewFileHandle(decryptedFileExtension(fileNameFor(inputSource)), dfn); } - switch (decryptContainerType) { - case 'tdf': { - const client = new TDF3Client({ - authProvider: oidcClient, - dpopKeys: oidcClient.getSigningKey(), - kasEndpoint: c.kas, - }); - try { - const sc = new AbortController(); - setStreamController(sc); - let source: DecryptSource; - let size: number; - switch (inputSource.type) { - case 'file': - size = inputSource.file.size; - source = { type: 'file-browser', location: inputSource.file }; - break; - case 'bytes': - size = inputSource.length; - 
source = { type: 'chunker', location: randomChunker(inputSource) }; - break; - case 'url': - const hr = await fetch(inputSource.url, { method: 'HEAD' }); - size = parseInt(hr.headers.get('Content-Length') || '-1'); - source = { type: 'remote', location: inputSource.url.toString() }; - break; - } - const progressTransformers = makeProgressPair(size, 'Decrypt'); - // XXX chunker doesn't have an equivalent 'stream' interaface - // so we kinda fake it with percentages by tracking output, which should - // strictly be smaller than the input file. - const plainText = await client.decrypt({ source }); - plainText.stream = plainText.stream - .pipeThrough(progressTransformers.reader) - .pipeThrough(progressTransformers.writer); - switch (sinkType) { - case 'file': - await toFile(plainText.stream, dfn, { signal: sc.signal }); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - await plainText.stream.pipeTo(writable, { signal: sc.signal }); - break; - case 'none': - await plainText.stream.pipeTo(drain(), { signal: sc.signal }); - break; - } - } catch (e) { - console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); - } - setStreamController(undefined); + const client = new OpenTDF({ + authProvider: oidcClient, + defaultReadOptions: { + allowedKASEndpoints: [c.kas], + }, + dpopKeys: oidcClient.getSigningKey(), + }); + + let source: Source; + let size: number; + switch (inputSource.type) { + case 'file': + size = inputSource.file.size; + source = { type: 'file-browser', location: inputSource.file }; break; - } - case 'nano': { - if ('url' in inputSource) { - throw new Error('Unsupported : fetch the url I guess?'); - } - const nanoClient = new NanoTDFClient({ - authProvider: oidcClient, - kasEndpoint: c.kas, - dpopKeys: oidcClient.getSigningKey(), - }); - try { - const cipherText = - 'file' in inputSource - ? await inputSource.file.arrayBuffer() - : randomArrayBuffer(inputSource); - const plainText = await nanoClient.decrypt(cipherText); - switch (sinkType) { - case 'file': - saver(new Blob([plainText]), dfn); - break; - case 'fsapi': - if (!f) { - throw new Error(); - } - const writable = await f.createWritable(); - try { - await writable.write(plainText); - setDownloadState('Decrypt Complete'); - } finally { - await writable.close(); - } - break; - case 'none': - break; - } - } catch (e) { - console.error('Decrypt Failed', e); - setDownloadState(`Decrypt Failed: ${e}`); - } + case 'bytes': + size = inputSource.length; + source = { type: 'chunker', location: randomChunker(inputSource) }; break; + case 'url': + const hr = await fetch(inputSource.url, { method: 'HEAD' }); + size = parseInt(hr.headers.get('Content-Length') || '-1'); + source = { type: 'remote', location: inputSource.url.toString() }; + break; + } + const progressTransformers = makeProgressPair(size, 'Decrypt'); + + const sc = new AbortController(); + setStreamController(sc); + // XXX chunker doesn't have an equivalent 'stream' interaface + // so we kinda fake it with percentages by tracking output, which should + // strictly be smaller than the input file. 
+ try { + const plainText = await client.read({ source }); + const plainTextStream = plainText + .pipeThrough(progressTransformers.reader) + .pipeThrough(progressTransformers.writer); + switch (sinkType) { + case 'file': + await toFile(plainTextStream, dfn, { signal: sc.signal }); + break; + case 'fsapi': + if (!f) { + throw new Error(); + } + const writable = await f.createWritable(); + await plainTextStream.pipeTo(writable, { signal: sc.signal }); + break; + case 'none': + await plainTextStream.pipeTo(drain(), { signal: sc.signal }); + break; } + } catch (e) { + console.error('Decrypt Failed', e); + setDownloadState(`Decrypt Failed: ${e}`); } + setStreamController(undefined); return false; }; @@ -752,27 +665,6 @@ function App() {

           Decrypt
 
-
-          {' '}
-
-          {' '}
-
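Note on the hunk above: with the per-container buttons removed, both ZTDF and NanoTDF inputs go through the single `client.read({ source })` call, and the UI no longer tracks `decryptContainerType`; container detection now happens inside `read`. A minimal sketch of that unified path, assuming a `client` constructed as in the App.tsx hunk above and a byte-oriented result stream; `readToBytes` is illustrative only, not part of this change:

    import { OpenTDF, type Source } from '@opentdf/sdk';

    // Collect a decrypted payload into memory. The UI and tests no longer
    // pass a container type, so 'tdf' vs 'nano' is resolved inside read().
    async function readToBytes(client: OpenTDF, file: File): Promise<Uint8Array> {
      const source: Source = { type: 'file-browser', location: file };
      const plainText = await client.read({ source });
      const chunks: Uint8Array[] = [];
      await plainText.pipeTo(
        new WritableStream<Uint8Array>({
          write: (chunk) => {
            chunks.push(chunk);
          },
        })
      );
      // Flatten the collected chunks into one contiguous buffer.
      const out = new Uint8Array(chunks.reduce((n, c) => n + c.length, 0));
      let offset = 0;
      for (const c of chunks) {
        out.set(c, offset);
        offset += c.length;
      }
      return out;
    }
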
diff --git a/web-app/tests/tests/roundtrip.spec.ts b/web-app/tests/tests/roundtrip.spec.ts
index 887442be..60e1aabc 100644
--- a/web-app/tests/tests/roundtrip.spec.ts
+++ b/web-app/tests/tests/roundtrip.spec.ts
@@ -24,11 +24,11 @@ test('login', async ({ page }) => {
 });
 
 const scenarios = {
-  nano: { encryptSelector: '#nanoEncrypt', decryptSelector: '#nanoDecrypt' },
-  tdf: { encryptSelector: '#zipEncrypt', decryptSelector: '#tdfDecrypt' },
+  nano: { encryptSelector: '#nanoEncrypt' },
+  tdf: { encryptSelector: '#zipEncrypt' },
 };
 
-for (const [name, { encryptSelector, decryptSelector }] of Object.entries(scenarios)) {
+for (const [name, { encryptSelector }] of Object.entries(scenarios)) {
   test(`roundtrip ${name}`, async ({ page }) => {
     page.on('download', (download) =>
       download.path().then((r) => console.log(`Saves ${download.suggestedFilename()} as ${r}`))
@@ -52,7 +52,6 @@ for (const [name, { encryptSelector, decryptSelector }] of Object.entries(scenar
     await page.locator('#clearFile').click();
     await loadFile(page, cipherTextPath);
     const plainDownloadPromise = page.waitForEvent('download');
-    await page.locator(decryptSelector).click();
     await page.locator('#fileSink').click();
     await page.locator('#decryptButton').click();
     const download2 = await plainDownloadPromise;
@@ -69,11 +68,11 @@ for (const [name, { encryptSelector, decryptSelector }] of Object.entries(scenar
 }
 
 test('Remote Source Streaming', async ({ page }) => {
-  const server = await serve('.', 8000);
+  const server = await serve('.', 8086);
   try {
     await authorize(page);
-    await page.locator('#urlSelector').fill('http://localhost:8000/README.md');
+    await page.locator('#urlSelector').fill('http://localhost:8086/README.md');
 
     const downloadPromise = page.waitForEvent('download');
     await page.locator('#zipEncrypt').click();
@@ -94,9 +93,8 @@ test('Remote Source Streaming', async ({ page }) => {
     fs.copyFileSync(cipherTextPath, targetPath);
 
     // Clear file selector and upload againg
-    await page.locator('#urlSelector').fill('http://localhost:8000/README.md.tdf');
+    await page.locator('#urlSelector').fill('http://localhost:8086/README.md.tdf');
     const plainDownloadPromise = page.waitForEvent('download');
-    await page.locator('#tdfDecrypt').click();
     await page.locator('#fileSink').click();
     await page.locator('#decryptButton').click();
     const download2 = await plainDownloadPromise;
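For completeness, the encrypt side these tests drive (the `#zipEncrypt` path) reduces to a single `createZTDF` call. A rough companion sketch under the same assumptions as above; `encryptFileToBlob` is a hypothetical helper for illustration, not part of this change:

    import { OpenTDF, type Source } from '@opentdf/sdk';

    // Wrap a browser File as a ZTDF payload collected into a Blob.
    async function encryptFileToBlob(client: OpenTDF, file: File): Promise<Blob> {
      const source: Source = { type: 'file-browser', location: file };
      // autoconfigure: false mirrors the web app's call above
      // (no attribute-driven autoconfiguration is requested).
      const cipherText = await client.createZTDF({ autoconfigure: false, source });
      const parts: BlobPart[] = [];
      await cipherText.pipeTo(
        new WritableStream<Uint8Array>({
          write: (chunk) => {
            parts.push(chunk);
          },
        })
      );
      return new Blob(parts, { type: 'application/octet-stream' });
    }

The resulting Blob could then be fed back through the `readToBytes` sketch above for a programmatic roundtrip, which is the same shape the Playwright scenarios exercise through the UI.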