diff --git a/cli/package-lock.json b/cli/package-lock.json index c4dae6db..9be5f34b 100644 --- a/cli/package-lock.json +++ b/cli/package-lock.json @@ -36,9 +36,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.25.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", - "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.7.tgz", + "integrity": "sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w==", "license": "MIT", "dependencies": { "regenerator-runtime": "^0.14.0" @@ -372,7 +372,7 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-KzELD2ECWzt5ci3cmqAb/AAHpPUtO8MibopZgxJioXgvuMyoA+t3VFlGxD7RenVx0Ecm1GjEHzs68IGjHN90rQ==", + "integrity": "sha512-LgL8v4mPmcu7nlCz/1C1Vl6WKYSlQM//bevC5u2J1AYmkyCys9A3qd7bgXIU6kMxbuU80B7DgmeSRRvp4qHrRQ==", "license": "BSD-3-Clause-Clear", "dependencies": { "axios": "^1.6.1", @@ -384,6 +384,7 @@ "dpop": "^1.2.0", "eventemitter3": "^5.0.1", "jose": "^4.14.4", + "json-canonicalize": "^1.0.6", "streamsaver": "^2.0.6", "uuid": "~9.0.0" } @@ -1651,9 +1652,9 @@ } }, "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", + "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", "license": "MIT", "dependencies": { "asynckit": "^0.4.0", @@ -2069,6 +2070,12 @@ "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", "dev": true }, + "node_modules/json-canonicalize": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/json-canonicalize/-/json-canonicalize-1.0.6.tgz", + "integrity": "sha512-kP2iYpOS5SZHYhIaR1t9oG80d4uTY3jPoaBj+nimy3njtJk8+sRsVatN8pyJRDRtk9Su3+6XqA2U8k0dByJBUQ==", + "license": "MIT" + }, "node_modules/json-parse-even-better-errors": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.2.tgz", diff --git a/lib/src/access.ts b/lib/src/access.ts index aa0def5f..3648839a 100644 --- a/lib/src/access.ts +++ b/lib/src/access.ts @@ -1,4 +1,11 @@ import { type AuthProvider } from './auth/auth.js'; +import { + InvalidFileError, + NetworkError, + PermissionDeniedError, + ServiceError, + UnauthenticatedError, +} from './errors.js'; import { pemToCryptoPublicKey, validateSecureUrl } from './utils.js'; export class RewrapRequest { @@ -32,22 +39,40 @@ export async function fetchWrappedKey( }, body: JSON.stringify(requestBody), }); - const response = await fetch(req.url, { - method: req.method, - mode: 'cors', // no-cors, *cors, same-origin - cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached - credentials: 'same-origin', // include, *same-origin, omit - headers: req.headers, - redirect: 'follow', // manual, *follow, error - referrerPolicy: 'no-referrer', // no-referrer, *no-referrer-when-downgrade, origin, origin-when-cross-origin, same-origin, strict-origin, strict-origin-when-cross-origin, unsafe-url - body: req.body as BodyInit, - }); - if (!response.ok) { - 
throw new Error(`${req.method} ${req.url} => ${response.status} ${response.statusText}`); - } + try { + const response = await fetch(req.url, { + method: req.method, + mode: 'cors', // no-cors, *cors, same-origin + cache: 'no-cache', // *default, no-cache, reload, force-cache, only-if-cached + credentials: 'same-origin', // include, *same-origin, omit + headers: req.headers, + redirect: 'follow', // manual, *follow, error + referrerPolicy: 'no-referrer', // no-referrer, *no-referrer-when-downgrade, origin, origin-when-cross-origin, same-origin, strict-origin, strict-origin-when-cross-origin, unsafe-url + body: req.body as BodyInit, + }); - return response.json(); + if (!response.ok) { + switch (response.status) { + case 400: + throw new InvalidFileError( + `400 for [${req.url}]: rewrap failure [${await response.text()}]` + ); + case 401: + throw new UnauthenticatedError(`401 for [${req.url}]`); + case 403: + throw new PermissionDeniedError(`403 for [${req.url}]`); + default: + throw new NetworkError( + `${req.method} ${req.url} => ${response.status} ${response.statusText}` + ); + } + } + + return response.json(); + } catch (e) { + throw new NetworkError(`unable to fetch wrapped key from [${url}]: ${e}`); + } } export type KasPublicKeyAlgorithm = 'ec:secp256r1' | 'rsa:2048'; @@ -75,6 +100,17 @@ export type KasPublicKeyInfo = { key: Promise; }; +async function noteInvalidPublicKey(url: string, r: Promise): Promise { + try { + return await r; + } catch (e) { + if (e instanceof TypeError) { + throw new ServiceError(`invalid public key from [${url}]`, e); + } + throw e; + } +} + /** * If we have KAS url but not public key we can fetch it from KAS, fetching * the value from `${kas}/kas_public_key`. @@ -82,36 +118,53 @@ export type KasPublicKeyInfo = { export async function fetchECKasPubKey(kasEndpoint: string): Promise { validateSecureUrl(kasEndpoint); const pkUrlV2 = `${kasEndpoint}/v2/kas_public_key?algorithm=ec:secp256r1&v=2`; - const kasPubKeyResponse = await fetch(pkUrlV2); - if (!kasPubKeyResponse.ok) { - if (kasPubKeyResponse.status != 404) { - throw new Error( - `unable to load KAS public key from [${pkUrlV2}]. Received [${kasPubKeyResponse.status}:${kasPubKeyResponse.statusText}]` - ); + const kasPubKeyResponseV2 = await fetch(pkUrlV2); + if (!kasPubKeyResponseV2.ok) { + switch (kasPubKeyResponseV2.status) { + case 404: + // v2 not implemented, perhaps a legacy server + break; + case 401: + throw new UnauthenticatedError(`401 for [${pkUrlV2}]`); + case 403: + throw new PermissionDeniedError(`403 for [${pkUrlV2}]`); + default: + throw new NetworkError( + `${pkUrlV2} => ${kasPubKeyResponseV2.status} ${kasPubKeyResponseV2.statusText}` + ); } // most likely a server that does not implement v2 endpoint, so no key identifier const pkUrlV1 = `${kasEndpoint}/kas_public_key?algorithm=ec:secp256r1`; const r2 = await fetch(pkUrlV1); if (!r2.ok) { - throw new Error( - `unable to load KAS public key from [${pkUrlV1}]. Received [${r2.status}:${r2.statusText}]` - ); + switch (r2.status) { + case 401: + throw new UnauthenticatedError(`401 for [${pkUrlV2}]`); + case 403: + throw new PermissionDeniedError(`403 for [${pkUrlV2}]`); + default: + throw new NetworkError( + `unable to load KAS public key from [${pkUrlV1}]. 
Received [${r2.status}:${r2.statusText}]` + ); + } } const pem = await r2.json(); return { - key: pemToCryptoPublicKey(pem), + key: noteInvalidPublicKey(pkUrlV1, pemToCryptoPublicKey(pem)), publicKey: pem, url: kasEndpoint, algorithm: 'ec:secp256r1', }; } - const jsonContent = await kasPubKeyResponse.json(); + const jsonContent = await kasPubKeyResponseV2.json(); const { publicKey, kid }: KasPublicKeyInfo = jsonContent; if (!publicKey) { - throw new Error(`Invalid response from public key endpoint [${JSON.stringify(jsonContent)}]`); + throw new NetworkError( + `invalid response from public key endpoint [${JSON.stringify(jsonContent)}]` + ); } return { - key: pemToCryptoPublicKey(publicKey), + key: noteInvalidPublicKey(pkUrlV2, pemToCryptoPublicKey(publicKey)), publicKey, url: kasEndpoint, algorithm: 'ec:secp256r1', diff --git a/lib/src/auth/oidc-clientcredentials-provider.ts b/lib/src/auth/oidc-clientcredentials-provider.ts index 93b47b63..843e34e8 100644 --- a/lib/src/auth/oidc-clientcredentials-provider.ts +++ b/lib/src/auth/oidc-clientcredentials-provider.ts @@ -1,3 +1,4 @@ +import { ConfigurationError } from '../errors.js'; import { AuthProvider, type HttpRequest } from './auth.js'; import { AccessToken, type ClientSecretCredentials } from './oidc.js'; @@ -10,9 +11,7 @@ export class OIDCClientCredentialsProvider implements AuthProvider { oidcOrigin, }: Partial & Omit) { if (!clientId || !clientSecret) { - throw new Error( - 'To use this nonbrowser-only provider you must supply clientId & clientSecret' - ); + throw new ConfigurationError('clientId & clientSecret required for client credentials flow'); } this.oidcAuth = new AccessToken({ diff --git a/lib/src/auth/oidc-externaljwt-provider.ts b/lib/src/auth/oidc-externaljwt-provider.ts index b115f0af..4006355a 100644 --- a/lib/src/auth/oidc-externaljwt-provider.ts +++ b/lib/src/auth/oidc-externaljwt-provider.ts @@ -1,3 +1,4 @@ +import { ConfigurationError } from '../errors.js'; import { type AuthProvider, type HttpRequest } from './auth.js'; import { AccessToken, type ExternalJwtCredentials } from './oidc.js'; @@ -11,9 +12,7 @@ export class OIDCExternalJwtProvider implements AuthProvider { oidcOrigin, }: Partial & Omit) { if (!clientId || !externalJwt) { - throw new Error( - 'To use this browser-only provider you must supply clientId/JWT from trusted external IdP' - ); + throw new ConfigurationError('external JWT exchange reequires client id and jwt'); } this.oidcAuth = new AccessToken({ diff --git a/lib/src/auth/oidc-refreshtoken-provider.ts b/lib/src/auth/oidc-refreshtoken-provider.ts index e3be9ce3..9c5da4cc 100644 --- a/lib/src/auth/oidc-refreshtoken-provider.ts +++ b/lib/src/auth/oidc-refreshtoken-provider.ts @@ -1,3 +1,4 @@ +import { ConfigurationError } from '../errors.js'; import { type AuthProvider, type HttpRequest } from './auth.js'; import { AccessToken, type RefreshTokenCredentials } from './oidc.js'; @@ -11,9 +12,7 @@ export class OIDCRefreshTokenProvider implements AuthProvider { oidcOrigin, }: Partial & Omit) { if (!clientId || !refreshToken) { - throw new Error( - 'To use this browser-only provider you must supply clientId/valid OIDC refresh token' - ); + throw new ConfigurationError('refresh token or client id missing'); } this.oidcAuth = new AccessToken({ diff --git a/lib/src/auth/oidc.ts b/lib/src/auth/oidc.ts index 970dc5a4..e261c458 100644 --- a/lib/src/auth/oidc.ts +++ b/lib/src/auth/oidc.ts @@ -1,7 +1,7 @@ import { default as dpopFn } from 'dpop'; import { HttpRequest, withHeaders } from './auth.js'; import { 
base64 } from '../encodings/index.js'; -import { IllegalArgumentError } from '../errors.js'; +import { ConfigurationError, TdfError } from '../errors.js'; import { cryptoPublicToPem, rstrip } from '../utils.js'; /** @@ -98,19 +98,23 @@ export class AccessToken { constructor(cfg: OIDCCredentials, request?: typeof fetch) { if (!cfg.clientId) { - throw new Error('A Keycloak client identifier is currently required for all auth mechanisms'); + throw new ConfigurationError( + 'A Keycloak client identifier is currently required for all auth mechanisms' + ); } if (cfg.exchange === 'client' && !cfg.clientSecret) { - throw new Error('When using client credentials, both clientId and clientSecret are required'); + throw new ConfigurationError( + 'When using client credentials, both clientId and clientSecret are required' + ); } if (cfg.exchange === 'refresh' && !cfg.refreshToken) { - throw new Error('When using refresh token, a refresh token must be provided'); + throw new ConfigurationError('When using refresh token, a refresh token must be provided'); } if (cfg.exchange === 'external' && !cfg.externalJwt) { - throw new Error('When using external JWT, the jwt must be provided'); + throw new ConfigurationError('When using external JWT, the jwt must be provided'); } if (!cfg.exchange) { - throw new Error('Invalid oidc configuration'); + throw new ConfigurationError('Invalid oidc configuration'); } this.config = cfg; this.request = request; @@ -137,7 +141,9 @@ export class AccessToken { }); if (!response.ok) { console.error(await response.text()); - throw new Error(`${response.status} ${response.statusText}`); + throw new TdfError( + `auth info fail: GET [${url}] => ${response.status} ${response.statusText}` + ); } return (await response.json()) as unknown; @@ -151,7 +157,7 @@ export class AccessToken { // add DPoP headers if configured if (this.config.dpopEnabled) { if (!this.signingKey) { - throw new IllegalArgumentError('No signature configured'); + throw new ConfigurationError('No signature configured'); } const clientPubKey = await cryptoPublicToPem(this.signingKey.publicKey); headers['X-VirtruPubKey'] = base64.encode(clientPubKey); @@ -195,7 +201,9 @@ export class AccessToken { const response = await this.doPost(url, body); if (!response.ok) { console.error(await response.text()); - throw new Error(`${response.status} ${response.statusText}`); + throw new TdfError( + `token/code exchange fail: POST [${url}] => ${response.status} ${response.statusText}` + ); } return response.json(); } @@ -255,7 +263,7 @@ export class AccessToken { async exchangeForRefreshToken(): Promise { const cfg = this.config; if (cfg.exchange != 'external' && cfg.exchange != 'refresh') { - throw new Error('No refresh token provided!'); + throw new ConfigurationError('no refresh token provided!'); } const tokenResponse = (this.data = await this.accessTokenLookup(this.config)); if (!tokenResponse.refresh_token) { @@ -278,7 +286,7 @@ export class AccessToken { async withCreds(httpReq: HttpRequest): Promise { if (!this.signingKey) { - throw new Error( + throw new ConfigurationError( 'Client public key was not set via `updateClientPublicKey` or passed in via constructor, cannot fetch OIDC token with valid Virtru claims' ); } diff --git a/lib/src/auth/providers.ts b/lib/src/auth/providers.ts index 956c1530..7c96c075 100644 --- a/lib/src/auth/providers.ts +++ b/lib/src/auth/providers.ts @@ -9,6 +9,7 @@ import { OIDCExternalJwtProvider } from './oidc-externaljwt-provider.js'; import { type AuthProvider } from './auth.js'; import { 
OIDCRefreshTokenProvider } from './oidc-refreshtoken-provider.js'; import { isBrowser } from '../utils.js'; +import { ConfigurationError } from '../errors.js'; /** * Creates an OIDC Client Credentials Provider for non-browser contexts. @@ -95,13 +96,13 @@ export const refreshAuthProvider = async ( */ export const clientAuthProvider = async (clientConfig: OIDCCredentials): Promise => { if (!clientConfig.clientId) { - throw new Error('Client ID must be provided to constructor'); + throw new ConfigurationError('Client ID must be provided to constructor'); } if (isBrowser()) { //If you're in a browser and passing client secrets, you're Doing It Wrong. // if (clientConfig.clientSecret) { - // throw new Error('Client credentials not supported in a browser context'); + // throw new ConfigurationError('Client credentials not supported in a browser context'); // } //Are we exchanging a refreshToken for a bearer token (normal AuthCode browser auth flow)? //If this is a browser context, we expect the caller to handle the initial @@ -118,15 +119,15 @@ export const clientAuthProvider = async (clientConfig: OIDCCredentials): Promise return clientSecretAuthProvider(clientConfig); } default: - throw new Error(`Unsupported client type`); + throw new ConfigurationError(`Unsupported client type`); } } //If you're NOT in a browser and are NOT passing client secrets, you're Doing It Wrong. //If this is not a browser context, we expect the caller to supply their client ID and client secret, so that // we can authenticate them directly with the OIDC endpoint. if (clientConfig.exchange !== 'client') { - throw new Error( - 'If using client credentials, must supply both client ID and client secret to constructor' + throw new ConfigurationError( + 'When using client credentials, must supply both client ID and client secret to constructor' ); } return clientSecretAuthProvider(clientConfig); diff --git a/lib/src/errors.ts b/lib/src/errors.ts index 6df81f15..76be014a 100644 --- a/lib/src/errors.ts +++ b/lib/src/errors.ts @@ -15,6 +15,10 @@ function scrubCause(error?: Error, d?: number): { cause?: Error } { return { cause }; } +/** + * Root class for all errors thrown by this library. + * This should not be thrown directly, but rather one of its subclasses. + */ export class TdfError extends Error { override name = 'TdfError'; @@ -27,91 +31,83 @@ export class TdfError extends Error { } } -export class UnsafeUrlError extends Error { - override name = 'UnsafeUrlError'; - readonly url: string; - - constructor(message: string, url: string) { - super(message); - Object.setPrototypeOf(this, new.target.prototype); - this.url = url; - } +/** + * Errors that indicate the client or method does not have valid options. + */ +export class ConfigurationError extends TdfError { + override name = 'ConfigurationError'; } -export class AttributeValidationError extends TdfError { +/** + * The assigned data attribute is not in the correct form. 
+ */ +export class AttributeValidationError extends ConfigurationError { override name = 'AttributeValidationError'; + attribute: unknown; + constructor(message: string, attribute: unknown, cause?: Error) { + super(message, cause); + this.attribute = attribute; + } } -export class KasDecryptError extends TdfError { - override name = 'KasDecryptError'; -} - -export class KasUpsertError extends TdfError { - override name = 'KasUpsertError'; -} - -export class KeyAccessError extends TdfError { - override name = 'KeyAccessError'; -} - -export class KeySyncError extends TdfError { - override name = 'KeySyncError'; -} - -export class IllegalArgumentError extends Error {} - -export class IllegalEnvError extends Error {} - -export class InvalidCipherError extends TdfError { - override name = 'InvalidCipherError'; -} - -export class InvalidCurveNameError extends TdfError { - override name = 'InvalidCurveNameError'; -} +/** + * Errors that indicate the TDF object is corrupt, invalid, or fails validation or decrypt. + */ +export class InvalidFileError extends TdfError {} -export class InvalidDataTypeError extends TdfError { - override name = 'InvalidDataTypeError'; +/** + * Indicates a decrypt failure, either due to an incorrect key, corrupt ciphertext, or inappropriate key parameters. + */ +export class DecryptError extends InvalidFileError { + override name = 'DecryptError'; } -export class InvalidEphemeralKeyError extends TdfError { - override name = 'InvalidEphemeralKeyError'; +export class IntegrityError extends InvalidFileError { + override name = 'IntegrityError'; } -export class InvalidPayloadError extends TdfError { - override name = 'InvalidPayloadError'; -} +/** + * Thrown when a KAS URL found in one or more required key access objects are not in the list of known and allowed KASes in the client. + * This may indicate a malicious file - e.g. an attempt to DDoS a server by listing it as the KAS for many files, or to siphon credentials using a lookalike URL. + */ +export class UnsafeUrlError extends InvalidFileError { + override name = 'UnsafeUrlError'; + readonly url: string[]; -export class InvalidPolicyTypeError extends TdfError { - override name = 'InvalidPolicyTypeError'; + constructor(message: string, ...url: string[]) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + this.url = url; + } } -export class ManifestIntegrityError extends TdfError { - override name = 'ManifestIntegrityError'; +/** + * A network error (no response) from rewrap or other endpoint, Possibly fixed by retrying or adjusting your network settings; could indicate network failure. + */ +export class NetworkError extends TdfError { + override name = 'NetworkError'; } -export class PolicyIntegrityError extends TdfError { - override name = 'PolicyIntegrityError'; +/** + * The service reports an unexpected error on its behalf, or a subcomponent (5xx). 
+ */ +export class ServiceError extends TdfError { + override name = 'ServiceError'; } -export class SignatureError extends TdfError { - override name = 'SignatureError'; +/** Authentication failure (401) */ +export class UnauthenticatedError extends TdfError { + override name = 'UnauthenticatedError'; } -export class TdfCorruptError extends TdfError { - reason: string; - - override name = 'TdfCorruptError'; - - constructor(message: string, err: Error, reason: string) { - super(message, err); - this.reason = reason; - } -} -export class TdfDecryptError extends TdfError { - override name = 'TdfDecryptError'; +/** Authorization failure (403) */ +export class PermissionDeniedError extends TdfError { + override name = 'PermissionDeniedError'; } -export class TdfPayloadExtractionError extends TdfError { - override name = 'TdfPayloadExtractionError'; +/** + * Version of file is unsupported, or file uses a feature that is not supported by this version of the library. + */ +export class UnsupportedFeatureError extends TdfError { + override name = 'UnsupportedFeatureError'; } diff --git a/lib/src/index.ts b/lib/src/index.ts index 0244d674..ae7b6674 100644 --- a/lib/src/index.ts +++ b/lib/src/index.ts @@ -12,6 +12,7 @@ import { keyAgreement } from './nanotdf-crypto/index.js'; import { TypedArray, createAttribute, Policy } from './tdf/index.js'; import { fetchECKasPubKey } from './access.js'; import { ClientConfig } from './nanotdf/Client.js'; +import { ConfigurationError } from './errors.js'; export { attributeFQNsAsValues } from './policy/api.js'; // Define the EncryptOptions type @@ -79,7 +80,7 @@ export class NanoTDFClient extends Client { ); if (!ukey) { - throw new Error('Key rewrap failure'); + throw new Error('internal: key rewrap failure'); } // Return decrypt promise return decrypt(ukey, nanotdf); @@ -108,7 +109,7 @@ export class NanoTDFClient extends Client { ); if (!key) { - throw new Error('Failed unwrap'); + throw new Error('internal: failed unwrap'); } // Return decrypt promise return decrypt(key, nanotdf); @@ -131,7 +132,9 @@ export class NanoTDFClient extends Client { const initializationVector = this.iv; if (typeof initializationVector !== 'number') { - throw new Error('NanoTDF clients are single use. Please generate a new client and keypair.'); + throw new ConfigurationError( + 'NanoTDF clients are single use. Please generate a new client and keypair.' 
+ ); } delete this.iv; @@ -241,8 +244,8 @@ export class NanoTDFDatasetClient extends Client { opts.maxKeyIterations && opts.maxKeyIterations > NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS ) { - throw new Error( - `Key iteration exceeds max iterations(${NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS})` + throw new ConfigurationError( + `key iteration exceeds max iterations(${NanoTDFDatasetClient.NTDF_MAX_KEY_ITERATIONS})` ); } super(opts); @@ -323,11 +326,10 @@ export class NanoTDFDatasetClient extends Client { this.keyIterationCount += 1; if (!this.cachedHeader) { - throw new Error('NanoTDF dataset header should have been assgined'); + throw new ConfigurationError('invalid dataset client: empty nanoTDF header'); } - if (!this.symmetricKey) { - throw new Error('NanoTDF dataset payload key is not set'); + throw new ConfigurationError('invalid dataset client: empty dek'); } this.keyIterationCount += 1; @@ -361,7 +363,8 @@ export class NanoTDFDatasetClient extends Client { if (this.cachedEphemeralKey.toString() == nanotdf.header.ephemeralPublicKey.toString()) { const ukey = this.unwrappedKey; if (!ukey) { - throw new Error('Key rewrap failure'); + // These should have thrown already. + throw new Error('internal: key rewrap failure'); } // Return decrypt promise return decrypt(ukey, nanotdf); @@ -383,7 +386,8 @@ export class NanoTDFDatasetClient extends Client { version ); if (!ukey) { - throw new Error('Key rewrap failure'); + // These should have thrown already. + throw new Error('internal: key rewrap failure'); } this.cachedEphemeralKey = nanotdf.header.ephemeralPublicKey; @@ -396,11 +400,15 @@ export class NanoTDFDatasetClient extends Client { generateIV(): Uint8Array { const iv = this.iv; if (iv === undefined) { - throw new Error('Dataset full'); + // iv has passed the maximum iteration count for this dek + throw new ConfigurationError('dataset full'); } // assert iv ∈ ℤ ∩ (0, 2^24) if (!Number.isInteger(iv) || iv <= 0 || 0xff_ffff < iv) { - throw new Error('Invalid state'); + // Something has fiddled with the iv outside of the expected behavior + // could indicate a race condition, e.g. if two workers or handlers are + // processing the file at once, for example. + throw new Error('internal: invalid state'); } const lengthAsUint32 = new Uint32Array(1); diff --git a/lib/src/nanotdf-crypto/ecdsaSignature.ts b/lib/src/nanotdf-crypto/ecdsaSignature.ts index 9e1d8d49..8e66c380 100644 --- a/lib/src/nanotdf-crypto/ecdsaSignature.ts +++ b/lib/src/nanotdf-crypto/ecdsaSignature.ts @@ -1,3 +1,4 @@ +import { ConfigurationError } from '../errors.js'; import { AlgorithmName } from './../nanotdf-crypto/enums.js'; /** @@ -71,7 +72,7 @@ export function extractRSValuesFromSignature(signatureBytes: Uint8Array): { // Correct validation if (!concatAndCompareUint8Arrays(rValue, sValue, signatureBytes)) { - throw new Error('Invalid ECDSA signature'); + throw new ConfigurationError('invalid ECDSA signature'); } return { diff --git a/lib/src/nanotdf-crypto/keyAgreement.ts b/lib/src/nanotdf-crypto/keyAgreement.ts index 65a767e3..9b76590c 100644 --- a/lib/src/nanotdf-crypto/keyAgreement.ts +++ b/lib/src/nanotdf-crypto/keyAgreement.ts @@ -27,6 +27,7 @@ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*/ +import { ConfigurationError } from '../errors.js'; import { AlgorithmName, CipherType, HashType, KeyFormat, KeyType, KeyUsageType } from './enums.js'; const KEY_USAGE_DERIVE_KEY = 'deriveKey'; @@ -72,15 +73,15 @@ export async function keyAgreement( publicKey?.algorithm?.name !== AlgorithmName.ECDSA && publicKey?.algorithm?.name !== AlgorithmName.ECDH ) { - throw new Error('CryptoKey is expected to be of type ECDSA or ECDH'); + throw new ConfigurationError('CryptoKey is expected to be of type ECDSA or ECDH'); } if (privateKey.type !== KeyType.Private) { - throw new TypeError('Expected input of privateKey to be a CryptoKey of type private'); + throw new ConfigurationError('Expected input of privateKey to be a CryptoKey of type private'); } if (publicKey.type !== KeyType.Public) { - throw new TypeError('Expected input of publicKey to be a CryptoKey of type public'); + throw new ConfigurationError('Expected input of publicKey to be a CryptoKey of type public'); } const { diff --git a/lib/src/nanotdf-crypto/pemPublicToCrypto.ts b/lib/src/nanotdf-crypto/pemPublicToCrypto.ts index 3cfbde5f..3f121eae 100644 --- a/lib/src/nanotdf-crypto/pemPublicToCrypto.ts +++ b/lib/src/nanotdf-crypto/pemPublicToCrypto.ts @@ -31,6 +31,7 @@ import * as base64 from '../encodings/base64.js'; import { importX509 } from 'jose'; import { type KeyObject } from 'crypto'; import { encodeArrayBuffer as hexEncodeArrayBuffer } from '../encodings/hex.js'; +import { ConfigurationError, TdfError } from '../errors.js'; const RSA_OID = '06092a864886f70d010101'; const EC_OID = '06072a8648ce3d0201'; @@ -100,7 +101,7 @@ function guessCurveName(hex: string): CurveName { } else if (hex.includes(P521_OID)) { return P_512; } - throw new Error('Unsupported curve name or invalid key'); + throw new TdfError('Unsupported curve name or invalid key'); } /** @@ -209,7 +210,7 @@ export async function pemCertToCrypto( const keylike = await importX509(pem, jwsAlg, { extractable: options.isExtractable }); const { type } = keylike; if (type !== 'public') { - throw new Error('Unpublic'); + throw new ConfigurationError('unpublic'); } // FIXME Jose workaround for node clients. // jose returns a crypto key on node, but we expect a subtle-crypto key diff --git a/lib/src/nanotdf/Client.ts b/lib/src/nanotdf/Client.ts index 976c6fa7..85ded767 100644 --- a/lib/src/nanotdf/Client.ts +++ b/lib/src/nanotdf/Client.ts @@ -5,7 +5,7 @@ import getHkdfSalt from './helpers/getHkdfSalt.js'; import DefaultParams from './models/DefaultParams.js'; import { fetchWrappedKey, KasPublicKeyInfo, OriginAllowList } from '../access.js'; import { AuthProvider, isAuthProvider, reqSignature } from '../auth/providers.js'; -import { UnsafeUrlError } from '../errors.js'; +import { ConfigurationError, DecryptError, TdfError, UnsafeUrlError } from '../errors.js'; import { cryptoPublicToPem, pemToCryptoPublicKey, validateSecureUrl } from '../utils.js'; export interface ClientConfig { @@ -40,7 +40,7 @@ function toJWSAlg(c: CryptoKey): string { return 'ES256'; } } - throw new Error(`Unsupported key algorithm ${JSON.stringify(algorithm)}`); + throw new ConfigurationError(`unsupported key algorithm ${JSON.stringify(algorithm)}`); } async function generateEphemeralKeyPair(): Promise { @@ -131,7 +131,7 @@ export default class Client { if (isAuthProvider(optsOrOldAuthProvider)) { this.authProvider = optsOrOldAuthProvider; if (!kasUrl) { - throw new Error('please specify kasEndpoint'); + throw new ConfigurationError('please specify kasEndpoint'); } // TODO Disallow http KAS. 
For now just log as error validateSecureUrl(kasUrl); @@ -199,7 +199,7 @@ export default class Client { async fetchOIDCToken(): Promise { const signer = await this.requestSignerKeyPair; if (!signer) { - throw new Error('Unexpected state'); + throw new ConfigurationError('failed to find or generate signer session key'); } await this.authProvider.updateClientPublicKey(signer); @@ -231,118 +231,119 @@ export default class Client { // Ensure the ephemeral key pair has been set or generated (see fetchEntityObject) if (!ephemeralKeyPair?.privateKey) { - throw new Error('Ephemeral key has not been set or generated'); + throw new ConfigurationError('Ephemeral key has not been set or generated'); } if (!requestSignerKeyPair?.privateKey) { - throw new Error('Signer key has not been set or generated'); + throw new ConfigurationError('Signer key has not been set or generated'); } - try { - const requestBodyStr = JSON.stringify({ - algorithm: DefaultParams.defaultECAlgorithm, - // nano keyAccess minimum, header is used for nano - keyAccess: { - type: Client.KEY_ACCESS_REMOTE, - url: '', - protocol: Client.KAS_PROTOCOL, - header: base64.encodeArrayBuffer(nanoTdfHeader), - }, - clientPublicKey: await cryptoPublicToPem(ephemeralKeyPair.publicKey), - }); + const requestBodyStr = JSON.stringify({ + algorithm: DefaultParams.defaultECAlgorithm, + // nano keyAccess minimum, header is used for nano + keyAccess: { + type: Client.KEY_ACCESS_REMOTE, + url: '', + protocol: Client.KAS_PROTOCOL, + header: base64.encodeArrayBuffer(nanoTdfHeader), + }, + clientPublicKey: await cryptoPublicToPem(ephemeralKeyPair.publicKey), + }); - const jwtPayload = { requestBody: requestBodyStr }; - const requestBody = { - signedRequestToken: await reqSignature(jwtPayload, requestSignerKeyPair.privateKey, { - alg: toJWSAlg(requestSignerKeyPair.publicKey), - }), - }; + const jwtPayload = { requestBody: requestBodyStr }; + const requestBody = { + signedRequestToken: await reqSignature(jwtPayload, requestSignerKeyPair.privateKey, { + alg: toJWSAlg(requestSignerKeyPair.publicKey), + }), + }; - // Wrapped - const wrappedKey = await fetchWrappedKey( - kasRewrapUrl, - requestBody, - this.authProvider, - clientVersion - ); + // Wrapped + const wrappedKey = await fetchWrappedKey( + kasRewrapUrl, + requestBody, + this.authProvider, + clientVersion + ); - // Extract the iv and ciphertext - const entityWrappedKey = new Uint8Array( - base64.decodeArrayBuffer(wrappedKey.entityWrappedKey) - ); - const ivLength = - clientVersion == Client.SDK_INITIAL_RELEASE - ? Client.INITIAL_RELEASE_IV_SIZE - : Client.IV_SIZE; - const iv = entityWrappedKey.subarray(0, ivLength); - const encryptedSharedKey = entityWrappedKey.subarray(ivLength); + // Extract the iv and ciphertext + const entityWrappedKey = new Uint8Array(base64.decodeArrayBuffer(wrappedKey.entityWrappedKey)); + const ivLength = + clientVersion == Client.SDK_INITIAL_RELEASE ? Client.INITIAL_RELEASE_IV_SIZE : Client.IV_SIZE; + const iv = entityWrappedKey.subarray(0, ivLength); + const encryptedSharedKey = entityWrappedKey.subarray(ivLength); - let kasPublicKey; - try { - // Let us import public key as a cert or public key - kasPublicKey = await pemToCryptoPublicKey(wrappedKey.sessionPublicKey); - } catch (cause) { - throw new Error( - `PEM Public Key to crypto public key failed. 
Is PEM formatted correctly?\n Caused by: ${cause.message}`, - { cause } - ); - } + let kasPublicKey; + try { + // Let us import public key as a cert or public key + kasPublicKey = await pemToCryptoPublicKey(wrappedKey.sessionPublicKey); + } catch (cause) { + throw new ConfigurationError( + `internal: [${kasRewrapUrl}] PEM Public Key to crypto public key failed. Is PEM formatted correctly?`, + cause + ); + } - let hkdfSalt; - try { - // Get the hkdf salt params - hkdfSalt = await getHkdfSalt(magicNumberVersion); - } catch (e) { - throw new Error(`Salting hkdf failed\n Caused by: ${e.message}`); - } - const { privateKey } = await this.ephemeralKeyPair; + let hkdfSalt; + try { + // Get the hkdf salt params + hkdfSalt = await getHkdfSalt(magicNumberVersion); + } catch (e) { + throw new TdfError('salting hkdf failed', e); + } + const { privateKey } = await this.ephemeralKeyPair; - // Get the unwrapping key - const unwrappingKey = await keyAgreement( + // Get the unwrapping key + let unwrappingKey; + try { + unwrappingKey = await keyAgreement( // Ephemeral private key privateKey, kasPublicKey, hkdfSalt ); - - const authTagLength = 8 * (encryptedSharedKey.byteLength - 32); - let decryptedKey; - try { - // Decrypt the wrapped key - decryptedKey = await crypto.subtle.decrypt( - { name: 'AES-GCM', iv, tagLength: authTagLength }, - unwrappingKey, - encryptedSharedKey - ); - } catch (cause) { - throw new Error( - `Unable to decrypt key. Are you using the right KAS? Is the salt correct?`, - { cause } - ); + } catch (e) { + if (e.name == 'InvalidAccessError' || e.name == 'OperationError') { + throw new DecryptError('unable to solve key agreement', e); + } else if (e.name == 'NotSupported') { + throw new ConfigurationError('unable to unwrap key from kas', e); } + throw new TdfError('unable to reach agreement', e); + } - // UnwrappedKey - let unwrappedKey; - try { - unwrappedKey = await crypto.subtle.importKey( - 'raw', - decryptedKey, - 'AES-GCM', - // @security This allows the key to be used in `exportKey` and `wrapKey` - // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/exportKey - // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/wrapKey - true, - // Want to use the key to encrypt and decrypt. Signing key will be used later. - ['encrypt', 'decrypt'] - ); - } catch (cause) { - throw new Error('Unable to import raw key.', { cause }); - } + const authTagLength = 8 * (encryptedSharedKey.byteLength - 32); + let decryptedKey; + try { + // Decrypt the wrapped key + decryptedKey = await crypto.subtle.decrypt( + { name: 'AES-GCM', iv, tagLength: authTagLength }, + unwrappingKey, + encryptedSharedKey + ); + } catch (cause) { + throw new DecryptError( + `unable to decrypt key. Are you using the right KAS? Is the salt correct?`, + cause + ); + } - return unwrappedKey; + // UnwrappedKey + let unwrappedKey; + try { + unwrappedKey = await crypto.subtle.importKey( + 'raw', + decryptedKey, + 'AES-GCM', + // @security This allows the key to be used in `exportKey` and `wrapKey` + // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/exportKey + // https://developer.mozilla.org/en-US/docs/Web/API/SubtleCrypto/wrapKey + true, + // Want to use the key to encrypt and decrypt. Signing key will be used later. 
+ ['encrypt', 'decrypt'] + ); } catch (cause) { - console.error('rewrap fail', cause); - throw new Error('Could not rewrap key with entity object.', { cause }); + throw new DecryptError('Unable to import raw key.', cause); } + + return unwrappedKey; } } diff --git a/lib/src/nanotdf/NanoTDF.ts b/lib/src/nanotdf/NanoTDF.ts index ea90c5ef..4a926c7e 100644 --- a/lib/src/nanotdf/NanoTDF.ts +++ b/lib/src/nanotdf/NanoTDF.ts @@ -4,7 +4,7 @@ import Header from './models/Header.js'; import Payload from './models/Payload.js'; import Signature from './models/Signature.js'; import EncodingEnum from './enum/EncodingEnum.js'; -import { InvalidDataTypeError, SignatureError } from '../errors.js'; +import { ConfigurationError, InvalidFileError } from '../errors.js'; // Defaults when none set during encryption @@ -32,14 +32,14 @@ export default class NanoTDF { if (!encoding || encoding === EncodingEnum.Base64) { buffer = base64.decodeArrayBuffer(content); } else { - throw new InvalidDataTypeError(); + throw new ConfigurationError(`Unsupported encoding: ${encoding}`); } } // Handle Uint8Array types else if (ArrayBuffer.isView(content) || content instanceof ArrayBuffer) { buffer = content; } else { - throw new InvalidDataTypeError(); + throw new ConfigurationError(`unsupported content type`); } const dataView = new Uint8Array(buffer); @@ -66,10 +66,10 @@ export default class NanoTDF { // Singature checking if (!header.hasSignature && signature.length > 0) { - throw new SignatureError("Found signature when there shouldn't be one"); + throw new InvalidFileError("Found signature when there shouldn't be one"); } if (header.hasSignature && signature.length === 0) { - throw new SignatureError('Could not find signature'); + throw new InvalidFileError('Could not find signature'); } return new NanoTDF(header, payload, signature); diff --git a/lib/src/nanotdf/encrypt.ts b/lib/src/nanotdf/encrypt.ts index 5f7d3f02..6ad7901f 100644 --- a/lib/src/nanotdf/encrypt.ts +++ b/lib/src/nanotdf/encrypt.ts @@ -18,6 +18,7 @@ import { } from '../nanotdf-crypto/index.js'; import { KasPublicKeyInfo } from '../access.js'; import { computeECDSASig, extractRSValuesFromSignature } from '../nanotdf-crypto/ecdsaSignature.js'; +import { ConfigurationError } from '../errors.js'; /** * Encrypt the plain data into nanotdf buffer @@ -39,7 +40,7 @@ export default async function encrypt( ): Promise { // Generate a symmetric key. 
if (!ephemeralKeyPair.privateKey) { - throw new Error('incomplete ephemeral key'); + throw new ConfigurationError('incomplete ephemeral key'); } const symmetricKey = await keyAgreement( ephemeralKeyPair.privateKey, @@ -96,7 +97,7 @@ export default async function encrypt( ); if (!ephemeralKeyPair.publicKey) { - throw new Error('incomplete ephemeral key'); + throw new ConfigurationError('incomplete ephemeral key'); } // Create a header const pubKeyAsArrayBuffer = await exportCryptoKey(ephemeralKeyPair.publicKey); @@ -158,7 +159,7 @@ async function getCurveNameFromPrivateKey(privateKey: CryptoKey): Promise buffer.length) { - throw new Error('Invalid buffer size to copy tdf header'); + throw new InvalidFileError('invalid buffer size to copy tdf header'); } let offset = 0; @@ -316,7 +316,7 @@ export default class Header { try { return `${rstrip(this.kas.url, '/')}/v2/rewrap`; } catch (e) { - throw new Error(`Cannot construct KAS Rewrap URL: ${e.message}`); + throw new ConfigurationError(`cannot construct KAS Rewrap URL: ${e.message}`); } } } diff --git a/lib/src/nanotdf/models/Payload.ts b/lib/src/nanotdf/models/Payload.ts index 141824bb..37645a82 100644 --- a/lib/src/nanotdf/models/Payload.ts +++ b/lib/src/nanotdf/models/Payload.ts @@ -1,6 +1,6 @@ import Header from './Header.js'; import { getBitLength } from './Ciphers.js'; -import { InvalidPayloadError } from '../../errors.js'; +import { ConfigurationError, InvalidFileError } from '../../errors.js'; /** * Payload @@ -57,7 +57,7 @@ export default class Payload { const inRange = length >= this.MIN_LENGTH && length <= this.MAX_NANO_TDF_ENCRYPT_PAYLOAD_SIZE; if (!inRange) { - throw new InvalidPayloadError('Payload Length Out Of Range'); + throw new InvalidFileError('nanotdf parse failure: Payload Length Out Of Range'); } /** @@ -71,7 +71,7 @@ export default class Payload { offset += Payload.IV_LEN; if (iv.byteLength != 3) { - throw new InvalidPayloadError('Invalid Payload Length'); + throw new InvalidFileError('nanotdf parse failure: Invalid Payload Length'); } if (!legacyTDF) { @@ -93,7 +93,7 @@ export default class Payload { ); if (ciphertextWithAuthTag.byteLength + Payload.LENGTH_LEN !== length) { - throw new InvalidPayloadError('Invalid Payload Length'); + throw new InvalidFileError('nanotdf parse failure: Invalid Payload Length'); } /** @@ -169,12 +169,12 @@ export default class Payload { */ copyToBuffer(buffer: Uint8Array): void { if (this.length > buffer.length) { - throw new Error('Invalid buffer size to copy payload'); + throw new Error('internal: invalid buffer size to copy payload'); } const lengthOfEncryptedPayload = this.iv.length + this.ciphertext.length + this.authTag.length; if (lengthOfEncryptedPayload > Payload.MAX_NANO_TDF_ENCRYPT_PAYLOAD_SIZE) { - throw new Error("TDF encrypted payload can't be more that 2^24"); + throw new ConfigurationError("TDF encrypted payload can't be more that 2^24"); } const lengthAsUint32 = new Uint32Array(1); diff --git a/lib/src/nanotdf/models/Policy/AbstractPolicy.ts b/lib/src/nanotdf/models/Policy/AbstractPolicy.ts index c0d8cf58..60621466 100644 --- a/lib/src/nanotdf/models/Policy/AbstractPolicy.ts +++ b/lib/src/nanotdf/models/Policy/AbstractPolicy.ts @@ -1,5 +1,6 @@ import PolicyInterface from '../../interfaces/PolicyInterface.js'; import PolicyType from '../../enum/PolicyTypeEnum.js'; +import { ConfigurationError } from '../../../errors.js'; abstract class AbstractPolicy implements PolicyInterface { static readonly TYPE_BYTE_OFF = 0; @@ -24,7 +25,7 @@ abstract class AbstractPolicy implements 
PolicyInterface { // eslint-disable-next-line @typescript-eslint/no-unused-vars type?: PolicyType ): { policy: PolicyInterface; offset: number } { - throw new Error('parsePolicy was not implemented'); + throw new ConfigurationError('parsePolicy was not implemented'); } constructor(type: PolicyType, binding: Uint8Array) { @@ -36,14 +37,14 @@ abstract class AbstractPolicy implements PolicyInterface { * Length of policy */ getLength(): number | never { - throw new Error('length was not implemented'); + throw new ConfigurationError('length was not implemented'); } /** * Return the content of the policy */ toBuffer(): Uint8Array | never { - throw new Error('toBuffer() was not implemented'); + throw new ConfigurationError('toBuffer() was not implemented'); } /** diff --git a/lib/src/nanotdf/models/Policy/EmbeddedPolicy.ts b/lib/src/nanotdf/models/Policy/EmbeddedPolicy.ts index 92f4b649..3b8c3de4 100644 --- a/lib/src/nanotdf/models/Policy/EmbeddedPolicy.ts +++ b/lib/src/nanotdf/models/Policy/EmbeddedPolicy.ts @@ -1,6 +1,7 @@ import AbstractPolicy from './AbstractPolicy.js'; import { EmbeddedPolicyInterface } from '../../interfaces/PolicyInterface.js'; import PolicyTypes from '../../enum/PolicyTypeEnum.js'; +import { ConfigurationError } from '../../../errors.js'; /** * Embedded Policy @@ -71,7 +72,7 @@ class EmbeddedPolicy extends AbstractPolicy implements EmbeddedPolicyInterface { const buffer = new Uint8Array(this.getLength()); if (this.content.length > EmbeddedPolicy.MAX_POLICY_SIZE) { - throw new Error("TDF Policy can't be more that 2^16"); + throw new ConfigurationError("TDF Policy can't be more that 2^16"); } buffer.set([this.type], 0); diff --git a/lib/src/nanotdf/models/Policy/PolicyFactory.ts b/lib/src/nanotdf/models/Policy/PolicyFactory.ts index 7402524c..db6fc555 100644 --- a/lib/src/nanotdf/models/Policy/PolicyFactory.ts +++ b/lib/src/nanotdf/models/Policy/PolicyFactory.ts @@ -2,7 +2,7 @@ import AbstractPolicy from './AbstractPolicy.js'; import EmbeddedPolicy from './EmbeddedPolicy.js'; import RemotePolicy from './RemotePolicy.js'; import PolicyTypeEnum from '../../enum/PolicyTypeEnum.js'; -import { InvalidPolicyTypeError } from '../../../errors.js'; +import { UnsupportedFeatureError } from '../../../errors.js'; import CurveNameEnum from '../../enum/CurveNameEnum.js'; function parse( @@ -34,7 +34,7 @@ function parse( type )); } else { - throw new InvalidPolicyTypeError(); + throw new UnsupportedFeatureError(`unsupported policy type: ${type}`); } return { diff --git a/lib/src/nanotdf/models/ResourceLocator.ts b/lib/src/nanotdf/models/ResourceLocator.ts index eea61fbc..0944407c 100644 --- a/lib/src/nanotdf/models/ResourceLocator.ts +++ b/lib/src/nanotdf/models/ResourceLocator.ts @@ -1,3 +1,4 @@ +import { ConfigurationError, InvalidFileError } from '../../errors.js'; import ProtocolEnum from '../enum/ProtocolEnum.js'; import ResourceLocatorIdentifierEnum from '../enum/ResourceLocatorIdentifierEnum.js'; @@ -59,7 +60,7 @@ export default class ResourceLocator { protocol = ProtocolEnum.Https; break; default: - throw new Error(`resource locator protocol [${protocolStr}] unsupported`); + throw new ConfigurationError(`resource locator protocol [${protocolStr}] unsupported`); } // Set identifier padded length and protocol identifier byte @@ -75,13 +76,13 @@ export default class ResourceLocator { } else if (identifierLength <= 32) { return ResourceLocatorIdentifierEnum.ThirtyTwoBytes; } - throw new Error(`unsupported identifier length: ${identifier.length}`); + throw new 
ConfigurationError(`unsupported identifier length: ${identifier.length}`); })(); // Create buffer to hold protocol, body length, body, and identifier const lengthOfBody = new TextEncoder().encode(body).length; if (lengthOfBody == 0) { - throw new Error('url body empty'); + throw new ConfigurationError('url body empty'); } const identifierLength = identifierType.valueOf(); const offset = ResourceLocator.BODY_OFFSET + lengthOfBody + identifierLength; @@ -94,13 +95,13 @@ export default class ResourceLocator { // Length of body const lengthOfBody = buff[ResourceLocator.LENGTH_OFFSET]; if (lengthOfBody == 0) { - throw new Error('url body empty'); + throw new InvalidFileError('url body empty'); } // Body as utf8 string const decoder = new TextDecoder(); let offset = ResourceLocator.BODY_OFFSET + lengthOfBody; if (offset > buff.length) { - throw new Error('parse out of bounds error'); + throw new InvalidFileError('url parser: out of bounds error'); } const body = decoder.decode(buff.subarray(ResourceLocator.BODY_OFFSET, offset)); const protocol = protocolAndIdentifierType & 0xf; @@ -109,7 +110,7 @@ export default class ResourceLocator { case ProtocolEnum.Https: break; default: - throw new Error(`unsupported protocol type [${protocol}]`); + throw new InvalidFileError(`url parser: unsupported protocol type [${protocol}]`); } // identifier const identifierTypeNibble = protocolAndIdentifierType & 0xf0; @@ -121,7 +122,7 @@ export default class ResourceLocator { } else if (identifierTypeNibble === ResourceLocator.IDENTIFIER_32_BYTE) { identifierType = ResourceLocatorIdentifierEnum.ThirtyTwoBytes; } else if (identifierTypeNibble !== ResourceLocator.IDENTIFIER_0_BYTE) { - throw new Error(`unsupported key identifier type [${identifierTypeNibble}]`); + throw new InvalidFileError(`url parser: unsupported fragment type [${identifierTypeNibble}]`); } let identifier: string | undefined = undefined; @@ -136,7 +137,7 @@ export default class ResourceLocator { const kidStart = offset; offset = kidStart + identifierType.valueOf(); if (offset > buff.length) { - throw new Error('parse out of bounds error'); + throw new InvalidFileError('url parser: out of bounds error'); } const kidSubarray = buff.subarray(kidStart, offset); // Remove padding (assuming the padding is null bytes, 0x00) @@ -169,7 +170,7 @@ export default class ResourceLocator { case ProtocolEnum.Https: return 'https://' + this.body; default: - throw new Error('Resource locator protocol is not supported.'); + throw new ConfigurationError(`resource locator protocol unsupported [${this.protocol}]`); } } diff --git a/lib/src/nanotdf/models/Signature.ts b/lib/src/nanotdf/models/Signature.ts index dbd67c39..05610bbf 100644 --- a/lib/src/nanotdf/models/Signature.ts +++ b/lib/src/nanotdf/models/Signature.ts @@ -1,5 +1,6 @@ import Header from './Header.js'; import { lengthOfPublicKey, lengthOfSignature } from '../helpers/calculateByCurve.js'; +import { ConfigurationError } from '../../errors.js'; /** * NanoTDF Signature @@ -75,7 +76,7 @@ export default class Signature { */ copyToBuffer(buffer: Uint8Array): void { if (this.length > buffer.length) { - throw new Error('Invalid buffer size to copy signature'); + throw new ConfigurationError('Invalid buffer size to copy signature'); } buffer.set(this.publicKey, 0); diff --git a/lib/src/policy/api.ts b/lib/src/policy/api.ts index 8a575734..1cbbcded 100644 --- a/lib/src/policy/api.ts +++ b/lib/src/policy/api.ts @@ -1,3 +1,4 @@ +import { NetworkError, ServiceError } from '../errors.js'; import { AuthProvider } from 
'../auth/auth.js'; import { rstrip } from '../utils.js'; import { GetAttributeValuesByFqnsResponse, Value } from './attributes.js'; @@ -30,21 +31,19 @@ export async function attributeFQNsAsValues( redirect: 'follow', referrerPolicy: 'no-referrer', }); - - if (!response.ok) { - throw new Error(`${req.method} ${req.url} => ${response.status} ${response.statusText}`); - } } catch (e) { - console.error(`network error [${req.method} ${req.url}]`, e); - throw e; + throw new NetworkError(`network error [${req.method} ${req.url}]`, e); + } + + if (!response.ok) { + throw new ServiceError(`${req.method} ${req.url} => ${response.status} ${response.statusText}`); } let resp: GetAttributeValuesByFqnsResponse; try { resp = (await response.json()) as GetAttributeValuesByFqnsResponse; } catch (e) { - console.error(`response parse error [${req.method} ${req.url}]`, e); - throw e; + throw new ServiceError(`response parse error [${req.method} ${req.url}]`, e); } const values: Value[] = []; diff --git a/lib/src/policy/granter.ts b/lib/src/policy/granter.ts index 9173d8c8..823ec589 100644 --- a/lib/src/policy/granter.ts +++ b/lib/src/policy/granter.ts @@ -1,3 +1,4 @@ +import { ConfigurationError } from '../errors.js'; import { Attribute, AttributeRuleType, KeyAccessServer, Value } from './attributes.js'; export type KeySplitStep = { @@ -82,7 +83,7 @@ export function plan(dataAttrs: Value[]): KeySplitStep[] { for (const v of dataAttrs) { const { attribute, fqn } = v; if (!attribute) { - throw new Error(`attribute not defined for [${fqn}]`); + throw new ConfigurationError(`attribute not defined for [${fqn}]`); } const valFqn = fqn.toLowerCase(); const attrFqn = attribute.fqn.toLowerCase(); @@ -139,7 +140,7 @@ function simplify( const anyKids = []; for (const bc of children) { if (bc.op != 'anyOf') { - throw new Error('inversion'); + throw new Error('internal: autoconfigure inversion in disjunction'); } if (!bc.kases?.length) { continue; @@ -154,7 +155,7 @@ function simplify( } else { for (const bc of children) { if (bc.op != 'anyOf') { - throw new Error('inversion'); + throw new Error('insternal: autoconfigure inversion in conjunction'); } if (!bc.kases?.length) { continue; diff --git a/lib/src/utils.ts b/lib/src/utils.ts index ee4b5a15..d9513b09 100644 --- a/lib/src/utils.ts +++ b/lib/src/utils.ts @@ -3,6 +3,7 @@ import { exportSPKI, importX509 } from 'jose'; import { base64 } from './encodings/index.js'; import { pemCertToCrypto, pemPublicToCrypto } from './nanotdf-crypto/index.js'; +import { ConfigurationError } from './errors.js'; /** * Check to see if the given URL is 'secure'. This assumes: @@ -113,7 +114,7 @@ export function addNewLines(str: string): string { export async function cryptoPublicToPem(publicKey: CryptoKey): Promise { if (publicKey.type !== 'public') { - throw new TypeError('Incorrect key type'); + throw new ConfigurationError('incorrect key type'); } const exportedPublicKey = await crypto.subtle.exportKey('spki', publicKey); @@ -128,7 +129,10 @@ export async function pemToCryptoPublicKey(pem: string): Promise { } else if (/-----BEGIN CERTIFICATE-----/.test(pem)) { return pemCertToCrypto(pem); } - throw new Error(`unsupported pem type [${pem}]`); + // This can happen in several circumstances: + // - When parsing a PEM key from a KAS server + // - When converting between PEM and CryptoKey formats for user provided session keys (e.g. 
for DPoP) + throw new TypeError(`unsupported pem type [${pem}]`); } export async function extractPemFromKeyString(keyString: string): Promise { diff --git a/lib/tdf3/src/binary.ts b/lib/tdf3/src/binary.ts index 97ee70f8..cd33306e 100644 --- a/lib/tdf3/src/binary.ts +++ b/lib/tdf3/src/binary.ts @@ -1,3 +1,4 @@ +import { ConfigurationError } from '../../src/errors.js'; import { buffToString, SupportedEncoding, base64ToBytes } from './utils/index.js'; /** @@ -172,7 +173,7 @@ class StringBinary extends Binary { asString(encoding?: SupportedEncoding): string { if (encoding) { - throw new Error( + throw new ConfigurationError( 'Method doesnt accept encoding param, it returns binary string in original format' ); } diff --git a/lib/tdf3/src/client/DecoratedReadableStream.ts b/lib/tdf3/src/client/DecoratedReadableStream.ts index 803e4f58..59567f21 100644 --- a/lib/tdf3/src/client/DecoratedReadableStream.ts +++ b/lib/tdf3/src/client/DecoratedReadableStream.ts @@ -5,6 +5,7 @@ import { isFirefox } from '../../../src/utils.js'; import { type Metadata } from '../tdf.js'; import { type Manifest, type UpsertResponse } from '../models/index.js'; +import { ConfigurationError } from '../../../src/errors.js'; export async function streamToBuffer(stream: ReadableStream): Promise { const accumulator = await new Response(stream).arrayBuffer(); @@ -94,7 +95,7 @@ export class DecoratedReadableStream { options?: BufferEncoding | DecoratedReadableStreamSinkOptions ): Promise { if (options && typeof options === 'string') { - throw new Error('Unsupported Operation: Cannot set encoding in browser'); + throw new ConfigurationError('unsupported operation: Cannot set encoding in browser'); } if (isFirefox()) { await fileSave(new Response(this.stream), { diff --git a/lib/tdf3/src/client/builders.ts b/lib/tdf3/src/client/builders.ts index a5b87fcf..10752d63 100644 --- a/lib/tdf3/src/client/builders.ts +++ b/lib/tdf3/src/client/builders.ts @@ -3,7 +3,7 @@ import { AttributeObject, KeyInfo, Policy } from '../models/index.js'; import { type Metadata } from '../tdf.js'; import { Binary } from '../binary.js'; -import { IllegalArgumentError } from '../../../src/errors.js'; +import { ConfigurationError } from '../../../src/errors.js'; import { PemKeyPair } from '../crypto/declarations.js'; import { EntityObject } from '../../../src/tdf/EntityObject.js'; import { DecoratedReadableStream } from './DecoratedReadableStream.js'; @@ -104,7 +104,7 @@ class EncryptParamsBuilder { */ withStreamSource(readStream: ReadableStream): EncryptParamsBuilder { if (!readStream?.getReader) { - throw new Error( + throw new ConfigurationError( `Source must be a WebReadableStream. Run node streams through stream.Readable.toWeb()` ); } @@ -363,7 +363,7 @@ class EncryptParamsBuilder { */ setStreamWindowSize(numBytes: number) { if (numBytes <= 0) { - throw new Error('Stream window size must be positive'); + throw new ConfigurationError('Stream window size must be positive'); } this._params.windowSize = numBytes; } @@ -577,7 +577,7 @@ class DecryptParamsBuilder { */ setUrlSource(url: string) { if (!/^https?/.exec(url)) { - throw new IllegalArgumentError(`stream source must be a web url, not [${url}]`); + throw new ConfigurationError(`stream source must be a web url, not [${url}]`); } this._params.source = { type: 'remote', location: url }; } @@ -607,7 +607,7 @@ class DecryptParamsBuilder { */ withStreamSource(stream: ReadableStream) { if (!stream?.getReader) { - throw new Error( + throw new ConfigurationError( `Source must be a WebReadableStream. 
Run node streams through stream.Readable.toWeb()` ); } @@ -688,7 +688,7 @@ class DecryptParamsBuilder { */ build(): Readonly { if (!this._params.source) { - throw new IllegalArgumentError('No source specified'); + throw new ConfigurationError('No source specified'); } return this._deepCopy(this._params as DecryptParams); } diff --git a/lib/tdf3/src/client/index.ts b/lib/tdf3/src/client/index.ts index c703a2fb..1b752ae0 100644 --- a/lib/tdf3/src/client/index.ts +++ b/lib/tdf3/src/client/index.ts @@ -55,7 +55,7 @@ import { EncryptParamsBuilder, } from './builders.js'; import { KasPublicKeyInfo, OriginAllowList } from '../../../src/access.js'; -import { TdfError } from '../../../src/errors.js'; +import { ConfigurationError } from '../../../src/errors.js'; import { EntityObject } from '../../../src/tdf/EntityObject.js'; import { Binary } from '../binary.js'; import { AesGcmCipher } from '../ciphers/aes-gcm-cipher.js'; @@ -98,7 +98,7 @@ const getFirstTwoBytes = async (chunker: Chunker) => new TextDecoder().decode(aw const makeChunkable = async (source: DecryptSource) => { if (!source) { - throw new Error('Invalid source'); + throw new ConfigurationError('invalid source'); } // dump stream to buffer // we don't support streams anyways (see zipreader.js) @@ -297,7 +297,7 @@ export class Client { } else { // handle Deprecated `kasRewrapEndpoint` parameter if (!clientConfig.keyRewrapEndpoint) { - throw new Error('KAS definition not found'); + throw new ConfigurationError('KAS definition not found'); } this.kasEndpoint = clientConfig.keyRewrapEndpoint.replace(/\/rewrap$/, ''); } @@ -315,11 +315,11 @@ export class Client { !!clientConfig.ignoreAllowList ); if (!validateSecureUrl(this.kasEndpoint) && !this.allowedKases.allows(kasOrigin)) { - throw new TdfError(`Invalid KAS endpoint [${this.kasEndpoint}]`); + throw new ConfigurationError(`Invalid KAS endpoint [${this.kasEndpoint}]`); } } else { if (!validateSecureUrl(this.kasEndpoint)) { - throw new TdfError( + throw new ConfigurationError( `Invalid KAS endpoint [${this.kasEndpoint}]; to force, please list it among allowedKases` ); } @@ -340,7 +340,7 @@ export class Client { this.clientId = clientConfig.clientId; if (!this.authProvider) { if (!clientConfig.clientId) { - throw new Error('Client ID or custom AuthProvider must be defined'); + throw new ConfigurationError('Client ID or custom AuthProvider must be defined'); } //Are we exchanging a refreshToken for a bearer token (normal AuthCode browser auth flow)? @@ -425,7 +425,7 @@ export class Client { if (!avs.length && fqns.length) { // Hydrate avs from policy endpoint givnen the fqns if (!this.policyEndpoint) { - throw new Error('policyEndpoint not set in TDF3 Client constructor'); + throw new ConfigurationError('policyEndpoint not set in TDF3 Client constructor'); } avs = await attributeFQNsAsValues( this.policyEndpoint, @@ -442,7 +442,7 @@ export class Client { avs.length != scope.attributes?.length || !avs.map(({ fqn }) => fqn).every((a) => fqns.indexOf(a) >= 0) ) { - throw new Error( + throw new ConfigurationError( `Attribute mismatch between [${fqns}] and explicit values ${JSON.stringify( avs.map(({ fqn }) => fqn) )}` @@ -530,9 +530,8 @@ export class Client { } // Wrap if it's html. - // FIXME: Support streaming for html format. if (!stream.manifest) { - throw new Error('Missing manifest in encrypt function'); + throw new Error('internal: missing manifest in encrypt function'); } const htmlBuf = wrapHtml(await stream.toBuffer(), stream.manifest, this.readerUrl ?? 
''); @@ -575,7 +574,7 @@ export class Client { } } if (!this.authProvider) { - throw new Error('AuthProvider missing'); + throw new ConfigurationError('AuthProvider missing'); } const chunker = await makeChunkable(source); diff --git a/lib/tdf3/src/client/validation.ts b/lib/tdf3/src/client/validation.ts index cf62bddd..0981bcec 100644 --- a/lib/tdf3/src/client/validation.ts +++ b/lib/tdf3/src/client/validation.ts @@ -38,13 +38,13 @@ export const ATTR_ATTRIBUTE_PATTERN = `^(${ATTR_NAMESPACE_PATTERN}${ATTR_VALUE}) export const validateAttributeObject = (attr: unknown): true | never => { const isObject = typeof attr === 'object'; if (!isObject) { - throw new AttributeValidationError(`attribute should be an object`); + throw new AttributeValidationError(`attribute should be an object`, attr); } const { attribute } = attr as Record; const isString = typeof attribute === 'string'; if (!isString) { - throw new AttributeValidationError(`attribute prop should be a string`); + throw new AttributeValidationError(`attribute prop should be a string`, attr); } return validateAttribute(attribute); @@ -52,7 +52,7 @@ export const validateAttributeObject = (attr: unknown): true | never => { export function validateAttribute(attribute: string): true | never { if (!attribute.match(ATTR_ATTRIBUTE_PATTERN)) { - throw new AttributeValidationError(`attribute is in invalid format [${attribute}]`); + throw new AttributeValidationError(`attribute is in invalid format [${attribute}]`, attribute); } const ATTR_NAME_PREFIX = `/${ATTR_NAME_PROP_NAME}/`; @@ -61,18 +61,18 @@ export function validateAttribute(attribute: string): true | never { const attrValueMatch = sageGetMatch(attribute.match(ATTR_VALUE)); if (!attrNameMatch) { - throw new AttributeValidationError(`attribute name matching error`); + throw new AttributeValidationError(`attribute name matching error`, attribute); } if (!attrValueMatch) { - throw new AttributeValidationError(`attribute value matching error`); + throw new AttributeValidationError(`attribute value matching error`, attribute); } const attributeName = attrNameMatch.slice(ATTR_NAME_PREFIX.length); const attributeValue = attrValueMatch.slice(ATTR_VALUE_PREFIX.length); if (attributeName === attributeValue) { - throw new AttributeValidationError(`attribute name should be unique with its value`); + throw new AttributeValidationError(`attribute name should be unique with its value`, attribute); } return true; diff --git a/lib/tdf3/src/crypto/crypto-utils.ts b/lib/tdf3/src/crypto/crypto-utils.ts index e987de6b..59a10750 100644 --- a/lib/tdf3/src/crypto/crypto-utils.ts +++ b/lib/tdf3/src/crypto/crypto-utils.ts @@ -1,5 +1,4 @@ import { base64 } from '../../../src/encodings/index.js'; -import { IllegalArgumentError } from '../../../src/errors.js'; import { type AnyKeyPair, type PemKeyPair } from './declarations.js'; import { rsaPkcs1Sha256 } from './index.js'; @@ -106,7 +105,7 @@ export const toCryptoKeyPair = async (input: AnyKeyPair): Promise return input; } if (!isPemKeyPair(input)) { - throw new Error('invalid keypair'); + throw new Error('internal: generated invalid keypair'); } const k = [input.publicKey, input.privateKey] .map(removePemFormatting) @@ -118,18 +117,3 @@ export const toCryptoKeyPair = async (input: AnyKeyPair): Promise ]); return { privateKey, publicKey }; }; - -export async function cryptoToPem(k: CryptoKey): Promise { - switch (k.type) { - case 'private': { - const exPrivate = await crypto.subtle.exportKey('pkcs8', k); - return formatAsPem(exPrivate, 'PRIVATE KEY'); - } - case 
'public': { - const exPublic = await crypto.subtle.exportKey('spki', k); - return formatAsPem(exPublic, 'PUBLIC KEY'); - } - default: - throw new IllegalArgumentError(`unsupported key type [${k.type}]`); - } -} diff --git a/lib/tdf3/src/crypto/index.ts b/lib/tdf3/src/crypto/index.ts index 93d28537..6cbc3933 100644 --- a/lib/tdf3/src/crypto/index.ts +++ b/lib/tdf3/src/crypto/index.ts @@ -13,7 +13,7 @@ import { MIN_ASYMMETRIC_KEY_SIZE_BITS, PemKeyPair, } from './declarations.js'; -import { TdfDecryptError } from '../../../src/errors.js'; +import { ConfigurationError, DecryptError } from '../../../src/errors.js'; import { formatAsPem, removePemFormatting } from './crypto-utils.js'; import { encodeArrayBuffer as hexEncode } from '../../../src/encodings/hex.js'; import { decodeArrayBuffer as base64Decode } from '../../../src/encodings/base64.js'; @@ -34,7 +34,7 @@ export function rsaOaepSha1( modulusLength: number = MIN_ASYMMETRIC_KEY_SIZE_BITS ): RsaHashedKeyGenParams { if (!modulusLength || modulusLength < MIN_ASYMMETRIC_KEY_SIZE_BITS) { - throw new Error('Invalid key size requested'); + throw new ConfigurationError('Invalid key size requested'); } return { name: 'RSA-OAEP', @@ -50,7 +50,7 @@ export function rsaPkcs1Sha256( modulusLength: number = MIN_ASYMMETRIC_KEY_SIZE_BITS ): RsaHashedKeyGenParams { if (!modulusLength || modulusLength < MIN_ASYMMETRIC_KEY_SIZE_BITS) { - throw new Error('Invalid key size requested'); + throw new ConfigurationError('Invalid key size requested'); } return { name: 'RSASSA-PKCS1-v1_5', @@ -100,7 +100,8 @@ export async function generateSigningKeyPair(): Promise { export async function cryptoToPemPair(keysMaybe: unknown): Promise { const keys = keysMaybe as CryptoKeyPair; if (!keys.privateKey || !keys.publicKey) { - throw new Error('invalid'); + // These are only ever generated here, so this should not happen + throw new Error('internal: invalid keys'); } const [exPublic, exPrivate] = await Promise.all([ @@ -291,7 +292,7 @@ async function _doDecrypt( // Catching this error so we can specifically check for OperationError .catch((err) => { if (err.name === 'OperationError') { - throw new TdfDecryptError(err); + throw new DecryptError(err); } throw err; diff --git a/lib/tdf3/src/models/assertion.ts b/lib/tdf3/src/models/assertion.ts index 746bb615..dc08b08c 100644 --- a/lib/tdf3/src/models/assertion.ts +++ b/lib/tdf3/src/models/assertion.ts @@ -1,6 +1,7 @@ import { canonicalizeEx } from 'json-canonicalize'; import { SignJWT, jwtVerify } from 'jose'; import { AssertionKey } from './../client/AssertionConfig.js'; +import { ConfigurationError, InvalidFileError } from '../../../src/errors.js'; export type AssertionKeyAlg = 'RS256' | 'HS256'; export type AssertionType = 'handling' | 'other'; @@ -63,18 +64,18 @@ export async function sign( sig: string, key: AssertionKey ): Promise { - const payload: any = {}; + const payload: Record = {}; payload[kAssertionHash] = assertionHash; payload[kAssertionSignature] = sig; + let token: string; try { - const token = await new SignJWT(payload).setProtectedHeader({ alg: key.alg }).sign(key.key); - - this.binding.method = 'jws'; - this.binding.signature = token; + token = await new SignJWT(payload).setProtectedHeader({ alg: key.alg }).sign(key.key); } catch (error) { - throw new Error(`Signing assertion failed: ${error.message}`); + throw new ConfigurationError(`Signing assertion failed: ${error.message}`, error); } + this.binding.method = 'jws'; + this.binding.signature = token; } /** @@ -92,7 +93,7 @@ export async function 
verify(this: Assertion, key: AssertionKey): Promise<[strin return [payload[kAssertionHash] as string, payload[kAssertionSignature] as string]; } catch (error) { - throw new Error(`Verifying assertion failed: ${error.message}`); + throw new InvalidFileError(`Verifying assertion failed: ${error.message}`, error); } } diff --git a/lib/tdf3/src/models/encryption-information.ts b/lib/tdf3/src/models/encryption-information.ts index 700ff73c..e288bc41 100644 --- a/lib/tdf3/src/models/encryption-information.ts +++ b/lib/tdf3/src/models/encryption-information.ts @@ -10,6 +10,7 @@ import { type EncryptResult, } from '../crypto/declarations.js'; import { IntegrityAlgorithm } from '../tdf.js'; +import { ConfigurationError } from '../../../src/errors.js'; export type KeyInfo = { readonly unwrappedKeyBinary: Binary; @@ -102,7 +103,9 @@ export class SplitKey { : typeof metadata === 'string' ? metadata : () => { - throw new Error(); + throw new ConfigurationError( + "KAO generation failure: metadata isn't a string or object" + ); } ) as string; @@ -137,9 +140,10 @@ export class SplitKey { } async write(policy: Policy, keyInfo: KeyInfo): Promise { - const algorithm = this.cipher.name; + const algorithm = this.cipher?.name; if (!algorithm) { - throw new Error('Uninitialized cipher type'); + // Hard coded as part of the cipher object. This should not be reachable. + throw new ConfigurationError('uninitialized cipher type'); } const keyAccessObjects = await this.getKeyAccessObjects(policy, keyInfo); diff --git a/lib/tdf3/src/models/policy.ts b/lib/tdf3/src/models/policy.ts index 7f39603b..589ec1db 100644 --- a/lib/tdf3/src/models/policy.ts +++ b/lib/tdf3/src/models/policy.ts @@ -1,4 +1,4 @@ -import { PolicyIntegrityError } from '../../../src/errors.js'; +import { ConfigurationError } from '../../../src/errors.js'; import { AttributeObject } from './attribute-set.js'; export const CURRENT_VERSION = '1.1.0'; @@ -16,7 +16,7 @@ export type Policy = { export function validatePolicyObject(policyMaybe: unknown): policyMaybe is Policy { if (typeof policyMaybe !== 'object') { - throw new PolicyIntegrityError( + throw new ConfigurationError( `The given policy reference must be an object, not: ${policyMaybe}` ); } @@ -27,7 +27,7 @@ export function validatePolicyObject(policyMaybe: unknown): policyMaybe is Polic if (policy.body && !policy.body.dissem) missingFields.push('body.dissem'); if (missingFields.length) { - throw new PolicyIntegrityError( + throw new ConfigurationError( `The given policy object requires the following properties: ${missingFields}` ); } diff --git a/lib/tdf3/src/tdf.ts b/lib/tdf3/src/tdf.ts index 2ef75776..e6744b3a 100644 --- a/lib/tdf3/src/tdf.ts +++ b/lib/tdf3/src/tdf.ts @@ -41,16 +41,17 @@ import { import { Binary } from './binary.js'; import { KasPublicKeyAlgorithm, KasPublicKeyInfo, OriginAllowList } from '../../src/access.js'; import { - IllegalArgumentError, - KasDecryptError, - KasUpsertError, - KeyAccessError, - ManifestIntegrityError, - PolicyIntegrityError, - TdfDecryptError, + ConfigurationError, + DecryptError, + InvalidFileError, + IntegrityError, + NetworkError, + PermissionDeniedError, + ServiceError, TdfError, - TdfPayloadExtractionError, + UnauthenticatedError, UnsafeUrlError, + UnsupportedFeatureError as UnsupportedError, } from '../../src/errors.js'; import { htmlWrapperTemplate } from './templates/index.js'; @@ -206,7 +207,7 @@ export async function fetchKasPublicKey( algorithm?: KasPublicKeyAlgorithm ): Promise { if (!kas) { - throw new TdfError('KAS definition not found'); + 
throw new ConfigurationError('KAS definition not found'); } // Logs insecure KAS. Secure is enforced in constructor validateSecureUrl(kas); @@ -215,16 +216,14 @@ export async function fetchKasPublicKey( if (algorithm) { params.algorithm = algorithm; } + const v2Url = `${kas}/v2/kas_public_key`; try { - const response: { data: string | KasPublicKeyInfo } = await axios.get( - `${kas}/v2/kas_public_key`, - { - params: { - ...params, - v: '2', - }, - } - ); + const response: { data: string | KasPublicKeyInfo } = await axios.get(v2Url, { + params: { + ...params, + v: '2', + }, + }); const publicKey = typeof response.data === 'string' ? await extractPemFromKeyString(response.data) @@ -237,16 +236,32 @@ export async function fetchKasPublicKey( }; } catch (cause) { const status = cause?.response?.status; - if (status != 400 && status != 404) { - throw new TdfError( - `Retrieving KAS public key [${kas}] failed [${cause.name}] [${cause.message}]`, - cause - ); + switch (status) { + case 400: + case 404: + // KAS does not yet implement v2, perhaps a legacy server + break; + case 401: + throw new UnauthenticatedError(`[${v2Url}] requires auth`, cause); + case 403: + throw new PermissionDeniedError(`[${v2Url}] permission denied`, cause); + default: + if (status && status >= 400 && status < 500) { + throw new ConfigurationError( + `[${v2Url}] request error [${status}] [${cause.name}] [${cause.message}]`, + cause + ); + } + throw new NetworkError( + `[${v2Url}] error [${status}] [${cause.name}] [${cause.message}]`, + cause + ); } } // Retry with v1 params + const v1Url = `${kas}/kas_public_key`; try { - const response: { data: string | KasPublicKeyInfo } = await axios.get(`${kas}/kas_public_key`, { + const response: { data: string | KasPublicKeyInfo } = await axios.get(v1Url, { params, }); const publicKey = @@ -261,10 +276,24 @@ export async function fetchKasPublicKey( ...(typeof response.data !== 'string' && response.data.kid && { kid: response.data.kid }), }; } catch (cause) { const status = cause?.response?.status; + switch (status) { + case 401: + throw new UnauthenticatedError(`[${v1Url}] requires auth`, cause); + case 403: + throw new PermissionDeniedError(`[${v1Url}] permission denied`, cause); + default: + if (status && status >= 400 && status < 500) { + throw new ConfigurationError( + `[${v1Url}] request error [${status}] [${cause.name}] [${cause.message}]`, + cause + ); + } + throw new NetworkError( + `[${v1Url}] error [${status}] [${cause.name}] [${cause.message}]`, + cause + ); + } - throw new TdfError( - `Retrieving KAS public key [${kas}] failed [${cause.name}] [${cause.message}]`, - cause - ); } } /** @@ -302,13 +331,13 @@ export function unwrapHtml(htmlPayload: ArrayBuffer | Uint8Array | Binary | stri const payloadRe = /]*value=['"]?([a-zA-Z0-9+/=]+)['"]?/; const reResult = payloadRe.exec(html); if (reResult === null) { - throw new TdfPayloadExtractionError('Payload is missing'); + throw new InvalidFileError('Payload is missing'); } const base64Payload = reResult[1]; try { return base64ToBuffer(base64Payload); } catch (e) { - throw new TdfPayloadExtractionError('There was a problem extracting the TDF3 payload', e); + throw new InvalidFileError('There was a problem extracting the TDF3 payload', e); } } @@ -363,7 +392,7 @@ export async function buildKeyAccess({ case 'remote': return new KeyAccessRemote(kasUrl, kasKeyIdentifier, pubKey, metadata, sid); default: - throw new KeyAccessError(`buildKeyAccess: Key access type ${type} is unknown`); + throw new ConfigurationError(`buildKeyAccess: Key access type 
${type} is unknown`); } } @@ -389,7 +418,7 @@ export async function buildKeyAccess({ } } // All failed. Raise an error. - throw new KeyAccessError('TDF.buildKeyAccess: No source for kasUrl or pubKey'); + throw new ConfigurationError('TDF.buildKeyAccess: No source for kasUrl or pubKey'); } export function validatePolicyObject(policy: Policy): void { @@ -400,7 +429,7 @@ export function validatePolicyObject(policy: Policy): void { if (policy.body && !policy.body.dissem) missingFields.push('body.dissem'); if (missingFields.length) { - throw new PolicyIntegrityError( + throw new ConfigurationError( `The given policy object requires the following properties: ${missingFields}` ); } @@ -422,17 +451,8 @@ async function _generateManifest( ...(mimeType && { mimeType }), }; - if (!policy) { - throw new Error(`No policy provided`); - } const encryptionInformationStr = await encryptionInformation.write(policy, keyInfo); - - if (!encryptionInformationStr) { - throw new Error('Missing encryption information'); - } - const assertions: Assertion[] = []; - return { payload, // generate the manifest first, then insert integrity information into it @@ -458,7 +478,7 @@ async function getSignature( buffToString(new Uint8Array(payloadBinary.asArrayBuffer()), 'utf-8') ); default: - throw new IllegalArgumentError(`Unsupported signature alg [${algorithmType}]`); + throw new ConfigurationError(`Unsupported signature alg [${algorithmType}]`); } } @@ -485,14 +505,14 @@ export async function upsert({ return allowList; } if (!allowedKases) { - throw new Error('Upsert cannot be done without allowlist'); + throw new ConfigurationError('Upsert cannot be done without allowlist'); } return new OriginAllowList(allowedKases); })(); const { keyAccess, policy } = unsavedManifest.encryptionInformation; const isAppIdProvider = authProvider && isAppIdProviderCheck(authProvider); if (authProvider === undefined) { - throw new Error('Upsert cannot be done without auth provider'); + throw new ConfigurationError('Upsert cannot be done without auth provider'); } return Promise.all( keyAccess.map(async (keyAccessObject) => { @@ -503,7 +523,7 @@ export async function upsert({ } if (!allowed.allows(keyAccessObject.url)) { - throw new KasUpsertError(`Unexpected KAS url: [${keyAccessObject.url}]`); + throw new UnsafeUrlError(`Unexpected KAS url: [${keyAccessObject.url}]`); } const url = `${keyAccessObject.url}/${isAppIdProvider ? 
'' : 'v2/'}upsert`; @@ -545,7 +565,22 @@ export async function upsert({ } return response.data; } catch (e) { - throw new KasUpsertError( + if (e.response) { + if (e.response.status >= 500) { + throw new ServiceError('upsert failure', e); + } else if (e.response.status === 403) { + throw new PermissionDeniedError('upsert failure', e); + } else if (e.response.status === 401) { + throw new UnauthenticatedError('upsert auth failure', e); + } else if (e.response.status === 400) { + throw new ConfigurationError('upsert bad request; likely a configuration error', e); + } else { + throw new NetworkError('upsert server error', e); + } + } else if (e.request) { + throw new NetworkError('upsert request failure', e); + } + throw new TdfError( `Unable to perform upsert operation on the KAS: [${e.name}: ${e.message}], response: [${e?.response?.body}]`, e ); @@ -556,14 +591,12 @@ export async function writeStream(cfg: EncryptConfiguration): Promise { if (!cfg.authProvider) { - throw new IllegalArgumentError('No authorization middleware defined'); + throw new ConfigurationError('No authorization middleware defined'); } if (!cfg.contentStream) { - throw new IllegalArgumentError('No input stream defined'); - } - if (!cfg.encryptionInformation) { - throw new IllegalArgumentError('No encryption type specified'); + throw new ConfigurationError('No input stream defined'); } + // eslint-disable-next-line @typescript-eslint/no-this-alias const segmentInfos: Segment[] = []; @@ -587,11 +620,6 @@ export async function writeStream(cfg: EncryptConfiguration): Promise cfg.byteLimit) { - throw new Error(`Safe byte limit (${cfg.byteLimit}) exceeded`); + throw new ConfigurationError(`Safe byte limit (${cfg.byteLimit}) exceeded`); } //new Uint8Array(chunk.buffer, chunk.byteOffset, chunk.byteLength); crcCounter = unsigned(chunk as Uint8Array, crcCounter); @@ -866,10 +895,11 @@ export function splitLookupTableFactory( const accessibleSplits = new Set(keyAccess.filter(allowed).map(({ sid }) => sid)); if (splitIds.size > accessibleSplits.size) { const disallowedKases = new Set(keyAccess.filter((k) => !allowed(k)).map(({ url }) => url)); - throw new KasDecryptError( + throw new UnsafeUrlError( `Unreconstructable key - disallowed KASes include: ${JSON.stringify([ ...disallowedKases, - ])} from splitIds ${JSON.stringify([...splitIds])}` + ])} from splitIds ${JSON.stringify([...splitIds])}`, + ...disallowedKases ); } const splitPotentials: Record> = Object.fromEntries( @@ -878,7 +908,7 @@ export function splitLookupTableFactory( for (const kao of keyAccess) { const disjunction = splitPotentials[kao.sid ?? '']; if (kao.url in disjunction) { - throw new KasDecryptError( + throw new InvalidFileError( `TODO: Fallback to no split ids. 
Repetition found for [${kao.url}] on split [${kao.sid}]` ); } @@ -905,7 +935,9 @@ async function unwrapKey({ cryptoService: CryptoService; }) { if (authProvider === undefined) { - throw new KasDecryptError('Upsert can be done without auth provider'); + throw new ConfigurationError( + 'rewrap requires auth provider; must be configured in client constructor' + ); } const { keyAccess } = manifest.encryptionInformation; const splitPotentials = splitLookupTableFactory(keyAccess, allowedKases); @@ -980,8 +1012,27 @@ async function unwrapKey({ ); rewrappedKeys.push(new Uint8Array(decryptedKeyBinary.asByteArray())); } catch (e) { - console.error(e); - throw new KasDecryptError( + if (e.response) { + if (e.response.status >= 500) { + throw new ServiceError('rewrap failure', e); + } else if (e.response.status === 403) { + throw new PermissionDeniedError('rewrap failure', e); + } else if (e.response.status === 401) { + throw new UnauthenticatedError('rewrap auth failure', e); + } else if (e.response.status === 400) { + throw new InvalidFileError( + 'rewrap bad request; could indicate an invalid policy binding or a configuration error', + e + ); + } else { + throw new NetworkError('rewrap server error', e); + } + } else if (e.request) { + throw new NetworkError('rewrap request failure', e); + } else if (e.name == 'InvalidAccessError' || e.name == 'OperationError') { + throw new DecryptError('unable to unwrap key from kas', e); + } + throw new InvalidFileError( `Unable to decrypt the response from KAS: [${e.name}: ${e.message}], response: [${e?.response?.body}]`, e ); @@ -1006,6 +1057,8 @@ async function decryptChunk( segmentIntegrityAlgorithm: IntegrityAlgorithm, cryptoService: CryptoService ): Promise { + if (segmentIntegrityAlgorithm !== 'GMAC' && segmentIntegrityAlgorithm !== 'HS256') + throw new UnsupportedError(`Unsupported segment hash alg [${segmentIntegrityAlgorithm}]`); const segmentHashStr = await getSignature( reconstructedKeyBinary, Binary.fromArrayBuffer(encryptedChunk.buffer), @@ -1013,7 +1066,7 @@ async function decryptChunk( cryptoService ); if (hash !== btoa(segmentHashStr)) { - throw new ManifestIntegrityError('Failed integrity check on segment hash'); + throw new IntegrityError('Failed integrity check on segment hash'); } return await cipher.decrypt(encryptedChunk, reconstructedKeyBinary); } @@ -1038,33 +1091,35 @@ async function updateChunkQueue( } requests.push( (async () => { + let buffer: Uint8Array | null; + + const slice = chunkMap.slice(i, i + chunksInOneDownload); try { - const slice = chunkMap.slice(i, i + chunksInOneDownload); const bufferSize = slice.reduce( (currentVal, { encryptedSegmentSize }) => currentVal + (encryptedSegmentSize as number), 0 ); - const buffer: Uint8Array | null = await zipReader.getPayloadSegment( + buffer = await zipReader.getPayloadSegment( centralDirectory, '0.payload', slice[0].encryptedOffset, bufferSize ); - if (buffer) { - sliceAndDecrypt({ - buffer, - cryptoService, - reconstructedKeyBinary, - slice, - cipher, - segmentIntegrityAlgorithm, - }); - } } catch (e) { - throw new TdfDecryptError( - 'Error decrypting payload. 
This suggests the key used to decrypt the payload is not correct.', - e - ); + if (e instanceof InvalidFileError) { + throw e; + } + throw new NetworkError('unable to fetch payload segment', e); + } + if (buffer) { + sliceAndDecrypt({ + buffer, + cryptoService, + reconstructedKeyBinary, + slice, + cipher, + segmentIntegrityAlgorithm, + }); } })() ); @@ -1095,19 +1150,26 @@ export async function sliceAndDecrypt({ buffer.slice(offset, offset + (encryptedSegmentSize as number)) ); - await decryptChunk( - encryptedChunk, - reconstructedKeyBinary, - slice[index]['hash'], - cipher, - segmentIntegrityAlgorithm, - cryptoService - ) - .then((result) => { - slice[index].decryptedChunk = result; - return null; - }) - .then(_resolve, _reject); + try { + const result = await decryptChunk( + encryptedChunk, + reconstructedKeyBinary, + slice[index]['hash'], + cipher, + segmentIntegrityAlgorithm, + cryptoService + ); + slice[index].decryptedChunk = result; + if (_resolve) { + _resolve(null); + } + } catch (e) { + if (_reject) { + _reject(e); + } else { + throw e; + } + } } } @@ -1115,13 +1177,13 @@ export async function readStream(cfg: DecryptConfiguration) { let { allowList } = cfg; if (!allowList) { if (!cfg.allowedKases) { - throw new Error('Upsert cannot be done without allowlist'); + throw new ConfigurationError('Upsert cannot be done without allowlist'); } allowList = new OriginAllowList(cfg.allowedKases); } const { manifest, zipReader, centralDirectory } = await loadTDFStream(cfg.chunker); if (!manifest) { - throw new Error('Missing manifest data'); + throw new InvalidFileError('Missing manifest data'); } cfg.keyMiddleware ??= async (key) => key; @@ -1146,6 +1208,9 @@ export async function readStream(cfg: DecryptConfiguration) { // check the combined string of hashes const aggregateHash = segments.map(({ hash }) => base64.decode(hash)).join(''); const integrityAlgorithm = rootSignature.alg; + if (integrityAlgorithm !== 'GMAC' && integrityAlgorithm !== 'HS256') { + throw new UnsupportedError(`Unsupported integrity alg [${integrityAlgorithm}]`); + } const payloadSigStr = await getSignature( keyForDecryption, Binary.fromString(aggregateHash), @@ -1157,7 +1222,7 @@ export async function readStream(cfg: DecryptConfiguration) { manifest.encryptionInformation.integrityInformation.rootSignature.sig !== base64.encode(payloadSigStr) ) { - throw new ManifestIntegrityError('Failed integrity check on root signature'); + throw new IntegrityError('Failed integrity check on root signature'); } // // Validate assertions @@ -1195,12 +1260,12 @@ export async function readStream(cfg: DecryptConfiguration) { // check if assertionHash is same as hashOfAssertion if (hashOfAssertion !== assertionHash) { - throw new ManifestIntegrityError('Assertion hash mismatch'); + throw new IntegrityError('Assertion hash mismatch'); } // check if assertionSig is same as encodedHash if (assertionSig !== encodedHash) { - throw new ManifestIntegrityError('Failed integrity check on assertion signature'); + throw new IntegrityError('Failed integrity check on assertion signature'); } } @@ -1228,6 +1293,10 @@ export async function readStream(cfg: DecryptConfiguration) { ); const cipher = new AesGcmCipher(cfg.cryptoService); + const segmentIntegrityAlg = segmentHashAlg || integrityAlgorithm; + if (segmentIntegrityAlg !== 'GMAC' && segmentIntegrityAlg !== 'HS256') { + throw new UnsupportedError(`Unsupported segment hash alg [${segmentIntegrityAlg}]`); + } // Not waiting for Promise to resolve updateChunkQueue( @@ -1236,7 +1305,7 @@ export async 
function readStream(cfg: DecryptConfiguration) { zipReader, keyForDecryption, cipher, - segmentHashAlg || integrityAlgorithm, + segmentIntegrityAlg, cfg.cryptoService ); diff --git a/lib/tdf3/src/utils/chunkers.ts b/lib/tdf3/src/utils/chunkers.ts index 10c2dd14..759a8666 100644 --- a/lib/tdf3/src/utils/chunkers.ts +++ b/lib/tdf3/src/utils/chunkers.ts @@ -4,6 +4,7 @@ import { isDecoratedReadableStream, } from '../client/DecoratedReadableStream.js'; import axiosRetry from 'axios-retry'; +import { ConfigurationError, NetworkError } from '../../../src/errors.js'; let axiosRemoteChunk: AxiosInstance | null = null; @@ -48,7 +49,7 @@ async function getRemoteChunk(url: string, range?: string): Promise responseType: 'arraybuffer', }); if (!res.data) { - throw new Error( + throw new NetworkError( 'Unexpected response type: Server should have responded with an ArrayBuffer.' ); } @@ -88,30 +89,30 @@ export const fromDataSource = async ({ type, location }: DataSource) => { switch (type) { case 'buffer': if (!(location instanceof Uint8Array)) { - throw new Error('Invalid data source; must be uint8 array'); + throw new ConfigurationError('Invalid data source; must be uint8 array'); } return fromBuffer(location); case 'chunker': if (!(location instanceof Function)) { - throw new Error('Invalid data source; must be uint8 array'); + throw new ConfigurationError('Invalid data source; must be uint8 array'); } return location; case 'file-browser': if (!(location instanceof Blob)) { - throw new Error('Invalid data source; must be at least a Blob'); + throw new ConfigurationError('Invalid data source; must be at least a Blob'); } return fromBrowserFile(location); case 'remote': if (typeof location !== 'string') { - throw new Error('Invalid data source; url not provided'); + throw new ConfigurationError('Invalid data source; url not provided'); } return fromUrl(location); case 'stream': if (!isDecoratedReadableStream(location)) { - throw new Error('Invalid data source; must be DecoratedTdfStream'); + throw new ConfigurationError('Invalid data source; must be DecoratedTdfStream'); } return fromBuffer(await location.toBuffer()); default: - throw new Error(`Data source type not defined, or not supported: ${type}}`); + throw new ConfigurationError(`Data source type not defined, or not supported: ${type}}`); } }; diff --git a/lib/tdf3/src/utils/index.ts b/lib/tdf3/src/utils/index.ts index 1fa940f1..0fa41f7a 100644 --- a/lib/tdf3/src/utils/index.ts +++ b/lib/tdf3/src/utils/index.ts @@ -4,6 +4,7 @@ import * as WebCryptoService from '../crypto/index.js'; import { KeyInfo, SplitKey } from '../models/index.js'; import { AesGcmCipher } from '../ciphers/aes-gcm-cipher.js'; +import { ConfigurationError } from '../../../src/errors.js'; export { ZipReader, readUInt64LE } from './zip-reader.js'; export { ZipWriter } from './zip-writer.js'; @@ -301,7 +302,7 @@ export async function keyMiddleware(): Promise<{ const cipher = new AesGcmCipher(WebCryptoService); const encryptionInformation = new SplitKey(cipher); if (!encryptionInformation?.generateKey) { - throw new Error('Crypto service not initialised'); + throw new ConfigurationError('Crypto service not initialised'); } const key = await encryptionInformation.generateKey(); return { keyForEncryption: key, keyForManifest: key }; diff --git a/lib/tdf3/src/utils/zip-reader.ts b/lib/tdf3/src/utils/zip-reader.ts index e76dff48..1fd6cdbe 100644 --- a/lib/tdf3/src/utils/zip-reader.ts +++ b/lib/tdf3/src/utils/zip-reader.ts @@ -1,3 +1,4 @@ +import { InvalidFileError } from 
'../../../src/errors.js'; import { Manifest } from '../models/index.js'; import { Chunker } from './chunkers.js'; import { readUInt32LE, readUInt16LE, copyUint8Arr, buffToString } from './index.js'; @@ -91,11 +92,11 @@ export class ZipReader { async getManifest(cdBuffers: CentralDirectory[], manifestFileName: string): Promise { const cdObj = cdBuffers.find(({ fileName }) => fileName === manifestFileName); if (!cdObj) { - throw new Error('Unable to retrieve CD manifest'); + throw new InvalidFileError('Unable to retrieve CD manifest'); } const byteStart = cdObj.relativeOffsetOfLocalHeader + cdObj.headerLength; if (cdObj.uncompressedSize > manifestMaxSize) { - throw new Error( + throw new InvalidFileError( `manifest file too large: ${(cdObj.uncompressedSize >> 10).toLocaleString()} KiB` ); } @@ -107,7 +108,7 @@ export class ZipReader { async adjustHeaders(cdObj: CentralDirectory): Promise { if (!cdObj) { - throw new Error('Unable to retrieve CD adjust'); + throw new InvalidFileError('Unable to retrieve CD adjust'); } // Calculate header length -- tdf3-js writes 0 in all the header fields // and does not include extra field for zip64 @@ -126,7 +127,7 @@ export class ZipReader { ): Promise { const cdObj = cdBuffers.find(({ fileName }) => payloadName === fileName); if (!cdObj) { - throw new Error('Unable to retrieve CD'); + throw new InvalidFileError('Unable to retrieve CD'); } const byteStart = cdObj.relativeOffsetOfLocalHeader + cdObj.headerLength + encrpytedSegmentOffset; @@ -217,7 +218,7 @@ function parseCentralDirectoryWithNoExtras(cdBuffer: Uint8Array): CentralDirecto */ export function parseCDBuffer(cdBuffer: Uint8Array): CentralDirectory { if (readUInt32LE(cdBuffer, 0) !== CD_SIGNATURE) { - throw new Error('Invalid central directory file header signature'); + throw new InvalidFileError('Invalid central directory file header signature'); } const cd = parseCentralDirectoryWithNoExtras(cdBuffer); @@ -240,7 +241,7 @@ export function parseCDBuffer(cdBuffer: Uint8Array): CentralDirectory { // 0 - Original Size 8 bytes if (cd.uncompressedSize === 0xffffffff) { if (index + 8 > zip64EiefBuffer.length) { - throw new Error( + throw new InvalidFileError( 'zip64 extended information extra field does not include uncompressed size' ); } @@ -250,7 +251,9 @@ export function parseCDBuffer(cdBuffer: Uint8Array): CentralDirectory { // 8 - Compressed Size 8 bytes if (cd.compressedSize === 0xffffffff) { if (index + 8 > zip64EiefBuffer.length) { - throw new Error('zip64 extended information extra field does not include compressed size'); + throw new InvalidFileError( + 'zip64 extended information extra field does not include compressed size' + ); } cd.compressedSize = readUInt64LE(zip64EiefBuffer, index); index += 8; @@ -258,7 +261,7 @@ export function parseCDBuffer(cdBuffer: Uint8Array): CentralDirectory { // 16 - Relative Header Offset 8 bytes if (cd.relativeOffsetOfLocalHeader === 0xffffffff) { if (index + 8 > zip64EiefBuffer.length) { - throw new Error( + throw new InvalidFileError( 'zip64 extended information extra field does not include relative header offset' ); } @@ -325,12 +328,12 @@ function sliceExtraFields( const dataStart = i + 4; const dataEnd = dataStart + dataSize; if (dataEnd > extraFieldBuffer.length) { - throw new Error('extra field length exceeds extra field buffer size'); + throw new InvalidFileError('extra field length exceeds extra field buffer size'); } const dataBuffer = new Uint8Array(dataSize); copyUint8Arr(extraFieldBuffer, dataBuffer, 0, dataStart, dataEnd); if 
(extraFields[headerId]) { - throw new Error(`Conflicting extra field #${headerId} for entry [${cd.fileName}]`); + throw new InvalidFileError(`Conflicting extra field #${headerId} for entry [${cd.fileName}]`); } extraFields[headerId] = dataBuffer; i = dataEnd; diff --git a/lib/tdf3/src/utils/zip-writer.ts b/lib/tdf3/src/utils/zip-writer.ts index 1309edf1..87b289a0 100644 --- a/lib/tdf3/src/utils/zip-writer.ts +++ b/lib/tdf3/src/utils/zip-writer.ts @@ -48,7 +48,7 @@ const ZIP64_END_OF_CENTRAL_DIRECTORY_LOCATOR_SIZE = 20; // write a 64bit integer by writing 2 32bit integers export function writeUInt64LE(buffer: Uint8Array, n: number, offset: number): void { if (!Number.isSafeInteger(n)) { - throw new Error(`Unsafe number [${n}]`); + throw new Error(`internal: unsafe number [${n}]`); } const high = Math.floor(n / 0x100000000); const low = n % 0x100000000; diff --git a/lib/tests/mocha/unit/errors.spec.ts b/lib/tests/mocha/unit/errors.spec.ts index a07dff36..6814aab3 100644 --- a/lib/tests/mocha/unit/errors.spec.ts +++ b/lib/tests/mocha/unit/errors.spec.ts @@ -1,27 +1,11 @@ import { assert } from 'chai'; -import { - KasDecryptError, - KasUpsertError, - KeyAccessError, - KeySyncError, - ManifestIntegrityError, - PolicyIntegrityError, - TdfDecryptError, - TdfError, - TdfPayloadExtractionError, -} from '../../../src/errors.js'; +import { DecryptError, IntegrityError, TdfError } from '../../../src/errors.js'; describe('Errors', () => { const errorClasses: Record = { - KasDecryptError, - KasUpsertError, - KeyAccessError, - KeySyncError, - ManifestIntegrityError, - PolicyIntegrityError, - TdfDecryptError, + DecryptError, + IntegrityError, TdfError, - TdfPayloadExtractionError, }; Object.keys(errorClasses).forEach((errorName) => { @@ -41,6 +25,10 @@ describe('Errors', () => { assert.instanceOf(err, Error); }); + it('should be instanceof TdfError', () => { + assert.instanceOf(err, TdfError); + }); + it('should throw correctly', () => { assert.throws(() => { throw err; diff --git a/lib/tests/mocha/unit/tdf.spec.ts b/lib/tests/mocha/unit/tdf.spec.ts index e49f0372..62404aea 100644 --- a/lib/tests/mocha/unit/tdf.spec.ts +++ b/lib/tests/mocha/unit/tdf.spec.ts @@ -3,7 +3,7 @@ import { expect } from 'chai'; import * as TDF from '../../../tdf3/src/tdf.js'; import { KeyAccessObject } from '../../../tdf3/src/models/key-access.js'; import { OriginAllowList } from '../../../src/access.js'; -import { KasDecryptError, TdfError } from '../../../src/errors.js'; +import { InvalidFileError, TdfError, UnsafeUrlError } from '../../../src/errors.js'; const sampleCert = ` -----BEGIN CERTIFICATE----- @@ -126,7 +126,7 @@ describe('splitLookupTableFactory', () => { }); }); - it('should throw KasDecryptError for disallowed KASes', () => { + it('should throw UnsafeUrlError for disallowed KASes', () => { const keyAccess: KeyAccessObject[] = [ { sid: 'split1', type: 'remote', url: 'https://kas1', protocol: 'kas' }, { sid: 'split2', type: 'remote', url: 'https://kas3', protocol: 'kas' }, // kas3 is not allowed @@ -134,12 +134,12 @@ describe('splitLookupTableFactory', () => { const allowedKases = new OriginAllowList(['https://kas1']); expect(() => TDF.splitLookupTableFactory(keyAccess, allowedKases)).to.throw( - KasDecryptError, + UnsafeUrlError, 'Unreconstructable key - disallowed KASes include: ["https://kas3"] from splitIds ["split1","split2"]' ); }); - it('should throw KasDecryptError for duplicate URLs in the same splitId', () => { + it('should throw for duplicate URLs in the same splitId', () => { const keyAccess: 
KeyAccessObject[] = [ { sid: 'split1', type: 'remote', url: 'https://kas1', protocol: 'kas' }, { sid: 'split1', type: 'remote', url: 'https://kas1', protocol: 'kas' }, // duplicate URL in same splitId @@ -147,7 +147,7 @@ describe('splitLookupTableFactory', () => { const allowedKases = new OriginAllowList(['https://kas1']); expect(() => TDF.splitLookupTableFactory(keyAccess, allowedKases)).to.throw( - KasDecryptError, + InvalidFileError, 'TODO: Fallback to no split ids. Repetition found for [https://kas1] on split [split1]' ); }); @@ -168,7 +168,7 @@ describe('splitLookupTableFactory', () => { const allowedKases = new OriginAllowList([]); expect(() => TDF.splitLookupTableFactory(keyAccess, allowedKases)).to.throw( - KasDecryptError, + InvalidFileError, 'Unreconstructable key - disallowed KASes include: ["https://kas1"]' ); }); diff --git a/lib/tests/mocha/unit/zip.spec.ts b/lib/tests/mocha/unit/zip.spec.ts index a5386a27..a48a33e5 100644 --- a/lib/tests/mocha/unit/zip.spec.ts +++ b/lib/tests/mocha/unit/zip.spec.ts @@ -39,7 +39,7 @@ describe('zip utilities', () => { expect(b1).to.eql(b0); }); it('unsafe ints throw', () => { - expect(() => writeUInt64LE(Buffer.alloc(0), 2 ** 54, 0)).to.throw(/Unsafe number/); + expect(() => writeUInt64LE(Buffer.alloc(0), 2 ** 54, 0)).to.throw(/unsafe number/); }); }); describe('readUInt64LE', () => { diff --git a/lib/tests/web/nanotdf/models/ResourceLocator.test.ts b/lib/tests/web/nanotdf/models/ResourceLocator.test.ts index b9af078d..a00d19b7 100644 --- a/lib/tests/web/nanotdf/models/ResourceLocator.test.ts +++ b/lib/tests/web/nanotdf/models/ResourceLocator.test.ts @@ -62,7 +62,7 @@ describe('NanoTDF.ResourceLocator', () => { for (const { v, msg } of [ { v: '03 01 61', msg: 'protocol' }, - { v: 'a1 01 61', msg: 'identifier' }, + { v: 'a1 01 61', msg: 'url parser: unsupported' }, { v: '00 00', msg: 'body' }, { v: '10 ff 61 61 ', msg: 'bounds' }, { v: '10 01 61 61 ', msg: 'bounds' }, diff --git a/remote-store/package-lock.json b/remote-store/package-lock.json index 62bb5163..1676e480 100644 --- a/remote-store/package-lock.json +++ b/remote-store/package-lock.json @@ -1536,9 +1536,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.25.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", - "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.7.tgz", + "integrity": "sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w==", "license": "MIT", "dependencies": { "regenerator-runtime": "^0.14.0" @@ -1821,7 +1821,7 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-KzELD2ECWzt5ci3cmqAb/AAHpPUtO8MibopZgxJioXgvuMyoA+t3VFlGxD7RenVx0Ecm1GjEHzs68IGjHN90rQ==", + "integrity": "sha512-LgL8v4mPmcu7nlCz/1C1Vl6WKYSlQM//bevC5u2J1AYmkyCys9A3qd7bgXIU6kMxbuU80B7DgmeSRRvp4qHrRQ==", "license": "BSD-3-Clause-Clear", "dependencies": { "axios": "^1.6.1", @@ -1833,6 +1833,7 @@ "dpop": "^1.2.0", "eventemitter3": "^5.0.1", "jose": "^4.14.4", + "json-canonicalize": "^1.0.6", "streamsaver": "^2.0.6", "uuid": "~9.0.0" } @@ -4453,6 +4454,12 @@ "js-yaml": "bin/js-yaml.js" } }, + "node_modules/json-canonicalize": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/json-canonicalize/-/json-canonicalize-1.0.6.tgz", + "integrity": 
"sha512-kP2iYpOS5SZHYhIaR1t9oG80d4uTY3jPoaBj+nimy3njtJk8+sRsVatN8pyJRDRtk9Su3+6XqA2U8k0dByJBUQ==", + "license": "MIT" + }, "node_modules/json-parse-even-better-errors": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-3.0.0.tgz", diff --git a/web-app/package-lock.json b/web-app/package-lock.json index 33fe70e7..0fde7360 100644 --- a/web-app/package-lock.json +++ b/web-app/package-lock.json @@ -350,9 +350,9 @@ } }, "node_modules/@babel/runtime": { - "version": "7.25.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", - "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.7.tgz", + "integrity": "sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w==", "license": "MIT", "dependencies": { "regenerator-runtime": "^0.14.0" @@ -607,7 +607,7 @@ "node_modules/@opentdf/client": { "version": "2.0.0", "resolved": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-KzELD2ECWzt5ci3cmqAb/AAHpPUtO8MibopZgxJioXgvuMyoA+t3VFlGxD7RenVx0Ecm1GjEHzs68IGjHN90rQ==", + "integrity": "sha512-LgL8v4mPmcu7nlCz/1C1Vl6WKYSlQM//bevC5u2J1AYmkyCys9A3qd7bgXIU6kMxbuU80B7DgmeSRRvp4qHrRQ==", "license": "BSD-3-Clause-Clear", "dependencies": { "axios": "^1.6.1", @@ -619,6 +619,7 @@ "dpop": "^1.2.0", "eventemitter3": "^5.0.1", "jose": "^4.14.4", + "json-canonicalize": "^1.0.6", "streamsaver": "^2.0.6", "uuid": "~9.0.0" } @@ -2007,9 +2008,9 @@ } }, "node_modules/form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", + "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", "license": "MIT", "dependencies": { "asynckit": "^0.4.0", @@ -2312,6 +2313,12 @@ "node": ">=4" } }, + "node_modules/json-canonicalize": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/json-canonicalize/-/json-canonicalize-1.0.6.tgz", + "integrity": "sha512-kP2iYpOS5SZHYhIaR1t9oG80d4uTY3jPoaBj+nimy3njtJk8+sRsVatN8pyJRDRtk9Su3+6XqA2U8k0dByJBUQ==", + "license": "MIT" + }, "node_modules/json-parse-even-better-errors": { "version": "2.3.1", "dev": true, @@ -3951,9 +3958,9 @@ } }, "@babel/runtime": { - "version": "7.25.6", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", - "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.7.tgz", + "integrity": "sha512-FjoyLe754PMiYsFaN5C94ttGiOmBNYTf6pLr4xXHAT5uctHb092PBszndLDR5XA/jghQvn4n7JMHl7dmTgbm9w==", "requires": { "regenerator-runtime": "^0.14.0" } @@ -4111,7 +4118,7 @@ }, "@opentdf/client": { "version": "file:../lib/opentdf-client-2.0.0.tgz", - "integrity": "sha512-KzELD2ECWzt5ci3cmqAb/AAHpPUtO8MibopZgxJioXgvuMyoA+t3VFlGxD7RenVx0Ecm1GjEHzs68IGjHN90rQ==", + "integrity": "sha512-LgL8v4mPmcu7nlCz/1C1Vl6WKYSlQM//bevC5u2J1AYmkyCys9A3qd7bgXIU6kMxbuU80B7DgmeSRRvp4qHrRQ==", "requires": { "axios": "^1.6.1", "axios-retry": "^3.9.0", @@ -4122,6 +4129,7 @@ "dpop": "^1.2.0", "eventemitter3": "^5.0.1", "jose": 
"^4.14.4", + "json-canonicalize": "^1.0.6", "streamsaver": "^2.0.6", "uuid": "~9.0.0" } @@ -4950,9 +4958,9 @@ "integrity": "sha512-gew4GsXizNgdoRyqmyfMHyAmXsZDk6mHkSxZFCzW9gwlbtOW44CDtYavM+y+72qD/Vq2l550kMF52DT8fOLJqQ==" }, "form-data": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", - "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.1.tgz", + "integrity": "sha512-tzN8e4TX8+kkxGPK8D5u0FNmjPUjw3lwC9lSLxxoB/+GtsJG91CO8bSWy73APlgAZzZbXEYZJuxjkHH2w+Ezhw==", "requires": { "asynckit": "^0.4.0", "combined-stream": "^1.0.8", @@ -5125,6 +5133,11 @@ "version": "2.5.2", "dev": true }, + "json-canonicalize": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/json-canonicalize/-/json-canonicalize-1.0.6.tgz", + "integrity": "sha512-kP2iYpOS5SZHYhIaR1t9oG80d4uTY3jPoaBj+nimy3njtJk8+sRsVatN8pyJRDRtk9Su3+6XqA2U8k0dByJBUQ==" + }, "json-parse-even-better-errors": { "version": "2.3.1", "dev": true