diff --git a/.fern/metadata.json b/.fern/metadata.json new file mode 100644 index 0000000..6eb83df --- /dev/null +++ b/.fern/metadata.json @@ -0,0 +1,8 @@ +{ + "cliVersion": "1.5.0", + "generatorName": "fernapi/fern-typescript-sdk", + "generatorVersion": "3.31.0", + "generatorConfig": { + "namespaceExport": "Lattice" + } +} diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 21fe64b..2db6106 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -11,7 +11,7 @@ jobs: uses: actions/checkout@v4 - name: Set up node - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 - name: Install pnpm uses: pnpm/action-setup@v4 @@ -30,7 +30,7 @@ jobs: uses: actions/checkout@v4 - name: Set up node - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 - name: Install pnpm uses: pnpm/action-setup@v4 @@ -50,7 +50,7 @@ jobs: uses: actions/checkout@v4 - name: Set up node - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 - name: Install pnpm uses: pnpm/action-setup@v4 @@ -64,12 +64,15 @@ jobs: - name: Publish to npm run: | npm config set //registry.npmjs.org/:_authToken ${NPM_TOKEN} + publish() { # use latest npm to ensure OIDC support + npx -y npm@latest publish "$@" + } if [[ ${GITHUB_REF} == *alpha* ]]; then - npm publish --access public --tag alpha + publish --access public --tag alpha elif [[ ${GITHUB_REF} == *beta* ]]; then - npm publish --access public --tag beta + publish --access public --tag beta else - npm publish --access public + publish --access public fi env: NPM_TOKEN: ${{ secrets.NPM_TOKEN }} \ No newline at end of file diff --git a/.npmignore b/.npmignore deleted file mode 100644 index b7e5ad3..0000000 --- a/.npmignore +++ /dev/null @@ -1,10 +0,0 @@ -node_modules -src -tests -.gitignore -.github -.fernignore -biome.json -tsconfig.json -yarn.lock -pnpm-lock.yaml \ No newline at end of file diff --git a/LICENSE b/LICENSE index 9623e9f..87b1a69 100644 --- a/LICENSE +++ b/LICENSE @@ -186,4 +186,4 @@ of any court action, you agree to submit to the exclusive jurisdiction of the co Notwithstanding this, you agree that Anduril shall still be allowed to apply for injunctive remedies (or an equivalent type of urgent legal relief) in any jurisdiction. -**April 14, 2025** \ No newline at end of file +**April 14, 2025** diff --git a/README.md b/README.md deleted file mode 100644 index 073c6dc..0000000 --- a/README.md +++ /dev/null @@ -1,649 +0,0 @@ -# Lattice SDK TypeScript Library - -![](https://www.anduril.com/lattice-sdk/) - -[![npm shield](https://img.shields.io/npm/v/@anduril-industries/lattice-sdk)](https://www.npmjs.com/package/@anduril-industries/lattice-sdk) - -The Lattice SDK TypeScript library provides convenient access to the Lattice SDK APIs from TypeScript. - -## Documentation - -API reference documentation is available [here](https://developer.anduril.com/). - -## Requirements - -To use the SDK please ensure you have the following installed: - -- [NodeJS](https://nodejs.org/en/download/package-manager) - -## Installation - -```sh -npm i -s @anduril-industries/lattice-sdk -``` - -## Support - -For support with this library, please reach out to your Anduril representative. - -## Reference - -A full reference for this library is available [here](https://github.com/anduril/lattice-sdk-javascript/blob/HEAD/./reference.md). 
- -## Usage - -Instantiate and use the client with the following: - -```typescript -import { LatticeClient } from "@anduril-industries/lattice-sdk"; - -const client = new LatticeClient({ token: "YOUR_TOKEN" }); -await client.entities.longPollEntityEvents({ - sessionToken: "sessionToken" -}); -``` - -## Request And Response Types - -The SDK exports all request and response types as TypeScript interfaces. Simply import them with the -following namespace: - -```typescript -import { Lattice } from "@anduril-industries/lattice-sdk"; - -const request: Lattice.EntityOverride = { - ... -}; -``` - -## Exception Handling - -When the API returns a non-success status code (4xx or 5xx response), a subclass of the following error -will be thrown. - -```typescript -import { LatticeError } from "@anduril-industries/lattice-sdk"; - -try { - await client.entities.longPollEntityEvents(...); -} catch (err) { - if (err instanceof LatticeError) { - console.log(err.statusCode); - console.log(err.message); - console.log(err.body); - console.log(err.rawResponse); - } -} -``` - -## File Uploads - -You can upload files using the client: - -```typescript -import { createReadStream } from "fs"; - -await client.objects.uploadObject(createReadStream("path/to/file"), ...); -await client.objects.uploadObject(new ReadableStream(), ...); -await client.objects.uploadObject(Buffer.from('binary data'), ...); -await client.objects.uploadObject(new Blob(['binary data'], { type: 'audio/mpeg' }), ...); -await client.objects.uploadObject(new File(['binary data'], 'file.mp3'), ...); -await client.objects.uploadObject(new ArrayBuffer(8), ...); -await client.objects.uploadObject(new Uint8Array([0, 1, 2]), ...); -``` -The client accepts a variety of types for file upload parameters: -* Stream types: `fs.ReadStream`, `stream.Readable`, and `ReadableStream` -* Buffered types: `Buffer`, `Blob`, `File`, `ArrayBuffer`, `ArrayBufferView`, and `Uint8Array` - -### Metadata - -You can configure metadata when uploading a file: -```typescript -const file: Uploadable.WithMetadata = { - data: createReadStream("path/to/file"), - filename: "my-file", // optional - contentType: "audio/mpeg", // optional - contentLength: 1949, // optional -}; -``` - -Alternatively, you can upload a file directly from a file path: -```typescript -const file : Uploadable.FromPath = { - path: "path/to/file", - filename: "my-file", // optional - contentType: "audio/mpeg", // optional - contentLength: 1949, // optional -}; -``` - -The metadata is used to set the `Content-Length`, `Content-Type`, and `Content-Disposition` headers. If not provided, the client will attempt to determine them automatically. -For example, `fs.ReadStream` has a `path` property which the SDK uses to retrieve the file size from the filesystem without loading it into memory. - - -## Binary Response - -You can consume binary data from endpoints using the `BinaryResponse` type which lets you choose how to consume the data: - -```typescript -const response = await client.objects.getObject(...); -const stream: ReadableStream = response.stream(); -// const arrayBuffer: ArrayBuffer = await response.arrayBuffer(); -// const blob: Blob = response.blob(); -// const bytes: Uint8Array = response.bytes(); -// You can only use the response body once, so you must choose one of the above methods. -// If you want to check if the response body has been used, you can use the following property. -const bodyUsed = response.bodyUsed; -``` -
-Save binary response to a file - -
-
-Node.js - -
-
-ReadableStream (most-efficient) - -```ts -import { createWriteStream } from 'fs'; -import { Readable } from 'stream'; -import { pipeline } from 'stream/promises'; - -const response = await client.objects.getObject(...); - -const stream = response.stream(); -const nodeStream = Readable.fromWeb(stream); -const writeStream = createWriteStream('path/to/file'); - -await pipeline(nodeStream, writeStream); -``` - -
-
-ArrayBuffer - -```ts -import { writeFile } from 'fs/promises'; - -const response = await client.objects.getObject(...); - -const arrayBuffer = await response.arrayBuffer(); -await writeFile('path/to/file', Buffer.from(arrayBuffer)); -``` - -
-
-Blob - -```ts -import { writeFile } from 'fs/promises'; - -const response = await client.objects.getObject(...); - -const blob = await response.blob(); -const arrayBuffer = await blob.arrayBuffer(); -await writeFile('output.bin', Buffer.from(arrayBuffer)); -``` - -
-
-Bytes (Uint8Array) - -```ts -import { writeFile } from 'fs/promises'; - -const response = await client.objects.getObject(...); - -const bytes = await response.bytes(); -await writeFile('path/to/file', bytes); -``` - -
-
-Bun - -
-
-ReadableStream (most-efficient) - -```ts -const response = await client.objects.getObject(...); - -const stream = response.stream(); -await Bun.write('path/to/file', stream); -``` - -
-
-ArrayBuffer - -```ts -const response = await client.objects.getObject(...); - -const arrayBuffer = await response.arrayBuffer(); -await Bun.write('path/to/file', arrayBuffer); -``` - -
-
-Blob - -```ts -const response = await client.objects.getObject(...); - -const blob = await response.blob(); -await Bun.write('path/to/file', blob); -``` - -
-
-Bytes (Uint8Array) - -```ts -const response = await client.objects.getObject(...); - -const bytes = await response.bytes(); -await Bun.write('path/to/file', bytes); -``` - -
-
-Deno - -
-
-ReadableStream (most-efficient) - -```ts -const response = await client.objects.getObject(...); - -const stream = response.stream(); -const file = await Deno.open('path/to/file', { write: true, create: true }); -await stream.pipeTo(file.writable); -``` - -
-
-ArrayBuffer - -```ts -const response = await client.objects.getObject(...); - -const arrayBuffer = await response.arrayBuffer(); -await Deno.writeFile('path/to/file', new Uint8Array(arrayBuffer)); -``` - -
-
-Blob - -```ts -const response = await client.objects.getObject(...); - -const blob = await response.blob(); -const arrayBuffer = await blob.arrayBuffer(); -await Deno.writeFile('path/to/file', new Uint8Array(arrayBuffer)); -``` - -
-
-Bytes (Uint8Array) - -```ts -const response = await client.objects.getObject(...); - -const bytes = await response.bytes(); -await Deno.writeFile('path/to/file', bytes); -``` - -
-
-Browser - -
-
-Blob (most-efficient) - -```ts -const response = await client.objects.getObject(...); - -const blob = await response.blob(); -const url = URL.createObjectURL(blob); - -// trigger download -const a = document.createElement('a'); -a.href = url; -a.download = 'filename'; -a.click(); -URL.revokeObjectURL(url); -``` - -
-
-ReadableStream - -```ts -const response = await client.objects.getObject(...); - -const stream = response.stream(); -const reader = stream.getReader(); -const chunks = []; - -while (true) { - const { done, value } = await reader.read(); - if (done) break; - chunks.push(value); -} - -const blob = new Blob(chunks); -const url = URL.createObjectURL(blob); - -// trigger download -const a = document.createElement('a'); -a.href = url; -a.download = 'filename'; -a.click(); -URL.revokeObjectURL(url); -``` - -
-
-ArrayBuffer - -```ts -const response = await client.objects.getObject(...); - -const arrayBuffer = await response.arrayBuffer(); -const blob = new Blob([arrayBuffer]); -const url = URL.createObjectURL(blob); - -// trigger download -const a = document.createElement('a'); -a.href = url; -a.download = 'filename'; -a.click(); -URL.revokeObjectURL(url); -``` - -
-
-Bytes (Uint8Array) - -```ts -const response = await client.objects.getObject(...); - -const bytes = await response.bytes(); -const blob = new Blob([bytes]); -const url = URL.createObjectURL(blob); - -// trigger download -const a = document.createElement('a'); -a.href = url; -a.download = 'filename'; -a.click(); -URL.revokeObjectURL(url); -``` - -
-
-Convert binary response to text - -
-
-ReadableStream - -```ts -const response = await client.objects.getObject(...); - -const stream = response.stream(); -const text = await new Response(stream).text(); -``` - -
-
-ArrayBuffer - -```ts -const response = await client.objects.getObject(...); - -const arrayBuffer = await response.arrayBuffer(); -const text = new TextDecoder().decode(arrayBuffer); -``` - -
-
-Blob - -```ts -const response = await client.objects.getObject(...); - -const blob = await response.blob(); -const text = await blob.text(); -``` - -
-
-Bytes (Uint8Array) - -```ts -const response = await client.objects.getObject(...); - -const bytes = await response.bytes(); -const text = new TextDecoder().decode(bytes); -``` - -
-
- -## Pagination - -List endpoints are paginated. The SDK provides an iterator so that you can simply loop over the items: - -```typescript -import { LatticeClient } from "@anduril-industries/lattice-sdk"; - -const client = new LatticeClient({ token: "YOUR_TOKEN" }); -const response = await client.objects.listObjects({ - prefix: "prefix", - sinceTimestamp: "2024-01-15T09:30:00Z", - pageToken: "pageToken", - allObjectsInMesh: true -}); -for await (const item of response) { - console.log(item); -} - -// Or you can manually iterate page-by-page -let page = await client.objects.listObjects({ - prefix: "prefix", - sinceTimestamp: "2024-01-15T09:30:00Z", - pageToken: "pageToken", - allObjectsInMesh: true -}); -while (page.hasNextPage()) { - page = page.getNextPage(); -} -``` - -## Advanced - -### Additional Headers - -If you would like to send additional headers as part of the request, use the `headers` request option. - -```typescript -const response = await client.entities.longPollEntityEvents(..., { - headers: { - 'X-Custom-Header': 'custom value' - } -}); -``` - -### Additional Query String Parameters - -If you would like to send additional query string parameters as part of the request, use the `queryParams` request option. - -```typescript -const response = await client.entities.longPollEntityEvents(..., { - queryParams: { - 'customQueryParamKey': 'custom query param value' - } -}); -``` - -### Retries - -The SDK is instrumented with automatic retries with exponential backoff. A request will be retried as long -as the request is deemed retryable and the number of retry attempts has not grown larger than the configured -retry limit (default: 2). - -A request is deemed retryable when any of the following HTTP status codes is returned: - -- [408](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408) (Timeout) -- [429](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429) (Too Many Requests) -- [5XX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500) (Internal Server Errors) - -Use the `maxRetries` request option to configure this behavior. - -```typescript -const response = await client.entities.longPollEntityEvents(..., { - maxRetries: 0 // override maxRetries at the request level -}); -``` - -### Timeouts - -The SDK defaults to a 60 second timeout. Use the `timeoutInSeconds` option to configure this behavior. - -```typescript -const response = await client.entities.longPollEntityEvents(..., { - timeoutInSeconds: 30 // override timeout to 30s -}); -``` - -### Aborting Requests - -The SDK allows users to abort requests at any point by passing in an abort signal. - -```typescript -const controller = new AbortController(); -const response = await client.entities.longPollEntityEvents(..., { - abortSignal: controller.signal -}); -controller.abort(); // aborts the request -``` - -### Access Raw Response Data - -The SDK provides access to raw response data, including headers, through the `.withRawResponse()` method. -The `.withRawResponse()` method returns a promise that results to an object with a `data` and a `rawResponse` property. 
- -```typescript -const { data, rawResponse } = await client.entities.longPollEntityEvents(...).withRawResponse(); - -console.log(data); -console.log(rawResponse.headers['X-My-Header']); -``` - -### Runtime Compatibility - - -The SDK works in the following runtimes: - - - -- Node.js 18+ -- Vercel -- Cloudflare Workers -- Deno v1.25+ -- Bun 1.0+ -- React Native - -### Customizing Fetch Client - -The SDK provides a way for you to customize the underlying HTTP client / Fetch function. If you're running in an -unsupported environment, this provides a way for you to break glass and ensure the SDK works. - -```typescript -import { LatticeClient } from "@anduril-industries/lattice-sdk"; - -const client = new LatticeClient({ - ... - fetcher: // provide your implementation here -}); -``` diff --git a/biome.json b/biome.json index b6890df..a777468 100644 --- a/biome.json +++ b/biome.json @@ -1,5 +1,5 @@ { - "$schema": "https://biomejs.dev/schemas/2.2.5/schema.json", + "$schema": "https://biomejs.dev/schemas/2.3.1/schema.json", "root": true, "vcs": { "enabled": false @@ -7,16 +7,21 @@ "files": { "ignoreUnknown": true, "includes": [ - "./**", - "!dist", - "!lib", - "!*.tsbuildinfo", - "!_tmp_*", - "!*.tmp", - "!.tmp/", - "!*.log", - "!.DS_Store", - "!Thumbs.db" + "**", + "!!dist", + "!!**/dist", + "!!lib", + "!!**/lib", + "!!_tmp_*", + "!!**/_tmp_*", + "!!*.tmp", + "!!**/*.tmp", + "!!.tmp/", + "!!**/.tmp/", + "!!*.log", + "!!**/*.log", + "!!**/.DS_Store", + "!!**/Thumbs.db" ] }, "formatter": { diff --git a/package.json b/package.json index 8584c1c..fa4e9b7 100644 --- a/package.json +++ b/package.json @@ -1,8 +1,8 @@ { "name": "@anduril-industries/lattice-sdk", - "version": "3.0.0", + "version": "3.0.1", "private": false, - "repository": "github:anduril/lattice-sdk-javascript", + "repository": "github:fern-api/lattice-sdk-javascript", "license": "See LICENSE", "type": "commonjs", "main": "./dist/cjs/index.js", @@ -31,6 +31,9 @@ ], "scripts": { "format": "biome format --write --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "format:check": "biome format --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "lint": "biome lint --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "lint:fix": "biome lint --fix --unsafe --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", "check": "biome check --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", "check:fix": "biome check --fix --unsafe --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", "build": "pnpm build:cjs && pnpm build:esm", @@ -40,14 +43,15 @@ "test:unit": "vitest --project unit", "test:wire": "vitest --project wire" }, + "dependencies": {}, "devDependencies": { "webpack": "^5.97.1", "ts-loader": "^9.5.1", "vitest": "^3.2.4", "msw": "2.11.2", "@types/node": "^18.19.70", - "@biomejs/biome": "2.2.5", - "typescript": "~5.7.2" + "typescript": "~5.7.2", + "@biomejs/biome": "2.3.1" }, "browser": { "fs": false, @@ -55,7 +59,7 @@ "path": false, "stream": false }, - "packageManager": "pnpm@10.14.0", + "packageManager": "pnpm@10.20.0", "engines": { "node": ">=18.0.0" }, diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5b89b8e..a06c63f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -9,8 +9,8 @@ importers: .: devDependencies: '@biomejs/biome': - specifier: 2.2.5 - version: 2.2.5 + specifier: 2.3.1 + version: 2.3.1 '@types/node': specifier: ^18.19.70 version: 18.19.130 @@ -25,62 +25,62 @@ importers: version: 5.7.3 vitest: specifier: ^3.2.4 - 
version: 3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.0) + version: 3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.1) webpack: specifier: ^5.97.1 version: 5.102.1 packages: - '@biomejs/biome@2.2.5': - resolution: {integrity: sha512-zcIi+163Rc3HtyHbEO7CjeHq8DjQRs40HsGbW6vx2WI0tg8mYQOPouhvHSyEnCBAorfYNnKdR64/IxO7xQ5faw==} + '@biomejs/biome@2.3.1': + resolution: {integrity: sha512-A29evf1R72V5bo4o2EPxYMm5mtyGvzp2g+biZvRFx29nWebGyyeOSsDWGx3tuNNMFRepGwxmA9ZQ15mzfabK2w==} engines: {node: '>=14.21.3'} hasBin: true - '@biomejs/cli-darwin-arm64@2.2.5': - resolution: {integrity: sha512-MYT+nZ38wEIWVcL5xLyOhYQQ7nlWD0b/4mgATW2c8dvq7R4OQjt/XGXFkXrmtWmQofaIM14L7V8qIz/M+bx5QQ==} + '@biomejs/cli-darwin-arm64@2.3.1': + resolution: {integrity: sha512-ombSf3MnTUueiYGN1SeI9tBCsDUhpWzOwS63Dove42osNh0PfE1cUtHFx6eZ1+MYCCLwXzlFlYFdrJ+U7h6LcA==} engines: {node: '>=14.21.3'} cpu: [arm64] os: [darwin] - '@biomejs/cli-darwin-x64@2.2.5': - resolution: {integrity: sha512-FLIEl73fv0R7dI10EnEiZLw+IMz3mWLnF95ASDI0kbx6DDLJjWxE5JxxBfmG+udz1hIDd3fr5wsuP7nwuTRdAg==} + '@biomejs/cli-darwin-x64@2.3.1': + resolution: {integrity: sha512-pcOfwyoQkrkbGvXxRvZNe5qgD797IowpJPovPX5biPk2FwMEV+INZqfCaz4G5bVq9hYnjwhRMamg11U4QsRXrQ==} engines: {node: '>=14.21.3'} cpu: [x64] os: [darwin] - '@biomejs/cli-linux-arm64-musl@2.2.5': - resolution: {integrity: sha512-5Ov2wgAFwqDvQiESnu7b9ufD1faRa+40uwrohgBopeY84El2TnBDoMNXx6iuQdreoFGjwW8vH6k68G21EpNERw==} + '@biomejs/cli-linux-arm64-musl@2.3.1': + resolution: {integrity: sha512-+DZYv8l7FlUtTrWs1Tdt1KcNCAmRO87PyOnxKGunbWm5HKg1oZBSbIIPkjrCtDZaeqSG1DiGx7qF+CPsquQRcg==} engines: {node: '>=14.21.3'} cpu: [arm64] os: [linux] - '@biomejs/cli-linux-arm64@2.2.5': - resolution: {integrity: sha512-5DjiiDfHqGgR2MS9D+AZ8kOfrzTGqLKywn8hoXpXXlJXIECGQ32t+gt/uiS2XyGBM2XQhR6ztUvbjZWeccFMoQ==} + '@biomejs/cli-linux-arm64@2.3.1': + resolution: {integrity: sha512-td5O8pFIgLs8H1sAZsD6v+5quODihyEw4nv2R8z7swUfIK1FKk+15e4eiYVLcAE4jUqngvh4j3JCNgg0Y4o4IQ==} engines: {node: '>=14.21.3'} cpu: [arm64] os: [linux] - '@biomejs/cli-linux-x64-musl@2.2.5': - resolution: {integrity: sha512-AVqLCDb/6K7aPNIcxHaTQj01sl1m989CJIQFQEaiQkGr2EQwyOpaATJ473h+nXDUuAcREhccfRpe/tu+0wu0eQ==} + '@biomejs/cli-linux-x64-musl@2.3.1': + resolution: {integrity: sha512-Y3Ob4nqgv38Mh+6EGHltuN+Cq8aj/gyMTJYzkFZV2AEj+9XzoXB9VNljz9pjfFNHUxvLEV4b55VWyxozQTBaUQ==} engines: {node: '>=14.21.3'} cpu: [x64] os: [linux] - '@biomejs/cli-linux-x64@2.2.5': - resolution: {integrity: sha512-fq9meKm1AEXeAWan3uCg6XSP5ObA6F/Ovm89TwaMiy1DNIwdgxPkNwxlXJX8iM6oRbFysYeGnT0OG8diCWb9ew==} + '@biomejs/cli-linux-x64@2.3.1': + resolution: {integrity: sha512-PYWgEO7up7XYwSAArOpzsVCiqxBCXy53gsReAb1kKYIyXaoAlhBaBMvxR/k2Rm9aTuZ662locXUmPk/Aj+Xu+Q==} engines: {node: '>=14.21.3'} cpu: [x64] os: [linux] - '@biomejs/cli-win32-arm64@2.2.5': - resolution: {integrity: sha512-xaOIad4wBambwJa6mdp1FigYSIF9i7PCqRbvBqtIi9y29QtPVQ13sDGtUnsRoe6SjL10auMzQ6YAe+B3RpZXVg==} + '@biomejs/cli-win32-arm64@2.3.1': + resolution: {integrity: sha512-RHIG/zgo+69idUqVvV3n8+j58dKYABRpMyDmfWu2TITC+jwGPiEaT0Q3RKD+kQHiS80mpBrST0iUGeEXT0bU9A==} engines: {node: '>=14.21.3'} cpu: [arm64] os: [win32] - '@biomejs/cli-win32-x64@2.2.5': - resolution: {integrity: sha512-F/jhuXCssPFAuciMhHKk00xnCAxJRS/pUzVfXYmOMUp//XW7mO6QeCjsjvnm8L4AO/dG2VOB0O+fJPiJ2uXtIw==} + '@biomejs/cli-win32-x64@2.3.1': + resolution: {integrity: sha512-izl30JJ5Dp10mi90Eko47zhxE6pYyWPcnX1NQxKpL/yMhXxf95oLTzfpu4q+MDBh/gemNqyJEwjBpe0MT5iWPA==} engines: {node: 
'>=14.21.3'} cpu: [x64] os: [win32] @@ -91,168 +91,168 @@ packages: '@bundled-es-modules/statuses@1.0.1': resolution: {integrity: sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==} - '@esbuild/aix-ppc64@0.25.11': - resolution: {integrity: sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg==} + '@esbuild/aix-ppc64@0.25.12': + resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==} engines: {node: '>=18'} cpu: [ppc64] os: [aix] - '@esbuild/android-arm64@0.25.11': - resolution: {integrity: sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ==} + '@esbuild/android-arm64@0.25.12': + resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==} engines: {node: '>=18'} cpu: [arm64] os: [android] - '@esbuild/android-arm@0.25.11': - resolution: {integrity: sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg==} + '@esbuild/android-arm@0.25.12': + resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==} engines: {node: '>=18'} cpu: [arm] os: [android] - '@esbuild/android-x64@0.25.11': - resolution: {integrity: sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g==} + '@esbuild/android-x64@0.25.12': + resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==} engines: {node: '>=18'} cpu: [x64] os: [android] - '@esbuild/darwin-arm64@0.25.11': - resolution: {integrity: sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w==} + '@esbuild/darwin-arm64@0.25.12': + resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==} engines: {node: '>=18'} cpu: [arm64] os: [darwin] - '@esbuild/darwin-x64@0.25.11': - resolution: {integrity: sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ==} + '@esbuild/darwin-x64@0.25.12': + resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==} engines: {node: '>=18'} cpu: [x64] os: [darwin] - '@esbuild/freebsd-arm64@0.25.11': - resolution: {integrity: sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA==} + '@esbuild/freebsd-arm64@0.25.12': + resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==} engines: {node: '>=18'} cpu: [arm64] os: [freebsd] - '@esbuild/freebsd-x64@0.25.11': - resolution: {integrity: sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw==} + '@esbuild/freebsd-x64@0.25.12': + resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==} engines: {node: '>=18'} cpu: [x64] os: [freebsd] - '@esbuild/linux-arm64@0.25.11': - resolution: {integrity: sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA==} + '@esbuild/linux-arm64@0.25.12': + resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==} engines: {node: '>=18'} cpu: [arm64] os: [linux] - '@esbuild/linux-arm@0.25.11': - resolution: 
{integrity: sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw==} + '@esbuild/linux-arm@0.25.12': + resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==} engines: {node: '>=18'} cpu: [arm] os: [linux] - '@esbuild/linux-ia32@0.25.11': - resolution: {integrity: sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw==} + '@esbuild/linux-ia32@0.25.12': + resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==} engines: {node: '>=18'} cpu: [ia32] os: [linux] - '@esbuild/linux-loong64@0.25.11': - resolution: {integrity: sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw==} + '@esbuild/linux-loong64@0.25.12': + resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==} engines: {node: '>=18'} cpu: [loong64] os: [linux] - '@esbuild/linux-mips64el@0.25.11': - resolution: {integrity: sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ==} + '@esbuild/linux-mips64el@0.25.12': + resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==} engines: {node: '>=18'} cpu: [mips64el] os: [linux] - '@esbuild/linux-ppc64@0.25.11': - resolution: {integrity: sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw==} + '@esbuild/linux-ppc64@0.25.12': + resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==} engines: {node: '>=18'} cpu: [ppc64] os: [linux] - '@esbuild/linux-riscv64@0.25.11': - resolution: {integrity: sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww==} + '@esbuild/linux-riscv64@0.25.12': + resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==} engines: {node: '>=18'} cpu: [riscv64] os: [linux] - '@esbuild/linux-s390x@0.25.11': - resolution: {integrity: sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw==} + '@esbuild/linux-s390x@0.25.12': + resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==} engines: {node: '>=18'} cpu: [s390x] os: [linux] - '@esbuild/linux-x64@0.25.11': - resolution: {integrity: sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ==} + '@esbuild/linux-x64@0.25.12': + resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==} engines: {node: '>=18'} cpu: [x64] os: [linux] - '@esbuild/netbsd-arm64@0.25.11': - resolution: {integrity: sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg==} + '@esbuild/netbsd-arm64@0.25.12': + resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==} engines: {node: '>=18'} cpu: [arm64] os: [netbsd] - '@esbuild/netbsd-x64@0.25.11': - resolution: {integrity: sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A==} + '@esbuild/netbsd-x64@0.25.12': + resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==} engines: 
{node: '>=18'} cpu: [x64] os: [netbsd] - '@esbuild/openbsd-arm64@0.25.11': - resolution: {integrity: sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg==} + '@esbuild/openbsd-arm64@0.25.12': + resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==} engines: {node: '>=18'} cpu: [arm64] os: [openbsd] - '@esbuild/openbsd-x64@0.25.11': - resolution: {integrity: sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw==} + '@esbuild/openbsd-x64@0.25.12': + resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==} engines: {node: '>=18'} cpu: [x64] os: [openbsd] - '@esbuild/openharmony-arm64@0.25.11': - resolution: {integrity: sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ==} + '@esbuild/openharmony-arm64@0.25.12': + resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==} engines: {node: '>=18'} cpu: [arm64] os: [openharmony] - '@esbuild/sunos-x64@0.25.11': - resolution: {integrity: sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA==} + '@esbuild/sunos-x64@0.25.12': + resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==} engines: {node: '>=18'} cpu: [x64] os: [sunos] - '@esbuild/win32-arm64@0.25.11': - resolution: {integrity: sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q==} + '@esbuild/win32-arm64@0.25.12': + resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==} engines: {node: '>=18'} cpu: [arm64] os: [win32] - '@esbuild/win32-ia32@0.25.11': - resolution: {integrity: sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA==} + '@esbuild/win32-ia32@0.25.12': + resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==} engines: {node: '>=18'} cpu: [ia32] os: [win32] - '@esbuild/win32-x64@0.25.11': - resolution: {integrity: sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA==} + '@esbuild/win32-x64@0.25.12': + resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==} engines: {node: '>=18'} cpu: [x64] os: [win32] - '@inquirer/ansi@1.0.1': - resolution: {integrity: sha512-yqq0aJW/5XPhi5xOAL1xRCpe1eh8UFVgYFpFsjEqmIR8rKLyP+HINvFXwUaxYICflJrVlxnp7lLN6As735kVpw==} + '@inquirer/ansi@1.0.2': + resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==} engines: {node: '>=18'} - '@inquirer/confirm@5.1.19': - resolution: {integrity: sha512-wQNz9cfcxrtEnUyG5PndC8g3gZ7lGDBzmWiXZkX8ot3vfZ+/BLjR8EvyGX4YzQLeVqtAlY/YScZpW7CW8qMoDQ==} + '@inquirer/confirm@5.1.21': + resolution: {integrity: sha512-KR8edRkIsUayMXV+o3Gv+q4jlhENF9nMYUZs9PA2HzrXeHI8M5uDag70U7RJn9yyiMZSbtF5/UexBtAVtZGSbQ==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' @@ -260,8 +260,8 @@ packages: '@types/node': optional: true - '@inquirer/core@10.3.0': - resolution: {integrity: sha512-Uv2aPPPSK5jeCplQmQ9xadnFx2Zhj9b5Dj7bU6ZeCdDNNY11nhYy4btcSdtDguHqCT2h5oNeQTcUNSGGLA7NTA==} + '@inquirer/core@10.3.2': + resolution: {integrity: 
sha512-43RTuEbfP8MbKzedNqBrlhhNKVwoK//vUFNW3Q3vZ88BLcrs4kYpGg+B2mm5p2K/HfygoCxuKwJJiv8PbGmE0A==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' @@ -269,12 +269,12 @@ packages: '@types/node': optional: true - '@inquirer/figures@1.0.14': - resolution: {integrity: sha512-DbFgdt+9/OZYFM+19dbpXOSeAstPy884FPy1KjDu4anWwymZeOYhMY1mdFri172htv6mvc/uvIAAi7b7tvjJBQ==} + '@inquirer/figures@1.0.15': + resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==} engines: {node: '>=18'} - '@inquirer/type@3.0.9': - resolution: {integrity: sha512-QPaNt/nmE2bLGQa9b7wwyRJoLZ7pN6rcyXvzU0YCmivmJyq1BVo94G98tStRWkoD1RgDX5C+dPlhhHzNdu/W/w==} + '@inquirer/type@3.0.10': + resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==} engines: {node: '>=18'} peerDependencies: '@types/node': '>=18' @@ -311,113 +311,113 @@ packages: '@open-draft/until@2.1.0': resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} - '@rollup/rollup-android-arm-eabi@4.52.5': - resolution: {integrity: sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ==} + '@rollup/rollup-android-arm-eabi@4.53.2': + resolution: {integrity: sha512-yDPzwsgiFO26RJA4nZo8I+xqzh7sJTZIWQOxn+/XOdPE31lAvLIYCKqjV+lNH/vxE2L2iH3plKxDCRK6i+CwhA==} cpu: [arm] os: [android] - '@rollup/rollup-android-arm64@4.52.5': - resolution: {integrity: sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA==} + '@rollup/rollup-android-arm64@4.53.2': + resolution: {integrity: sha512-k8FontTxIE7b0/OGKeSN5B6j25EuppBcWM33Z19JoVT7UTXFSo3D9CdU39wGTeb29NO3XxpMNauh09B+Ibw+9g==} cpu: [arm64] os: [android] - '@rollup/rollup-darwin-arm64@4.52.5': - resolution: {integrity: sha512-takF3CR71mCAGA+v794QUZ0b6ZSrgJkArC+gUiG6LB6TQty9T0Mqh3m2ImRBOxS2IeYBo4lKWIieSvnEk2OQWA==} + '@rollup/rollup-darwin-arm64@4.53.2': + resolution: {integrity: sha512-A6s4gJpomNBtJ2yioj8bflM2oogDwzUiMl2yNJ2v9E7++sHrSrsQ29fOfn5DM/iCzpWcebNYEdXpaK4tr2RhfQ==} cpu: [arm64] os: [darwin] - '@rollup/rollup-darwin-x64@4.52.5': - resolution: {integrity: sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA==} + '@rollup/rollup-darwin-x64@4.53.2': + resolution: {integrity: sha512-e6XqVmXlHrBlG56obu9gDRPW3O3hLxpwHpLsBJvuI8qqnsrtSZ9ERoWUXtPOkY8c78WghyPHZdmPhHLWNdAGEw==} cpu: [x64] os: [darwin] - '@rollup/rollup-freebsd-arm64@4.52.5': - resolution: {integrity: sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA==} + '@rollup/rollup-freebsd-arm64@4.53.2': + resolution: {integrity: sha512-v0E9lJW8VsrwPux5Qe5CwmH/CF/2mQs6xU1MF3nmUxmZUCHazCjLgYvToOk+YuuUqLQBio1qkkREhxhc656ViA==} cpu: [arm64] os: [freebsd] - '@rollup/rollup-freebsd-x64@4.52.5': - resolution: {integrity: sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ==} + '@rollup/rollup-freebsd-x64@4.53.2': + resolution: {integrity: sha512-ClAmAPx3ZCHtp6ysl4XEhWU69GUB1D+s7G9YjHGhIGCSrsg00nEGRRZHmINYxkdoJehde8VIsDC5t9C0gb6yqA==} cpu: [x64] os: [freebsd] - '@rollup/rollup-linux-arm-gnueabihf@4.52.5': - resolution: {integrity: sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ==} + '@rollup/rollup-linux-arm-gnueabihf@4.53.2': + resolution: {integrity: sha512-EPlb95nUsz6Dd9Qy13fI5kUPXNSljaG9FiJ4YUGU1O/Q77i5DYFW5KR8g1OzTcdZUqQQ1KdDqsTohdFVwCwjqg==} 
cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm-musleabihf@4.52.5': - resolution: {integrity: sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ==} + '@rollup/rollup-linux-arm-musleabihf@4.53.2': + resolution: {integrity: sha512-BOmnVW+khAUX+YZvNfa0tGTEMVVEerOxN0pDk2E6N6DsEIa2Ctj48FOMfNDdrwinocKaC7YXUZ1pHlKpnkja/Q==} cpu: [arm] os: [linux] - '@rollup/rollup-linux-arm64-gnu@4.52.5': - resolution: {integrity: sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg==} + '@rollup/rollup-linux-arm64-gnu@4.53.2': + resolution: {integrity: sha512-Xt2byDZ+6OVNuREgBXr4+CZDJtrVso5woFtpKdGPhpTPHcNG7D8YXeQzpNbFRxzTVqJf7kvPMCub/pcGUWgBjA==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-arm64-musl@4.52.5': - resolution: {integrity: sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q==} + '@rollup/rollup-linux-arm64-musl@4.53.2': + resolution: {integrity: sha512-+LdZSldy/I9N8+klim/Y1HsKbJ3BbInHav5qE9Iy77dtHC/pibw1SR/fXlWyAk0ThnpRKoODwnAuSjqxFRDHUQ==} cpu: [arm64] os: [linux] - '@rollup/rollup-linux-loong64-gnu@4.52.5': - resolution: {integrity: sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA==} + '@rollup/rollup-linux-loong64-gnu@4.53.2': + resolution: {integrity: sha512-8ms8sjmyc1jWJS6WdNSA23rEfdjWB30LH8Wqj0Cqvv7qSHnvw6kgMMXRdop6hkmGPlyYBdRPkjJnj3KCUHV/uQ==} cpu: [loong64] os: [linux] - '@rollup/rollup-linux-ppc64-gnu@4.52.5': - resolution: {integrity: sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw==} + '@rollup/rollup-linux-ppc64-gnu@4.53.2': + resolution: {integrity: sha512-3HRQLUQbpBDMmzoxPJYd3W6vrVHOo2cVW8RUo87Xz0JPJcBLBr5kZ1pGcQAhdZgX9VV7NbGNipah1omKKe23/g==} cpu: [ppc64] os: [linux] - '@rollup/rollup-linux-riscv64-gnu@4.52.5': - resolution: {integrity: sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw==} + '@rollup/rollup-linux-riscv64-gnu@4.53.2': + resolution: {integrity: sha512-fMjKi+ojnmIvhk34gZP94vjogXNNUKMEYs+EDaB/5TG/wUkoeua7p7VCHnE6T2Tx+iaghAqQX8teQzcvrYpaQA==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-riscv64-musl@4.52.5': - resolution: {integrity: sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg==} + '@rollup/rollup-linux-riscv64-musl@4.53.2': + resolution: {integrity: sha512-XuGFGU+VwUUV5kLvoAdi0Wz5Xbh2SrjIxCtZj6Wq8MDp4bflb/+ThZsVxokM7n0pcbkEr2h5/pzqzDYI7cCgLQ==} cpu: [riscv64] os: [linux] - '@rollup/rollup-linux-s390x-gnu@4.52.5': - resolution: {integrity: sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ==} + '@rollup/rollup-linux-s390x-gnu@4.53.2': + resolution: {integrity: sha512-w6yjZF0P+NGzWR3AXWX9zc0DNEGdtvykB03uhonSHMRa+oWA6novflo2WaJr6JZakG2ucsyb+rvhrKac6NIy+w==} cpu: [s390x] os: [linux] - '@rollup/rollup-linux-x64-gnu@4.52.5': - resolution: {integrity: sha512-hXGLYpdhiNElzN770+H2nlx+jRog8TyynpTVzdlc6bndktjKWyZyiCsuDAlpd+j+W+WNqfcyAWz9HxxIGfZm1Q==} + '@rollup/rollup-linux-x64-gnu@4.53.2': + resolution: {integrity: sha512-yo8d6tdfdeBArzC7T/PnHd7OypfI9cbuZzPnzLJIyKYFhAQ8SvlkKtKBMbXDxe1h03Rcr7u++nFS7tqXz87Gtw==} cpu: [x64] os: [linux] - '@rollup/rollup-linux-x64-musl@4.52.5': - resolution: {integrity: sha512-arCGIcuNKjBoKAXD+y7XomR9gY6Mw7HnFBv5Rw7wQRvwYLR7gBAgV7Mb2QTyjXfTveBNFAtPt46/36vV9STLNg==} + '@rollup/rollup-linux-x64-musl@4.53.2': + resolution: {integrity: 
sha512-ah59c1YkCxKExPP8O9PwOvs+XRLKwh/mV+3YdKqQ5AMQ0r4M4ZDuOrpWkUaqO7fzAHdINzV9tEVu8vNw48z0lA==} cpu: [x64] os: [linux] - '@rollup/rollup-openharmony-arm64@4.52.5': - resolution: {integrity: sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw==} + '@rollup/rollup-openharmony-arm64@4.53.2': + resolution: {integrity: sha512-4VEd19Wmhr+Zy7hbUsFZ6YXEiP48hE//KPLCSVNY5RMGX2/7HZ+QkN55a3atM1C/BZCGIgqN+xrVgtdak2S9+A==} cpu: [arm64] os: [openharmony] - '@rollup/rollup-win32-arm64-msvc@4.52.5': - resolution: {integrity: sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w==} + '@rollup/rollup-win32-arm64-msvc@4.53.2': + resolution: {integrity: sha512-IlbHFYc/pQCgew/d5fslcy1KEaYVCJ44G8pajugd8VoOEI8ODhtb/j8XMhLpwHCMB3yk2J07ctup10gpw2nyMA==} cpu: [arm64] os: [win32] - '@rollup/rollup-win32-ia32-msvc@4.52.5': - resolution: {integrity: sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg==} + '@rollup/rollup-win32-ia32-msvc@4.53.2': + resolution: {integrity: sha512-lNlPEGgdUfSzdCWU176ku/dQRnA7W+Gp8d+cWv73jYrb8uT7HTVVxq62DUYxjbaByuf1Yk0RIIAbDzp+CnOTFg==} cpu: [ia32] os: [win32] - '@rollup/rollup-win32-x64-gnu@4.52.5': - resolution: {integrity: sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ==} + '@rollup/rollup-win32-x64-gnu@4.53.2': + resolution: {integrity: sha512-S6YojNVrHybQis2lYov1sd+uj7K0Q05NxHcGktuMMdIQ2VixGwAfbJ23NnlvvVV1bdpR2m5MsNBViHJKcA4ADw==} cpu: [x64] os: [win32] - '@rollup/rollup-win32-x64-msvc@4.52.5': - resolution: {integrity: sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg==} + '@rollup/rollup-win32-x64-msvc@4.53.2': + resolution: {integrity: sha512-k+/Rkcyx//P6fetPoLMb8pBeqJBNGx81uuf7iljX9++yNBVRDQgD04L+SVXmXmh5ZP4/WOp4mWF0kmi06PW2tA==} cpu: [x64] os: [win32] @@ -567,16 +567,16 @@ packages: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} engines: {node: '>=12'} - baseline-browser-mapping@2.8.19: - resolution: {integrity: sha512-zoKGUdu6vb2jd3YOq0nnhEDQVbPcHhco3UImJrv5dSkvxTc2pl2WjOPsjZXDwPDSl5eghIMuY3R6J9NDKF3KcQ==} + baseline-browser-mapping@2.8.29: + resolution: {integrity: sha512-sXdt2elaVnhpDNRDz+1BDx1JQoJRuNk7oVlAlbGiFkLikHCAQiccexF/9e91zVi6RCgqspl04aP+6Cnl9zRLrA==} hasBin: true braces@3.0.3: resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} engines: {node: '>=8'} - browserslist@4.26.3: - resolution: {integrity: sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w==} + browserslist@4.28.0: + resolution: {integrity: sha512-tbydkR/CxfMwelN0vwdP/pLkDwyAASZ+VfWm4EOwlB6SWhx1sYnWLqo8N5j0rAzPfzfRaxt0mM/4wPU/Su84RQ==} engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true @@ -587,8 +587,8 @@ packages: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} engines: {node: '>=8'} - caniuse-lite@1.0.30001751: - resolution: {integrity: sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==} + caniuse-lite@1.0.30001755: + resolution: {integrity: sha512-44V+Jm6ctPj7R52Na4TLi3Zri4dWUljJd+RDm+j8LtNCc/ihLCT+X1TzoOAkRETEWqjuLnh9581Tl80FvK7jVA==} chai@5.3.3: resolution: {integrity: sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==} 
@@ -641,8 +641,8 @@ packages: resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==} engines: {node: '>=6'} - electron-to-chromium@1.5.237: - resolution: {integrity: sha512-icUt1NvfhGLar5lSWH3tHNzablaA5js3HVHacQimfP8ViEBOQv+L7DKEuHdbTZ0SKCO1ogTJTIL1Gwk9S6Qvcg==} + electron-to-chromium@1.5.255: + resolution: {integrity: sha512-Z9oIp4HrFF/cZkDPMpz2XSuVpc1THDpT4dlmATFlJUIBVCy9Vap5/rIXsASP1CscBacBqhabwh8vLctqBwEerQ==} emoji-regex@8.0.0: resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} @@ -654,8 +654,8 @@ packages: es-module-lexer@1.7.0: resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} - esbuild@0.25.11: - resolution: {integrity: sha512-KohQwyzrKTQmhXDW1PjCv3Tyspn9n5GcY2RTDqeORIdIJY8yKIF7sTSopFmn/wpMPW4rdPXI0UE5LJLuq3bx0Q==} + esbuild@0.25.12: + resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==} engines: {node: '>=18'} hasBin: true @@ -724,8 +724,8 @@ packages: graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - graphql@16.11.0: - resolution: {integrity: sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw==} + graphql@16.12.0: + resolution: {integrity: sha512-DKKrynuQRne0PNpEbzuEdHlYOMksHSUI8Zc9Unei5gTsMNA2/vMpoMz/yKba50pejK56qj98qM0SjYxAKi13gQ==} engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} has-flag@4.0.0: @@ -766,8 +766,8 @@ packages: loupe@3.2.1: resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==} - magic-string@0.30.19: - resolution: {integrity: sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==} + magic-string@0.30.21: + resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==} merge-stream@2.0.0: resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} @@ -809,8 +809,8 @@ packages: neo-async@2.6.2: resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==} - node-releases@2.0.26: - resolution: {integrity: sha512-S2M9YimhSjBSvYnlr5/+umAnPHE++ODwt5e2Ij6FoX45HA/s4vHdkDx1eax2pAPeAOqu4s9b7ppahsyEFdVqQA==} + node-releases@2.0.27: + resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==} outvariant@1.4.3: resolution: {integrity: sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==} @@ -854,8 +854,8 @@ packages: rettime@0.7.0: resolution: {integrity: sha512-LPRKoHnLKd/r3dVxcwO7vhCW+orkOGj9ViueosEBK6ie89CijnfRlhaDhHq/3Hxu4CkWQtxwlBG0mzTQY6uQjw==} - rollup@4.52.5: - resolution: {integrity: sha512-3GuObel8h7Kqdjt0gxkEzaifHTqLVW56Y/bjN7PSQtkKr0w3V/QYSdt6QWYtd7A1xUtYQigtdUfgj1RvWVtorw==} + rollup@4.53.2: + resolution: {integrity: sha512-MHngMYwGJVi6Fmnk6ISmnk7JAHRNF0UkuucA0CUW3N3a4KnONPEZz+vUanQP/ZC/iY1Qkf3bwPWzyY84wEks1g==} engines: {node: '>=18.0.0', npm: '>=8.0.0'} hasBin: true @@ -948,8 +948,8 @@ packages: uglify-js: optional: true - terser@5.44.0: - resolution: {integrity: sha512-nIVck8DK+GM/0Frwd+nIhZ84pR/BX7rmXMfYwyg+Sri5oGVE99/E3KvXqpC2xHFxyqXyGHTKBSioxxplrO4I4w==} + 
terser@5.44.1: + resolution: {integrity: sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==} engines: {node: '>=10'} hasBin: true @@ -975,11 +975,11 @@ packages: resolution: {integrity: sha512-azl+t0z7pw/z958Gy9svOTuzqIk6xq+NSheJzn5MMWtWTFywIacg2wUlzKFGtt3cthx0r2SxMK0yzJOR0IES7Q==} engines: {node: '>=14.0.0'} - tldts-core@7.0.17: - resolution: {integrity: sha512-DieYoGrP78PWKsrXr8MZwtQ7GLCUeLxihtjC1jZsW1DnvSMdKPitJSe8OSYDM2u5H6g3kWJZpePqkp43TfLh0g==} + tldts-core@7.0.18: + resolution: {integrity: sha512-jqJC13oP4FFAahv4JT/0WTDrCF9Okv7lpKtOZUGPLiAnNbACcSg8Y8T+Z9xthOmRBqi/Sob4yi0TE0miRCvF7Q==} - tldts@7.0.17: - resolution: {integrity: sha512-Y1KQBgDd/NUc+LfOtKS6mNsC9CCaH+m2P1RoIZy7RAPo3C3/t8X45+zgut31cRZtZ3xKPjfn3TkGTrctC2TQIQ==} + tldts@7.0.18: + resolution: {integrity: sha512-lCcgTAgMxQ1JKOWrVGo6E69Ukbnx4Gc1wiYLRf6J5NN4HRYJtCby1rPF8rkQ4a6qqoFBK5dvjJ1zJ0F7VfDSvw==} hasBin: true to-regex-range@5.0.1: @@ -1009,8 +1009,8 @@ packages: undici-types@5.26.5: resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - update-browserslist-db@1.1.3: - resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==} + update-browserslist-db@1.1.4: + resolution: {integrity: sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A==} hasBin: true peerDependencies: browserslist: '>= 4.21.0' @@ -1020,8 +1020,8 @@ packages: engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0} hasBin: true - vite@7.1.11: - resolution: {integrity: sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==} + vite@7.2.2: + resolution: {integrity: sha512-BxAKBWmIbrDgrokdGZH1IgkIk/5mMHDreLDmCJ0qpyJaAteP8NvMhkwr/ZCQNqNH97bw/dANTE9PDzqwJghfMQ==} engines: {node: ^20.19.0 || >=22.12.0} hasBin: true peerDependencies: @@ -1137,39 +1137,39 @@ packages: snapshots: - '@biomejs/biome@2.2.5': + '@biomejs/biome@2.3.1': optionalDependencies: - '@biomejs/cli-darwin-arm64': 2.2.5 - '@biomejs/cli-darwin-x64': 2.2.5 - '@biomejs/cli-linux-arm64': 2.2.5 - '@biomejs/cli-linux-arm64-musl': 2.2.5 - '@biomejs/cli-linux-x64': 2.2.5 - '@biomejs/cli-linux-x64-musl': 2.2.5 - '@biomejs/cli-win32-arm64': 2.2.5 - '@biomejs/cli-win32-x64': 2.2.5 - - '@biomejs/cli-darwin-arm64@2.2.5': + '@biomejs/cli-darwin-arm64': 2.3.1 + '@biomejs/cli-darwin-x64': 2.3.1 + '@biomejs/cli-linux-arm64': 2.3.1 + '@biomejs/cli-linux-arm64-musl': 2.3.1 + '@biomejs/cli-linux-x64': 2.3.1 + '@biomejs/cli-linux-x64-musl': 2.3.1 + '@biomejs/cli-win32-arm64': 2.3.1 + '@biomejs/cli-win32-x64': 2.3.1 + + '@biomejs/cli-darwin-arm64@2.3.1': optional: true - '@biomejs/cli-darwin-x64@2.2.5': + '@biomejs/cli-darwin-x64@2.3.1': optional: true - '@biomejs/cli-linux-arm64-musl@2.2.5': + '@biomejs/cli-linux-arm64-musl@2.3.1': optional: true - '@biomejs/cli-linux-arm64@2.2.5': + '@biomejs/cli-linux-arm64@2.3.1': optional: true - '@biomejs/cli-linux-x64-musl@2.2.5': + '@biomejs/cli-linux-x64-musl@2.3.1': optional: true - '@biomejs/cli-linux-x64@2.2.5': + '@biomejs/cli-linux-x64@2.3.1': optional: true - '@biomejs/cli-win32-arm64@2.2.5': + '@biomejs/cli-win32-arm64@2.3.1': optional: true - '@biomejs/cli-win32-x64@2.2.5': + '@biomejs/cli-win32-x64@2.3.1': optional: true '@bundled-es-modules/cookie@2.0.1': @@ -1180,98 +1180,98 @@ snapshots: dependencies: statuses: 2.0.2 - '@esbuild/aix-ppc64@0.25.11': + '@esbuild/aix-ppc64@0.25.12': optional: true - 
'@esbuild/android-arm64@0.25.11': + '@esbuild/android-arm64@0.25.12': optional: true - '@esbuild/android-arm@0.25.11': + '@esbuild/android-arm@0.25.12': optional: true - '@esbuild/android-x64@0.25.11': + '@esbuild/android-x64@0.25.12': optional: true - '@esbuild/darwin-arm64@0.25.11': + '@esbuild/darwin-arm64@0.25.12': optional: true - '@esbuild/darwin-x64@0.25.11': + '@esbuild/darwin-x64@0.25.12': optional: true - '@esbuild/freebsd-arm64@0.25.11': + '@esbuild/freebsd-arm64@0.25.12': optional: true - '@esbuild/freebsd-x64@0.25.11': + '@esbuild/freebsd-x64@0.25.12': optional: true - '@esbuild/linux-arm64@0.25.11': + '@esbuild/linux-arm64@0.25.12': optional: true - '@esbuild/linux-arm@0.25.11': + '@esbuild/linux-arm@0.25.12': optional: true - '@esbuild/linux-ia32@0.25.11': + '@esbuild/linux-ia32@0.25.12': optional: true - '@esbuild/linux-loong64@0.25.11': + '@esbuild/linux-loong64@0.25.12': optional: true - '@esbuild/linux-mips64el@0.25.11': + '@esbuild/linux-mips64el@0.25.12': optional: true - '@esbuild/linux-ppc64@0.25.11': + '@esbuild/linux-ppc64@0.25.12': optional: true - '@esbuild/linux-riscv64@0.25.11': + '@esbuild/linux-riscv64@0.25.12': optional: true - '@esbuild/linux-s390x@0.25.11': + '@esbuild/linux-s390x@0.25.12': optional: true - '@esbuild/linux-x64@0.25.11': + '@esbuild/linux-x64@0.25.12': optional: true - '@esbuild/netbsd-arm64@0.25.11': + '@esbuild/netbsd-arm64@0.25.12': optional: true - '@esbuild/netbsd-x64@0.25.11': + '@esbuild/netbsd-x64@0.25.12': optional: true - '@esbuild/openbsd-arm64@0.25.11': + '@esbuild/openbsd-arm64@0.25.12': optional: true - '@esbuild/openbsd-x64@0.25.11': + '@esbuild/openbsd-x64@0.25.12': optional: true - '@esbuild/openharmony-arm64@0.25.11': + '@esbuild/openharmony-arm64@0.25.12': optional: true - '@esbuild/sunos-x64@0.25.11': + '@esbuild/sunos-x64@0.25.12': optional: true - '@esbuild/win32-arm64@0.25.11': + '@esbuild/win32-arm64@0.25.12': optional: true - '@esbuild/win32-ia32@0.25.11': + '@esbuild/win32-ia32@0.25.12': optional: true - '@esbuild/win32-x64@0.25.11': + '@esbuild/win32-x64@0.25.12': optional: true - '@inquirer/ansi@1.0.1': {} + '@inquirer/ansi@1.0.2': {} - '@inquirer/confirm@5.1.19(@types/node@18.19.130)': + '@inquirer/confirm@5.1.21(@types/node@18.19.130)': dependencies: - '@inquirer/core': 10.3.0(@types/node@18.19.130) - '@inquirer/type': 3.0.9(@types/node@18.19.130) + '@inquirer/core': 10.3.2(@types/node@18.19.130) + '@inquirer/type': 3.0.10(@types/node@18.19.130) optionalDependencies: '@types/node': 18.19.130 - '@inquirer/core@10.3.0(@types/node@18.19.130)': + '@inquirer/core@10.3.2(@types/node@18.19.130)': dependencies: - '@inquirer/ansi': 1.0.1 - '@inquirer/figures': 1.0.14 - '@inquirer/type': 3.0.9(@types/node@18.19.130) + '@inquirer/ansi': 1.0.2 + '@inquirer/figures': 1.0.15 + '@inquirer/type': 3.0.10(@types/node@18.19.130) cli-width: 4.1.0 mute-stream: 2.0.0 signal-exit: 4.1.0 @@ -1280,9 +1280,9 @@ snapshots: optionalDependencies: '@types/node': 18.19.130 - '@inquirer/figures@1.0.14': {} + '@inquirer/figures@1.0.15': {} - '@inquirer/type@3.0.9(@types/node@18.19.130)': + '@inquirer/type@3.0.10(@types/node@18.19.130)': optionalDependencies: '@types/node': 18.19.130 @@ -1323,70 +1323,70 @@ snapshots: '@open-draft/until@2.1.0': {} - '@rollup/rollup-android-arm-eabi@4.52.5': + '@rollup/rollup-android-arm-eabi@4.53.2': optional: true - '@rollup/rollup-android-arm64@4.52.5': + '@rollup/rollup-android-arm64@4.53.2': optional: true - '@rollup/rollup-darwin-arm64@4.52.5': + '@rollup/rollup-darwin-arm64@4.53.2': optional: true - 
'@rollup/rollup-darwin-x64@4.52.5': + '@rollup/rollup-darwin-x64@4.53.2': optional: true - '@rollup/rollup-freebsd-arm64@4.52.5': + '@rollup/rollup-freebsd-arm64@4.53.2': optional: true - '@rollup/rollup-freebsd-x64@4.52.5': + '@rollup/rollup-freebsd-x64@4.53.2': optional: true - '@rollup/rollup-linux-arm-gnueabihf@4.52.5': + '@rollup/rollup-linux-arm-gnueabihf@4.53.2': optional: true - '@rollup/rollup-linux-arm-musleabihf@4.52.5': + '@rollup/rollup-linux-arm-musleabihf@4.53.2': optional: true - '@rollup/rollup-linux-arm64-gnu@4.52.5': + '@rollup/rollup-linux-arm64-gnu@4.53.2': optional: true - '@rollup/rollup-linux-arm64-musl@4.52.5': + '@rollup/rollup-linux-arm64-musl@4.53.2': optional: true - '@rollup/rollup-linux-loong64-gnu@4.52.5': + '@rollup/rollup-linux-loong64-gnu@4.53.2': optional: true - '@rollup/rollup-linux-ppc64-gnu@4.52.5': + '@rollup/rollup-linux-ppc64-gnu@4.53.2': optional: true - '@rollup/rollup-linux-riscv64-gnu@4.52.5': + '@rollup/rollup-linux-riscv64-gnu@4.53.2': optional: true - '@rollup/rollup-linux-riscv64-musl@4.52.5': + '@rollup/rollup-linux-riscv64-musl@4.53.2': optional: true - '@rollup/rollup-linux-s390x-gnu@4.52.5': + '@rollup/rollup-linux-s390x-gnu@4.53.2': optional: true - '@rollup/rollup-linux-x64-gnu@4.52.5': + '@rollup/rollup-linux-x64-gnu@4.53.2': optional: true - '@rollup/rollup-linux-x64-musl@4.52.5': + '@rollup/rollup-linux-x64-musl@4.53.2': optional: true - '@rollup/rollup-openharmony-arm64@4.52.5': + '@rollup/rollup-openharmony-arm64@4.53.2': optional: true - '@rollup/rollup-win32-arm64-msvc@4.52.5': + '@rollup/rollup-win32-arm64-msvc@4.53.2': optional: true - '@rollup/rollup-win32-ia32-msvc@4.52.5': + '@rollup/rollup-win32-ia32-msvc@4.53.2': optional: true - '@rollup/rollup-win32-x64-gnu@4.52.5': + '@rollup/rollup-win32-x64-gnu@4.53.2': optional: true - '@rollup/rollup-win32-x64-msvc@4.52.5': + '@rollup/rollup-win32-x64-msvc@4.53.2': optional: true '@types/chai@5.2.3': @@ -1426,14 +1426,14 @@ snapshots: chai: 5.3.3 tinyrainbow: 2.0.0 - '@vitest/mocker@3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.1.11(@types/node@18.19.130)(terser@5.44.0))': + '@vitest/mocker@3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.2.2(@types/node@18.19.130)(terser@5.44.1))': dependencies: '@vitest/spy': 3.2.4 estree-walker: 3.0.3 - magic-string: 0.30.19 + magic-string: 0.30.21 optionalDependencies: msw: 2.11.2(@types/node@18.19.130)(typescript@5.7.3) - vite: 7.1.11(@types/node@18.19.130)(terser@5.44.0) + vite: 7.2.2(@types/node@18.19.130)(terser@5.44.1) '@vitest/pretty-format@3.2.4': dependencies: @@ -1448,7 +1448,7 @@ snapshots: '@vitest/snapshot@3.2.4': dependencies: '@vitest/pretty-format': 3.2.4 - magic-string: 0.30.19 + magic-string: 0.30.21 pathe: 2.0.3 '@vitest/spy@3.2.4': @@ -1571,25 +1571,25 @@ snapshots: assertion-error@2.0.1: {} - baseline-browser-mapping@2.8.19: {} + baseline-browser-mapping@2.8.29: {} braces@3.0.3: dependencies: fill-range: 7.1.1 - browserslist@4.26.3: + browserslist@4.28.0: dependencies: - baseline-browser-mapping: 2.8.19 - caniuse-lite: 1.0.30001751 - electron-to-chromium: 1.5.237 - node-releases: 2.0.26 - update-browserslist-db: 1.1.3(browserslist@4.26.3) + baseline-browser-mapping: 2.8.29 + caniuse-lite: 1.0.30001755 + electron-to-chromium: 1.5.255 + node-releases: 2.0.27 + update-browserslist-db: 1.1.4(browserslist@4.28.0) buffer-from@1.1.2: {} cac@6.7.14: {} - caniuse-lite@1.0.30001751: {} + caniuse-lite@1.0.30001755: {} chai@5.3.3: dependencies: @@ -1632,7 +1632,7 @@ snapshots: deep-eql@5.0.2: {} - 
electron-to-chromium@1.5.237: {} + electron-to-chromium@1.5.255: {} emoji-regex@8.0.0: {} @@ -1643,34 +1643,34 @@ snapshots: es-module-lexer@1.7.0: {} - esbuild@0.25.11: + esbuild@0.25.12: optionalDependencies: - '@esbuild/aix-ppc64': 0.25.11 - '@esbuild/android-arm': 0.25.11 - '@esbuild/android-arm64': 0.25.11 - '@esbuild/android-x64': 0.25.11 - '@esbuild/darwin-arm64': 0.25.11 - '@esbuild/darwin-x64': 0.25.11 - '@esbuild/freebsd-arm64': 0.25.11 - '@esbuild/freebsd-x64': 0.25.11 - '@esbuild/linux-arm': 0.25.11 - '@esbuild/linux-arm64': 0.25.11 - '@esbuild/linux-ia32': 0.25.11 - '@esbuild/linux-loong64': 0.25.11 - '@esbuild/linux-mips64el': 0.25.11 - '@esbuild/linux-ppc64': 0.25.11 - '@esbuild/linux-riscv64': 0.25.11 - '@esbuild/linux-s390x': 0.25.11 - '@esbuild/linux-x64': 0.25.11 - '@esbuild/netbsd-arm64': 0.25.11 - '@esbuild/netbsd-x64': 0.25.11 - '@esbuild/openbsd-arm64': 0.25.11 - '@esbuild/openbsd-x64': 0.25.11 - '@esbuild/openharmony-arm64': 0.25.11 - '@esbuild/sunos-x64': 0.25.11 - '@esbuild/win32-arm64': 0.25.11 - '@esbuild/win32-ia32': 0.25.11 - '@esbuild/win32-x64': 0.25.11 + '@esbuild/aix-ppc64': 0.25.12 + '@esbuild/android-arm': 0.25.12 + '@esbuild/android-arm64': 0.25.12 + '@esbuild/android-x64': 0.25.12 + '@esbuild/darwin-arm64': 0.25.12 + '@esbuild/darwin-x64': 0.25.12 + '@esbuild/freebsd-arm64': 0.25.12 + '@esbuild/freebsd-x64': 0.25.12 + '@esbuild/linux-arm': 0.25.12 + '@esbuild/linux-arm64': 0.25.12 + '@esbuild/linux-ia32': 0.25.12 + '@esbuild/linux-loong64': 0.25.12 + '@esbuild/linux-mips64el': 0.25.12 + '@esbuild/linux-ppc64': 0.25.12 + '@esbuild/linux-riscv64': 0.25.12 + '@esbuild/linux-s390x': 0.25.12 + '@esbuild/linux-x64': 0.25.12 + '@esbuild/netbsd-arm64': 0.25.12 + '@esbuild/netbsd-x64': 0.25.12 + '@esbuild/openbsd-arm64': 0.25.12 + '@esbuild/openbsd-x64': 0.25.12 + '@esbuild/openharmony-arm64': 0.25.12 + '@esbuild/sunos-x64': 0.25.12 + '@esbuild/win32-arm64': 0.25.12 + '@esbuild/win32-ia32': 0.25.12 + '@esbuild/win32-x64': 0.25.12 escalade@3.2.0: {} @@ -1716,7 +1716,7 @@ snapshots: graceful-fs@4.2.11: {} - graphql@16.11.0: {} + graphql@16.12.0: {} has-flag@4.0.0: {} @@ -1744,7 +1744,7 @@ snapshots: loupe@3.2.1: {} - magic-string@0.30.19: + magic-string@0.30.21: dependencies: '@jridgewell/sourcemap-codec': 1.5.5 @@ -1767,13 +1767,13 @@ snapshots: dependencies: '@bundled-es-modules/cookie': 2.0.1 '@bundled-es-modules/statuses': 1.0.1 - '@inquirer/confirm': 5.1.19(@types/node@18.19.130) + '@inquirer/confirm': 5.1.21(@types/node@18.19.130) '@mswjs/interceptors': 0.39.8 '@open-draft/deferred-promise': 2.2.0 '@open-draft/until': 2.1.0 '@types/cookie': 0.6.0 '@types/statuses': 2.0.6 - graphql: 16.11.0 + graphql: 16.12.0 headers-polyfill: 4.0.3 is-node-process: 1.2.0 outvariant: 1.4.3 @@ -1795,7 +1795,7 @@ snapshots: neo-async@2.6.2: {} - node-releases@2.0.26: {} + node-releases@2.0.27: {} outvariant@1.4.3: {} @@ -1827,32 +1827,32 @@ snapshots: rettime@0.7.0: {} - rollup@4.52.5: + rollup@4.53.2: dependencies: '@types/estree': 1.0.8 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.52.5 - '@rollup/rollup-android-arm64': 4.52.5 - '@rollup/rollup-darwin-arm64': 4.52.5 - '@rollup/rollup-darwin-x64': 4.52.5 - '@rollup/rollup-freebsd-arm64': 4.52.5 - '@rollup/rollup-freebsd-x64': 4.52.5 - '@rollup/rollup-linux-arm-gnueabihf': 4.52.5 - '@rollup/rollup-linux-arm-musleabihf': 4.52.5 - '@rollup/rollup-linux-arm64-gnu': 4.52.5 - '@rollup/rollup-linux-arm64-musl': 4.52.5 - '@rollup/rollup-linux-loong64-gnu': 4.52.5 - '@rollup/rollup-linux-ppc64-gnu': 4.52.5 - 
'@rollup/rollup-linux-riscv64-gnu': 4.52.5 - '@rollup/rollup-linux-riscv64-musl': 4.52.5 - '@rollup/rollup-linux-s390x-gnu': 4.52.5 - '@rollup/rollup-linux-x64-gnu': 4.52.5 - '@rollup/rollup-linux-x64-musl': 4.52.5 - '@rollup/rollup-openharmony-arm64': 4.52.5 - '@rollup/rollup-win32-arm64-msvc': 4.52.5 - '@rollup/rollup-win32-ia32-msvc': 4.52.5 - '@rollup/rollup-win32-x64-gnu': 4.52.5 - '@rollup/rollup-win32-x64-msvc': 4.52.5 + '@rollup/rollup-android-arm-eabi': 4.53.2 + '@rollup/rollup-android-arm64': 4.53.2 + '@rollup/rollup-darwin-arm64': 4.53.2 + '@rollup/rollup-darwin-x64': 4.53.2 + '@rollup/rollup-freebsd-arm64': 4.53.2 + '@rollup/rollup-freebsd-x64': 4.53.2 + '@rollup/rollup-linux-arm-gnueabihf': 4.53.2 + '@rollup/rollup-linux-arm-musleabihf': 4.53.2 + '@rollup/rollup-linux-arm64-gnu': 4.53.2 + '@rollup/rollup-linux-arm64-musl': 4.53.2 + '@rollup/rollup-linux-loong64-gnu': 4.53.2 + '@rollup/rollup-linux-ppc64-gnu': 4.53.2 + '@rollup/rollup-linux-riscv64-gnu': 4.53.2 + '@rollup/rollup-linux-riscv64-musl': 4.53.2 + '@rollup/rollup-linux-s390x-gnu': 4.53.2 + '@rollup/rollup-linux-x64-gnu': 4.53.2 + '@rollup/rollup-linux-x64-musl': 4.53.2 + '@rollup/rollup-openharmony-arm64': 4.53.2 + '@rollup/rollup-win32-arm64-msvc': 4.53.2 + '@rollup/rollup-win32-ia32-msvc': 4.53.2 + '@rollup/rollup-win32-x64-gnu': 4.53.2 + '@rollup/rollup-win32-x64-msvc': 4.53.2 fsevents: 2.3.3 safe-buffer@5.2.1: {} @@ -1923,10 +1923,10 @@ snapshots: jest-worker: 27.5.1 schema-utils: 4.3.3 serialize-javascript: 6.0.2 - terser: 5.44.0 + terser: 5.44.1 webpack: 5.102.1 - terser@5.44.0: + terser@5.44.1: dependencies: '@jridgewell/source-map': 0.3.11 acorn: 8.15.0 @@ -1948,11 +1948,11 @@ snapshots: tinyspy@4.0.4: {} - tldts-core@7.0.17: {} + tldts-core@7.0.18: {} - tldts@7.0.17: + tldts@7.0.18: dependencies: - tldts-core: 7.0.17 + tldts-core: 7.0.18 to-regex-range@5.0.1: dependencies: @@ -1960,7 +1960,7 @@ snapshots: tough-cookie@6.0.0: dependencies: - tldts: 7.0.17 + tldts: 7.0.18 ts-loader@9.5.4(typescript@5.7.3)(webpack@5.102.1): dependencies: @@ -1978,19 +1978,19 @@ snapshots: undici-types@5.26.5: {} - update-browserslist-db@1.1.3(browserslist@4.26.3): + update-browserslist-db@1.1.4(browserslist@4.28.0): dependencies: - browserslist: 4.26.3 + browserslist: 4.28.0 escalade: 3.2.0 picocolors: 1.1.1 - vite-node@3.2.4(@types/node@18.19.130)(terser@5.44.0): + vite-node@3.2.4(@types/node@18.19.130)(terser@5.44.1): dependencies: cac: 6.7.14 debug: 4.4.3 es-module-lexer: 1.7.0 pathe: 2.0.3 - vite: 7.1.11(@types/node@18.19.130)(terser@5.44.0) + vite: 7.2.2(@types/node@18.19.130)(terser@5.44.1) transitivePeerDependencies: - '@types/node' - jiti @@ -2005,24 +2005,24 @@ snapshots: - tsx - yaml - vite@7.1.11(@types/node@18.19.130)(terser@5.44.0): + vite@7.2.2(@types/node@18.19.130)(terser@5.44.1): dependencies: - esbuild: 0.25.11 + esbuild: 0.25.12 fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 postcss: 8.5.6 - rollup: 4.52.5 + rollup: 4.53.2 tinyglobby: 0.2.15 optionalDependencies: '@types/node': 18.19.130 fsevents: 2.3.3 - terser: 5.44.0 + terser: 5.44.1 - vitest@3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.0): + vitest@3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.1): dependencies: '@types/chai': 5.2.3 '@vitest/expect': 3.2.4 - '@vitest/mocker': 3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.1.11(@types/node@18.19.130)(terser@5.44.0)) + '@vitest/mocker': 
3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.2.2(@types/node@18.19.130)(terser@5.44.1)) '@vitest/pretty-format': 3.2.4 '@vitest/runner': 3.2.4 '@vitest/snapshot': 3.2.4 @@ -2031,7 +2031,7 @@ snapshots: chai: 5.3.3 debug: 4.4.3 expect-type: 1.2.2 - magic-string: 0.30.19 + magic-string: 0.30.21 pathe: 2.0.3 picomatch: 4.0.3 std-env: 3.10.0 @@ -2040,8 +2040,8 @@ snapshots: tinyglobby: 0.2.15 tinypool: 1.1.1 tinyrainbow: 2.0.0 - vite: 7.1.11(@types/node@18.19.130)(terser@5.44.0) - vite-node: 3.2.4(@types/node@18.19.130)(terser@5.44.0) + vite: 7.2.2(@types/node@18.19.130)(terser@5.44.1) + vite-node: 3.2.4(@types/node@18.19.130)(terser@5.44.1) why-is-node-running: 2.3.0 optionalDependencies: '@types/node': 18.19.130 @@ -2076,7 +2076,7 @@ snapshots: '@webassemblyjs/wasm-parser': 1.14.1 acorn: 8.15.0 acorn-import-phases: 1.0.4(acorn@8.15.0) - browserslist: 4.26.3 + browserslist: 4.28.0 chrome-trace-event: 1.0.4 enhanced-resolve: 5.18.3 es-module-lexer: 1.7.0 diff --git a/reference.md b/reference.md deleted file mode 100644 index f0bb9f0..0000000 --- a/reference.md +++ /dev/null @@ -1,1007 +0,0 @@ -# Reference -## Entities -
-### `client.entities.publishEntity({ ...params }) -> Lattice.Entity`
-
-#### 📝 Description
-
-Publish an entity for ingest into the Entities API. Entities created with this method are "owned" by the originator: other sources,
-such as the UI, may not edit or delete these entities. The server validates entities at API call time and
-returns an error if the entity is invalid.
-
-An entity ID must be provided when calling this endpoint. If the entity referenced by the entity ID does not exist
-then it will be created. Otherwise the entity will be updated. An entity will only be updated if its
-provenance.sourceUpdateTime is greater than the provenance.sourceUpdateTime of the existing entity.
-
-#### 🔌 Usage
-
-```typescript
-await client.entities.publishEntity({});
-```
-
-#### ⚙️ Parameters
-
-**request:** `Lattice.Entity`
-
-**requestOptions:** `Entities.RequestOptions`
-
-### `client.entities.getEntity(entityId) -> Lattice.Entity`
-
-#### 🔌 Usage
-
-```typescript
-await client.entities.getEntity("entityId");
-```
-
-#### ⚙️ Parameters
-
-**entityId:** `string` — ID of the entity to return
-
-**requestOptions:** `Entities.RequestOptions`
-
-### `client.entities.overrideEntity(entityId, fieldPath, { ...params }) -> Lattice.Entity`
-
-#### 📝 Description
-
-Only fields marked with overridable can be overridden. Please refer to our documentation to see the comprehensive
-list of fields that can be overridden. The entity in the request body should only have a value set on the field
-specified in the field path parameter. Field paths are rooted in the base entity object and must be represented
-using lower_snake_case. Do not include "entity" in the field path.
-
-Note that overrides are applied in an eventually consistent manner. If multiple overrides are created
-concurrently for the same field path, the last writer wins.
-
-#### 🔌 Usage
-
-```typescript
-await client.entities.overrideEntity("entityId", "mil_view.disposition");
-```
-
-#### ⚙️ Parameters
-
-**entityId:** `string` — The unique ID of the entity to override
-
-**fieldPath:** `string` — fieldPath to override
-
-**request:** `Lattice.EntityOverride`
-
-**requestOptions:** `Entities.RequestOptions`
-
-### `client.entities.removeEntityOverride(entityId, fieldPath) -> Lattice.Entity`
-
-#### 📝 Description
-
-This operation clears the override value from the specified field path on the entity.
-
-#### 🔌 Usage
-
-```typescript
-await client.entities.removeEntityOverride("entityId", "mil_view.disposition");
-```
-
-#### ⚙️ Parameters
-
-**entityId:** `string` — The unique ID of the entity to undo an override from.
-
-**fieldPath:** `string` — The fieldPath to clear overrides from.
-
-**requestOptions:** `Entities.RequestOptions`
-
-### `client.entities.longPollEntityEvents({ ...params }) -> Lattice.EntityEventResponse`
-
-#### 📝 Description
-
-This is a long polling API that will first return all pre-existing data and then return all new data as
-it becomes available. If you want to start a new polling session then open a request with an empty
-'sessionToken' in the request body. The server will return a new session token in the response.
-If you want to retrieve the next batch of results from an existing polling session then send the session
-token you received from the server in the request body. If no new data is available then the server will
-hold the connection open for up to 5 minutes. After the 5 minute timeout period, the server will close the
-connection with no results and you may resume polling with the same session token. If your session falls behind
-more than 3x the total number of entities in the environment, the server will terminate your session.
-In this case you must start a new session by sending a request with an empty session token.
-
-#### 🔌 Usage
-
-```typescript
-await client.entities.longPollEntityEvents({
-    sessionToken: "sessionToken"
-});
-```
-
-#### ⚙️ Parameters
-
-**request:** `Lattice.EntityEventRequest`
-
-**requestOptions:** `Entities.RequestOptions`
-
-### `client.entities.streamEntities({ ...params }) -> core.Stream`
-
-#### 📝 Description
-
-Establishes a persistent connection to stream entity events as they occur.
-
-#### 🔌 Usage
-
-```typescript
-const response = await client.entities.streamEntities();
-for await (const item of response) {
-    console.log(item);
-}
-```
-
-#### ⚙️ Parameters
-
-**request:** `Lattice.EntityStreamRequest`
-
-**requestOptions:** `Entities.RequestOptions`
-
-## Tasks
-
-### `client.tasks.createTask({ ...params }) -> Lattice.Task`
-
-#### 📝 Description
-
-Submit a request to create a task and schedule it for delivery. Tasks, once delivered, will
-be asynchronously updated by their destined agent.
-
-#### 🔌 Usage
-
-```typescript
-await client.tasks.createTask();
-```
-
-#### ⚙️ Parameters
-
-**request:** `Lattice.TaskCreation`
-
-**requestOptions:** `Tasks.RequestOptions`
-
-### `client.tasks.getTask(taskId) -> Lattice.Task`
-
-#### 🔌 Usage
-
-```typescript
-await client.tasks.getTask("taskId");
-```
-
-#### ⚙️ Parameters
-
-**taskId:** `string` — ID of task to return
-
-**requestOptions:** `Tasks.RequestOptions`
-
-### `client.tasks.updateTaskStatus(taskId, { ...params }) -> Lattice.Task`
-
-#### 📝 Description
-
-Update the status of a task.
-
-#### 🔌 Usage
-
-```typescript
-await client.tasks.updateTaskStatus("taskId");
-```
-
-#### ⚙️ Parameters
-
-**taskId:** `string` — ID of task to update status of
-
-**request:** `Lattice.TaskStatusUpdate`
-
-**requestOptions:** `Tasks.RequestOptions`
-
-### `client.tasks.queryTasks({ ...params }) -> Lattice.TaskQueryResults`
-
-#### 📝 Description
-
-Query for tasks by a specified search criteria.
-
-#### 🔌 Usage
-
-```typescript
-await client.tasks.queryTasks();
-```
-
-#### ⚙️ Parameters
-
-**request:** `Lattice.TaskQuery`
-
-**requestOptions:** `Tasks.RequestOptions`
-
-### `client.tasks.listenAsAgent({ ...params }) -> Lattice.AgentRequest`
-
-#### 📝 Description
-
-This is a long polling API that will block until a new task is ready for delivery. If no new task is
-available then the server will hold on to your request for up to 5 minutes, after that 5 minute timeout
-period you will be expected to reinitiate a new request.
-
-#### 🔌 Usage
-
-```typescript
-await client.tasks.listenAsAgent();
-```
-
-#### ⚙️ Parameters
-
-**request:** `Lattice.AgentListener`
-
-**requestOptions:** `Tasks.RequestOptions`
-
-## Objects
-
-### `client.objects.listObjects({ ...params }) -> core.Page`
-
-#### 📝 Description
-
-Lists objects in your environment. You can define a prefix to list a subset of your objects. If you do not set
-a prefix, Lattice returns all available objects. By default this endpoint will list local objects only.
-
-#### 🔌 Usage
-
-```typescript
-const response = await client.objects.listObjects({
-    prefix: "prefix",
-    sinceTimestamp: "2024-01-15T09:30:00Z",
-    pageToken: "pageToken",
-    allObjectsInMesh: true
-});
-for await (const item of response) {
-    console.log(item);
-}
-
-// Or you can manually iterate page-by-page
-let page = await client.objects.listObjects({
-    prefix: "prefix",
-    sinceTimestamp: "2024-01-15T09:30:00Z",
-    pageToken: "pageToken",
-    allObjectsInMesh: true
-});
-while (page.hasNextPage()) {
-    page = page.getNextPage();
-}
-```
-
-#### ⚙️ Parameters
-
-**request:** `Lattice.ListObjectsRequest`
-
-**requestOptions:** `Objects.RequestOptions`
-
-### `client.objects.getObject(objectPath, { ...params }) -> core.BinaryResponse`
-
-#### 📝 Description
-
-Fetches an object from your environment using the objectPath path parameter.
-
-#### 🔌 Usage
-
-```typescript
-await client.objects.getObject("objectPath");
-```
-
-#### ⚙️ Parameters
-
-**objectPath:** `string` — The path of the object to fetch.
-
-**request:** `Lattice.GetObjectRequest`
-
-**requestOptions:** `Objects.RequestOptions`
-
-### `client.objects.deleteObject(objectPath) -> void`
-
-#### 📝 Description
-
-Deletes an object from your environment given the objectPath path parameter.
-
-#### 🔌 Usage
-
-```typescript
-await client.objects.deleteObject("objectPath");
-```
-
-#### ⚙️ Parameters
-
-**objectPath:** `string` — The path of the object to delete.
-
-**requestOptions:** `Objects.RequestOptions`
-
-### `client.objects.getObjectMetadata(objectPath) -> Headers`
-
-#### 📝 Description
-
-Returns metadata for a specified object path. Use this to fetch metadata such as object size (size_bytes),
-its expiry time (expiry_time), or its latest update timestamp (last_updated_at).
-
-#### 🔌 Usage
-
-```typescript
-await client.objects.getObjectMetadata("objectPath");
-```
-
-#### ⚙️ Parameters
-
-**objectPath:** `string` — The path of the object to query.
-
-**requestOptions:** `Objects.RequestOptions`
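The deleted `reference.md` above documented the positional-argument surface; the generated client diffs that follow move every path parameter into a single request object and add optional `fetch` and `logging` hooks to the client options. As orientation only, here is a minimal sketch of the new calling convention. It is not taken from the SDK docs; the `token` option and the exact `LogConfig` shape are assumptions, since neither appears in the hunks below.

```typescript
import { LatticeClient } from "@anduril-industries/lattice-sdk";

// Assumed client setup: the `token` option is not part of the hunks shown in this diff.
const client = new LatticeClient({
    token: "<YOUR_TOKEN>",
    // New in this change: optionally supply a custom fetch implementation.
    fetch: globalThis.fetch,
    // logging: ...  // also new; accepts a core.logging.LogConfig or Logger (shape not shown here)
});

// Before (positional path parameters, as in the removed reference.md):
// await client.entities.getEntity("entityId");
// await client.entities.overrideEntity("entityId", "mil_view.disposition");

// After (a single request object per endpoint, matching the regenerated examples below):
await client.entities.getEntity({ entityId: "entityId" });
await client.entities.overrideEntity({ entityId: "entityId", fieldPath: "mil_view.disposition" });
await client.tasks.updateTaskStatus({ taskId: "taskId" });
await client.objects.deleteObject({ objectPath: "objectPath" });
```

The same pattern applies to `getTask`, `getObject`, and `getObjectMetadata`, whose request interfaces (`GetTaskRequest`, `GetObjectRequest`, `GetObjectMetadataRequest`) are added further down in this diff.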
diff --git a/src/BaseClient.ts b/src/BaseClient.ts index 2ef235a..4f0d230 100644 --- a/src/BaseClient.ts +++ b/src/BaseClient.ts @@ -1,6 +1,7 @@ // This file was auto-generated by Fern from our API Definition. -import type * as core from "./core/index.js"; +import { mergeHeaders } from "./core/headers.js"; +import * as core from "./core/index.js"; import type * as environments from "./environments.js"; export interface BaseClientOptions { @@ -14,6 +15,10 @@ export interface BaseClientOptions { timeoutInSeconds?: number; /** The default number of times to retry the request. Defaults to 2. */ maxRetries?: number; + /** Provide a custom fetch implementation. Useful for platforms that don't have a built-in fetch or need a custom implementation. */ + fetch?: typeof fetch; + /** Configure logging for the client. */ + logging?: core.logging.LogConfig | core.logging.Logger; } export interface BaseRequestOptions { @@ -28,3 +33,23 @@ export interface BaseRequestOptions { /** Additional headers to include in the request. */ headers?: Record | null | undefined>; } + +export function normalizeClientOptions(options: T): T { + const headers = mergeHeaders( + { + "X-Fern-Language": "JavaScript", + "X-Fern-SDK-Name": "@anduril-industries/lattice-sdk", + "X-Fern-SDK-Version": "3.0.1", + "User-Agent": "@anduril-industries/lattice-sdk/3.0.1", + "X-Fern-Runtime": core.RUNTIME.type, + "X-Fern-Runtime-Version": core.RUNTIME.version, + }, + options?.headers, + ); + + return { + ...options, + logging: core.logging.createLogger(options?.logging), + headers, + } as T; +} diff --git a/src/Client.ts b/src/Client.ts index 483efe7..f6b7714 100644 --- a/src/Client.ts +++ b/src/Client.ts @@ -1,11 +1,10 @@ // This file was auto-generated by Fern from our API Definition. -import { Entities } from "./api/resources/entities/client/Client.js"; -import { Objects } from "./api/resources/objects/client/Client.js"; -import { Tasks } from "./api/resources/tasks/client/Client.js"; +import { EntitiesClient } from "./api/resources/entities/client/Client.js"; +import { ObjectsClient } from "./api/resources/objects/client/Client.js"; +import { TasksClient } from "./api/resources/tasks/client/Client.js"; import type { BaseClientOptions, BaseRequestOptions } from "./BaseClient.js"; -import { mergeHeaders } from "./core/headers.js"; -import * as core from "./core/index.js"; +import { normalizeClientOptions } from "./BaseClient.js"; export declare namespace LatticeClient { export interface Options extends BaseClientOptions {} @@ -15,36 +14,23 @@ export declare namespace LatticeClient { export class LatticeClient { protected readonly _options: LatticeClient.Options; - protected _entities: Entities | undefined; - protected _tasks: Tasks | undefined; - protected _objects: Objects | undefined; - - constructor(_options: LatticeClient.Options = {}) { - this._options = { - ..._options, - headers: mergeHeaders( - { - "X-Fern-Language": "JavaScript", - "X-Fern-SDK-Name": "@anduril-industries/lattice-sdk", - "X-Fern-SDK-Version": "3.0.0", - "User-Agent": "@anduril-industries/lattice-sdk/3.0.0", - "X-Fern-Runtime": core.RUNTIME.type, - "X-Fern-Runtime-Version": core.RUNTIME.version, - }, - _options?.headers, - ), - }; + protected _entities: EntitiesClient | undefined; + protected _tasks: TasksClient | undefined; + protected _objects: ObjectsClient | undefined; + + constructor(options: LatticeClient.Options = {}) { + this._options = normalizeClientOptions(options); } - public get entities(): Entities { - return (this._entities ??= new 
Entities(this._options)); + public get entities(): EntitiesClient { + return (this._entities ??= new EntitiesClient(this._options)); } - public get tasks(): Tasks { - return (this._tasks ??= new Tasks(this._options)); + public get tasks(): TasksClient { + return (this._tasks ??= new TasksClient(this._options)); } - public get objects(): Objects { - return (this._objects ??= new Objects(this._options)); + public get objects(): ObjectsClient { + return (this._objects ??= new ObjectsClient(this._options)); } } diff --git a/src/api/resources/entities/client/Client.ts b/src/api/resources/entities/client/Client.ts index 28973e5..72f3e2c 100644 --- a/src/api/resources/entities/client/Client.ts +++ b/src/api/resources/entities/client/Client.ts @@ -1,13 +1,14 @@ // This file was auto-generated by Fern from our API Definition. import type { BaseClientOptions, BaseRequestOptions } from "../../../../BaseClient.js"; +import { normalizeClientOptions } from "../../../../BaseClient.js"; import { mergeHeaders, mergeOnlyDefinedHeaders } from "../../../../core/headers.js"; import * as core from "../../../../core/index.js"; import * as environments from "../../../../environments.js"; import * as errors from "../../../../errors/index.js"; import * as Lattice from "../../../index.js"; -export declare namespace Entities { +export declare namespace EntitiesClient { export interface Options extends BaseClientOptions {} export interface RequestOptions extends BaseRequestOptions {} @@ -16,11 +17,11 @@ export declare namespace Entities { /** * The Entities API */ -export class Entities { - protected readonly _options: Entities.Options; +export class EntitiesClient { + protected readonly _options: EntitiesClient.Options; - constructor(_options: Entities.Options = {}) { - this._options = _options; + constructor(options: EntitiesClient.Options = {}) { + this._options = normalizeClientOptions(options); } /** @@ -33,7 +34,7 @@ export class Entities { * provenance.sourceUpdateTime is greater than the provenance.sourceUpdateTime of the existing entity. * * @param {Lattice.Entity} request - * @param {Entities.RequestOptions} requestOptions - Request-specific configuration. + * @param {EntitiesClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} @@ -43,14 +44,14 @@ export class Entities { */ public publishEntity( request: Lattice.Entity, - requestOptions?: Entities.RequestOptions, + requestOptions?: EntitiesClient.RequestOptions, ): core.HttpResponsePromise { return core.HttpResponsePromise.fromPromise(this.__publishEntity(request, requestOptions)); } private async __publishEntity( request: Lattice.Entity, - requestOptions?: Entities.RequestOptions, + requestOptions?: EntitiesClient.RequestOptions, ): Promise> { const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, @@ -73,6 +74,8 @@ export class Entities { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse }; @@ -111,27 +114,30 @@ export class Entities { } /** - * @param {string} entityId - ID of the entity to return - * @param {Entities.RequestOptions} requestOptions - Request-specific configuration. 
+ * @param {Lattice.GetEntityRequest} request + * @param {EntitiesClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} * @throws {@link Lattice.NotFoundError} * * @example - * await client.entities.getEntity("entityId") + * await client.entities.getEntity({ + * entityId: "entityId" + * }) */ public getEntity( - entityId: string, - requestOptions?: Entities.RequestOptions, + request: Lattice.GetEntityRequest, + requestOptions?: EntitiesClient.RequestOptions, ): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise(this.__getEntity(entityId, requestOptions)); + return core.HttpResponsePromise.fromPromise(this.__getEntity(request, requestOptions)); } private async __getEntity( - entityId: string, - requestOptions?: Entities.RequestOptions, + request: Lattice.GetEntityRequest, + requestOptions?: EntitiesClient.RequestOptions, ): Promise> { + const { entityId } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ Authorization: await this._getAuthorizationHeader() }), @@ -150,6 +156,8 @@ export class Entities { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse }; @@ -198,35 +206,31 @@ export class Entities { * Note that overrides are applied in an eventually consistent manner. If multiple overrides are created * concurrently for the same field path, the last writer wins. * - * @param {string} entityId - The unique ID of the entity to override - * @param {string} fieldPath - fieldPath to override * @param {Lattice.EntityOverride} request - * @param {Entities.RequestOptions} requestOptions - Request-specific configuration. + * @param {EntitiesClient.RequestOptions} requestOptions - Request-specific configuration. 
* * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} * @throws {@link Lattice.NotFoundError} * * @example - * await client.entities.overrideEntity("entityId", "mil_view.disposition") + * await client.entities.overrideEntity({ + * entityId: "entityId", + * fieldPath: "mil_view.disposition" + * }) */ public overrideEntity( - entityId: string, - fieldPath: string, - request: Lattice.EntityOverride = {}, - requestOptions?: Entities.RequestOptions, + request: Lattice.EntityOverride, + requestOptions?: EntitiesClient.RequestOptions, ): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise( - this.__overrideEntity(entityId, fieldPath, request, requestOptions), - ); + return core.HttpResponsePromise.fromPromise(this.__overrideEntity(request, requestOptions)); } private async __overrideEntity( - entityId: string, - fieldPath: string, - request: Lattice.EntityOverride = {}, - requestOptions?: Entities.RequestOptions, + request: Lattice.EntityOverride, + requestOptions?: EntitiesClient.RequestOptions, ): Promise> { + const { entityId, fieldPath, ..._body } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ Authorization: await this._getAuthorizationHeader() }), @@ -244,10 +248,12 @@ export class Entities { contentType: "application/json", queryParameters: requestOptions?.queryParams, requestType: "json", - body: request, + body: _body, timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse }; @@ -292,30 +298,31 @@ export class Entities { /** * This operation clears the override value from the specified field path on the entity. * - * @param {string} entityId - The unique ID of the entity to undo an override from. - * @param {string} fieldPath - The fieldPath to clear overrides from. - * @param {Entities.RequestOptions} requestOptions - Request-specific configuration. + * @param {Lattice.RemoveEntityOverrideRequest} request + * @param {EntitiesClient.RequestOptions} requestOptions - Request-specific configuration. 
* * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} * @throws {@link Lattice.NotFoundError} * * @example - * await client.entities.removeEntityOverride("entityId", "mil_view.disposition") + * await client.entities.removeEntityOverride({ + * entityId: "entityId", + * fieldPath: "mil_view.disposition" + * }) */ public removeEntityOverride( - entityId: string, - fieldPath: string, - requestOptions?: Entities.RequestOptions, + request: Lattice.RemoveEntityOverrideRequest, + requestOptions?: EntitiesClient.RequestOptions, ): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise(this.__removeEntityOverride(entityId, fieldPath, requestOptions)); + return core.HttpResponsePromise.fromPromise(this.__removeEntityOverride(request, requestOptions)); } private async __removeEntityOverride( - entityId: string, - fieldPath: string, - requestOptions?: Entities.RequestOptions, + request: Lattice.RemoveEntityOverrideRequest, + requestOptions?: EntitiesClient.RequestOptions, ): Promise> { + const { entityId, fieldPath } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ Authorization: await this._getAuthorizationHeader() }), @@ -334,6 +341,8 @@ export class Entities { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse }; @@ -387,7 +396,7 @@ export class Entities { * In this case you must start a new session by sending a request with an empty session token. * * @param {Lattice.EntityEventRequest} request - * @param {Entities.RequestOptions} requestOptions - Request-specific configuration. + * @param {EntitiesClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} @@ -402,14 +411,14 @@ export class Entities { */ public longPollEntityEvents( request: Lattice.EntityEventRequest, - requestOptions?: Entities.RequestOptions, + requestOptions?: EntitiesClient.RequestOptions, ): core.HttpResponsePromise { return core.HttpResponsePromise.fromPromise(this.__longPollEntityEvents(request, requestOptions)); } private async __longPollEntityEvents( request: Lattice.EntityEventRequest, - requestOptions?: Entities.RequestOptions, + requestOptions?: EntitiesClient.RequestOptions, ): Promise> { const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, @@ -432,6 +441,8 @@ export class Entities { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? 
this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.EntityEventResponse, rawResponse: _response.rawResponse }; @@ -480,14 +491,14 @@ export class Entities { */ public streamEntities( request: Lattice.EntityStreamRequest = {}, - requestOptions?: Entities.RequestOptions, + requestOptions?: EntitiesClient.RequestOptions, ): core.HttpResponsePromise> { return core.HttpResponsePromise.fromPromise(this.__streamEntities(request, requestOptions)); } private async __streamEntities( request: Lattice.EntityStreamRequest = {}, - requestOptions?: Entities.RequestOptions, + requestOptions?: EntitiesClient.RequestOptions, ): Promise>> { const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, @@ -511,6 +522,8 @@ export class Entities { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { diff --git a/src/api/resources/entities/client/requests/EntityOverride.ts b/src/api/resources/entities/client/requests/EntityOverride.ts index 1ace3fc..ec0b3e2 100644 --- a/src/api/resources/entities/client/requests/EntityOverride.ts +++ b/src/api/resources/entities/client/requests/EntityOverride.ts @@ -4,9 +4,16 @@ import type * as Lattice from "../../../../index.js"; /** * @example - * {} + * { + * entityId: "entityId", + * fieldPath: "mil_view.disposition" + * } */ export interface EntityOverride { + /** The unique ID of the entity to override */ + entityId: string; + /** fieldPath to override */ + fieldPath: string; /** * The entity containing the overridden fields. The service will extract the overridable fields from * the object and ignore all other fields. diff --git a/src/api/resources/entities/client/requests/GetEntityRequest.ts b/src/api/resources/entities/client/requests/GetEntityRequest.ts new file mode 100644 index 0000000..dedc9ea --- /dev/null +++ b/src/api/resources/entities/client/requests/GetEntityRequest.ts @@ -0,0 +1,12 @@ +// This file was auto-generated by Fern from our API Definition. + +/** + * @example + * { + * entityId: "entityId" + * } + */ +export interface GetEntityRequest { + /** ID of the entity to return */ + entityId: string; +} diff --git a/src/api/resources/entities/client/requests/RemoveEntityOverrideRequest.ts b/src/api/resources/entities/client/requests/RemoveEntityOverrideRequest.ts new file mode 100644 index 0000000..547e1b7 --- /dev/null +++ b/src/api/resources/entities/client/requests/RemoveEntityOverrideRequest.ts @@ -0,0 +1,15 @@ +// This file was auto-generated by Fern from our API Definition. + +/** + * @example + * { + * entityId: "entityId", + * fieldPath: "mil_view.disposition" + * } + */ +export interface RemoveEntityOverrideRequest { + /** The unique ID of the entity to undo an override from. */ + entityId: string; + /** The fieldPath to clear overrides from. 
*/ + fieldPath: string; +} diff --git a/src/api/resources/entities/client/requests/index.ts b/src/api/resources/entities/client/requests/index.ts index 7dcd185..eee6bb2 100644 --- a/src/api/resources/entities/client/requests/index.ts +++ b/src/api/resources/entities/client/requests/index.ts @@ -1,3 +1,5 @@ export type { EntityEventRequest } from "./EntityEventRequest.js"; export type { EntityOverride } from "./EntityOverride.js"; export type { EntityStreamRequest } from "./EntityStreamRequest.js"; +export type { GetEntityRequest } from "./GetEntityRequest.js"; +export type { RemoveEntityOverrideRequest } from "./RemoveEntityOverrideRequest.js"; diff --git a/src/api/resources/objects/client/Client.ts b/src/api/resources/objects/client/Client.ts index b548535..a4f0f82 100644 --- a/src/api/resources/objects/client/Client.ts +++ b/src/api/resources/objects/client/Client.ts @@ -1,13 +1,14 @@ // This file was auto-generated by Fern from our API Definition. import type { BaseClientOptions, BaseRequestOptions } from "../../../../BaseClient.js"; +import { normalizeClientOptions } from "../../../../BaseClient.js"; import { mergeHeaders, mergeOnlyDefinedHeaders } from "../../../../core/headers.js"; import * as core from "../../../../core/index.js"; import * as environments from "../../../../environments.js"; import * as errors from "../../../../errors/index.js"; import * as Lattice from "../../../index.js"; -export declare namespace Objects { +export declare namespace ObjectsClient { export interface Options extends BaseClientOptions {} export interface RequestOptions extends BaseRequestOptions {} @@ -16,35 +17,30 @@ export declare namespace Objects { /** * API to manipulate and interrogate object data on the local node, as well as list objects from the Lattice mesh. */ -export class Objects { - protected readonly _options: Objects.Options; +export class ObjectsClient { + protected readonly _options: ObjectsClient.Options; - constructor(_options: Objects.Options = {}) { - this._options = _options; + constructor(options: ObjectsClient.Options = {}) { + this._options = normalizeClientOptions(options); } /** * Lists objects in your environment. You can define a prefix to list a subset of your objects. If you do not set a prefix, Lattice returns all available objects. By default this endpoint will list local objects only. * * @param {Lattice.ListObjectsRequest} request - * @param {Objects.RequestOptions} requestOptions - Request-specific configuration. + * @param {ObjectsClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} * @throws {@link Lattice.InternalServerError} * * @example - * await client.objects.listObjects({ - * prefix: "prefix", - * sinceTimestamp: "2024-01-15T09:30:00Z", - * pageToken: "pageToken", - * allObjectsInMesh: true - * }) + * await client.objects.listObjects() */ public async listObjects( request: Lattice.ListObjectsRequest = {}, - requestOptions?: Objects.RequestOptions, - ): Promise> { + requestOptions?: ObjectsClient.RequestOptions, + ): Promise> { const list = core.HttpResponsePromise.interceptFunction( async (request: Lattice.ListObjectsRequest): Promise> => { const { prefix, sinceTimestamp, pageToken, allObjectsInMesh } = request; @@ -79,6 +75,8 @@ export class Objects { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? 
this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.ListResponse, rawResponse: _response.rawResponse }; @@ -120,7 +118,7 @@ export class Objects { }, ); const dataWithRawResponse = await list(request).withRawResponse(); - return new core.Pageable({ + return new core.Page({ response: dataWithRawResponse.data, rawResponse: dataWithRawResponse.rawResponse, hasNextPage: (response) => @@ -141,19 +139,17 @@ export class Objects { * @throws {@link Lattice.InternalServerError} */ public getObject( - objectPath: string, - request: Lattice.GetObjectRequest = {}, - requestOptions?: Objects.RequestOptions, + request: Lattice.GetObjectRequest, + requestOptions?: ObjectsClient.RequestOptions, ): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise(this.__getObject(objectPath, request, requestOptions)); + return core.HttpResponsePromise.fromPromise(this.__getObject(request, requestOptions)); } private async __getObject( - objectPath: string, - request: Lattice.GetObjectRequest = {}, - requestOptions?: Objects.RequestOptions, + request: Lattice.GetObjectRequest, + requestOptions?: ObjectsClient.RequestOptions, ): Promise> { - const { "Accept-Encoding": acceptEncoding, Priority: priority } = request; + const { objectPath, "Accept-Encoding": acceptEncoding, Priority: priority } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ @@ -177,6 +173,8 @@ export class Objects { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body, rawResponse: _response.rawResponse }; @@ -223,7 +221,7 @@ export class Objects { * * @param {core.file.Uploadable} uploadable * @param {string} objectPath - * @param {Objects.RequestOptions} requestOptions - Request-specific configuration. + * @param {ObjectsClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} @@ -234,7 +232,7 @@ export class Objects { public uploadObject( uploadable: core.file.Uploadable, objectPath: string, - requestOptions?: Objects.RequestOptions, + requestOptions?: ObjectsClient.RequestOptions, ): core.HttpResponsePromise { return core.HttpResponsePromise.fromPromise(this.__uploadObject(uploadable, objectPath, requestOptions)); } @@ -242,7 +240,7 @@ export class Objects { private async __uploadObject( uploadable: core.file.Uploadable, objectPath: string, - requestOptions?: Objects.RequestOptions, + requestOptions?: ObjectsClient.RequestOptions, ): Promise> { const _binaryUploadRequest = await core.file.toBinaryUploadRequest(uploadable); const _headers: core.Fetcher.Args["headers"] = mergeHeaders( @@ -268,6 +266,8 @@ export class Objects { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? 
this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.PathMetadata, rawResponse: _response.rawResponse }; @@ -316,8 +316,8 @@ export class Objects { /** * Deletes an object from your environment given the objectPath path parameter. * - * @param {string} objectPath - The path of the object to delete. - * @param {Objects.RequestOptions} requestOptions - Request-specific configuration. + * @param {Lattice.DeleteObjectRequest} request + * @param {ObjectsClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} @@ -325,16 +325,22 @@ export class Objects { * @throws {@link Lattice.InternalServerError} * * @example - * await client.objects.deleteObject("objectPath") + * await client.objects.deleteObject({ + * objectPath: "objectPath" + * }) */ - public deleteObject(objectPath: string, requestOptions?: Objects.RequestOptions): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise(this.__deleteObject(objectPath, requestOptions)); + public deleteObject( + request: Lattice.DeleteObjectRequest, + requestOptions?: ObjectsClient.RequestOptions, + ): core.HttpResponsePromise { + return core.HttpResponsePromise.fromPromise(this.__deleteObject(request, requestOptions)); } private async __deleteObject( - objectPath: string, - requestOptions?: Objects.RequestOptions, + request: Lattice.DeleteObjectRequest, + requestOptions?: ObjectsClient.RequestOptions, ): Promise> { + const { objectPath } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ Authorization: await this._getAuthorizationHeader() }), @@ -353,6 +359,8 @@ export class Objects { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: undefined, rawResponse: _response.rawResponse }; @@ -399,27 +407,30 @@ export class Objects { /** * Returns metadata for a specified object path. Use this to fetch metadata such as object size (size_bytes), its expiry time (expiry_time), or its latest update timestamp (last_updated_at). * - * @param {string} objectPath - The path of the object to query. - * @param {Objects.RequestOptions} requestOptions - Request-specific configuration. + * @param {Lattice.GetObjectMetadataRequest} request + * @param {ObjectsClient.RequestOptions} requestOptions - Request-specific configuration. 
* * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} * @throws {@link Lattice.InternalServerError} * * @example - * await client.objects.getObjectMetadata("objectPath") + * await client.objects.getObjectMetadata({ + * objectPath: "objectPath" + * }) */ public getObjectMetadata( - objectPath: string, - requestOptions?: Objects.RequestOptions, + request: Lattice.GetObjectMetadataRequest, + requestOptions?: ObjectsClient.RequestOptions, ): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise(this.__getObjectMetadata(objectPath, requestOptions)); + return core.HttpResponsePromise.fromPromise(this.__getObjectMetadata(request, requestOptions)); } private async __getObjectMetadata( - objectPath: string, - requestOptions?: Objects.RequestOptions, + request: Lattice.GetObjectMetadataRequest, + requestOptions?: ObjectsClient.RequestOptions, ): Promise> { + const { objectPath } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ Authorization: await this._getAuthorizationHeader() }), @@ -438,6 +449,8 @@ export class Objects { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.rawResponse.headers, rawResponse: _response.rawResponse }; diff --git a/src/api/resources/objects/client/requests/DeleteObjectRequest.ts b/src/api/resources/objects/client/requests/DeleteObjectRequest.ts new file mode 100644 index 0000000..2253171 --- /dev/null +++ b/src/api/resources/objects/client/requests/DeleteObjectRequest.ts @@ -0,0 +1,12 @@ +// This file was auto-generated by Fern from our API Definition. + +/** + * @example + * { + * objectPath: "objectPath" + * } + */ +export interface DeleteObjectRequest { + /** The path of the object to delete. */ + objectPath: string; +} diff --git a/src/api/resources/objects/client/requests/GetObjectMetadataRequest.ts b/src/api/resources/objects/client/requests/GetObjectMetadataRequest.ts new file mode 100644 index 0000000..5c9568d --- /dev/null +++ b/src/api/resources/objects/client/requests/GetObjectMetadataRequest.ts @@ -0,0 +1,12 @@ +// This file was auto-generated by Fern from our API Definition. + +/** + * @example + * { + * objectPath: "objectPath" + * } + */ +export interface GetObjectMetadataRequest { + /** The path of the object to query. */ + objectPath: string; +} diff --git a/src/api/resources/objects/client/requests/GetObjectRequest.ts b/src/api/resources/objects/client/requests/GetObjectRequest.ts index f38d03b..494ecaf 100644 --- a/src/api/resources/objects/client/requests/GetObjectRequest.ts +++ b/src/api/resources/objects/client/requests/GetObjectRequest.ts @@ -4,9 +4,13 @@ import type * as Lattice from "../../../../index.js"; /** * @example - * {} + * { + * objectPath: "objectPath" + * } */ export interface GetObjectRequest { + /** The path of the object to fetch. */ + objectPath: string; /** If set, Lattice will compress the response using the specified compression method. If the header is not defined, or the compression method is set to `identity`, no compression will be applied to the response. */ "Accept-Encoding"?: Lattice.GetObjectRequestAcceptEncoding; /** Indicates a client's preference for the priority of the response. 
The value is a structured header as defined in RFC 9218. If you do not set the header, Lattice uses the default priority set for the environment. Incremental delivery directives are not supported and will be ignored. */ diff --git a/src/api/resources/objects/client/requests/ListObjectsRequest.ts b/src/api/resources/objects/client/requests/ListObjectsRequest.ts index 44e96bd..99938c8 100644 --- a/src/api/resources/objects/client/requests/ListObjectsRequest.ts +++ b/src/api/resources/objects/client/requests/ListObjectsRequest.ts @@ -2,12 +2,7 @@ /** * @example - * { - * prefix: "prefix", - * sinceTimestamp: "2024-01-15T09:30:00Z", - * pageToken: "pageToken", - * allObjectsInMesh: true - * } + * {} */ export interface ListObjectsRequest { /** Filters the objects based on the specified prefix path. If no path is specified, all objects are returned. */ diff --git a/src/api/resources/objects/client/requests/index.ts b/src/api/resources/objects/client/requests/index.ts index e840f79..e77264d 100644 --- a/src/api/resources/objects/client/requests/index.ts +++ b/src/api/resources/objects/client/requests/index.ts @@ -1,2 +1,4 @@ +export type { DeleteObjectRequest } from "./DeleteObjectRequest.js"; +export type { GetObjectMetadataRequest } from "./GetObjectMetadataRequest.js"; export type { GetObjectRequest } from "./GetObjectRequest.js"; export type { ListObjectsRequest } from "./ListObjectsRequest.js"; diff --git a/src/api/resources/tasks/client/Client.ts b/src/api/resources/tasks/client/Client.ts index 07ea354..3f98a1d 100644 --- a/src/api/resources/tasks/client/Client.ts +++ b/src/api/resources/tasks/client/Client.ts @@ -1,13 +1,14 @@ // This file was auto-generated by Fern from our API Definition. import type { BaseClientOptions, BaseRequestOptions } from "../../../../BaseClient.js"; +import { normalizeClientOptions } from "../../../../BaseClient.js"; import { mergeHeaders, mergeOnlyDefinedHeaders } from "../../../../core/headers.js"; import * as core from "../../../../core/index.js"; import * as environments from "../../../../environments.js"; import * as errors from "../../../../errors/index.js"; import * as Lattice from "../../../index.js"; -export declare namespace Tasks { +export declare namespace TasksClient { export interface Options extends BaseClientOptions {} export interface RequestOptions extends BaseRequestOptions {} @@ -16,11 +17,11 @@ export declare namespace Tasks { /** * The Tasks API */ -export class Tasks { - protected readonly _options: Tasks.Options; +export class TasksClient { + protected readonly _options: TasksClient.Options; - constructor(_options: Tasks.Options = {}) { - this._options = _options; + constructor(options: TasksClient.Options = {}) { + this._options = normalizeClientOptions(options); } /** @@ -28,7 +29,7 @@ export class Tasks { * be asynchronously updated by their destined agent. * * @param {Lattice.TaskCreation} request - * @param {Tasks.RequestOptions} requestOptions - Request-specific configuration. + * @param {TasksClient.RequestOptions} requestOptions - Request-specific configuration. 
* * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} @@ -38,14 +39,14 @@ export class Tasks { */ public createTask( request: Lattice.TaskCreation = {}, - requestOptions?: Tasks.RequestOptions, + requestOptions?: TasksClient.RequestOptions, ): core.HttpResponsePromise { return core.HttpResponsePromise.fromPromise(this.__createTask(request, requestOptions)); } private async __createTask( request: Lattice.TaskCreation = {}, - requestOptions?: Tasks.RequestOptions, + requestOptions?: TasksClient.RequestOptions, ): Promise> { const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, @@ -68,6 +69,8 @@ export class Tasks { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.Task, rawResponse: _response.rawResponse }; @@ -106,24 +109,30 @@ export class Tasks { } /** - * @param {string} taskId - ID of task to return - * @param {Tasks.RequestOptions} requestOptions - Request-specific configuration. + * @param {Lattice.GetTaskRequest} request + * @param {TasksClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} * @throws {@link Lattice.NotFoundError} * * @example - * await client.tasks.getTask("taskId") + * await client.tasks.getTask({ + * taskId: "taskId" + * }) */ - public getTask(taskId: string, requestOptions?: Tasks.RequestOptions): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise(this.__getTask(taskId, requestOptions)); + public getTask( + request: Lattice.GetTaskRequest, + requestOptions?: TasksClient.RequestOptions, + ): core.HttpResponsePromise { + return core.HttpResponsePromise.fromPromise(this.__getTask(request, requestOptions)); } private async __getTask( - taskId: string, - requestOptions?: Tasks.RequestOptions, + request: Lattice.GetTaskRequest, + requestOptions?: TasksClient.RequestOptions, ): Promise> { + const { taskId } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ Authorization: await this._getAuthorizationHeader() }), @@ -142,6 +151,8 @@ export class Tasks { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.Task, rawResponse: _response.rawResponse }; @@ -184,30 +195,30 @@ export class Tasks { /** * Update the status of a task. * - * @param {string} taskId - ID of task to update status of * @param {Lattice.TaskStatusUpdate} request - * @param {Tasks.RequestOptions} requestOptions - Request-specific configuration. + * @param {TasksClient.RequestOptions} requestOptions - Request-specific configuration. 
* * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} * @throws {@link Lattice.NotFoundError} * * @example - * await client.tasks.updateTaskStatus("taskId") + * await client.tasks.updateTaskStatus({ + * taskId: "taskId" + * }) */ public updateTaskStatus( - taskId: string, - request: Lattice.TaskStatusUpdate = {}, - requestOptions?: Tasks.RequestOptions, + request: Lattice.TaskStatusUpdate, + requestOptions?: TasksClient.RequestOptions, ): core.HttpResponsePromise { - return core.HttpResponsePromise.fromPromise(this.__updateTaskStatus(taskId, request, requestOptions)); + return core.HttpResponsePromise.fromPromise(this.__updateTaskStatus(request, requestOptions)); } private async __updateTaskStatus( - taskId: string, - request: Lattice.TaskStatusUpdate = {}, - requestOptions?: Tasks.RequestOptions, + request: Lattice.TaskStatusUpdate, + requestOptions?: TasksClient.RequestOptions, ): Promise> { + const { taskId, ..._body } = request; const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, mergeOnlyDefinedHeaders({ Authorization: await this._getAuthorizationHeader() }), @@ -225,10 +236,12 @@ export class Tasks { contentType: "application/json", queryParameters: requestOptions?.queryParams, requestType: "json", - body: request, + body: _body, timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.Task, rawResponse: _response.rawResponse }; @@ -274,7 +287,7 @@ export class Tasks { * Query for tasks by a specified search criteria. * * @param {Lattice.TaskQuery} request - * @param {Tasks.RequestOptions} requestOptions - Request-specific configuration. + * @param {TasksClient.RequestOptions} requestOptions - Request-specific configuration. * * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} @@ -285,14 +298,14 @@ export class Tasks { */ public queryTasks( request: Lattice.TaskQuery = {}, - requestOptions?: Tasks.RequestOptions, + requestOptions?: TasksClient.RequestOptions, ): core.HttpResponsePromise { return core.HttpResponsePromise.fromPromise(this.__queryTasks(request, requestOptions)); } private async __queryTasks( request: Lattice.TaskQuery = {}, - requestOptions?: Tasks.RequestOptions, + requestOptions?: TasksClient.RequestOptions, ): Promise> { const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, @@ -315,6 +328,8 @@ export class Tasks { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.TaskQueryResults, rawResponse: _response.rawResponse }; @@ -360,7 +375,7 @@ export class Tasks { * period you will be expected to reinitiate a new request. * * @param {Lattice.AgentListener} request - * @param {Tasks.RequestOptions} requestOptions - Request-specific configuration. + * @param {TasksClient.RequestOptions} requestOptions - Request-specific configuration. 
* * @throws {@link Lattice.BadRequestError} * @throws {@link Lattice.UnauthorizedError} @@ -370,14 +385,14 @@ export class Tasks { */ public listenAsAgent( request: Lattice.AgentListener = {}, - requestOptions?: Tasks.RequestOptions, + requestOptions?: TasksClient.RequestOptions, ): core.HttpResponsePromise { return core.HttpResponsePromise.fromPromise(this.__listenAsAgent(request, requestOptions)); } private async __listenAsAgent( request: Lattice.AgentListener = {}, - requestOptions?: Tasks.RequestOptions, + requestOptions?: TasksClient.RequestOptions, ): Promise> { const _headers: core.Fetcher.Args["headers"] = mergeHeaders( this._options?.headers, @@ -400,6 +415,8 @@ export class Tasks { timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, }); if (_response.ok) { return { data: _response.body as Lattice.AgentRequest, rawResponse: _response.rawResponse }; diff --git a/src/api/resources/tasks/client/requests/GetTaskRequest.ts b/src/api/resources/tasks/client/requests/GetTaskRequest.ts new file mode 100644 index 0000000..b4ad16d --- /dev/null +++ b/src/api/resources/tasks/client/requests/GetTaskRequest.ts @@ -0,0 +1,12 @@ +// This file was auto-generated by Fern from our API Definition. + +/** + * @example + * { + * taskId: "taskId" + * } + */ +export interface GetTaskRequest { + /** ID of task to return */ + taskId: string; +} diff --git a/src/api/resources/tasks/client/requests/TaskStatusUpdate.ts b/src/api/resources/tasks/client/requests/TaskStatusUpdate.ts index 663bea5..acf34de 100644 --- a/src/api/resources/tasks/client/requests/TaskStatusUpdate.ts +++ b/src/api/resources/tasks/client/requests/TaskStatusUpdate.ts @@ -4,9 +4,13 @@ import type * as Lattice from "../../../../index.js"; /** * @example - * {} + * { + * taskId: "taskId" + * } */ export interface TaskStatusUpdate { + /** ID of task to update status of */ + taskId: string; /** * The status version of the task to update. This version number increments to indicate the task's * current stage in its status lifecycle. 
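> Note on the client changes above: `getTask` and `updateTaskStatus` no longer take `taskId` as a positional argument; the ID now lives on the generated `GetTaskRequest` and `TaskStatusUpdate` request types and is destructured out before the HTTP call. A minimal usage sketch, assuming the package's `Lattice` type namespace export and an already-configured client instance (only the call shapes are taken from the `@example` blocks in this diff; the helper function is illustrative):

```typescript
import type { Lattice } from "@anduril-industries/lattice-sdk";

// `client` stands in for an already-constructed SDK client; it is typed as
// `any` here only to keep the sketch self-contained.
async function readThenUpdate(client: any): Promise<Lattice.Task> {
    // Before this change: await client.tasks.getTask("taskId")
    const task: Lattice.Task = await client.tasks.getTask({ taskId: "taskId" });
    console.log(task);

    // Before this change: await client.tasks.updateTaskStatus("taskId", { ... })
    // taskId is now a field on Lattice.TaskStatusUpdate; the generated client
    // strips it from the JSON body (`const { taskId, ..._body } = request`).
    return await client.tasks.updateTaskStatus({ taskId: "taskId" });
}
```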
Specifically, whenever a task's status updates, the status diff --git a/src/api/resources/tasks/client/requests/index.ts b/src/api/resources/tasks/client/requests/index.ts index c902bd2..df11781 100644 --- a/src/api/resources/tasks/client/requests/index.ts +++ b/src/api/resources/tasks/client/requests/index.ts @@ -1,4 +1,5 @@ export type { AgentListener } from "./AgentListener.js"; +export type { GetTaskRequest } from "./GetTaskRequest.js"; export type { TaskCreation } from "./TaskCreation.js"; export type { TaskQuery } from "./TaskQuery.js"; export type { TaskStatusUpdate } from "./TaskStatusUpdate.js"; diff --git a/src/core/exports.ts b/src/core/exports.ts index e415a8f..c21f056 100644 --- a/src/core/exports.ts +++ b/src/core/exports.ts @@ -1 +1,3 @@ export * from "./file/exports.js"; +export * from "./logging/exports.js"; +export * from "./pagination/exports.js"; diff --git a/src/core/fetcher/Fetcher.ts b/src/core/fetcher/Fetcher.ts index 202e134..fedbcd8 100644 --- a/src/core/fetcher/Fetcher.ts +++ b/src/core/fetcher/Fetcher.ts @@ -1,4 +1,5 @@ import { toJson } from "../json.js"; +import { createLogger, type LogConfig, type Logger } from "../logging/logger.js"; import type { APIResponse } from "./APIResponse.js"; import { createRequestUrl } from "./createRequestUrl.js"; import type { EndpointMetadata } from "./EndpointMetadata.js"; @@ -7,6 +8,7 @@ import { getErrorResponseBody } from "./getErrorResponseBody.js"; import { getFetchFn } from "./getFetchFn.js"; import { getRequestBody } from "./getRequestBody.js"; import { getResponseBody } from "./getResponseBody.js"; +import { Headers } from "./Headers.js"; import { makeRequest } from "./makeRequest.js"; import { abortRawResponse, toRawResponse, unknownRawResponse } from "./RawResponse.js"; import { requestWithRetries } from "./requestWithRetries.js"; @@ -25,10 +27,12 @@ export declare namespace Fetcher { maxRetries?: number; withCredentials?: boolean; abortSignal?: AbortSignal; - requestType?: "json" | "file" | "bytes"; + requestType?: "json" | "file" | "bytes" | "form" | "other"; responseType?: "json" | "blob" | "sse" | "streaming" | "text" | "arrayBuffer" | "binary-response"; duplex?: "half"; endpointMetadata?: EndpointMetadata; + fetchFn?: typeof fetch; + logging?: LogConfig | Logger; } export type Error = FailedStatusCodeError | NonJsonError | TimeoutError | UnknownError; @@ -55,10 +59,164 @@ export declare namespace Fetcher { } } -async function getHeaders(args: Fetcher.Args): Promise> { - const newHeaders: Record = {}; +const SENSITIVE_HEADERS = new Set([ + "authorization", + "www-authenticate", + "x-api-key", + "api-key", + "apikey", + "x-api-token", + "x-auth-token", + "auth-token", + "cookie", + "set-cookie", + "proxy-authorization", + "proxy-authenticate", + "x-csrf-token", + "x-xsrf-token", + "x-session-token", + "x-access-token", +]); + +function redactHeaders(headers: Headers | Record): Record { + const filtered: Record = {}; + for (const [key, value] of headers instanceof Headers ? 
headers.entries() : Object.entries(headers)) { + if (SENSITIVE_HEADERS.has(key.toLowerCase())) { + filtered[key] = "[REDACTED]"; + } else { + filtered[key] = value; + } + } + return filtered; +} + +const SENSITIVE_QUERY_PARAMS = new Set([ + "api_key", + "api-key", + "apikey", + "token", + "access_token", + "access-token", + "auth_token", + "auth-token", + "password", + "passwd", + "secret", + "api_secret", + "api-secret", + "apisecret", + "key", + "session", + "session_id", + "session-id", +]); + +function redactQueryParameters(queryParameters?: Record): Record | undefined { + if (queryParameters == null) { + return queryParameters; + } + const redacted: Record = {}; + for (const [key, value] of Object.entries(queryParameters)) { + if (SENSITIVE_QUERY_PARAMS.has(key.toLowerCase())) { + redacted[key] = "[REDACTED]"; + } else { + redacted[key] = value; + } + } + return redacted; +} + +function redactUrl(url: string): string { + const protocolIndex = url.indexOf("://"); + if (protocolIndex === -1) return url; + + const afterProtocol = protocolIndex + 3; + + // Find the first delimiter that marks the end of the authority section + const pathStart = url.indexOf("/", afterProtocol); + let queryStart = url.indexOf("?", afterProtocol); + let fragmentStart = url.indexOf("#", afterProtocol); + + const firstDelimiter = Math.min( + pathStart === -1 ? url.length : pathStart, + queryStart === -1 ? url.length : queryStart, + fragmentStart === -1 ? url.length : fragmentStart, + ); + + // Find the LAST @ before the delimiter (handles multiple @ in credentials) + let atIndex = -1; + for (let i = afterProtocol; i < firstDelimiter; i++) { + if (url[i] === "@") { + atIndex = i; + } + } + + if (atIndex !== -1) { + url = `${url.slice(0, afterProtocol)}[REDACTED]@${url.slice(atIndex + 1)}`; + } + + // Recalculate queryStart since url might have changed + queryStart = url.indexOf("?"); + if (queryStart === -1) return url; + + fragmentStart = url.indexOf("#", queryStart); + const queryEnd = fragmentStart !== -1 ? fragmentStart : url.length; + const queryString = url.slice(queryStart + 1, queryEnd); + + if (queryString.length === 0) return url; + + // FAST PATH: Quick check if any sensitive keywords present + // Using indexOf is faster than regex for simple substring matching + const lower = queryString.toLowerCase(); + const hasSensitive = + lower.includes("token") || + lower.includes("key") || + lower.includes("password") || + lower.includes("passwd") || + lower.includes("secret") || + lower.includes("session") || + lower.includes("auth"); + + if (!hasSensitive) { + return url; + } + + // SLOW PATH: Parse and redact + const redactedParams: string[] = []; + const params = queryString.split("&"); + + for (const param of params) { + const equalIndex = param.indexOf("="); + if (equalIndex === -1) { + redactedParams.push(param); + continue; + } + + const key = param.slice(0, equalIndex); + let shouldRedact = SENSITIVE_QUERY_PARAMS.has(key.toLowerCase()); + + if (!shouldRedact && key.includes("%")) { + try { + const decodedKey = decodeURIComponent(key); + shouldRedact = SENSITIVE_QUERY_PARAMS.has(decodedKey.toLowerCase()); + } catch {} + } + + redactedParams.push(shouldRedact ? `${key}=[REDACTED]` : param); + } + + return url.slice(0, queryStart + 1) + redactedParams.join("&") + url.slice(queryEnd); +} + +async function getHeaders(args: Fetcher.Args): Promise { + const newHeaders: Headers = new Headers(); + + newHeaders.set( + "Accept", + args.responseType === "json" ? "application/json" : args.responseType === "text" ? 
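> The block above adds log-time redaction for sensitive header names, query-parameter names, URL userinfo, and URL query strings before anything is handed to the logger. These helpers are internal to the fetcher; the standalone sketch below only illustrates the header rule they apply (case-insensitive name match, value replaced with `[REDACTED]`), not the SDK's own functions:

```typescript
// Standalone illustration of the redaction rule: header names are compared
// case-insensitively against a deny-list and their values replaced with
// "[REDACTED]"; everything else passes through untouched.
const SENSITIVE = new Set(["authorization", "cookie", "x-api-key"]);

function redactForLogging(headers: Record<string, string>): Record<string, string> {
    const out: Record<string, string> = {};
    for (const [key, value] of Object.entries(headers)) {
        out[key] = SENSITIVE.has(key.toLowerCase()) ? "[REDACTED]" : value;
    }
    return out;
}

// { Authorization: "[REDACTED]", Accept: "application/json" }
console.log(redactForLogging({ Authorization: "Bearer abc123", Accept: "application/json" }));

// For URLs, redactUrl above applies the same idea to userinfo and query keys, e.g.
// "https://user:pass@host/v1?token=abc" -> "https://[REDACTED]@host/v1?token=[REDACTED]"
```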
"text/plain" : "*/*", + ); if (args.body !== undefined && args.contentType != null) { - newHeaders["Content-Type"] = args.contentType; + newHeaders.set("Content-Type", args.contentType); } if (args.headers == null) { @@ -68,13 +226,13 @@ async function getHeaders(args: Fetcher.Args): Promise> { for (const [key, value] of Object.entries(args.headers)) { const result = await EndpointSupplier.get(value, { endpointMetadata: args.endpointMetadata ?? {} }); if (typeof result === "string") { - newHeaders[key] = result; + newHeaders.set(key, result); continue; } if (result == null) { continue; } - newHeaders[key] = `${result}`; + newHeaders.set(key, `${result}`); } return newHeaders; } @@ -83,9 +241,22 @@ export async function fetcherImpl(args: Fetcher.Args): Promise(args: Fetcher.Args): Promise(args: Fetcher.Args): Promise= 200 && response.status < 400) { + if (logger.isDebug()) { + const metadata = { + method: args.method, + url: redactUrl(url), + statusCode: response.status, + responseHeaders: redactHeaders(response.headers), + }; + logger.debug("HTTP request succeeded", metadata); + } return { ok: true, body: (await getResponseBody(response, args.responseType)) as R, @@ -112,6 +292,15 @@ export async function fetcherImpl(args: Fetcher.Args): Promise(args: Fetcher.Args): Promise(args: Fetcher.Args): Promise(args: Fetcher.Args): Promise(args: Fetcher.Args): Promise { + if (type === "form") { + return toQueryString(body, { arrayFormat: "repeat", encode: true }); + } if (type.includes("json")) { return toJson(body); } else { diff --git a/src/core/fetcher/makeRequest.ts b/src/core/fetcher/makeRequest.ts index 5edce69..c8d3f2e 100644 --- a/src/core/fetcher/makeRequest.ts +++ b/src/core/fetcher/makeRequest.ts @@ -4,7 +4,7 @@ export const makeRequest = async ( fetchFn: (url: string, init: RequestInit) => Promise, url: string, method: string, - headers: Record, + headers: Headers | Record, requestBody: BodyInit | undefined, timeoutMs?: number, abortSignal?: AbortSignal, @@ -13,7 +13,6 @@ export const makeRequest = async ( ): Promise => { const signals: AbortSignal[] = []; - // Add timeout signal let timeoutAbortId: NodeJS.Timeout | undefined; if (timeoutMs != null) { const { signal, abortId } = getTimeoutSignal(timeoutMs); @@ -21,7 +20,6 @@ export const makeRequest = async ( signals.push(signal); } - // Add arbitrary signal if (abortSignal != null) { signals.push(abortSignal); } diff --git a/src/core/fetcher/requestWithRetries.ts b/src/core/fetcher/requestWithRetries.ts index 3d30bd1..1f68968 100644 --- a/src/core/fetcher/requestWithRetries.ts +++ b/src/core/fetcher/requestWithRetries.ts @@ -4,28 +4,23 @@ const DEFAULT_MAX_RETRIES = 2; const JITTER_FACTOR = 0.2; // 20% random jitter function addPositiveJitter(delay: number): number { - // Generate a random value between 0 and +JITTER_FACTOR const jitterMultiplier = 1 + Math.random() * JITTER_FACTOR; return delay * jitterMultiplier; } function addSymmetricJitter(delay: number): number { - // Generate a random value in a JITTER_FACTOR-sized percentage range around delay const jitterMultiplier = 1 + (Math.random() - 0.5) * JITTER_FACTOR; return delay * jitterMultiplier; } function getRetryDelayFromHeaders(response: Response, retryAttempt: number): number { - // Check for Retry-After header first (RFC 7231), with no jitter const retryAfter = response.headers.get("Retry-After"); if (retryAfter) { - // Parse as number of seconds... 
const retryAfterSeconds = parseInt(retryAfter, 10); if (!Number.isNaN(retryAfterSeconds) && retryAfterSeconds > 0) { return Math.min(retryAfterSeconds * 1000, MAX_RETRY_DELAY); } - // ...or as an HTTP date; both are valid const retryAfterDate = new Date(retryAfter); if (!Number.isNaN(retryAfterDate.getTime())) { const delay = retryAfterDate.getTime() - Date.now(); @@ -35,12 +30,10 @@ function getRetryDelayFromHeaders(response: Response, retryAttempt: number): num } } - // Then check for industry-standard X-RateLimit-Reset header, with positive jitter const rateLimitReset = response.headers.get("X-RateLimit-Reset"); if (rateLimitReset) { const resetTime = parseInt(rateLimitReset, 10); if (!Number.isNaN(resetTime)) { - // Assume Unix timestamp in epoch seconds const delay = resetTime * 1000 - Date.now(); if (delay > 0) { return addPositiveJitter(Math.min(delay, MAX_RETRY_DELAY)); @@ -48,7 +41,6 @@ function getRetryDelayFromHeaders(response: Response, retryAttempt: number): num } } - // Fall back to exponential backoff, with symmetric jitter return addSymmetricJitter(Math.min(INITIAL_RETRY_DELAY * 2 ** retryAttempt, MAX_RETRY_DELAY)); } @@ -60,7 +52,6 @@ export async function requestWithRetries( for (let i = 0; i < maxRetries; ++i) { if ([408, 429].includes(response.status) || response.status >= 500) { - // Get delay with appropriate jitter applied const delay = getRetryDelayFromHeaders(response, i); await new Promise((resolve) => setTimeout(resolve, delay)); diff --git a/src/core/fetcher/signals.ts b/src/core/fetcher/signals.ts index a8d32a2..c9fcaef 100644 --- a/src/core/fetcher/signals.ts +++ b/src/core/fetcher/signals.ts @@ -6,29 +6,17 @@ export function getTimeoutSignal(timeoutMs: number): { signal: AbortSignal; abor return { signal: controller.signal, abortId }; } -/** - * Returns an abort signal that is getting aborted when - * at least one of the specified abort signals is aborted. - * - * Requires at least node.js 18. - */ export function anySignal(...args: AbortSignal[] | [AbortSignal[]]): AbortSignal { - // Allowing signals to be passed either as array - // of signals or as multiple arguments. const signals = (args.length === 1 && Array.isArray(args[0]) ? args[0] : args) as AbortSignal[]; const controller = new AbortController(); for (const signal of signals) { if (signal.aborted) { - // Exiting early if one of the signals - // is already aborted. controller.abort((signal as any)?.reason); break; } - // Listening for signals and removing the listeners - // when at least one symbol is aborted. 
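> The hunk above strips the explanatory comments from `getRetryDelayFromHeaders`, so for reference: the delay comes from `Retry-After` (seconds or an HTTP date, no jitter) when present, otherwise from `X-RateLimit-Reset` (epoch seconds, positive jitter), and otherwise falls back to exponential backoff with symmetric jitter, always capped at `MAX_RETRY_DELAY`. A self-contained sketch of that precedence; only `DEFAULT_MAX_RETRIES` (2) and `JITTER_FACTOR` (0.2) are visible in this hunk, so the two delay constants below are placeholders:

```typescript
const INITIAL_RETRY_DELAY_MS = 1000; // placeholder value
const MAX_RETRY_DELAY_MS = 30_000; // placeholder value
const JITTER_FACTOR = 0.2;

function illustrativeRetryDelay(response: Response, retryAttempt: number): number {
    // 1. Retry-After in seconds (the HTTP-date form is omitted here), no jitter.
    const retryAfter = response.headers.get("Retry-After");
    if (retryAfter != null) {
        const seconds = parseInt(retryAfter, 10);
        if (!Number.isNaN(seconds) && seconds > 0) {
            return Math.min(seconds * 1000, MAX_RETRY_DELAY_MS);
        }
    }

    // 2. X-RateLimit-Reset as a Unix timestamp, with up to +20% positive jitter.
    const reset = response.headers.get("X-RateLimit-Reset");
    if (reset != null) {
        const delay = parseInt(reset, 10) * 1000 - Date.now();
        if (!Number.isNaN(delay) && delay > 0) {
            return Math.min(delay, MAX_RETRY_DELAY_MS) * (1 + Math.random() * JITTER_FACTOR);
        }
    }

    // 3. Exponential backoff with symmetric +/-10% jitter, capped at the max delay.
    const backoff = Math.min(INITIAL_RETRY_DELAY_MS * 2 ** retryAttempt, MAX_RETRY_DELAY_MS);
    return backoff * (1 + (Math.random() - 0.5) * JITTER_FACTOR);
}
```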
signal.addEventListener("abort", () => controller.abort((signal as any)?.reason), { signal: controller.signal, }); diff --git a/src/core/headers.ts b/src/core/headers.ts index a723d22..78ed8b5 100644 --- a/src/core/headers.ts +++ b/src/core/headers.ts @@ -6,10 +6,11 @@ export function mergeHeaders( for (const [key, value] of headersArray .filter((headers) => headers != null) .flatMap((headers) => Object.entries(headers))) { + const insensitiveKey = key.toLowerCase(); if (value != null) { - result[key] = value; - } else if (key in result) { - delete result[key]; + result[insensitiveKey] = value; + } else if (insensitiveKey in result) { + delete result[insensitiveKey]; } } @@ -24,8 +25,9 @@ export function mergeOnlyDefinedHeaders( for (const [key, value] of headersArray .filter((headers) => headers != null) .flatMap((headers) => Object.entries(headers))) { + const insensitiveKey = key.toLowerCase(); if (value != null) { - result[key] = value; + result[insensitiveKey] = value; } } diff --git a/src/core/index.ts b/src/core/index.ts index e838642..e000452 100644 --- a/src/core/index.ts +++ b/src/core/index.ts @@ -2,6 +2,7 @@ export * from "./auth/index.js"; export * from "./base64.js"; export * from "./fetcher/index.js"; export * as file from "./file/index.js"; +export * as logging from "./logging/index.js"; export * from "./pagination/index.js"; export * from "./runtime/index.js"; export * from "./stream/index.js"; diff --git a/src/core/logging/exports.ts b/src/core/logging/exports.ts new file mode 100644 index 0000000..88f6c00 --- /dev/null +++ b/src/core/logging/exports.ts @@ -0,0 +1,19 @@ +import * as logger from "./logger.js"; + +export namespace logging { + /** + * Configuration for logger instances. + */ + export type LogConfig = logger.LogConfig; + export type LogLevel = logger.LogLevel; + export const LogLevel: typeof logger.LogLevel = logger.LogLevel; + export type ILogger = logger.ILogger; + /** + * Console logger implementation that outputs to the console. + */ + export type ConsoleLogger = logger.ConsoleLogger; + /** + * Console logger implementation that outputs to the console. + */ + export const ConsoleLogger: typeof logger.ConsoleLogger = logger.ConsoleLogger; +} diff --git a/src/core/logging/index.ts b/src/core/logging/index.ts new file mode 100644 index 0000000..d81cc32 --- /dev/null +++ b/src/core/logging/index.ts @@ -0,0 +1 @@ +export * from "./logger.js"; diff --git a/src/core/logging/logger.ts b/src/core/logging/logger.ts new file mode 100644 index 0000000..a3f3673 --- /dev/null +++ b/src/core/logging/logger.ts @@ -0,0 +1,203 @@ +export const LogLevel = { + Debug: "debug", + Info: "info", + Warn: "warn", + Error: "error", +} as const; +export type LogLevel = (typeof LogLevel)[keyof typeof LogLevel]; +const logLevelMap: Record = { + [LogLevel.Debug]: 1, + [LogLevel.Info]: 2, + [LogLevel.Warn]: 3, + [LogLevel.Error]: 4, +}; + +export interface ILogger { + /** + * Logs a debug message. + * @param message - The message to log + * @param args - Additional arguments to log + */ + debug(message: string, ...args: unknown[]): void; + /** + * Logs an info message. + * @param message - The message to log + * @param args - Additional arguments to log + */ + info(message: string, ...args: unknown[]): void; + /** + * Logs a warning message. + * @param message - The message to log + * @param args - Additional arguments to log + */ + warn(message: string, ...args: unknown[]): void; + /** + * Logs an error message. 
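> `mergeHeaders` and `mergeOnlyDefinedHeaders` above now lower-case keys before merging, so per-request overrides replace defaults regardless of casing, and a `null` value removes the header under any casing. A standalone sketch of the merge rule (the real helpers live in `src/core/headers.ts`):

```typescript
// Mirrors the new case-insensitive merge semantics; not the SDK's own export.
function mergeHeadersSketch(
    ...headersArray: (Record<string, string | null | undefined> | undefined)[]
): Record<string, string> {
    const result: Record<string, string> = {};
    for (const headers of headersArray) {
        if (headers == null) continue;
        for (const [key, value] of Object.entries(headers)) {
            const insensitiveKey = key.toLowerCase();
            if (value != null) {
                result[insensitiveKey] = value;
            } else if (insensitiveKey in result) {
                delete result[insensitiveKey];
            }
        }
    }
    return result;
}

// { authorization: "Bearer override", accept: "application/json" }
console.log(
    mergeHeadersSketch(
        { Authorization: "Bearer default", Accept: "application/json" },
        { authorization: "Bearer override" },
    ),
);
```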
+ * @param message - The message to log + * @param args - Additional arguments to log + */ + error(message: string, ...args: unknown[]): void; +} + +/** + * Configuration for logger initialization. + */ +export interface LogConfig { + /** + * Minimum log level to output. + * @default LogLevel.Info + */ + level?: LogLevel; + /** + * Logger implementation to use. + * @default new ConsoleLogger() + */ + logger?: ILogger; + /** + * Whether logging should be silenced. + * @default true + */ + silent?: boolean; +} + +/** + * Default console-based logger implementation. + */ +export class ConsoleLogger implements ILogger { + debug(message: string, ...args: unknown[]): void { + console.debug(message, ...args); + } + info(message: string, ...args: unknown[]): void { + console.info(message, ...args); + } + warn(message: string, ...args: unknown[]): void { + console.warn(message, ...args); + } + error(message: string, ...args: unknown[]): void { + console.error(message, ...args); + } +} + +/** + * Logger class that provides level-based logging functionality. + */ +export class Logger { + private readonly level: number; + private readonly logger: ILogger; + private readonly silent: boolean; + + /** + * Creates a new logger instance. + * @param config - Logger configuration + */ + constructor(config: Required) { + this.level = logLevelMap[config.level]; + this.logger = config.logger; + this.silent = config.silent; + } + + /** + * Checks if a log level should be output based on configuration. + * @param level - The log level to check + * @returns True if the level should be logged + */ + public shouldLog(level: LogLevel): boolean { + return !this.silent && this.level <= logLevelMap[level]; + } + + /** + * Checks if debug logging is enabled. + * @returns True if debug logs should be output + */ + public isDebug(): boolean { + return this.shouldLog(LogLevel.Debug); + } + + /** + * Logs a debug message if debug logging is enabled. + * @param message - The message to log + * @param args - Additional arguments to log + */ + public debug(message: string, ...args: unknown[]): void { + if (this.isDebug()) { + this.logger.debug(message, ...args); + } + } + + /** + * Checks if info logging is enabled. + * @returns True if info logs should be output + */ + public isInfo(): boolean { + return this.shouldLog(LogLevel.Info); + } + + /** + * Logs an info message if info logging is enabled. + * @param message - The message to log + * @param args - Additional arguments to log + */ + public info(message: string, ...args: unknown[]): void { + if (this.isInfo()) { + this.logger.info(message, ...args); + } + } + + /** + * Checks if warning logging is enabled. + * @returns True if warning logs should be output + */ + public isWarn(): boolean { + return this.shouldLog(LogLevel.Warn); + } + + /** + * Logs a warning message if warning logging is enabled. + * @param message - The message to log + * @param args - Additional arguments to log + */ + public warn(message: string, ...args: unknown[]): void { + if (this.isWarn()) { + this.logger.warn(message, ...args); + } + } + + /** + * Checks if error logging is enabled. + * @returns True if error logs should be output + */ + public isError(): boolean { + return this.shouldLog(LogLevel.Error); + } + + /** + * Logs an error message if error logging is enabled. 
+ * @param message - The message to log + * @param args - Additional arguments to log + */ + public error(message: string, ...args: unknown[]): void { + if (this.isError()) { + this.logger.error(message, ...args); + } + } +} + +export function createLogger(config?: LogConfig | Logger): Logger { + if (config == null) { + return defaultLogger; + } + if (config instanceof Logger) { + return config; + } + config = config ?? {}; + config.level ??= LogLevel.Info; + config.logger ??= new ConsoleLogger(); + config.silent ??= true; + return new Logger(config as Required); +} + +const defaultLogger: Logger = new Logger({ + level: LogLevel.Info, + logger: new ConsoleLogger(), + silent: true, +}); diff --git a/src/core/pagination/Page.ts b/src/core/pagination/Page.ts index 1aa08e5..6621a6f 100644 --- a/src/core/pagination/Page.ts +++ b/src/core/pagination/Page.ts @@ -4,15 +4,16 @@ import type { HttpResponsePromise, RawResponse } from "../fetcher/index.js"; * A page of results from a paginated API. * * @template T The type of the items in the page. + * @template R The type of the API response. */ -export class Page implements AsyncIterable { +export class Page implements AsyncIterable { public data: T[]; public rawResponse: RawResponse; + public response: R; - private response: unknown; - private _hasNextPage: (response: unknown) => boolean; - private getItems: (response: unknown) => T[]; - private loadNextPage: (response: unknown) => HttpResponsePromise; + private _hasNextPage: (response: R) => boolean; + private getItems: (response: R) => T[]; + private loadNextPage: (response: R) => HttpResponsePromise; constructor({ response, @@ -21,11 +22,11 @@ export class Page implements AsyncIterable { getItems, loadPage, }: { - response: unknown; + response: R; rawResponse: RawResponse; - hasNextPage: (response: unknown) => boolean; - getItems: (response: unknown) => T[]; - loadPage: (response: unknown) => HttpResponsePromise; + hasNextPage: (response: R) => boolean; + getItems: (response: R) => T[]; + loadPage: (response: R) => HttpResponsePromise; }) { this.response = response; this.rawResponse = rawResponse; diff --git a/src/core/pagination/Pageable.ts b/src/core/pagination/Pageable.ts deleted file mode 100644 index 5689e1e..0000000 --- a/src/core/pagination/Pageable.ts +++ /dev/null @@ -1,18 +0,0 @@ -import type { RawResponse } from "../fetcher/index.js"; -import { Page } from "./Page.js"; - -export declare namespace Pageable { - interface Args { - response: Response; - rawResponse: RawResponse; - hasNextPage: (response: Response) => boolean; - getItems: (response: Response) => Item[]; - loadPage: (response: Response) => Promise; - } -} - -export class Pageable extends Page { - constructor(args: Pageable.Args) { - super(args as any); - } -} diff --git a/src/core/pagination/exports.ts b/src/core/pagination/exports.ts new file mode 100644 index 0000000..d3acc60 --- /dev/null +++ b/src/core/pagination/exports.ts @@ -0,0 +1 @@ +export type { Page } from "./Page.js"; diff --git a/src/core/pagination/index.ts b/src/core/pagination/index.ts index b0cd68f..7781cbd 100644 --- a/src/core/pagination/index.ts +++ b/src/core/pagination/index.ts @@ -1,2 +1 @@ export { Page } from "./Page.js"; -export { Pageable } from "./Pageable.js"; diff --git a/src/core/stream/Stream.ts b/src/core/stream/Stream.ts index e41f05a..4d4b97f 100644 --- a/src/core/stream/Stream.ts +++ b/src/core/stream/Stream.ts @@ -43,6 +43,7 @@ export class Stream implements AsyncIterable { private messageTerminator: string; private streamTerminator: 
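> With the logger in place, the generated clients forward `this._options.logging` (a `LogConfig` or `Logger`) into every fetch call, and `createLogger` defaults `silent` to `true`, so a config has to set `silent: false` (and usually a level) before anything is emitted. Below is a sketch of a custom logger plus a matching config object; the interface shape mirrors `ILogger` from this diff, while exposing the config under a `logging` option on the client constructor is an assumption inferred from the `logging: this._options.logging` lines above:

```typescript
// Mirrors the ILogger interface from src/core/logging/logger.ts.
interface SketchLogger {
    debug(message: string, ...args: unknown[]): void;
    info(message: string, ...args: unknown[]): void;
    warn(message: string, ...args: unknown[]): void;
    error(message: string, ...args: unknown[]): void;
}

const prefixedLogger: SketchLogger = {
    debug: (msg, ...args) => console.debug(`[lattice-sdk] ${msg}`, ...args),
    info: (msg, ...args) => console.info(`[lattice-sdk] ${msg}`, ...args),
    warn: (msg, ...args) => console.warn(`[lattice-sdk] ${msg}`, ...args),
    error: (msg, ...args) => console.error(`[lattice-sdk] ${msg}`, ...args),
};

// LogLevel values are the string literals "debug" | "info" | "warn" | "error";
// silent: false is required because logging is silenced by default.
const logConfig = { level: "debug" as const, logger: prefixedLogger, silent: false };
console.log(logConfig);
```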
string | undefined; private controller: AbortController = new AbortController(); + private decoder: TextDecoder | undefined; constructor({ stream, parse, eventShape, signal }: Stream.Args & { parse: (val: unknown) => Promise }) { this.stream = stream; @@ -55,6 +56,11 @@ export class Stream implements AsyncIterable { this.messageTerminator = eventShape.messageTerminator; } signal?.addEventListener("abort", () => this.controller.abort()); + + // Initialize shared TextDecoder + if (typeof TextDecoder !== "undefined") { + this.decoder = new TextDecoder("utf-8"); + } } private async *iterMessages(): AsyncGenerator { @@ -67,7 +73,7 @@ export class Stream implements AsyncIterable { let terminatorIndex: number; while ((terminatorIndex = buf.indexOf(this.messageTerminator)) >= 0) { - let line = buf.slice(0, terminatorIndex + 1); + let line = buf.slice(0, terminatorIndex); buf = buf.slice(terminatorIndex + this.messageTerminator.length); if (!line.trim()) { @@ -101,10 +107,9 @@ export class Stream implements AsyncIterable { private decodeChunk(chunk: any): string { let decoded = ""; - // If TextDecoder is present, use it - if (typeof TextDecoder !== "undefined") { - const decoder = new TextDecoder("utf8"); - decoded += decoder.decode(chunk); + // If TextDecoder is available, use the streaming decoder instance + if (this.decoder != null) { + decoded += this.decoder.decode(chunk, { stream: true }); } // Buffer is present in Node.js environment else if (RUNTIME.type === "node" && typeof chunk !== "undefined") { diff --git a/src/core/url/join.ts b/src/core/url/join.ts index b872a90..7ca7dae 100644 --- a/src/core/url/join.ts +++ b/src/core/url/join.ts @@ -12,7 +12,6 @@ export function join(base: string, ...segments: string[]): string { try { url = new URL(base); } catch { - // Fallback to path joining if URL is malformed return joinPath(base, ...segments); } diff --git a/src/version.ts b/src/version.ts index b54c0db..68e5be6 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1 +1 @@ -export const SDK_VERSION = "2.5.0"; +export const SDK_VERSION = "3.0.1"; diff --git a/tests/mock-server/mockEndpointBuilder.ts b/tests/mock-server/mockEndpointBuilder.ts index 18557ec..1b0e510 100644 --- a/tests/mock-server/mockEndpointBuilder.ts +++ b/tests/mock-server/mockEndpointBuilder.ts @@ -2,6 +2,7 @@ import { type DefaultBodyType, type HttpHandler, HttpResponse, type HttpResponse import { url } from "../../src/core"; import { toJson } from "../../src/core/json"; +import { withFormUrlEncoded } from "./withFormUrlEncoded"; import { withHeaders } from "./withHeaders"; import { withJson } from "./withJson"; @@ -26,6 +27,7 @@ interface RequestHeadersStage extends RequestBodyStage, ResponseStage { interface RequestBodyStage extends ResponseStage { jsonBody(body: unknown): ResponseStage; + formUrlEncodedBody(body: unknown): ResponseStage; } interface ResponseStage { @@ -135,6 +137,16 @@ class RequestBuilder implements MethodStage, RequestHeadersStage, RequestBodySta return this; } + formUrlEncodedBody(body: unknown): ResponseStage { + if (body === undefined) { + throw new Error( + "Undefined is not valid for form-urlencoded. 
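> Two behavioral notes on the `Stream` change above: parsed lines no longer include the message terminator (`slice(0, terminatorIndex)` instead of `terminatorIndex + 1`), and chunks are decoded with a single shared `TextDecoder` in streaming mode so multi-byte UTF-8 characters split across chunks are reassembled correctly. The second point, illustrated standalone:

```typescript
const encoder = new TextEncoder();
const bytes = encoder.encode("é"); // two bytes: 0xC3 0xA9

// A fresh decoder per chunk corrupts a character split across chunks:
const broken =
    new TextDecoder("utf-8").decode(bytes.slice(0, 1)) + new TextDecoder("utf-8").decode(bytes.slice(1));
// broken === "\uFFFD\uFFFD" (two replacement characters)

// A shared decoder with { stream: true } buffers the partial sequence:
const shared = new TextDecoder("utf-8");
const ok = shared.decode(bytes.slice(0, 1), { stream: true }) + shared.decode(bytes.slice(1), { stream: true });
// ok === "é"

console.log({ broken, ok });
```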
Do not call formUrlEncodedBody if you want an empty body.", + ); + } + this.predicates.push((resolver) => withFormUrlEncoded(body, resolver)); + return this; + } + respondWith(): ResponseStatusStage { return new ResponseBuilder(this.method, this.buildUrl(), this.predicates, this.handlerOptions); } diff --git a/tests/mock-server/withFormUrlEncoded.ts b/tests/mock-server/withFormUrlEncoded.ts new file mode 100644 index 0000000..e9e6ff2 --- /dev/null +++ b/tests/mock-server/withFormUrlEncoded.ts @@ -0,0 +1,80 @@ +import { type HttpResponseResolver, passthrough } from "msw"; + +import { toJson } from "../../src/core/json"; + +/** + * Creates a request matcher that validates if the request form-urlencoded body exactly matches the expected object + * @param expectedBody - The exact body object to match against + * @param resolver - Response resolver to execute if body matches + */ +export function withFormUrlEncoded(expectedBody: unknown, resolver: HttpResponseResolver): HttpResponseResolver { + return async (args) => { + const { request } = args; + + let clonedRequest: Request; + let bodyText: string | undefined; + let actualBody: Record; + try { + clonedRequest = request.clone(); + bodyText = await clonedRequest.text(); + if (bodyText === "") { + console.error("Request body is empty, expected a form-urlencoded body."); + return passthrough(); + } + const params = new URLSearchParams(bodyText); + actualBody = {}; + for (const [key, value] of params.entries()) { + actualBody[key] = value; + } + } catch (error) { + console.error(`Error processing form-urlencoded request body:\n\tError: ${error}\n\tBody: ${bodyText}`); + return passthrough(); + } + + const mismatches = findMismatches(actualBody, expectedBody); + if (Object.keys(mismatches).length > 0) { + console.error("Form-urlencoded body mismatch:", toJson(mismatches, undefined, 2)); + return passthrough(); + } + + return resolver(args); + }; +} + +function findMismatches(actual: any, expected: any): Record { + const mismatches: Record = {}; + + if (typeof actual !== typeof expected) { + return { value: { actual, expected } }; + } + + if (typeof actual !== "object" || actual === null || expected === null) { + if (actual !== expected) { + return { value: { actual, expected } }; + } + return {}; + } + + const actualKeys = Object.keys(actual); + const expectedKeys = Object.keys(expected); + + const allKeys = new Set([...actualKeys, ...expectedKeys]); + + for (const key of allKeys) { + if (!expectedKeys.includes(key)) { + if (actual[key] === undefined) { + continue; + } + mismatches[key] = { actual: actual[key], expected: undefined }; + } else if (!actualKeys.includes(key)) { + if (expected[key] === undefined) { + continue; + } + mismatches[key] = { actual: undefined, expected: expected[key] }; + } else if (actual[key] !== expected[key]) { + mismatches[key] = { actual: actual[key], expected: expected[key] }; + } + } + + return mismatches; +} diff --git a/tests/setup.ts b/tests/setup.ts new file mode 100644 index 0000000..a5651f8 --- /dev/null +++ b/tests/setup.ts @@ -0,0 +1,80 @@ +import { expect } from "vitest"; + +interface CustomMatchers { + toContainHeaders(expectedHeaders: Record): R; +} + +declare module "vitest" { + interface Assertion extends CustomMatchers {} + interface AsymmetricMatchersContaining extends CustomMatchers {} +} + +expect.extend({ + toContainHeaders(actual: unknown, expectedHeaders: Record) { + const isHeaders = actual instanceof Headers; + const isPlainObject = typeof actual === "object" && actual !== null && 
!Array.isArray(actual); + + if (!isHeaders && !isPlainObject) { + throw new TypeError("Received value must be an instance of Headers or a plain object!"); + } + + if (typeof expectedHeaders !== "object" || expectedHeaders === null || Array.isArray(expectedHeaders)) { + throw new TypeError("Expected headers must be a plain object!"); + } + + const missingHeaders: string[] = []; + const mismatchedHeaders: Array<{ key: string; expected: string; actual: string | null }> = []; + + for (const [key, value] of Object.entries(expectedHeaders)) { + let actualValue: string | null = null; + + if (isHeaders) { + // Headers.get() is already case-insensitive + actualValue = (actual as Headers).get(key); + } else { + // For plain objects, do case-insensitive lookup + const actualObj = actual as Record; + const lowerKey = key.toLowerCase(); + const foundKey = Object.keys(actualObj).find((k) => k.toLowerCase() === lowerKey); + actualValue = foundKey ? actualObj[foundKey] : null; + } + + if (actualValue === null || actualValue === undefined) { + missingHeaders.push(key); + } else if (actualValue !== value) { + mismatchedHeaders.push({ key, expected: value, actual: actualValue }); + } + } + + const pass = missingHeaders.length === 0 && mismatchedHeaders.length === 0; + + const actualType = isHeaders ? "Headers" : "object"; + + if (pass) { + return { + message: () => `expected ${actualType} not to contain ${this.utils.printExpected(expectedHeaders)}`, + pass: true, + }; + } else { + const messages: string[] = []; + + if (missingHeaders.length > 0) { + messages.push(`Missing headers: ${this.utils.printExpected(missingHeaders.join(", "))}`); + } + + if (mismatchedHeaders.length > 0) { + const mismatches = mismatchedHeaders.map( + ({ key, expected, actual }) => + `${key}: expected ${this.utils.printExpected(expected)} but got ${this.utils.printReceived(actual)}`, + ); + messages.push(mismatches.join("\n")); + } + + return { + message: () => + `expected ${actualType} to contain ${this.utils.printExpected(expectedHeaders)}\n\n${messages.join("\n")}`, + pass: false, + }; + } + }, +}); diff --git a/tests/unit/auth/BasicAuth.test.ts b/tests/unit/auth/BasicAuth.test.ts index 90bbc7e..9b51233 100644 --- a/tests/unit/auth/BasicAuth.test.ts +++ b/tests/unit/auth/BasicAuth.test.ts @@ -1,58 +1,92 @@ import { BasicAuth } from "../../../src/core/auth/BasicAuth"; describe("BasicAuth", () => { - describe("toAuthorizationHeader", () => { - it("correctly converts to header", () => { - expect( - BasicAuth.toAuthorizationHeader({ - username: "username", - password: "password", - }), - ).toBe("Basic dXNlcm5hbWU6cGFzc3dvcmQ="); - }); - }); - describe("fromAuthorizationHeader", () => { - it("correctly parses header", () => { - expect(BasicAuth.fromAuthorizationHeader("Basic dXNlcm5hbWU6cGFzc3dvcmQ=")).toEqual({ - username: "username", - password: "password", - }); - }); + interface ToHeaderTestCase { + description: string; + input: { username: string; password: string }; + expected: string; + } - it("handles password with colons", () => { - expect(BasicAuth.fromAuthorizationHeader("Basic dXNlcjpwYXNzOndvcmQ=")).toEqual({ - username: "user", - password: "pass:word", - }); - }); + interface FromHeaderTestCase { + description: string; + input: string; + expected: { username: string; password: string }; + } - it("handles empty username and password (just colon)", () => { - expect(BasicAuth.fromAuthorizationHeader("Basic Og==")).toEqual({ - username: "", - password: "", - }); - }); + interface ErrorTestCase { + description: string; + 
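> The new `toContainHeaders` matcher registered in `tests/setup.ts` accepts either a `Headers` instance or a plain object and compares header names case-insensitively, which is what lets the Fetcher tests below drop `expect.objectContaining` on raw header objects. A usage sketch, assuming the setup file is wired in through the vitest setup configuration:

```typescript
import { expect, it } from "vitest";

it("matches headers case-insensitively", () => {
    const headers = new Headers({ "X-Test": "x-test-header", "content-type": "application/json" });

    // Direct assertion form: names are looked up case-insensitively.
    expect(headers).toContainHeaders({ "x-test": "x-test-header" });

    // Asymmetric form, as used in the updated Fetcher tests in this diff.
    expect({ headers }).toEqual(
        expect.objectContaining({
            headers: expect.toContainHeaders({ "Content-Type": "application/json" }),
        }),
    );
});
```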
input: string; + expectedError: string; + } + + describe("toAuthorizationHeader", () => { + const toHeaderTests: ToHeaderTestCase[] = [ + { + description: "correctly converts to header", + input: { username: "username", password: "password" }, + expected: "Basic dXNlcm5hbWU6cGFzc3dvcmQ=", + }, + ]; - it("handles empty username", () => { - expect(BasicAuth.fromAuthorizationHeader("Basic OnBhc3N3b3Jk")).toEqual({ - username: "", - password: "password", + toHeaderTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(BasicAuth.toAuthorizationHeader(input)).toBe(expected); }); }); + }); - it("handles empty password", () => { - expect(BasicAuth.fromAuthorizationHeader("Basic dXNlcm5hbWU6")).toEqual({ - username: "username", - password: "", + describe("fromAuthorizationHeader", () => { + const fromHeaderTests: FromHeaderTestCase[] = [ + { + description: "correctly parses header", + input: "Basic dXNlcm5hbWU6cGFzc3dvcmQ=", + expected: { username: "username", password: "password" }, + }, + { + description: "handles password with colons", + input: "Basic dXNlcjpwYXNzOndvcmQ=", + expected: { username: "user", password: "pass:word" }, + }, + { + description: "handles empty username and password (just colon)", + input: "Basic Og==", + expected: { username: "", password: "" }, + }, + { + description: "handles empty username", + input: "Basic OnBhc3N3b3Jk", + expected: { username: "", password: "password" }, + }, + { + description: "handles empty password", + input: "Basic dXNlcm5hbWU6", + expected: { username: "username", password: "" }, + }, + ]; + + fromHeaderTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(BasicAuth.fromAuthorizationHeader(input)).toEqual(expected); }); }); - it("throws error for completely empty credentials", () => { - expect(() => BasicAuth.fromAuthorizationHeader("Basic ")).toThrow("Invalid basic auth"); - }); + const errorTests: ErrorTestCase[] = [ + { + description: "throws error for completely empty credentials", + input: "Basic ", + expectedError: "Invalid basic auth", + }, + { + description: "throws error for credentials without colon", + input: "Basic dXNlcm5hbWU=", + expectedError: "Invalid basic auth", + }, + ]; - it("throws error for credentials without colon", () => { - expect(() => BasicAuth.fromAuthorizationHeader("Basic dXNlcm5hbWU=")).toThrow("Invalid basic auth"); + errorTests.forEach(({ description, input, expectedError }) => { + it(description, () => { + expect(() => BasicAuth.fromAuthorizationHeader(input)).toThrow(expectedError); + }); }); }); }); diff --git a/tests/unit/fetcher/Fetcher.test.ts b/tests/unit/fetcher/Fetcher.test.ts index bfc6429..60df2b5 100644 --- a/tests/unit/fetcher/Fetcher.test.ts +++ b/tests/unit/fetcher/Fetcher.test.ts @@ -13,6 +13,7 @@ describe("Test fetcherImpl", () => { body: { data: "test" }, contentType: "application/json", requestType: "json", + maxRetries: 0, responseType: "json", }; @@ -33,7 +34,7 @@ describe("Test fetcherImpl", () => { "https://httpbin.org/post", expect.objectContaining({ method: "POST", - headers: expect.objectContaining({ "X-Test": "x-test-header" }), + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), body: JSON.stringify({ data: "test" }), }), ); @@ -47,6 +48,7 @@ describe("Test fetcherImpl", () => { headers: { "X-Test": "x-test-header" }, contentType: "application/octet-stream", requestType: "bytes", + maxRetries: 0, responseType: "json", body: fs.createReadStream(join(__dirname, "test-file.txt")), }; @@ -64,7 +66,7 @@ 
describe("Test fetcherImpl", () => { url, expect.objectContaining({ method: "POST", - headers: expect.objectContaining({ "X-Test": "x-test-header" }), + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), body: expect.any(fs.ReadStream), }), ); @@ -80,6 +82,7 @@ describe("Test fetcherImpl", () => { url, method: "GET", headers: { "X-Test": "x-test-header" }, + maxRetries: 0, responseType: "binary-response", }; @@ -99,7 +102,7 @@ describe("Test fetcherImpl", () => { url, expect.objectContaining({ method: "GET", - headers: expect.objectContaining({ "X-Test": "x-test-header" }), + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), }), ); expect(result.ok).toBe(true); @@ -125,6 +128,7 @@ describe("Test fetcherImpl", () => { url, method: "GET", headers: { "X-Test": "x-test-header" }, + maxRetries: 0, responseType: "binary-response", }; @@ -144,7 +148,7 @@ describe("Test fetcherImpl", () => { url, expect.objectContaining({ method: "GET", - headers: expect.objectContaining({ "X-Test": "x-test-header" }), + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), }), ); expect(result.ok).toBe(true); @@ -170,6 +174,7 @@ describe("Test fetcherImpl", () => { url, method: "GET", headers: { "X-Test": "x-test-header" }, + maxRetries: 0, responseType: "binary-response", }; @@ -189,7 +194,7 @@ describe("Test fetcherImpl", () => { url, expect.objectContaining({ method: "GET", - headers: expect.objectContaining({ "X-Test": "x-test-header" }), + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), }), ); expect(result.ok).toBe(true); @@ -213,6 +218,7 @@ describe("Test fetcherImpl", () => { url, method: "GET", headers: { "X-Test": "x-test-header" }, + maxRetries: 0, responseType: "binary-response", }; @@ -232,7 +238,7 @@ describe("Test fetcherImpl", () => { url, expect.objectContaining({ method: "GET", - headers: expect.objectContaining({ "X-Test": "x-test-header" }), + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), }), ); expect(result.ok).toBe(true); diff --git a/tests/unit/fetcher/createRequestUrl.test.ts b/tests/unit/fetcher/createRequestUrl.test.ts index 06e03b2..a92f1b5 100644 --- a/tests/unit/fetcher/createRequestUrl.test.ts +++ b/tests/unit/fetcher/createRequestUrl.test.ts @@ -1,160 +1,163 @@ import { createRequestUrl } from "../../../src/core/fetcher/createRequestUrl"; describe("Test createRequestUrl", () => { - it("should return the base URL when no query parameters are provided", () => { - const baseUrl = "https://api.example.com"; - expect(createRequestUrl(baseUrl)).toBe(baseUrl); - }); - - it("should append simple query parameters", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { key: "value", another: "param" }; - expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?key=value&another=param"); - }); - - it("should handle array query parameters", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { items: ["a", "b", "c"] }; - expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?items=a&items=b&items=c"); - }); - - it("should handle object query parameters", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { filter: { name: "John", age: 30 } }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?filter%5Bname%5D=John&filter%5Bage%5D=30", - ); - }); - - it("should handle mixed types of query parameters", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { - 
simple: "value", - array: ["x", "y"], - object: { key: "value" }, - }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?simple=value&array=x&array=y&object%5Bkey%5D=value", - ); - }); - - it("should handle empty query parameters object", () => { - const baseUrl = "https://api.example.com"; - expect(createRequestUrl(baseUrl, {})).toBe(baseUrl); - }); - - it("should encode special characters in query parameters", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { special: "a&b=c d" }; - expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?special=a%26b%3Dc%20d"); - }); - - // Additional tests for edge cases and different value types - it("should handle numeric values", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { count: 42, price: 19.99, active: 1, inactive: 0 }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?count=42&price=19.99&active=1&inactive=0", - ); - }); - - it("should handle boolean values", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { enabled: true, disabled: false }; - expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?enabled=true&disabled=false"); - }); - - it("should handle null and undefined values", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { - valid: "value", - nullValue: null, - undefinedValue: undefined, - emptyString: "", - }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?valid=value&nullValue=&emptyString=", - ); - }); - - it("should handle deeply nested objects", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { - user: { - profile: { - name: "John", - settings: { theme: "dark" }, + const BASE_URL = "https://api.example.com"; + + interface TestCase { + description: string; + baseUrl: string; + queryParams?: Record; + expected: string; + } + + const testCases: TestCase[] = [ + { + description: "should return the base URL when no query parameters are provided", + baseUrl: BASE_URL, + expected: BASE_URL, + }, + { + description: "should append simple query parameters", + baseUrl: BASE_URL, + queryParams: { key: "value", another: "param" }, + expected: "https://api.example.com?key=value&another=param", + }, + { + description: "should handle array query parameters", + baseUrl: BASE_URL, + queryParams: { items: ["a", "b", "c"] }, + expected: "https://api.example.com?items=a&items=b&items=c", + }, + { + description: "should handle object query parameters", + baseUrl: BASE_URL, + queryParams: { filter: { name: "John", age: 30 } }, + expected: "https://api.example.com?filter%5Bname%5D=John&filter%5Bage%5D=30", + }, + { + description: "should handle mixed types of query parameters", + baseUrl: BASE_URL, + queryParams: { + simple: "value", + array: ["x", "y"], + object: { key: "value" }, + }, + expected: "https://api.example.com?simple=value&array=x&array=y&object%5Bkey%5D=value", + }, + { + description: "should handle empty query parameters object", + baseUrl: BASE_URL, + queryParams: {}, + expected: BASE_URL, + }, + { + description: "should encode special characters in query parameters", + baseUrl: BASE_URL, + queryParams: { special: "a&b=c d" }, + expected: "https://api.example.com?special=a%26b%3Dc%20d", + }, + { + description: "should handle numeric values", + baseUrl: BASE_URL, + queryParams: { count: 42, price: 19.99, active: 1, inactive: 0 }, + expected: 
"https://api.example.com?count=42&price=19.99&active=1&inactive=0", + }, + { + description: "should handle boolean values", + baseUrl: BASE_URL, + queryParams: { enabled: true, disabled: false }, + expected: "https://api.example.com?enabled=true&disabled=false", + }, + { + description: "should handle null and undefined values", + baseUrl: BASE_URL, + queryParams: { + valid: "value", + nullValue: null, + undefinedValue: undefined, + emptyString: "", + }, + expected: "https://api.example.com?valid=value&nullValue=&emptyString=", + }, + { + description: "should handle deeply nested objects", + baseUrl: BASE_URL, + queryParams: { + user: { + profile: { + name: "John", + settings: { theme: "dark" }, + }, }, }, - }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark", - ); - }); - - it("should handle arrays of objects", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { - users: [ - { name: "John", age: 30 }, - { name: "Jane", age: 25 }, - ], - }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?users%5Bname%5D=John&users%5Bage%5D=30&users%5Bname%5D=Jane&users%5Bage%5D=25", - ); - }); - - it("should handle mixed arrays", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { - mixed: ["string", 42, true, { key: "value" }], - }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?mixed=string&mixed=42&mixed=true&mixed%5Bkey%5D=value", - ); - }); - - it("should handle empty arrays", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { emptyArray: [] }; - expect(createRequestUrl(baseUrl, queryParams)).toBe(baseUrl); - }); - - it("should handle empty objects", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { emptyObject: {} }; - expect(createRequestUrl(baseUrl, queryParams)).toBe(baseUrl); - }); - - it("should handle special characters in keys", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { "key with spaces": "value", "key[with]brackets": "value" }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?key%20with%20spaces=value&key%5Bwith%5Dbrackets=value", - ); - }); - - it("should handle URL with existing query parameters", () => { - const baseUrl = "https://api.example.com?existing=param"; - const queryParams = { new: "value" }; - expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?existing=param?new=value"); - }); - - it("should handle complex nested structures", () => { - const baseUrl = "https://api.example.com"; - const queryParams = { - filters: { - status: ["active", "pending"], - category: { - type: "electronics", - subcategories: ["phones", "laptops"], + expected: + "https://api.example.com?user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark", + }, + { + description: "should handle arrays of objects", + baseUrl: BASE_URL, + queryParams: { + users: [ + { name: "John", age: 30 }, + { name: "Jane", age: 25 }, + ], + }, + expected: + "https://api.example.com?users%5Bname%5D=John&users%5Bage%5D=30&users%5Bname%5D=Jane&users%5Bage%5D=25", + }, + { + description: "should handle mixed arrays", + baseUrl: BASE_URL, + queryParams: { + mixed: ["string", 42, true, { key: "value" }], + }, + expected: "https://api.example.com?mixed=string&mixed=42&mixed=true&mixed%5Bkey%5D=value", + }, + { + description: "should handle 
empty arrays", + baseUrl: BASE_URL, + queryParams: { emptyArray: [] }, + expected: BASE_URL, + }, + { + description: "should handle empty objects", + baseUrl: BASE_URL, + queryParams: { emptyObject: {} }, + expected: BASE_URL, + }, + { + description: "should handle special characters in keys", + baseUrl: BASE_URL, + queryParams: { "key with spaces": "value", "key[with]brackets": "value" }, + expected: "https://api.example.com?key%20with%20spaces=value&key%5Bwith%5Dbrackets=value", + }, + { + description: "should handle URL with existing query parameters", + baseUrl: "https://api.example.com?existing=param", + queryParams: { new: "value" }, + expected: "https://api.example.com?existing=param?new=value", + }, + { + description: "should handle complex nested structures", + baseUrl: BASE_URL, + queryParams: { + filters: { + status: ["active", "pending"], + category: { + type: "electronics", + subcategories: ["phones", "laptops"], + }, }, + sort: { field: "name", direction: "asc" }, }, - sort: { field: "name", direction: "asc" }, - }; - expect(createRequestUrl(baseUrl, queryParams)).toBe( - "https://api.example.com?filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", - ); + expected: + "https://api.example.com?filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", + }, + ]; + + testCases.forEach(({ description, baseUrl, queryParams, expected }) => { + it(description, () => { + expect(createRequestUrl(baseUrl, queryParams)).toBe(expected); + }); }); }); diff --git a/tests/unit/fetcher/getRequestBody.test.ts b/tests/unit/fetcher/getRequestBody.test.ts index e864c8b..8a6c3a5 100644 --- a/tests/unit/fetcher/getRequestBody.test.ts +++ b/tests/unit/fetcher/getRequestBody.test.ts @@ -2,15 +2,117 @@ import { getRequestBody } from "../../../src/core/fetcher/getRequestBody"; import { RUNTIME } from "../../../src/core/runtime"; describe("Test getRequestBody", () => { - it("should stringify body if not FormData in Node environment", async () => { - if (RUNTIME.type === "node") { - const body = { key: "value" }; + interface TestCase { + description: string; + input: any; + type: "json" | "form" | "file" | "bytes" | "other"; + expected: any; + skipCondition?: () => boolean; + } + + const testCases: TestCase[] = [ + { + description: "should stringify body if not FormData in Node environment", + input: { key: "value" }, + type: "json", + expected: '{"key":"value"}', + skipCondition: () => RUNTIME.type !== "node", + }, + { + description: "should stringify body if not FormData in browser environment", + input: { key: "value" }, + type: "json", + expected: '{"key":"value"}', + skipCondition: () => RUNTIME.type !== "browser", + }, + { + description: "should return the Uint8Array", + input: new Uint8Array([1, 2, 3]), + type: "bytes", + expected: new Uint8Array([1, 2, 3]), + }, + { + description: "should serialize objects for form-urlencoded content type", + input: { username: "johndoe", email: "john@example.com" }, + type: "form", + expected: "username=johndoe&email=john%40example.com", + }, + { + description: "should serialize complex nested objects and arrays for form-urlencoded content type", + input: { + user: { + profile: { + name: "John 
Doe", + settings: { + theme: "dark", + notifications: true, + }, + }, + tags: ["admin", "user"], + contacts: [ + { type: "email", value: "john@example.com" }, + { type: "phone", value: "+1234567890" }, + ], + }, + filters: { + status: ["active", "pending"], + metadata: { + created: "2024-01-01", + categories: ["electronics", "books"], + }, + }, + preferences: ["notifications", "updates"], + }, + type: "form", + expected: + "user%5Bprofile%5D%5Bname%5D=John%20Doe&" + + "user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark&" + + "user%5Bprofile%5D%5Bsettings%5D%5Bnotifications%5D=true&" + + "user%5Btags%5D=admin&" + + "user%5Btags%5D=user&" + + "user%5Bcontacts%5D%5Btype%5D=email&" + + "user%5Bcontacts%5D%5Bvalue%5D=john%40example.com&" + + "user%5Bcontacts%5D%5Btype%5D=phone&" + + "user%5Bcontacts%5D%5Bvalue%5D=%2B1234567890&" + + "filters%5Bstatus%5D=active&" + + "filters%5Bstatus%5D=pending&" + + "filters%5Bmetadata%5D%5Bcreated%5D=2024-01-01&" + + "filters%5Bmetadata%5D%5Bcategories%5D=electronics&" + + "filters%5Bmetadata%5D%5Bcategories%5D=books&" + + "preferences=notifications&" + + "preferences=updates", + }, + { + description: "should return the input for pre-serialized form-urlencoded strings", + input: "key=value&another=param", + type: "other", + expected: "key=value&another=param", + }, + { + description: "should JSON stringify objects", + input: { key: "value" }, + type: "json", + expected: '{"key":"value"}', + }, + ]; + + testCases.forEach(({ description, input, type, expected, skipCondition }) => { + it(description, async () => { + if (skipCondition?.()) { + return; + } + const result = await getRequestBody({ - body, - type: "json", + body: input, + type, }); - expect(result).toBe('{"key":"value"}'); - } + + if (input instanceof Uint8Array) { + expect(result).toBe(input); + } else { + expect(result).toBe(expected); + } + }); }); it("should return FormData in browser environment", async () => { @@ -24,42 +126,4 @@ describe("Test getRequestBody", () => { expect(result).toBe(formData); } }); - - it("should stringify body if not FormData in browser environment", async () => { - if (RUNTIME.type === "browser") { - const body = { key: "value" }; - const result = await getRequestBody({ - body, - type: "json", - }); - expect(result).toBe('{"key":"value"}'); - } - }); - - it("should return the Uint8Array", async () => { - const input = new Uint8Array([1, 2, 3]); - const result = await getRequestBody({ - body: input, - type: "bytes", - }); - expect(result).toBe(input); - }); - - it("should return the input for content-type 'application/x-www-form-urlencoded'", async () => { - const input = "key=value&another=param"; - const result = await getRequestBody({ - body: input, - type: "other", - }); - expect(result).toBe(input); - }); - - it("should JSON stringify objects", async () => { - const input = { key: "value" }; - const result = await getRequestBody({ - body: input, - type: "json", - }); - expect(result).toBe('{"key":"value"}'); - }); }); diff --git a/tests/unit/fetcher/getResponseBody.test.ts b/tests/unit/fetcher/getResponseBody.test.ts index 151843a..ad6be7f 100644 --- a/tests/unit/fetcher/getResponseBody.test.ts +++ b/tests/unit/fetcher/getResponseBody.test.ts @@ -1,7 +1,61 @@ import { getResponseBody } from "../../../src/core/fetcher/getResponseBody"; + import { RUNTIME } from "../../../src/core/runtime"; describe("Test getResponseBody", () => { + interface SimpleTestCase { + description: string; + responseData: string | Record; + responseType?: "blob" | "sse" | "streaming" | "text"; 
+ expected: any; + skipCondition?: () => boolean; + } + + const simpleTestCases: SimpleTestCase[] = [ + { + description: "should handle text response type", + responseData: "test text", + responseType: "text", + expected: "test text", + }, + { + description: "should handle JSON response", + responseData: { key: "value" }, + expected: { key: "value" }, + }, + { + description: "should handle empty response", + responseData: "", + expected: undefined, + }, + { + description: "should handle non-JSON response", + responseData: "invalid json", + expected: { + ok: false, + error: { + reason: "non-json", + statusCode: 200, + rawBody: "invalid json", + }, + }, + }, + ]; + + simpleTestCases.forEach(({ description, responseData, responseType, expected, skipCondition }) => { + it(description, async () => { + if (skipCondition?.()) { + return; + } + + const mockResponse = new Response( + typeof responseData === "string" ? responseData : JSON.stringify(responseData), + ); + const result = await getResponseBody(mockResponse, responseType); + expect(result).toEqual(expected); + }); + }); + it("should handle blob response type", async () => { const mockBlob = new Blob(["test"], { type: "text/plain" }); const mockResponse = new Response(mockBlob); @@ -20,7 +74,6 @@ describe("Test getResponseBody", () => { }); it("should handle streaming response type", async () => { - // Create a ReadableStream with some test data const encoder = new TextEncoder(); const testData = "test stream data"; const mockStream = new ReadableStream({ @@ -35,43 +88,10 @@ describe("Test getResponseBody", () => { expect(result).toBeInstanceOf(ReadableStream); - // Read and verify the stream content const reader = result.getReader(); const decoder = new TextDecoder(); const { value } = await reader.read(); const streamContent = decoder.decode(value); expect(streamContent).toBe(testData); }); - - it("should handle text response type", async () => { - const mockResponse = new Response("test text"); - const result = await getResponseBody(mockResponse, "text"); - expect(result).toBe("test text"); - }); - - it("should handle JSON response", async () => { - const mockJson = { key: "value" }; - const mockResponse = new Response(JSON.stringify(mockJson)); - const result = await getResponseBody(mockResponse); - expect(result).toEqual(mockJson); - }); - - it("should handle empty response", async () => { - const mockResponse = new Response(""); - const result = await getResponseBody(mockResponse); - expect(result).toBeUndefined(); - }); - - it("should handle non-JSON response", async () => { - const mockResponse = new Response("invalid json"); - const result = await getResponseBody(mockResponse); - expect(result).toEqual({ - ok: false, - error: { - reason: "non-json", - statusCode: 200, - rawBody: "invalid json", - }, - }); - }); }); diff --git a/tests/unit/fetcher/logging.test.ts b/tests/unit/fetcher/logging.test.ts new file mode 100644 index 0000000..366c9b6 --- /dev/null +++ b/tests/unit/fetcher/logging.test.ts @@ -0,0 +1,517 @@ +import { fetcherImpl } from "../../../src/core/fetcher/Fetcher"; + +function createMockLogger() { + return { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }; +} + +function mockSuccessResponse(data: unknown = { data: "test" }, status = 200, statusText = "OK") { + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify(data), { + status, + statusText, + }), + ); +} + +function mockErrorResponse(data: unknown = { error: "Error" }, status = 404, statusText = "Not Found") { + 
global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify(data), { + status, + statusText, + }), + ); +} + +describe("Fetcher Logging Integration", () => { + describe("Request Logging", () => { + it("should log successful request at debug level", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + headers: { "Content-Type": "application/json" }, + body: { test: "data" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "POST", + url: "https://example.com/api", + headers: expect.toContainHeaders({ + "Content-Type": "application/json", + }), + hasBody: true, + }), + ); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + method: "POST", + url: "https://example.com/api", + statusCode: 200, + }), + ); + }); + + it("should not log debug messages at info level for successful requests", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "info", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + }); + + it("should log request with body flag", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + hasBody: true, + }), + ); + }); + + it("should log request without body flag", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + hasBody: false, + }), + ); + }); + + it("should not log when silent mode is enabled", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: true, + }, + }); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).not.toHaveBeenCalled(); + expect(mockLogger.error).not.toHaveBeenCalled(); + }); + + it("should not log when no logging config is provided", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + }); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + }); + }); + + describe("Error Logging", () => { + it("should log 4xx errors at error level", 
async () => { + const mockLogger = createMockLogger(); + mockErrorResponse({ error: "Not found" }, 404, "Not Found"); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + statusCode: 404, + }), + ); + }); + + it("should log 5xx errors at error level", async () => { + const mockLogger = createMockLogger(); + mockErrorResponse({ error: "Internal error" }, 500, "Internal Server Error"); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + statusCode: 500, + }), + ); + }); + + it("should log aborted request errors", async () => { + const mockLogger = createMockLogger(); + + const abortController = new AbortController(); + abortController.abort(); + + global.fetch = vi.fn().mockRejectedValue(new Error("Aborted")); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + abortSignal: abortController.signal, + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request was aborted", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + }), + ); + }); + + it("should log timeout errors", async () => { + const mockLogger = createMockLogger(); + + const timeoutError = new Error("Request timeout"); + timeoutError.name = "AbortError"; + + global.fetch = vi.fn().mockRejectedValue(timeoutError); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request timed out", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + timeoutMs: undefined, + }), + ); + }); + + it("should log unknown errors", async () => { + const mockLogger = createMockLogger(); + + const unknownError = new Error("Unknown error"); + + global.fetch = vi.fn().mockRejectedValue(unknownError); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + errorMessage: "Unknown error", + }), + ); + }); + }); + + describe("Logging with Redaction", () => { + it("should redact sensitive data in error logs", async () => { + const mockLogger = createMockLogger(); + mockErrorResponse({ error: "Unauthorized" }, 401, "Unauthorized"); + + await fetcherImpl({ + url: 
"https://example.com/api?api_key=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + url: "https://example.com/api?api_key=[REDACTED]", + }), + ); + }); + }); + + describe("Different HTTP Methods", () => { + it("should log GET requests", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "GET", + }), + ); + }); + + it("should log POST requests", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse({ data: "test" }, 201, "Created"); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "POST", + }), + ); + }); + + it("should log PUT requests", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "PUT", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "PUT", + }), + ); + }); + + it("should log DELETE requests", async () => { + const mockLogger = createMockLogger(); + global.fetch = vi.fn().mockResolvedValue( + new Response(null, { + status: 200, + statusText: "OK", + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "DELETE", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "DELETE", + }), + ); + }); + }); + + describe("Status Code Logging", () => { + it("should log 2xx success status codes", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse({ data: "test" }, 201, "Created"); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + statusCode: 201, + }), + ); + }); + + it("should log 3xx redirect status codes as success", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse({ data: "test" }, 301, "Moved Permanently"); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + 
expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + statusCode: 301, + }), + ); + }); + }); +}); diff --git a/tests/unit/fetcher/makeRequest.test.ts b/tests/unit/fetcher/makeRequest.test.ts index f6203cd..ea49466 100644 --- a/tests/unit/fetcher/makeRequest.test.ts +++ b/tests/unit/fetcher/makeRequest.test.ts @@ -1,3 +1,4 @@ +import type { Mock } from "vitest"; import { makeRequest } from "../../../src/core/fetcher/makeRequest"; describe("Test makeRequest", () => { @@ -6,7 +7,7 @@ describe("Test makeRequest", () => { const mockHeaders = { "Content-Type": "application/json" }; const mockBody = JSON.stringify({ key: "value" }); - let mockFetch: import("vitest").Mock; + let mockFetch: Mock; beforeEach(() => { mockFetch = vi.fn(); diff --git a/tests/unit/fetcher/redacting.test.ts b/tests/unit/fetcher/redacting.test.ts new file mode 100644 index 0000000..d599376 --- /dev/null +++ b/tests/unit/fetcher/redacting.test.ts @@ -0,0 +1,1115 @@ +import { fetcherImpl } from "../../../src/core/fetcher/Fetcher"; + +function createMockLogger() { + return { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }; +} + +function mockSuccessResponse(data: unknown = { data: "test" }, status = 200, statusText = "OK") { + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify(data), { + status, + statusText, + }), + ); +} + +describe("Redacting Logic", () => { + describe("Header Redaction", () => { + it("should redact authorization header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { Authorization: "Bearer secret-token-12345" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + Authorization: "[REDACTED]", + }), + }), + ); + }); + + it("should redact api-key header (case-insensitive)", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "X-API-KEY": "secret-api-key" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "X-API-KEY": "[REDACTED]", + }), + }), + ); + }); + + it("should redact cookie header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { Cookie: "session=abc123; token=xyz789" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + Cookie: "[REDACTED]", + }), + }), + ); + }); + + it("should redact x-auth-token header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "x-auth-token": "auth-token-12345" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + 
}, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "x-auth-token": "[REDACTED]", + }), + }), + ); + }); + + it("should redact proxy-authorization header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "Proxy-Authorization": "Basic credentials" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "Proxy-Authorization": "[REDACTED]", + }), + }), + ); + }); + + it("should redact x-csrf-token header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "X-CSRF-Token": "csrf-token-abc" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "X-CSRF-Token": "[REDACTED]", + }), + }), + ); + }); + + it("should redact www-authenticate header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "WWW-Authenticate": "Bearer realm=example" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "WWW-Authenticate": "[REDACTED]", + }), + }), + ); + }); + + it("should redact x-session-token header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "X-Session-Token": "session-token-xyz" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "X-Session-Token": "[REDACTED]", + }), + }), + ); + }); + + it("should not redact non-sensitive headers", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { + "Content-Type": "application/json", + "User-Agent": "Test/1.0", + Accept: "application/json", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "Content-Type": "application/json", + "User-Agent": "Test/1.0", + Accept: "application/json", + }), + }), + ); + }); + + it("should redact multiple sensitive headers at once", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { + Authorization: "Bearer token", + "X-API-Key": "api-key", + Cookie: "session=123", + "Content-Type": 
"application/json", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + Authorization: "[REDACTED]", + "X-API-Key": "[REDACTED]", + Cookie: "[REDACTED]", + "Content-Type": "application/json", + }), + }), + ); + }); + }); + + describe("Response Header Redaction", () => { + it("should redact Set-Cookie in response headers", async () => { + const mockLogger = createMockLogger(); + + const mockHeaders = new Headers(); + mockHeaders.set("Set-Cookie", "session=abc123; HttpOnly; Secure"); + mockHeaders.set("Content-Type", "application/json"); + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ data: "test" }), { + status: 200, + statusText: "OK", + headers: mockHeaders, + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + responseHeaders: expect.toContainHeaders({ + "set-cookie": "[REDACTED]", + "content-type": "application/json", + }), + }), + ); + }); + + it("should redact authorization in response headers", async () => { + const mockLogger = createMockLogger(); + + const mockHeaders = new Headers(); + mockHeaders.set("Authorization", "Bearer token-123"); + mockHeaders.set("Content-Type", "application/json"); + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ data: "test" }), { + status: 200, + statusText: "OK", + headers: mockHeaders, + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + responseHeaders: expect.toContainHeaders({ + authorization: "[REDACTED]", + "content-type": "application/json", + }), + }), + ); + }); + + it("should redact response headers in error responses", async () => { + const mockLogger = createMockLogger(); + + const mockHeaders = new Headers(); + mockHeaders.set("WWW-Authenticate", "Bearer realm=example"); + mockHeaders.set("Content-Type", "application/json"); + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ error: "Unauthorized" }), { + status: 401, + statusText: "Unauthorized", + headers: mockHeaders, + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + responseHeaders: expect.toContainHeaders({ + "www-authenticate": "[REDACTED]", + "content-type": "application/json", + }), + }), + ); + }); + }); + + describe("Query Parameter Redaction", () => { + it("should redact api_key query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { api_key: "secret-key" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, 
+ silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + api_key: "[REDACTED]", + }), + }), + ); + }); + + it("should redact token query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { token: "secret-token" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + token: "[REDACTED]", + }), + }), + ); + }); + + it("should redact access_token query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { access_token: "secret-access-token" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + access_token: "[REDACTED]", + }), + }), + ); + }); + + it("should redact password query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { password: "secret-password" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + password: "[REDACTED]", + }), + }), + ); + }); + + it("should redact secret query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { secret: "secret-value" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + secret: "[REDACTED]", + }), + }), + ); + }); + + it("should redact session_id query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { session_id: "session-123" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + session_id: "[REDACTED]", + }), + }), + ); + }); + + it("should not redact non-sensitive query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { + page: "1", + limit: "10", + sort: "name", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + 
expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + page: "1", + limit: "10", + sort: "name", + }), + }), + ); + }); + + it("should not redact parameters containing 'auth' substring like 'author'", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { + author: "john", + authenticate: "false", + authorization_level: "user", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + author: "john", + authenticate: "false", + authorization_level: "user", + }), + }), + ); + }); + + it("should handle undefined query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: undefined, + }), + ); + }); + + it("should redact case-insensitive query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { API_KEY: "secret-key", Token: "secret-token" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + API_KEY: "[REDACTED]", + Token: "[REDACTED]", + }), + }), + ); + }); + }); + + describe("URL Redaction", () => { + it("should redact credentials in URL", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user:password@example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@example.com/api", + }), + ); + }); + + it("should redact api_key in query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?api_key=secret-key&page=1", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?api_key=[REDACTED]&page=1", + }), + ); + }); + + it("should redact token in query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?token=secret-token", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: 
"https://example.com/api?token=[REDACTED]", + }), + ); + }); + + it("should redact password in query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?username=user&password=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?username=user&password=[REDACTED]", + }), + ); + }); + + it("should not redact non-sensitive query strings", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?page=1&limit=10&sort=name", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?page=1&limit=10&sort=name", + }), + ); + }); + + it("should not redact URL parameters containing 'auth' substring like 'author'", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?author=john&authenticate=false&page=1", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?author=john&authenticate=false&page=1", + }), + ); + }); + + it("should handle URL with fragment", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?token=secret#section", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?token=[REDACTED]#section", + }), + ); + }); + + it("should redact URL-encoded query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?api%5Fkey=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?api%5Fkey=[REDACTED]", + }), + ); + }); + + it("should handle URL without query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api", + }), + ); + }); + + it("should handle empty query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: 
mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?", + }), + ); + }); + + it("should redact multiple sensitive parameters in URL", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?api_key=secret1&token=secret2&page=1", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?api_key=[REDACTED]&token=[REDACTED]&page=1", + }), + ); + }); + + it("should redact both credentials and query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user:pass@example.com/api?token=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@example.com/api?token=[REDACTED]", + }), + ); + }); + + it("should use fast path for URLs without sensitive keywords", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?page=1&limit=10&sort=name&filter=value", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?page=1&limit=10&sort=name&filter=value", + }), + ); + }); + + it("should handle query parameter without value", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?flag&token=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?flag&token=[REDACTED]", + }), + ); + }); + + it("should handle URL with multiple @ symbols in credentials", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user@example.com:pass@host.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@host.com/api", + }), + ); + }); + + it("should handle URL with @ in query parameter but not in credentials", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?email=user@example.com", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?email=user@example.com", + }), + ); + }); + + it("should handle URL with both credentials and @ in 
path", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user:pass@example.com/users/@username", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@example.com/users/@username", + }), + ); + }); + }); +}); diff --git a/tests/unit/fetcher/requestWithRetries.test.ts b/tests/unit/fetcher/requestWithRetries.test.ts index 7d46082..d226613 100644 --- a/tests/unit/fetcher/requestWithRetries.test.ts +++ b/tests/unit/fetcher/requestWithRetries.test.ts @@ -1,15 +1,15 @@ +import type { Mock, MockInstance } from "vitest"; import { requestWithRetries } from "../../../src/core/fetcher/requestWithRetries"; describe("requestWithRetries", () => { - let mockFetch: import("vitest").Mock; + let mockFetch: Mock; let originalMathRandom: typeof Math.random; - let setTimeoutSpy: import("vitest").MockInstance; + let setTimeoutSpy: MockInstance; beforeEach(() => { mockFetch = vi.fn(); originalMathRandom = Math.random; - // Mock Math.random for consistent jitter Math.random = vi.fn(() => 0.5); vi.useFakeTimers({ @@ -99,6 +99,67 @@ describe("requestWithRetries", () => { } }); + interface RetryHeaderTestCase { + description: string; + headerName: string; + headerValue: string | (() => string); + expectedDelayMin: number; + expectedDelayMax: number; + } + + const retryHeaderTests: RetryHeaderTestCase[] = [ + { + description: "should respect retry-after header with seconds value", + headerName: "retry-after", + headerValue: "5", + expectedDelayMin: 4000, + expectedDelayMax: 6000, + }, + { + description: "should respect retry-after header with HTTP date value", + headerName: "retry-after", + headerValue: () => new Date(Date.now() + 3000).toUTCString(), + expectedDelayMin: 2000, + expectedDelayMax: 4000, + }, + { + description: "should respect x-ratelimit-reset header", + headerName: "x-ratelimit-reset", + headerValue: () => Math.floor((Date.now() + 4000) / 1000).toString(), + expectedDelayMin: 3000, + expectedDelayMax: 6000, + }, + ]; + + retryHeaderTests.forEach(({ description, headerName, headerValue, expectedDelayMin, expectedDelayMax }) => { + it(description, async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as any; + }); + + const value = typeof headerValue === "function" ? 
headerValue() : headerValue; + mockFetch + .mockResolvedValueOnce( + new Response("", { + status: 429, + headers: new Headers({ [headerName]: value }), + }), + ) + .mockResolvedValueOnce(new Response("", { status: 200 })); + + const responsePromise = requestWithRetries(() => mockFetch(), 1); + await vi.runAllTimersAsync(); + const response = await responsePromise; + + expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), expect.any(Number)); + const actualDelay = setTimeoutSpy.mock.calls[0][1]; + expect(actualDelay).toBeGreaterThan(expectedDelayMin); + expect(actualDelay).toBeLessThan(expectedDelayMax); + expect(response.status).toBe(200); + }); + }); + it("should apply correct exponential backoff with jitter", async () => { setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { process.nextTick(callback); @@ -113,7 +174,6 @@ describe("requestWithRetries", () => { await vi.runAllTimersAsync(); await responsePromise; - // Verify setTimeout calls expect(setTimeoutSpy).toHaveBeenCalledTimes(expectedDelays.length); expectedDelays.forEach((delay, index) => { @@ -145,85 +205,6 @@ describe("requestWithRetries", () => { expect(response2.status).toBe(200); }); - it("should respect retry-after header with seconds value", async () => { - setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { - process.nextTick(callback); - return null as any; - }); - - mockFetch - .mockResolvedValueOnce( - new Response("", { - status: 429, - headers: new Headers({ "retry-after": "5" }), - }), - ) - .mockResolvedValueOnce(new Response("", { status: 200 })); - - const responsePromise = requestWithRetries(() => mockFetch(), 1); - await vi.runAllTimersAsync(); - const response = await responsePromise; - - expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 5000); // 5 seconds = 5000ms - expect(response.status).toBe(200); - }); - - it("should respect retry-after header with HTTP date value", async () => { - setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { - process.nextTick(callback); - return null as any; - }); - - const futureDate = new Date(Date.now() + 3000); // 3 seconds from now - mockFetch - .mockResolvedValueOnce( - new Response("", { - status: 429, - headers: new Headers({ "retry-after": futureDate.toUTCString() }), - }), - ) - .mockResolvedValueOnce(new Response("", { status: 200 })); - - const responsePromise = requestWithRetries(() => mockFetch(), 1); - await vi.runAllTimersAsync(); - const response = await responsePromise; - - // Should use the date-based delay (approximately 3000ms, but with jitter) - expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), expect.any(Number)); - const actualDelay = setTimeoutSpy.mock.calls[0][1]; - expect(actualDelay).toBeGreaterThan(2000); - expect(actualDelay).toBeLessThan(4000); - expect(response.status).toBe(200); - }); - - it("should respect x-ratelimit-reset header", async () => { - setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { - process.nextTick(callback); - return null as any; - }); - - const resetTime = Math.floor((Date.now() + 4000) / 1000); // 4 seconds from now in Unix timestamp - mockFetch - .mockResolvedValueOnce( - new Response("", { - status: 429, - headers: new Headers({ "x-ratelimit-reset": resetTime.toString() }), - }), - ) - .mockResolvedValueOnce(new Response("", { status: 200 })); - - const responsePromise = 
requestWithRetries(() => mockFetch(), 1); - await vi.runAllTimersAsync(); - const response = await responsePromise; - - // Should use the x-ratelimit-reset delay (approximately 4000ms, but with positive jitter) - expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), expect.any(Number)); - const actualDelay = setTimeoutSpy.mock.calls[0][1]; - expect(actualDelay).toBeGreaterThan(3000); - expect(actualDelay).toBeLessThan(6000); - expect(response.status).toBe(200); - }); - it("should cap delay at MAX_RETRY_DELAY for large header values", async () => { setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { process.nextTick(callback); @@ -243,8 +224,7 @@ describe("requestWithRetries", () => { await vi.runAllTimersAsync(); const response = await responsePromise; - // Should be capped at MAX_RETRY_DELAY (60000ms) with jitter applied - expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 60000); // Exactly MAX_RETRY_DELAY since jitter with 0.5 random keeps it at 60000 + expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 60000); expect(response.status).toBe(200); }); }); diff --git a/tests/unit/logging/logger.test.ts b/tests/unit/logging/logger.test.ts new file mode 100644 index 0000000..2e0b5fe --- /dev/null +++ b/tests/unit/logging/logger.test.ts @@ -0,0 +1,454 @@ +import { ConsoleLogger, createLogger, Logger, LogLevel } from "../../../src/core/logging/logger"; + +function createMockLogger() { + return { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }; +} + +describe("Logger", () => { + describe("LogLevel", () => { + it("should have correct log levels", () => { + expect(LogLevel.Debug).toBe("debug"); + expect(LogLevel.Info).toBe("info"); + expect(LogLevel.Warn).toBe("warn"); + expect(LogLevel.Error).toBe("error"); + }); + }); + + describe("ConsoleLogger", () => { + let consoleLogger: ConsoleLogger; + let consoleSpy: { + debug: ReturnType; + info: ReturnType; + warn: ReturnType; + error: ReturnType; + }; + + beforeEach(() => { + consoleLogger = new ConsoleLogger(); + consoleSpy = { + debug: vi.spyOn(console, "debug").mockImplementation(() => {}), + info: vi.spyOn(console, "info").mockImplementation(() => {}), + warn: vi.spyOn(console, "warn").mockImplementation(() => {}), + error: vi.spyOn(console, "error").mockImplementation(() => {}), + }; + }); + + afterEach(() => { + consoleSpy.debug.mockRestore(); + consoleSpy.info.mockRestore(); + consoleSpy.warn.mockRestore(); + consoleSpy.error.mockRestore(); + }); + + it("should log debug messages", () => { + consoleLogger.debug("debug message", { data: "test" }); + expect(consoleSpy.debug).toHaveBeenCalledWith("debug message", { data: "test" }); + }); + + it("should log info messages", () => { + consoleLogger.info("info message", { data: "test" }); + expect(consoleSpy.info).toHaveBeenCalledWith("info message", { data: "test" }); + }); + + it("should log warn messages", () => { + consoleLogger.warn("warn message", { data: "test" }); + expect(consoleSpy.warn).toHaveBeenCalledWith("warn message", { data: "test" }); + }); + + it("should log error messages", () => { + consoleLogger.error("error message", { data: "test" }); + expect(consoleSpy.error).toHaveBeenCalledWith("error message", { data: "test" }); + }); + + it("should handle multiple arguments", () => { + consoleLogger.debug("message", "arg1", "arg2", { key: "value" }); + expect(consoleSpy.debug).toHaveBeenCalledWith("message", "arg1", "arg2", { key: "value" }); + }); + }); + + 
describe("Logger with level filtering", () => { + let mockLogger: { + debug: ReturnType; + info: ReturnType; + warn: ReturnType; + error: ReturnType; + }; + + beforeEach(() => { + mockLogger = createMockLogger(); + }); + + describe("Debug level", () => { + it("should log all levels when set to debug", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).toHaveBeenCalledWith("debug"); + expect(mockLogger.info).toHaveBeenCalledWith("info"); + expect(mockLogger.warn).toHaveBeenCalledWith("warn"); + expect(mockLogger.error).toHaveBeenCalledWith("error"); + }); + + it("should report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(true); + expect(logger.isInfo()).toBe(true); + expect(logger.isWarn()).toBe(true); + expect(logger.isError()).toBe(true); + }); + }); + + describe("Info level", () => { + it("should log info, warn, and error when set to info", () => { + const logger = new Logger({ + level: LogLevel.Info, + logger: mockLogger, + silent: false, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).toHaveBeenCalledWith("info"); + expect(mockLogger.warn).toHaveBeenCalledWith("warn"); + expect(mockLogger.error).toHaveBeenCalledWith("error"); + }); + + it("should report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Info, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(false); + expect(logger.isInfo()).toBe(true); + expect(logger.isWarn()).toBe(true); + expect(logger.isError()).toBe(true); + }); + }); + + describe("Warn level", () => { + it("should log warn and error when set to warn", () => { + const logger = new Logger({ + level: LogLevel.Warn, + logger: mockLogger, + silent: false, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).toHaveBeenCalledWith("warn"); + expect(mockLogger.error).toHaveBeenCalledWith("error"); + }); + + it("should report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Warn, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(false); + expect(logger.isInfo()).toBe(false); + expect(logger.isWarn()).toBe(true); + expect(logger.isError()).toBe(true); + }); + }); + + describe("Error level", () => { + it("should only log error when set to error", () => { + const logger = new Logger({ + level: LogLevel.Error, + logger: mockLogger, + silent: false, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).not.toHaveBeenCalled(); + expect(mockLogger.error).toHaveBeenCalledWith("error"); + }); + + it("should report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Error, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(false); + expect(logger.isInfo()).toBe(false); + expect(logger.isWarn()).toBe(false); + 
expect(logger.isError()).toBe(true); + }); + }); + + describe("Silent mode", () => { + it("should not log anything when silent is true", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: true, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).not.toHaveBeenCalled(); + expect(mockLogger.error).not.toHaveBeenCalled(); + }); + + it("should report all level checks as false when silent", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: true, + }); + + expect(logger.isDebug()).toBe(false); + expect(logger.isInfo()).toBe(false); + expect(logger.isWarn()).toBe(false); + expect(logger.isError()).toBe(false); + }); + }); + + describe("shouldLog", () => { + it("should correctly determine if level should be logged", () => { + const logger = new Logger({ + level: LogLevel.Info, + logger: mockLogger, + silent: false, + }); + + expect(logger.shouldLog(LogLevel.Debug)).toBe(false); + expect(logger.shouldLog(LogLevel.Info)).toBe(true); + expect(logger.shouldLog(LogLevel.Warn)).toBe(true); + expect(logger.shouldLog(LogLevel.Error)).toBe(true); + }); + + it("should return false for all levels when silent", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: true, + }); + + expect(logger.shouldLog(LogLevel.Debug)).toBe(false); + expect(logger.shouldLog(LogLevel.Info)).toBe(false); + expect(logger.shouldLog(LogLevel.Warn)).toBe(false); + expect(logger.shouldLog(LogLevel.Error)).toBe(false); + }); + }); + + describe("Multiple arguments", () => { + it("should pass multiple arguments to logger", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug("message", "arg1", { key: "value" }, 123); + expect(mockLogger.debug).toHaveBeenCalledWith("message", "arg1", { key: "value" }, 123); + }); + }); + }); + + describe("createLogger", () => { + it("should return default logger when no config provided", () => { + const logger = createLogger(); + expect(logger).toBeInstanceOf(Logger); + }); + + it("should return same logger instance when Logger is passed", () => { + const customLogger = new Logger({ + level: LogLevel.Debug, + logger: new ConsoleLogger(), + silent: false, + }); + + const result = createLogger(customLogger); + expect(result).toBe(customLogger); + }); + + it("should create logger with custom config", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + level: LogLevel.Warn, + logger: mockLogger, + silent: false, + }); + + expect(logger).toBeInstanceOf(Logger); + logger.warn("test"); + expect(mockLogger.warn).toHaveBeenCalledWith("test"); + }); + + it("should use default values for missing config", () => { + const logger = createLogger({}); + expect(logger).toBeInstanceOf(Logger); + }); + + it("should override default level", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug("test"); + expect(mockLogger.debug).toHaveBeenCalledWith("test"); + }); + + it("should override default silent mode", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + logger: mockLogger, + silent: false, + }); + + logger.info("test"); + 
expect(mockLogger.info).toHaveBeenCalledWith("test"); + }); + + it("should use provided logger implementation", () => { + const customLogger = createMockLogger(); + + const logger = createLogger({ + logger: customLogger, + level: LogLevel.Debug, + silent: false, + }); + + logger.debug("test"); + expect(customLogger.debug).toHaveBeenCalledWith("test"); + }); + + it("should default to silent: true", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + logger: mockLogger, + level: LogLevel.Debug, + }); + + logger.debug("test"); + expect(mockLogger.debug).not.toHaveBeenCalled(); + }); + }); + + describe("Default logger", () => { + it("should have silent: true by default", () => { + const logger = createLogger(); + expect(logger.shouldLog(LogLevel.Info)).toBe(false); + }); + + it("should not log when using default logger", () => { + const logger = createLogger(); + + logger.info("test"); + expect(logger.isInfo()).toBe(false); + }); + }); + + describe("Edge cases", () => { + it("should handle empty message", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug(""); + expect(mockLogger.debug).toHaveBeenCalledWith(""); + }); + + it("should handle no arguments", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug("message"); + expect(mockLogger.debug).toHaveBeenCalledWith("message"); + }); + + it("should handle complex objects", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + const complexObject = { + nested: { key: "value" }, + array: [1, 2, 3], + fn: () => "test", + }; + + logger.debug("message", complexObject); + expect(mockLogger.debug).toHaveBeenCalledWith("message", complexObject); + }); + + it("should handle errors as arguments", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Error, + logger: mockLogger, + silent: false, + }); + + const error = new Error("Test error"); + logger.error("Error occurred", error); + expect(mockLogger.error).toHaveBeenCalledWith("Error occurred", error); + }); + }); +}); diff --git a/tests/unit/stream/Stream.test.ts b/tests/unit/stream/Stream.test.ts new file mode 100644 index 0000000..f82b90a --- /dev/null +++ b/tests/unit/stream/Stream.test.ts @@ -0,0 +1,348 @@ +import { Stream } from "../../../src/core/stream/Stream"; + +describe("Stream", () => { + describe("JSON streaming", () => { + it("should parse single JSON message", async () => { + const mockStream = createReadableStream(['{"value": 1}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }]); + }); + + it("should parse multiple JSON messages", async () => { + const mockStream = createReadableStream(['{"value": 1}\n{"value": 2}\n{"value": 3}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } 
+ + expect(messages).toEqual([{ value: 1 }, { value: 2 }, { value: 3 }]); + }); + + it("should handle messages split across chunks", async () => { + const mockStream = createReadableStream(['{"val', 'ue": 1}\n{"value":', " 2}\n"]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }, { value: 2 }]); + }); + + it("should skip empty lines", async () => { + const mockStream = createReadableStream(['{"value": 1}\n\n\n{"value": 2}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }, { value: 2 }]); + }); + + it("should handle custom message terminator", async () => { + const mockStream = createReadableStream(['{"value": 1}|||{"value": 2}|||']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "|||" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }, { value: 2 }]); + }); + }); + + describe("SSE streaming", () => { + it("should parse SSE data with prefix", async () => { + const mockStream = createReadableStream(['data: {"value": 1}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "sse" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }]); + }); + + it("should parse multiple SSE events", async () => { + const mockStream = createReadableStream(['data: {"value": 1}\ndata: {"value": 2}\ndata: {"value": 3}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "sse" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }, { value: 2 }, { value: 3 }]); + }); + + it("should stop at stream terminator", async () => { + const mockStream = createReadableStream(['data: {"value": 1}\ndata: [DONE]\ndata: {"value": 2}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "sse", streamTerminator: "[DONE]" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }]); + }); + + it("should skip lines without data prefix", async () => { + const mockStream = createReadableStream([ + 'event: message\ndata: {"value": 1}\nid: 123\ndata: {"value": 2}\n', + ]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "sse" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }, { value: 2 }]); + }); + }); + + describe("encoding and 
decoding", () => { + it("should decode UTF-8 text using TextDecoder", async () => { + const encoder = new TextEncoder(); + const mockStream = createReadableStream([encoder.encode('{"text": "café"}\n')]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { text: string }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ text: "café" }]); + }); + + it("should decode emoji correctly", async () => { + const encoder = new TextEncoder(); + const mockStream = createReadableStream([encoder.encode('{"emoji": "🎉"}\n')]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { emoji: string }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ emoji: "🎉" }]); + }); + + it("should handle binary data chunks", async () => { + const encoder = new TextEncoder(); + const mockStream = createReadableStream([encoder.encode('{"val'), encoder.encode('ue": 1}\n')]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }]); + }); + + it("should handle multi-byte UTF-8 characters split across chunk boundaries", async () => { + // Test string with Japanese (3 bytes), Russian (2 bytes), German (2 bytes), and Chinese (3 bytes) + const testString = '{"text": "こんにちは Привет Größe 你好"}\n'; + const fullBytes = new TextEncoder().encode(testString); + + // Split the bytes in the middle of multi-byte characters + // Japanese "こ" starts at byte 11, is 3 bytes (E3 81 93) + // Split after first byte of "こ" to test mid-character splitting + const splitPoint = 12; // This splits "こ" in the middle + const chunk1 = fullBytes.slice(0, splitPoint); + const chunk2 = fullBytes.slice(splitPoint); + + const mockStream = createReadableStream([chunk1, chunk2]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { text: string }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ text: "こんにちは Привет Größe 你好" }]); + }); + }); + + describe("abort signal", () => { + it("should handle abort signal", async () => { + const controller = new AbortController(); + const mockStream = createReadableStream(['{"value": 1}\n{"value": 2}\n{"value": 3}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + signal: controller.signal, + }); + + const messages: unknown[] = []; + let count = 0; + for await (const message of stream) { + messages.push(message); + count++; + if (count === 2) { + controller.abort(); + break; + } + } + + expect(messages.length).toBe(2); + }); + }); + + describe("async iteration", () => { + it("should support async iterator protocol", async () => { + const mockStream = createReadableStream(['{"value": 1}\n{"value": 2}\n']); + const stream = new Stream({ + stream: mockStream, + parse: async 
(val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const iterator = stream[Symbol.asyncIterator](); + const first = await iterator.next(); + expect(first.done).toBe(false); + expect(first.value).toEqual({ value: 1 }); + + const second = await iterator.next(); + expect(second.done).toBe(false); + expect(second.value).toEqual({ value: 2 }); + + const third = await iterator.next(); + expect(third.done).toBe(true); + }); + }); + + describe("edge cases", () => { + it("should handle empty stream", async () => { + const mockStream = createReadableStream([]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([]); + }); + + it("should handle stream with only whitespace", async () => { + const mockStream = createReadableStream([" \n\n\t\n "]); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([]); + }); + + it("should handle incomplete message at end of stream", async () => { + const mockStream = createReadableStream(['{"value": 1}\n{"incomplete']); + const stream = new Stream({ + stream: mockStream, + parse: async (val: unknown) => val as { value: number }, + eventShape: { type: "json", messageTerminator: "\n" }, + }); + + const messages: unknown[] = []; + for await (const message of stream) { + messages.push(message); + } + + expect(messages).toEqual([{ value: 1 }]); + }); + }); +}); + +// Helper function to create a ReadableStream from string chunks +function createReadableStream(chunks: (string | Uint8Array)[]): ReadableStream { + // For standard type, return ReadableStream + let index = 0; + return new ReadableStream({ + pull(controller) { + if (index < chunks.length) { + const chunk = chunks[index++]; + controller.enqueue(typeof chunk === "string" ? 
new TextEncoder().encode(chunk) : chunk); + } else { + controller.close(); + } + }, + }); +} diff --git a/tests/unit/url/join.test.ts b/tests/unit/url/join.test.ts index 1956a8c..123488f 100644 --- a/tests/unit/url/join.test.ts +++ b/tests/unit/url/join.test.ts @@ -1,88 +1,223 @@ import { join } from "../../../src/core/url/index"; describe("join", () => { - describe("basic functionality", () => { - it("should return empty string for empty base", () => { - expect(join("")).toBe(""); - expect(join("", "path")).toBe(""); - }); + interface TestCase { + description: string; + base: string; + segments: string[]; + expected: string; + } - it("should handle single segment", () => { - expect(join("base", "segment")).toBe("base/segment"); - expect(join("base/", "segment")).toBe("base/segment"); - expect(join("base", "/segment")).toBe("base/segment"); - expect(join("base/", "/segment")).toBe("base/segment"); - }); + describe("basic functionality", () => { + const basicTests: TestCase[] = [ + { description: "should return empty string for empty base", base: "", segments: [], expected: "" }, + { + description: "should return empty string for empty base with path", + base: "", + segments: ["path"], + expected: "", + }, + { + description: "should handle single segment", + base: "base", + segments: ["segment"], + expected: "base/segment", + }, + { + description: "should handle single segment with trailing slash on base", + base: "base/", + segments: ["segment"], + expected: "base/segment", + }, + { + description: "should handle single segment with leading slash", + base: "base", + segments: ["/segment"], + expected: "base/segment", + }, + { + description: "should handle single segment with both slashes", + base: "base/", + segments: ["/segment"], + expected: "base/segment", + }, + { + description: "should handle multiple segments", + base: "base", + segments: ["path1", "path2", "path3"], + expected: "base/path1/path2/path3", + }, + { + description: "should handle multiple segments with slashes", + base: "base/", + segments: ["/path1/", "/path2/", "/path3/"], + expected: "base/path1/path2/path3/", + }, + ]; - it("should handle multiple segments", () => { - expect(join("base", "path1", "path2", "path3")).toBe("base/path1/path2/path3"); - expect(join("base/", "/path1/", "/path2/", "/path3/")).toBe("base/path1/path2/path3/"); + basicTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); }); }); describe("URL handling", () => { - it("should handle absolute URLs", () => { - expect(join("https://example.com", "api", "v1")).toBe("https://example.com/api/v1"); - expect(join("https://example.com/", "/api/", "/v1/")).toBe("https://example.com/api/v1/"); - expect(join("https://example.com/base", "api", "v1")).toBe("https://example.com/base/api/v1"); - }); + const urlTests: TestCase[] = [ + { + description: "should handle absolute URLs", + base: "https://example.com", + segments: ["api", "v1"], + expected: "https://example.com/api/v1", + }, + { + description: "should handle absolute URLs with slashes", + base: "https://example.com/", + segments: ["/api/", "/v1/"], + expected: "https://example.com/api/v1/", + }, + { + description: "should handle absolute URLs with base path", + base: "https://example.com/base", + segments: ["api", "v1"], + expected: "https://example.com/base/api/v1", + }, + { + description: "should preserve URL query parameters", + base: "https://example.com?query=1", + segments: ["api"], + expected: 
"https://example.com/api?query=1", + }, + { + description: "should preserve URL fragments", + base: "https://example.com#fragment", + segments: ["api"], + expected: "https://example.com/api#fragment", + }, + { + description: "should preserve URL query and fragments", + base: "https://example.com?query=1#fragment", + segments: ["api"], + expected: "https://example.com/api?query=1#fragment", + }, + { + description: "should handle http protocol", + base: "http://example.com", + segments: ["api"], + expected: "http://example.com/api", + }, + { + description: "should handle ftp protocol", + base: "ftp://example.com", + segments: ["files"], + expected: "ftp://example.com/files", + }, + { + description: "should handle ws protocol", + base: "ws://example.com", + segments: ["socket"], + expected: "ws://example.com/socket", + }, + { + description: "should fallback to path joining for malformed URLs", + base: "not-a-url://", + segments: ["path"], + expected: "not-a-url:///path", + }, + ]; - it("should preserve URL query parameters and fragments", () => { - expect(join("https://example.com?query=1", "api")).toBe("https://example.com/api?query=1"); - expect(join("https://example.com#fragment", "api")).toBe("https://example.com/api#fragment"); - expect(join("https://example.com?query=1#fragment", "api")).toBe( - "https://example.com/api?query=1#fragment", - ); - }); - - it("should handle different protocols", () => { - expect(join("http://example.com", "api")).toBe("http://example.com/api"); - expect(join("ftp://example.com", "files")).toBe("ftp://example.com/files"); - expect(join("ws://example.com", "socket")).toBe("ws://example.com/socket"); - }); - - it("should fallback to path joining for malformed URLs", () => { - expect(join("not-a-url://", "path")).toBe("not-a-url:///path"); + urlTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); }); }); describe("edge cases", () => { - it("should handle empty segments", () => { - expect(join("base", "", "path")).toBe("base/path"); - expect(join("base", null as any, "path")).toBe("base/path"); - expect(join("base", undefined as any, "path")).toBe("base/path"); - }); - - it("should handle segments with only slashes", () => { - expect(join("base", "/", "path")).toBe("base/path"); - expect(join("base", "//", "path")).toBe("base/path"); - }); - - it("should handle base paths with trailing slashes", () => { - expect(join("base/", "path")).toBe("base/path"); - }); + const edgeCaseTests: TestCase[] = [ + { + description: "should handle empty segments", + base: "base", + segments: ["", "path"], + expected: "base/path", + }, + { + description: "should handle null segments", + base: "base", + segments: [null as any, "path"], + expected: "base/path", + }, + { + description: "should handle undefined segments", + base: "base", + segments: [undefined as any, "path"], + expected: "base/path", + }, + { + description: "should handle segments with only single slash", + base: "base", + segments: ["/", "path"], + expected: "base/path", + }, + { + description: "should handle segments with only double slash", + base: "base", + segments: ["//", "path"], + expected: "base/path", + }, + { + description: "should handle base paths with trailing slashes", + base: "base/", + segments: ["path"], + expected: "base/path", + }, + { + description: "should handle complex nested paths", + base: "api/v1/", + segments: ["/users/", "/123/", "/profile"], + expected: "api/v1/users/123/profile", + }, + ]; - 
it("should handle complex nested paths", () => { - expect(join("api/v1/", "/users/", "/123/", "/profile")).toBe("api/v1/users/123/profile"); + edgeCaseTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); }); }); describe("real-world scenarios", () => { - it("should handle API endpoint construction", () => { - const baseUrl = "https://api.example.com/v1"; - expect(join(baseUrl, "users", "123", "posts")).toBe("https://api.example.com/v1/users/123/posts"); - }); - - it("should handle file path construction", () => { - expect(join("/var/www", "html", "assets", "images")).toBe("/var/www/html/assets/images"); - }); + const realWorldTests: TestCase[] = [ + { + description: "should handle API endpoint construction", + base: "https://api.example.com/v1", + segments: ["users", "123", "posts"], + expected: "https://api.example.com/v1/users/123/posts", + }, + { + description: "should handle file path construction", + base: "/var/www", + segments: ["html", "assets", "images"], + expected: "/var/www/html/assets/images", + }, + { + description: "should handle relative path construction", + base: "../parent", + segments: ["child", "grandchild"], + expected: "../parent/child/grandchild", + }, + { + description: "should handle Windows-style paths", + base: "C:\\Users", + segments: ["Documents", "file.txt"], + expected: "C:\\Users/Documents/file.txt", + }, + ]; - it("should handle relative path construction", () => { - expect(join("../parent", "child", "grandchild")).toBe("../parent/child/grandchild"); - }); - - it("should handle Windows-style paths", () => { - expect(join("C:\\Users", "Documents", "file.txt")).toBe("C:\\Users/Documents/file.txt"); + realWorldTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); }); }); @@ -100,21 +235,50 @@ describe("join", () => { }); describe("trailing slash preservation", () => { - it("should preserve trailing slash on final result when base has trailing slash and no segments", () => { - expect(join("https://api.example.com/")).toBe("https://api.example.com/"); - expect(join("https://api.example.com/v1/")).toBe("https://api.example.com/v1/"); - }); - - it("should preserve trailing slash when last segment has trailing slash", () => { - expect(join("https://api.example.com", "users/")).toBe("https://api.example.com/users/"); - expect(join("api/v1", "users/")).toBe("api/v1/users/"); - }); + const trailingSlashTests: TestCase[] = [ + { + description: + "should preserve trailing slash on final result when base has trailing slash and no segments", + base: "https://api.example.com/", + segments: [], + expected: "https://api.example.com/", + }, + { + description: "should preserve trailing slash on v1 path", + base: "https://api.example.com/v1/", + segments: [], + expected: "https://api.example.com/v1/", + }, + { + description: "should preserve trailing slash when last segment has trailing slash", + base: "https://api.example.com", + segments: ["users/"], + expected: "https://api.example.com/users/", + }, + { + description: "should preserve trailing slash with relative path", + base: "api/v1", + segments: ["users/"], + expected: "api/v1/users/", + }, + { + description: "should preserve trailing slash with multiple segments", + base: "https://api.example.com", + segments: ["v1", "collections/"], + expected: "https://api.example.com/v1/collections/", + }, + { + description: "should preserve trailing 
slash with base path", + base: "base", + segments: ["path1", "path2/"], + expected: "base/path1/path2/", + }, + ]; - it("should preserve trailing slash with multiple segments where last has trailing slash", () => { - expect(join("https://api.example.com", "v1", "collections/")).toBe( - "https://api.example.com/v1/collections/", - ); - expect(join("base", "path1", "path2/")).toBe("base/path1/path2/"); + trailingSlashTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); }); }); }); diff --git a/tests/unit/url/qs.test.ts b/tests/unit/url/qs.test.ts index 80e7e04..42cdffb 100644 --- a/tests/unit/url/qs.test.ts +++ b/tests/unit/url/qs.test.ts @@ -1,187 +1,278 @@ import { toQueryString } from "../../../src/core/url/index"; describe("Test qs toQueryString", () => { - describe("Basic functionality", () => { - it("should return empty string for null/undefined", () => { - expect(toQueryString(null)).toBe(""); - expect(toQueryString(undefined)).toBe(""); - }); + interface BasicTestCase { + description: string; + input: any; + expected: string; + } - it("should return empty string for primitive values", () => { - expect(toQueryString("hello")).toBe(""); - expect(toQueryString(42)).toBe(""); - expect(toQueryString(true)).toBe(""); - expect(toQueryString(false)).toBe(""); - }); - - it("should handle empty objects", () => { - expect(toQueryString({})).toBe(""); - }); + describe("Basic functionality", () => { + const basicTests: BasicTestCase[] = [ + { description: "should return empty string for null", input: null, expected: "" }, + { description: "should return empty string for undefined", input: undefined, expected: "" }, + { description: "should return empty string for string primitive", input: "hello", expected: "" }, + { description: "should return empty string for number primitive", input: 42, expected: "" }, + { description: "should return empty string for true boolean", input: true, expected: "" }, + { description: "should return empty string for false boolean", input: false, expected: "" }, + { description: "should handle empty objects", input: {}, expected: "" }, + { + description: "should handle simple key-value pairs", + input: { name: "John", age: 30 }, + expected: "name=John&age=30", + }, + ]; - it("should handle simple key-value pairs", () => { - const obj = { name: "John", age: 30 }; - expect(toQueryString(obj)).toBe("name=John&age=30"); + basicTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(toQueryString(input)).toBe(expected); + }); }); }); describe("Array handling", () => { - it("should handle arrays with indices format (default)", () => { - const obj = { items: ["a", "b", "c"] }; - expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=b&items%5B2%5D=c"); - }); - - it("should handle arrays with repeat format", () => { - const obj = { items: ["a", "b", "c"] }; - expect(toQueryString(obj, { arrayFormat: "repeat" })).toBe("items=a&items=b&items=c"); - }); + interface ArrayTestCase { + description: string; + input: any; + options?: { arrayFormat?: "repeat" | "indices" }; + expected: string; + } - it("should handle empty arrays", () => { - const obj = { items: [] }; - expect(toQueryString(obj)).toBe(""); - }); - - it("should handle arrays with mixed types", () => { - const obj = { mixed: ["string", 42, true, false] }; - expect(toQueryString(obj)).toBe("mixed%5B0%5D=string&mixed%5B1%5D=42&mixed%5B2%5D=true&mixed%5B3%5D=false"); - }); - - it("should handle 
arrays with objects", () => { - const obj = { users: [{ name: "John" }, { name: "Jane" }] }; - expect(toQueryString(obj)).toBe("users%5B0%5D%5Bname%5D=John&users%5B1%5D%5Bname%5D=Jane"); - }); + const arrayTests: ArrayTestCase[] = [ + { + description: "should handle arrays with indices format (default)", + input: { items: ["a", "b", "c"] }, + expected: "items%5B0%5D=a&items%5B1%5D=b&items%5B2%5D=c", + }, + { + description: "should handle arrays with repeat format", + input: { items: ["a", "b", "c"] }, + options: { arrayFormat: "repeat" }, + expected: "items=a&items=b&items=c", + }, + { + description: "should handle empty arrays", + input: { items: [] }, + expected: "", + }, + { + description: "should handle arrays with mixed types", + input: { mixed: ["string", 42, true, false] }, + expected: "mixed%5B0%5D=string&mixed%5B1%5D=42&mixed%5B2%5D=true&mixed%5B3%5D=false", + }, + { + description: "should handle arrays with objects", + input: { users: [{ name: "John" }, { name: "Jane" }] }, + expected: "users%5B0%5D%5Bname%5D=John&users%5B1%5D%5Bname%5D=Jane", + }, + { + description: "should handle arrays with objects in repeat format", + input: { users: [{ name: "John" }, { name: "Jane" }] }, + options: { arrayFormat: "repeat" }, + expected: "users%5Bname%5D=John&users%5Bname%5D=Jane", + }, + ]; - it("should handle arrays with objects in repeat format", () => { - const obj = { users: [{ name: "John" }, { name: "Jane" }] }; - expect(toQueryString(obj, { arrayFormat: "repeat" })).toBe("users%5Bname%5D=John&users%5Bname%5D=Jane"); + arrayTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); }); }); describe("Nested objects", () => { - it("should handle nested objects", () => { - const obj = { user: { name: "John", age: 30 } }; - expect(toQueryString(obj)).toBe("user%5Bname%5D=John&user%5Bage%5D=30"); - }); - - it("should handle deeply nested objects", () => { - const obj = { user: { profile: { name: "John", settings: { theme: "dark" } } } }; - expect(toQueryString(obj)).toBe( - "user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark", - ); - }); + const nestedTests: BasicTestCase[] = [ + { + description: "should handle nested objects", + input: { user: { name: "John", age: 30 } }, + expected: "user%5Bname%5D=John&user%5Bage%5D=30", + }, + { + description: "should handle deeply nested objects", + input: { user: { profile: { name: "John", settings: { theme: "dark" } } } }, + expected: "user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark", + }, + { + description: "should handle empty nested objects", + input: { user: {} }, + expected: "", + }, + ]; - it("should handle empty nested objects", () => { - const obj = { user: {} }; - expect(toQueryString(obj)).toBe(""); + nestedTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(toQueryString(input)).toBe(expected); + }); }); }); describe("Encoding", () => { - it("should encode by default", () => { - const obj = { name: "John Doe", email: "john@example.com" }; - expect(toQueryString(obj)).toBe("name=John%20Doe&email=john%40example.com"); - }); + interface EncodingTestCase { + description: string; + input: any; + options?: { encode?: boolean }; + expected: string; + } - it("should not encode when encode is false", () => { - const obj = { name: "John Doe", email: "john@example.com" }; - expect(toQueryString(obj, { encode: false })).toBe("name=John 
Doe&email=john@example.com"); - }); - - it("should encode special characters in keys", () => { - const obj = { "user name": "John", "email[primary]": "john@example.com" }; - expect(toQueryString(obj)).toBe("user%20name=John&email%5Bprimary%5D=john%40example.com"); - }); + const encodingTests: EncodingTestCase[] = [ + { + description: "should encode by default", + input: { name: "John Doe", email: "john@example.com" }, + expected: "name=John%20Doe&email=john%40example.com", + }, + { + description: "should not encode when encode is false", + input: { name: "John Doe", email: "john@example.com" }, + options: { encode: false }, + expected: "name=John Doe&email=john@example.com", + }, + { + description: "should encode special characters in keys", + input: { "user name": "John", "email[primary]": "john@example.com" }, + expected: "user%20name=John&email%5Bprimary%5D=john%40example.com", + }, + { + description: "should not encode special characters in keys when encode is false", + input: { "user name": "John", "email[primary]": "john@example.com" }, + options: { encode: false }, + expected: "user name=John&email[primary]=john@example.com", + }, + ]; - it("should not encode special characters in keys when encode is false", () => { - const obj = { "user name": "John", "email[primary]": "john@example.com" }; - expect(toQueryString(obj, { encode: false })).toBe("user name=John&email[primary]=john@example.com"); + encodingTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); }); }); describe("Mixed scenarios", () => { - it("should handle complex nested structures", () => { - const obj = { - filters: { - status: ["active", "pending"], - category: { - type: "electronics", - subcategories: ["phones", "laptops"], + interface MixedTestCase { + description: string; + input: any; + options?: { arrayFormat?: "repeat" | "indices" }; + expected: string; + } + + const mixedTests: MixedTestCase[] = [ + { + description: "should handle complex nested structures", + input: { + filters: { + status: ["active", "pending"], + category: { + type: "electronics", + subcategories: ["phones", "laptops"], + }, }, + sort: { field: "name", direction: "asc" }, }, - sort: { field: "name", direction: "asc" }, - }; - expect(toQueryString(obj)).toBe( - "filters%5Bstatus%5D%5B0%5D=active&filters%5Bstatus%5D%5B1%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D%5B0%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D%5B1%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", - ); - }); - - it("should handle complex nested structures with repeat format", () => { - const obj = { - filters: { - status: ["active", "pending"], - category: { - type: "electronics", - subcategories: ["phones", "laptops"], + expected: + "filters%5Bstatus%5D%5B0%5D=active&filters%5Bstatus%5D%5B1%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D%5B0%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D%5B1%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", + }, + { + description: "should handle complex nested structures with repeat format", + input: { + filters: { + status: ["active", "pending"], + category: { + type: "electronics", + subcategories: ["phones", "laptops"], + }, }, + sort: { field: "name", direction: "asc" }, }, - sort: { field: "name", direction: "asc" }, - }; - expect(toQueryString(obj, { arrayFormat: "repeat" })).toBe( - 
"filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", - ); - }); - - it("should handle arrays with null/undefined values", () => { - const obj = { items: ["a", null, "c", undefined, "e"] }; - expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c&items%5B4%5D=e"); - }); + options: { arrayFormat: "repeat" }, + expected: + "filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", + }, + { + description: "should handle arrays with null/undefined values", + input: { items: ["a", null, "c", undefined, "e"] }, + expected: "items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c&items%5B4%5D=e", + }, + { + description: "should handle objects with null/undefined values", + input: { name: "John", age: null, email: undefined, active: true }, + expected: "name=John&age=&active=true", + }, + ]; - it("should handle objects with null/undefined values", () => { - const obj = { name: "John", age: null, email: undefined, active: true }; - expect(toQueryString(obj)).toBe("name=John&age=&active=true"); + mixedTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); }); }); describe("Edge cases", () => { - it("should handle numeric keys", () => { - const obj = { "0": "zero", "1": "one" }; - expect(toQueryString(obj)).toBe("0=zero&1=one"); - }); - - it("should handle boolean values in objects", () => { - const obj = { enabled: true, disabled: false }; - expect(toQueryString(obj)).toBe("enabled=true&disabled=false"); - }); - - it("should handle empty strings", () => { - const obj = { name: "", description: "test" }; - expect(toQueryString(obj)).toBe("name=&description=test"); - }); + const edgeCaseTests: BasicTestCase[] = [ + { + description: "should handle numeric keys", + input: { "0": "zero", "1": "one" }, + expected: "0=zero&1=one", + }, + { + description: "should handle boolean values in objects", + input: { enabled: true, disabled: false }, + expected: "enabled=true&disabled=false", + }, + { + description: "should handle empty strings", + input: { name: "", description: "test" }, + expected: "name=&description=test", + }, + { + description: "should handle zero values", + input: { count: 0, price: 0.0 }, + expected: "count=0&price=0", + }, + { + description: "should handle arrays with empty strings", + input: { items: ["a", "", "c"] }, + expected: "items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c", + }, + ]; - it("should handle zero values", () => { - const obj = { count: 0, price: 0.0 }; - expect(toQueryString(obj)).toBe("count=0&price=0"); - }); - - it("should handle arrays with empty strings", () => { - const obj = { items: ["a", "", "c"] }; - expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c"); + edgeCaseTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(toQueryString(input)).toBe(expected); + }); }); }); describe("Options combinations", () => { - it("should respect both arrayFormat and encode options", () => { - const obj = { items: ["a & b", "c & d"] }; - expect(toQueryString(obj, { arrayFormat: "repeat", encode: false })).toBe("items=a & b&items=c & d"); - }); + 
interface OptionsTestCase { + description: string; + input: any; + options?: { arrayFormat?: "repeat" | "indices"; encode?: boolean }; + expected: string; + } - it("should use default options when none provided", () => { - const obj = { items: ["a", "b"] }; - expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=b"); - }); + const optionsTests: OptionsTestCase[] = [ + { + description: "should respect both arrayFormat and encode options", + input: { items: ["a & b", "c & d"] }, + options: { arrayFormat: "repeat", encode: false }, + expected: "items=a & b&items=c & d", + }, + { + description: "should use default options when none provided", + input: { items: ["a", "b"] }, + expected: "items%5B0%5D=a&items%5B1%5D=b", + }, + { + description: "should merge provided options with defaults", + input: { items: ["a", "b"], name: "John Doe" }, + options: { encode: false }, + expected: "items[0]=a&items[1]=b&name=John Doe", + }, + ]; - it("should merge provided options with defaults", () => { - const obj = { items: ["a", "b"], name: "John Doe" }; - expect(toQueryString(obj, { encode: false })).toBe("items[0]=a&items[1]=b&name=John Doe"); + optionsTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); }); }); }); diff --git a/tests/wire/entities.test.ts b/tests/wire/entities.test.ts index 069e151..9a62d85 100644 --- a/tests/wire/entities.test.ts +++ b/tests/wire/entities.test.ts @@ -4,7 +4,7 @@ import * as Lattice from "../../src/api/index"; import { LatticeClient } from "../../src/Client"; import { mockServerPool } from "../mock-server/MockServerPool"; -describe("Entities", () => { +describe("EntitiesClient", () => { test("publishEntity (1)", async () => { const server = mockServerPool.createServer(); const client = new LatticeClient({ token: "test", environment: server.baseUrl }); @@ -552,7 +552,9 @@ describe("Entities", () => { .jsonBody(rawResponseBody) .build(); - const response = await client.entities.getEntity("entityId"); + const response = await client.entities.getEntity({ + entityId: "entityId", + }); expect(response).toEqual({ entityId: "entityId", description: "description", @@ -818,7 +820,9 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.getEntity("entityId"); + return await client.entities.getEntity({ + entityId: "entityId", + }); }).rejects.toThrow(Lattice.BadRequestError); }); @@ -836,7 +840,9 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.getEntity("entityId"); + return await client.entities.getEntity({ + entityId: "entityId", + }); }).rejects.toThrow(Lattice.UnauthorizedError); }); @@ -854,7 +860,9 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.getEntity("entityId"); + return await client.entities.getEntity({ + entityId: "entityId", + }); }).rejects.toThrow(Lattice.NotFoundError); }); @@ -987,7 +995,10 @@ describe("Entities", () => { .jsonBody(rawResponseBody) .build(); - const response = await client.entities.overrideEntity("entityId", "mil_view.disposition"); + const response = await client.entities.overrideEntity({ + entityId: "entityId", + fieldPath: "mil_view.disposition", + }); expect(response).toEqual({ entityId: "entityId", description: "description", @@ -1254,7 +1265,10 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.overrideEntity("entityId", "fieldPath"); + return 
await client.entities.overrideEntity({ + entityId: "entityId", + fieldPath: "fieldPath", + }); }).rejects.toThrow(Lattice.BadRequestError); }); @@ -1273,7 +1287,10 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.overrideEntity("entityId", "fieldPath"); + return await client.entities.overrideEntity({ + entityId: "entityId", + fieldPath: "fieldPath", + }); }).rejects.toThrow(Lattice.UnauthorizedError); }); @@ -1292,7 +1309,10 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.overrideEntity("entityId", "fieldPath"); + return await client.entities.overrideEntity({ + entityId: "entityId", + fieldPath: "fieldPath", + }); }).rejects.toThrow(Lattice.NotFoundError); }); @@ -1424,7 +1444,10 @@ describe("Entities", () => { .jsonBody(rawResponseBody) .build(); - const response = await client.entities.removeEntityOverride("entityId", "mil_view.disposition"); + const response = await client.entities.removeEntityOverride({ + entityId: "entityId", + fieldPath: "mil_view.disposition", + }); expect(response).toEqual({ entityId: "entityId", description: "description", @@ -1690,7 +1713,10 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.removeEntityOverride("entityId", "fieldPath"); + return await client.entities.removeEntityOverride({ + entityId: "entityId", + fieldPath: "fieldPath", + }); }).rejects.toThrow(Lattice.BadRequestError); }); @@ -1708,7 +1734,10 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.removeEntityOverride("entityId", "fieldPath"); + return await client.entities.removeEntityOverride({ + entityId: "entityId", + fieldPath: "fieldPath", + }); }).rejects.toThrow(Lattice.UnauthorizedError); }); @@ -1726,7 +1755,10 @@ describe("Entities", () => { .build(); await expect(async () => { - return await client.entities.removeEntityOverride("entityId", "fieldPath"); + return await client.entities.removeEntityOverride({ + entityId: "entityId", + fieldPath: "fieldPath", + }); }).rejects.toThrow(Lattice.NotFoundError); }); diff --git a/tests/wire/objects.test.ts b/tests/wire/objects.test.ts index bb1e519..7e4b181 100644 --- a/tests/wire/objects.test.ts +++ b/tests/wire/objects.test.ts @@ -4,7 +4,7 @@ import * as Lattice from "../../src/api/index"; import { LatticeClient } from "../../src/Client"; import { mockServerPool } from "../mock-server/MockServerPool"; -describe("Objects", () => { +describe("ObjectsClient", () => { test("listObjects (1)", async () => { const server = mockServerPool.createServer(); const client = new LatticeClient({ token: "test", environment: server.baseUrl }); @@ -36,12 +36,7 @@ describe("Objects", () => { ], next_page_token: "next_page_token", }; - const page = await client.objects.listObjects({ - prefix: "prefix", - sinceTimestamp: "2024-01-15T09:30:00Z", - pageToken: "pageToken", - allObjectsInMesh: true, - }); + const page = await client.objects.listObjects(); expect(expected.path_metadatas).toEqual(page.data); expect(page.hasNextPage()).toBe(true); @@ -91,7 +86,9 @@ describe("Objects", () => { server.mockEndpoint().delete("/api/v1/objects/objectPath").respondWith().statusCode(200).build(); - const response = await client.objects.deleteObject("objectPath"); + const response = await client.objects.deleteObject({ + objectPath: "objectPath", + }); expect(response).toEqual(undefined); }); @@ -109,7 +106,9 @@ describe("Objects", () => { .build(); await expect(async () => { - 
return await client.objects.deleteObject("objectPath"); + return await client.objects.deleteObject({ + objectPath: "objectPath", + }); }).rejects.toThrow(Lattice.BadRequestError); }); @@ -127,7 +126,9 @@ describe("Objects", () => { .build(); await expect(async () => { - return await client.objects.deleteObject("objectPath"); + return await client.objects.deleteObject({ + objectPath: "objectPath", + }); }).rejects.toThrow(Lattice.UnauthorizedError); }); @@ -145,7 +146,9 @@ describe("Objects", () => { .build(); await expect(async () => { - return await client.objects.deleteObject("objectPath"); + return await client.objects.deleteObject({ + objectPath: "objectPath", + }); }).rejects.toThrow(Lattice.NotFoundError); }); @@ -163,7 +166,9 @@ describe("Objects", () => { .build(); await expect(async () => { - return await client.objects.deleteObject("objectPath"); + return await client.objects.deleteObject({ + objectPath: "objectPath", + }); }).rejects.toThrow(Lattice.InternalServerError); }); @@ -173,7 +178,9 @@ describe("Objects", () => { server.mockEndpoint().head("/api/v1/objects/objectPath").respondWith().statusCode(200).build(); - const headers = await client.objects.getObjectMetadata("objectPath"); + const headers = await client.objects.getObjectMetadata({ + objectPath: "objectPath", + }); expect(headers).toBeInstanceOf(Headers); }); @@ -191,7 +198,9 @@ describe("Objects", () => { .build(); await expect(async () => { - return await client.objects.getObjectMetadata("objectPath"); + return await client.objects.getObjectMetadata({ + objectPath: "objectPath", + }); }).rejects.toThrow(Lattice.BadRequestError); }); @@ -209,7 +218,9 @@ describe("Objects", () => { .build(); await expect(async () => { - return await client.objects.getObjectMetadata("objectPath"); + return await client.objects.getObjectMetadata({ + objectPath: "objectPath", + }); }).rejects.toThrow(Lattice.UnauthorizedError); }); @@ -227,7 +238,9 @@ describe("Objects", () => { .build(); await expect(async () => { - return await client.objects.getObjectMetadata("objectPath"); + return await client.objects.getObjectMetadata({ + objectPath: "objectPath", + }); }).rejects.toThrow(Lattice.InternalServerError); }); }); diff --git a/tests/wire/tasks.test.ts b/tests/wire/tasks.test.ts index 29d22e5..09fdb7a 100644 --- a/tests/wire/tasks.test.ts +++ b/tests/wire/tasks.test.ts @@ -4,7 +4,7 @@ import * as Lattice from "../../src/api/index"; import { LatticeClient } from "../../src/Client"; import { mockServerPool } from "../mock-server/MockServerPool"; -describe("Tasks", () => { +describe("TasksClient", () => { test("createTask (1)", async () => { const server = mockServerPool.createServer(); const client = new LatticeClient({ token: "test", environment: server.baseUrl }); @@ -215,7 +215,9 @@ describe("Tasks", () => { .jsonBody(rawResponseBody) .build(); - const response = await client.tasks.getTask("taskId"); + const response = await client.tasks.getTask({ + taskId: "taskId", + }); expect(response).toEqual({ version: { taskId: "taskId", @@ -310,7 +312,9 @@ describe("Tasks", () => { .build(); await expect(async () => { - return await client.tasks.getTask("taskId"); + return await client.tasks.getTask({ + taskId: "taskId", + }); }).rejects.toThrow(Lattice.BadRequestError); }); @@ -328,7 +332,9 @@ describe("Tasks", () => { .build(); await expect(async () => { - return await client.tasks.getTask("taskId"); + return await client.tasks.getTask({ + taskId: "taskId", + }); }).rejects.toThrow(Lattice.UnauthorizedError); }); @@ -346,7 +352,9 @@ 
describe("Tasks", () => { .build(); await expect(async () => { - return await client.tasks.getTask("taskId"); + return await client.tasks.getTask({ + taskId: "taskId", + }); }).rejects.toThrow(Lattice.NotFoundError); }); @@ -396,7 +404,9 @@ describe("Tasks", () => { .jsonBody(rawResponseBody) .build(); - const response = await client.tasks.updateTaskStatus("taskId"); + const response = await client.tasks.updateTaskStatus({ + taskId: "taskId", + }); expect(response).toEqual({ version: { taskId: "taskId", @@ -492,7 +502,9 @@ describe("Tasks", () => { .build(); await expect(async () => { - return await client.tasks.updateTaskStatus("taskId"); + return await client.tasks.updateTaskStatus({ + taskId: "taskId", + }); }).rejects.toThrow(Lattice.BadRequestError); }); @@ -511,7 +523,9 @@ describe("Tasks", () => { .build(); await expect(async () => { - return await client.tasks.updateTaskStatus("taskId"); + return await client.tasks.updateTaskStatus({ + taskId: "taskId", + }); }).rejects.toThrow(Lattice.UnauthorizedError); }); @@ -530,7 +544,9 @@ describe("Tasks", () => { .build(); await expect(async () => { - return await client.tasks.updateTaskStatus("taskId"); + return await client.tasks.updateTaskStatus({ + taskId: "taskId", + }); }).rejects.toThrow(Lattice.NotFoundError); }); diff --git a/vitest.config.ts b/vitest.config.mts similarity index 85% rename from vitest.config.ts rename to vitest.config.mts index 677c585..ba2ec4f 100644 --- a/vitest.config.ts +++ b/vitest.config.mts @@ -10,6 +10,7 @@ export default defineConfig({ root: "./tests", include: ["**/*.test.{js,ts,jsx,tsx}"], exclude: ["wire/**"], + setupFiles: ["./setup.ts"], }, }, { @@ -18,7 +19,7 @@ export default defineConfig({ name: "wire", environment: "node", root: "./tests/wire", - setupFiles: ["../mock-server/setup.ts"], + setupFiles: ["../setup.ts", "../mock-server/setup.ts"], }, }, ],