diff --git a/.fern/metadata.json b/.fern/metadata.json
new file mode 100644
index 0000000..f28f365
--- /dev/null
+++ b/.fern/metadata.json
@@ -0,0 +1,8 @@
+{
+ "cliVersion": "0.0.0",
+ "generatorName": "fernapi/fern-typescript-sdk",
+ "generatorVersion": "99.99.99",
+ "generatorConfig": {
+ "namespaceExport": "Lattice"
+ }
+}
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 21fe64b..2db6106 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -11,7 +11,7 @@ jobs:
uses: actions/checkout@v4
- name: Set up node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
@@ -30,7 +30,7 @@ jobs:
uses: actions/checkout@v4
- name: Set up node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
@@ -50,7 +50,7 @@ jobs:
uses: actions/checkout@v4
- name: Set up node
- uses: actions/setup-node@v3
+ uses: actions/setup-node@v4
- name: Install pnpm
uses: pnpm/action-setup@v4
@@ -64,12 +64,15 @@ jobs:
- name: Publish to npm
run: |
npm config set //registry.npmjs.org/:_authToken ${NPM_TOKEN}
+ publish() { # use latest npm to ensure OIDC support
+ npx -y npm@latest publish "$@"
+ }
if [[ ${GITHUB_REF} == *alpha* ]]; then
- npm publish --access public --tag alpha
+ publish --access public --tag alpha
elif [[ ${GITHUB_REF} == *beta* ]]; then
- npm publish --access public --tag beta
+ publish --access public --tag beta
else
- npm publish --access public
+ publish --access public
fi
env:
NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
\ No newline at end of file
diff --git a/.npmignore b/.npmignore
index b7e5ad3..c0c40ac 100644
--- a/.npmignore
+++ b/.npmignore
@@ -4,6 +4,7 @@ tests
.gitignore
.github
.fernignore
+.prettierrc.yml
biome.json
tsconfig.json
yarn.lock
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..fe5bc2f
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,133 @@
+# Contributing
+
+Thanks for your interest in contributing to this SDK! This document provides guidelines for working on the project.
+
+## Getting Started
+
+### Prerequisites
+
+- Node.js 20 or higher
+- pnpm package manager
+
+### Installation
+
+Install the project dependencies:
+
+```bash
+pnpm install
+```
+
+### Building
+
+Build the project:
+
+```bash
+pnpm build
+```
+
+### Testing
+
+Run the test suite:
+
+```bash
+pnpm test
+```
+
+Run specific test types:
+- `pnpm test:unit` - Run unit tests
+- `pnpm test:wire` - Run wire/integration tests
+
+### Linting and Formatting
+
+Check code style:
+
+```bash
+pnpm run lint
+pnpm run format:check
+```
+
+Fix code style issues:
+
+```bash
+pnpm run lint:fix
+pnpm run format:fix
+```
+
+Or use the combined check command:
+
+```bash
+pnpm run check:fix
+```
+
+## About Generated Code
+
+**Important**: Most files in this SDK are automatically generated by [Fern](https://buildwithfern.com) from the API definition. Direct modifications to generated files will be overwritten the next time the SDK is regenerated.
+
+### Generated Files
+
+The following directories contain generated code:
+- `src/api/` - API client classes and types
+- `src/serialization/` - Serialization/deserialization logic
+- Most TypeScript files in `src/`
+
+### How to Customize
+
+If you need to customize the SDK, you have two options:
+
+#### Option 1: Use `.fernignore`
+
+For custom code that should persist across SDK regenerations:
+
+1. Create a `.fernignore` file in the project root
+2. Add file patterns for files you want to preserve (similar to `.gitignore` syntax)
+3. Add your custom code to those files
+
+Files listed in `.fernignore` will not be overwritten when the SDK is regenerated.
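+
+For example, a `.fernignore` might look like the following (the paths shown are illustrative, not part of this SDK):
+
+```
+# .fernignore uses .gitignore-style patterns
+# keep hand-written code and docs across regeneration
+src/custom/
+CONTRIBUTING.md
+```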
+
+For more information, see the [Fern documentation on custom code](https://buildwithfern.com/learn/sdks/overview/custom-code).
+
+#### Option 2: Contribute to the Generator
+
+If you want to change how code is generated for all users of this SDK:
+
+1. The TypeScript SDK generator lives in the [Fern repository](https://github.com/fern-api/fern)
+2. Generator code is located at `generators/typescript/sdk/`
+3. Follow the [Fern contributing guidelines](https://github.com/fern-api/fern/blob/main/CONTRIBUTING.md)
+4. Submit a pull request with your changes to the generator
+
+This approach is best for:
+- Bug fixes in generated code
+- New features that would benefit all users
+- Improvements to code generation patterns
+
+## Making Changes
+
+### Workflow
+
+1. Create a new branch for your changes
+2. Make your modifications
+3. Run tests to ensure nothing breaks: `pnpm test`
+4. Run linting and formatting: `pnpm run check:fix`
+5. Build the project: `pnpm build`
+6. Commit your changes with a clear commit message
+7. Push your branch and create a pull request
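+
+For example, a typical local check before opening a pull request might look like this (the branch name is illustrative):
+
+```bash
+git checkout -b my-feature
+pnpm install
+pnpm test
+pnpm run check:fix
+pnpm build
+```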
+
+### Commit Messages
+
+Write clear, descriptive commit messages that explain what changed and why.
+
+### Code Style
+
+This project uses automated code formatting and linting. Run `pnpm run check:fix` before committing to ensure your code meets the project's style guidelines.
+
+## Questions or Issues?
+
+If you have questions or run into issues:
+
+1. Check the [Fern documentation](https://buildwithfern.com)
+2. Search existing [GitHub issues](https://github.com/fern-api/fern/issues)
+3. Open a new issue if your question hasn't been addressed
+
+## License
+
+By contributing to this project, you agree that your contributions will be licensed under the same license as the project.
diff --git a/LICENSE b/LICENSE
index 9623e9f..87b1a69 100644
--- a/LICENSE
+++ b/LICENSE
@@ -186,4 +186,4 @@ of any court action, you agree to submit to the exclusive jurisdiction of the co
Notwithstanding this, you agree that Anduril shall still be allowed to apply for injunctive remedies (or an equivalent type of urgent legal
relief) in any jurisdiction.
-**April 14, 2025**
\ No newline at end of file
+**April 14, 2025**
diff --git a/README.md b/README.md
index 073c6dc..681d59a 100644
--- a/README.md
+++ b/README.md
@@ -28,7 +28,7 @@ For support with this library, please reach out to your Anduril representative.
## Reference
-A full reference for this library is available [here](https://github.com/anduril/lattice-sdk-javascript/blob/HEAD/./reference.md).
+A full reference for this library is available [here](https://github.com/fern-api/lattice-sdk-javascript/blob/HEAD/./reference.md).
## Usage
@@ -76,6 +76,21 @@ try {
}
```
+## Streaming Response
+
+Some endpoints stream their responses instead of returning the full payload at once.
+The SDK exposes these responses as async iterators, so you can consume them with a `for await...of` loop.
+
+```typescript
+import { LatticeClient } from "@anduril-industries/lattice-sdk";
+
+const client = new LatticeClient({ token: "YOUR_TOKEN" });
+const response = await client.entities.streamEntities();
+for await (const item of response) {
+ console.log(item);
+}
+```
+
## File Uploads
You can upload files using the client:
@@ -518,13 +533,13 @@ List endpoints are paginated. The SDK provides an iterator so that you can simpl
import { LatticeClient } from "@anduril-industries/lattice-sdk";
const client = new LatticeClient({ token: "YOUR_TOKEN" });
-const response = await client.objects.listObjects({
+const pageableResponse = await client.objects.listObjects({
prefix: "prefix",
sinceTimestamp: "2024-01-15T09:30:00Z",
pageToken: "pageToken",
allObjectsInMesh: true
});
-for await (const item of response) {
+for await (const item of pageableResponse) {
console.log(item);
}
@@ -538,6 +553,9 @@ let page = await client.objects.listObjects({
while (page.hasNextPage()) {
page = page.getNextPage();
}
+
+// You can also access the underlying response
+const response = page.response;
```
## Advanced
@@ -620,6 +638,69 @@ console.log(data);
console.log(rawResponse.headers['X-My-Header']);
```
+### Logging
+
+The SDK supports logging. You can configure the logger by passing in a `logging` object to the client options.
+
+```typescript
+import { LatticeClient, logging } from "@anduril-industries/lattice-sdk";
+
+const client = new LatticeClient({
+ ...
+ logging: {
+ level: logging.LogLevel.Debug, // defaults to logging.LogLevel.Info
+ logger: new logging.ConsoleLogger(), // defaults to ConsoleLogger
+ silent: false, // defaults to true, set to false to enable logging
+ }
+});
+```
+The `logging` object can have the following properties:
+- `level`: The log level to use. Defaults to `logging.LogLevel.Info`.
+- `logger`: The logger to use. Defaults to a `logging.ConsoleLogger`.
+- `silent`: Whether to silence the logger. Defaults to `true`.
+
+The `level` property can be one of the following values:
+- `logging.LogLevel.Debug`
+- `logging.LogLevel.Info`
+- `logging.LogLevel.Warn`
+- `logging.LogLevel.Error`
+
+To provide a custom logger, you can pass in an object that implements the `logging.ILogger` interface.
+
+**Custom logger examples**
+
+Here's an example using the popular `winston` logging library.
+```ts
+import winston from 'winston';
+
+const winstonLogger = winston.createLogger({...});
+
+const logger: logging.ILogger = {
+ debug: (msg, ...args) => winstonLogger.debug(msg, ...args),
+ info: (msg, ...args) => winstonLogger.info(msg, ...args),
+ warn: (msg, ...args) => winstonLogger.warn(msg, ...args),
+ error: (msg, ...args) => winstonLogger.error(msg, ...args),
+};
+```
+
+Here's an example using the popular `pino` logging library.
+
+```ts
+import pino from 'pino';
+
+const pinoLogger = pino({...});
+
+const logger: logging.ILogger = {
+ debug: (msg, ...args) => pinoLogger.debug(args, msg),
+ info: (msg, ...args) => pinoLogger.info(args, msg),
+ warn: (msg, ...args) => pinoLogger.warn(args, msg),
+ error: (msg, ...args) => pinoLogger.error(args, msg),
+};
+```
+
### Runtime Compatibility
diff --git a/biome.json b/biome.json
index b6890df..a777468 100644
--- a/biome.json
+++ b/biome.json
@@ -1,5 +1,5 @@
{
- "$schema": "https://biomejs.dev/schemas/2.2.5/schema.json",
+ "$schema": "https://biomejs.dev/schemas/2.3.1/schema.json",
"root": true,
"vcs": {
"enabled": false
@@ -7,16 +7,21 @@
"files": {
"ignoreUnknown": true,
"includes": [
- "./**",
- "!dist",
- "!lib",
- "!*.tsbuildinfo",
- "!_tmp_*",
- "!*.tmp",
- "!.tmp/",
- "!*.log",
- "!.DS_Store",
- "!Thumbs.db"
+ "**",
+ "!!dist",
+ "!!**/dist",
+ "!!lib",
+ "!!**/lib",
+ "!!_tmp_*",
+ "!!**/_tmp_*",
+ "!!*.tmp",
+ "!!**/*.tmp",
+ "!!.tmp/",
+ "!!**/.tmp/",
+ "!!*.log",
+ "!!**/*.log",
+ "!!**/.DS_Store",
+ "!!**/Thumbs.db"
]
},
"formatter": {
diff --git a/package.json b/package.json
index 8584c1c..fa4e9b7 100644
--- a/package.json
+++ b/package.json
@@ -1,8 +1,8 @@
{
"name": "@anduril-industries/lattice-sdk",
- "version": "3.0.0",
+ "version": "3.0.1",
"private": false,
- "repository": "github:anduril/lattice-sdk-javascript",
+ "repository": "github:fern-api/lattice-sdk-javascript",
"license": "See LICENSE",
"type": "commonjs",
"main": "./dist/cjs/index.js",
@@ -31,6 +31,9 @@
],
"scripts": {
"format": "biome format --write --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none",
+ "format:check": "biome format --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none",
+ "lint": "biome lint --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none",
+ "lint:fix": "biome lint --fix --unsafe --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none",
"check": "biome check --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none",
"check:fix": "biome check --fix --unsafe --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none",
"build": "pnpm build:cjs && pnpm build:esm",
@@ -40,14 +43,15 @@
"test:unit": "vitest --project unit",
"test:wire": "vitest --project wire"
},
+ "dependencies": {},
"devDependencies": {
"webpack": "^5.97.1",
"ts-loader": "^9.5.1",
"vitest": "^3.2.4",
"msw": "2.11.2",
"@types/node": "^18.19.70",
- "@biomejs/biome": "2.2.5",
- "typescript": "~5.7.2"
+ "typescript": "~5.7.2",
+ "@biomejs/biome": "2.3.1"
},
"browser": {
"fs": false,
@@ -55,7 +59,7 @@
"path": false,
"stream": false
},
- "packageManager": "pnpm@10.14.0",
+ "packageManager": "pnpm@10.20.0",
"engines": {
"node": ">=18.0.0"
},
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 5b89b8e..e6c5262 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -9,8 +9,8 @@ importers:
.:
devDependencies:
'@biomejs/biome':
- specifier: 2.2.5
- version: 2.2.5
+ specifier: 2.3.1
+ version: 2.3.1
'@types/node':
specifier: ^18.19.70
version: 18.19.130
@@ -25,62 +25,62 @@ importers:
version: 5.7.3
vitest:
specifier: ^3.2.4
- version: 3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.0)
+ version: 3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.1)
webpack:
specifier: ^5.97.1
version: 5.102.1
packages:
- '@biomejs/biome@2.2.5':
- resolution: {integrity: sha512-zcIi+163Rc3HtyHbEO7CjeHq8DjQRs40HsGbW6vx2WI0tg8mYQOPouhvHSyEnCBAorfYNnKdR64/IxO7xQ5faw==}
+ '@biomejs/biome@2.3.1':
+ resolution: {integrity: sha512-A29evf1R72V5bo4o2EPxYMm5mtyGvzp2g+biZvRFx29nWebGyyeOSsDWGx3tuNNMFRepGwxmA9ZQ15mzfabK2w==}
engines: {node: '>=14.21.3'}
hasBin: true
- '@biomejs/cli-darwin-arm64@2.2.5':
- resolution: {integrity: sha512-MYT+nZ38wEIWVcL5xLyOhYQQ7nlWD0b/4mgATW2c8dvq7R4OQjt/XGXFkXrmtWmQofaIM14L7V8qIz/M+bx5QQ==}
+ '@biomejs/cli-darwin-arm64@2.3.1':
+ resolution: {integrity: sha512-ombSf3MnTUueiYGN1SeI9tBCsDUhpWzOwS63Dove42osNh0PfE1cUtHFx6eZ1+MYCCLwXzlFlYFdrJ+U7h6LcA==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [darwin]
- '@biomejs/cli-darwin-x64@2.2.5':
- resolution: {integrity: sha512-FLIEl73fv0R7dI10EnEiZLw+IMz3mWLnF95ASDI0kbx6DDLJjWxE5JxxBfmG+udz1hIDd3fr5wsuP7nwuTRdAg==}
+ '@biomejs/cli-darwin-x64@2.3.1':
+ resolution: {integrity: sha512-pcOfwyoQkrkbGvXxRvZNe5qgD797IowpJPovPX5biPk2FwMEV+INZqfCaz4G5bVq9hYnjwhRMamg11U4QsRXrQ==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [darwin]
- '@biomejs/cli-linux-arm64-musl@2.2.5':
- resolution: {integrity: sha512-5Ov2wgAFwqDvQiESnu7b9ufD1faRa+40uwrohgBopeY84El2TnBDoMNXx6iuQdreoFGjwW8vH6k68G21EpNERw==}
+ '@biomejs/cli-linux-arm64-musl@2.3.1':
+ resolution: {integrity: sha512-+DZYv8l7FlUtTrWs1Tdt1KcNCAmRO87PyOnxKGunbWm5HKg1oZBSbIIPkjrCtDZaeqSG1DiGx7qF+CPsquQRcg==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [linux]
- '@biomejs/cli-linux-arm64@2.2.5':
- resolution: {integrity: sha512-5DjiiDfHqGgR2MS9D+AZ8kOfrzTGqLKywn8hoXpXXlJXIECGQ32t+gt/uiS2XyGBM2XQhR6ztUvbjZWeccFMoQ==}
+ '@biomejs/cli-linux-arm64@2.3.1':
+ resolution: {integrity: sha512-td5O8pFIgLs8H1sAZsD6v+5quODihyEw4nv2R8z7swUfIK1FKk+15e4eiYVLcAE4jUqngvh4j3JCNgg0Y4o4IQ==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [linux]
- '@biomejs/cli-linux-x64-musl@2.2.5':
- resolution: {integrity: sha512-AVqLCDb/6K7aPNIcxHaTQj01sl1m989CJIQFQEaiQkGr2EQwyOpaATJ473h+nXDUuAcREhccfRpe/tu+0wu0eQ==}
+ '@biomejs/cli-linux-x64-musl@2.3.1':
+ resolution: {integrity: sha512-Y3Ob4nqgv38Mh+6EGHltuN+Cq8aj/gyMTJYzkFZV2AEj+9XzoXB9VNljz9pjfFNHUxvLEV4b55VWyxozQTBaUQ==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [linux]
- '@biomejs/cli-linux-x64@2.2.5':
- resolution: {integrity: sha512-fq9meKm1AEXeAWan3uCg6XSP5ObA6F/Ovm89TwaMiy1DNIwdgxPkNwxlXJX8iM6oRbFysYeGnT0OG8diCWb9ew==}
+ '@biomejs/cli-linux-x64@2.3.1':
+ resolution: {integrity: sha512-PYWgEO7up7XYwSAArOpzsVCiqxBCXy53gsReAb1kKYIyXaoAlhBaBMvxR/k2Rm9aTuZ662locXUmPk/Aj+Xu+Q==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [linux]
- '@biomejs/cli-win32-arm64@2.2.5':
- resolution: {integrity: sha512-xaOIad4wBambwJa6mdp1FigYSIF9i7PCqRbvBqtIi9y29QtPVQ13sDGtUnsRoe6SjL10auMzQ6YAe+B3RpZXVg==}
+ '@biomejs/cli-win32-arm64@2.3.1':
+ resolution: {integrity: sha512-RHIG/zgo+69idUqVvV3n8+j58dKYABRpMyDmfWu2TITC+jwGPiEaT0Q3RKD+kQHiS80mpBrST0iUGeEXT0bU9A==}
engines: {node: '>=14.21.3'}
cpu: [arm64]
os: [win32]
- '@biomejs/cli-win32-x64@2.2.5':
- resolution: {integrity: sha512-F/jhuXCssPFAuciMhHKk00xnCAxJRS/pUzVfXYmOMUp//XW7mO6QeCjsjvnm8L4AO/dG2VOB0O+fJPiJ2uXtIw==}
+ '@biomejs/cli-win32-x64@2.3.1':
+ resolution: {integrity: sha512-izl30JJ5Dp10mi90Eko47zhxE6pYyWPcnX1NQxKpL/yMhXxf95oLTzfpu4q+MDBh/gemNqyJEwjBpe0MT5iWPA==}
engines: {node: '>=14.21.3'}
cpu: [x64]
os: [win32]
@@ -91,168 +91,168 @@ packages:
'@bundled-es-modules/statuses@1.0.1':
resolution: {integrity: sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==}
- '@esbuild/aix-ppc64@0.25.11':
- resolution: {integrity: sha512-Xt1dOL13m8u0WE8iplx9Ibbm+hFAO0GsU2P34UNoDGvZYkY8ifSiy6Zuc1lYxfG7svWE2fzqCUmFp5HCn51gJg==}
+ '@esbuild/aix-ppc64@0.25.12':
+ resolution: {integrity: sha512-Hhmwd6CInZ3dwpuGTF8fJG6yoWmsToE+vYgD4nytZVxcu1ulHpUQRAB1UJ8+N1Am3Mz4+xOByoQoSZf4D+CpkA==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [aix]
- '@esbuild/android-arm64@0.25.11':
- resolution: {integrity: sha512-9slpyFBc4FPPz48+f6jyiXOx/Y4v34TUeDDXJpZqAWQn/08lKGeD8aDp9TMn9jDz2CiEuHwfhRmGBvpnd/PWIQ==}
+ '@esbuild/android-arm64@0.25.12':
+ resolution: {integrity: sha512-6AAmLG7zwD1Z159jCKPvAxZd4y/VTO0VkprYy+3N2FtJ8+BQWFXU+OxARIwA46c5tdD9SsKGZ/1ocqBS/gAKHg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [android]
- '@esbuild/android-arm@0.25.11':
- resolution: {integrity: sha512-uoa7dU+Dt3HYsethkJ1k6Z9YdcHjTrSb5NUy66ZfZaSV8hEYGD5ZHbEMXnqLFlbBflLsl89Zke7CAdDJ4JI+Gg==}
+ '@esbuild/android-arm@0.25.12':
+ resolution: {integrity: sha512-VJ+sKvNA/GE7Ccacc9Cha7bpS8nyzVv0jdVgwNDaR4gDMC/2TTRc33Ip8qrNYUcpkOHUT5OZ0bUcNNVZQ9RLlg==}
engines: {node: '>=18'}
cpu: [arm]
os: [android]
- '@esbuild/android-x64@0.25.11':
- resolution: {integrity: sha512-Sgiab4xBjPU1QoPEIqS3Xx+R2lezu0LKIEcYe6pftr56PqPygbB7+szVnzoShbx64MUupqoE0KyRlN7gezbl8g==}
+ '@esbuild/android-x64@0.25.12':
+ resolution: {integrity: sha512-5jbb+2hhDHx5phYR2By8GTWEzn6I9UqR11Kwf22iKbNpYrsmRB18aX/9ivc5cabcUiAT/wM+YIZ6SG9QO6a8kg==}
engines: {node: '>=18'}
cpu: [x64]
os: [android]
- '@esbuild/darwin-arm64@0.25.11':
- resolution: {integrity: sha512-VekY0PBCukppoQrycFxUqkCojnTQhdec0vevUL/EDOCnXd9LKWqD/bHwMPzigIJXPhC59Vd1WFIL57SKs2mg4w==}
+ '@esbuild/darwin-arm64@0.25.12':
+ resolution: {integrity: sha512-N3zl+lxHCifgIlcMUP5016ESkeQjLj/959RxxNYIthIg+CQHInujFuXeWbWMgnTo4cp5XVHqFPmpyu9J65C1Yg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [darwin]
- '@esbuild/darwin-x64@0.25.11':
- resolution: {integrity: sha512-+hfp3yfBalNEpTGp9loYgbknjR695HkqtY3d3/JjSRUyPg/xd6q+mQqIb5qdywnDxRZykIHs3axEqU6l1+oWEQ==}
+ '@esbuild/darwin-x64@0.25.12':
+ resolution: {integrity: sha512-HQ9ka4Kx21qHXwtlTUVbKJOAnmG1ipXhdWTmNXiPzPfWKpXqASVcWdnf2bnL73wgjNrFXAa3yYvBSd9pzfEIpA==}
engines: {node: '>=18'}
cpu: [x64]
os: [darwin]
- '@esbuild/freebsd-arm64@0.25.11':
- resolution: {integrity: sha512-CmKjrnayyTJF2eVuO//uSjl/K3KsMIeYeyN7FyDBjsR3lnSJHaXlVoAK8DZa7lXWChbuOk7NjAc7ygAwrnPBhA==}
+ '@esbuild/freebsd-arm64@0.25.12':
+ resolution: {integrity: sha512-gA0Bx759+7Jve03K1S0vkOu5Lg/85dou3EseOGUes8flVOGxbhDDh/iZaoek11Y8mtyKPGF3vP8XhnkDEAmzeg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [freebsd]
- '@esbuild/freebsd-x64@0.25.11':
- resolution: {integrity: sha512-Dyq+5oscTJvMaYPvW3x3FLpi2+gSZTCE/1ffdwuM6G1ARang/mb3jvjxs0mw6n3Lsw84ocfo9CrNMqc5lTfGOw==}
+ '@esbuild/freebsd-x64@0.25.12':
+ resolution: {integrity: sha512-TGbO26Yw2xsHzxtbVFGEXBFH0FRAP7gtcPE7P5yP7wGy7cXK2oO7RyOhL5NLiqTlBh47XhmIUXuGciXEqYFfBQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [freebsd]
- '@esbuild/linux-arm64@0.25.11':
- resolution: {integrity: sha512-Qr8AzcplUhGvdyUF08A1kHU3Vr2O88xxP0Tm8GcdVOUm25XYcMPp2YqSVHbLuXzYQMf9Bh/iKx7YPqECs6ffLA==}
+ '@esbuild/linux-arm64@0.25.12':
+ resolution: {integrity: sha512-8bwX7a8FghIgrupcxb4aUmYDLp8pX06rGh5HqDT7bB+8Rdells6mHvrFHHW2JAOPZUbnjUpKTLg6ECyzvas2AQ==}
engines: {node: '>=18'}
cpu: [arm64]
os: [linux]
- '@esbuild/linux-arm@0.25.11':
- resolution: {integrity: sha512-TBMv6B4kCfrGJ8cUPo7vd6NECZH/8hPpBHHlYI3qzoYFvWu2AdTvZNuU/7hsbKWqu/COU7NIK12dHAAqBLLXgw==}
+ '@esbuild/linux-arm@0.25.12':
+ resolution: {integrity: sha512-lPDGyC1JPDou8kGcywY0YILzWlhhnRjdof3UlcoqYmS9El818LLfJJc3PXXgZHrHCAKs/Z2SeZtDJr5MrkxtOw==}
engines: {node: '>=18'}
cpu: [arm]
os: [linux]
- '@esbuild/linux-ia32@0.25.11':
- resolution: {integrity: sha512-TmnJg8BMGPehs5JKrCLqyWTVAvielc615jbkOirATQvWWB1NMXY77oLMzsUjRLa0+ngecEmDGqt5jiDC6bfvOw==}
+ '@esbuild/linux-ia32@0.25.12':
+ resolution: {integrity: sha512-0y9KrdVnbMM2/vG8KfU0byhUN+EFCny9+8g202gYqSSVMonbsCfLjUO+rCci7pM0WBEtz+oK/PIwHkzxkyharA==}
engines: {node: '>=18'}
cpu: [ia32]
os: [linux]
- '@esbuild/linux-loong64@0.25.11':
- resolution: {integrity: sha512-DIGXL2+gvDaXlaq8xruNXUJdT5tF+SBbJQKbWy/0J7OhU8gOHOzKmGIlfTTl6nHaCOoipxQbuJi7O++ldrxgMw==}
+ '@esbuild/linux-loong64@0.25.12':
+ resolution: {integrity: sha512-h///Lr5a9rib/v1GGqXVGzjL4TMvVTv+s1DPoxQdz7l/AYv6LDSxdIwzxkrPW438oUXiDtwM10o9PmwS/6Z0Ng==}
engines: {node: '>=18'}
cpu: [loong64]
os: [linux]
- '@esbuild/linux-mips64el@0.25.11':
- resolution: {integrity: sha512-Osx1nALUJu4pU43o9OyjSCXokFkFbyzjXb6VhGIJZQ5JZi8ylCQ9/LFagolPsHtgw6himDSyb5ETSfmp4rpiKQ==}
+ '@esbuild/linux-mips64el@0.25.12':
+ resolution: {integrity: sha512-iyRrM1Pzy9GFMDLsXn1iHUm18nhKnNMWscjmp4+hpafcZjrr2WbT//d20xaGljXDBYHqRcl8HnxbX6uaA/eGVw==}
engines: {node: '>=18'}
cpu: [mips64el]
os: [linux]
- '@esbuild/linux-ppc64@0.25.11':
- resolution: {integrity: sha512-nbLFgsQQEsBa8XSgSTSlrnBSrpoWh7ioFDUmwo158gIm5NNP+17IYmNWzaIzWmgCxq56vfr34xGkOcZ7jX6CPw==}
+ '@esbuild/linux-ppc64@0.25.12':
+ resolution: {integrity: sha512-9meM/lRXxMi5PSUqEXRCtVjEZBGwB7P/D4yT8UG/mwIdze2aV4Vo6U5gD3+RsoHXKkHCfSxZKzmDssVlRj1QQA==}
engines: {node: '>=18'}
cpu: [ppc64]
os: [linux]
- '@esbuild/linux-riscv64@0.25.11':
- resolution: {integrity: sha512-HfyAmqZi9uBAbgKYP1yGuI7tSREXwIb438q0nqvlpxAOs3XnZ8RsisRfmVsgV486NdjD7Mw2UrFSw51lzUk1ww==}
+ '@esbuild/linux-riscv64@0.25.12':
+ resolution: {integrity: sha512-Zr7KR4hgKUpWAwb1f3o5ygT04MzqVrGEGXGLnj15YQDJErYu/BGg+wmFlIDOdJp0PmB0lLvxFIOXZgFRrdjR0w==}
engines: {node: '>=18'}
cpu: [riscv64]
os: [linux]
- '@esbuild/linux-s390x@0.25.11':
- resolution: {integrity: sha512-HjLqVgSSYnVXRisyfmzsH6mXqyvj0SA7pG5g+9W7ESgwA70AXYNpfKBqh1KbTxmQVaYxpzA/SvlB9oclGPbApw==}
+ '@esbuild/linux-s390x@0.25.12':
+ resolution: {integrity: sha512-MsKncOcgTNvdtiISc/jZs/Zf8d0cl/t3gYWX8J9ubBnVOwlk65UIEEvgBORTiljloIWnBzLs4qhzPkJcitIzIg==}
engines: {node: '>=18'}
cpu: [s390x]
os: [linux]
- '@esbuild/linux-x64@0.25.11':
- resolution: {integrity: sha512-HSFAT4+WYjIhrHxKBwGmOOSpphjYkcswF449j6EjsjbinTZbp8PJtjsVK1XFJStdzXdy/jaddAep2FGY+wyFAQ==}
+ '@esbuild/linux-x64@0.25.12':
+ resolution: {integrity: sha512-uqZMTLr/zR/ed4jIGnwSLkaHmPjOjJvnm6TVVitAa08SLS9Z0VM8wIRx7gWbJB5/J54YuIMInDquWyYvQLZkgw==}
engines: {node: '>=18'}
cpu: [x64]
os: [linux]
- '@esbuild/netbsd-arm64@0.25.11':
- resolution: {integrity: sha512-hr9Oxj1Fa4r04dNpWr3P8QKVVsjQhqrMSUzZzf+LZcYjZNqhA3IAfPQdEh1FLVUJSiu6sgAwp3OmwBfbFgG2Xg==}
+ '@esbuild/netbsd-arm64@0.25.12':
+ resolution: {integrity: sha512-xXwcTq4GhRM7J9A8Gv5boanHhRa/Q9KLVmcyXHCTaM4wKfIpWkdXiMog/KsnxzJ0A1+nD+zoecuzqPmCRyBGjg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [netbsd]
- '@esbuild/netbsd-x64@0.25.11':
- resolution: {integrity: sha512-u7tKA+qbzBydyj0vgpu+5h5AeudxOAGncb8N6C9Kh1N4n7wU1Xw1JDApsRjpShRpXRQlJLb9wY28ELpwdPcZ7A==}
+ '@esbuild/netbsd-x64@0.25.12':
+ resolution: {integrity: sha512-Ld5pTlzPy3YwGec4OuHh1aCVCRvOXdH8DgRjfDy/oumVovmuSzWfnSJg+VtakB9Cm0gxNO9BzWkj6mtO1FMXkQ==}
engines: {node: '>=18'}
cpu: [x64]
os: [netbsd]
- '@esbuild/openbsd-arm64@0.25.11':
- resolution: {integrity: sha512-Qq6YHhayieor3DxFOoYM1q0q1uMFYb7cSpLD2qzDSvK1NAvqFi8Xgivv0cFC6J+hWVw2teCYltyy9/m/14ryHg==}
+ '@esbuild/openbsd-arm64@0.25.12':
+ resolution: {integrity: sha512-fF96T6KsBo/pkQI950FARU9apGNTSlZGsv1jZBAlcLL1MLjLNIWPBkj5NlSz8aAzYKg+eNqknrUJ24QBybeR5A==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openbsd]
- '@esbuild/openbsd-x64@0.25.11':
- resolution: {integrity: sha512-CN+7c++kkbrckTOz5hrehxWN7uIhFFlmS/hqziSFVWpAzpWrQoAG4chH+nN3Be+Kzv/uuo7zhX716x3Sn2Jduw==}
+ '@esbuild/openbsd-x64@0.25.12':
+ resolution: {integrity: sha512-MZyXUkZHjQxUvzK7rN8DJ3SRmrVrke8ZyRusHlP+kuwqTcfWLyqMOE3sScPPyeIXN/mDJIfGXvcMqCgYKekoQw==}
engines: {node: '>=18'}
cpu: [x64]
os: [openbsd]
- '@esbuild/openharmony-arm64@0.25.11':
- resolution: {integrity: sha512-rOREuNIQgaiR+9QuNkbkxubbp8MSO9rONmwP5nKncnWJ9v5jQ4JxFnLu4zDSRPf3x4u+2VN4pM4RdyIzDty/wQ==}
+ '@esbuild/openharmony-arm64@0.25.12':
+ resolution: {integrity: sha512-rm0YWsqUSRrjncSXGA7Zv78Nbnw4XL6/dzr20cyrQf7ZmRcsovpcRBdhD43Nuk3y7XIoW2OxMVvwuRvk9XdASg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [openharmony]
- '@esbuild/sunos-x64@0.25.11':
- resolution: {integrity: sha512-nq2xdYaWxyg9DcIyXkZhcYulC6pQ2FuCgem3LI92IwMgIZ69KHeY8T4Y88pcwoLIjbed8n36CyKoYRDygNSGhA==}
+ '@esbuild/sunos-x64@0.25.12':
+ resolution: {integrity: sha512-3wGSCDyuTHQUzt0nV7bocDy72r2lI33QL3gkDNGkod22EsYl04sMf0qLb8luNKTOmgF/eDEDP5BFNwoBKH441w==}
engines: {node: '>=18'}
cpu: [x64]
os: [sunos]
- '@esbuild/win32-arm64@0.25.11':
- resolution: {integrity: sha512-3XxECOWJq1qMZ3MN8srCJ/QfoLpL+VaxD/WfNRm1O3B4+AZ/BnLVgFbUV3eiRYDMXetciH16dwPbbHqwe1uU0Q==}
+ '@esbuild/win32-arm64@0.25.12':
+ resolution: {integrity: sha512-rMmLrur64A7+DKlnSuwqUdRKyd3UE7oPJZmnljqEptesKM8wx9J8gx5u0+9Pq0fQQW8vqeKebwNXdfOyP+8Bsg==}
engines: {node: '>=18'}
cpu: [arm64]
os: [win32]
- '@esbuild/win32-ia32@0.25.11':
- resolution: {integrity: sha512-3ukss6gb9XZ8TlRyJlgLn17ecsK4NSQTmdIXRASVsiS2sQ6zPPZklNJT5GR5tE/MUarymmy8kCEf5xPCNCqVOA==}
+ '@esbuild/win32-ia32@0.25.12':
+ resolution: {integrity: sha512-HkqnmmBoCbCwxUKKNPBixiWDGCpQGVsrQfJoVGYLPT41XWF8lHuE5N6WhVia2n4o5QK5M4tYr21827fNhi4byQ==}
engines: {node: '>=18'}
cpu: [ia32]
os: [win32]
- '@esbuild/win32-x64@0.25.11':
- resolution: {integrity: sha512-D7Hpz6A2L4hzsRpPaCYkQnGOotdUpDzSGRIv9I+1ITdHROSFUWW95ZPZWQmGka1Fg7W3zFJowyn9WGwMJ0+KPA==}
+ '@esbuild/win32-x64@0.25.12':
+ resolution: {integrity: sha512-alJC0uCZpTFrSL0CCDjcgleBXPnCrEAhTBILpeAp7M/OFgoqtAetfBzX0xM00MUsVVPpVjlPuMbREqnZCXaTnA==}
engines: {node: '>=18'}
cpu: [x64]
os: [win32]
- '@inquirer/ansi@1.0.1':
- resolution: {integrity: sha512-yqq0aJW/5XPhi5xOAL1xRCpe1eh8UFVgYFpFsjEqmIR8rKLyP+HINvFXwUaxYICflJrVlxnp7lLN6As735kVpw==}
+ '@inquirer/ansi@1.0.2':
+ resolution: {integrity: sha512-S8qNSZiYzFd0wAcyG5AXCvUHC5Sr7xpZ9wZ2py9XR88jUz8wooStVx5M6dRzczbBWjic9NP7+rY0Xi7qqK/aMQ==}
engines: {node: '>=18'}
- '@inquirer/confirm@5.1.19':
- resolution: {integrity: sha512-wQNz9cfcxrtEnUyG5PndC8g3gZ7lGDBzmWiXZkX8ot3vfZ+/BLjR8EvyGX4YzQLeVqtAlY/YScZpW7CW8qMoDQ==}
+ '@inquirer/confirm@5.1.20':
+ resolution: {integrity: sha512-HDGiWh2tyRZa0M1ZnEIUCQro25gW/mN8ODByicQrbR1yHx4hT+IOpozCMi5TgBtUdklLwRI2mv14eNpftDluEw==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
@@ -260,8 +260,8 @@ packages:
'@types/node':
optional: true
- '@inquirer/core@10.3.0':
- resolution: {integrity: sha512-Uv2aPPPSK5jeCplQmQ9xadnFx2Zhj9b5Dj7bU6ZeCdDNNY11nhYy4btcSdtDguHqCT2h5oNeQTcUNSGGLA7NTA==}
+ '@inquirer/core@10.3.1':
+ resolution: {integrity: sha512-hzGKIkfomGFPgxKmnKEKeA+uCYBqC+TKtRx5LgyHRCrF6S2MliwRIjp3sUaWwVzMp7ZXVs8elB0Tfe682Rpg4w==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
@@ -269,12 +269,12 @@ packages:
'@types/node':
optional: true
- '@inquirer/figures@1.0.14':
- resolution: {integrity: sha512-DbFgdt+9/OZYFM+19dbpXOSeAstPy884FPy1KjDu4anWwymZeOYhMY1mdFri172htv6mvc/uvIAAi7b7tvjJBQ==}
+ '@inquirer/figures@1.0.15':
+ resolution: {integrity: sha512-t2IEY+unGHOzAaVM5Xx6DEWKeXlDDcNPeDyUpsRc6CUhBfU3VQOEl+Vssh7VNp1dR8MdUJBWhuObjXCsVpjN5g==}
engines: {node: '>=18'}
- '@inquirer/type@3.0.9':
- resolution: {integrity: sha512-QPaNt/nmE2bLGQa9b7wwyRJoLZ7pN6rcyXvzU0YCmivmJyq1BVo94G98tStRWkoD1RgDX5C+dPlhhHzNdu/W/w==}
+ '@inquirer/type@3.0.10':
+ resolution: {integrity: sha512-BvziSRxfz5Ov8ch0z/n3oijRSEcEsHnhggm4xFZe93DHcUCTlutlq9Ox4SVENAfcRD22UQq7T/atg9Wr3k09eA==}
engines: {node: '>=18'}
peerDependencies:
'@types/node': '>=18'
@@ -311,113 +311,113 @@ packages:
'@open-draft/until@2.1.0':
resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==}
- '@rollup/rollup-android-arm-eabi@4.52.5':
- resolution: {integrity: sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ==}
+ '@rollup/rollup-android-arm-eabi@4.53.2':
+ resolution: {integrity: sha512-yDPzwsgiFO26RJA4nZo8I+xqzh7sJTZIWQOxn+/XOdPE31lAvLIYCKqjV+lNH/vxE2L2iH3plKxDCRK6i+CwhA==}
cpu: [arm]
os: [android]
- '@rollup/rollup-android-arm64@4.52.5':
- resolution: {integrity: sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA==}
+ '@rollup/rollup-android-arm64@4.53.2':
+ resolution: {integrity: sha512-k8FontTxIE7b0/OGKeSN5B6j25EuppBcWM33Z19JoVT7UTXFSo3D9CdU39wGTeb29NO3XxpMNauh09B+Ibw+9g==}
cpu: [arm64]
os: [android]
- '@rollup/rollup-darwin-arm64@4.52.5':
- resolution: {integrity: sha512-takF3CR71mCAGA+v794QUZ0b6ZSrgJkArC+gUiG6LB6TQty9T0Mqh3m2ImRBOxS2IeYBo4lKWIieSvnEk2OQWA==}
+ '@rollup/rollup-darwin-arm64@4.53.2':
+ resolution: {integrity: sha512-A6s4gJpomNBtJ2yioj8bflM2oogDwzUiMl2yNJ2v9E7++sHrSrsQ29fOfn5DM/iCzpWcebNYEdXpaK4tr2RhfQ==}
cpu: [arm64]
os: [darwin]
- '@rollup/rollup-darwin-x64@4.52.5':
- resolution: {integrity: sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA==}
+ '@rollup/rollup-darwin-x64@4.53.2':
+ resolution: {integrity: sha512-e6XqVmXlHrBlG56obu9gDRPW3O3hLxpwHpLsBJvuI8qqnsrtSZ9ERoWUXtPOkY8c78WghyPHZdmPhHLWNdAGEw==}
cpu: [x64]
os: [darwin]
- '@rollup/rollup-freebsd-arm64@4.52.5':
- resolution: {integrity: sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA==}
+ '@rollup/rollup-freebsd-arm64@4.53.2':
+ resolution: {integrity: sha512-v0E9lJW8VsrwPux5Qe5CwmH/CF/2mQs6xU1MF3nmUxmZUCHazCjLgYvToOk+YuuUqLQBio1qkkREhxhc656ViA==}
cpu: [arm64]
os: [freebsd]
- '@rollup/rollup-freebsd-x64@4.52.5':
- resolution: {integrity: sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ==}
+ '@rollup/rollup-freebsd-x64@4.53.2':
+ resolution: {integrity: sha512-ClAmAPx3ZCHtp6ysl4XEhWU69GUB1D+s7G9YjHGhIGCSrsg00nEGRRZHmINYxkdoJehde8VIsDC5t9C0gb6yqA==}
cpu: [x64]
os: [freebsd]
- '@rollup/rollup-linux-arm-gnueabihf@4.52.5':
- resolution: {integrity: sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ==}
+ '@rollup/rollup-linux-arm-gnueabihf@4.53.2':
+ resolution: {integrity: sha512-EPlb95nUsz6Dd9Qy13fI5kUPXNSljaG9FiJ4YUGU1O/Q77i5DYFW5KR8g1OzTcdZUqQQ1KdDqsTohdFVwCwjqg==}
cpu: [arm]
os: [linux]
- '@rollup/rollup-linux-arm-musleabihf@4.52.5':
- resolution: {integrity: sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ==}
+ '@rollup/rollup-linux-arm-musleabihf@4.53.2':
+ resolution: {integrity: sha512-BOmnVW+khAUX+YZvNfa0tGTEMVVEerOxN0pDk2E6N6DsEIa2Ctj48FOMfNDdrwinocKaC7YXUZ1pHlKpnkja/Q==}
cpu: [arm]
os: [linux]
- '@rollup/rollup-linux-arm64-gnu@4.52.5':
- resolution: {integrity: sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg==}
+ '@rollup/rollup-linux-arm64-gnu@4.53.2':
+ resolution: {integrity: sha512-Xt2byDZ+6OVNuREgBXr4+CZDJtrVso5woFtpKdGPhpTPHcNG7D8YXeQzpNbFRxzTVqJf7kvPMCub/pcGUWgBjA==}
cpu: [arm64]
os: [linux]
- '@rollup/rollup-linux-arm64-musl@4.52.5':
- resolution: {integrity: sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q==}
+ '@rollup/rollup-linux-arm64-musl@4.53.2':
+ resolution: {integrity: sha512-+LdZSldy/I9N8+klim/Y1HsKbJ3BbInHav5qE9Iy77dtHC/pibw1SR/fXlWyAk0ThnpRKoODwnAuSjqxFRDHUQ==}
cpu: [arm64]
os: [linux]
- '@rollup/rollup-linux-loong64-gnu@4.52.5':
- resolution: {integrity: sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA==}
+ '@rollup/rollup-linux-loong64-gnu@4.53.2':
+ resolution: {integrity: sha512-8ms8sjmyc1jWJS6WdNSA23rEfdjWB30LH8Wqj0Cqvv7qSHnvw6kgMMXRdop6hkmGPlyYBdRPkjJnj3KCUHV/uQ==}
cpu: [loong64]
os: [linux]
- '@rollup/rollup-linux-ppc64-gnu@4.52.5':
- resolution: {integrity: sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw==}
+ '@rollup/rollup-linux-ppc64-gnu@4.53.2':
+ resolution: {integrity: sha512-3HRQLUQbpBDMmzoxPJYd3W6vrVHOo2cVW8RUo87Xz0JPJcBLBr5kZ1pGcQAhdZgX9VV7NbGNipah1omKKe23/g==}
cpu: [ppc64]
os: [linux]
- '@rollup/rollup-linux-riscv64-gnu@4.52.5':
- resolution: {integrity: sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw==}
+ '@rollup/rollup-linux-riscv64-gnu@4.53.2':
+ resolution: {integrity: sha512-fMjKi+ojnmIvhk34gZP94vjogXNNUKMEYs+EDaB/5TG/wUkoeua7p7VCHnE6T2Tx+iaghAqQX8teQzcvrYpaQA==}
cpu: [riscv64]
os: [linux]
- '@rollup/rollup-linux-riscv64-musl@4.52.5':
- resolution: {integrity: sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg==}
+ '@rollup/rollup-linux-riscv64-musl@4.53.2':
+ resolution: {integrity: sha512-XuGFGU+VwUUV5kLvoAdi0Wz5Xbh2SrjIxCtZj6Wq8MDp4bflb/+ThZsVxokM7n0pcbkEr2h5/pzqzDYI7cCgLQ==}
cpu: [riscv64]
os: [linux]
- '@rollup/rollup-linux-s390x-gnu@4.52.5':
- resolution: {integrity: sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ==}
+ '@rollup/rollup-linux-s390x-gnu@4.53.2':
+ resolution: {integrity: sha512-w6yjZF0P+NGzWR3AXWX9zc0DNEGdtvykB03uhonSHMRa+oWA6novflo2WaJr6JZakG2ucsyb+rvhrKac6NIy+w==}
cpu: [s390x]
os: [linux]
- '@rollup/rollup-linux-x64-gnu@4.52.5':
- resolution: {integrity: sha512-hXGLYpdhiNElzN770+H2nlx+jRog8TyynpTVzdlc6bndktjKWyZyiCsuDAlpd+j+W+WNqfcyAWz9HxxIGfZm1Q==}
+ '@rollup/rollup-linux-x64-gnu@4.53.2':
+ resolution: {integrity: sha512-yo8d6tdfdeBArzC7T/PnHd7OypfI9cbuZzPnzLJIyKYFhAQ8SvlkKtKBMbXDxe1h03Rcr7u++nFS7tqXz87Gtw==}
cpu: [x64]
os: [linux]
- '@rollup/rollup-linux-x64-musl@4.52.5':
- resolution: {integrity: sha512-arCGIcuNKjBoKAXD+y7XomR9gY6Mw7HnFBv5Rw7wQRvwYLR7gBAgV7Mb2QTyjXfTveBNFAtPt46/36vV9STLNg==}
+ '@rollup/rollup-linux-x64-musl@4.53.2':
+ resolution: {integrity: sha512-ah59c1YkCxKExPP8O9PwOvs+XRLKwh/mV+3YdKqQ5AMQ0r4M4ZDuOrpWkUaqO7fzAHdINzV9tEVu8vNw48z0lA==}
cpu: [x64]
os: [linux]
- '@rollup/rollup-openharmony-arm64@4.52.5':
- resolution: {integrity: sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw==}
+ '@rollup/rollup-openharmony-arm64@4.53.2':
+ resolution: {integrity: sha512-4VEd19Wmhr+Zy7hbUsFZ6YXEiP48hE//KPLCSVNY5RMGX2/7HZ+QkN55a3atM1C/BZCGIgqN+xrVgtdak2S9+A==}
cpu: [arm64]
os: [openharmony]
- '@rollup/rollup-win32-arm64-msvc@4.52.5':
- resolution: {integrity: sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w==}
+ '@rollup/rollup-win32-arm64-msvc@4.53.2':
+ resolution: {integrity: sha512-IlbHFYc/pQCgew/d5fslcy1KEaYVCJ44G8pajugd8VoOEI8ODhtb/j8XMhLpwHCMB3yk2J07ctup10gpw2nyMA==}
cpu: [arm64]
os: [win32]
- '@rollup/rollup-win32-ia32-msvc@4.52.5':
- resolution: {integrity: sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg==}
+ '@rollup/rollup-win32-ia32-msvc@4.53.2':
+ resolution: {integrity: sha512-lNlPEGgdUfSzdCWU176ku/dQRnA7W+Gp8d+cWv73jYrb8uT7HTVVxq62DUYxjbaByuf1Yk0RIIAbDzp+CnOTFg==}
cpu: [ia32]
os: [win32]
- '@rollup/rollup-win32-x64-gnu@4.52.5':
- resolution: {integrity: sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ==}
+ '@rollup/rollup-win32-x64-gnu@4.53.2':
+ resolution: {integrity: sha512-S6YojNVrHybQis2lYov1sd+uj7K0Q05NxHcGktuMMdIQ2VixGwAfbJ23NnlvvVV1bdpR2m5MsNBViHJKcA4ADw==}
cpu: [x64]
os: [win32]
- '@rollup/rollup-win32-x64-msvc@4.52.5':
- resolution: {integrity: sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg==}
+ '@rollup/rollup-win32-x64-msvc@4.53.2':
+ resolution: {integrity: sha512-k+/Rkcyx//P6fetPoLMb8pBeqJBNGx81uuf7iljX9++yNBVRDQgD04L+SVXmXmh5ZP4/WOp4mWF0kmi06PW2tA==}
cpu: [x64]
os: [win32]
@@ -567,16 +567,16 @@ packages:
resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==}
engines: {node: '>=12'}
- baseline-browser-mapping@2.8.19:
- resolution: {integrity: sha512-zoKGUdu6vb2jd3YOq0nnhEDQVbPcHhco3UImJrv5dSkvxTc2pl2WjOPsjZXDwPDSl5eghIMuY3R6J9NDKF3KcQ==}
+ baseline-browser-mapping@2.8.26:
+ resolution: {integrity: sha512-73lC1ugzwoaWCLJ1LvOgrR5xsMLTqSKIEoMHVtL9E/HNk0PXtTM76ZIm84856/SF7Nv8mPZxKoBsgpm0tR1u1Q==}
hasBin: true
braces@3.0.3:
resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==}
engines: {node: '>=8'}
- browserslist@4.26.3:
- resolution: {integrity: sha512-lAUU+02RFBuCKQPj/P6NgjlbCnLBMp4UtgTx7vNHd3XSIJF87s9a5rA3aH2yw3GS9DqZAUbOtZdCCiZeVRqt0w==}
+ browserslist@4.28.0:
+ resolution: {integrity: sha512-tbydkR/CxfMwelN0vwdP/pLkDwyAASZ+VfWm4EOwlB6SWhx1sYnWLqo8N5j0rAzPfzfRaxt0mM/4wPU/Su84RQ==}
engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7}
hasBin: true
@@ -587,8 +587,8 @@ packages:
resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==}
engines: {node: '>=8'}
- caniuse-lite@1.0.30001751:
- resolution: {integrity: sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==}
+ caniuse-lite@1.0.30001754:
+ resolution: {integrity: sha512-x6OeBXueoAceOmotzx3PO4Zpt4rzpeIFsSr6AAePTZxSkXiYDUmpypEl7e2+8NCd9bD7bXjqyef8CJYPC1jfxg==}
chai@5.3.3:
resolution: {integrity: sha512-4zNhdJD/iOjSH0A05ea+Ke6MU5mmpQcbQsSOkgdaUMJ9zTlDTD/GYlwohmIE2u0gaxHYiVHEn1Fw9mZ/ktJWgw==}
@@ -641,8 +641,8 @@ packages:
resolution: {integrity: sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==}
engines: {node: '>=6'}
- electron-to-chromium@1.5.237:
- resolution: {integrity: sha512-icUt1NvfhGLar5lSWH3tHNzablaA5js3HVHacQimfP8ViEBOQv+L7DKEuHdbTZ0SKCO1ogTJTIL1Gwk9S6Qvcg==}
+ electron-to-chromium@1.5.250:
+ resolution: {integrity: sha512-/5UMj9IiGDMOFBnN4i7/Ry5onJrAGSbOGo3s9FEKmwobGq6xw832ccET0CE3CkkMBZ8GJSlUIesZofpyurqDXw==}
emoji-regex@8.0.0:
resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==}
@@ -654,8 +654,8 @@ packages:
es-module-lexer@1.7.0:
resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==}
- esbuild@0.25.11:
- resolution: {integrity: sha512-KohQwyzrKTQmhXDW1PjCv3Tyspn9n5GcY2RTDqeORIdIJY8yKIF7sTSopFmn/wpMPW4rdPXI0UE5LJLuq3bx0Q==}
+ esbuild@0.25.12:
+ resolution: {integrity: sha512-bbPBYYrtZbkt6Os6FiTLCTFxvq4tt3JKall1vRwshA3fdVztsLAatFaZobhkBC8/BrPetoa0oksYoKXoG4ryJg==}
engines: {node: '>=18'}
hasBin: true
@@ -724,8 +724,8 @@ packages:
graceful-fs@4.2.11:
resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==}
- graphql@16.11.0:
- resolution: {integrity: sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw==}
+ graphql@16.12.0:
+ resolution: {integrity: sha512-DKKrynuQRne0PNpEbzuEdHlYOMksHSUI8Zc9Unei5gTsMNA2/vMpoMz/yKba50pejK56qj98qM0SjYxAKi13gQ==}
engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0}
has-flag@4.0.0:
@@ -766,8 +766,8 @@ packages:
loupe@3.2.1:
resolution: {integrity: sha512-CdzqowRJCeLU72bHvWqwRBBlLcMEtIvGrlvef74kMnV2AolS9Y8xUv1I0U/MNAWMhBlKIoyuEgoJ0t/bbwHbLQ==}
- magic-string@0.30.19:
- resolution: {integrity: sha512-2N21sPY9Ws53PZvsEpVtNuSW+ScYbQdp4b9qUaL+9QkHUrGFKo56Lg9Emg5s9V/qrtNBmiR01sYhUOwu3H+VOw==}
+ magic-string@0.30.21:
+ resolution: {integrity: sha512-vd2F4YUyEXKGcLHoq+TEyCjxueSeHnFxyyjNp80yg0XV4vUhnDer/lvvlqM/arB5bXQN5K2/3oinyCRyx8T2CQ==}
merge-stream@2.0.0:
resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==}
@@ -797,9 +797,9 @@ packages:
typescript:
optional: true
- mute-stream@2.0.0:
- resolution: {integrity: sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==}
- engines: {node: ^18.17.0 || >=20.5.0}
+ mute-stream@3.0.0:
+ resolution: {integrity: sha512-dkEJPVvun4FryqBmZ5KhDo0K9iDXAwn08tMLDinNdRBNPcYEDiWYysLcc6k3mjTMlbP9KyylvRpd4wFtwrT9rw==}
+ engines: {node: ^20.17.0 || >=22.9.0}
nanoid@3.3.11:
resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==}
@@ -809,8 +809,8 @@ packages:
neo-async@2.6.2:
resolution: {integrity: sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==}
- node-releases@2.0.26:
- resolution: {integrity: sha512-S2M9YimhSjBSvYnlr5/+umAnPHE++ODwt5e2Ij6FoX45HA/s4vHdkDx1eax2pAPeAOqu4s9b7ppahsyEFdVqQA==}
+ node-releases@2.0.27:
+ resolution: {integrity: sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==}
outvariant@1.4.3:
resolution: {integrity: sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==}
@@ -854,8 +854,8 @@ packages:
rettime@0.7.0:
resolution: {integrity: sha512-LPRKoHnLKd/r3dVxcwO7vhCW+orkOGj9ViueosEBK6ie89CijnfRlhaDhHq/3Hxu4CkWQtxwlBG0mzTQY6uQjw==}
- rollup@4.52.5:
- resolution: {integrity: sha512-3GuObel8h7Kqdjt0gxkEzaifHTqLVW56Y/bjN7PSQtkKr0w3V/QYSdt6QWYtd7A1xUtYQigtdUfgj1RvWVtorw==}
+ rollup@4.53.2:
+ resolution: {integrity: sha512-MHngMYwGJVi6Fmnk6ISmnk7JAHRNF0UkuucA0CUW3N3a4KnONPEZz+vUanQP/ZC/iY1Qkf3bwPWzyY84wEks1g==}
engines: {node: '>=18.0.0', npm: '>=8.0.0'}
hasBin: true
@@ -948,8 +948,8 @@ packages:
uglify-js:
optional: true
- terser@5.44.0:
- resolution: {integrity: sha512-nIVck8DK+GM/0Frwd+nIhZ84pR/BX7rmXMfYwyg+Sri5oGVE99/E3KvXqpC2xHFxyqXyGHTKBSioxxplrO4I4w==}
+ terser@5.44.1:
+ resolution: {integrity: sha512-t/R3R/n0MSwnnazuPpPNVO60LX0SKL45pyl9YlvxIdkH0Of7D5qM2EVe+yASRIlY5pZ73nclYJfNANGWPwFDZw==}
engines: {node: '>=10'}
hasBin: true
@@ -1009,8 +1009,8 @@ packages:
undici-types@5.26.5:
resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==}
- update-browserslist-db@1.1.3:
- resolution: {integrity: sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==}
+ update-browserslist-db@1.1.4:
+ resolution: {integrity: sha512-q0SPT4xyU84saUX+tomz1WLkxUbuaJnR1xWt17M7fJtEJigJeWUNGUqrauFXsHnqev9y9JTRGwk13tFBuKby4A==}
hasBin: true
peerDependencies:
browserslist: '>= 4.21.0'
@@ -1020,8 +1020,8 @@ packages:
engines: {node: ^18.0.0 || ^20.0.0 || >=22.0.0}
hasBin: true
- vite@7.1.11:
- resolution: {integrity: sha512-uzcxnSDVjAopEUjljkWh8EIrg6tlzrjFUfMcR1EVsRDGwf/ccef0qQPRyOrROwhrTDaApueq+ja+KLPlzR/zdg==}
+ vite@7.2.2:
+ resolution: {integrity: sha512-BxAKBWmIbrDgrokdGZH1IgkIk/5mMHDreLDmCJ0qpyJaAteP8NvMhkwr/ZCQNqNH97bw/dANTE9PDzqwJghfMQ==}
engines: {node: ^20.19.0 || >=22.12.0}
hasBin: true
peerDependencies:
@@ -1137,39 +1137,39 @@ packages:
snapshots:
- '@biomejs/biome@2.2.5':
+ '@biomejs/biome@2.3.1':
optionalDependencies:
- '@biomejs/cli-darwin-arm64': 2.2.5
- '@biomejs/cli-darwin-x64': 2.2.5
- '@biomejs/cli-linux-arm64': 2.2.5
- '@biomejs/cli-linux-arm64-musl': 2.2.5
- '@biomejs/cli-linux-x64': 2.2.5
- '@biomejs/cli-linux-x64-musl': 2.2.5
- '@biomejs/cli-win32-arm64': 2.2.5
- '@biomejs/cli-win32-x64': 2.2.5
-
- '@biomejs/cli-darwin-arm64@2.2.5':
+ '@biomejs/cli-darwin-arm64': 2.3.1
+ '@biomejs/cli-darwin-x64': 2.3.1
+ '@biomejs/cli-linux-arm64': 2.3.1
+ '@biomejs/cli-linux-arm64-musl': 2.3.1
+ '@biomejs/cli-linux-x64': 2.3.1
+ '@biomejs/cli-linux-x64-musl': 2.3.1
+ '@biomejs/cli-win32-arm64': 2.3.1
+ '@biomejs/cli-win32-x64': 2.3.1
+
+ '@biomejs/cli-darwin-arm64@2.3.1':
optional: true
- '@biomejs/cli-darwin-x64@2.2.5':
+ '@biomejs/cli-darwin-x64@2.3.1':
optional: true
- '@biomejs/cli-linux-arm64-musl@2.2.5':
+ '@biomejs/cli-linux-arm64-musl@2.3.1':
optional: true
- '@biomejs/cli-linux-arm64@2.2.5':
+ '@biomejs/cli-linux-arm64@2.3.1':
optional: true
- '@biomejs/cli-linux-x64-musl@2.2.5':
+ '@biomejs/cli-linux-x64-musl@2.3.1':
optional: true
- '@biomejs/cli-linux-x64@2.2.5':
+ '@biomejs/cli-linux-x64@2.3.1':
optional: true
- '@biomejs/cli-win32-arm64@2.2.5':
+ '@biomejs/cli-win32-arm64@2.3.1':
optional: true
- '@biomejs/cli-win32-x64@2.2.5':
+ '@biomejs/cli-win32-x64@2.3.1':
optional: true
'@bundled-es-modules/cookie@2.0.1':
@@ -1180,109 +1180,109 @@ snapshots:
dependencies:
statuses: 2.0.2
- '@esbuild/aix-ppc64@0.25.11':
+ '@esbuild/aix-ppc64@0.25.12':
optional: true
- '@esbuild/android-arm64@0.25.11':
+ '@esbuild/android-arm64@0.25.12':
optional: true
- '@esbuild/android-arm@0.25.11':
+ '@esbuild/android-arm@0.25.12':
optional: true
- '@esbuild/android-x64@0.25.11':
+ '@esbuild/android-x64@0.25.12':
optional: true
- '@esbuild/darwin-arm64@0.25.11':
+ '@esbuild/darwin-arm64@0.25.12':
optional: true
- '@esbuild/darwin-x64@0.25.11':
+ '@esbuild/darwin-x64@0.25.12':
optional: true
- '@esbuild/freebsd-arm64@0.25.11':
+ '@esbuild/freebsd-arm64@0.25.12':
optional: true
- '@esbuild/freebsd-x64@0.25.11':
+ '@esbuild/freebsd-x64@0.25.12':
optional: true
- '@esbuild/linux-arm64@0.25.11':
+ '@esbuild/linux-arm64@0.25.12':
optional: true
- '@esbuild/linux-arm@0.25.11':
+ '@esbuild/linux-arm@0.25.12':
optional: true
- '@esbuild/linux-ia32@0.25.11':
+ '@esbuild/linux-ia32@0.25.12':
optional: true
- '@esbuild/linux-loong64@0.25.11':
+ '@esbuild/linux-loong64@0.25.12':
optional: true
- '@esbuild/linux-mips64el@0.25.11':
+ '@esbuild/linux-mips64el@0.25.12':
optional: true
- '@esbuild/linux-ppc64@0.25.11':
+ '@esbuild/linux-ppc64@0.25.12':
optional: true
- '@esbuild/linux-riscv64@0.25.11':
+ '@esbuild/linux-riscv64@0.25.12':
optional: true
- '@esbuild/linux-s390x@0.25.11':
+ '@esbuild/linux-s390x@0.25.12':
optional: true
- '@esbuild/linux-x64@0.25.11':
+ '@esbuild/linux-x64@0.25.12':
optional: true
- '@esbuild/netbsd-arm64@0.25.11':
+ '@esbuild/netbsd-arm64@0.25.12':
optional: true
- '@esbuild/netbsd-x64@0.25.11':
+ '@esbuild/netbsd-x64@0.25.12':
optional: true
- '@esbuild/openbsd-arm64@0.25.11':
+ '@esbuild/openbsd-arm64@0.25.12':
optional: true
- '@esbuild/openbsd-x64@0.25.11':
+ '@esbuild/openbsd-x64@0.25.12':
optional: true
- '@esbuild/openharmony-arm64@0.25.11':
+ '@esbuild/openharmony-arm64@0.25.12':
optional: true
- '@esbuild/sunos-x64@0.25.11':
+ '@esbuild/sunos-x64@0.25.12':
optional: true
- '@esbuild/win32-arm64@0.25.11':
+ '@esbuild/win32-arm64@0.25.12':
optional: true
- '@esbuild/win32-ia32@0.25.11':
+ '@esbuild/win32-ia32@0.25.12':
optional: true
- '@esbuild/win32-x64@0.25.11':
+ '@esbuild/win32-x64@0.25.12':
optional: true
- '@inquirer/ansi@1.0.1': {}
+ '@inquirer/ansi@1.0.2': {}
- '@inquirer/confirm@5.1.19(@types/node@18.19.130)':
+ '@inquirer/confirm@5.1.20(@types/node@18.19.130)':
dependencies:
- '@inquirer/core': 10.3.0(@types/node@18.19.130)
- '@inquirer/type': 3.0.9(@types/node@18.19.130)
+ '@inquirer/core': 10.3.1(@types/node@18.19.130)
+ '@inquirer/type': 3.0.10(@types/node@18.19.130)
optionalDependencies:
'@types/node': 18.19.130
- '@inquirer/core@10.3.0(@types/node@18.19.130)':
+ '@inquirer/core@10.3.1(@types/node@18.19.130)':
dependencies:
- '@inquirer/ansi': 1.0.1
- '@inquirer/figures': 1.0.14
- '@inquirer/type': 3.0.9(@types/node@18.19.130)
+ '@inquirer/ansi': 1.0.2
+ '@inquirer/figures': 1.0.15
+ '@inquirer/type': 3.0.10(@types/node@18.19.130)
cli-width: 4.1.0
- mute-stream: 2.0.0
+ mute-stream: 3.0.0
signal-exit: 4.1.0
wrap-ansi: 6.2.0
yoctocolors-cjs: 2.1.3
optionalDependencies:
'@types/node': 18.19.130
- '@inquirer/figures@1.0.14': {}
+ '@inquirer/figures@1.0.15': {}
- '@inquirer/type@3.0.9(@types/node@18.19.130)':
+ '@inquirer/type@3.0.10(@types/node@18.19.130)':
optionalDependencies:
'@types/node': 18.19.130
@@ -1323,70 +1323,70 @@ snapshots:
'@open-draft/until@2.1.0': {}
- '@rollup/rollup-android-arm-eabi@4.52.5':
+ '@rollup/rollup-android-arm-eabi@4.53.2':
optional: true
- '@rollup/rollup-android-arm64@4.52.5':
+ '@rollup/rollup-android-arm64@4.53.2':
optional: true
- '@rollup/rollup-darwin-arm64@4.52.5':
+ '@rollup/rollup-darwin-arm64@4.53.2':
optional: true
- '@rollup/rollup-darwin-x64@4.52.5':
+ '@rollup/rollup-darwin-x64@4.53.2':
optional: true
- '@rollup/rollup-freebsd-arm64@4.52.5':
+ '@rollup/rollup-freebsd-arm64@4.53.2':
optional: true
- '@rollup/rollup-freebsd-x64@4.52.5':
+ '@rollup/rollup-freebsd-x64@4.53.2':
optional: true
- '@rollup/rollup-linux-arm-gnueabihf@4.52.5':
+ '@rollup/rollup-linux-arm-gnueabihf@4.53.2':
optional: true
- '@rollup/rollup-linux-arm-musleabihf@4.52.5':
+ '@rollup/rollup-linux-arm-musleabihf@4.53.2':
optional: true
- '@rollup/rollup-linux-arm64-gnu@4.52.5':
+ '@rollup/rollup-linux-arm64-gnu@4.53.2':
optional: true
- '@rollup/rollup-linux-arm64-musl@4.52.5':
+ '@rollup/rollup-linux-arm64-musl@4.53.2':
optional: true
- '@rollup/rollup-linux-loong64-gnu@4.52.5':
+ '@rollup/rollup-linux-loong64-gnu@4.53.2':
optional: true
- '@rollup/rollup-linux-ppc64-gnu@4.52.5':
+ '@rollup/rollup-linux-ppc64-gnu@4.53.2':
optional: true
- '@rollup/rollup-linux-riscv64-gnu@4.52.5':
+ '@rollup/rollup-linux-riscv64-gnu@4.53.2':
optional: true
- '@rollup/rollup-linux-riscv64-musl@4.52.5':
+ '@rollup/rollup-linux-riscv64-musl@4.53.2':
optional: true
- '@rollup/rollup-linux-s390x-gnu@4.52.5':
+ '@rollup/rollup-linux-s390x-gnu@4.53.2':
optional: true
- '@rollup/rollup-linux-x64-gnu@4.52.5':
+ '@rollup/rollup-linux-x64-gnu@4.53.2':
optional: true
- '@rollup/rollup-linux-x64-musl@4.52.5':
+ '@rollup/rollup-linux-x64-musl@4.53.2':
optional: true
- '@rollup/rollup-openharmony-arm64@4.52.5':
+ '@rollup/rollup-openharmony-arm64@4.53.2':
optional: true
- '@rollup/rollup-win32-arm64-msvc@4.52.5':
+ '@rollup/rollup-win32-arm64-msvc@4.53.2':
optional: true
- '@rollup/rollup-win32-ia32-msvc@4.52.5':
+ '@rollup/rollup-win32-ia32-msvc@4.53.2':
optional: true
- '@rollup/rollup-win32-x64-gnu@4.52.5':
+ '@rollup/rollup-win32-x64-gnu@4.53.2':
optional: true
- '@rollup/rollup-win32-x64-msvc@4.52.5':
+ '@rollup/rollup-win32-x64-msvc@4.53.2':
optional: true
'@types/chai@5.2.3':
@@ -1426,14 +1426,14 @@ snapshots:
chai: 5.3.3
tinyrainbow: 2.0.0
- '@vitest/mocker@3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.1.11(@types/node@18.19.130)(terser@5.44.0))':
+ '@vitest/mocker@3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.2.2(@types/node@18.19.130)(terser@5.44.1))':
dependencies:
'@vitest/spy': 3.2.4
estree-walker: 3.0.3
- magic-string: 0.30.19
+ magic-string: 0.30.21
optionalDependencies:
msw: 2.11.2(@types/node@18.19.130)(typescript@5.7.3)
- vite: 7.1.11(@types/node@18.19.130)(terser@5.44.0)
+ vite: 7.2.2(@types/node@18.19.130)(terser@5.44.1)
'@vitest/pretty-format@3.2.4':
dependencies:
@@ -1448,7 +1448,7 @@ snapshots:
'@vitest/snapshot@3.2.4':
dependencies:
'@vitest/pretty-format': 3.2.4
- magic-string: 0.30.19
+ magic-string: 0.30.21
pathe: 2.0.3
'@vitest/spy@3.2.4':
@@ -1571,25 +1571,25 @@ snapshots:
assertion-error@2.0.1: {}
- baseline-browser-mapping@2.8.19: {}
+ baseline-browser-mapping@2.8.26: {}
braces@3.0.3:
dependencies:
fill-range: 7.1.1
- browserslist@4.26.3:
+ browserslist@4.28.0:
dependencies:
- baseline-browser-mapping: 2.8.19
- caniuse-lite: 1.0.30001751
- electron-to-chromium: 1.5.237
- node-releases: 2.0.26
- update-browserslist-db: 1.1.3(browserslist@4.26.3)
+ baseline-browser-mapping: 2.8.26
+ caniuse-lite: 1.0.30001754
+ electron-to-chromium: 1.5.250
+ node-releases: 2.0.27
+ update-browserslist-db: 1.1.4(browserslist@4.28.0)
buffer-from@1.1.2: {}
cac@6.7.14: {}
- caniuse-lite@1.0.30001751: {}
+ caniuse-lite@1.0.30001754: {}
chai@5.3.3:
dependencies:
@@ -1632,7 +1632,7 @@ snapshots:
deep-eql@5.0.2: {}
- electron-to-chromium@1.5.237: {}
+ electron-to-chromium@1.5.250: {}
emoji-regex@8.0.0: {}
@@ -1643,34 +1643,34 @@ snapshots:
es-module-lexer@1.7.0: {}
- esbuild@0.25.11:
+ esbuild@0.25.12:
optionalDependencies:
- '@esbuild/aix-ppc64': 0.25.11
- '@esbuild/android-arm': 0.25.11
- '@esbuild/android-arm64': 0.25.11
- '@esbuild/android-x64': 0.25.11
- '@esbuild/darwin-arm64': 0.25.11
- '@esbuild/darwin-x64': 0.25.11
- '@esbuild/freebsd-arm64': 0.25.11
- '@esbuild/freebsd-x64': 0.25.11
- '@esbuild/linux-arm': 0.25.11
- '@esbuild/linux-arm64': 0.25.11
- '@esbuild/linux-ia32': 0.25.11
- '@esbuild/linux-loong64': 0.25.11
- '@esbuild/linux-mips64el': 0.25.11
- '@esbuild/linux-ppc64': 0.25.11
- '@esbuild/linux-riscv64': 0.25.11
- '@esbuild/linux-s390x': 0.25.11
- '@esbuild/linux-x64': 0.25.11
- '@esbuild/netbsd-arm64': 0.25.11
- '@esbuild/netbsd-x64': 0.25.11
- '@esbuild/openbsd-arm64': 0.25.11
- '@esbuild/openbsd-x64': 0.25.11
- '@esbuild/openharmony-arm64': 0.25.11
- '@esbuild/sunos-x64': 0.25.11
- '@esbuild/win32-arm64': 0.25.11
- '@esbuild/win32-ia32': 0.25.11
- '@esbuild/win32-x64': 0.25.11
+ '@esbuild/aix-ppc64': 0.25.12
+ '@esbuild/android-arm': 0.25.12
+ '@esbuild/android-arm64': 0.25.12
+ '@esbuild/android-x64': 0.25.12
+ '@esbuild/darwin-arm64': 0.25.12
+ '@esbuild/darwin-x64': 0.25.12
+ '@esbuild/freebsd-arm64': 0.25.12
+ '@esbuild/freebsd-x64': 0.25.12
+ '@esbuild/linux-arm': 0.25.12
+ '@esbuild/linux-arm64': 0.25.12
+ '@esbuild/linux-ia32': 0.25.12
+ '@esbuild/linux-loong64': 0.25.12
+ '@esbuild/linux-mips64el': 0.25.12
+ '@esbuild/linux-ppc64': 0.25.12
+ '@esbuild/linux-riscv64': 0.25.12
+ '@esbuild/linux-s390x': 0.25.12
+ '@esbuild/linux-x64': 0.25.12
+ '@esbuild/netbsd-arm64': 0.25.12
+ '@esbuild/netbsd-x64': 0.25.12
+ '@esbuild/openbsd-arm64': 0.25.12
+ '@esbuild/openbsd-x64': 0.25.12
+ '@esbuild/openharmony-arm64': 0.25.12
+ '@esbuild/sunos-x64': 0.25.12
+ '@esbuild/win32-arm64': 0.25.12
+ '@esbuild/win32-ia32': 0.25.12
+ '@esbuild/win32-x64': 0.25.12
escalade@3.2.0: {}
@@ -1716,7 +1716,7 @@ snapshots:
graceful-fs@4.2.11: {}
- graphql@16.11.0: {}
+ graphql@16.12.0: {}
has-flag@4.0.0: {}
@@ -1744,7 +1744,7 @@ snapshots:
loupe@3.2.1: {}
- magic-string@0.30.19:
+ magic-string@0.30.21:
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
@@ -1767,13 +1767,13 @@ snapshots:
dependencies:
'@bundled-es-modules/cookie': 2.0.1
'@bundled-es-modules/statuses': 1.0.1
- '@inquirer/confirm': 5.1.19(@types/node@18.19.130)
+ '@inquirer/confirm': 5.1.20(@types/node@18.19.130)
'@mswjs/interceptors': 0.39.8
'@open-draft/deferred-promise': 2.2.0
'@open-draft/until': 2.1.0
'@types/cookie': 0.6.0
'@types/statuses': 2.0.6
- graphql: 16.11.0
+ graphql: 16.12.0
headers-polyfill: 4.0.3
is-node-process: 1.2.0
outvariant: 1.4.3
@@ -1789,13 +1789,13 @@ snapshots:
transitivePeerDependencies:
- '@types/node'
- mute-stream@2.0.0: {}
+ mute-stream@3.0.0: {}
nanoid@3.3.11: {}
neo-async@2.6.2: {}
- node-releases@2.0.26: {}
+ node-releases@2.0.27: {}
outvariant@1.4.3: {}
@@ -1827,32 +1827,32 @@ snapshots:
rettime@0.7.0: {}
- rollup@4.52.5:
+ rollup@4.53.2:
dependencies:
'@types/estree': 1.0.8
optionalDependencies:
- '@rollup/rollup-android-arm-eabi': 4.52.5
- '@rollup/rollup-android-arm64': 4.52.5
- '@rollup/rollup-darwin-arm64': 4.52.5
- '@rollup/rollup-darwin-x64': 4.52.5
- '@rollup/rollup-freebsd-arm64': 4.52.5
- '@rollup/rollup-freebsd-x64': 4.52.5
- '@rollup/rollup-linux-arm-gnueabihf': 4.52.5
- '@rollup/rollup-linux-arm-musleabihf': 4.52.5
- '@rollup/rollup-linux-arm64-gnu': 4.52.5
- '@rollup/rollup-linux-arm64-musl': 4.52.5
- '@rollup/rollup-linux-loong64-gnu': 4.52.5
- '@rollup/rollup-linux-ppc64-gnu': 4.52.5
- '@rollup/rollup-linux-riscv64-gnu': 4.52.5
- '@rollup/rollup-linux-riscv64-musl': 4.52.5
- '@rollup/rollup-linux-s390x-gnu': 4.52.5
- '@rollup/rollup-linux-x64-gnu': 4.52.5
- '@rollup/rollup-linux-x64-musl': 4.52.5
- '@rollup/rollup-openharmony-arm64': 4.52.5
- '@rollup/rollup-win32-arm64-msvc': 4.52.5
- '@rollup/rollup-win32-ia32-msvc': 4.52.5
- '@rollup/rollup-win32-x64-gnu': 4.52.5
- '@rollup/rollup-win32-x64-msvc': 4.52.5
+ '@rollup/rollup-android-arm-eabi': 4.53.2
+ '@rollup/rollup-android-arm64': 4.53.2
+ '@rollup/rollup-darwin-arm64': 4.53.2
+ '@rollup/rollup-darwin-x64': 4.53.2
+ '@rollup/rollup-freebsd-arm64': 4.53.2
+ '@rollup/rollup-freebsd-x64': 4.53.2
+ '@rollup/rollup-linux-arm-gnueabihf': 4.53.2
+ '@rollup/rollup-linux-arm-musleabihf': 4.53.2
+ '@rollup/rollup-linux-arm64-gnu': 4.53.2
+ '@rollup/rollup-linux-arm64-musl': 4.53.2
+ '@rollup/rollup-linux-loong64-gnu': 4.53.2
+ '@rollup/rollup-linux-ppc64-gnu': 4.53.2
+ '@rollup/rollup-linux-riscv64-gnu': 4.53.2
+ '@rollup/rollup-linux-riscv64-musl': 4.53.2
+ '@rollup/rollup-linux-s390x-gnu': 4.53.2
+ '@rollup/rollup-linux-x64-gnu': 4.53.2
+ '@rollup/rollup-linux-x64-musl': 4.53.2
+ '@rollup/rollup-openharmony-arm64': 4.53.2
+ '@rollup/rollup-win32-arm64-msvc': 4.53.2
+ '@rollup/rollup-win32-ia32-msvc': 4.53.2
+ '@rollup/rollup-win32-x64-gnu': 4.53.2
+ '@rollup/rollup-win32-x64-msvc': 4.53.2
fsevents: 2.3.3
safe-buffer@5.2.1: {}
@@ -1923,10 +1923,10 @@ snapshots:
jest-worker: 27.5.1
schema-utils: 4.3.3
serialize-javascript: 6.0.2
- terser: 5.44.0
+ terser: 5.44.1
webpack: 5.102.1
- terser@5.44.0:
+ terser@5.44.1:
dependencies:
'@jridgewell/source-map': 0.3.11
acorn: 8.15.0
@@ -1978,19 +1978,19 @@ snapshots:
undici-types@5.26.5: {}
- update-browserslist-db@1.1.3(browserslist@4.26.3):
+ update-browserslist-db@1.1.4(browserslist@4.28.0):
dependencies:
- browserslist: 4.26.3
+ browserslist: 4.28.0
escalade: 3.2.0
picocolors: 1.1.1
- vite-node@3.2.4(@types/node@18.19.130)(terser@5.44.0):
+ vite-node@3.2.4(@types/node@18.19.130)(terser@5.44.1):
dependencies:
cac: 6.7.14
debug: 4.4.3
es-module-lexer: 1.7.0
pathe: 2.0.3
- vite: 7.1.11(@types/node@18.19.130)(terser@5.44.0)
+ vite: 7.2.2(@types/node@18.19.130)(terser@5.44.1)
transitivePeerDependencies:
- '@types/node'
- jiti
@@ -2005,24 +2005,24 @@ snapshots:
- tsx
- yaml
- vite@7.1.11(@types/node@18.19.130)(terser@5.44.0):
+ vite@7.2.2(@types/node@18.19.130)(terser@5.44.1):
dependencies:
- esbuild: 0.25.11
+ esbuild: 0.25.12
fdir: 6.5.0(picomatch@4.0.3)
picomatch: 4.0.3
postcss: 8.5.6
- rollup: 4.52.5
+ rollup: 4.53.2
tinyglobby: 0.2.15
optionalDependencies:
'@types/node': 18.19.130
fsevents: 2.3.3
- terser: 5.44.0
+ terser: 5.44.1
- vitest@3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.0):
+ vitest@3.2.4(@types/node@18.19.130)(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(terser@5.44.1):
dependencies:
'@types/chai': 5.2.3
'@vitest/expect': 3.2.4
- '@vitest/mocker': 3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.1.11(@types/node@18.19.130)(terser@5.44.0))
+ '@vitest/mocker': 3.2.4(msw@2.11.2(@types/node@18.19.130)(typescript@5.7.3))(vite@7.2.2(@types/node@18.19.130)(terser@5.44.1))
'@vitest/pretty-format': 3.2.4
'@vitest/runner': 3.2.4
'@vitest/snapshot': 3.2.4
@@ -2031,7 +2031,7 @@ snapshots:
chai: 5.3.3
debug: 4.4.3
expect-type: 1.2.2
- magic-string: 0.30.19
+ magic-string: 0.30.21
pathe: 2.0.3
picomatch: 4.0.3
std-env: 3.10.0
@@ -2040,8 +2040,8 @@ snapshots:
tinyglobby: 0.2.15
tinypool: 1.1.1
tinyrainbow: 2.0.0
- vite: 7.1.11(@types/node@18.19.130)(terser@5.44.0)
- vite-node: 3.2.4(@types/node@18.19.130)(terser@5.44.0)
+ vite: 7.2.2(@types/node@18.19.130)(terser@5.44.1)
+ vite-node: 3.2.4(@types/node@18.19.130)(terser@5.44.1)
why-is-node-running: 2.3.0
optionalDependencies:
'@types/node': 18.19.130
@@ -2076,7 +2076,7 @@ snapshots:
'@webassemblyjs/wasm-parser': 1.14.1
acorn: 8.15.0
acorn-import-phases: 1.0.4(acorn@8.15.0)
- browserslist: 4.26.3
+ browserslist: 4.28.0
chrome-trace-event: 1.0.4
enhanced-resolve: 5.18.3
es-module-lexer: 1.7.0
diff --git a/reference.md b/reference.md
index f0bb9f0..92767b7 100644
--- a/reference.md
+++ b/reference.md
@@ -727,7 +727,7 @@ await client.tasks.listenAsAgent();
## Objects
-client.objects.listObjects({ ...params }) -> core.Page
+client.objects.listObjects({ ...params }) -> core.Page
-
@@ -754,13 +754,13 @@ Lists objects in your environment. You can define a prefix to list a subset of y
-
```typescript
-const response = await client.objects.listObjects({
+const pageableResponse = await client.objects.listObjects({
prefix: "prefix",
sinceTimestamp: "2024-01-15T09:30:00Z",
pageToken: "pageToken",
allObjectsInMesh: true
});
-for await (const item of response) {
+for await (const item of pageableResponse) {
console.log(item);
}
@@ -775,6 +775,9 @@ while (page.hasNextPage()) {
page = page.getNextPage();
}
+// You can also access the underlying response
+const response = page.response;
+
```
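
The reference.md snippet above documents the new `page.response` accessor. A minimal sketch of how it can be used alongside the existing iteration helpers; client construction options (auth, base URL) are omitted here and assumed to be configured as usual.

```typescript
import { LatticeClient } from "@anduril-industries/lattice-sdk";

// Auth and base-URL options are omitted for brevity (assumption: configured elsewhere).
const client = new LatticeClient();

const page = await client.objects.listObjects({ prefix: "prefix" });

// Items on the current page, and (new in this release) the raw list response behind them.
console.log(page.data);
console.log(page.response);

// The page remains async-iterable across all results, as before.
for await (const item of page) {
    console.log(item);
}
```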
diff --git a/src/BaseClient.ts b/src/BaseClient.ts
index 2ef235a..954db15 100644
--- a/src/BaseClient.ts
+++ b/src/BaseClient.ts
@@ -14,6 +14,10 @@ export interface BaseClientOptions {
timeoutInSeconds?: number;
/** The default number of times to retry the request. Defaults to 2. */
maxRetries?: number;
+ /** Provide a custom fetch implementation. Useful for platforms that don't have a built-in fetch or need a custom implementation. */
+ fetch?: typeof fetch;
+ /** Configure logging for the client. */
+ logging?: core.logging.LogConfig | core.logging.Logger;
}
export interface BaseRequestOptions {
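
A hedged sketch of supplying the two new `BaseClientOptions` fields at construction time. The option names come from this diff; the assumption is that `LatticeClient.Options` picks up the new `BaseClientOptions` fields, which the `Client.ts` change below suggests.

```typescript
import { LatticeClient } from "@anduril-industries/lattice-sdk";

const client = new LatticeClient({
    // Route requests through a custom fetch, e.g. a proxy-aware or instrumented implementation.
    fetch: (input: RequestInfo | URL, init?: RequestInit) => {
        console.debug("outgoing request:", input);
        return fetch(input, init);
    },
    // Opt in to SDK logging; logging is silent by default.
    logging: { level: "debug", silent: false },
});
```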
diff --git a/src/Client.ts b/src/Client.ts
index 483efe7..267a38e 100644
--- a/src/Client.ts
+++ b/src/Client.ts
@@ -22,12 +22,13 @@ export class LatticeClient {
constructor(_options: LatticeClient.Options = {}) {
this._options = {
..._options,
+ logging: core.logging.createLogger(_options?.logging),
headers: mergeHeaders(
{
"X-Fern-Language": "JavaScript",
"X-Fern-SDK-Name": "@anduril-industries/lattice-sdk",
- "X-Fern-SDK-Version": "3.0.0",
- "User-Agent": "@anduril-industries/lattice-sdk/3.0.0",
+ "X-Fern-SDK-Version": "3.0.1",
+ "User-Agent": "@anduril-industries/lattice-sdk/3.0.1",
"X-Fern-Runtime": core.RUNTIME.type,
"X-Fern-Runtime-Version": core.RUNTIME.version,
},
diff --git a/src/api/resources/entities/client/Client.ts b/src/api/resources/entities/client/Client.ts
index 28973e5..fec9ffa 100644
--- a/src/api/resources/entities/client/Client.ts
+++ b/src/api/resources/entities/client/Client.ts
@@ -73,6 +73,8 @@ export class Entities {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse };
@@ -150,6 +152,8 @@ export class Entities {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse };
@@ -248,6 +252,8 @@ export class Entities {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse };
@@ -334,6 +340,8 @@ export class Entities {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.Entity, rawResponse: _response.rawResponse };
@@ -432,6 +440,8 @@ export class Entities {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.EntityEventResponse, rawResponse: _response.rawResponse };
@@ -511,6 +521,8 @@ export class Entities {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return {
diff --git a/src/api/resources/objects/client/Client.ts b/src/api/resources/objects/client/Client.ts
index b548535..fe76530 100644
--- a/src/api/resources/objects/client/Client.ts
+++ b/src/api/resources/objects/client/Client.ts
@@ -44,7 +44,7 @@ export class Objects {
public async listObjects(
request: Lattice.ListObjectsRequest = {},
requestOptions?: Objects.RequestOptions,
- ): Promise> {
+ ): Promise> {
const list = core.HttpResponsePromise.interceptFunction(
async (request: Lattice.ListObjectsRequest): Promise> => {
const { prefix, sinceTimestamp, pageToken, allObjectsInMesh } = request;
@@ -79,6 +79,8 @@ export class Objects {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.ListResponse, rawResponse: _response.rawResponse };
@@ -120,7 +122,7 @@ export class Objects {
},
);
const dataWithRawResponse = await list(request).withRawResponse();
- return new core.Pageable({
+ return new core.Page({
response: dataWithRawResponse.data,
rawResponse: dataWithRawResponse.rawResponse,
hasNextPage: (response) =>
@@ -177,6 +179,8 @@ export class Objects {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body, rawResponse: _response.rawResponse };
@@ -268,6 +272,8 @@ export class Objects {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.PathMetadata, rawResponse: _response.rawResponse };
@@ -353,6 +359,8 @@ export class Objects {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: undefined, rawResponse: _response.rawResponse };
@@ -438,6 +446,8 @@ export class Objects {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.rawResponse.headers, rawResponse: _response.rawResponse };
diff --git a/src/api/resources/tasks/client/Client.ts b/src/api/resources/tasks/client/Client.ts
index 07ea354..ff05a1c 100644
--- a/src/api/resources/tasks/client/Client.ts
+++ b/src/api/resources/tasks/client/Client.ts
@@ -68,6 +68,8 @@ export class Tasks {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.Task, rawResponse: _response.rawResponse };
@@ -142,6 +144,8 @@ export class Tasks {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.Task, rawResponse: _response.rawResponse };
@@ -229,6 +233,8 @@ export class Tasks {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.Task, rawResponse: _response.rawResponse };
@@ -315,6 +321,8 @@ export class Tasks {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.TaskQueryResults, rawResponse: _response.rawResponse };
@@ -400,6 +408,8 @@ export class Tasks {
timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000,
maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries,
abortSignal: requestOptions?.abortSignal,
+ fetchFn: this._options?.fetch,
+ logging: this._options.logging,
});
if (_response.ok) {
return { data: _response.body as Lattice.AgentRequest, rawResponse: _response.rawResponse };
diff --git a/src/core/exports.ts b/src/core/exports.ts
index e415a8f..c21f056 100644
--- a/src/core/exports.ts
+++ b/src/core/exports.ts
@@ -1 +1,3 @@
export * from "./file/exports.js";
+export * from "./logging/exports.js";
+export * from "./pagination/exports.js";
diff --git a/src/core/fetcher/Fetcher.ts b/src/core/fetcher/Fetcher.ts
index 202e134..fedbcd8 100644
--- a/src/core/fetcher/Fetcher.ts
+++ b/src/core/fetcher/Fetcher.ts
@@ -1,4 +1,5 @@
import { toJson } from "../json.js";
+import { createLogger, type LogConfig, type Logger } from "../logging/logger.js";
import type { APIResponse } from "./APIResponse.js";
import { createRequestUrl } from "./createRequestUrl.js";
import type { EndpointMetadata } from "./EndpointMetadata.js";
@@ -7,6 +8,7 @@ import { getErrorResponseBody } from "./getErrorResponseBody.js";
import { getFetchFn } from "./getFetchFn.js";
import { getRequestBody } from "./getRequestBody.js";
import { getResponseBody } from "./getResponseBody.js";
+import { Headers } from "./Headers.js";
import { makeRequest } from "./makeRequest.js";
import { abortRawResponse, toRawResponse, unknownRawResponse } from "./RawResponse.js";
import { requestWithRetries } from "./requestWithRetries.js";
@@ -25,10 +27,12 @@ export declare namespace Fetcher {
maxRetries?: number;
withCredentials?: boolean;
abortSignal?: AbortSignal;
- requestType?: "json" | "file" | "bytes";
+ requestType?: "json" | "file" | "bytes" | "form" | "other";
responseType?: "json" | "blob" | "sse" | "streaming" | "text" | "arrayBuffer" | "binary-response";
duplex?: "half";
endpointMetadata?: EndpointMetadata;
+ fetchFn?: typeof fetch;
+ logging?: LogConfig | Logger;
}
export type Error = FailedStatusCodeError | NonJsonError | TimeoutError | UnknownError;
@@ -55,10 +59,164 @@ export declare namespace Fetcher {
}
}
-async function getHeaders(args: Fetcher.Args): Promise<Record<string, string>> {
-    const newHeaders: Record<string, string> = {};
+const SENSITIVE_HEADERS = new Set([
+ "authorization",
+ "www-authenticate",
+ "x-api-key",
+ "api-key",
+ "apikey",
+ "x-api-token",
+ "x-auth-token",
+ "auth-token",
+ "cookie",
+ "set-cookie",
+ "proxy-authorization",
+ "proxy-authenticate",
+ "x-csrf-token",
+ "x-xsrf-token",
+ "x-session-token",
+ "x-access-token",
+]);
+
+function redactHeaders(headers: Headers | Record<string, string>): Record<string, string> {
+    const filtered: Record<string, string> = {};
+ for (const [key, value] of headers instanceof Headers ? headers.entries() : Object.entries(headers)) {
+ if (SENSITIVE_HEADERS.has(key.toLowerCase())) {
+ filtered[key] = "[REDACTED]";
+ } else {
+ filtered[key] = value;
+ }
+ }
+ return filtered;
+}
+
+const SENSITIVE_QUERY_PARAMS = new Set([
+ "api_key",
+ "api-key",
+ "apikey",
+ "token",
+ "access_token",
+ "access-token",
+ "auth_token",
+ "auth-token",
+ "password",
+ "passwd",
+ "secret",
+ "api_secret",
+ "api-secret",
+ "apisecret",
+ "key",
+ "session",
+ "session_id",
+ "session-id",
+]);
+
+function redactQueryParameters(queryParameters?: Record<string, unknown>): Record<string, unknown> | undefined {
+ if (queryParameters == null) {
+ return queryParameters;
+ }
+    const redacted: Record<string, unknown> = {};
+ for (const [key, value] of Object.entries(queryParameters)) {
+ if (SENSITIVE_QUERY_PARAMS.has(key.toLowerCase())) {
+ redacted[key] = "[REDACTED]";
+ } else {
+ redacted[key] = value;
+ }
+ }
+ return redacted;
+}
+
+function redactUrl(url: string): string {
+ const protocolIndex = url.indexOf("://");
+ if (protocolIndex === -1) return url;
+
+ const afterProtocol = protocolIndex + 3;
+
+ // Find the first delimiter that marks the end of the authority section
+ const pathStart = url.indexOf("/", afterProtocol);
+ let queryStart = url.indexOf("?", afterProtocol);
+ let fragmentStart = url.indexOf("#", afterProtocol);
+
+ const firstDelimiter = Math.min(
+ pathStart === -1 ? url.length : pathStart,
+ queryStart === -1 ? url.length : queryStart,
+ fragmentStart === -1 ? url.length : fragmentStart,
+ );
+
+ // Find the LAST @ before the delimiter (handles multiple @ in credentials)
+ let atIndex = -1;
+ for (let i = afterProtocol; i < firstDelimiter; i++) {
+ if (url[i] === "@") {
+ atIndex = i;
+ }
+ }
+
+ if (atIndex !== -1) {
+ url = `${url.slice(0, afterProtocol)}[REDACTED]@${url.slice(atIndex + 1)}`;
+ }
+
+ // Recalculate queryStart since url might have changed
+ queryStart = url.indexOf("?");
+ if (queryStart === -1) return url;
+
+ fragmentStart = url.indexOf("#", queryStart);
+ const queryEnd = fragmentStart !== -1 ? fragmentStart : url.length;
+ const queryString = url.slice(queryStart + 1, queryEnd);
+
+ if (queryString.length === 0) return url;
+
+ // FAST PATH: Quick check if any sensitive keywords present
+ // Using indexOf is faster than regex for simple substring matching
+ const lower = queryString.toLowerCase();
+ const hasSensitive =
+ lower.includes("token") ||
+ lower.includes("key") ||
+ lower.includes("password") ||
+ lower.includes("passwd") ||
+ lower.includes("secret") ||
+ lower.includes("session") ||
+ lower.includes("auth");
+
+ if (!hasSensitive) {
+ return url;
+ }
+
+ // SLOW PATH: Parse and redact
+ const redactedParams: string[] = [];
+ const params = queryString.split("&");
+
+ for (const param of params) {
+ const equalIndex = param.indexOf("=");
+ if (equalIndex === -1) {
+ redactedParams.push(param);
+ continue;
+ }
+
+ const key = param.slice(0, equalIndex);
+ let shouldRedact = SENSITIVE_QUERY_PARAMS.has(key.toLowerCase());
+
+ if (!shouldRedact && key.includes("%")) {
+ try {
+ const decodedKey = decodeURIComponent(key);
+ shouldRedact = SENSITIVE_QUERY_PARAMS.has(decodedKey.toLowerCase());
+ } catch {}
+ }
+
+ redactedParams.push(shouldRedact ? `${key}=[REDACTED]` : param);
+ }
+
+ return url.slice(0, queryStart + 1) + redactedParams.join("&") + url.slice(queryEnd);
+}
+
+async function getHeaders(args: Fetcher.Args): Promise<Headers> {
+ const newHeaders: Headers = new Headers();
+
+ newHeaders.set(
+ "Accept",
+ args.responseType === "json" ? "application/json" : args.responseType === "text" ? "text/plain" : "*/*",
+ );
if (args.body !== undefined && args.contentType != null) {
- newHeaders["Content-Type"] = args.contentType;
+ newHeaders.set("Content-Type", args.contentType);
}
if (args.headers == null) {
@@ -68,13 +226,13 @@ async function getHeaders(args: Fetcher.Args): Promise<Record<string, string>> {
for (const [key, value] of Object.entries(args.headers)) {
const result = await EndpointSupplier.get(value, { endpointMetadata: args.endpointMetadata ?? {} });
if (typeof result === "string") {
- newHeaders[key] = result;
+ newHeaders.set(key, result);
continue;
}
if (result == null) {
continue;
}
- newHeaders[key] = `${result}`;
+ newHeaders.set(key, `${result}`);
}
return newHeaders;
}
@@ -83,9 +241,22 @@ export async function fetcherImpl<R = unknown>(args: Fetcher.Args): Promise<APIResponse<R, Fetcher.Error>> {
        if (response.status >= 200 && response.status < 400) {
+ if (logger.isDebug()) {
+ const metadata = {
+ method: args.method,
+ url: redactUrl(url),
+ statusCode: response.status,
+ responseHeaders: redactHeaders(response.headers),
+ };
+ logger.debug("HTTP request succeeded", metadata);
+ }
return {
ok: true,
body: (await getResponseBody(response, args.responseType)) as R,
@@ -112,6 +292,15 @@ export async function fetcherImpl<R = unknown>(args: Fetcher.Args): Promise<APIResponse<R, Fetcher.Error>> {
diff --git a/src/core/fetcher/getRequestBody.ts b/src/core/fetcher/getRequestBody.ts
--- a/src/core/fetcher/getRequestBody.ts
+++ b/src/core/fetcher/getRequestBody.ts
+ if (type === "form") {
+ return toQueryString(body, { arrayFormat: "repeat", encode: true });
+ }
if (type.includes("json")) {
return toJson(body);
} else {
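
The redaction helpers added to `Fetcher.ts` above are internal, so the snippet below is purely illustrative of the intended behaviour: credentials in the URL authority and values of sensitive query parameters are replaced with `[REDACTED]` before anything is logged.

```typescript
// Hypothetical input/output pair for the URL redaction rules (not part of the SDK surface).
const input = "https://user:secret@api.example.com/v1/items?prefix=abc&api_key=12345#frag";
const expected = "https://[REDACTED]@api.example.com/v1/items?prefix=abc&api_key=[REDACTED]#frag";

// Non-sensitive parameters such as `prefix` pass through unchanged; headers like
// Authorization and Cookie are redacted by the companion redactHeaders helper.
console.log({ input, expected });
```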
diff --git a/src/core/fetcher/makeRequest.ts b/src/core/fetcher/makeRequest.ts
index 5edce69..c8d3f2e 100644
--- a/src/core/fetcher/makeRequest.ts
+++ b/src/core/fetcher/makeRequest.ts
@@ -4,7 +4,7 @@ export const makeRequest = async (
    fetchFn: (url: string, init: RequestInit) => Promise<Response>,
url: string,
method: string,
-    headers: Record<string, string>,
+    headers: Headers | Record<string, string>,
requestBody: BodyInit | undefined,
timeoutMs?: number,
abortSignal?: AbortSignal,
@@ -13,7 +13,6 @@ export const makeRequest = async (
): Promise => {
const signals: AbortSignal[] = [];
- // Add timeout signal
let timeoutAbortId: NodeJS.Timeout | undefined;
if (timeoutMs != null) {
const { signal, abortId } = getTimeoutSignal(timeoutMs);
@@ -21,7 +20,6 @@ export const makeRequest = async (
signals.push(signal);
}
- // Add arbitrary signal
if (abortSignal != null) {
signals.push(abortSignal);
}
diff --git a/src/core/fetcher/requestWithRetries.ts b/src/core/fetcher/requestWithRetries.ts
index 3d30bd1..1f68968 100644
--- a/src/core/fetcher/requestWithRetries.ts
+++ b/src/core/fetcher/requestWithRetries.ts
@@ -4,28 +4,23 @@ const DEFAULT_MAX_RETRIES = 2;
const JITTER_FACTOR = 0.2; // 20% random jitter
function addPositiveJitter(delay: number): number {
- // Generate a random value between 0 and +JITTER_FACTOR
const jitterMultiplier = 1 + Math.random() * JITTER_FACTOR;
return delay * jitterMultiplier;
}
function addSymmetricJitter(delay: number): number {
- // Generate a random value in a JITTER_FACTOR-sized percentage range around delay
const jitterMultiplier = 1 + (Math.random() - 0.5) * JITTER_FACTOR;
return delay * jitterMultiplier;
}
function getRetryDelayFromHeaders(response: Response, retryAttempt: number): number {
- // Check for Retry-After header first (RFC 7231), with no jitter
const retryAfter = response.headers.get("Retry-After");
if (retryAfter) {
- // Parse as number of seconds...
const retryAfterSeconds = parseInt(retryAfter, 10);
if (!Number.isNaN(retryAfterSeconds) && retryAfterSeconds > 0) {
return Math.min(retryAfterSeconds * 1000, MAX_RETRY_DELAY);
}
- // ...or as an HTTP date; both are valid
const retryAfterDate = new Date(retryAfter);
if (!Number.isNaN(retryAfterDate.getTime())) {
const delay = retryAfterDate.getTime() - Date.now();
@@ -35,12 +30,10 @@ function getRetryDelayFromHeaders(response: Response, retryAttempt: number): num
}
}
- // Then check for industry-standard X-RateLimit-Reset header, with positive jitter
const rateLimitReset = response.headers.get("X-RateLimit-Reset");
if (rateLimitReset) {
const resetTime = parseInt(rateLimitReset, 10);
if (!Number.isNaN(resetTime)) {
- // Assume Unix timestamp in epoch seconds
const delay = resetTime * 1000 - Date.now();
if (delay > 0) {
return addPositiveJitter(Math.min(delay, MAX_RETRY_DELAY));
@@ -48,7 +41,6 @@ function getRetryDelayFromHeaders(response: Response, retryAttempt: number): num
}
}
- // Fall back to exponential backoff, with symmetric jitter
return addSymmetricJitter(Math.min(INITIAL_RETRY_DELAY * 2 ** retryAttempt, MAX_RETRY_DELAY));
}
@@ -60,7 +52,6 @@ export async function requestWithRetries(
for (let i = 0; i < maxRetries; ++i) {
if ([408, 429].includes(response.status) || response.status >= 500) {
- // Get delay with appropriate jitter applied
const delay = getRetryDelayFromHeaders(response, i);
await new Promise((resolve) => setTimeout(resolve, delay));
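
To make the backoff behaviour concrete, here is a sketch of the exponential fallback path used when neither `Retry-After` nor `X-RateLimit-Reset` is present. The `INITIAL_RETRY_DELAY` and `MAX_RETRY_DELAY` constants are not visible in this diff, so the values below are assumptions for illustration only.

```typescript
const INITIAL_RETRY_DELAY = 1_000; // ms (assumed value)
const MAX_RETRY_DELAY = 60_000; // ms (assumed value)
const JITTER_FACTOR = 0.2; // 20% total range, i.e. roughly +/-10% around the base delay

// Mirrors the header-less fallback: exponential backoff with symmetric jitter.
function fallbackDelay(retryAttempt: number): number {
    const base = Math.min(INITIAL_RETRY_DELAY * 2 ** retryAttempt, MAX_RETRY_DELAY);
    const jitterMultiplier = 1 + (Math.random() - 0.5) * JITTER_FACTOR;
    return base * jitterMultiplier;
}

for (let attempt = 0; attempt < 3; attempt++) {
    console.log(`attempt ${attempt}: ~${Math.round(fallbackDelay(attempt))}ms`);
}
```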
diff --git a/src/core/fetcher/signals.ts b/src/core/fetcher/signals.ts
index a8d32a2..c9fcaef 100644
--- a/src/core/fetcher/signals.ts
+++ b/src/core/fetcher/signals.ts
@@ -6,29 +6,17 @@ export function getTimeoutSignal(timeoutMs: number): { signal: AbortSignal; abor
return { signal: controller.signal, abortId };
}
-/**
- * Returns an abort signal that is getting aborted when
- * at least one of the specified abort signals is aborted.
- *
- * Requires at least node.js 18.
- */
export function anySignal(...args: AbortSignal[] | [AbortSignal[]]): AbortSignal {
- // Allowing signals to be passed either as array
- // of signals or as multiple arguments.
const signals = (args.length === 1 && Array.isArray(args[0]) ? args[0] : args) as AbortSignal[];
const controller = new AbortController();
for (const signal of signals) {
if (signal.aborted) {
- // Exiting early if one of the signals
- // is already aborted.
controller.abort((signal as any)?.reason);
break;
}
- // Listening for signals and removing the listeners
- // when at least one symbol is aborted.
signal.addEventListener("abort", () => controller.abort((signal as any)?.reason), {
signal: controller.signal,
});
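
A brief sketch of how `anySignal` combines a caller-supplied `AbortSignal` with the SDK's timeout signal. `makeRequest` wires this up internally, so the direct call and the internal import path below are illustrative only.

```typescript
import { anySignal, getTimeoutSignal } from "./signals.js"; // internal module, shown for illustration

const userController = new AbortController();
const { signal: timeoutSignal, abortId } = getTimeoutSignal(5_000);

// Aborts as soon as either the caller cancels or the 5s timeout fires.
const combined = anySignal(userController.signal, timeoutSignal);
combined.addEventListener("abort", () => clearTimeout(abortId));

// e.g. fetch(url, { signal: combined });
userController.abort();
```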
diff --git a/src/core/headers.ts b/src/core/headers.ts
index a723d22..78ed8b5 100644
--- a/src/core/headers.ts
+++ b/src/core/headers.ts
@@ -6,10 +6,11 @@ export function mergeHeaders(
for (const [key, value] of headersArray
.filter((headers) => headers != null)
.flatMap((headers) => Object.entries(headers))) {
+ const insensitiveKey = key.toLowerCase();
if (value != null) {
- result[key] = value;
- } else if (key in result) {
- delete result[key];
+ result[insensitiveKey] = value;
+ } else if (insensitiveKey in result) {
+ delete result[insensitiveKey];
}
}
@@ -24,8 +25,9 @@ export function mergeOnlyDefinedHeaders(
for (const [key, value] of headersArray
.filter((headers) => headers != null)
.flatMap((headers) => Object.entries(headers))) {
+ const insensitiveKey = key.toLowerCase();
if (value != null) {
- result[key] = value;
+ result[insensitiveKey] = value;
}
}
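
A quick sketch of the behavioural change in `mergeHeaders`: keys are now normalised to lower case, so a later value overrides an earlier one regardless of casing, and a `null` value still removes a previously set header. The exact parameter typing is not shown in this diff, so the call below assumes plain string records.

```typescript
import { mergeHeaders } from "./headers.js"; // internal module, shown for illustration

const merged = mergeHeaders(
    { "X-Custom-Header": "default" },
    { "x-custom-header": "override" },
);

// Keys are lower-cased on merge, so the second value wins:
// => { "x-custom-header": "override" }
console.log(merged);
```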
diff --git a/src/core/index.ts b/src/core/index.ts
index e838642..e000452 100644
--- a/src/core/index.ts
+++ b/src/core/index.ts
@@ -2,6 +2,7 @@ export * from "./auth/index.js";
export * from "./base64.js";
export * from "./fetcher/index.js";
export * as file from "./file/index.js";
+export * as logging from "./logging/index.js";
export * from "./pagination/index.js";
export * from "./runtime/index.js";
export * from "./stream/index.js";
diff --git a/src/core/logging/exports.ts b/src/core/logging/exports.ts
new file mode 100644
index 0000000..88f6c00
--- /dev/null
+++ b/src/core/logging/exports.ts
@@ -0,0 +1,19 @@
+import * as logger from "./logger.js";
+
+export namespace logging {
+ /**
+ * Configuration for logger instances.
+ */
+ export type LogConfig = logger.LogConfig;
+ export type LogLevel = logger.LogLevel;
+ export const LogLevel: typeof logger.LogLevel = logger.LogLevel;
+ export type ILogger = logger.ILogger;
+ /**
+ * Console logger implementation that outputs to the console.
+ */
+ export type ConsoleLogger = logger.ConsoleLogger;
+ /**
+ * Console logger implementation that outputs to the console.
+ */
+ export const ConsoleLogger: typeof logger.ConsoleLogger = logger.ConsoleLogger;
+}
diff --git a/src/core/logging/index.ts b/src/core/logging/index.ts
new file mode 100644
index 0000000..d81cc32
--- /dev/null
+++ b/src/core/logging/index.ts
@@ -0,0 +1 @@
+export * from "./logger.js";
diff --git a/src/core/logging/logger.ts b/src/core/logging/logger.ts
new file mode 100644
index 0000000..a3f3673
--- /dev/null
+++ b/src/core/logging/logger.ts
@@ -0,0 +1,203 @@
+export const LogLevel = {
+ Debug: "debug",
+ Info: "info",
+ Warn: "warn",
+ Error: "error",
+} as const;
+export type LogLevel = (typeof LogLevel)[keyof typeof LogLevel];
+const logLevelMap: Record<LogLevel, number> = {
+ [LogLevel.Debug]: 1,
+ [LogLevel.Info]: 2,
+ [LogLevel.Warn]: 3,
+ [LogLevel.Error]: 4,
+};
+
+export interface ILogger {
+ /**
+ * Logs a debug message.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ debug(message: string, ...args: unknown[]): void;
+ /**
+ * Logs an info message.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ info(message: string, ...args: unknown[]): void;
+ /**
+ * Logs a warning message.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ warn(message: string, ...args: unknown[]): void;
+ /**
+ * Logs an error message.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ error(message: string, ...args: unknown[]): void;
+}
+
+/**
+ * Configuration for logger initialization.
+ */
+export interface LogConfig {
+ /**
+ * Minimum log level to output.
+ * @default LogLevel.Info
+ */
+ level?: LogLevel;
+ /**
+ * Logger implementation to use.
+ * @default new ConsoleLogger()
+ */
+ logger?: ILogger;
+ /**
+ * Whether logging should be silenced.
+ * @default true
+ */
+ silent?: boolean;
+}
+
+/**
+ * Default console-based logger implementation.
+ */
+export class ConsoleLogger implements ILogger {
+ debug(message: string, ...args: unknown[]): void {
+ console.debug(message, ...args);
+ }
+ info(message: string, ...args: unknown[]): void {
+ console.info(message, ...args);
+ }
+ warn(message: string, ...args: unknown[]): void {
+ console.warn(message, ...args);
+ }
+ error(message: string, ...args: unknown[]): void {
+ console.error(message, ...args);
+ }
+}
+
+/**
+ * Logger class that provides level-based logging functionality.
+ */
+export class Logger {
+ private readonly level: number;
+ private readonly logger: ILogger;
+ private readonly silent: boolean;
+
+ /**
+ * Creates a new logger instance.
+ * @param config - Logger configuration
+ */
+    constructor(config: Required<LogConfig>) {
+ this.level = logLevelMap[config.level];
+ this.logger = config.logger;
+ this.silent = config.silent;
+ }
+
+ /**
+ * Checks if a log level should be output based on configuration.
+ * @param level - The log level to check
+ * @returns True if the level should be logged
+ */
+ public shouldLog(level: LogLevel): boolean {
+ return !this.silent && this.level <= logLevelMap[level];
+ }
+
+ /**
+ * Checks if debug logging is enabled.
+ * @returns True if debug logs should be output
+ */
+ public isDebug(): boolean {
+ return this.shouldLog(LogLevel.Debug);
+ }
+
+ /**
+ * Logs a debug message if debug logging is enabled.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ public debug(message: string, ...args: unknown[]): void {
+ if (this.isDebug()) {
+ this.logger.debug(message, ...args);
+ }
+ }
+
+ /**
+ * Checks if info logging is enabled.
+ * @returns True if info logs should be output
+ */
+ public isInfo(): boolean {
+ return this.shouldLog(LogLevel.Info);
+ }
+
+ /**
+ * Logs an info message if info logging is enabled.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ public info(message: string, ...args: unknown[]): void {
+ if (this.isInfo()) {
+ this.logger.info(message, ...args);
+ }
+ }
+
+ /**
+ * Checks if warning logging is enabled.
+ * @returns True if warning logs should be output
+ */
+ public isWarn(): boolean {
+ return this.shouldLog(LogLevel.Warn);
+ }
+
+ /**
+ * Logs a warning message if warning logging is enabled.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ public warn(message: string, ...args: unknown[]): void {
+ if (this.isWarn()) {
+ this.logger.warn(message, ...args);
+ }
+ }
+
+ /**
+ * Checks if error logging is enabled.
+ * @returns True if error logs should be output
+ */
+ public isError(): boolean {
+ return this.shouldLog(LogLevel.Error);
+ }
+
+ /**
+ * Logs an error message if error logging is enabled.
+ * @param message - The message to log
+ * @param args - Additional arguments to log
+ */
+ public error(message: string, ...args: unknown[]): void {
+ if (this.isError()) {
+ this.logger.error(message, ...args);
+ }
+ }
+}
+
+export function createLogger(config?: LogConfig | Logger): Logger {
+ if (config == null) {
+ return defaultLogger;
+ }
+ if (config instanceof Logger) {
+ return config;
+ }
+ config = config ?? {};
+ config.level ??= LogLevel.Info;
+ config.logger ??= new ConsoleLogger();
+ config.silent ??= true;
+    return new Logger(config as Required<LogConfig>);
+}
+
+const defaultLogger: Logger = new Logger({
+ level: LogLevel.Info,
+ logger: new ConsoleLogger(),
+ silent: true,
+});
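
A sketch of wiring the new logging module to a custom sink. `ILogger`, `LogLevel`, and `createLogger` come from the file above; forwarding the resulting config through the client's `logging` option relies on the `BaseClientOptions` change shown earlier, and the internal import path is for illustration only.

```typescript
import { createLogger, LogLevel, type ILogger } from "./logger.js"; // internal module, shown for illustration

// A custom sink could forward to any logging framework; here it just prefixes messages.
const sink: ILogger = {
    debug: (msg, ...args) => console.debug(`[sdk] ${msg}`, ...args),
    info: (msg, ...args) => console.info(`[sdk] ${msg}`, ...args),
    warn: (msg, ...args) => console.warn(`[sdk] ${msg}`, ...args),
    error: (msg, ...args) => console.error(`[sdk] ${msg}`, ...args),
};

// silent defaults to true, so logging must be enabled explicitly.
const logger = createLogger({ level: LogLevel.Debug, logger: sink, silent: false });
logger.debug("HTTP request succeeded", { statusCode: 200 });
logger.info("info messages reach the sink as well");
```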
diff --git a/src/core/pagination/Page.ts b/src/core/pagination/Page.ts
index 1aa08e5..6621a6f 100644
--- a/src/core/pagination/Page.ts
+++ b/src/core/pagination/Page.ts
@@ -4,15 +4,16 @@ import type { HttpResponsePromise, RawResponse } from "../fetcher/index.js";
* A page of results from a paginated API.
*
* @template T The type of the items in the page.
+ * @template R The type of the API response.
*/
-export class Page<T> implements AsyncIterable<T> {
+export class Page<T, R> implements AsyncIterable<T> {
public data: T[];
public rawResponse: RawResponse;
+ public response: R;
- private response: unknown;
- private _hasNextPage: (response: unknown) => boolean;
- private getItems: (response: unknown) => T[];
-    private loadNextPage: (response: unknown) => HttpResponsePromise<unknown>;
+ private _hasNextPage: (response: R) => boolean;
+ private getItems: (response: R) => T[];
+    private loadNextPage: (response: R) => HttpResponsePromise<R>;
constructor({
response,
@@ -21,11 +22,11 @@ export class Page<T> implements AsyncIterable<T> {
getItems,
loadPage,
}: {
- response: unknown;
+ response: R;
rawResponse: RawResponse;
- hasNextPage: (response: unknown) => boolean;
- getItems: (response: unknown) => T[];
-        loadPage: (response: unknown) => HttpResponsePromise<unknown>;
+ hasNextPage: (response: R) => boolean;
+ getItems: (response: R) => T[];
+        loadPage: (response: R) => HttpResponsePromise<R>;
}) {
this.response = response;
this.rawResponse = rawResponse;
diff --git a/src/core/pagination/Pageable.ts b/src/core/pagination/Pageable.ts
deleted file mode 100644
index 5689e1e..0000000
--- a/src/core/pagination/Pageable.ts
+++ /dev/null
@@ -1,18 +0,0 @@
-import type { RawResponse } from "../fetcher/index.js";
-import { Page } from "./Page.js";
-
-export declare namespace Pageable {
-    interface Args<Response, Item> {
- response: Response;
- rawResponse: RawResponse;
- hasNextPage: (response: Response) => boolean;
- getItems: (response: Response) => Item[];
-        loadPage: (response: Response) => Promise<Response>;
- }
-}
-
-export class Pageable<Response, Item> extends Page<Item> {
-    constructor(args: Pageable.Args<Response, Item>) {
- super(args as any);
- }
-}
diff --git a/src/core/pagination/exports.ts b/src/core/pagination/exports.ts
new file mode 100644
index 0000000..d3acc60
--- /dev/null
+++ b/src/core/pagination/exports.ts
@@ -0,0 +1 @@
+export type { Page } from "./Page.js";
diff --git a/src/core/pagination/index.ts b/src/core/pagination/index.ts
index b0cd68f..7781cbd 100644
--- a/src/core/pagination/index.ts
+++ b/src/core/pagination/index.ts
@@ -1,2 +1 @@
export { Page } from "./Page.js";
-export { Pageable } from "./Pageable.js";
diff --git a/src/core/stream/Stream.ts b/src/core/stream/Stream.ts
index e41f05a..4d4b97f 100644
--- a/src/core/stream/Stream.ts
+++ b/src/core/stream/Stream.ts
@@ -43,6 +43,7 @@ export class Stream<T> implements AsyncIterable<T> {
private messageTerminator: string;
private streamTerminator: string | undefined;
private controller: AbortController = new AbortController();
+ private decoder: TextDecoder | undefined;
    constructor({ stream, parse, eventShape, signal }: Stream.Args & { parse: (val: unknown) => Promise<T> }) {
this.stream = stream;
@@ -55,6 +56,11 @@ export class Stream implements AsyncIterable {
this.messageTerminator = eventShape.messageTerminator;
}
signal?.addEventListener("abort", () => this.controller.abort());
+
+ // Initialize shared TextDecoder
+ if (typeof TextDecoder !== "undefined") {
+ this.decoder = new TextDecoder("utf-8");
+ }
}
private async *iterMessages(): AsyncGenerator {
@@ -67,7 +73,7 @@ export class Stream implements AsyncIterable {
let terminatorIndex: number;
while ((terminatorIndex = buf.indexOf(this.messageTerminator)) >= 0) {
- let line = buf.slice(0, terminatorIndex + 1);
+ let line = buf.slice(0, terminatorIndex);
buf = buf.slice(terminatorIndex + this.messageTerminator.length);
if (!line.trim()) {
@@ -101,10 +107,9 @@ export class Stream implements AsyncIterable {
private decodeChunk(chunk: any): string {
let decoded = "";
- // If TextDecoder is present, use it
- if (typeof TextDecoder !== "undefined") {
- const decoder = new TextDecoder("utf8");
- decoded += decoder.decode(chunk);
+ // If TextDecoder is available, use the streaming decoder instance
+ if (this.decoder != null) {
+ decoded += this.decoder.decode(chunk, { stream: true });
}
// Buffer is present in Node.js environment
else if (RUNTIME.type === "node" && typeof chunk !== "undefined") {
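
The switch to a single shared `TextDecoder` with `{ stream: true }` matters when a multi-byte UTF-8 character is split across network chunks; a fresh decoder per chunk emits replacement characters at the boundary. A small illustration, independent of the `Stream` class:

```typescript
const bytes = new TextEncoder().encode("héllo"); // "é" is two bytes in UTF-8
const first = bytes.slice(0, 2);
const second = bytes.slice(2);

// Fresh decoder per chunk (old behaviour): the split character is mangled.
const perChunk = new TextDecoder("utf-8").decode(first) + new TextDecoder("utf-8").decode(second);

// Shared streaming decoder (new behaviour): the character survives the chunk boundary.
const shared = new TextDecoder("utf-8");
const streaming = shared.decode(first, { stream: true }) + shared.decode(second, { stream: true });

console.log({ perChunk, streaming }); // perChunk contains U+FFFD, streaming === "héllo"
```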
diff --git a/src/core/url/join.ts b/src/core/url/join.ts
index b872a90..7ca7dae 100644
--- a/src/core/url/join.ts
+++ b/src/core/url/join.ts
@@ -12,7 +12,6 @@ export function join(base: string, ...segments: string[]): string {
try {
url = new URL(base);
} catch {
- // Fallback to path joining if URL is malformed
return joinPath(base, ...segments);
}
diff --git a/src/version.ts b/src/version.ts
index b54c0db..68e5be6 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const SDK_VERSION = "2.5.0";
+export const SDK_VERSION = "3.0.1";
diff --git a/tests/mock-server/mockEndpointBuilder.ts b/tests/mock-server/mockEndpointBuilder.ts
index 18557ec..1b0e510 100644
--- a/tests/mock-server/mockEndpointBuilder.ts
+++ b/tests/mock-server/mockEndpointBuilder.ts
@@ -2,6 +2,7 @@ import { type DefaultBodyType, type HttpHandler, HttpResponse, type HttpResponse
import { url } from "../../src/core";
import { toJson } from "../../src/core/json";
+import { withFormUrlEncoded } from "./withFormUrlEncoded";
import { withHeaders } from "./withHeaders";
import { withJson } from "./withJson";
@@ -26,6 +27,7 @@ interface RequestHeadersStage extends RequestBodyStage, ResponseStage {
interface RequestBodyStage extends ResponseStage {
jsonBody(body: unknown): ResponseStage;
+ formUrlEncodedBody(body: unknown): ResponseStage;
}
interface ResponseStage {
@@ -135,6 +137,16 @@ class RequestBuilder implements MethodStage, RequestHeadersStage, RequestBodySta
return this;
}
+ formUrlEncodedBody(body: unknown): ResponseStage {
+ if (body === undefined) {
+ throw new Error(
+ "Undefined is not valid for form-urlencoded. Do not call formUrlEncodedBody if you want an empty body.",
+ );
+ }
+ this.predicates.push((resolver) => withFormUrlEncoded(body, resolver));
+ return this;
+ }
+
respondWith(): ResponseStatusStage {
return new ResponseBuilder(this.method, this.buildUrl(), this.predicates, this.handlerOptions);
}
diff --git a/tests/mock-server/withFormUrlEncoded.ts b/tests/mock-server/withFormUrlEncoded.ts
new file mode 100644
index 0000000..e9e6ff2
--- /dev/null
+++ b/tests/mock-server/withFormUrlEncoded.ts
@@ -0,0 +1,80 @@
+import { type HttpResponseResolver, passthrough } from "msw";
+
+import { toJson } from "../../src/core/json";
+
+/**
+ * Creates a request matcher that validates whether the request's form-urlencoded body exactly matches the expected object.
+ * @param expectedBody - The exact body object to match against
+ * @param resolver - Response resolver to execute if body matches
+ */
+export function withFormUrlEncoded(expectedBody: unknown, resolver: HttpResponseResolver): HttpResponseResolver {
+ return async (args) => {
+ const { request } = args;
+
+ let clonedRequest: Request;
+ let bodyText: string | undefined;
+        let actualBody: Record<string, string>;
+ try {
+ clonedRequest = request.clone();
+ bodyText = await clonedRequest.text();
+ if (bodyText === "") {
+ console.error("Request body is empty, expected a form-urlencoded body.");
+ return passthrough();
+ }
+ const params = new URLSearchParams(bodyText);
+ actualBody = {};
+ for (const [key, value] of params.entries()) {
+ actualBody[key] = value;
+ }
+ } catch (error) {
+ console.error(`Error processing form-urlencoded request body:\n\tError: ${error}\n\tBody: ${bodyText}`);
+ return passthrough();
+ }
+
+ const mismatches = findMismatches(actualBody, expectedBody);
+ if (Object.keys(mismatches).length > 0) {
+ console.error("Form-urlencoded body mismatch:", toJson(mismatches, undefined, 2));
+ return passthrough();
+ }
+
+ return resolver(args);
+ };
+}
+
+function findMismatches(actual: any, expected: any): Record<string, unknown> {
+    const mismatches: Record<string, unknown> = {};
+
+ if (typeof actual !== typeof expected) {
+ return { value: { actual, expected } };
+ }
+
+ if (typeof actual !== "object" || actual === null || expected === null) {
+ if (actual !== expected) {
+ return { value: { actual, expected } };
+ }
+ return {};
+ }
+
+ const actualKeys = Object.keys(actual);
+ const expectedKeys = Object.keys(expected);
+
+ const allKeys = new Set([...actualKeys, ...expectedKeys]);
+
+ for (const key of allKeys) {
+ if (!expectedKeys.includes(key)) {
+ if (actual[key] === undefined) {
+ continue;
+ }
+ mismatches[key] = { actual: actual[key], expected: undefined };
+ } else if (!actualKeys.includes(key)) {
+ if (expected[key] === undefined) {
+ continue;
+ }
+ mismatches[key] = { actual: undefined, expected: expected[key] };
+ } else if (actual[key] !== expected[key]) {
+ mismatches[key] = { actual: actual[key], expected: expected[key] };
+ }
+ }
+
+ return mismatches;
+}
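
The core of `withFormUrlEncoded` is the `URLSearchParams` comparison. A short sketch of the same parsing step, independent of msw, using a made-up request body:

```typescript
// What withFormUrlEncoded does with the raw request body before comparing it.
const bodyText = "grant_type=client_credentials&scope=read%20write";
const params = new URLSearchParams(bodyText);
const actualBody: Record<string, string> = {};
for (const [key, value] of params.entries()) {
    actualBody[key] = value;
}

console.log(actualBody); // { grant_type: "client_credentials", scope: "read write" }
// An exact match against the expected object is then required; any difference is
// reported via findMismatches and the request falls through to passthrough().
```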
diff --git a/tests/setup.ts b/tests/setup.ts
new file mode 100644
index 0000000..a5651f8
--- /dev/null
+++ b/tests/setup.ts
@@ -0,0 +1,80 @@
+import { expect } from "vitest";
+
+interface CustomMatchers<R = unknown> {
+    toContainHeaders(expectedHeaders: Record<string, string>): R;
+}
+
+declare module "vitest" {
+    interface Assertion<T = any> extends CustomMatchers<T> {}
+ interface AsymmetricMatchersContaining extends CustomMatchers {}
+}
+
+expect.extend({
+    toContainHeaders(actual: unknown, expectedHeaders: Record<string, string>) {
+ const isHeaders = actual instanceof Headers;
+ const isPlainObject = typeof actual === "object" && actual !== null && !Array.isArray(actual);
+
+ if (!isHeaders && !isPlainObject) {
+ throw new TypeError("Received value must be an instance of Headers or a plain object!");
+ }
+
+ if (typeof expectedHeaders !== "object" || expectedHeaders === null || Array.isArray(expectedHeaders)) {
+ throw new TypeError("Expected headers must be a plain object!");
+ }
+
+ const missingHeaders: string[] = [];
+ const mismatchedHeaders: Array<{ key: string; expected: string; actual: string | null }> = [];
+
+ for (const [key, value] of Object.entries(expectedHeaders)) {
+ let actualValue: string | null = null;
+
+ if (isHeaders) {
+ // Headers.get() is already case-insensitive
+ actualValue = (actual as Headers).get(key);
+ } else {
+ // For plain objects, do case-insensitive lookup
+            const actualObj = actual as Record<string, string>;
+ const lowerKey = key.toLowerCase();
+ const foundKey = Object.keys(actualObj).find((k) => k.toLowerCase() === lowerKey);
+ actualValue = foundKey ? actualObj[foundKey] : null;
+ }
+
+ if (actualValue === null || actualValue === undefined) {
+ missingHeaders.push(key);
+ } else if (actualValue !== value) {
+ mismatchedHeaders.push({ key, expected: value, actual: actualValue });
+ }
+ }
+
+ const pass = missingHeaders.length === 0 && mismatchedHeaders.length === 0;
+
+ const actualType = isHeaders ? "Headers" : "object";
+
+ if (pass) {
+ return {
+ message: () => `expected ${actualType} not to contain ${this.utils.printExpected(expectedHeaders)}`,
+ pass: true,
+ };
+ } else {
+ const messages: string[] = [];
+
+ if (missingHeaders.length > 0) {
+ messages.push(`Missing headers: ${this.utils.printExpected(missingHeaders.join(", "))}`);
+ }
+
+ if (mismatchedHeaders.length > 0) {
+ const mismatches = mismatchedHeaders.map(
+ ({ key, expected, actual }) =>
+ `${key}: expected ${this.utils.printExpected(expected)} but got ${this.utils.printReceived(actual)}`,
+ );
+ messages.push(mismatches.join("\n"));
+ }
+
+ return {
+ message: () =>
+ `expected ${actualType} to contain ${this.utils.printExpected(expectedHeaders)}\n\n${messages.join("\n")}`,
+ pass: false,
+ };
+ }
+ },
+});
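
Assuming tests/setup.ts is registered as a vitest setup file (not shown in this diff), the custom matcher can be used like this:

```typescript
import { expect, it } from "vitest";

it("matches headers case-insensitively", () => {
    const headers = new Headers({ "x-fern-language": "JavaScript" });

    // Works against both Headers instances and plain header objects.
    expect(headers).toContainHeaders({ "X-Fern-Language": "JavaScript" });
    expect({ "Content-Type": "application/json" }).toContainHeaders({ "content-type": "application/json" });
});
```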
diff --git a/tests/unit/auth/BasicAuth.test.ts b/tests/unit/auth/BasicAuth.test.ts
index 90bbc7e..9b51233 100644
--- a/tests/unit/auth/BasicAuth.test.ts
+++ b/tests/unit/auth/BasicAuth.test.ts
@@ -1,58 +1,92 @@
import { BasicAuth } from "../../../src/core/auth/BasicAuth";
describe("BasicAuth", () => {
- describe("toAuthorizationHeader", () => {
- it("correctly converts to header", () => {
- expect(
- BasicAuth.toAuthorizationHeader({
- username: "username",
- password: "password",
- }),
- ).toBe("Basic dXNlcm5hbWU6cGFzc3dvcmQ=");
- });
- });
- describe("fromAuthorizationHeader", () => {
- it("correctly parses header", () => {
- expect(BasicAuth.fromAuthorizationHeader("Basic dXNlcm5hbWU6cGFzc3dvcmQ=")).toEqual({
- username: "username",
- password: "password",
- });
- });
+ interface ToHeaderTestCase {
+ description: string;
+ input: { username: string; password: string };
+ expected: string;
+ }
- it("handles password with colons", () => {
- expect(BasicAuth.fromAuthorizationHeader("Basic dXNlcjpwYXNzOndvcmQ=")).toEqual({
- username: "user",
- password: "pass:word",
- });
- });
+ interface FromHeaderTestCase {
+ description: string;
+ input: string;
+ expected: { username: string; password: string };
+ }
- it("handles empty username and password (just colon)", () => {
- expect(BasicAuth.fromAuthorizationHeader("Basic Og==")).toEqual({
- username: "",
- password: "",
- });
- });
+ interface ErrorTestCase {
+ description: string;
+ input: string;
+ expectedError: string;
+ }
+
+ describe("toAuthorizationHeader", () => {
+ const toHeaderTests: ToHeaderTestCase[] = [
+ {
+ description: "correctly converts to header",
+ input: { username: "username", password: "password" },
+ expected: "Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
+ },
+ ];
- it("handles empty username", () => {
- expect(BasicAuth.fromAuthorizationHeader("Basic OnBhc3N3b3Jk")).toEqual({
- username: "",
- password: "password",
+ toHeaderTests.forEach(({ description, input, expected }) => {
+ it(description, () => {
+ expect(BasicAuth.toAuthorizationHeader(input)).toBe(expected);
});
});
+ });
- it("handles empty password", () => {
- expect(BasicAuth.fromAuthorizationHeader("Basic dXNlcm5hbWU6")).toEqual({
- username: "username",
- password: "",
+ describe("fromAuthorizationHeader", () => {
+ const fromHeaderTests: FromHeaderTestCase[] = [
+ {
+ description: "correctly parses header",
+ input: "Basic dXNlcm5hbWU6cGFzc3dvcmQ=",
+ expected: { username: "username", password: "password" },
+ },
+ {
+ description: "handles password with colons",
+ input: "Basic dXNlcjpwYXNzOndvcmQ=",
+ expected: { username: "user", password: "pass:word" },
+ },
+ {
+ description: "handles empty username and password (just colon)",
+ input: "Basic Og==",
+ expected: { username: "", password: "" },
+ },
+ {
+ description: "handles empty username",
+ input: "Basic OnBhc3N3b3Jk",
+ expected: { username: "", password: "password" },
+ },
+ {
+ description: "handles empty password",
+ input: "Basic dXNlcm5hbWU6",
+ expected: { username: "username", password: "" },
+ },
+ ];
+
+ fromHeaderTests.forEach(({ description, input, expected }) => {
+ it(description, () => {
+ expect(BasicAuth.fromAuthorizationHeader(input)).toEqual(expected);
});
});
- it("throws error for completely empty credentials", () => {
- expect(() => BasicAuth.fromAuthorizationHeader("Basic ")).toThrow("Invalid basic auth");
- });
+ const errorTests: ErrorTestCase[] = [
+ {
+ description: "throws error for completely empty credentials",
+ input: "Basic ",
+ expectedError: "Invalid basic auth",
+ },
+ {
+ description: "throws error for credentials without colon",
+ input: "Basic dXNlcm5hbWU=",
+ expectedError: "Invalid basic auth",
+ },
+ ];
- it("throws error for credentials without colon", () => {
- expect(() => BasicAuth.fromAuthorizationHeader("Basic dXNlcm5hbWU=")).toThrow("Invalid basic auth");
+ errorTests.forEach(({ description, input, expectedError }) => {
+ it(description, () => {
+ expect(() => BasicAuth.fromAuthorizationHeader(input)).toThrow(expectedError);
+ });
});
});
});
diff --git a/tests/unit/fetcher/Fetcher.test.ts b/tests/unit/fetcher/Fetcher.test.ts
index bfc6429..60df2b5 100644
--- a/tests/unit/fetcher/Fetcher.test.ts
+++ b/tests/unit/fetcher/Fetcher.test.ts
@@ -13,6 +13,7 @@ describe("Test fetcherImpl", () => {
body: { data: "test" },
contentType: "application/json",
requestType: "json",
+ maxRetries: 0,
responseType: "json",
};
@@ -33,7 +34,7 @@ describe("Test fetcherImpl", () => {
"https://httpbin.org/post",
expect.objectContaining({
method: "POST",
- headers: expect.objectContaining({ "X-Test": "x-test-header" }),
+ headers: expect.toContainHeaders({ "X-Test": "x-test-header" }),
body: JSON.stringify({ data: "test" }),
}),
);
@@ -47,6 +48,7 @@ describe("Test fetcherImpl", () => {
headers: { "X-Test": "x-test-header" },
contentType: "application/octet-stream",
requestType: "bytes",
+ maxRetries: 0,
responseType: "json",
body: fs.createReadStream(join(__dirname, "test-file.txt")),
};
@@ -64,7 +66,7 @@ describe("Test fetcherImpl", () => {
url,
expect.objectContaining({
method: "POST",
- headers: expect.objectContaining({ "X-Test": "x-test-header" }),
+ headers: expect.toContainHeaders({ "X-Test": "x-test-header" }),
body: expect.any(fs.ReadStream),
}),
);
@@ -80,6 +82,7 @@ describe("Test fetcherImpl", () => {
url,
method: "GET",
headers: { "X-Test": "x-test-header" },
+ maxRetries: 0,
responseType: "binary-response",
};
@@ -99,7 +102,7 @@ describe("Test fetcherImpl", () => {
url,
expect.objectContaining({
method: "GET",
- headers: expect.objectContaining({ "X-Test": "x-test-header" }),
+ headers: expect.toContainHeaders({ "X-Test": "x-test-header" }),
}),
);
expect(result.ok).toBe(true);
@@ -125,6 +128,7 @@ describe("Test fetcherImpl", () => {
url,
method: "GET",
headers: { "X-Test": "x-test-header" },
+ maxRetries: 0,
responseType: "binary-response",
};
@@ -144,7 +148,7 @@ describe("Test fetcherImpl", () => {
url,
expect.objectContaining({
method: "GET",
- headers: expect.objectContaining({ "X-Test": "x-test-header" }),
+ headers: expect.toContainHeaders({ "X-Test": "x-test-header" }),
}),
);
expect(result.ok).toBe(true);
@@ -170,6 +174,7 @@ describe("Test fetcherImpl", () => {
url,
method: "GET",
headers: { "X-Test": "x-test-header" },
+ maxRetries: 0,
responseType: "binary-response",
};
@@ -189,7 +194,7 @@ describe("Test fetcherImpl", () => {
url,
expect.objectContaining({
method: "GET",
- headers: expect.objectContaining({ "X-Test": "x-test-header" }),
+ headers: expect.toContainHeaders({ "X-Test": "x-test-header" }),
}),
);
expect(result.ok).toBe(true);
@@ -213,6 +218,7 @@ describe("Test fetcherImpl", () => {
url,
method: "GET",
headers: { "X-Test": "x-test-header" },
+ maxRetries: 0,
responseType: "binary-response",
};
@@ -232,7 +238,7 @@ describe("Test fetcherImpl", () => {
url,
expect.objectContaining({
method: "GET",
- headers: expect.objectContaining({ "X-Test": "x-test-header" }),
+ headers: expect.toContainHeaders({ "X-Test": "x-test-header" }),
}),
);
expect(result.ok).toBe(true);
diff --git a/tests/unit/fetcher/createRequestUrl.test.ts b/tests/unit/fetcher/createRequestUrl.test.ts
index 06e03b2..a92f1b5 100644
--- a/tests/unit/fetcher/createRequestUrl.test.ts
+++ b/tests/unit/fetcher/createRequestUrl.test.ts
@@ -1,160 +1,163 @@
import { createRequestUrl } from "../../../src/core/fetcher/createRequestUrl";
describe("Test createRequestUrl", () => {
- it("should return the base URL when no query parameters are provided", () => {
- const baseUrl = "https://api.example.com";
- expect(createRequestUrl(baseUrl)).toBe(baseUrl);
- });
-
- it("should append simple query parameters", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { key: "value", another: "param" };
- expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?key=value&another=param");
- });
-
- it("should handle array query parameters", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { items: ["a", "b", "c"] };
- expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?items=a&items=b&items=c");
- });
-
- it("should handle object query parameters", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { filter: { name: "John", age: 30 } };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?filter%5Bname%5D=John&filter%5Bage%5D=30",
- );
- });
-
- it("should handle mixed types of query parameters", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = {
- simple: "value",
- array: ["x", "y"],
- object: { key: "value" },
- };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?simple=value&array=x&array=y&object%5Bkey%5D=value",
- );
- });
-
- it("should handle empty query parameters object", () => {
- const baseUrl = "https://api.example.com";
- expect(createRequestUrl(baseUrl, {})).toBe(baseUrl);
- });
-
- it("should encode special characters in query parameters", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { special: "a&b=c d" };
- expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?special=a%26b%3Dc%20d");
- });
-
- // Additional tests for edge cases and different value types
- it("should handle numeric values", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { count: 42, price: 19.99, active: 1, inactive: 0 };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?count=42&price=19.99&active=1&inactive=0",
- );
- });
-
- it("should handle boolean values", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { enabled: true, disabled: false };
- expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?enabled=true&disabled=false");
- });
-
- it("should handle null and undefined values", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = {
- valid: "value",
- nullValue: null,
- undefinedValue: undefined,
- emptyString: "",
- };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?valid=value&nullValue=&emptyString=",
- );
- });
-
- it("should handle deeply nested objects", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = {
- user: {
- profile: {
- name: "John",
- settings: { theme: "dark" },
+ const BASE_URL = "https://api.example.com";
+
+ interface TestCase {
+ description: string;
+ baseUrl: string;
+        queryParams?: Record<string, any>;
+ expected: string;
+ }
+
+ const testCases: TestCase[] = [
+ {
+ description: "should return the base URL when no query parameters are provided",
+ baseUrl: BASE_URL,
+ expected: BASE_URL,
+ },
+ {
+ description: "should append simple query parameters",
+ baseUrl: BASE_URL,
+ queryParams: { key: "value", another: "param" },
+ expected: "https://api.example.com?key=value&another=param",
+ },
+ {
+ description: "should handle array query parameters",
+ baseUrl: BASE_URL,
+ queryParams: { items: ["a", "b", "c"] },
+ expected: "https://api.example.com?items=a&items=b&items=c",
+ },
+ {
+ description: "should handle object query parameters",
+ baseUrl: BASE_URL,
+ queryParams: { filter: { name: "John", age: 30 } },
+ expected: "https://api.example.com?filter%5Bname%5D=John&filter%5Bage%5D=30",
+ },
+ {
+ description: "should handle mixed types of query parameters",
+ baseUrl: BASE_URL,
+ queryParams: {
+ simple: "value",
+ array: ["x", "y"],
+ object: { key: "value" },
+ },
+ expected: "https://api.example.com?simple=value&array=x&array=y&object%5Bkey%5D=value",
+ },
+ {
+ description: "should handle empty query parameters object",
+ baseUrl: BASE_URL,
+ queryParams: {},
+ expected: BASE_URL,
+ },
+ {
+ description: "should encode special characters in query parameters",
+ baseUrl: BASE_URL,
+ queryParams: { special: "a&b=c d" },
+ expected: "https://api.example.com?special=a%26b%3Dc%20d",
+ },
+ {
+ description: "should handle numeric values",
+ baseUrl: BASE_URL,
+ queryParams: { count: 42, price: 19.99, active: 1, inactive: 0 },
+ expected: "https://api.example.com?count=42&price=19.99&active=1&inactive=0",
+ },
+ {
+ description: "should handle boolean values",
+ baseUrl: BASE_URL,
+ queryParams: { enabled: true, disabled: false },
+ expected: "https://api.example.com?enabled=true&disabled=false",
+ },
+ {
+ description: "should handle null and undefined values",
+ baseUrl: BASE_URL,
+ queryParams: {
+ valid: "value",
+ nullValue: null,
+ undefinedValue: undefined,
+ emptyString: "",
+ },
+ expected: "https://api.example.com?valid=value&nullValue=&emptyString=",
+ },
+ {
+ description: "should handle deeply nested objects",
+ baseUrl: BASE_URL,
+ queryParams: {
+ user: {
+ profile: {
+ name: "John",
+ settings: { theme: "dark" },
+ },
},
},
- };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark",
- );
- });
-
- it("should handle arrays of objects", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = {
- users: [
- { name: "John", age: 30 },
- { name: "Jane", age: 25 },
- ],
- };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?users%5Bname%5D=John&users%5Bage%5D=30&users%5Bname%5D=Jane&users%5Bage%5D=25",
- );
- });
-
- it("should handle mixed arrays", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = {
- mixed: ["string", 42, true, { key: "value" }],
- };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?mixed=string&mixed=42&mixed=true&mixed%5Bkey%5D=value",
- );
- });
-
- it("should handle empty arrays", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { emptyArray: [] };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(baseUrl);
- });
-
- it("should handle empty objects", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { emptyObject: {} };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(baseUrl);
- });
-
- it("should handle special characters in keys", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = { "key with spaces": "value", "key[with]brackets": "value" };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?key%20with%20spaces=value&key%5Bwith%5Dbrackets=value",
- );
- });
-
- it("should handle URL with existing query parameters", () => {
- const baseUrl = "https://api.example.com?existing=param";
- const queryParams = { new: "value" };
- expect(createRequestUrl(baseUrl, queryParams)).toBe("https://api.example.com?existing=param?new=value");
- });
-
- it("should handle complex nested structures", () => {
- const baseUrl = "https://api.example.com";
- const queryParams = {
- filters: {
- status: ["active", "pending"],
- category: {
- type: "electronics",
- subcategories: ["phones", "laptops"],
+ expected:
+ "https://api.example.com?user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark",
+ },
+ {
+ description: "should handle arrays of objects",
+ baseUrl: BASE_URL,
+ queryParams: {
+ users: [
+ { name: "John", age: 30 },
+ { name: "Jane", age: 25 },
+ ],
+ },
+ expected:
+ "https://api.example.com?users%5Bname%5D=John&users%5Bage%5D=30&users%5Bname%5D=Jane&users%5Bage%5D=25",
+ },
+ {
+ description: "should handle mixed arrays",
+ baseUrl: BASE_URL,
+ queryParams: {
+ mixed: ["string", 42, true, { key: "value" }],
+ },
+ expected: "https://api.example.com?mixed=string&mixed=42&mixed=true&mixed%5Bkey%5D=value",
+ },
+ {
+ description: "should handle empty arrays",
+ baseUrl: BASE_URL,
+ queryParams: { emptyArray: [] },
+ expected: BASE_URL,
+ },
+ {
+ description: "should handle empty objects",
+ baseUrl: BASE_URL,
+ queryParams: { emptyObject: {} },
+ expected: BASE_URL,
+ },
+ {
+ description: "should handle special characters in keys",
+ baseUrl: BASE_URL,
+ queryParams: { "key with spaces": "value", "key[with]brackets": "value" },
+ expected: "https://api.example.com?key%20with%20spaces=value&key%5Bwith%5Dbrackets=value",
+ },
+ {
+ description: "should handle URL with existing query parameters",
+ baseUrl: "https://api.example.com?existing=param",
+ queryParams: { new: "value" },
+ expected: "https://api.example.com?existing=param?new=value",
+ },
+ {
+ description: "should handle complex nested structures",
+ baseUrl: BASE_URL,
+ queryParams: {
+ filters: {
+ status: ["active", "pending"],
+ category: {
+ type: "electronics",
+ subcategories: ["phones", "laptops"],
+ },
},
+ sort: { field: "name", direction: "asc" },
},
- sort: { field: "name", direction: "asc" },
- };
- expect(createRequestUrl(baseUrl, queryParams)).toBe(
- "https://api.example.com?filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc",
- );
+ expected:
+ "https://api.example.com?filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc",
+ },
+ ];
+
+ testCases.forEach(({ description, baseUrl, queryParams, expected }) => {
+ it(description, () => {
+ expect(createRequestUrl(baseUrl, queryParams)).toBe(expected);
+ });
});
});
diff --git a/tests/unit/fetcher/getRequestBody.test.ts b/tests/unit/fetcher/getRequestBody.test.ts
index e864c8b..8a6c3a5 100644
--- a/tests/unit/fetcher/getRequestBody.test.ts
+++ b/tests/unit/fetcher/getRequestBody.test.ts
@@ -2,15 +2,117 @@ import { getRequestBody } from "../../../src/core/fetcher/getRequestBody";
import { RUNTIME } from "../../../src/core/runtime";
describe("Test getRequestBody", () => {
- it("should stringify body if not FormData in Node environment", async () => {
- if (RUNTIME.type === "node") {
- const body = { key: "value" };
+ interface TestCase {
+ description: string;
+ input: any;
+ type: "json" | "form" | "file" | "bytes" | "other";
+ expected: any;
+ skipCondition?: () => boolean;
+ }
+
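+ // skipCondition lets runtime-specific cases (Node vs. browser) opt out at execution time.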
+ const testCases: TestCase[] = [
+ {
+ description: "should stringify body if not FormData in Node environment",
+ input: { key: "value" },
+ type: "json",
+ expected: '{"key":"value"}',
+ skipCondition: () => RUNTIME.type !== "node",
+ },
+ {
+ description: "should stringify body if not FormData in browser environment",
+ input: { key: "value" },
+ type: "json",
+ expected: '{"key":"value"}',
+ skipCondition: () => RUNTIME.type !== "browser",
+ },
+ {
+ description: "should return the Uint8Array",
+ input: new Uint8Array([1, 2, 3]),
+ type: "bytes",
+ expected: new Uint8Array([1, 2, 3]),
+ },
+ {
+ description: "should serialize objects for form-urlencoded content type",
+ input: { username: "johndoe", email: "john@example.com" },
+ type: "form",
+ expected: "username=johndoe&email=john%40example.com",
+ },
+ {
+ description: "should serialize complex nested objects and arrays for form-urlencoded content type",
+ input: {
+ user: {
+ profile: {
+ name: "John Doe",
+ settings: {
+ theme: "dark",
+ notifications: true,
+ },
+ },
+ tags: ["admin", "user"],
+ contacts: [
+ { type: "email", value: "john@example.com" },
+ { type: "phone", value: "+1234567890" },
+ ],
+ },
+ filters: {
+ status: ["active", "pending"],
+ metadata: {
+ created: "2024-01-01",
+ categories: ["electronics", "books"],
+ },
+ },
+ preferences: ["notifications", "updates"],
+ },
+ type: "form",
+ expected:
+ "user%5Bprofile%5D%5Bname%5D=John%20Doe&" +
+ "user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark&" +
+ "user%5Bprofile%5D%5Bsettings%5D%5Bnotifications%5D=true&" +
+ "user%5Btags%5D=admin&" +
+ "user%5Btags%5D=user&" +
+ "user%5Bcontacts%5D%5Btype%5D=email&" +
+ "user%5Bcontacts%5D%5Bvalue%5D=john%40example.com&" +
+ "user%5Bcontacts%5D%5Btype%5D=phone&" +
+ "user%5Bcontacts%5D%5Bvalue%5D=%2B1234567890&" +
+ "filters%5Bstatus%5D=active&" +
+ "filters%5Bstatus%5D=pending&" +
+ "filters%5Bmetadata%5D%5Bcreated%5D=2024-01-01&" +
+ "filters%5Bmetadata%5D%5Bcategories%5D=electronics&" +
+ "filters%5Bmetadata%5D%5Bcategories%5D=books&" +
+ "preferences=notifications&" +
+ "preferences=updates",
+ },
+ {
+ description: "should return the input for pre-serialized form-urlencoded strings",
+ input: "key=value&another=param",
+ type: "other",
+ expected: "key=value&another=param",
+ },
+ {
+ description: "should JSON stringify objects",
+ input: { key: "value" },
+ type: "json",
+ expected: '{"key":"value"}',
+ },
+ ];
+
+ testCases.forEach(({ description, input, type, expected, skipCondition }) => {
+ it(description, async () => {
+ if (skipCondition?.()) {
+ return;
+ }
+
const result = await getRequestBody({
- body,
- type: "json",
+ body: input,
+ type,
});
- expect(result).toBe('{"key":"value"}');
- }
+
+ if (input instanceof Uint8Array) {
+ expect(result).toBe(input);
+ } else {
+ expect(result).toBe(expected);
+ }
+ });
});
it("should return FormData in browser environment", async () => {
@@ -24,42 +126,4 @@ describe("Test getRequestBody", () => {
expect(result).toBe(formData);
}
});
-
- it("should stringify body if not FormData in browser environment", async () => {
- if (RUNTIME.type === "browser") {
- const body = { key: "value" };
- const result = await getRequestBody({
- body,
- type: "json",
- });
- expect(result).toBe('{"key":"value"}');
- }
- });
-
- it("should return the Uint8Array", async () => {
- const input = new Uint8Array([1, 2, 3]);
- const result = await getRequestBody({
- body: input,
- type: "bytes",
- });
- expect(result).toBe(input);
- });
-
- it("should return the input for content-type 'application/x-www-form-urlencoded'", async () => {
- const input = "key=value&another=param";
- const result = await getRequestBody({
- body: input,
- type: "other",
- });
- expect(result).toBe(input);
- });
-
- it("should JSON stringify objects", async () => {
- const input = { key: "value" };
- const result = await getRequestBody({
- body: input,
- type: "json",
- });
- expect(result).toBe('{"key":"value"}');
- });
});
diff --git a/tests/unit/fetcher/getResponseBody.test.ts b/tests/unit/fetcher/getResponseBody.test.ts
index 151843a..ad6be7f 100644
--- a/tests/unit/fetcher/getResponseBody.test.ts
+++ b/tests/unit/fetcher/getResponseBody.test.ts
@@ -1,7 +1,61 @@
import { getResponseBody } from "../../../src/core/fetcher/getResponseBody";
+
import { RUNTIME } from "../../../src/core/runtime";
describe("Test getResponseBody", () => {
+ interface SimpleTestCase {
+ description: string;
+ responseData: string | Record<string, unknown>;
+ responseType?: "blob" | "sse" | "streaming" | "text";
+ expected: any;
+ skipCondition?: () => boolean;
+ }
+
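+ // Cases that share a single Response-based harness; blob and streaming responses keep dedicated tests below.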
+ const simpleTestCases: SimpleTestCase[] = [
+ {
+ description: "should handle text response type",
+ responseData: "test text",
+ responseType: "text",
+ expected: "test text",
+ },
+ {
+ description: "should handle JSON response",
+ responseData: { key: "value" },
+ expected: { key: "value" },
+ },
+ {
+ description: "should handle empty response",
+ responseData: "",
+ expected: undefined,
+ },
+ {
+ description: "should handle non-JSON response",
+ responseData: "invalid json",
+ expected: {
+ ok: false,
+ error: {
+ reason: "non-json",
+ statusCode: 200,
+ rawBody: "invalid json",
+ },
+ },
+ },
+ ];
+
+ simpleTestCases.forEach(({ description, responseData, responseType, expected, skipCondition }) => {
+ it(description, async () => {
+ if (skipCondition?.()) {
+ return;
+ }
+
+ const mockResponse = new Response(
+ typeof responseData === "string" ? responseData : JSON.stringify(responseData),
+ );
+ const result = await getResponseBody(mockResponse, responseType);
+ expect(result).toEqual(expected);
+ });
+ });
+
it("should handle blob response type", async () => {
const mockBlob = new Blob(["test"], { type: "text/plain" });
const mockResponse = new Response(mockBlob);
@@ -20,7 +74,6 @@ describe("Test getResponseBody", () => {
});
it("should handle streaming response type", async () => {
- // Create a ReadableStream with some test data
const encoder = new TextEncoder();
const testData = "test stream data";
const mockStream = new ReadableStream({
@@ -35,43 +88,10 @@ describe("Test getResponseBody", () => {
expect(result).toBeInstanceOf(ReadableStream);
- // Read and verify the stream content
const reader = result.getReader();
const decoder = new TextDecoder();
const { value } = await reader.read();
const streamContent = decoder.decode(value);
expect(streamContent).toBe(testData);
});
-
- it("should handle text response type", async () => {
- const mockResponse = new Response("test text");
- const result = await getResponseBody(mockResponse, "text");
- expect(result).toBe("test text");
- });
-
- it("should handle JSON response", async () => {
- const mockJson = { key: "value" };
- const mockResponse = new Response(JSON.stringify(mockJson));
- const result = await getResponseBody(mockResponse);
- expect(result).toEqual(mockJson);
- });
-
- it("should handle empty response", async () => {
- const mockResponse = new Response("");
- const result = await getResponseBody(mockResponse);
- expect(result).toBeUndefined();
- });
-
- it("should handle non-JSON response", async () => {
- const mockResponse = new Response("invalid json");
- const result = await getResponseBody(mockResponse);
- expect(result).toEqual({
- ok: false,
- error: {
- reason: "non-json",
- statusCode: 200,
- rawBody: "invalid json",
- },
- });
- });
});
diff --git a/tests/unit/fetcher/logging.test.ts b/tests/unit/fetcher/logging.test.ts
new file mode 100644
index 0000000..366c9b6
--- /dev/null
+++ b/tests/unit/fetcher/logging.test.ts
@@ -0,0 +1,517 @@
+import { fetcherImpl } from "../../../src/core/fetcher/Fetcher";
+
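+// Shared helpers: a vi.fn()-based logger stub and fetch mocks that resolve to canned success or error responses.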
+function createMockLogger() {
+ return {
+ debug: vi.fn(),
+ info: vi.fn(),
+ warn: vi.fn(),
+ error: vi.fn(),
+ };
+}
+
+function mockSuccessResponse(data: unknown = { data: "test" }, status = 200, statusText = "OK") {
+ global.fetch = vi.fn().mockResolvedValue(
+ new Response(JSON.stringify(data), {
+ status,
+ statusText,
+ }),
+ );
+}
+
+function mockErrorResponse(data: unknown = { error: "Error" }, status = 404, statusText = "Not Found") {
+ global.fetch = vi.fn().mockResolvedValue(
+ new Response(JSON.stringify(data), {
+ status,
+ statusText,
+ }),
+ );
+}
+
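+// Exercises fetcherImpl logging against a mocked global.fetch: request/success logging, error paths, redaction, HTTP methods, and status codes.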
+describe("Fetcher Logging Integration", () => {
+ describe("Request Logging", () => {
+ it("should log successful request at debug level", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "POST",
+ headers: { "Content-Type": "application/json" },
+ body: { test: "data" },
+ contentType: "application/json",
+ requestType: "json",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ method: "POST",
+ url: "https://example.com/api",
+ headers: expect.toContainHeaders({
+ "Content-Type": "application/json",
+ }),
+ hasBody: true,
+ }),
+ );
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "HTTP request succeeded",
+ expect.objectContaining({
+ method: "POST",
+ url: "https://example.com/api",
+ statusCode: 200,
+ }),
+ );
+ });
+
+ it("should not log debug messages at info level for successful requests", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "info",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ expect(mockLogger.info).not.toHaveBeenCalled();
+ });
+
+ it("should log request with body flag", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "POST",
+ body: { data: "test" },
+ contentType: "application/json",
+ requestType: "json",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ hasBody: true,
+ }),
+ );
+ });
+
+ it("should log request without body flag", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ hasBody: false,
+ }),
+ );
+ });
+
+ it("should not log when silent mode is enabled", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: true,
+ },
+ });
+
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ expect(mockLogger.info).not.toHaveBeenCalled();
+ expect(mockLogger.warn).not.toHaveBeenCalled();
+ expect(mockLogger.error).not.toHaveBeenCalled();
+ });
+
+ it("should not log when no logging config is provided", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ });
+
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ });
+ });
+
+ describe("Error Logging", () => {
+ it("should log 4xx errors at error level", async () => {
+ const mockLogger = createMockLogger();
+ mockErrorResponse({ error: "Not found" }, 404, "Not Found");
+
+ const result = await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "error",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(result.ok).toBe(false);
+ expect(mockLogger.error).toHaveBeenCalledWith(
+ "HTTP request failed with error status",
+ expect.objectContaining({
+ method: "GET",
+ url: "https://example.com/api",
+ statusCode: 404,
+ }),
+ );
+ });
+
+ it("should log 5xx errors at error level", async () => {
+ const mockLogger = createMockLogger();
+ mockErrorResponse({ error: "Internal error" }, 500, "Internal Server Error");
+
+ const result = await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "error",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(result.ok).toBe(false);
+ expect(mockLogger.error).toHaveBeenCalledWith(
+ "HTTP request failed with error status",
+ expect.objectContaining({
+ method: "GET",
+ url: "https://example.com/api",
+ statusCode: 500,
+ }),
+ );
+ });
+
+ it("should log aborted request errors", async () => {
+ const mockLogger = createMockLogger();
+
+ const abortController = new AbortController();
+ abortController.abort();
+
+ global.fetch = vi.fn().mockRejectedValue(new Error("Aborted"));
+
+ const result = await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ abortSignal: abortController.signal,
+ maxRetries: 0,
+ logging: {
+ level: "error",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(result.ok).toBe(false);
+ expect(mockLogger.error).toHaveBeenCalledWith(
+ "HTTP request was aborted",
+ expect.objectContaining({
+ method: "GET",
+ url: "https://example.com/api",
+ }),
+ );
+ });
+
+ it("should log timeout errors", async () => {
+ const mockLogger = createMockLogger();
+
+ const timeoutError = new Error("Request timeout");
+ timeoutError.name = "AbortError";
+
+ global.fetch = vi.fn().mockRejectedValue(timeoutError);
+
+ const result = await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "error",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(result.ok).toBe(false);
+ expect(mockLogger.error).toHaveBeenCalledWith(
+ "HTTP request timed out",
+ expect.objectContaining({
+ method: "GET",
+ url: "https://example.com/api",
+ timeoutMs: undefined,
+ }),
+ );
+ });
+
+ it("should log unknown errors", async () => {
+ const mockLogger = createMockLogger();
+
+ const unknownError = new Error("Unknown error");
+
+ global.fetch = vi.fn().mockRejectedValue(unknownError);
+
+ const result = await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "error",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(result.ok).toBe(false);
+ expect(mockLogger.error).toHaveBeenCalledWith(
+ "HTTP request failed with error",
+ expect.objectContaining({
+ method: "GET",
+ url: "https://example.com/api",
+ errorMessage: "Unknown error",
+ }),
+ );
+ });
+ });
+
+ describe("Logging with Redaction", () => {
+ it("should redact sensitive data in error logs", async () => {
+ const mockLogger = createMockLogger();
+ mockErrorResponse({ error: "Unauthorized" }, 401, "Unauthorized");
+
+ await fetcherImpl({
+ url: "https://example.com/api?api_key=secret",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "error",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.error).toHaveBeenCalledWith(
+ "HTTP request failed with error status",
+ expect.objectContaining({
+ url: "https://example.com/api?api_key=[REDACTED]",
+ }),
+ );
+ });
+ });
+
+ describe("Different HTTP Methods", () => {
+ it("should log GET requests", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ method: "GET",
+ }),
+ );
+ });
+
+ it("should log POST requests", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse({ data: "test" }, 201, "Created");
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "POST",
+ body: { data: "test" },
+ contentType: "application/json",
+ requestType: "json",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ method: "POST",
+ }),
+ );
+ });
+
+ it("should log PUT requests", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "PUT",
+ body: { data: "test" },
+ contentType: "application/json",
+ requestType: "json",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ method: "PUT",
+ }),
+ );
+ });
+
+ it("should log DELETE requests", async () => {
+ const mockLogger = createMockLogger();
+ global.fetch = vi.fn().mockResolvedValue(
+ new Response(null, {
+ status: 200,
+ statusText: "OK",
+ }),
+ );
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "DELETE",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ method: "DELETE",
+ }),
+ );
+ });
+ });
+
+ describe("Status Code Logging", () => {
+ it("should log 2xx success status codes", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse({ data: "test" }, 201, "Created");
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "POST",
+ body: { data: "test" },
+ contentType: "application/json",
+ requestType: "json",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "HTTP request succeeded",
+ expect.objectContaining({
+ statusCode: 201,
+ }),
+ );
+ });
+
+ it("should log 3xx redirect status codes as success", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse({ data: "test" }, 301, "Moved Permanently");
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "HTTP request succeeded",
+ expect.objectContaining({
+ statusCode: 301,
+ }),
+ );
+ });
+ });
+});
diff --git a/tests/unit/fetcher/makeRequest.test.ts b/tests/unit/fetcher/makeRequest.test.ts
index f6203cd..ea49466 100644
--- a/tests/unit/fetcher/makeRequest.test.ts
+++ b/tests/unit/fetcher/makeRequest.test.ts
@@ -1,3 +1,4 @@
+import type { Mock } from "vitest";
import { makeRequest } from "../../../src/core/fetcher/makeRequest";
describe("Test makeRequest", () => {
@@ -6,7 +7,7 @@ describe("Test makeRequest", () => {
const mockHeaders = { "Content-Type": "application/json" };
const mockBody = JSON.stringify({ key: "value" });
- let mockFetch: import("vitest").Mock;
+ let mockFetch: Mock;
beforeEach(() => {
mockFetch = vi.fn();
diff --git a/tests/unit/fetcher/redacting.test.ts b/tests/unit/fetcher/redacting.test.ts
new file mode 100644
index 0000000..d599376
--- /dev/null
+++ b/tests/unit/fetcher/redacting.test.ts
@@ -0,0 +1,1115 @@
+import { fetcherImpl } from "../../../src/core/fetcher/Fetcher";
+
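+// Same logger stub and success-response fetch mock as logging.test.ts, reused here to inspect redacted log output.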
+function createMockLogger() {
+ return {
+ debug: vi.fn(),
+ info: vi.fn(),
+ warn: vi.fn(),
+ error: vi.fn(),
+ };
+}
+
+function mockSuccessResponse(data: unknown = { data: "test" }, status = 200, statusText = "OK") {
+ global.fetch = vi.fn().mockResolvedValue(
+ new Response(JSON.stringify(data), {
+ status,
+ statusText,
+ }),
+ );
+}
+
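+// Verifies that sensitive headers, query parameters, and URL credentials are logged as [REDACTED] while non-sensitive values pass through.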
+describe("Redacting Logic", () => {
+ describe("Header Redaction", () => {
+ it("should redact authorization header", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { Authorization: "Bearer secret-token-12345" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ Authorization: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact api-key header (case-insensitive)", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { "X-API-KEY": "secret-api-key" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ "X-API-KEY": "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact cookie header", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { Cookie: "session=abc123; token=xyz789" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ Cookie: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact x-auth-token header", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { "x-auth-token": "auth-token-12345" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ "x-auth-token": "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact proxy-authorization header", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { "Proxy-Authorization": "Basic credentials" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ "Proxy-Authorization": "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact x-csrf-token header", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { "X-CSRF-Token": "csrf-token-abc" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ "X-CSRF-Token": "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact www-authenticate header", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { "WWW-Authenticate": "Bearer realm=example" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ "WWW-Authenticate": "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact x-session-token header", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: { "X-Session-Token": "session-token-xyz" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ "X-Session-Token": "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should not redact non-sensitive headers", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: {
+ "Content-Type": "application/json",
+ "User-Agent": "Test/1.0",
+ Accept: "application/json",
+ },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ "Content-Type": "application/json",
+ "User-Agent": "Test/1.0",
+ Accept: "application/json",
+ }),
+ }),
+ );
+ });
+
+ it("should redact multiple sensitive headers at once", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ headers: {
+ Authorization: "Bearer token",
+ "X-API-Key": "api-key",
+ Cookie: "session=123",
+ "Content-Type": "application/json",
+ },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ headers: expect.toContainHeaders({
+ Authorization: "[REDACTED]",
+ "X-API-Key": "[REDACTED]",
+ Cookie: "[REDACTED]",
+ "Content-Type": "application/json",
+ }),
+ }),
+ );
+ });
+ });
+
+ describe("Response Header Redaction", () => {
+ it("should redact Set-Cookie in response headers", async () => {
+ const mockLogger = createMockLogger();
+
+ const mockHeaders = new Headers();
+ mockHeaders.set("Set-Cookie", "session=abc123; HttpOnly; Secure");
+ mockHeaders.set("Content-Type", "application/json");
+
+ global.fetch = vi.fn().mockResolvedValue(
+ new Response(JSON.stringify({ data: "test" }), {
+ status: 200,
+ statusText: "OK",
+ headers: mockHeaders,
+ }),
+ );
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "HTTP request succeeded",
+ expect.objectContaining({
+ responseHeaders: expect.toContainHeaders({
+ "set-cookie": "[REDACTED]",
+ "content-type": "application/json",
+ }),
+ }),
+ );
+ });
+
+ it("should redact authorization in response headers", async () => {
+ const mockLogger = createMockLogger();
+
+ const mockHeaders = new Headers();
+ mockHeaders.set("Authorization", "Bearer token-123");
+ mockHeaders.set("Content-Type", "application/json");
+
+ global.fetch = vi.fn().mockResolvedValue(
+ new Response(JSON.stringify({ data: "test" }), {
+ status: 200,
+ statusText: "OK",
+ headers: mockHeaders,
+ }),
+ );
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "HTTP request succeeded",
+ expect.objectContaining({
+ responseHeaders: expect.toContainHeaders({
+ authorization: "[REDACTED]",
+ "content-type": "application/json",
+ }),
+ }),
+ );
+ });
+
+ it("should redact response headers in error responses", async () => {
+ const mockLogger = createMockLogger();
+
+ const mockHeaders = new Headers();
+ mockHeaders.set("WWW-Authenticate", "Bearer realm=example");
+ mockHeaders.set("Content-Type", "application/json");
+
+ global.fetch = vi.fn().mockResolvedValue(
+ new Response(JSON.stringify({ error: "Unauthorized" }), {
+ status: 401,
+ statusText: "Unauthorized",
+ headers: mockHeaders,
+ }),
+ );
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "error",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.error).toHaveBeenCalledWith(
+ "HTTP request failed with error status",
+ expect.objectContaining({
+ responseHeaders: expect.toContainHeaders({
+ "www-authenticate": "[REDACTED]",
+ "content-type": "application/json",
+ }),
+ }),
+ );
+ });
+ });
+
+ describe("Query Parameter Redaction", () => {
+ it("should redact api_key query parameter", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: { api_key: "secret-key" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ api_key: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact token query parameter", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: { token: "secret-token" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ token: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact access_token query parameter", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: { access_token: "secret-access-token" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ access_token: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact password query parameter", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: { password: "secret-password" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ password: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact secret query parameter", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: { secret: "secret-value" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ secret: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should redact session_id query parameter", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: { session_id: "session-123" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ session_id: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+
+ it("should not redact non-sensitive query parameters", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: {
+ page: "1",
+ limit: "10",
+ sort: "name",
+ },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ page: "1",
+ limit: "10",
+ sort: "name",
+ }),
+ }),
+ );
+ });
+
+ it("should not redact parameters containing 'auth' substring like 'author'", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: {
+ author: "john",
+ authenticate: "false",
+ authorization_level: "user",
+ },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ author: "john",
+ authenticate: "false",
+ authorization_level: "user",
+ }),
+ }),
+ );
+ });
+
+ it("should handle undefined query parameters", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: undefined,
+ }),
+ );
+ });
+
+ it("should redact case-insensitive query parameters", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ queryParameters: { API_KEY: "secret-key", Token: "secret-token" },
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ queryParameters: expect.objectContaining({
+ API_KEY: "[REDACTED]",
+ Token: "[REDACTED]",
+ }),
+ }),
+ );
+ });
+ });
+
+ describe("URL Redaction", () => {
+ it("should redact credentials in URL", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://user:password@example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://[REDACTED]@example.com/api",
+ }),
+ );
+ });
+
+ it("should redact api_key in query string", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?api_key=secret-key&page=1",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?api_key=[REDACTED]&page=1",
+ }),
+ );
+ });
+
+ it("should redact token in query string", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?token=secret-token",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?token=[REDACTED]",
+ }),
+ );
+ });
+
+ it("should redact password in query string", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?username=user&password=secret",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?username=user&password=[REDACTED]",
+ }),
+ );
+ });
+
+ it("should not redact non-sensitive query strings", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?page=1&limit=10&sort=name",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?page=1&limit=10&sort=name",
+ }),
+ );
+ });
+
+ it("should not redact URL parameters containing 'auth' substring like 'author'", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?author=john&authenticate=false&page=1",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?author=john&authenticate=false&page=1",
+ }),
+ );
+ });
+
+ it("should handle URL with fragment", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?token=secret#section",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?token=[REDACTED]#section",
+ }),
+ );
+ });
+
+ it("should redact URL-encoded query parameters", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?api%5Fkey=secret",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?api%5Fkey=[REDACTED]",
+ }),
+ );
+ });
+
+ it("should handle URL without query string", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api",
+ }),
+ );
+ });
+
+ it("should handle empty query string", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?",
+ }),
+ );
+ });
+
+ it("should redact multiple sensitive parameters in URL", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?api_key=secret1&token=secret2&page=1",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?api_key=[REDACTED]&token=[REDACTED]&page=1",
+ }),
+ );
+ });
+
+ it("should redact both credentials and query parameters", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://user:pass@example.com/api?token=secret",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://[REDACTED]@example.com/api?token=[REDACTED]",
+ }),
+ );
+ });
+
+ it("should use fast path for URLs without sensitive keywords", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?page=1&limit=10&sort=name&filter=value",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?page=1&limit=10&sort=name&filter=value",
+ }),
+ );
+ });
+
+ it("should handle query parameter without value", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?flag&token=secret",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?flag&token=[REDACTED]",
+ }),
+ );
+ });
+
+ it("should handle URL with multiple @ symbols in credentials", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://user@example.com:pass@host.com/api",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://[REDACTED]@host.com/api",
+ }),
+ );
+ });
+
+ it("should handle URL with @ in query parameter but not in credentials", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://example.com/api?email=user@example.com",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://example.com/api?email=user@example.com",
+ }),
+ );
+ });
+
+ it("should handle URL with both credentials and @ in path", async () => {
+ const mockLogger = createMockLogger();
+ mockSuccessResponse();
+
+ await fetcherImpl({
+ url: "https://user:pass@example.com/users/@username",
+ method: "GET",
+ responseType: "json",
+ maxRetries: 0,
+ logging: {
+ level: "debug",
+ logger: mockLogger,
+ silent: false,
+ },
+ });
+
+ expect(mockLogger.debug).toHaveBeenCalledWith(
+ "Making HTTP request",
+ expect.objectContaining({
+ url: "https://[REDACTED]@example.com/users/@username",
+ }),
+ );
+ });
+ });
+});
diff --git a/tests/unit/fetcher/requestWithRetries.test.ts b/tests/unit/fetcher/requestWithRetries.test.ts
index 7d46082..d226613 100644
--- a/tests/unit/fetcher/requestWithRetries.test.ts
+++ b/tests/unit/fetcher/requestWithRetries.test.ts
@@ -1,15 +1,15 @@
+import type { Mock, MockInstance } from "vitest";
import { requestWithRetries } from "../../../src/core/fetcher/requestWithRetries";
describe("requestWithRetries", () => {
- let mockFetch: import("vitest").Mock;
+ let mockFetch: Mock;
let originalMathRandom: typeof Math.random;
- let setTimeoutSpy: import("vitest").MockInstance;
+ let setTimeoutSpy: MockInstance;
beforeEach(() => {
mockFetch = vi.fn();
originalMathRandom = Math.random;
- // Mock Math.random for consistent jitter
Math.random = vi.fn(() => 0.5);
vi.useFakeTimers({
@@ -99,6 +99,67 @@ describe("requestWithRetries", () => {
}
});
+ interface RetryHeaderTestCase {
+ description: string;
+ headerName: string;
+ headerValue: string | (() => string);
+ expectedDelayMin: number;
+ expectedDelayMax: number;
+ }
+
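+ // headerValue can be a thunk so time-based values are computed when the test runs; delay assertions use ranges to tolerate jitter.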
+ const retryHeaderTests: RetryHeaderTestCase[] = [
+ {
+ description: "should respect retry-after header with seconds value",
+ headerName: "retry-after",
+ headerValue: "5",
+ expectedDelayMin: 4000,
+ expectedDelayMax: 6000,
+ },
+ {
+ description: "should respect retry-after header with HTTP date value",
+ headerName: "retry-after",
+ headerValue: () => new Date(Date.now() + 3000).toUTCString(),
+ expectedDelayMin: 2000,
+ expectedDelayMax: 4000,
+ },
+ {
+ description: "should respect x-ratelimit-reset header",
+ headerName: "x-ratelimit-reset",
+ headerValue: () => Math.floor((Date.now() + 4000) / 1000).toString(),
+ expectedDelayMin: 3000,
+ expectedDelayMax: 6000,
+ },
+ ];
+
+ retryHeaderTests.forEach(({ description, headerName, headerValue, expectedDelayMin, expectedDelayMax }) => {
+ it(description, async () => {
+ setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => {
+ process.nextTick(callback);
+ return null as any;
+ });
+
+ const value = typeof headerValue === "function" ? headerValue() : headerValue;
+ mockFetch
+ .mockResolvedValueOnce(
+ new Response("", {
+ status: 429,
+ headers: new Headers({ [headerName]: value }),
+ }),
+ )
+ .mockResolvedValueOnce(new Response("", { status: 200 }));
+
+ const responsePromise = requestWithRetries(() => mockFetch(), 1);
+ await vi.runAllTimersAsync();
+ const response = await responsePromise;
+
+ expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), expect.any(Number));
+ const actualDelay = setTimeoutSpy.mock.calls[0][1];
+ expect(actualDelay).toBeGreaterThan(expectedDelayMin);
+ expect(actualDelay).toBeLessThan(expectedDelayMax);
+ expect(response.status).toBe(200);
+ });
+ });
+
it("should apply correct exponential backoff with jitter", async () => {
setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => {
process.nextTick(callback);
@@ -113,7 +174,6 @@ describe("requestWithRetries", () => {
await vi.runAllTimersAsync();
await responsePromise;
- // Verify setTimeout calls
expect(setTimeoutSpy).toHaveBeenCalledTimes(expectedDelays.length);
expectedDelays.forEach((delay, index) => {
@@ -145,85 +205,6 @@ describe("requestWithRetries", () => {
expect(response2.status).toBe(200);
});
- it("should respect retry-after header with seconds value", async () => {
- setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => {
- process.nextTick(callback);
- return null as any;
- });
-
- mockFetch
- .mockResolvedValueOnce(
- new Response("", {
- status: 429,
- headers: new Headers({ "retry-after": "5" }),
- }),
- )
- .mockResolvedValueOnce(new Response("", { status: 200 }));
-
- const responsePromise = requestWithRetries(() => mockFetch(), 1);
- await vi.runAllTimersAsync();
- const response = await responsePromise;
-
- expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 5000); // 5 seconds = 5000ms
- expect(response.status).toBe(200);
- });
-
- it("should respect retry-after header with HTTP date value", async () => {
- setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => {
- process.nextTick(callback);
- return null as any;
- });
-
- const futureDate = new Date(Date.now() + 3000); // 3 seconds from now
- mockFetch
- .mockResolvedValueOnce(
- new Response("", {
- status: 429,
- headers: new Headers({ "retry-after": futureDate.toUTCString() }),
- }),
- )
- .mockResolvedValueOnce(new Response("", { status: 200 }));
-
- const responsePromise = requestWithRetries(() => mockFetch(), 1);
- await vi.runAllTimersAsync();
- const response = await responsePromise;
-
- // Should use the date-based delay (approximately 3000ms, but with jitter)
- expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), expect.any(Number));
- const actualDelay = setTimeoutSpy.mock.calls[0][1];
- expect(actualDelay).toBeGreaterThan(2000);
- expect(actualDelay).toBeLessThan(4000);
- expect(response.status).toBe(200);
- });
-
- it("should respect x-ratelimit-reset header", async () => {
- setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => {
- process.nextTick(callback);
- return null as any;
- });
-
- const resetTime = Math.floor((Date.now() + 4000) / 1000); // 4 seconds from now in Unix timestamp
- mockFetch
- .mockResolvedValueOnce(
- new Response("", {
- status: 429,
- headers: new Headers({ "x-ratelimit-reset": resetTime.toString() }),
- }),
- )
- .mockResolvedValueOnce(new Response("", { status: 200 }));
-
- const responsePromise = requestWithRetries(() => mockFetch(), 1);
- await vi.runAllTimersAsync();
- const response = await responsePromise;
-
- // Should use the x-ratelimit-reset delay (approximately 4000ms, but with positive jitter)
- expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), expect.any(Number));
- const actualDelay = setTimeoutSpy.mock.calls[0][1];
- expect(actualDelay).toBeGreaterThan(3000);
- expect(actualDelay).toBeLessThan(6000);
- expect(response.status).toBe(200);
- });
-
it("should cap delay at MAX_RETRY_DELAY for large header values", async () => {
setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => {
process.nextTick(callback);
@@ -243,8 +224,7 @@ describe("requestWithRetries", () => {
await vi.runAllTimersAsync();
const response = await responsePromise;
- // Should be capped at MAX_RETRY_DELAY (60000ms) with jitter applied
- expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 60000); // Exactly MAX_RETRY_DELAY since jitter with 0.5 random keeps it at 60000
+ expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 60000);
expect(response.status).toBe(200);
});
});
diff --git a/tests/unit/logging/logger.test.ts b/tests/unit/logging/logger.test.ts
new file mode 100644
index 0000000..2e0b5fe
--- /dev/null
+++ b/tests/unit/logging/logger.test.ts
@@ -0,0 +1,454 @@
+import { ConsoleLogger, createLogger, Logger, LogLevel } from "../../../src/core/logging/logger";
+
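+// Stub logger used to assert which levels the Logger forwards to the underlying sink.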
+function createMockLogger() {
+ return {
+ debug: vi.fn(),
+ info: vi.fn(),
+ warn: vi.fn(),
+ error: vi.fn(),
+ };
+}
+
+describe("Logger", () => {
+ describe("LogLevel", () => {
+ it("should have correct log levels", () => {
+ expect(LogLevel.Debug).toBe("debug");
+ expect(LogLevel.Info).toBe("info");
+ expect(LogLevel.Warn).toBe("warn");
+ expect(LogLevel.Error).toBe("error");
+ });
+ });
+
+ describe("ConsoleLogger", () => {
+ let consoleLogger: ConsoleLogger;
+ let consoleSpy: {
+ debug: ReturnType<typeof vi.spyOn>;
+ info: ReturnType<typeof vi.spyOn>;
+ warn: ReturnType<typeof vi.spyOn>;
+ error: ReturnType<typeof vi.spyOn>;
+ };
+
+ beforeEach(() => {
+ consoleLogger = new ConsoleLogger();
+ consoleSpy = {
+ debug: vi.spyOn(console, "debug").mockImplementation(() => {}),
+ info: vi.spyOn(console, "info").mockImplementation(() => {}),
+ warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
+ error: vi.spyOn(console, "error").mockImplementation(() => {}),
+ };
+ });
+
+ afterEach(() => {
+ consoleSpy.debug.mockRestore();
+ consoleSpy.info.mockRestore();
+ consoleSpy.warn.mockRestore();
+ consoleSpy.error.mockRestore();
+ });
+
+ it("should log debug messages", () => {
+ consoleLogger.debug("debug message", { data: "test" });
+ expect(consoleSpy.debug).toHaveBeenCalledWith("debug message", { data: "test" });
+ });
+
+ it("should log info messages", () => {
+ consoleLogger.info("info message", { data: "test" });
+ expect(consoleSpy.info).toHaveBeenCalledWith("info message", { data: "test" });
+ });
+
+ it("should log warn messages", () => {
+ consoleLogger.warn("warn message", { data: "test" });
+ expect(consoleSpy.warn).toHaveBeenCalledWith("warn message", { data: "test" });
+ });
+
+ it("should log error messages", () => {
+ consoleLogger.error("error message", { data: "test" });
+ expect(consoleSpy.error).toHaveBeenCalledWith("error message", { data: "test" });
+ });
+
+ it("should handle multiple arguments", () => {
+ consoleLogger.debug("message", "arg1", "arg2", { key: "value" });
+ expect(consoleSpy.debug).toHaveBeenCalledWith("message", "arg1", "arg2", { key: "value" });
+ });
+ });
+
+ describe("Logger with level filtering", () => {
+ let mockLogger: {
+            debug: ReturnType<typeof vi.fn>;
+            info: ReturnType<typeof vi.fn>;
+            warn: ReturnType<typeof vi.fn>;
+            error: ReturnType<typeof vi.fn>;
+ };
+
+ beforeEach(() => {
+ mockLogger = createMockLogger();
+ });
+
+ describe("Debug level", () => {
+ it("should log all levels when set to debug", () => {
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("debug");
+ logger.info("info");
+ logger.warn("warn");
+ logger.error("error");
+
+ expect(mockLogger.debug).toHaveBeenCalledWith("debug");
+ expect(mockLogger.info).toHaveBeenCalledWith("info");
+ expect(mockLogger.warn).toHaveBeenCalledWith("warn");
+ expect(mockLogger.error).toHaveBeenCalledWith("error");
+ });
+
+ it("should report correct level checks", () => {
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ expect(logger.isDebug()).toBe(true);
+ expect(logger.isInfo()).toBe(true);
+ expect(logger.isWarn()).toBe(true);
+ expect(logger.isError()).toBe(true);
+ });
+ });
+
+ describe("Info level", () => {
+ it("should log info, warn, and error when set to info", () => {
+ const logger = new Logger({
+ level: LogLevel.Info,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("debug");
+ logger.info("info");
+ logger.warn("warn");
+ logger.error("error");
+
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ expect(mockLogger.info).toHaveBeenCalledWith("info");
+ expect(mockLogger.warn).toHaveBeenCalledWith("warn");
+ expect(mockLogger.error).toHaveBeenCalledWith("error");
+ });
+
+ it("should report correct level checks", () => {
+ const logger = new Logger({
+ level: LogLevel.Info,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ expect(logger.isDebug()).toBe(false);
+ expect(logger.isInfo()).toBe(true);
+ expect(logger.isWarn()).toBe(true);
+ expect(logger.isError()).toBe(true);
+ });
+ });
+
+ describe("Warn level", () => {
+ it("should log warn and error when set to warn", () => {
+ const logger = new Logger({
+ level: LogLevel.Warn,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("debug");
+ logger.info("info");
+ logger.warn("warn");
+ logger.error("error");
+
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ expect(mockLogger.info).not.toHaveBeenCalled();
+ expect(mockLogger.warn).toHaveBeenCalledWith("warn");
+ expect(mockLogger.error).toHaveBeenCalledWith("error");
+ });
+
+ it("should report correct level checks", () => {
+ const logger = new Logger({
+ level: LogLevel.Warn,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ expect(logger.isDebug()).toBe(false);
+ expect(logger.isInfo()).toBe(false);
+ expect(logger.isWarn()).toBe(true);
+ expect(logger.isError()).toBe(true);
+ });
+ });
+
+ describe("Error level", () => {
+ it("should only log error when set to error", () => {
+ const logger = new Logger({
+ level: LogLevel.Error,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("debug");
+ logger.info("info");
+ logger.warn("warn");
+ logger.error("error");
+
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ expect(mockLogger.info).not.toHaveBeenCalled();
+ expect(mockLogger.warn).not.toHaveBeenCalled();
+ expect(mockLogger.error).toHaveBeenCalledWith("error");
+ });
+
+ it("should report correct level checks", () => {
+ const logger = new Logger({
+ level: LogLevel.Error,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ expect(logger.isDebug()).toBe(false);
+ expect(logger.isInfo()).toBe(false);
+ expect(logger.isWarn()).toBe(false);
+ expect(logger.isError()).toBe(true);
+ });
+ });
+
+ describe("Silent mode", () => {
+ it("should not log anything when silent is true", () => {
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: true,
+ });
+
+ logger.debug("debug");
+ logger.info("info");
+ logger.warn("warn");
+ logger.error("error");
+
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ expect(mockLogger.info).not.toHaveBeenCalled();
+ expect(mockLogger.warn).not.toHaveBeenCalled();
+ expect(mockLogger.error).not.toHaveBeenCalled();
+ });
+
+ it("should report all level checks as false when silent", () => {
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: true,
+ });
+
+ expect(logger.isDebug()).toBe(false);
+ expect(logger.isInfo()).toBe(false);
+ expect(logger.isWarn()).toBe(false);
+ expect(logger.isError()).toBe(false);
+ });
+ });
+
+ describe("shouldLog", () => {
+ it("should correctly determine if level should be logged", () => {
+ const logger = new Logger({
+ level: LogLevel.Info,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ expect(logger.shouldLog(LogLevel.Debug)).toBe(false);
+ expect(logger.shouldLog(LogLevel.Info)).toBe(true);
+ expect(logger.shouldLog(LogLevel.Warn)).toBe(true);
+ expect(logger.shouldLog(LogLevel.Error)).toBe(true);
+ });
+
+ it("should return false for all levels when silent", () => {
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: true,
+ });
+
+ expect(logger.shouldLog(LogLevel.Debug)).toBe(false);
+ expect(logger.shouldLog(LogLevel.Info)).toBe(false);
+ expect(logger.shouldLog(LogLevel.Warn)).toBe(false);
+ expect(logger.shouldLog(LogLevel.Error)).toBe(false);
+ });
+ });
+
+ describe("Multiple arguments", () => {
+ it("should pass multiple arguments to logger", () => {
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("message", "arg1", { key: "value" }, 123);
+ expect(mockLogger.debug).toHaveBeenCalledWith("message", "arg1", { key: "value" }, 123);
+ });
+ });
+ });
+
+ describe("createLogger", () => {
+ it("should return default logger when no config provided", () => {
+ const logger = createLogger();
+ expect(logger).toBeInstanceOf(Logger);
+ });
+
+ it("should return same logger instance when Logger is passed", () => {
+ const customLogger = new Logger({
+ level: LogLevel.Debug,
+ logger: new ConsoleLogger(),
+ silent: false,
+ });
+
+ const result = createLogger(customLogger);
+ expect(result).toBe(customLogger);
+ });
+
+ it("should create logger with custom config", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = createLogger({
+ level: LogLevel.Warn,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ expect(logger).toBeInstanceOf(Logger);
+ logger.warn("test");
+ expect(mockLogger.warn).toHaveBeenCalledWith("test");
+ });
+
+ it("should use default values for missing config", () => {
+ const logger = createLogger({});
+ expect(logger).toBeInstanceOf(Logger);
+ });
+
+ it("should override default level", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = createLogger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("test");
+ expect(mockLogger.debug).toHaveBeenCalledWith("test");
+ });
+
+ it("should override default silent mode", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = createLogger({
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.info("test");
+ expect(mockLogger.info).toHaveBeenCalledWith("test");
+ });
+
+ it("should use provided logger implementation", () => {
+ const customLogger = createMockLogger();
+
+ const logger = createLogger({
+ logger: customLogger,
+ level: LogLevel.Debug,
+ silent: false,
+ });
+
+ logger.debug("test");
+ expect(customLogger.debug).toHaveBeenCalledWith("test");
+ });
+
+ it("should default to silent: true", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = createLogger({
+ logger: mockLogger,
+ level: LogLevel.Debug,
+ });
+
+ logger.debug("test");
+ expect(mockLogger.debug).not.toHaveBeenCalled();
+ });
+ });
+
+ describe("Default logger", () => {
+ it("should have silent: true by default", () => {
+ const logger = createLogger();
+ expect(logger.shouldLog(LogLevel.Info)).toBe(false);
+ });
+
+ it("should not log when using default logger", () => {
+ const logger = createLogger();
+
+ logger.info("test");
+ expect(logger.isInfo()).toBe(false);
+ });
+ });
+
+ describe("Edge cases", () => {
+ it("should handle empty message", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("");
+ expect(mockLogger.debug).toHaveBeenCalledWith("");
+ });
+
+ it("should handle no arguments", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ logger.debug("message");
+ expect(mockLogger.debug).toHaveBeenCalledWith("message");
+ });
+
+ it("should handle complex objects", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = new Logger({
+ level: LogLevel.Debug,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ const complexObject = {
+ nested: { key: "value" },
+ array: [1, 2, 3],
+ fn: () => "test",
+ };
+
+ logger.debug("message", complexObject);
+ expect(mockLogger.debug).toHaveBeenCalledWith("message", complexObject);
+ });
+
+ it("should handle errors as arguments", () => {
+ const mockLogger = createMockLogger();
+
+ const logger = new Logger({
+ level: LogLevel.Error,
+ logger: mockLogger,
+ silent: false,
+ });
+
+ const error = new Error("Test error");
+ logger.error("Error occurred", error);
+ expect(mockLogger.error).toHaveBeenCalledWith("Error occurred", error);
+ });
+ });
+});
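
The surface these tests exercise is small: `createLogger` accepts either a `Logger` instance or a config object (`level`, `logger`, `silent`, with `silent` defaulting to `true`), and the resulting logger exposes `debug`/`info`/`warn`/`error`, the `isDebug()`-style checks, and `shouldLog`. A usage sketch based only on what the tests assert (the import path is illustrative):

```ts
import { createLogger, LogLevel } from "./src/core/logging/logger"; // path is illustrative

// The default logger is silent, so logging has to be opted into explicitly.
const logger = createLogger({ level: LogLevel.Warn, silent: false });

if (logger.shouldLog(LogLevel.Warn)) {
    // Extra arguments are forwarded to the underlying logger implementation.
    logger.warn("request retried", { attempt: 2, delayMs: 1500 });
}
```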
diff --git a/tests/unit/stream/Stream.test.ts b/tests/unit/stream/Stream.test.ts
new file mode 100644
index 0000000..f82b90a
--- /dev/null
+++ b/tests/unit/stream/Stream.test.ts
@@ -0,0 +1,348 @@
+import { Stream } from "../../../src/core/stream/Stream";
+
+describe("Stream", () => {
+ describe("JSON streaming", () => {
+ it("should parse single JSON message", async () => {
+ const mockStream = createReadableStream(['{"value": 1}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }]);
+ });
+
+ it("should parse multiple JSON messages", async () => {
+ const mockStream = createReadableStream(['{"value": 1}\n{"value": 2}\n{"value": 3}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }, { value: 2 }, { value: 3 }]);
+ });
+
+ it("should handle messages split across chunks", async () => {
+ const mockStream = createReadableStream(['{"val', 'ue": 1}\n{"value":', " 2}\n"]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }, { value: 2 }]);
+ });
+
+ it("should skip empty lines", async () => {
+ const mockStream = createReadableStream(['{"value": 1}\n\n\n{"value": 2}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }, { value: 2 }]);
+ });
+
+ it("should handle custom message terminator", async () => {
+ const mockStream = createReadableStream(['{"value": 1}|||{"value": 2}|||']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "|||" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }, { value: 2 }]);
+ });
+ });
+
+ describe("SSE streaming", () => {
+ it("should parse SSE data with prefix", async () => {
+ const mockStream = createReadableStream(['data: {"value": 1}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "sse" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }]);
+ });
+
+ it("should parse multiple SSE events", async () => {
+ const mockStream = createReadableStream(['data: {"value": 1}\ndata: {"value": 2}\ndata: {"value": 3}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "sse" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }, { value: 2 }, { value: 3 }]);
+ });
+
+ it("should stop at stream terminator", async () => {
+ const mockStream = createReadableStream(['data: {"value": 1}\ndata: [DONE]\ndata: {"value": 2}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "sse", streamTerminator: "[DONE]" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }]);
+ });
+
+ it("should skip lines without data prefix", async () => {
+ const mockStream = createReadableStream([
+ 'event: message\ndata: {"value": 1}\nid: 123\ndata: {"value": 2}\n',
+ ]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "sse" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }, { value: 2 }]);
+ });
+ });
+
+ describe("encoding and decoding", () => {
+ it("should decode UTF-8 text using TextDecoder", async () => {
+ const encoder = new TextEncoder();
+ const mockStream = createReadableStream([encoder.encode('{"text": "café"}\n')]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { text: string },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ text: "café" }]);
+ });
+
+ it("should decode emoji correctly", async () => {
+ const encoder = new TextEncoder();
+ const mockStream = createReadableStream([encoder.encode('{"emoji": "🎉"}\n')]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { emoji: string },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ emoji: "🎉" }]);
+ });
+
+ it("should handle binary data chunks", async () => {
+ const encoder = new TextEncoder();
+ const mockStream = createReadableStream([encoder.encode('{"val'), encoder.encode('ue": 1}\n')]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }]);
+ });
+
+ it("should handle multi-byte UTF-8 characters split across chunk boundaries", async () => {
+            // Test string with Japanese (3-byte), Cyrillic (2-byte), German umlaut/ß (2-byte), and Chinese (3-byte) characters
+            const testString = '{"text": "こんにちは Привет Größe 你好"}\n';
+ const fullBytes = new TextEncoder().encode(testString);
+
+ // Split the bytes in the middle of multi-byte characters
+            // Japanese "こ" starts at byte 10 and is 3 bytes (E3 81 93)
+            // Splitting at byte 12 leaves two of its bytes in chunk1 and one in chunk2, exercising mid-character splits
+ const splitPoint = 12; // This splits "こ" in the middle
+ const chunk1 = fullBytes.slice(0, splitPoint);
+ const chunk2 = fullBytes.slice(splitPoint);
+
+ const mockStream = createReadableStream([chunk1, chunk2]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { text: string },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ text: "こんにちは Привет Größe 你好" }]);
+ });
+ });
+
+ describe("abort signal", () => {
+ it("should handle abort signal", async () => {
+ const controller = new AbortController();
+ const mockStream = createReadableStream(['{"value": 1}\n{"value": 2}\n{"value": 3}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ signal: controller.signal,
+ });
+
+ const messages: unknown[] = [];
+ let count = 0;
+ for await (const message of stream) {
+ messages.push(message);
+ count++;
+ if (count === 2) {
+ controller.abort();
+ break;
+ }
+ }
+
+ expect(messages.length).toBe(2);
+ });
+ });
+
+ describe("async iteration", () => {
+ it("should support async iterator protocol", async () => {
+ const mockStream = createReadableStream(['{"value": 1}\n{"value": 2}\n']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const iterator = stream[Symbol.asyncIterator]();
+ const first = await iterator.next();
+ expect(first.done).toBe(false);
+ expect(first.value).toEqual({ value: 1 });
+
+ const second = await iterator.next();
+ expect(second.done).toBe(false);
+ expect(second.value).toEqual({ value: 2 });
+
+ const third = await iterator.next();
+ expect(third.done).toBe(true);
+ });
+ });
+
+ describe("edge cases", () => {
+ it("should handle empty stream", async () => {
+ const mockStream = createReadableStream([]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([]);
+ });
+
+ it("should handle stream with only whitespace", async () => {
+ const mockStream = createReadableStream([" \n\n\t\n "]);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([]);
+ });
+
+ it("should handle incomplete message at end of stream", async () => {
+ const mockStream = createReadableStream(['{"value": 1}\n{"incomplete']);
+ const stream = new Stream({
+ stream: mockStream,
+ parse: async (val: unknown) => val as { value: number },
+ eventShape: { type: "json", messageTerminator: "\n" },
+ });
+
+ const messages: unknown[] = [];
+ for await (const message of stream) {
+ messages.push(message);
+ }
+
+ expect(messages).toEqual([{ value: 1 }]);
+ });
+ });
+});
+
+// Helper function to create a ReadableStream from string or Uint8Array chunks
+function createReadableStream(chunks: (string | Uint8Array)[]): ReadableStream {
+    // Enqueue chunks one at a time, encoding string chunks as UTF-8 bytes
+ let index = 0;
+ return new ReadableStream({
+ pull(controller) {
+ if (index < chunks.length) {
+ const chunk = chunks[index++];
+ controller.enqueue(typeof chunk === "string" ? new TextEncoder().encode(chunk) : chunk);
+ } else {
+ controller.close();
+ }
+ },
+ });
+}
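
Outside of tests, the same `Stream` options drive real response handling. A sketch of consuming an SSE body from a `fetch` `Response`, assuming an illustrative event shape (`delta` is not taken from the SDK):

```ts
import { Stream } from "./src/core/stream/Stream"; // path is illustrative

async function printSse(response: Response): Promise<void> {
    const stream = new Stream({
        stream: response.body!, // ReadableStream<Uint8Array> from fetch
        parse: async (val: unknown) => val as { delta: string }, // illustrative event shape
        eventShape: { type: "sse", streamTerminator: "[DONE]" },
    });

    // Async iteration works the same way the tests above exercise it.
    for await (const event of stream) {
        console.log(event.delta);
    }
}
```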
diff --git a/tests/unit/url/join.test.ts b/tests/unit/url/join.test.ts
index 1956a8c..123488f 100644
--- a/tests/unit/url/join.test.ts
+++ b/tests/unit/url/join.test.ts
@@ -1,88 +1,223 @@
import { join } from "../../../src/core/url/index";
describe("join", () => {
- describe("basic functionality", () => {
- it("should return empty string for empty base", () => {
- expect(join("")).toBe("");
- expect(join("", "path")).toBe("");
- });
+ interface TestCase {
+ description: string;
+ base: string;
+ segments: string[];
+ expected: string;
+ }
- it("should handle single segment", () => {
- expect(join("base", "segment")).toBe("base/segment");
- expect(join("base/", "segment")).toBe("base/segment");
- expect(join("base", "/segment")).toBe("base/segment");
- expect(join("base/", "/segment")).toBe("base/segment");
- });
+ describe("basic functionality", () => {
+ const basicTests: TestCase[] = [
+ { description: "should return empty string for empty base", base: "", segments: [], expected: "" },
+ {
+ description: "should return empty string for empty base with path",
+ base: "",
+ segments: ["path"],
+ expected: "",
+ },
+ {
+ description: "should handle single segment",
+ base: "base",
+ segments: ["segment"],
+ expected: "base/segment",
+ },
+ {
+ description: "should handle single segment with trailing slash on base",
+ base: "base/",
+ segments: ["segment"],
+ expected: "base/segment",
+ },
+ {
+ description: "should handle single segment with leading slash",
+ base: "base",
+ segments: ["/segment"],
+ expected: "base/segment",
+ },
+ {
+ description: "should handle single segment with both slashes",
+ base: "base/",
+ segments: ["/segment"],
+ expected: "base/segment",
+ },
+ {
+ description: "should handle multiple segments",
+ base: "base",
+ segments: ["path1", "path2", "path3"],
+ expected: "base/path1/path2/path3",
+ },
+ {
+ description: "should handle multiple segments with slashes",
+ base: "base/",
+ segments: ["/path1/", "/path2/", "/path3/"],
+ expected: "base/path1/path2/path3/",
+ },
+ ];
- it("should handle multiple segments", () => {
- expect(join("base", "path1", "path2", "path3")).toBe("base/path1/path2/path3");
- expect(join("base/", "/path1/", "/path2/", "/path3/")).toBe("base/path1/path2/path3/");
+ basicTests.forEach(({ description, base, segments, expected }) => {
+ it(description, () => {
+ expect(join(base, ...segments)).toBe(expected);
+ });
});
});
describe("URL handling", () => {
- it("should handle absolute URLs", () => {
- expect(join("https://example.com", "api", "v1")).toBe("https://example.com/api/v1");
- expect(join("https://example.com/", "/api/", "/v1/")).toBe("https://example.com/api/v1/");
- expect(join("https://example.com/base", "api", "v1")).toBe("https://example.com/base/api/v1");
- });
+ const urlTests: TestCase[] = [
+ {
+ description: "should handle absolute URLs",
+ base: "https://example.com",
+ segments: ["api", "v1"],
+ expected: "https://example.com/api/v1",
+ },
+ {
+ description: "should handle absolute URLs with slashes",
+ base: "https://example.com/",
+ segments: ["/api/", "/v1/"],
+ expected: "https://example.com/api/v1/",
+ },
+ {
+ description: "should handle absolute URLs with base path",
+ base: "https://example.com/base",
+ segments: ["api", "v1"],
+ expected: "https://example.com/base/api/v1",
+ },
+ {
+ description: "should preserve URL query parameters",
+ base: "https://example.com?query=1",
+ segments: ["api"],
+ expected: "https://example.com/api?query=1",
+ },
+ {
+ description: "should preserve URL fragments",
+ base: "https://example.com#fragment",
+ segments: ["api"],
+ expected: "https://example.com/api#fragment",
+ },
+ {
+ description: "should preserve URL query and fragments",
+ base: "https://example.com?query=1#fragment",
+ segments: ["api"],
+ expected: "https://example.com/api?query=1#fragment",
+ },
+ {
+ description: "should handle http protocol",
+ base: "http://example.com",
+ segments: ["api"],
+ expected: "http://example.com/api",
+ },
+ {
+ description: "should handle ftp protocol",
+ base: "ftp://example.com",
+ segments: ["files"],
+ expected: "ftp://example.com/files",
+ },
+ {
+ description: "should handle ws protocol",
+ base: "ws://example.com",
+ segments: ["socket"],
+ expected: "ws://example.com/socket",
+ },
+ {
+ description: "should fallback to path joining for malformed URLs",
+ base: "not-a-url://",
+ segments: ["path"],
+ expected: "not-a-url:///path",
+ },
+ ];
- it("should preserve URL query parameters and fragments", () => {
- expect(join("https://example.com?query=1", "api")).toBe("https://example.com/api?query=1");
- expect(join("https://example.com#fragment", "api")).toBe("https://example.com/api#fragment");
- expect(join("https://example.com?query=1#fragment", "api")).toBe(
- "https://example.com/api?query=1#fragment",
- );
- });
-
- it("should handle different protocols", () => {
- expect(join("http://example.com", "api")).toBe("http://example.com/api");
- expect(join("ftp://example.com", "files")).toBe("ftp://example.com/files");
- expect(join("ws://example.com", "socket")).toBe("ws://example.com/socket");
- });
-
- it("should fallback to path joining for malformed URLs", () => {
- expect(join("not-a-url://", "path")).toBe("not-a-url:///path");
+ urlTests.forEach(({ description, base, segments, expected }) => {
+ it(description, () => {
+ expect(join(base, ...segments)).toBe(expected);
+ });
});
});
describe("edge cases", () => {
- it("should handle empty segments", () => {
- expect(join("base", "", "path")).toBe("base/path");
- expect(join("base", null as any, "path")).toBe("base/path");
- expect(join("base", undefined as any, "path")).toBe("base/path");
- });
-
- it("should handle segments with only slashes", () => {
- expect(join("base", "/", "path")).toBe("base/path");
- expect(join("base", "//", "path")).toBe("base/path");
- });
-
- it("should handle base paths with trailing slashes", () => {
- expect(join("base/", "path")).toBe("base/path");
- });
+ const edgeCaseTests: TestCase[] = [
+ {
+ description: "should handle empty segments",
+ base: "base",
+ segments: ["", "path"],
+ expected: "base/path",
+ },
+ {
+ description: "should handle null segments",
+ base: "base",
+ segments: [null as any, "path"],
+ expected: "base/path",
+ },
+ {
+ description: "should handle undefined segments",
+ base: "base",
+ segments: [undefined as any, "path"],
+ expected: "base/path",
+ },
+ {
+ description: "should handle segments with only single slash",
+ base: "base",
+ segments: ["/", "path"],
+ expected: "base/path",
+ },
+ {
+ description: "should handle segments with only double slash",
+ base: "base",
+ segments: ["//", "path"],
+ expected: "base/path",
+ },
+ {
+ description: "should handle base paths with trailing slashes",
+ base: "base/",
+ segments: ["path"],
+ expected: "base/path",
+ },
+ {
+ description: "should handle complex nested paths",
+ base: "api/v1/",
+ segments: ["/users/", "/123/", "/profile"],
+ expected: "api/v1/users/123/profile",
+ },
+ ];
- it("should handle complex nested paths", () => {
- expect(join("api/v1/", "/users/", "/123/", "/profile")).toBe("api/v1/users/123/profile");
+ edgeCaseTests.forEach(({ description, base, segments, expected }) => {
+ it(description, () => {
+ expect(join(base, ...segments)).toBe(expected);
+ });
});
});
describe("real-world scenarios", () => {
- it("should handle API endpoint construction", () => {
- const baseUrl = "https://api.example.com/v1";
- expect(join(baseUrl, "users", "123", "posts")).toBe("https://api.example.com/v1/users/123/posts");
- });
-
- it("should handle file path construction", () => {
- expect(join("/var/www", "html", "assets", "images")).toBe("/var/www/html/assets/images");
- });
+ const realWorldTests: TestCase[] = [
+ {
+ description: "should handle API endpoint construction",
+ base: "https://api.example.com/v1",
+ segments: ["users", "123", "posts"],
+ expected: "https://api.example.com/v1/users/123/posts",
+ },
+ {
+ description: "should handle file path construction",
+ base: "/var/www",
+ segments: ["html", "assets", "images"],
+ expected: "/var/www/html/assets/images",
+ },
+ {
+ description: "should handle relative path construction",
+ base: "../parent",
+ segments: ["child", "grandchild"],
+ expected: "../parent/child/grandchild",
+ },
+ {
+ description: "should handle Windows-style paths",
+ base: "C:\\Users",
+ segments: ["Documents", "file.txt"],
+ expected: "C:\\Users/Documents/file.txt",
+ },
+ ];
- it("should handle relative path construction", () => {
- expect(join("../parent", "child", "grandchild")).toBe("../parent/child/grandchild");
- });
-
- it("should handle Windows-style paths", () => {
- expect(join("C:\\Users", "Documents", "file.txt")).toBe("C:\\Users/Documents/file.txt");
+ realWorldTests.forEach(({ description, base, segments, expected }) => {
+ it(description, () => {
+ expect(join(base, ...segments)).toBe(expected);
+ });
});
});
@@ -100,21 +235,50 @@ describe("join", () => {
});
describe("trailing slash preservation", () => {
- it("should preserve trailing slash on final result when base has trailing slash and no segments", () => {
- expect(join("https://api.example.com/")).toBe("https://api.example.com/");
- expect(join("https://api.example.com/v1/")).toBe("https://api.example.com/v1/");
- });
-
- it("should preserve trailing slash when last segment has trailing slash", () => {
- expect(join("https://api.example.com", "users/")).toBe("https://api.example.com/users/");
- expect(join("api/v1", "users/")).toBe("api/v1/users/");
- });
+ const trailingSlashTests: TestCase[] = [
+ {
+ description:
+ "should preserve trailing slash on final result when base has trailing slash and no segments",
+ base: "https://api.example.com/",
+ segments: [],
+ expected: "https://api.example.com/",
+ },
+ {
+ description: "should preserve trailing slash on v1 path",
+ base: "https://api.example.com/v1/",
+ segments: [],
+ expected: "https://api.example.com/v1/",
+ },
+ {
+ description: "should preserve trailing slash when last segment has trailing slash",
+ base: "https://api.example.com",
+ segments: ["users/"],
+ expected: "https://api.example.com/users/",
+ },
+ {
+ description: "should preserve trailing slash with relative path",
+ base: "api/v1",
+ segments: ["users/"],
+ expected: "api/v1/users/",
+ },
+ {
+ description: "should preserve trailing slash with multiple segments",
+ base: "https://api.example.com",
+ segments: ["v1", "collections/"],
+ expected: "https://api.example.com/v1/collections/",
+ },
+ {
+ description: "should preserve trailing slash with base path",
+ base: "base",
+ segments: ["path1", "path2/"],
+ expected: "base/path1/path2/",
+ },
+ ];
- it("should preserve trailing slash with multiple segments where last has trailing slash", () => {
- expect(join("https://api.example.com", "v1", "collections/")).toBe(
- "https://api.example.com/v1/collections/",
- );
- expect(join("base", "path1", "path2/")).toBe("base/path1/path2/");
+ trailingSlashTests.forEach(({ description, base, segments, expected }) => {
+ it(description, () => {
+ expect(join(base, ...segments)).toBe(expected);
+ });
});
});
});
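
The table above also reads as a behavioral spec for `join`: slashes between segments are normalized, query strings and fragments on the base survive, and a trailing slash is kept only when the last segment (or a segmentless base) has one. A few calls taken directly from the asserted cases (the import path is illustrative):

```ts
import { join } from "./src/core/url"; // path is illustrative

join("https://api.example.com/v1", "users", "123", "posts"); // "https://api.example.com/v1/users/123/posts"
join("https://example.com?query=1", "api");                  // "https://example.com/api?query=1"
join("api/v1", "users/");                                    // "api/v1/users/"
```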
diff --git a/tests/unit/url/qs.test.ts b/tests/unit/url/qs.test.ts
index 80e7e04..42cdffb 100644
--- a/tests/unit/url/qs.test.ts
+++ b/tests/unit/url/qs.test.ts
@@ -1,187 +1,278 @@
import { toQueryString } from "../../../src/core/url/index";
describe("Test qs toQueryString", () => {
- describe("Basic functionality", () => {
- it("should return empty string for null/undefined", () => {
- expect(toQueryString(null)).toBe("");
- expect(toQueryString(undefined)).toBe("");
- });
+ interface BasicTestCase {
+ description: string;
+ input: any;
+ expected: string;
+ }
- it("should return empty string for primitive values", () => {
- expect(toQueryString("hello")).toBe("");
- expect(toQueryString(42)).toBe("");
- expect(toQueryString(true)).toBe("");
- expect(toQueryString(false)).toBe("");
- });
-
- it("should handle empty objects", () => {
- expect(toQueryString({})).toBe("");
- });
+ describe("Basic functionality", () => {
+ const basicTests: BasicTestCase[] = [
+ { description: "should return empty string for null", input: null, expected: "" },
+ { description: "should return empty string for undefined", input: undefined, expected: "" },
+ { description: "should return empty string for string primitive", input: "hello", expected: "" },
+ { description: "should return empty string for number primitive", input: 42, expected: "" },
+ { description: "should return empty string for true boolean", input: true, expected: "" },
+ { description: "should return empty string for false boolean", input: false, expected: "" },
+ { description: "should handle empty objects", input: {}, expected: "" },
+ {
+ description: "should handle simple key-value pairs",
+ input: { name: "John", age: 30 },
+ expected: "name=John&age=30",
+ },
+ ];
- it("should handle simple key-value pairs", () => {
- const obj = { name: "John", age: 30 };
- expect(toQueryString(obj)).toBe("name=John&age=30");
+ basicTests.forEach(({ description, input, expected }) => {
+ it(description, () => {
+ expect(toQueryString(input)).toBe(expected);
+ });
});
});
describe("Array handling", () => {
- it("should handle arrays with indices format (default)", () => {
- const obj = { items: ["a", "b", "c"] };
- expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=b&items%5B2%5D=c");
- });
-
- it("should handle arrays with repeat format", () => {
- const obj = { items: ["a", "b", "c"] };
- expect(toQueryString(obj, { arrayFormat: "repeat" })).toBe("items=a&items=b&items=c");
- });
+ interface ArrayTestCase {
+ description: string;
+ input: any;
+ options?: { arrayFormat?: "repeat" | "indices" };
+ expected: string;
+ }
- it("should handle empty arrays", () => {
- const obj = { items: [] };
- expect(toQueryString(obj)).toBe("");
- });
-
- it("should handle arrays with mixed types", () => {
- const obj = { mixed: ["string", 42, true, false] };
- expect(toQueryString(obj)).toBe("mixed%5B0%5D=string&mixed%5B1%5D=42&mixed%5B2%5D=true&mixed%5B3%5D=false");
- });
-
- it("should handle arrays with objects", () => {
- const obj = { users: [{ name: "John" }, { name: "Jane" }] };
- expect(toQueryString(obj)).toBe("users%5B0%5D%5Bname%5D=John&users%5B1%5D%5Bname%5D=Jane");
- });
+ const arrayTests: ArrayTestCase[] = [
+ {
+ description: "should handle arrays with indices format (default)",
+ input: { items: ["a", "b", "c"] },
+ expected: "items%5B0%5D=a&items%5B1%5D=b&items%5B2%5D=c",
+ },
+ {
+ description: "should handle arrays with repeat format",
+ input: { items: ["a", "b", "c"] },
+ options: { arrayFormat: "repeat" },
+ expected: "items=a&items=b&items=c",
+ },
+ {
+ description: "should handle empty arrays",
+ input: { items: [] },
+ expected: "",
+ },
+ {
+ description: "should handle arrays with mixed types",
+ input: { mixed: ["string", 42, true, false] },
+ expected: "mixed%5B0%5D=string&mixed%5B1%5D=42&mixed%5B2%5D=true&mixed%5B3%5D=false",
+ },
+ {
+ description: "should handle arrays with objects",
+ input: { users: [{ name: "John" }, { name: "Jane" }] },
+ expected: "users%5B0%5D%5Bname%5D=John&users%5B1%5D%5Bname%5D=Jane",
+ },
+ {
+ description: "should handle arrays with objects in repeat format",
+ input: { users: [{ name: "John" }, { name: "Jane" }] },
+ options: { arrayFormat: "repeat" },
+ expected: "users%5Bname%5D=John&users%5Bname%5D=Jane",
+ },
+ ];
- it("should handle arrays with objects in repeat format", () => {
- const obj = { users: [{ name: "John" }, { name: "Jane" }] };
- expect(toQueryString(obj, { arrayFormat: "repeat" })).toBe("users%5Bname%5D=John&users%5Bname%5D=Jane");
+ arrayTests.forEach(({ description, input, options, expected }) => {
+ it(description, () => {
+ expect(toQueryString(input, options)).toBe(expected);
+ });
});
});
describe("Nested objects", () => {
- it("should handle nested objects", () => {
- const obj = { user: { name: "John", age: 30 } };
- expect(toQueryString(obj)).toBe("user%5Bname%5D=John&user%5Bage%5D=30");
- });
-
- it("should handle deeply nested objects", () => {
- const obj = { user: { profile: { name: "John", settings: { theme: "dark" } } } };
- expect(toQueryString(obj)).toBe(
- "user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark",
- );
- });
+ const nestedTests: BasicTestCase[] = [
+ {
+ description: "should handle nested objects",
+ input: { user: { name: "John", age: 30 } },
+ expected: "user%5Bname%5D=John&user%5Bage%5D=30",
+ },
+ {
+ description: "should handle deeply nested objects",
+ input: { user: { profile: { name: "John", settings: { theme: "dark" } } } },
+ expected: "user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark",
+ },
+ {
+ description: "should handle empty nested objects",
+ input: { user: {} },
+ expected: "",
+ },
+ ];
- it("should handle empty nested objects", () => {
- const obj = { user: {} };
- expect(toQueryString(obj)).toBe("");
+ nestedTests.forEach(({ description, input, expected }) => {
+ it(description, () => {
+ expect(toQueryString(input)).toBe(expected);
+ });
});
});
describe("Encoding", () => {
- it("should encode by default", () => {
- const obj = { name: "John Doe", email: "john@example.com" };
- expect(toQueryString(obj)).toBe("name=John%20Doe&email=john%40example.com");
- });
+ interface EncodingTestCase {
+ description: string;
+ input: any;
+ options?: { encode?: boolean };
+ expected: string;
+ }
- it("should not encode when encode is false", () => {
- const obj = { name: "John Doe", email: "john@example.com" };
- expect(toQueryString(obj, { encode: false })).toBe("name=John Doe&email=john@example.com");
- });
-
- it("should encode special characters in keys", () => {
- const obj = { "user name": "John", "email[primary]": "john@example.com" };
- expect(toQueryString(obj)).toBe("user%20name=John&email%5Bprimary%5D=john%40example.com");
- });
+ const encodingTests: EncodingTestCase[] = [
+ {
+ description: "should encode by default",
+ input: { name: "John Doe", email: "john@example.com" },
+ expected: "name=John%20Doe&email=john%40example.com",
+ },
+ {
+ description: "should not encode when encode is false",
+ input: { name: "John Doe", email: "john@example.com" },
+ options: { encode: false },
+ expected: "name=John Doe&email=john@example.com",
+ },
+ {
+ description: "should encode special characters in keys",
+ input: { "user name": "John", "email[primary]": "john@example.com" },
+ expected: "user%20name=John&email%5Bprimary%5D=john%40example.com",
+ },
+ {
+ description: "should not encode special characters in keys when encode is false",
+ input: { "user name": "John", "email[primary]": "john@example.com" },
+ options: { encode: false },
+ expected: "user name=John&email[primary]=john@example.com",
+ },
+ ];
- it("should not encode special characters in keys when encode is false", () => {
- const obj = { "user name": "John", "email[primary]": "john@example.com" };
- expect(toQueryString(obj, { encode: false })).toBe("user name=John&email[primary]=john@example.com");
+ encodingTests.forEach(({ description, input, options, expected }) => {
+ it(description, () => {
+ expect(toQueryString(input, options)).toBe(expected);
+ });
});
});
describe("Mixed scenarios", () => {
- it("should handle complex nested structures", () => {
- const obj = {
- filters: {
- status: ["active", "pending"],
- category: {
- type: "electronics",
- subcategories: ["phones", "laptops"],
+ interface MixedTestCase {
+ description: string;
+ input: any;
+ options?: { arrayFormat?: "repeat" | "indices" };
+ expected: string;
+ }
+
+ const mixedTests: MixedTestCase[] = [
+ {
+ description: "should handle complex nested structures",
+ input: {
+ filters: {
+ status: ["active", "pending"],
+ category: {
+ type: "electronics",
+ subcategories: ["phones", "laptops"],
+ },
},
+ sort: { field: "name", direction: "asc" },
},
- sort: { field: "name", direction: "asc" },
- };
- expect(toQueryString(obj)).toBe(
- "filters%5Bstatus%5D%5B0%5D=active&filters%5Bstatus%5D%5B1%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D%5B0%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D%5B1%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc",
- );
- });
-
- it("should handle complex nested structures with repeat format", () => {
- const obj = {
- filters: {
- status: ["active", "pending"],
- category: {
- type: "electronics",
- subcategories: ["phones", "laptops"],
+ expected:
+ "filters%5Bstatus%5D%5B0%5D=active&filters%5Bstatus%5D%5B1%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D%5B0%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D%5B1%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc",
+ },
+ {
+ description: "should handle complex nested structures with repeat format",
+ input: {
+ filters: {
+ status: ["active", "pending"],
+ category: {
+ type: "electronics",
+ subcategories: ["phones", "laptops"],
+ },
},
+ sort: { field: "name", direction: "asc" },
},
- sort: { field: "name", direction: "asc" },
- };
- expect(toQueryString(obj, { arrayFormat: "repeat" })).toBe(
- "filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc",
- );
- });
-
- it("should handle arrays with null/undefined values", () => {
- const obj = { items: ["a", null, "c", undefined, "e"] };
- expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c&items%5B4%5D=e");
- });
+ options: { arrayFormat: "repeat" },
+ expected:
+ "filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc",
+ },
+ {
+ description: "should handle arrays with null/undefined values",
+ input: { items: ["a", null, "c", undefined, "e"] },
+ expected: "items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c&items%5B4%5D=e",
+ },
+ {
+ description: "should handle objects with null/undefined values",
+ input: { name: "John", age: null, email: undefined, active: true },
+ expected: "name=John&age=&active=true",
+ },
+ ];
- it("should handle objects with null/undefined values", () => {
- const obj = { name: "John", age: null, email: undefined, active: true };
- expect(toQueryString(obj)).toBe("name=John&age=&active=true");
+ mixedTests.forEach(({ description, input, options, expected }) => {
+ it(description, () => {
+ expect(toQueryString(input, options)).toBe(expected);
+ });
});
});
describe("Edge cases", () => {
- it("should handle numeric keys", () => {
- const obj = { "0": "zero", "1": "one" };
- expect(toQueryString(obj)).toBe("0=zero&1=one");
- });
-
- it("should handle boolean values in objects", () => {
- const obj = { enabled: true, disabled: false };
- expect(toQueryString(obj)).toBe("enabled=true&disabled=false");
- });
-
- it("should handle empty strings", () => {
- const obj = { name: "", description: "test" };
- expect(toQueryString(obj)).toBe("name=&description=test");
- });
+ const edgeCaseTests: BasicTestCase[] = [
+ {
+ description: "should handle numeric keys",
+ input: { "0": "zero", "1": "one" },
+ expected: "0=zero&1=one",
+ },
+ {
+ description: "should handle boolean values in objects",
+ input: { enabled: true, disabled: false },
+ expected: "enabled=true&disabled=false",
+ },
+ {
+ description: "should handle empty strings",
+ input: { name: "", description: "test" },
+ expected: "name=&description=test",
+ },
+ {
+ description: "should handle zero values",
+ input: { count: 0, price: 0.0 },
+ expected: "count=0&price=0",
+ },
+ {
+ description: "should handle arrays with empty strings",
+ input: { items: ["a", "", "c"] },
+ expected: "items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c",
+ },
+ ];
- it("should handle zero values", () => {
- const obj = { count: 0, price: 0.0 };
- expect(toQueryString(obj)).toBe("count=0&price=0");
- });
-
- it("should handle arrays with empty strings", () => {
- const obj = { items: ["a", "", "c"] };
- expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c");
+ edgeCaseTests.forEach(({ description, input, expected }) => {
+ it(description, () => {
+ expect(toQueryString(input)).toBe(expected);
+ });
});
});
describe("Options combinations", () => {
- it("should respect both arrayFormat and encode options", () => {
- const obj = { items: ["a & b", "c & d"] };
- expect(toQueryString(obj, { arrayFormat: "repeat", encode: false })).toBe("items=a & b&items=c & d");
- });
+ interface OptionsTestCase {
+ description: string;
+ input: any;
+ options?: { arrayFormat?: "repeat" | "indices"; encode?: boolean };
+ expected: string;
+ }
- it("should use default options when none provided", () => {
- const obj = { items: ["a", "b"] };
- expect(toQueryString(obj)).toBe("items%5B0%5D=a&items%5B1%5D=b");
- });
+ const optionsTests: OptionsTestCase[] = [
+ {
+ description: "should respect both arrayFormat and encode options",
+ input: { items: ["a & b", "c & d"] },
+ options: { arrayFormat: "repeat", encode: false },
+ expected: "items=a & b&items=c & d",
+ },
+ {
+ description: "should use default options when none provided",
+ input: { items: ["a", "b"] },
+ expected: "items%5B0%5D=a&items%5B1%5D=b",
+ },
+ {
+ description: "should merge provided options with defaults",
+ input: { items: ["a", "b"], name: "John Doe" },
+ options: { encode: false },
+ expected: "items[0]=a&items[1]=b&name=John Doe",
+ },
+ ];
- it("should merge provided options with defaults", () => {
- const obj = { items: ["a", "b"], name: "John Doe" };
- expect(toQueryString(obj, { encode: false })).toBe("items[0]=a&items[1]=b&name=John Doe");
+ optionsTests.forEach(({ description, input, options, expected }) => {
+ it(description, () => {
+ expect(toQueryString(input, options)).toBe(expected);
+ });
});
});
});
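
As with `join`, the cases above double as a spec for `toQueryString`: arrays default to indexed keys, `arrayFormat: "repeat"` flattens them, and `encode: false` skips percent-encoding. A short sketch drawn from the asserted cases (the import path is illustrative):

```ts
import { toQueryString } from "./src/core/url"; // path is illustrative

toQueryString({ items: ["a", "b"] });                            // "items%5B0%5D=a&items%5B1%5D=b"
toQueryString({ items: ["a", "b"] }, { arrayFormat: "repeat" }); // "items=a&items=b"
toQueryString({ items: ["a", "b"] }, { encode: false });         // "items[0]=a&items[1]=b"
```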
diff --git a/vitest.config.ts b/vitest.config.mts
similarity index 85%
rename from vitest.config.ts
rename to vitest.config.mts
index 677c585..ba2ec4f 100644
--- a/vitest.config.ts
+++ b/vitest.config.mts
@@ -10,6 +10,7 @@ export default defineConfig({
root: "./tests",
include: ["**/*.test.{js,ts,jsx,tsx}"],
exclude: ["wire/**"],
+ setupFiles: ["./setup.ts"],
},
},
{
@@ -18,7 +19,7 @@ export default defineConfig({
name: "wire",
environment: "node",
root: "./tests/wire",
- setupFiles: ["../mock-server/setup.ts"],
+ setupFiles: ["../setup.ts", "../mock-server/setup.ts"],
},
},
],