diff --git a/.github/workflows/publish-cli.yml b/.github/workflows/publish-cli.yml index dcbcb5ce3dd6..d5c37ef0368a 100644 --- a/.github/workflows/publish-cli.yml +++ b/.github/workflows/publish-cli.yml @@ -119,7 +119,7 @@ jobs: uses: actions/checkout@v4 with: ref: main - fetch-depth: 2 + fetch-depth: 20 - name: Install uses: ./.github/actions/install diff --git a/README.md b/README.md index 6763eee89541..0e9e04b35a24 100644 --- a/README.md +++ b/README.md @@ -124,7 +124,7 @@ Fern's model generators will output schemas or types defined in your OpenAPI spe | Generator ID | Latest Version | Entrypoint | | ----------------------------- | ------------------------------------------------------------------------------------------------ | ----------------------------------------------------------------------------- | | `fernapi/fern-pydantic-model` | ![Pydantic Model Generator Version](https://img.shields.io/docker/v/fernapi/fern-pydantic-model) | [cli.py](./generators/python/src/fern_python/generators/sdk/cli.py) | -| `fernapi/java-model` | ![Java Model Generator Version](https://img.shields.io/docker/v/fernapi/java-model) | [Cli.java](./generators/java/sdk/src/main/java/com/fern/java/client/Cli.java) | +| `fernapi/fern-java-model` | ![Java Model Generator Version](https://img.shields.io/docker/v/fernapi/fern-java-model) | [Cli.java](./generators/java/sdk/src/main/java/com/fern/java/client/Cli.java) | | `fernapi/fern-ruby-model` | ![Ruby Model Generator Version](https://img.shields.io/docker/v/fernapi/fern-ruby-model) | [cli.ts](./generators/ruby/model/src/cli.ts) | | `fernapi/fern-go-model` | ![Go Model Generator Version](https://img.shields.io/docker/v/fernapi/fern-go-model) | [main.go](./generators/go/cmd/fern-go-model/main.go) | diff --git a/docs-yml.schema.json b/docs-yml.schema.json index d476c5ec8f9d..1ef92a792dda 100644 --- a/docs-yml.schema.json +++ b/docs-yml.schema.json @@ -195,6 +195,16 @@ } ] }, + "ai-examples": { + "oneOf": [ + { + "$ref": "#/definitions/docs.AiExamplesConfig" + }, + { + "type": "null" + } + ] + }, "metadata": { "oneOf": [ { @@ -1552,6 +1562,16 @@ "docs.PlaygroundSettings": { "type": "object", "properties": { + "hidden": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, "environments": { "oneOf": [ { @@ -3911,6 +3931,32 @@ }, "additionalProperties": false }, + "docs.AiExamplesConfig": { + "type": "object", + "properties": { + "enabled": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, + "style": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false + }, "docs.TwitterCardSetting": { "type": "string", "enum": [ diff --git a/fern/apis/docs-yml/definition/docs.yml b/fern/apis/docs-yml/definition/docs.yml index 07fa26db065d..36c06de14178 100644 --- a/fern/apis/docs-yml/definition/docs.yml +++ b/fern/apis/docs-yml/definition/docs.yml @@ -134,6 +134,17 @@ types: # properties: # app-id: string + AiExamplesConfig: + properties: + enabled: + type: optional + docs: | + Enable AI-powered example enhancement for API documentation. When enabled, API examples will be enhanced with AI-generated content to provide more comprehensive and realistic examples. + style: + type: optional + docs: | + Custom styling instructions for AI-generated examples. When provided, these instructions will guide the AI in generating examples that match your preferred style, naming conventions, or domain-specific terminology. Limited to 500 characters. 
+ DocsConfiguration: properties: instances: list @@ -179,6 +190,12 @@ types: "ai-search": optional + # ai examples + "ai-examples": + type: optional<AiExamplesConfig> + docs: | + Configure AI-powered example enhancement for API documentation. When enabled, API examples will be enhanced with AI-generated content to provide more comprehensive and realistic examples. + # seo metadata: optional<MetadataConfig> redirects: optional<list<RedirectConfig>> @@ -1633,17 +1650,22 @@ types: Enable dynamic snippets in `docs.yml`, then configure them by following the SDK snippets setup instructions. ai-examples: type: optional<boolean> - availability: pre-release + availability: deprecated docs: | Enable AI-powered example enhancement for API documentation. When enabled, API examples will be enhanced with AI-generated content to provide more comprehensive and realistic examples. + + DEPRECATED: Use the top-level `ai-examples` property instead. ai-example-style-instructions: type: optional<string> - availability: pre-release + availability: deprecated docs: | Custom styling instructions for AI-generated examples. When provided, these instructions will guide the AI in generating examples that match your preferred style, naming conventions, or domain-specific terminology. Limited to 500 characters. + DEPRECATED: Use `style` under the top-level `ai-examples` property instead. + PlaygroundSettings: properties: + hidden: optional<boolean> environments: type: optional> docs: A list of environment IDs that are allowed to be used in the playground. diff --git a/generators/python-v2/sdk/src/wire-tests/WireTestSetupGenerator.ts b/generators/python-v2/sdk/src/wire-tests/WireTestSetupGenerator.ts index ea2b38c9cb90..0311f74a100c 100644 --- a/generators/python-v2/sdk/src/wire-tests/WireTestSetupGenerator.ts +++ b/generators/python-v2/sdk/src/wire-tests/WireTestSetupGenerator.ts @@ -69,7 +69,7 @@ export class WireTestSetupGenerator { wiremock: image: wiremock/wiremock:3.9.1 ports: - - "8080:8080" + - "0:8080" # Use dynamic port to avoid conflicts with concurrent tests volumes: - ./wiremock-mappings.json:/home/wiremock/mappings/wiremock-mappings.json command: ["--global-response-templating", "--verbose"] @@ -121,14 +121,23 @@ The WireMock container lifecycle itself is managed by a top-level pytest plugin (wiremock_pytest_plugin.py) so that the container is started exactly once per test run, even when using pytest-xdist. """ + +import inspect +import os from typing import Any, Dict, Optional import requests -import inspect ${clientImport} ${environmentSetup.imports} + +def _get_wiremock_base_url() -> str: + """Returns the WireMock base URL using the dynamically assigned port.""" + port = os.environ.get("WIREMOCK_PORT", "8080") + return f"http://localhost:{port}" + + def get_client(test_id: str) -> ${clientClassName}: """ Creates a configured client instance for wire tests. @@ -140,12 +149,13 @@ def get_client(test_id: str) -> ${clientClassName}: A configured client instance with all required auth parameters. """ test_headers = {"X-Test-Id": test_id} + base_url = _get_wiremock_base_url() # Prefer passing headers directly if the client constructor supports it.
try: if "headers" in inspect.signature(${clientClassName}).parameters: return ${clientClassName}( - ${environmentSetup.param}, + ${environmentSetup.paramDynamic}, headers=test_headers, ${clientConstructorParams} ) @@ -155,7 +165,7 @@ ${clientConstructorParams} import httpx return ${clientClassName}( - ${environmentSetup.param}, + ${environmentSetup.paramDynamic}, httpx_client=httpx.Client(headers=test_headers), ${clientConstructorParams} ) @@ -169,7 +179,7 @@ def verify_request_count( expected: int, ) -> None: """Verifies the number of requests made to WireMock filtered by test ID for concurrency safety""" - wiremock_admin_url = "http://localhost:8080/__admin" + wiremock_admin_url = f"{_get_wiremock_base_url()}/__admin" request_body: Dict[str, Any] = { "method": method, "urlPath": url_path, @@ -224,12 +234,11 @@ by pytest's normal test collection rules. import os import subprocess -from typing import Optional import pytest - _STARTED: bool = False +_WIREMOCK_PORT: str = "8080" # Default, will be updated after container starts def _compose_file() -> str: @@ -241,22 +250,54 @@ def _compose_file() -> str: return os.path.join(wiremock_dir, "docker-compose.test.yml") +def _project_name() -> str: + """Returns a unique project name for this test fixture to avoid container name conflicts.""" + tests_dir = os.path.dirname(__file__) + project_root = os.path.abspath(os.path.join(tests_dir, "..")) + # Use the last two directory names to create a unique project name + # e.g., "python-streaming-parameter-openapi-with-wire-tests" + parent = os.path.basename(os.path.dirname(project_root)) + current = os.path.basename(project_root) + return f"{parent}-{current}".replace("_", "-").lower() + + +def _get_wiremock_port() -> str: + """Gets the dynamically assigned port for the WireMock container.""" + compose_file = _compose_file() + project = _project_name() + try: + result = subprocess.run( + ["docker", "compose", "-f", compose_file, "-p", project, "port", "wiremock", "8080"], + check=True, + capture_output=True, + text=True, + ) + # Output is like "0.0.0.0:32768" or "[::]:32768" + port = result.stdout.strip().split(":")[-1] + return port + except subprocess.CalledProcessError: + return "8080" # Fallback to default + + def _start_wiremock() -> None: """Starts the WireMock container using docker-compose.""" - global _STARTED + global _STARTED, _WIREMOCK_PORT if _STARTED: return compose_file = _compose_file() - print("\\nStarting WireMock container...") + project = _project_name() + print(f"\\nStarting WireMock container (project: {project})...") try: subprocess.run( - ["docker", "compose", "-f", compose_file, "up", "-d", "--wait"], + ["docker", "compose", "-f", compose_file, "-p", project, "up", "-d", "--wait"], check=True, capture_output=True, text=True, ) - print("WireMock container is ready") + _WIREMOCK_PORT = _get_wiremock_port() + os.environ["WIREMOCK_PORT"] = _WIREMOCK_PORT + print(f"WireMock container is ready on port {_WIREMOCK_PORT}") _STARTED = True except subprocess.CalledProcessError as e: print(f"Failed to start WireMock: {e.stderr}") @@ -266,9 +307,10 @@ def _start_wiremock() -> None: def _stop_wiremock() -> None: """Stops and removes the WireMock container.""" compose_file = _compose_file() + project = _project_name() print("\\nStopping WireMock container...") subprocess.run( - ["docker", "compose", "-f", compose_file, "down", "-v"], + ["docker", "compose", "-f", compose_file, "-p", project, "down", "-v"], check=False, capture_output=True, ) @@ -381,14 +423,15 @@ def pytest_unconfigure(config: 
pytest.Config) -> None: * create a custom environment instance that points all URLs to WireMock. * If no environments are defined, we use base_url directly. */ - private buildEnvironmentSetup(): { imports: string; param: string } { + private buildEnvironmentSetup(): { imports: string; param: string; paramDynamic: string } { const environments = this.ir.environments; if (environments?.environments.type !== "multipleBaseUrls") { // No environments defined - use base_url directly return { imports: "", - param: 'base_url="http://localhost:8080"' + param: 'base_url="http://localhost:8080"', + paramDynamic: "base_url=base_url" }; } @@ -398,14 +441,20 @@ def pytest_unconfigure(config: pytest.Config) -> None: const environmentClassName = this.getEnvironmentClassName(); const modulePath = this.getModulePath(); - // Build kwargs for all base URLs pointing to WireMock + // Build kwargs for all base URLs pointing to WireMock (static version for backwards compat) const baseUrlKwargs = envConfig.baseUrls .map((baseUrl) => `${baseUrl.name.snakeCase.safeName}="http://localhost:8080"`) .join(", "); + // Build kwargs for all base URLs using dynamic base_url variable + const baseUrlKwargsDynamic = envConfig.baseUrls + .map((baseUrl) => `${baseUrl.name.snakeCase.safeName}=base_url`) + .join(", "); + return { imports: `from ${modulePath}.environment import ${environmentClassName}`, - param: `environment=${environmentClassName}(${baseUrlKwargs})` + param: `environment=${environmentClassName}(${baseUrlKwargs})`, + paramDynamic: `environment=${environmentClassName}(${baseUrlKwargsDynamic})` }; } diff --git a/generators/python/sdk/versions.yml b/generators/python/sdk/versions.yml index 2ecf11d91616..1f302405f59f 100644 --- a/generators/python/sdk/versions.yml +++ b/generators/python/sdk/versions.yml @@ -1,5 +1,18 @@ # yaml-language-server: $schema=../../../fern-versions-yml.schema.json # For unreleased changes, use unreleased.yml + +- version: 4.48.1 + changelogEntry: + - summary: | + Fix WireMock stub generation for streaming endpoints. When an endpoint has both streaming + and non-streaming variants (via x-fern-streaming with stream-condition), the generated + WireMock stubs now include request body matching criteria to differentiate between them. + SSE stubs match on `stream: true` in the request body and have higher priority, ensuring + wire tests correctly route streaming requests to the SSE stub. + type: fix + createdAt: "2026-01-14" + irVersion: 62 + - version: 4.48.0 changelogEntry: - summary: | diff --git a/generators/ruby-v2/base/src/asIs/internal/http/base_request.Template.rb b/generators/ruby-v2/base/src/asIs/internal/http/base_request.Template.rb index e462f9070fb2..453a25dfc7a1 100644 --- a/generators/ruby-v2/base/src/asIs/internal/http/base_request.Template.rb +++ b/generators/ruby-v2/base/src/asIs/internal/http/base_request.Template.rb @@ -22,10 +22,16 @@ def initialize(base_url:, path:, method:, headers: {}, query: {}, request_option @request_options = request_options end + # @return [Hash] The query parameters merged with additional query parameters from request options. + def encode_query + additional_query = @request_options&.dig(:additional_query_parameters) || @request_options&.dig("additional_query_parameters") || {} + @query.merge(additional_query) + end + # Child classes should implement: # - encode_headers: Returns the encoded HTTP request headers. # - encode_body: Returns the encoded HTTP request body. 
end end end -end \ No newline at end of file +end \ No newline at end of file diff --git a/generators/ruby-v2/base/src/asIs/internal/http/raw_client.Template.rb b/generators/ruby-v2/base/src/asIs/internal/http/raw_client.Template.rb index 46433a4a6bc5..6b8bcc8cc1af 100644 --- a/generators/ruby-v2/base/src/asIs/internal/http/raw_client.Template.rb +++ b/generators/ruby-v2/base/src/asIs/internal/http/raw_client.Template.rb @@ -47,17 +47,19 @@ def send(request) # @param request [<%= gem_namespace %>::Internal::Http::BaseRequest] The HTTP request. # @return [URI::Generic] The URL. def build_url(request) + encoded_query = request.encode_query + # If the path is already an absolute URL, use it directly if request.path.start_with?("http://", "https://") url = request.path - url = "#{url}?#{encode_query(request.query)}" if request.query&.any? + url = "#{url}?#{encode_query(encoded_query)}" if encoded_query&.any? return URI.parse(url) end path = request.path.start_with?("/") ? request.path[1..] : request.path base = request.base_url || @base_url url = "#{base.chomp("/")}/#{path}" - url = "#{url}?#{encode_query(request.query)}" if request.query&.any? + url = "#{url}?#{encode_query(encoded_query)}" if encoded_query&.any? URI.parse(url) end @@ -113,4 +115,4 @@ def inspect end end end -end \ No newline at end of file +end \ No newline at end of file diff --git a/generators/ruby-v2/sdk/versions.yml b/generators/ruby-v2/sdk/versions.yml index ccc3db5d84bb..a6ad101fa178 100644 --- a/generators/ruby-v2/sdk/versions.yml +++ b/generators/ruby-v2/sdk/versions.yml @@ -1,5 +1,15 @@ # yaml-language-server: $schema=../../../fern-versions-yml.schema.json +- version: 1.0.0-rc79 + changelogEntry: + - summary: | + Add support for additional_query_parameters in request options. Users can now pass + additional query parameters via request_options[:additional_query_parameters] which + will be merged with the endpoint's query parameters when making HTTP requests. + type: feat + createdAt: "2026-01-14" + irVersion: 61 + - version: 1.0.0-rc78 changelogEntry: - summary: Update Dockerfile to use the latest generator-cli with improve reference.md generation. 
diff --git a/generators/swift/sdk/src/generators/client/util/__test__/snapshots/formatted-endpoint-paths/python-streaming-parameter-openapi.swift b/generators/swift/sdk/src/generators/client/util/__test__/snapshots/formatted-endpoint-paths/python-streaming-parameter-openapi.swift new file mode 100644 index 000000000000..00216c32ae90 --- /dev/null +++ b/generators/swift/sdk/src/generators/client/util/__test__/snapshots/formatted-endpoint-paths/python-streaming-parameter-openapi.swift @@ -0,0 +1,3 @@ +// service_ +"/chat" +"/chat" \ No newline at end of file diff --git a/generators/swift/sdk/src/generators/client/util/__test__/snapshots/formatted-endpoint-paths/ts-extra-properties.swift b/generators/swift/sdk/src/generators/client/util/__test__/snapshots/formatted-endpoint-paths/ts-extra-properties.swift new file mode 100644 index 000000000000..ec1b793ca624 --- /dev/null +++ b/generators/swift/sdk/src/generators/client/util/__test__/snapshots/formatted-endpoint-paths/ts-extra-properties.swift @@ -0,0 +1,3 @@ +// service_ +"/user" +"/user" \ No newline at end of file diff --git a/generators/typescript/sdk/versions.yml b/generators/typescript/sdk/versions.yml index f535e9dfa675..e8e11d727ff9 100644 --- a/generators/typescript/sdk/versions.yml +++ b/generators/typescript/sdk/versions.yml @@ -1,4 +1,17 @@ # yaml-language-server: $schema=../../../fern-versions-yml.schema.json +- version: 3.43.12 + changelogEntry: + - summary: | + Fix passthrough() function to only include truly unknown/extra properties in the result. + Previously, when a type had `additionalProperties: true` and used property name mapping + (e.g., snake_case to camelCase), the passthrough would spread both the original raw object + and the transformed value, resulting in duplicate properties with both naming conventions. + Now the passthrough correctly filters out known properties before spreading, ensuring only + extra properties that aren't part of the schema definition are included. + type: fix + createdAt: "2026-01-14" + irVersion: 63 + - version: 3.43.11 changelogEntry: - summary: Update linters and formatters to latest versions. 
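The passthrough behavior described in the 3.43.12 changelog entry above is easiest to see in isolation. Below is a minimal, self-contained TypeScript sketch of the filtering-then-spreading logic implemented in the core-utilities `object.ts` change that follows; the helper name `mergePassthrough` and the sample keys are illustrative only and are not part of the generated core utilities.

```typescript
// Sketch: spread only the keys that are not part of the schema definition,
// then apply the transformed (property-name-mapped) value on top.
function mergePassthrough(
    raw: Record<string, unknown>,
    transformed: Record<string, unknown>,
    knownRawKeys: Set<string>
): Record<string, unknown> {
    const extraProperties: Record<string, unknown> = {};
    for (const [key, value] of Object.entries(raw)) {
        if (!knownRawKeys.has(key)) {
            extraProperties[key] = value;
        }
    }
    return { ...extraProperties, ...transformed };
}

// Before the fix, spreading the raw object directly would surface both
// `user_name` and `userName`; filtering first keeps only `favorite_color`.
const merged = mergePassthrough(
    { user_name: "alice", favorite_color: "green" },
    { userName: "alice" },
    new Set(["user_name"])
);
console.log(merged); // { favorite_color: "green", userName: "alice" }
```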
diff --git a/generators/typescript/utils/core-utilities/src/core/schemas/builders/object/object.ts b/generators/typescript/utils/core-utilities/src/core/schemas/builders/object/object.ts index bdad8076717c..024a96e54a56 100644 --- a/generators/typescript/utils/core-utilities/src/core/schemas/builders/object/object.ts +++ b/generators/typescript/utils/core-utilities/src/core/schemas/builders/object/object.ts @@ -270,6 +270,8 @@ export function getObjectUtils(schema: BaseObjectSchema { + const knownRawKeys = new Set(schema._getRawProperties() as string[]); + const knownParsedKeys = new Set(schema._getParsedProperties() as string[]); const baseSchema: BaseObjectSchema = { _getParsedProperties: () => schema._getParsedProperties(), @@ -279,10 +281,18 @@ export function getObjectUtils(schema: BaseObjectSchema = {}; + if (typeof raw === "object" && raw != null) { + for (const [key, value] of Object.entries(raw)) { + if (!knownRawKeys.has(key)) { + extraProperties[key] = value; + } + } + } return { ok: true, value: { - ...(raw as any), + ...extraProperties, ...transformed.value, }, }; @@ -292,10 +302,18 @@ export function getObjectUtils(schema: BaseObjectSchema = {}; + if (typeof parsed === "object" && parsed != null) { + for (const [key, value] of Object.entries(parsed)) { + if (!knownParsedKeys.has(key)) { + extraProperties[key] = value; + } + } + } return { ok: true, value: { - ...(parsed as any), + ...extraProperties, ...transformed.value, }, }; diff --git a/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/convertDiscriminatedOneOf.ts b/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/convertDiscriminatedOneOf.ts index a9ef2c2370f4..6fe3f4a33b43 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/convertDiscriminatedOneOf.ts +++ b/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/convertDiscriminatedOneOf.ts @@ -8,6 +8,9 @@ import { Source } from "@fern-api/openapi-ir"; import { OpenAPIV3 } from "openapi-types"; + +import { getExtension } from "../getExtension"; +import { FernOpenAPIExtension } from "../openapi/v3/extensions/fernExtensions"; import { convertReferenceObject, convertSchema, convertSchemaObject } from "./convertSchemas"; import { SchemaParserContext } from "./SchemaParserContext"; import { isReferenceObject } from "./utils/isReferenceObject"; @@ -48,6 +51,7 @@ export function convertDiscriminatedOneOf({ source: Source; }): SchemaWithExample { const discriminant = discriminator.propertyName; + const discriminantNameOverride = getExtension(discriminator, FernOpenAPIExtension.FERN_PROPERTY_NAME); const unionSubTypes = Object.fromEntries( Object.entries(discriminator.mapping ?? 
{}).map(([discriminantValue, schema]) => { const subtypeReference = convertReferenceObject( @@ -105,6 +109,7 @@ export function convertDiscriminatedOneOf({ description, availability, discriminant, + discriminantNameOverride, subtypes: unionSubTypes, namespace, groupName, @@ -213,6 +218,7 @@ export function convertDiscriminatedOneOfWithVariants({ description, availability, discriminant, + discriminantNameOverride: undefined, subtypes: unionSubTypes, namespace, groupName, @@ -230,6 +236,7 @@ export function wrapDiscriminatedOneOf({ description, availability, discriminant, + discriminantNameOverride, subtypes, namespace, groupName, @@ -244,6 +251,7 @@ export function wrapDiscriminatedOneOf({ description: string | undefined; availability: Availability | undefined; discriminant: string; + discriminantNameOverride: string | undefined; subtypes: Record; namespace: string | undefined; groupName: SdkGroupName | undefined; @@ -254,6 +262,7 @@ export function wrapDiscriminatedOneOf({ description, availability, discriminantProperty: discriminant, + discriminantPropertyNameOverride: discriminantNameOverride, nameOverride, generatedName, title, diff --git a/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaToSchemaWithExample.ts b/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaToSchemaWithExample.ts index 6dab8b1e8911..fc53ddd4d719 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaToSchemaWithExample.ts +++ b/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaToSchemaWithExample.ts @@ -240,6 +240,7 @@ function convertToOneOf(oneOfSchema: OneOfSchema): OneOfSchemaWithExample { description: oneOfSchema.description, availability: oneOfSchema.availability, discriminantProperty: oneOfSchema.discriminantProperty, + discriminantPropertyNameOverride: oneOfSchema.discriminantPropertyNameOverride, generatedName: oneOfSchema.generatedName, nameOverride: oneOfSchema.nameOverride, title: oneOfSchema.title, diff --git a/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaWithExampleToSchema.ts b/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaWithExampleToSchema.ts index 3927059d4ff3..aa680b83618b 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaWithExampleToSchema.ts +++ b/packages/cli/api-importers/openapi/openapi-ir-parser/src/schema/utils/convertSchemaWithExampleToSchema.ts @@ -216,6 +216,7 @@ function convertToOneOf(oneOfSchema: OneOfSchemaWithExample): OneOfSchema { description: oneOfSchema.description, availability: oneOfSchema.availability, discriminantProperty: oneOfSchema.discriminantProperty, + discriminantPropertyNameOverride: oneOfSchema.discriminantPropertyNameOverride, generatedName: oneOfSchema.generatedName, title: oneOfSchema.title, nameOverride: oneOfSchema.nameOverride, diff --git a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi-ir/x-fern-property-name.json b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi-ir/x-fern-property-name.json index 7e7c87c9ff72..28ab41a2cc1e 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi-ir/x-fern-property-name.json +++ b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi-ir/x-fern-property-name.json @@ -45,6 +45,210 @@ 
"type": "openapi" }, "type": "object" + }, + "UnionWithDiscriminant": { + "value": { + "commonProperties": [], + "description": "A union type with a discriminant property.", + "discriminantProperty": "@type", + "discriminantPropertyNameOverride": "type", + "generatedName": "UnionWithDiscriminant", + "schemas": { + "VariantOne": { + "generatedName": "ComponentsSchemasVariantOne", + "schema": "VariantOne", + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "reference" + }, + "VariantTwo": { + "generatedName": "ComponentsSchemasVariantTwo", + "schema": "VariantTwo", + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "reference" + } + }, + "groupName": [], + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "discriminated" + }, + "type": "oneOf" + }, + "VariantOne": { + "allOf": [], + "properties": [ + { + "conflict": {}, + "generatedName": "variantOneFeatureOne", + "key": "featureOne", + "schema": { + "generatedName": "VariantOneFeatureOne", + "value": { + "schema": { + "type": "string" + }, + "generatedName": "VariantOneFeatureOne", + "groupName": [], + "type": "primitive" + }, + "groupName": [], + "type": "optional" + }, + "audiences": [] + } + ], + "allOfPropertyConflicts": [], + "generatedName": "VariantOne", + "groupName": [], + "additionalProperties": false, + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "object" + }, + "VariantTwo": { + "allOf": [], + "properties": [ + { + "conflict": {}, + "generatedName": "variantTwoFeatureTwo", + "key": "featureTwo", + "schema": { + "generatedName": "VariantTwoFeatureTwo", + "value": { + "schema": { + "type": "string" + }, + "generatedName": "VariantTwoFeatureTwo", + "groupName": [], + "type": "primitive" + }, + "groupName": [], + "type": "optional" + }, + "audiences": [] + } + ], + "allOfPropertyConflicts": [], + "generatedName": "VariantTwo", + "groupName": [], + "additionalProperties": false, + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "object" + }, + "TestIdentity": { + "value": { + "commonProperties": [], + "description": "Test description", + "discriminantProperty": "@type", + "discriminantPropertyNameOverride": "type", + "generatedName": "TestIdentity", + "schemas": { + "TestA": { + "generatedName": "ComponentsSchemasTestA", + "schema": "TestA", + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "reference" + }, + "TestB": { + "generatedName": "ComponentsSchemasTestB", + "schema": "TestB", + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "reference" + } + }, + "groupName": [], + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "discriminated" + }, + "type": "oneOf" + }, + "TestA": { + "allOf": [], + "properties": [ + { + "conflict": {}, + "generatedName": "testAAFeature", + "key": "aFeature", + "schema": { + "generatedName": "TestAAFeature", + "value": { + "schema": { + "type": "string" + }, + "generatedName": "TestAAFeature", + "groupName": [], + "type": "primitive" + }, + "groupName": [], + "type": "optional" + }, + "audiences": [] + } + ], + "allOfPropertyConflicts": [], + "generatedName": "TestA", + "groupName": [], + "additionalProperties": false, + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "object" + }, + "TestB": { + "allOf": [], + "properties": [ + { + "conflict": {}, + "generatedName": "testBBFeature", + "key": "bFeature", + "schema": { + "generatedName": "TestBBFeature", + "value": { 
+ "schema": { + "type": "string" + }, + "generatedName": "TestBBFeature", + "groupName": [], + "type": "primitive" + }, + "groupName": [], + "type": "optional" + }, + "audiences": [] + } + ], + "allOfPropertyConflicts": [], + "generatedName": "TestB", + "groupName": [], + "additionalProperties": false, + "source": { + "file": "../openapi.yml", + "type": "openapi" + }, + "type": "object" } }, "namespacedSchemas": {} diff --git a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi/x-fern-property-name.json b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi/x-fern-property-name.json index 44779d38820a..0bef7734a8d2 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi/x-fern-property-name.json +++ b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/__snapshots__/openapi/x-fern-property-name.json @@ -25,6 +25,80 @@ "openapi": "../openapi.yml", }, }, + "TestA": { + "docs": undefined, + "inline": undefined, + "properties": { + "aFeature": "optional", + }, + "source": { + "openapi": "../openapi.yml", + }, + }, + "TestB": { + "docs": undefined, + "inline": undefined, + "properties": { + "bFeature": "optional", + }, + "source": { + "openapi": "../openapi.yml", + }, + }, + "TestIdentity": { + "availability": undefined, + "base-properties": {}, + "discriminant": { + "name": "type", + "value": "@type", + }, + "docs": "Test description", + "encoding": undefined, + "source": { + "openapi": "../openapi.yml", + }, + "union": { + "TestA": "TestA", + "TestB": "TestB", + }, + }, + "UnionWithDiscriminant": { + "availability": undefined, + "base-properties": {}, + "discriminant": { + "name": "type", + "value": "@type", + }, + "docs": "A union type with a discriminant property.", + "encoding": undefined, + "source": { + "openapi": "../openapi.yml", + }, + "union": { + "VariantOne": "VariantOne", + "VariantTwo": "VariantTwo", + }, + }, + "VariantOne": { + "docs": undefined, + "inline": undefined, + "properties": { + "featureOne": "optional", + }, + "source": { + "openapi": "../openapi.yml", + }, + }, + "VariantTwo": { + "docs": undefined, + "inline": undefined, + "properties": { + "featureTwo": "optional", + }, + "source": { + "openapi": "../openapi.yml", + }, + }, }, }, "rawContents": "types: @@ -37,6 +111,48 @@ name: renamed_id source: openapi: ../openapi.yml + UnionWithDiscriminant: + discriminant: + name: type + value: '@type' + base-properties: {} + docs: A union type with a discriminant property. 
+ union: + VariantOne: VariantOne + VariantTwo: VariantTwo + source: + openapi: ../openapi.yml + VariantOne: + properties: + featureOne: optional + source: + openapi: ../openapi.yml + VariantTwo: + properties: + featureTwo: optional + source: + openapi: ../openapi.yml + TestIdentity: + discriminant: + name: type + value: '@type' + base-properties: {} + docs: Test description + union: + TestA: TestA + TestB: TestB + source: + openapi: ../openapi.yml + TestA: + properties: + aFeature: optional + source: + openapi: ../openapi.yml + TestB: + properties: + bFeature: optional + source: + openapi: ../openapi.yml ", }, }, diff --git a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/fern/generators.yml b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/fern/generators.yml index 5b01f1e0833d..7ad486f7db37 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/fern/generators.yml +++ b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/fern/generators.yml @@ -2,3 +2,4 @@ api: specs: - openapi: ../openapi.yml + overrides: ../overrides.yml diff --git a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/openapi.yml b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/openapi.yml index c582d3430039..685505952091 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/openapi.yml +++ b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/openapi.yml @@ -12,3 +12,63 @@ components: x-fern-property-name: renamed_id type: string format: uuid + UnionWithDiscriminant: + type: object + description: A union type with a discriminant property. 
+ oneOf: + - $ref: "#/components/schemas/VariantOne" + - $ref: "#/components/schemas/VariantTwo" + discriminator: + propertyName: "@type" + x-fern-property-name: type + mapping: + VariantOne: "#/components/schemas/VariantOne" + VariantTwo: "#/components/schemas/VariantTwo" + VariantOne: + type: object + properties: + "@type": + type: string + featureOne: + type: string + required: + - "@type" + VariantTwo: + type: object + properties: + "@type": + type: string + featureTwo: + type: string + required: + - "@type" + # Test case for discriminator with x-fern-property-name applied via overrides + TestIdentity: + type: object + description: Test description + oneOf: + - $ref: "#/components/schemas/TestA" + - $ref: "#/components/schemas/TestB" + discriminator: + propertyName: "@type" + mapping: + TestA: "#/components/schemas/TestA" + TestB: "#/components/schemas/TestB" + TestA: + type: object + properties: + "@type": + type: string + aFeature: + type: string + required: + - "@type" + TestB: + type: object + properties: + "@type": + type: string + bFeature: + type: string + required: + - "@type" diff --git a/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/overrides.yml b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/overrides.yml new file mode 100644 index 000000000000..751d3c628d6f --- /dev/null +++ b/packages/cli/api-importers/openapi/openapi-ir-to-fern-tests/src/__test__/fixtures/x-fern-property-name/overrides.yml @@ -0,0 +1,5 @@ +components: + schemas: + TestIdentity: + discriminator: + x-fern-property-name: type diff --git a/packages/cli/api-importers/openapi/openapi-ir-to-fern/src/buildTypeDeclaration.ts b/packages/cli/api-importers/openapi/openapi-ir-to-fern/src/buildTypeDeclaration.ts index 41f0bceec75e..fd36164ed814 100644 --- a/packages/cli/api-importers/openapi/openapi-ir-to-fern/src/buildTypeDeclaration.ts +++ b/packages/cli/api-importers/openapi/openapi-ir-to-fern/src/buildTypeDeclaration.ts @@ -672,7 +672,10 @@ export function buildOneOfTypeDeclaration({ return { name: schema.nameOverride ?? schema.generatedName, schema: { - discriminant: schema.discriminantProperty, + discriminant: + schema.discriminantPropertyNameOverride != null + ? { name: schema.discriminantPropertyNameOverride, value: schema.discriminantProperty } + : schema.discriminantProperty, "base-properties": baseProperties, docs: schema.description ?? undefined, availability: schema.availability != null ? convertAvailability(schema.availability) : undefined, diff --git a/packages/cli/api-importers/openapi/openapi-ir/fern/definition/finalIr.yml b/packages/cli/api-importers/openapi/openapi-ir/fern/definition/finalIr.yml index 29be54ee9a73..bb2159509cdd 100644 --- a/packages/cli/api-importers/openapi/openapi-ir/fern/definition/finalIr.yml +++ b/packages/cli/api-importers/openapi/openapi-ir/fern/definition/finalIr.yml @@ -828,6 +828,12 @@ types: - commons.WithInline properties: discriminantProperty: string + discriminantPropertyNameOverride: + type: optional + docs: | + Populated by `x-fern-property-name` on a discriminator object. + When set, this is used as the code generation name while + discriminantProperty is used as the wire format. 
commonProperties: list schemas: map diff --git a/packages/cli/api-importers/openapi/openapi-ir/fern/definition/parseIr.yml b/packages/cli/api-importers/openapi/openapi-ir/fern/definition/parseIr.yml index e27ba39aef62..dd0b821c37e6 100644 --- a/packages/cli/api-importers/openapi/openapi-ir/fern/definition/parseIr.yml +++ b/packages/cli/api-importers/openapi/openapi-ir/fern/definition/parseIr.yml @@ -365,6 +365,12 @@ types: - commons.WithInline properties: discriminantProperty: string + discriminantPropertyNameOverride: + type: optional + docs: | + Populated by `x-fern-property-name` on a discriminator object. + When set, this is used as the code generation name while + discriminantProperty is used as the wire format. commonProperties: list schemas: map diff --git a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/finalIr/types/DiscriminatedOneOfSchema.ts b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/finalIr/types/DiscriminatedOneOfSchema.ts index ce8d87839b40..9864519e8405 100644 --- a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/finalIr/types/DiscriminatedOneOfSchema.ts +++ b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/finalIr/types/DiscriminatedOneOfSchema.ts @@ -16,6 +16,12 @@ export interface DiscriminatedOneOfSchema FernOpenapiIr.WithTitle, FernOpenapiIr.WithInline { discriminantProperty: string; + /** + * Populated by `x-fern-property-name` on a discriminator object. + * When set, this is used as the code generation name while + * discriminantProperty is used as the wire format. + */ + discriminantPropertyNameOverride: string | undefined; commonProperties: FernOpenapiIr.CommonProperty[]; schemas: Record; } diff --git a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts index 7a88ccbc60fb..dcc0c7d39973 100644 --- a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts +++ b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/api/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts @@ -16,6 +16,12 @@ export interface DiscriminatedOneOfSchemaWithExample FernOpenapiIr.WithTitle, FernOpenapiIr.WithInline { discriminantProperty: string; + /** + * Populated by `x-fern-property-name` on a discriminator object. + * When set, this is used as the code generation name while + * discriminantProperty is used as the wire format. 
+ */ + discriminantPropertyNameOverride: string | undefined; commonProperties: FernOpenapiIr.CommonPropertyWithExample[]; schemas: Record; } diff --git a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/finalIr/types/DiscriminatedOneOfSchema.ts b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/finalIr/types/DiscriminatedOneOfSchema.ts index 44d10af94fbf..a87a5aef6cb2 100644 --- a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/finalIr/types/DiscriminatedOneOfSchema.ts +++ b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/finalIr/types/DiscriminatedOneOfSchema.ts @@ -21,6 +21,7 @@ export const DiscriminatedOneOfSchema: core.serialization.ObjectSchema< > = core.serialization .objectWithoutOptionalProperties({ discriminantProperty: core.serialization.string(), + discriminantPropertyNameOverride: core.serialization.string().optional(), commonProperties: core.serialization.list(core.serialization.lazyObject(() => serializers.CommonProperty)), schemas: core.serialization.record( core.serialization.string(), @@ -50,6 +51,7 @@ export declare namespace DiscriminatedOneOfSchema { WithTitle.Raw, WithInline.Raw { discriminantProperty: string; + discriminantPropertyNameOverride?: string | null; commonProperties: serializers.CommonProperty.Raw[]; schemas: Record; } diff --git a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts index 98ada87daf84..eb3f03a6357b 100644 --- a/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts +++ b/packages/cli/api-importers/openapi/openapi-ir/src/sdk/serialization/resources/parseIr/types/DiscriminatedOneOfSchemaWithExample.ts @@ -21,6 +21,7 @@ export const DiscriminatedOneOfSchemaWithExample: core.serialization.ObjectSchem > = core.serialization .objectWithoutOptionalProperties({ discriminantProperty: core.serialization.string(), + discriminantPropertyNameOverride: core.serialization.string().optional(), commonProperties: core.serialization.list( core.serialization.lazyObject(() => serializers.CommonPropertyWithExample), ), @@ -52,6 +53,7 @@ export declare namespace DiscriminatedOneOfSchemaWithExample { WithTitle.Raw, WithInline.Raw { discriminantProperty: string; + discriminantPropertyNameOverride?: string | null; commonProperties: serializers.CommonPropertyWithExample.Raw[]; schemas: Record; } diff --git a/packages/cli/cli/versions.yml b/packages/cli/cli/versions.yml index ecf38a1a1e29..ecbaf94d41e1 100644 --- a/packages/cli/cli/versions.yml +++ b/packages/cli/cli/versions.yml @@ -1,4 +1,36 @@ # yaml-language-server: $schema=../../../fern-versions-yml.schema.json +- version: 3.42.1 + changelogEntry: + - summary: | + Rename java-model generator to fern-java-model. + type: fix + createdAt: "2026-01-14" + irVersion: 63 + +- version: 3.42.0 + changelogEntry: + - summary: | + Add placeholder page for python-docs in `fern docs dev`. When running local development, a helpful placeholder page is shown explaining that Python library documentation requires `fern generate --docs` or `fern generate --docs --preview` to generate. 
+ type: feat + createdAt: "2026-01-14" + irVersion: 63 + +- version: 3.41.1 + changelogEntry: + - summary: | + Support the `ai-examples` and `ai-example-style-instructions` configuration options previously under `experimental` via a new top-level `ai-examples` block in `docs.yml`, with `enabled` and `style` fields. + type: chore + createdAt: "2026-01-14" + irVersion: 63 + +- version: 3.41.0 + changelogEntry: + - summary: | + Add support for `x-fern-property-name` extension on OpenAPI discriminators. This allows specifying a code-generation-friendly name for discriminant properties that have non-alphanumeric wire values (e.g., `@type`). The extension can be applied directly in the OpenAPI spec or via an overrides file. + type: feat + createdAt: "2026-01-14" + irVersion: 63 + - version: 3.40.1 changelogEntry: - summary: | diff --git a/packages/cli/configuration-loader/src/generators-yml/GeneratorName.ts b/packages/cli/configuration-loader/src/generators-yml/GeneratorName.ts index 578faf6bf7b8..2fff392612f5 100644 --- a/packages/cli/configuration-loader/src/generators-yml/GeneratorName.ts +++ b/packages/cli/configuration-loader/src/generators-yml/GeneratorName.ts @@ -5,7 +5,7 @@ export const GeneratorName = { TYPESCRIPT_BROWSER_SDK: "fernapi/fern-typescript-browser-sdk", TYPESCRIPT_EXPRESS: "fernapi/fern-typescript-express", JAVA: "fernapi/fern-java", - JAVA_MODEL: "fernapi/java-model", + JAVA_MODEL: "fernapi/fern-java-model", JAVA_SDK: "fernapi/fern-java-sdk", JAVA_SPRING: "fernapi/fern-java-spring", PYTHON_FASTAPI: "fernapi/fern-fastapi-server", diff --git a/packages/cli/configuration-loader/src/generators-yml/getGeneratorVersions.ts b/packages/cli/configuration-loader/src/generators-yml/getGeneratorVersions.ts index ce2700f1edb0..4aa7fd9da666 100644 --- a/packages/cli/configuration-loader/src/generators-yml/getGeneratorVersions.ts +++ b/packages/cli/configuration-loader/src/generators-yml/getGeneratorVersions.ts @@ -73,7 +73,7 @@ function getGeneratorMetadataFromName(generatorName: string, context?: TaskConte // Java case "fern-java-sdk": return "java-sdk"; - case "java-model": + case "fern-java-model": return "java-model"; case "fern-java-spring": return "java-spring"; diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/AiExamplesConfig.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/AiExamplesConfig.ts new file mode 100644 index 000000000000..18c6f1fd8348 --- /dev/null +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/AiExamplesConfig.ts @@ -0,0 +1,10 @@ +/** + * This file was auto-generated by Fern from our API Definition. + */ + +export interface AiExamplesConfig { + /** Enable AI-powered example enhancement for API documentation. When enabled, API examples will be enhanced with AI-generated content to provide more comprehensive and realistic examples. */ + enabled?: boolean; + /** Custom styling instructions for AI-generated examples. When provided, these instructions will guide the AI in generating examples that match your preferred style, naming conventions, or domain-specific terminology. Limited to 500 characters.
*/ + style?: string; +} diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/DocsConfiguration.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/DocsConfiguration.ts index a7ff36ff1b88..9d690f3281b2 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/DocsConfiguration.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/DocsConfiguration.ts @@ -35,6 +35,8 @@ export interface DocsConfiguration { languages?: FernDocsConfig.Language[]; aiChat?: FernDocsConfig.AiChatConfig; aiSearch?: FernDocsConfig.AiChatConfig; + /** Configure AI-powered example enhancement for API documentation. When enabled, API examples will be enhanced with AI-generated content to provide more comprehensive and realistic examples. */ + aiExamples?: FernDocsConfig.AiExamplesConfig; metadata?: FernDocsConfig.MetadataConfig; redirects?: FernDocsConfig.RedirectConfig[]; logo?: FernDocsConfig.LogoConfiguration; diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/ExperimentalConfig.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/ExperimentalConfig.ts index 2679a48aac6f..647062d725ae 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/ExperimentalConfig.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/ExperimentalConfig.ts @@ -24,8 +24,16 @@ export interface ExperimentalConfig { * Enable dynamic snippets in `docs.yml`, then configure them by following the SDK snippets setup instructions. */ dynamicSnippets?: boolean; - /** Enable AI-powered example enhancement for API documentation. When enabled, API examples will be enhanced with AI-generated content to provide more comprehensive and realistic examples. */ + /** + * Enable AI-powered example enhancement for API documentation. When enabled, API examples will be enhanced with AI-generated content to provide more comprehensive and realistic examples. + * + * DEPRECATED: Use the top-level `ai-examples` property instead. + */ aiExamples?: boolean; - /** Custom styling instructions for AI-generated examples. When provided, these instructions will guide the AI in generating examples that match your preferred style, naming conventions, or domain-specific terminology. Limited to 500 characters. */ + /** + * Custom styling instructions for AI-generated examples. When provided, these instructions will guide the AI in generating examples that match your preferred style, naming conventions, or domain-specific terminology. Limited to 500 characters. + * + * DEPRECATED: Use `style` under the top-level `ai-examples` property instead. + */ aiExampleStyleInstructions?: string; } diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/PlaygroundSettings.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/PlaygroundSettings.ts index 6f416ef10101..b8960d1d7638 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/PlaygroundSettings.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/PlaygroundSettings.ts @@ -5,12 +5,11 @@ import * as FernDocsConfig from "../../../index"; export interface PlaygroundSettings { + hidden?: boolean; /** A list of environment IDs that are allowed to be used in the playground. If not provided, all environments are allowed.
And if the provided list is empty, the playground should be disabled. */ environments?: string[]; button?: FernDocsConfig.PlaygroundButtonSettings; oauth?: boolean; /** The maximum number of websocket messages per connection in the playground. */ limitWebsocketMessagesPerConnection?: number; - /** Whether the playground should be hidden for this endpoint. */ - hidden?: boolean; } diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/index.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/index.ts index 5f571a310cea..7c04b8b8c6d4 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/index.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/api/resources/docs/types/index.ts @@ -7,6 +7,7 @@ export * from "./IntercomConfig"; export * from "./PostHogConfig"; export * from "./GtmConfig"; export * from "./GoogleAnalytics4Config"; +export * from "./AiExamplesConfig"; export * from "./DocsConfiguration"; export * from "./TabId"; export * from "./PageActionsConfig"; diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/Node18UniversalStreamWrapper.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/Node18UniversalStreamWrapper.ts index 53fd559bf3ca..adf5ccb18d36 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/Node18UniversalStreamWrapper.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/Node18UniversalStreamWrapper.ts @@ -2,10 +2,12 @@ import type { Writable } from "readable-stream"; import { EventCallback, StreamWrapper } from "./chooseStreamWrapper"; -export class Node18UniversalStreamWrapper - implements - StreamWrapper | Writable | WritableStream, ReadFormat> -{ +export class Node18UniversalStreamWrapper< + ReadFormat extends Uint8Array | Uint16Array | Uint32Array, +> implements StreamWrapper< + Node18UniversalStreamWrapper | Writable | WritableStream, + ReadFormat +> { private readableStream: ReadableStream; private reader: ReadableStreamDefaultReader; private events: Record; diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/UndiciStreamWrapper.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/UndiciStreamWrapper.ts index 9c857be6b8da..9bc28ead096b 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/UndiciStreamWrapper.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/core/fetcher/stream-wrappers/UndiciStreamWrapper.ts @@ -2,9 +2,10 @@ import { StreamWrapper } from "./chooseStreamWrapper"; type EventCallback = (data?: any) => void; -export class UndiciStreamWrapper - implements StreamWrapper | WritableStream, ReadFormat> -{ +export class UndiciStreamWrapper implements StreamWrapper< + UndiciStreamWrapper | WritableStream, + ReadFormat +> { private readableStream: ReadableStream; private reader: ReadableStreamDefaultReader; private events: Record; @@ -156,7 +157,7 @@ export class UndiciStreamWrapper { - const chunks: BlobPart[] = []; + const chunks: ReadFormat[] = []; while (true) { const { done, value } = await this.reader.read(); @@ -164,12 +165,12 @@ export class UndiciStreamWrapper(): Promise { diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/AiExamplesConfig.ts 
b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/AiExamplesConfig.ts new file mode 100644 index 000000000000..44135f350f85 --- /dev/null +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/AiExamplesConfig.ts @@ -0,0 +1,22 @@ +/** + * This file was auto-generated by Fern from our API Definition. + */ + +import * as serializers from "../../../index"; +import * as FernDocsConfig from "../../../../api/index"; +import * as core from "../../../../core"; + +export const AiExamplesConfig: core.serialization.ObjectSchema< + serializers.AiExamplesConfig.Raw, + FernDocsConfig.AiExamplesConfig +> = core.serialization.object({ + enabled: core.serialization.boolean().optional(), + style: core.serialization.string().optional(), +}); + +export declare namespace AiExamplesConfig { + export interface Raw { + enabled?: boolean | null; + style?: string | null; + } +} diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/DocsConfiguration.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/DocsConfiguration.ts index b302cae31f46..21672ba1753b 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/DocsConfiguration.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/DocsConfiguration.ts @@ -22,6 +22,7 @@ import { ExperimentalConfig } from "./ExperimentalConfig"; import { ProgrammingLanguage } from "./ProgrammingLanguage"; import { Language } from "./Language"; import { AiChatConfig } from "./AiChatConfig"; +import { AiExamplesConfig } from "./AiExamplesConfig"; import { MetadataConfig } from "./MetadataConfig"; import { RedirectConfig } from "./RedirectConfig"; import { LogoConfiguration } from "./LogoConfiguration"; @@ -57,6 +58,7 @@ export const DocsConfiguration: core.serialization.ObjectSchema< languages: core.serialization.list(Language).optional(), aiChat: core.serialization.property("ai-chat", AiChatConfig.optional()), aiSearch: core.serialization.property("ai-search", AiChatConfig.optional()), + aiExamples: core.serialization.property("ai-examples", AiExamplesConfig.optional()), metadata: MetadataConfig.optional(), redirects: core.serialization.list(RedirectConfig).optional(), logo: LogoConfiguration.optional(), @@ -92,6 +94,7 @@ export declare namespace DocsConfiguration { languages?: Language.Raw[] | null; "ai-chat"?: AiChatConfig.Raw | null; "ai-search"?: AiChatConfig.Raw | null; + "ai-examples"?: AiExamplesConfig.Raw | null; metadata?: MetadataConfig.Raw | null; redirects?: RedirectConfig.Raw[] | null; logo?: LogoConfiguration.Raw | null; diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/PlaygroundSettings.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/PlaygroundSettings.ts index 17390cb04e56..3fe4df31696c 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/PlaygroundSettings.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/PlaygroundSettings.ts @@ -11,6 +11,7 @@ export const PlaygroundSettings: core.serialization.ObjectSchema< serializers.PlaygroundSettings.Raw, FernDocsConfig.PlaygroundSettings > = core.serialization.object({ + hidden: core.serialization.boolean().optional(), environments: core.serialization.list(core.serialization.string()).optional(), 
button: PlaygroundButtonSettings.optional(), oauth: core.serialization.boolean().optional(), @@ -18,15 +19,14 @@ export const PlaygroundSettings: core.serialization.ObjectSchema< "limit-websocket-messages-per-connection", core.serialization.number().optional(), ), - hidden: core.serialization.boolean().optional(), }); export declare namespace PlaygroundSettings { export interface Raw { + hidden?: boolean | null; environments?: string[] | null; button?: PlaygroundButtonSettings.Raw | null; oauth?: boolean | null; "limit-websocket-messages-per-connection"?: number | null; - hidden?: boolean | null; } } diff --git a/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/index.ts b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/index.ts index 5f571a310cea..7c04b8b8c6d4 100644 --- a/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/index.ts +++ b/packages/cli/configuration/src/docs-yml/schemas/sdk/serialization/resources/docs/types/index.ts @@ -7,6 +7,7 @@ export * from "./IntercomConfig"; export * from "./PostHogConfig"; export * from "./GtmConfig"; export * from "./GoogleAnalytics4Config"; +export * from "./AiExamplesConfig"; export * from "./DocsConfiguration"; export * from "./TabId"; export * from "./PageActionsConfig"; diff --git a/packages/cli/docs-resolver/src/DocsDefinitionResolver.ts b/packages/cli/docs-resolver/src/DocsDefinitionResolver.ts index bbc39133a054..6128c3e79cfa 100644 --- a/packages/cli/docs-resolver/src/DocsDefinitionResolver.ts +++ b/packages/cli/docs-resolver/src/DocsDefinitionResolver.ts @@ -1128,7 +1128,7 @@ export class DocsDefinitionResolver { link: async (value) => this.toLinkNode(value), changelog: async (value) => this.toChangelogNode(value, parentSlug), // Library sections are handled by FDR during registration, returning placeholder - pythonDocsSection: async () => this.toPythonDocsSectionPlaceholder(parentSlug) + pythonDocsSection: async (value) => this.toPythonDocsSectionPlaceholder(value, parentSlug) }); } @@ -1154,7 +1154,7 @@ export class DocsDefinitionResolver { link: async (value) => this.toLinkNode(value), changelog: async (value) => this.toChangelogNode(value, parentSlug, hideChildren), // Library sections are handled by FDR during registration, returning placeholder - pythonDocsSection: async () => this.toPythonDocsSectionPlaceholder(parentSlug) + pythonDocsSection: async (value) => this.toPythonDocsSectionPlaceholder(value, parentSlug) }); } @@ -1351,28 +1351,80 @@ export class DocsDefinitionResolver { * The CLI starts the generation job, polls for completion, and passes the jobId to FDR. * FDR then merges the generated Python docs into the navigation. * - * This placeholder returns a hidden section that will be replaced/augmented by FDR. + * In dev mode (fern docs dev), this placeholder returns a visible page with helpful content + * explaining that Python library docs are only generated during `fern generate --docs`. + * + * In production mode, FDR replaces/augments this with the actual generated documentation. 
*/ - private toPythonDocsSectionPlaceholder(parentSlug: FernNavigation.V1.SlugGenerator): FernNavigation.V1.SectionNode { - // Return a hidden placeholder section - FDR will append the actual Python docs - const slug = parentSlug.apply({ urlSlug: "python-docs", skipUrlSlug: true }); + private toPythonDocsSectionPlaceholder( + item: docsYml.DocsNavigationItem.PythonDocsSection, + parentSlug: FernNavigation.V1.SlugGenerator + ): FernNavigation.V1.PageNode { + const title = item.title ?? "Python Reference"; + const urlSlug = item.slug ?? "python-docs"; + const slug = parentSlug.apply({ urlSlug }); + + // Create a synthetic page ID for the placeholder + const syntheticPageId = `__python-docs-placeholder-${urlSlug}__.mdx`; + const pageId = FernNavigation.PageId(syntheticPageId); + + // Add placeholder markdown content to parsedDocsConfig.pages + const placeholderMarkdown = `--- +title: ${title} +--- + + +Python library documentation is not yet supported with \`fern docs dev\`. This feature will be added in a future release. To view the generated documentation, run \`fern generate --docs --preview\`. + + +## About Python Library Docs + +When you publish your documentation using \`fern generate --docs\`, Fern will: + +1. Clone and analyze your Python repository from: \`${item.githubUrl}\` +2. Parse the Python source code to extract docstrings and type information +3. Generate comprehensive API reference documentation +4. Integrate the generated docs into your documentation site + +## How to Generate + +To generate the full Python library documentation, run: + +\`\`\`bash +fern generate --docs +\`\`\` + +Or to preview without publishing: + +\`\`\`bash +fern generate --docs --preview +\`\`\` + +The generated documentation will replace this placeholder page with complete API reference content including: + +- Module and package documentation +- Class and function references +- Type annotations and signatures +- Docstring content +`; + + this.parsedDocsConfig.pages[RelativeFilePath.of(syntheticPageId)] = placeholderMarkdown; + + const id = this.#idgen.get(pageId); return { - type: "section", - id: this.#idgen.get("python-docs-placeholder"), + id, + type: "page", slug: slug.get(), - title: "Python Reference", - collapsed: false, - hidden: true, // Hidden - actual content comes from FDR - children: [], - overviewPageId: undefined, + title, icon: undefined, + hidden: false, viewers: undefined, orphaned: undefined, + pageId, authed: undefined, - pointsTo: undefined, - availability: undefined, + noindex: true, // Don't index placeholder pages featureFlags: undefined, - noindex: undefined + availability: undefined }; } diff --git a/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__ChatResponse.json b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__ChatResponse.json new file mode 100644 index 000000000000..c3af17705006 --- /dev/null +++ b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__ChatResponse.json @@ -0,0 +1,27 @@ +{ + "type": "object", + "properties": { + "message": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "finish_reason": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "definitions": {} +} \ No newline at end of file diff --git 
a/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__ChatStreamEvent.json b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__ChatStreamEvent.json new file mode 100644 index 000000000000..e46a9dea1d35 --- /dev/null +++ b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__ChatStreamEvent.json @@ -0,0 +1,27 @@ +{ + "type": "object", + "properties": { + "delta": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "tokens": { + "oneOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "definitions": {} +} \ No newline at end of file diff --git a/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__Message.json b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__Message.json new file mode 100644 index 000000000000..7b38e990c271 --- /dev/null +++ b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__Message.json @@ -0,0 +1,26 @@ +{ + "type": "object", + "properties": { + "role": { + "$ref": "#/definitions/MessageRole" + }, + "content": { + "type": "string" + } + }, + "required": [ + "role", + "content" + ], + "additionalProperties": false, + "definitions": { + "MessageRole": { + "type": "string", + "enum": [ + "user", + "assistant", + "system" + ] + } + } +} \ No newline at end of file diff --git a/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__MessageRole.json b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__MessageRole.json new file mode 100644 index 000000000000..324021c7059b --- /dev/null +++ b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/python-streaming-parameter-openapi/type__MessageRole.json @@ -0,0 +1,9 @@ +{ + "type": "string", + "enum": [ + "user", + "assistant", + "system" + ], + "definitions": {} +} \ No newline at end of file diff --git a/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/ts-extra-properties/type__User.json b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/ts-extra-properties/type__User.json new file mode 100644 index 000000000000..398b1753454c --- /dev/null +++ b/packages/cli/fern-definition/ir-to-jsonschema/src/__test__/__snapshots__/ts-extra-properties/type__User.json @@ -0,0 +1,33 @@ +{ + "type": "object", + "properties": { + "id": { + "type": "string" + }, + "user_name": { + "type": "string" + }, + "created_at": { + "type": "string", + "format": "date-time" + }, + "updated_at": { + "oneOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ] + } + }, + "required": [ + "id", + "user_name", + "created_at" + ], + "additionalProperties": true, + "definitions": {} +} \ No newline at end of file diff --git a/packages/cli/generation/remote-generation/remote-workspace-runner/src/publishDocs.ts b/packages/cli/generation/remote-generation/remote-workspace-runner/src/publishDocs.ts index c7837df0ad0e..207ceffbca70 100644 --- a/packages/cli/generation/remote-generation/remote-workspace-runner/src/publishDocs.ts +++ 
b/packages/cli/generation/remote-generation/remote-workspace-runner/src/publishDocs.ts @@ -261,7 +261,7 @@ export async function publishDocs({ const aiEnhancerConfig = getAIEnhancerConfig( withAiExamples, - docsWorkspace.config.experimental?.aiExampleStyleInstructions + docsWorkspace.config.aiExamples?.style ?? docsWorkspace.config.experimental?.aiExampleStyleInstructions ); if (aiEnhancerConfig && workspace) { const sources = workspace.getSources(); diff --git a/packages/cli/generation/remote-generation/remote-workspace-runner/src/runRemoteGenerationForDocsWorkspace.ts b/packages/cli/generation/remote-generation/remote-workspace-runner/src/runRemoteGenerationForDocsWorkspace.ts index ea26990eac46..6ab2a51efa3f 100644 --- a/packages/cli/generation/remote-generation/remote-workspace-runner/src/runRemoteGenerationForDocsWorkspace.ts +++ b/packages/cli/generation/remote-generation/remote-workspace-runner/src/runRemoteGenerationForDocsWorkspace.ts @@ -100,7 +100,8 @@ export async function runRemoteGenerationForDocsWorkspace({ isPrivate: maybeInstance.private, disableTemplates, skipUpload, - withAiExamples: docsWorkspace.config.experimental?.aiExamples ?? true, + withAiExamples: + docsWorkspace.config.aiExamples?.enabled ?? docsWorkspace.config.experimental?.aiExamples ?? true, targetAudiences: maybeInstance.audiences ? Array.isArray(maybeInstance.audiences) ? maybeInstance.audiences diff --git a/packages/cli/register/package.json b/packages/cli/register/package.json index b46c3f8dec80..fb2b2f9f62f5 100644 --- a/packages/cli/register/package.json +++ b/packages/cli/register/package.json @@ -51,8 +51,6 @@ "@fern-api/v3-importer-commons": "workspace:*", "@fern-api/workspace-loader": "workspace:*", "@types/js-yaml": "^4.0.8", - "boxen": "^7.1.1", - "chalk": "^5.3.0", "js-yaml": "^4.1.1", "lodash-es": "^4.17.21", "openapi-types": "^12.1.3" diff --git a/packages/cli/register/src/ai-example-enhancer/enhanceExamplesWithAI.ts b/packages/cli/register/src/ai-example-enhancer/enhanceExamplesWithAI.ts index d8afabea48be..de440ac6d905 100644 --- a/packages/cli/register/src/ai-example-enhancer/enhanceExamplesWithAI.ts +++ b/packages/cli/register/src/ai-example-enhancer/enhanceExamplesWithAI.ts @@ -4,8 +4,6 @@ import { FdrAPI as FdrCjsSdk } from "@fern-api/fdr-sdk"; import { AbsoluteFilePath } from "@fern-api/fs-utils"; import { type EndpointSelector, type HttpMethod, OpenAPIPruner } from "@fern-api/openapi-pruner"; import { TaskContext } from "@fern-api/task-context"; -import boxen from "boxen"; -import chalk from "chalk"; import { readFile, writeFile } from "fs/promises"; import * as yaml from "js-yaml"; import { OpenAPIV3 } from "openapi-types"; @@ -156,9 +154,6 @@ class ConcurrentEndpointProcessor { } } -// Static flag to ensure the informative message is only logged once per process -let hasLoggedInfoMessage = false; - interface BodyV3 { type?: "json" | "stream" | "sse" | "filename"; value?: unknown; @@ -395,24 +390,6 @@ async function performAIEnhancement( sourceFilePath?: AbsoluteFilePath, apiName?: string ): Promise { - // Log informative message only once per process - if (!hasLoggedInfoMessage) { - const message = - chalk.blue("Notice: new feature added (experimental)!\n\n") + - "We are generating realistic examples for endpoints in your spec.\n" + - "This will not override your current examples. Please wait a moment.\n\n" + - "Future runs will use saved examples. 
If you wish to override the content of the\n" + - "examples, please edit and commit auto-generated `ai_examples_override.yml` files."; - const boxedMessage = boxen(message, { - padding: 1, - textAlignment: "left", - borderColor: "blue", - borderStyle: "round" - }); - context.logger.info("\n" + boxedMessage + "\n"); - hasLoggedInfoMessage = true; - } - const enhancer = new LambdaExampleEnhancer(config, context, token, organizationId); const circuitBreaker = new CircuitBreaker(); diff --git a/packages/cli/workspace/loader/src/docs-yml.schema.json b/packages/cli/workspace/loader/src/docs-yml.schema.json index d476c5ec8f9d..1ef92a792dda 100644 --- a/packages/cli/workspace/loader/src/docs-yml.schema.json +++ b/packages/cli/workspace/loader/src/docs-yml.schema.json @@ -195,6 +195,16 @@ } ] }, + "ai-examples": { + "oneOf": [ + { + "$ref": "#/definitions/docs.AiExamplesConfig" + }, + { + "type": "null" + } + ] + }, "metadata": { "oneOf": [ { @@ -1552,6 +1562,16 @@ "docs.PlaygroundSettings": { "type": "object", "properties": { + "hidden": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, "environments": { "oneOf": [ { @@ -3911,6 +3931,32 @@ }, "additionalProperties": false }, + "docs.AiExamplesConfig": { + "type": "object", + "properties": { + "enabled": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, + "style": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false + }, "docs.TwitterCardSetting": { "type": "string", "enum": [ diff --git a/packages/cli/yaml/docs-validator/src/docsAst/products-yml.schema.json b/packages/cli/yaml/docs-validator/src/docsAst/products-yml.schema.json index 5eac08721a5e..b7482650050d 100644 --- a/packages/cli/yaml/docs-validator/src/docsAst/products-yml.schema.json +++ b/packages/cli/yaml/docs-validator/src/docsAst/products-yml.schema.json @@ -621,6 +621,16 @@ "docs.PlaygroundSettings": { "type": "object", "properties": { + "hidden": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, "environments": { "oneOf": [ { diff --git a/packages/cli/yaml/docs-validator/src/docsAst/versions-yml.schema.json b/packages/cli/yaml/docs-validator/src/docsAst/versions-yml.schema.json index 5eac08721a5e..b7482650050d 100644 --- a/packages/cli/yaml/docs-validator/src/docsAst/versions-yml.schema.json +++ b/packages/cli/yaml/docs-validator/src/docsAst/versions-yml.schema.json @@ -621,6 +621,16 @@ "docs.PlaygroundSettings": { "type": "object", "properties": { + "hidden": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, "environments": { "oneOf": [ { diff --git a/packages/cli/yaml/docs-validator/src/docsAst/visitDocsConfigFileYamlAst.ts b/packages/cli/yaml/docs-validator/src/docsAst/visitDocsConfigFileYamlAst.ts index cefbb9df1e74..96183a4cd208 100644 --- a/packages/cli/yaml/docs-validator/src/docsAst/visitDocsConfigFileYamlAst.ts +++ b/packages/cli/yaml/docs-validator/src/docsAst/visitDocsConfigFileYamlAst.ts @@ -43,6 +43,16 @@ export async function visitDocsConfigFileYamlAst({ analytics: noop, aiChat: noop, aiSearch: noop, + aiExamples: async (aiExamples) => { + // Handle nested structure (new format) + if (aiExamples != null && typeof aiExamples === "object") { + await visitObjectAsync(aiExamples, { + enabled: noop, + style: noop + }); + } + // If it's a boolean, it's the old format which is handled by noop + }, pageActions: noop, announcement: noop, backgroundImage: async (background) => { diff --git 
a/packages/commons/mock-utils/index.ts b/packages/commons/mock-utils/index.ts index d9f1cb40211d..6953e1b1eaaf 100644 --- a/packages/commons/mock-utils/index.ts +++ b/packages/commons/mock-utils/index.ts @@ -20,6 +20,7 @@ export interface WireMockMapping { method: string; pathParameters?: Record<string, string>; formParameters?: Record<string, string>; + bodyPatterns?: Array<{ matchesJsonPath: string }>; }; response: { status: number; @@ -44,15 +45,48 @@ export class WireMock { public convertToWireMock(ir: IntermediateRepresentation): WireMockStubMapping { const mappings: WireMockMapping[] = []; - // Iterate through all services and their endpoints + // First pass: identify endpoints that share the same URL path and method + // This is used to detect when we need to add body patterns to differentiate + // between streaming and non-streaming variants of the same endpoint + const endpointsByPathAndMethod = new Map<string, Array<{ endpoint: FernIr.HttpEndpoint; isSse: boolean }>>(); + + for (const service of Object.values(ir.services)) { + for (const endpoint of service.endpoints) { + const urlPath = this.buildUrlPathTemplate(endpoint); + const key = `${endpoint.method}:${urlPath}`; + const exampleWrapper = endpoint.userSpecifiedExamples[0] ?? endpoint.autogeneratedExamples[0]; + const example = exampleWrapper?.example; + const isSse = example?.response?.type === "ok" && example?.response?.value?.type === "sse"; + + if (!endpointsByPathAndMethod.has(key)) { + endpointsByPathAndMethod.set(key, []); + } + endpointsByPathAndMethod.get(key)?.push({ endpoint, isSse }); + } + } + + // Determine which URL paths have both SSE and non-SSE endpoints (need body pattern matching) + const pathsNeedingBodyPatterns = new Set<string>(); + for (const [key, endpoints] of endpointsByPathAndMethod) { + const hasSse = endpoints.some((e) => e.isSse); + const hasNonSse = endpoints.some((e) => !e.isSse); + if (hasSse && hasNonSse) { + pathsNeedingBodyPatterns.add(key); + } + } + + // Second pass: generate mappings with body patterns only where needed for (const service of Object.values(ir.services)) { for (const endpoint of service.endpoints) { // Use the first available example (prefer user-specified over autogenerated) - let exampleWrapper = endpoint.userSpecifiedExamples[0] ?? endpoint.autogeneratedExamples[0]; + const exampleWrapper = endpoint.userSpecifiedExamples[0] ??
endpoint.autogeneratedExamples[0]; if (exampleWrapper) { const example = exampleWrapper.example; - const mapping = this.convertExampleToMapping(ir, service, endpoint, example); + const urlPath = this.buildUrlPathTemplate(endpoint); + const key = `${endpoint.method}:${urlPath}`; + const needsBodyPattern = pathsNeedingBodyPatterns.has(key); + const mapping = this.convertExampleToMapping(ir, service, endpoint, example, needsBodyPattern); if (mapping) { mappings.push(mapping); } @@ -103,7 +137,8 @@ export class WireMock { ir: IntermediateRepresentation, service: FernIr.HttpService, endpoint: FernIr.HttpEndpoint, - example?: FernIr.ExampleEndpointCall + example: FernIr.ExampleEndpointCall | undefined, + needsBodyPattern: boolean ): WireMockMapping | null { // Build URL path template const urlPathTemplate = this.buildUrlPathTemplate(endpoint); @@ -190,6 +225,10 @@ export class WireMock { const name = `${endpointName} - ${exampleName}`; const uuid = this.deterministicUUIDv4(`${name}-${endpoint.id}-${urlPathTemplate}-${endpoint.method}`); + // Only add body patterns when there are both SSE and non-SSE endpoints for the same URL path + // This allows WireMock to differentiate between streaming and non-streaming requests + const shouldAddBodyPattern = needsBodyPattern && isSseResponse; + const mapping: WireMockMapping = { id: uuid, name, @@ -197,7 +236,10 @@ export class WireMock { urlPathTemplate, method: endpoint.method, pathParameters: Object.keys(pathParameters).length > 0 ? pathParameters : undefined, - formParameters: {} + formParameters: {}, + // For SSE endpoints that share a URL path with non-SSE endpoints, + // add body pattern to match stream: true + bodyPatterns: shouldAddBodyPattern ? [{ matchesJsonPath: "$[?(@.stream == true)]" }] : undefined }, response: { status, @@ -208,7 +250,9 @@ export class WireMock { }, uuid, persistent: true, - priority: 3, + // SSE endpoints get higher priority (lower number) so they match first when stream: true + // Only set different priorities when there are conflicting endpoints + priority: shouldAddBodyPattern ? 
2 : 3, metadata: { mocklab: { created: { diff --git a/packages/ir-sdk/fern/apis/ir-types-latest/generators.yml b/packages/ir-sdk/fern/apis/ir-types-latest/generators.yml index e4f031eb05b7..990738a7622e 100644 --- a/packages/ir-sdk/fern/apis/ir-types-latest/generators.yml +++ b/packages/ir-sdk/fern/apis/ir-types-latest/generators.yml @@ -38,7 +38,7 @@ groups: noOptionalProperties: true java: generators: - - name: fernapi/java-model + - name: fernapi/fern-java-model version: 1.8.5 output: location: maven diff --git a/packages/ir-sdk/fern/fern.config.json b/packages/ir-sdk/fern/fern.config.json index 182976bcf623..ab6f4de58348 100644 --- a/packages/ir-sdk/fern/fern.config.json +++ b/packages/ir-sdk/fern/fern.config.json @@ -1,4 +1,4 @@ { "organization": "fern", - "version": "0.62.4" + "version": "3.37.6" } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5d0862d2766a..48e22b69cca4 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -6768,12 +6768,6 @@ importers: '@types/js-yaml': specifier: ^4.0.8 version: 4.0.9 - boxen: - specifier: ^7.1.1 - version: 7.1.1 - chalk: - specifier: ^5.3.0 - version: 5.6.2 js-yaml: specifier: ^4.1.1 version: 4.1.1 diff --git a/product-yml.schema.json b/product-yml.schema.json index 5eac08721a5e..b7482650050d 100644 --- a/product-yml.schema.json +++ b/product-yml.schema.json @@ -621,6 +621,16 @@ "docs.PlaygroundSettings": { "type": "object", "properties": { + "hidden": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, "environments": { "oneOf": [ { diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.fern/metadata.json b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.fern/metadata.json new file mode 100644 index 000000000000..4d4842540bc7 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.fern/metadata.json @@ -0,0 +1,9 @@ +{ + "cliVersion": "DUMMY", + "generatorName": "fernapi/fern-python-sdk", + "generatorVersion": "latest", + "generatorConfig": { + "enable_wire_tests": true + }, + "sdkVersion": "0.0.1" +} \ No newline at end of file diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.github/workflows/ci.yml b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.github/workflows/ci.yml new file mode 100644 index 000000000000..ffd2d8acab24 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.github/workflows/ci.yml @@ -0,0 +1,60 @@ +name: ci +on: [push] +jobs: + compile: + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + - name: Bootstrap poetry + run: | + curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1 + - name: Install dependencies + run: poetry install + - name: Compile + run: poetry run mypy . + test: + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + - name: Bootstrap poetry + run: | + curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1 + - name: Install dependencies + run: poetry install + + - name: Test + run: poetry run pytest -rP -n auto . 
+ + publish: + needs: [compile, test] + if: github.event_name == 'push' && contains(github.ref, 'refs/tags/') + runs-on: ubuntu-latest + steps: + - name: Checkout repo + uses: actions/checkout@v4 + - name: Set up python + uses: actions/setup-python@v4 + with: + python-version: 3.9 + - name: Bootstrap poetry + run: | + curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1 + - name: Install dependencies + run: poetry install + - name: Publish to pypi + run: | + poetry config repositories.remote + poetry --no-interaction -v publish --build --repository remote --username "$PYPI_USERNAME" --password "$PYPI_PASSWORD" + env: + PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }} + PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }} diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.gitignore b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.gitignore new file mode 100644 index 000000000000..d2e4ca808d21 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/.gitignore @@ -0,0 +1,5 @@ +.mypy_cache/ +.ruff_cache/ +__pycache__/ +dist/ +poetry.toml diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/README.md b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/README.md new file mode 100644 index 000000000000..cbb297dbb9cf --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/README.md @@ -0,0 +1,192 @@ +# Seed Python Library + +[![fern shield](https://img.shields.io/badge/%F0%9F%8C%BF-Built%20with%20Fern-brightgreen)](https://buildwithfern.com?utm_source=github&utm_medium=github&utm_campaign=readme&utm_source=Seed%2FPython) +[![pypi](https://img.shields.io/pypi/v/fern_python-streaming-parameter-openapi)](https://pypi.python.org/pypi/fern_python-streaming-parameter-openapi) + +The Seed Python library provides convenient access to the Seed APIs from Python. + +## Table of Contents + +- [Installation](#installation) +- [Reference](#reference) +- [Usage](#usage) +- [Async Client](#async-client) +- [Exception Handling](#exception-handling) +- [Streaming](#streaming) +- [Advanced](#advanced) + - [Access Raw Response Data](#access-raw-response-data) + - [Retries](#retries) + - [Timeouts](#timeouts) + - [Custom Client](#custom-client) +- [Contributing](#contributing) + +## Installation + +```sh +pip install fern_python-streaming-parameter-openapi +``` + +## Reference + +A full reference for this library is available [here](./reference.md). + +## Usage + +Instantiate and use the client with the following: + +```python +from seed import SeedApi + +client = SeedApi( + base_url="https://yourhost.com/path/to/api", +) +response = client.chat_stream( + prompt="prompt", +) +for chunk in response.data: + yield chunk +``` + +## Async Client + +The SDK also exports an `async` client so that you can make non-blocking calls to our API. Note that if you are constructing an Async httpx client class to pass into this client, use `httpx.AsyncClient()` instead of `httpx.Client()` (e.g. for the `httpx_client` parameter of this client). 
+ +```python +import asyncio + +from seed import AsyncSeedApi + +client = AsyncSeedApi( + base_url="https://yourhost.com/path/to/api", +) + + +async def main() -> None: + response = await client.chat_stream( + prompt="prompt", + ) + async for chunk in response.data: + yield chunk + + +asyncio.run(main()) +``` + +## Exception Handling + +When the API returns a non-success status code (4xx or 5xx response), a subclass of the following error +will be thrown. + +```python +from seed.core.api_error import ApiError + +try: + client.chat_stream(...) +except ApiError as e: + print(e.status_code) + print(e.body) +``` + +## Streaming + +The SDK supports streaming responses, as well, the response will be a generator that you can loop over. + +```python +from seed import SeedApi + +client = SeedApi( + base_url="https://yourhost.com/path/to/api", +) +response = client.chat_stream( + prompt="prompt", +) +for chunk in response.data: + yield chunk +``` + +## Advanced + +### Access Raw Response Data + +The SDK provides access to raw response data, including headers, through the `.with_raw_response` property. +The `.with_raw_response` property returns a "raw" client that can be used to access the `.headers` and `.data` attributes. + +```python +from seed import SeedApi + +client = SeedApi( + ..., +) +with client.with_raw_response.chat_stream(...) as response: + print(response.headers) # access the response headers + for chunk in response.data: + print(chunk) # access the underlying object(s) +``` + +### Retries + +The SDK is instrumented with automatic retries with exponential backoff. A request will be retried as long +as the request is deemed retryable and the number of retry attempts has not grown larger than the configured +retry limit (default: 2). + +A request is deemed retryable when any of the following HTTP status codes is returned: + +- [408](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408) (Timeout) +- [429](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429) (Too Many Requests) +- [5XX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500) (Internal Server Errors) + +Use the `max_retries` request option to configure this behavior. + +```python +client.chat_stream(..., request_options={ + "max_retries": 1 +}) +``` + +### Timeouts + +The SDK defaults to a 60 second timeout. You can configure this with a timeout option at the client or request level. + +```python + +from seed import SeedApi + +client = SeedApi( + ..., + timeout=20.0, +) + + +# Override timeout for a specific method +client.chat_stream(..., request_options={ + "timeout_in_seconds": 1 +}) +``` + +### Custom Client + +You can override the `httpx` client to customize it for your use-case. Some common use-cases include support for proxies +and transports. + +```python +import httpx +from seed import SeedApi + +client = SeedApi( + ..., + httpx_client=httpx.Client( + proxy="http://my.test.proxy.example.com", + transport=httpx.HTTPTransport(local_address="0.0.0.0"), + ), +) +``` + +## Contributing + +While we value open-source contributions to this SDK, this library is generated programmatically. +Additions made directly to this library would have to be moved over to our generation code, +otherwise they would be overwritten upon the next generated release. Feel free to open a PR as +a proof of concept, but know that we will not be able to merge it as-is. We suggest opening +an issue first to discuss with us! + +On the other hand, contributions to the README are always very welcome! 
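The streaming snippets in the generated README above use `yield chunk`, which only works inside a generator function. As a minimal, self-contained sketch of the same call outside a generator (reusing the `SeedApi` client, `chat_stream` method, and placeholder `base_url` shown in that README), the chunks can simply be printed as they arrive:

```python
from seed import SeedApi

# Placeholder URL taken from the README above; point this at a real server
# (or a WireMock stub) before running.
client = SeedApi(base_url="https://yourhost.com/path/to/api")

# chat_stream returns a streamed response; iterating over `.data`
# consumes the chunks as the server sends them.
response = client.chat_stream(prompt="prompt")
for chunk in response.data:
    print(chunk)
```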
diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/poetry.lock b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/poetry.lock new file mode 100644 index 000000000000..e82d70fb96ed --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/poetry.lock @@ -0,0 +1,773 @@ +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} + +[[package]] +name = "anyio" +version = "4.5.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.5.2-py3-none-any.whl", hash = "sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f"}, + {file = "anyio-4.5.2.tar.gz", hash = "sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "certifi" +version = "2026.1.4" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.7" +files = [ + {file = "certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c"}, + {file = "certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.4" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.1" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"}, + {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "execnet" +version = "2.1.2" +description = "execnet: rapid multi-Python deployment" +optional = false +python-versions = ">=3.8" +files = [ + {file = "execnet-2.1.2-py3-none-any.whl", hash = "sha256:67fba928dd5a544b783f6056f449e5e3931a5c378b128bc18501f7ea79e296ec"}, + {file = "execnet-2.1.2.tar.gz", hash = "sha256:63d83bfdd9a23e35b9c6a3261412324f964c2ec8dcd8d3c6916ee9373e0befcd"}, +] + +[package.extras] +testing = ["hatch", "pre-commit", "pytest", "tox"] + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "httpcore" +version = "1.0.9" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55"}, + {file = "httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.16" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "mypy" +version = "1.13.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = 
"mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +] + +[package.dependencies] +mypy-extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing-extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pydantic" +version = "2.10.6" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + 
{file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = 
"pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = 
"pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = 
"sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pytest" +version = "7.4.4" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-asyncio" +version = "0.23.8" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + +[[package]] +name = "pytest-xdist" +version = "3.6.1" +description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, + {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, +] + +[package.dependencies] +execnet = ">=2.1" +pytest = ">=7.0.0" + +[package.extras] +psutil = ["psutil (>=3.0)"] +setproctitle = ["setproctitle"] +testing = ["filelock"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "requests" +version = "2.32.4" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.11.5" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.11.5-py3-none-linux_armv6l.whl", hash = "sha256:2561294e108eb648e50f210671cc56aee590fb6167b594144401532138c66c7b"}, + {file = "ruff-0.11.5-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ac12884b9e005c12d0bd121f56ccf8033e1614f736f766c118ad60780882a077"}, + {file = "ruff-0.11.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:4bfd80a6ec559a5eeb96c33f832418bf0fb96752de0539905cf7b0cc1d31d779"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0947c0a1afa75dcb5db4b34b070ec2bccee869d40e6cc8ab25aca11a7d527794"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ad871ff74b5ec9caa66cb725b85d4ef89b53f8170f47c3406e32ef040400b038"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6cf918390cfe46d240732d4d72fa6e18e528ca1f60e318a10835cf2fa3dc19f"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56145ee1478582f61c08f21076dc59153310d606ad663acc00ea3ab5b2125f82"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5f66f8f1e8c9fc594cbd66fbc5f246a8d91f916cb9667e80208663ec3728304"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80b4df4d335a80315ab9afc81ed1cff62be112bd165e162b5eed8ac55bfc8470"}, + {file = "ruff-0.11.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3068befab73620b8a0cc2431bd46b3cd619bc17d6f7695a3e1bb166b652c382a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:f5da2e710a9641828e09aa98b92c9ebbc60518fdf3921241326ca3e8f8e55b8b"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ef39f19cb8ec98cbc762344921e216f3857a06c47412030374fffd413fb8fd3a"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:b2a7cedf47244f431fd11aa5a7e2806dda2e0c365873bda7834e8f7d785ae159"}, + {file = "ruff-0.11.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:81be52e7519f3d1a0beadcf8e974715b2dfc808ae8ec729ecfc79bddf8dbb783"}, + {file = "ruff-0.11.5-py3-none-win32.whl", hash = "sha256:e268da7b40f56e3eca571508a7e567e794f9bfcc0f412c4b607931d3af9c4afe"}, + {file = "ruff-0.11.5-py3-none-win_amd64.whl", hash = "sha256:6c6dc38af3cfe2863213ea25b6dc616d679205732dc0fb673356c2d69608f800"}, + {file = "ruff-0.11.5-py3-none-win_arm64.whl", hash = "sha256:67e241b4314f4eacf14a601d586026a962f4002a475aa702c69980a38087aa4e"}, + {file = "ruff-0.11.5.tar.gz", hash = "sha256:cae2e2439cb88853e421901ec040a758960b576126dab520fa08e9de431d1bef"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + 
{file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "tomli" +version = "2.4.0" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867"}, + {file = "tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95"}, + {file = "tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d"}, + {file = "tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576"}, + {file = "tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a"}, + {file = "tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa"}, + {file = "tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1"}, + {file = "tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a"}, + {file = "tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1"}, + {file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b"}, + {file = "tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51"}, + {file = "tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729"}, + {file = "tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da"}, + {file = "tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3"}, + {file = 
"tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0"}, + {file = "tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4"}, + {file = "tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c"}, + {file = "tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f"}, + {file = "tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86"}, + {file = "tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87"}, + {file = "tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6"}, + {file = "tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66"}, + {file = "tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d"}, + {file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702"}, + {file = "tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8"}, + {file = "tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776"}, + {file = "tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475"}, + {file = "tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9"}, + {file = "tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df"}, + {file = "tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d"}, + {file = "tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f"}, + {file = 
"tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b"}, + {file = "tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087"}, + {file = "tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd"}, + {file = "tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4"}, + {file = "tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a"}, + {file = "tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c"}, +] + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20241206" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20241016" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, +] + +[package.dependencies] +urllib3 = ">=2" + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "urllib3" +version = "2.2.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.8" +content-hash = "bab34812d8b562ff6f64a656292646c46f472e19bcba8e27cf347d906b2bd51f" diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/pyproject.toml b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/pyproject.toml new file mode 100644 index 000000000000..848e9ae42561 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/pyproject.toml @@ -0,0 +1,93 @@ +[project] +name = "fern_python-streaming-parameter-openapi" +dynamic = ["version"] + +[tool.poetry] +name = "fern_python-streaming-parameter-openapi" +version = "0.0.1" +description = "" +readme = "README.md" +authors = [] +keywords = [ + "fern", + "test" +] + +classifiers = [ + "Intended Audience :: Developers", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Operating System :: OS Independent", + "Operating System :: POSIX", + "Operating System :: MacOS", + "Operating System :: POSIX :: Linux", + "Operating System :: Microsoft :: Windows", + "Topic :: Software Development :: Libraries :: Python Modules", + "Typing :: Typed" +] +packages = [ + { include = "seed", from = "src"} +] + +[tool.poetry.urls] +Documentation = 'https://buildwithfern.com/learn' +Homepage = 'https://buildwithfern.com/' +Repository = 'https://github.com/python-streaming-parameter-openapi/fern' + +[tool.poetry.dependencies] +python = "^3.8" +httpx = ">=0.21.2" +pydantic = ">= 1.9.2" +pydantic-core = ">=2.18.2" +typing_extensions = ">= 4.0.0" + +[tool.poetry.group.dev.dependencies] +mypy = "==1.13.0" +pytest = "^7.4.0" +pytest-asyncio = "^0.23.5" +pytest-xdist = "^3.6.1" +python-dateutil = "^2.9.0" +types-python-dateutil = "^2.9.0.20240316" +requests = "^2.31.0" +types-requests = "^2.31.0" +ruff = "==0.11.5" + +[tool.pytest.ini_options] +testpaths = [ "tests" ] +asyncio_mode = "auto" + +[tool.mypy] +plugins = ["pydantic.mypy"] + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "F", # pyflakes + "I", # isort +] +ignore = [ + "E402", # Module level import not at top of file + "E501", # Line too long + "E711", # Comparison to `None` should be `cond is not None` + "E712", # Avoid equality comparisons to `True`; use `if ...:` checks + "E721", # Use `is` and `is not` for type comparisons, or `isinstance()` for insinstance checks + "E722", # Do not use bare `except` + "E731", # Do not assign a `lambda` expression, use a `def` + "F821", # Undefined name + "F841" # Local variable ... 
is assigned to but never used +] + +[tool.ruff.lint.isort] +section-order = ["future", "standard-library", "third-party", "first-party"] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/reference.md b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/reference.md new file mode 100644 index 000000000000..c99b68278f08 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/reference.md @@ -0,0 +1,115 @@ +# Reference +
+client.chat_stream(...) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[ChatStreamEvent]]]
+
+#### 🔌 Usage
+
+```python
+from seed import SeedApi
+
+client = SeedApi(
+    base_url="https://yourhost.com/path/to/api",
+)
+response = client.chat_stream(
+    prompt="prompt",
+)
+for chunk in response.data:
+    yield chunk
+
+```
+
+#### ⚙️ Parameters
+
+**prompt:** `str` — The user's message
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
+
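The generated snippet above ends with a module-level `yield`, which is only valid inside a function. A minimal consumption sketch, assuming the `SeedApi.chat_stream` method defined later in this diff (it returns an iterator of `ChatStreamEvent` objects) and a server reachable at the placeholder base URL:

```python
# Minimal sketch, not generated output: drain the chat stream with a plain loop
# instead of a module-level `yield`. Assumes the `seed` package from this diff
# and a server reachable at the placeholder base URL.
from seed import SeedApi

client = SeedApi(
    base_url="https://yourhost.com/path/to/api",  # placeholder, as in the generated examples
)

for event in client.chat_stream(prompt="prompt"):
    # ChatStreamEvent fields are not shown in this excerpt, so just print each event.
    print(event)
```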
+client.chat(...) -> AsyncHttpResponse[ChatResponse]
+
+#### 🔌 Usage
+
+```python
+from seed import SeedApi
+
+client = SeedApi(
+    base_url="https://yourhost.com/path/to/api",
+)
+client.chat(
+    prompt="Hello",
+)
+
+```
+
+#### ⚙️ Parameters
+
+**prompt:** `str` — The user's message
+
+**request_options:** `typing.Optional[RequestOptions]` — Request-specific configuration.
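The async variants appear later in this diff: `snippet.json` carries the generated `async_client` snippets, and `client.py` defines `AsyncSeedApi`, whose `chat_stream` is written as an async generator. A hedged sketch of async usage (placeholder base URL and prompts), iterating the stream with `async for` rather than awaiting it:

```python
# Minimal async sketch, not generated output. AsyncSeedApi comes from the
# client.py added later in this diff; its chat_stream method is an async
# generator, so it is iterated directly rather than awaited.
import asyncio

from seed import AsyncSeedApi


async def main() -> None:
    client = AsyncSeedApi(
        base_url="https://yourhost.com/path/to/api",  # placeholder
    )

    # Non-streaming endpoint: returns a ChatResponse.
    print(await client.chat(prompt="Hello"))

    # Streaming endpoint: yields ChatStreamEvent objects.
    async for event in client.chat_stream(prompt="prompt"):
        print(event)


asyncio.run(main())
```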
+ diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/requirements.txt b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/requirements.txt new file mode 100644 index 000000000000..e80f640a2e74 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/requirements.txt @@ -0,0 +1,4 @@ +httpx>=0.21.2 +pydantic>= 1.9.2 +pydantic-core>=2.18.2 +typing_extensions>= 4.0.0 diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/snippet.json b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/snippet.json new file mode 100644 index 000000000000..5d8c73caec01 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/snippet.json @@ -0,0 +1,31 @@ +{ + "types": {}, + "endpoints": [ + { + "example_identifier": "default", + "id": { + "path": "/chat", + "method": "POST", + "identifier_override": "endpoint_.chat_stream" + }, + "snippet": { + "sync_client": "from seed import SeedApi\n\nclient = SeedApi(\n base_url=\"https://yourhost.com/path/to/api\",\n)\nresponse = client.chat_stream(\n prompt=\"prompt\",\n)\nfor chunk in response.data:\n yield chunk\n", + "async_client": "import asyncio\n\nfrom seed import AsyncSeedApi\n\nclient = AsyncSeedApi(\n base_url=\"https://yourhost.com/path/to/api\",\n)\n\n\nasync def main() -> None:\n response = await client.chat_stream(\n prompt=\"prompt\",\n )\n async for chunk in response.data:\n yield chunk\n\n\nasyncio.run(main())\n", + "type": "python" + } + }, + { + "example_identifier": "default", + "id": { + "path": "/chat", + "method": "POST", + "identifier_override": "endpoint_.chat" + }, + "snippet": { + "sync_client": "from seed import SeedApi\n\nclient = SeedApi(\n base_url=\"https://yourhost.com/path/to/api\",\n)\nclient.chat(\n prompt=\"Hello\",\n)\n", + "async_client": "import asyncio\n\nfrom seed import AsyncSeedApi\n\nclient = AsyncSeedApi(\n base_url=\"https://yourhost.com/path/to/api\",\n)\n\n\nasync def main() -> None:\n await client.chat(\n prompt=\"Hello\",\n )\n\n\nasyncio.run(main())\n", + "type": "python" + } + } + ] +} \ No newline at end of file diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/__init__.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/__init__.py new file mode 100644 index 000000000000..2f5be3e5aa0e --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/__init__.py @@ -0,0 +1,42 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .types import ChatResponse, ChatStreamEvent + from .client import AsyncSeedApi, SeedApi + from .version import __version__ +_dynamic_imports: typing.Dict[str, str] = { + "AsyncSeedApi": ".client", + "ChatResponse": ".types", + "ChatStreamEvent": ".types", + "SeedApi": ".client", + "__version__": ".version", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}") + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e + except AttributeError as e: + raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + + +__all__ = ["AsyncSeedApi", "ChatResponse", "ChatStreamEvent", "SeedApi", "__version__"] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/client.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/client.py new file mode 100644 index 000000000000..64207f2841a2 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/client.py @@ -0,0 +1,286 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import httpx +from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .core.request_options import RequestOptions +from .raw_client import AsyncRawSeedApi, RawSeedApi +from .types.chat_response import ChatResponse +from .types.chat_stream_event import ChatStreamEvent + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class SeedApi: + """ + Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions. + + Parameters + ---------- + base_url : str + The base url to use for requests from the client. + + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + + timeout : typing.Optional[float] + The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. + + follow_redirects : typing.Optional[bool] + Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in. + + httpx_client : typing.Optional[httpx.Client] + The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. 
+ + Examples + -------- + from seed import SeedApi + + client = SeedApi( + base_url="https://yourhost.com/path/to/api", + ) + """ + + def __init__( + self, + *, + base_url: str, + headers: typing.Optional[typing.Dict[str, str]] = None, + timeout: typing.Optional[float] = None, + follow_redirects: typing.Optional[bool] = True, + httpx_client: typing.Optional[httpx.Client] = None, + ): + _defaulted_timeout = ( + timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read + ) + self._client_wrapper = SyncClientWrapper( + base_url=base_url, + headers=headers, + httpx_client=httpx_client + if httpx_client is not None + else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects) + if follow_redirects is not None + else httpx.Client(timeout=_defaulted_timeout), + timeout=_defaulted_timeout, + ) + self._raw_client = RawSeedApi(client_wrapper=self._client_wrapper) + + @property + def with_raw_response(self) -> RawSeedApi: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + RawSeedApi + """ + return self._raw_client + + def chat_stream( + self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None + ) -> typing.Iterator[ChatStreamEvent]: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.Iterator[ChatStreamEvent] + + + Examples + -------- + from seed import SeedApi + + client = SeedApi( + base_url="https://yourhost.com/path/to/api", + ) + response = client.chat_stream( + prompt="prompt", + ) + for chunk in response: + yield chunk + """ + with self._raw_client.chat_stream(prompt=prompt, request_options=request_options) as r: + yield from r.data + + def chat(self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None) -> ChatResponse: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ChatResponse + + + Examples + -------- + from seed import SeedApi + + client = SeedApi( + base_url="https://yourhost.com/path/to/api", + ) + client.chat( + prompt="Hello", + ) + """ + _response = self._raw_client.chat(prompt=prompt, request_options=request_options) + return _response.data + + +class AsyncSeedApi: + """ + Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions. + + Parameters + ---------- + base_url : str + The base url to use for requests from the client. + + headers : typing.Optional[typing.Dict[str, str]] + Additional headers to send with every request. + + timeout : typing.Optional[float] + The timeout to be used, in seconds, for requests. By default the timeout is 60 seconds, unless a custom httpx client is used, in which case this default is not enforced. + + follow_redirects : typing.Optional[bool] + Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in. + + httpx_client : typing.Optional[httpx.AsyncClient] + The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration. 
+ + Examples + -------- + from seed import AsyncSeedApi + + client = AsyncSeedApi( + base_url="https://yourhost.com/path/to/api", + ) + """ + + def __init__( + self, + *, + base_url: str, + headers: typing.Optional[typing.Dict[str, str]] = None, + timeout: typing.Optional[float] = None, + follow_redirects: typing.Optional[bool] = True, + httpx_client: typing.Optional[httpx.AsyncClient] = None, + ): + _defaulted_timeout = ( + timeout if timeout is not None else 60 if httpx_client is None else httpx_client.timeout.read + ) + self._client_wrapper = AsyncClientWrapper( + base_url=base_url, + headers=headers, + httpx_client=httpx_client + if httpx_client is not None + else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects) + if follow_redirects is not None + else httpx.AsyncClient(timeout=_defaulted_timeout), + timeout=_defaulted_timeout, + ) + self._raw_client = AsyncRawSeedApi(client_wrapper=self._client_wrapper) + + @property + def with_raw_response(self) -> AsyncRawSeedApi: + """ + Retrieves a raw implementation of this client that returns raw responses. + + Returns + ------- + AsyncRawSeedApi + """ + return self._raw_client + + async def chat_stream( + self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None + ) -> typing.AsyncIterator[ChatStreamEvent]: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Yields + ------ + typing.AsyncIterator[ChatStreamEvent] + + + Examples + -------- + import asyncio + + from seed import AsyncSeedApi + + client = AsyncSeedApi( + base_url="https://yourhost.com/path/to/api", + ) + + + async def main() -> None: + response = await client.chat_stream( + prompt="prompt", + ) + async for chunk in response: + yield chunk + + + asyncio.run(main()) + """ + async with self._raw_client.chat_stream(prompt=prompt, request_options=request_options) as r: + async for _chunk in r.data: + yield _chunk + + async def chat(self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None) -> ChatResponse: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + ChatResponse + + + Examples + -------- + import asyncio + + from seed import AsyncSeedApi + + client = AsyncSeedApi( + base_url="https://yourhost.com/path/to/api", + ) + + + async def main() -> None: + await client.chat( + prompt="Hello", + ) + + + asyncio.run(main()) + """ + _response = await self._raw_client.chat(prompt=prompt, request_options=request_options) + return _response.data diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/__init__.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/__init__.py new file mode 100644 index 000000000000..9a33e233875e --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/__init__.py @@ -0,0 +1,105 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .api_error import ApiError + from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper + from .datetime_utils import serialize_datetime + from .file import File, convert_file_dict_to_httpx_tuples, with_content_type + from .http_client import AsyncHttpClient, HttpClient + from .http_response import AsyncHttpResponse, HttpResponse + from .jsonable_encoder import jsonable_encoder + from .pydantic_utilities import ( + IS_PYDANTIC_V2, + UniversalBaseModel, + UniversalRootModel, + parse_obj_as, + universal_field_validator, + universal_root_validator, + update_forward_refs, + ) + from .query_encoder import encode_query + from .remove_none_from_dict import remove_none_from_dict + from .request_options import RequestOptions + from .serialization import FieldMetadata, convert_and_respect_annotation_metadata +_dynamic_imports: typing.Dict[str, str] = { + "ApiError": ".api_error", + "AsyncClientWrapper": ".client_wrapper", + "AsyncHttpClient": ".http_client", + "AsyncHttpResponse": ".http_response", + "BaseClientWrapper": ".client_wrapper", + "FieldMetadata": ".serialization", + "File": ".file", + "HttpClient": ".http_client", + "HttpResponse": ".http_response", + "IS_PYDANTIC_V2": ".pydantic_utilities", + "RequestOptions": ".request_options", + "SyncClientWrapper": ".client_wrapper", + "UniversalBaseModel": ".pydantic_utilities", + "UniversalRootModel": ".pydantic_utilities", + "convert_and_respect_annotation_metadata": ".serialization", + "convert_file_dict_to_httpx_tuples": ".file", + "encode_query": ".query_encoder", + "jsonable_encoder": ".jsonable_encoder", + "parse_obj_as": ".pydantic_utilities", + "remove_none_from_dict": ".remove_none_from_dict", + "serialize_datetime": ".datetime_utils", + "universal_field_validator": ".pydantic_utilities", + "universal_root_validator": ".pydantic_utilities", + "update_forward_refs": ".pydantic_utilities", + "with_content_type": ".file", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}") + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e + except AttributeError as e: + raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + + +__all__ = [ + "ApiError", + "AsyncClientWrapper", + "AsyncHttpClient", + "AsyncHttpResponse", + "BaseClientWrapper", + "FieldMetadata", + "File", + "HttpClient", + "HttpResponse", + "IS_PYDANTIC_V2", + "RequestOptions", + "SyncClientWrapper", + "UniversalBaseModel", + "UniversalRootModel", + "convert_and_respect_annotation_metadata", + "convert_file_dict_to_httpx_tuples", + "encode_query", + "jsonable_encoder", + "parse_obj_as", + "remove_none_from_dict", + "serialize_datetime", + "universal_field_validator", + "universal_root_validator", + "update_forward_refs", + "with_content_type", +] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/api_error.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/api_error.py new file 
mode 100644 index 000000000000..6f850a60cba3 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/api_error.py @@ -0,0 +1,23 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import Any, Dict, Optional + + +class ApiError(Exception): + headers: Optional[Dict[str, str]] + status_code: Optional[int] + body: Any + + def __init__( + self, + *, + headers: Optional[Dict[str, str]] = None, + status_code: Optional[int] = None, + body: Any = None, + ) -> None: + self.headers = headers + self.status_code = status_code + self.body = body + + def __str__(self) -> str: + return f"headers: {self.headers}, status_code: {self.status_code}, body: {self.body}" diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/client_wrapper.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/client_wrapper.py new file mode 100644 index 000000000000..ded056afa19d --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/client_wrapper.py @@ -0,0 +1,88 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import httpx +from .http_client import AsyncHttpClient, HttpClient + + +class BaseClientWrapper: + def __init__( + self, + *, + headers: typing.Optional[typing.Dict[str, str]] = None, + base_url: str, + timeout: typing.Optional[float] = None, + ): + self._headers = headers + self._base_url = base_url + self._timeout = timeout + + def get_headers(self) -> typing.Dict[str, str]: + import platform + + headers: typing.Dict[str, str] = { + "User-Agent": "fern_python-streaming-parameter-openapi/0.0.1", + "X-Fern-Language": "Python", + "X-Fern-Runtime": f"python/{platform.python_version()}", + "X-Fern-Platform": f"{platform.system().lower()}/{platform.release()}", + "X-Fern-SDK-Name": "fern_python-streaming-parameter-openapi", + "X-Fern-SDK-Version": "0.0.1", + **(self.get_custom_headers() or {}), + } + return headers + + def get_custom_headers(self) -> typing.Optional[typing.Dict[str, str]]: + return self._headers + + def get_base_url(self) -> str: + return self._base_url + + def get_timeout(self) -> typing.Optional[float]: + return self._timeout + + +class SyncClientWrapper(BaseClientWrapper): + def __init__( + self, + *, + headers: typing.Optional[typing.Dict[str, str]] = None, + base_url: str, + timeout: typing.Optional[float] = None, + httpx_client: httpx.Client, + ): + super().__init__(headers=headers, base_url=base_url, timeout=timeout) + self.httpx_client = HttpClient( + httpx_client=httpx_client, + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, + ) + + +class AsyncClientWrapper(BaseClientWrapper): + def __init__( + self, + *, + headers: typing.Optional[typing.Dict[str, str]] = None, + base_url: str, + timeout: typing.Optional[float] = None, + async_token: typing.Optional[typing.Callable[[], typing.Awaitable[str]]] = None, + httpx_client: httpx.AsyncClient, + ): + super().__init__(headers=headers, base_url=base_url, timeout=timeout) + self._async_token = async_token + self.httpx_client = AsyncHttpClient( + httpx_client=httpx_client, + base_headers=self.get_headers, + base_timeout=self.get_timeout, + base_url=self.get_base_url, + async_base_headers=self.async_get_headers, + ) + + async def async_get_headers(self) -> typing.Dict[str, str]: + headers = self.get_headers() + if self._async_token is not None: + token = await self._async_token() + 
headers["Authorization"] = f"Bearer {token}" + return headers diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/datetime_utils.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/datetime_utils.py new file mode 100644 index 000000000000..7c9864a944c2 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/datetime_utils.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt + + +def serialize_datetime(v: dt.datetime) -> str: + """ + Serialize a datetime including timezone info. + + Uses the timezone info provided if present, otherwise uses the current runtime's timezone info. + + UTC datetimes end in "Z" while all other timezones are represented as offset from UTC, e.g. +05:00. + """ + + def _serialize_zoned_datetime(v: dt.datetime) -> str: + if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None): + # UTC is a special case where we use "Z" at the end instead of "+00:00" + return v.isoformat().replace("+00:00", "Z") + else: + # Delegate to the typical +/- offset format + return v.isoformat() + + if v.tzinfo is not None: + return _serialize_zoned_datetime(v) + else: + local_tz = dt.datetime.now().astimezone().tzinfo + localized_dt = v.replace(tzinfo=local_tz) + return _serialize_zoned_datetime(localized_dt) diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/file.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/file.py new file mode 100644 index 000000000000..44b0d27c0895 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/file.py @@ -0,0 +1,67 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast + +# File typing inspired by the flexibility of types within the httpx library +# https://github.com/encode/httpx/blob/master/httpx/_types.py +FileContent = Union[IO[bytes], bytes, str] +File = Union[ + # file (or bytes) + FileContent, + # (filename, file (or bytes)) + Tuple[Optional[str], FileContent], + # (filename, file (or bytes), content_type) + Tuple[Optional[str], FileContent, Optional[str]], + # (filename, file (or bytes), content_type, headers) + Tuple[ + Optional[str], + FileContent, + Optional[str], + Mapping[str, str], + ], +] + + +def convert_file_dict_to_httpx_tuples( + d: Dict[str, Union[File, List[File]]], +) -> List[Tuple[str, File]]: + """ + The format we use is a list of tuples, where the first element is the + name of the file and the second is the file object. Typically HTTPX wants + a dict, but to be able to send lists of files, you have to use the list + approach (which also works for non-lists) + https://github.com/encode/httpx/pull/1032 + """ + + httpx_tuples = [] + for key, file_like in d.items(): + if isinstance(file_like, list): + for file_like_item in file_like: + httpx_tuples.append((key, file_like_item)) + else: + httpx_tuples.append((key, file_like)) + return httpx_tuples + + +def with_content_type(*, file: File, default_content_type: str) -> File: + """ + This function resolves to the file's content type, if provided, and defaults + to the default_content_type value if not. 
+ """ + if isinstance(file, tuple): + if len(file) == 2: + filename, content = cast(Tuple[Optional[str], FileContent], file) # type: ignore + return (filename, content, default_content_type) + elif len(file) == 3: + filename, content, file_content_type = cast(Tuple[Optional[str], FileContent, Optional[str]], file) # type: ignore + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type) + elif len(file) == 4: + filename, content, file_content_type, headers = cast( # type: ignore + Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file + ) + out_content_type = file_content_type or default_content_type + return (filename, content, out_content_type, headers) + else: + raise ValueError(f"Unexpected tuple length: {len(file)}") + return (None, file, default_content_type) diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/force_multipart.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/force_multipart.py new file mode 100644 index 000000000000..5440913fd4bc --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/force_multipart.py @@ -0,0 +1,18 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import Any, Dict + + +class ForceMultipartDict(Dict[str, Any]): + """ + A dictionary subclass that always evaluates to True in boolean contexts. + + This is used to force multipart/form-data encoding in HTTP requests even when + the dictionary is empty, which would normally evaluate to False. + """ + + def __bool__(self) -> bool: + return True + + +FORCE_MULTIPART = ForceMultipartDict() diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_client.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_client.py new file mode 100644 index 000000000000..7c6c936f9ddc --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_client.py @@ -0,0 +1,663 @@ +# This file was auto-generated by Fern from our API Definition. + +import asyncio +import email.utils +import re +import time +import typing +from contextlib import asynccontextmanager, contextmanager +from random import random + +import httpx +from .file import File, convert_file_dict_to_httpx_tuples +from .force_multipart import FORCE_MULTIPART +from .jsonable_encoder import jsonable_encoder +from .query_encoder import encode_query +from .remove_none_from_dict import remove_none_from_dict as remove_none_from_dict +from .request_options import RequestOptions +from httpx._types import RequestFiles + +INITIAL_RETRY_DELAY_SECONDS = 1.0 +MAX_RETRY_DELAY_SECONDS = 60.0 +JITTER_FACTOR = 0.2 # 20% random jitter + + +def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]: + """ + This function parses the `Retry-After` header in a HTTP response and returns the number of seconds to wait. + + Inspired by the urllib3 retry implementation. + """ + retry_after_ms = response_headers.get("retry-after-ms") + if retry_after_ms is not None: + try: + return int(retry_after_ms) / 1000 if retry_after_ms > 0 else 0 + except Exception: + pass + + retry_after = response_headers.get("retry-after") + if retry_after is None: + return None + + # Attempt to parse the header as an int. + if re.match(r"^\s*[0-9]+\s*$", retry_after): + seconds = float(retry_after) + # Fallback to parsing it as a date. 
+ else: + retry_date_tuple = email.utils.parsedate_tz(retry_after) + if retry_date_tuple is None: + return None + if retry_date_tuple[9] is None: # Python 2 + # Assume UTC if no timezone was specified + # On Python2.7, parsedate_tz returns None for a timezone offset + # instead of 0 if no timezone is given, where mktime_tz treats + # a None timezone offset as local time. + retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:] + + retry_date = email.utils.mktime_tz(retry_date_tuple) + seconds = retry_date - time.time() + + if seconds < 0: + seconds = 0 + + return seconds + + +def _add_positive_jitter(delay: float) -> float: + """Add positive jitter (0-20%) to prevent thundering herd.""" + jitter_multiplier = 1 + random() * JITTER_FACTOR + return delay * jitter_multiplier + + +def _add_symmetric_jitter(delay: float) -> float: + """Add symmetric jitter (±10%) for exponential backoff.""" + jitter_multiplier = 1 + (random() - 0.5) * JITTER_FACTOR + return delay * jitter_multiplier + + +def _parse_x_ratelimit_reset(response_headers: httpx.Headers) -> typing.Optional[float]: + """ + Parse the X-RateLimit-Reset header (Unix timestamp in seconds). + Returns seconds to wait, or None if header is missing/invalid. + """ + reset_time_str = response_headers.get("x-ratelimit-reset") + if reset_time_str is None: + return None + + try: + reset_time = int(reset_time_str) + delay = reset_time - time.time() + if delay > 0: + return delay + except (ValueError, TypeError): + pass + + return None + + +def _retry_timeout(response: httpx.Response, retries: int) -> float: + """ + Determine the amount of time to wait before retrying a request. + This function begins by trying to parse a retry-after header from the response, and then proceeds to use exponential backoff + with a jitter to determine the number of seconds to wait. + """ + + # 1. Check Retry-After header first + retry_after = _parse_retry_after(response.headers) + if retry_after is not None and retry_after > 0: + return min(retry_after, MAX_RETRY_DELAY_SECONDS) + + # 2. Check X-RateLimit-Reset header (with positive jitter) + ratelimit_reset = _parse_x_ratelimit_reset(response.headers) + if ratelimit_reset is not None: + return _add_positive_jitter(min(ratelimit_reset, MAX_RETRY_DELAY_SECONDS)) + + # 3. Fall back to exponential backoff (with symmetric jitter) + backoff = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS) + return _add_symmetric_jitter(backoff) + + +def _should_retry(response: httpx.Response) -> bool: + retryable_400s = [429, 408, 409] + return response.status_code >= 500 or response.status_code in retryable_400s + + +def _build_url(base_url: str, path: typing.Optional[str]) -> str: + """ + Build a full URL by joining a base URL with a path. + + This function correctly handles base URLs that contain path prefixes (e.g., tenant-based URLs) + by using string concatenation instead of urllib.parse.urljoin(), which would incorrectly + strip path components when the path starts with '/'. + + Example: + >>> _build_url("https://cloud.example.com/org/tenant/api", "/users") + 'https://cloud.example.com/org/tenant/api/users' + + Args: + base_url: The base URL, which may contain path prefixes. + path: The path to append. Can be None or empty string. + + Returns: + The full URL with base_url and path properly joined. 
+ """ + if not path: + return base_url + return f"{base_url.rstrip('/')}/{path.lstrip('/')}" + + +def _maybe_filter_none_from_multipart_data( + data: typing.Optional[typing.Any], + request_files: typing.Optional[RequestFiles], + force_multipart: typing.Optional[bool], +) -> typing.Optional[typing.Any]: + """ + Filter None values from data body for multipart/form requests. + This prevents httpx from converting None to empty strings in multipart encoding. + Only applies when files are present or force_multipart is True. + """ + if data is not None and isinstance(data, typing.Mapping) and (request_files or force_multipart): + return remove_none_from_dict(data) + return data + + +def remove_omit_from_dict( + original: typing.Dict[str, typing.Optional[typing.Any]], + omit: typing.Optional[typing.Any], +) -> typing.Dict[str, typing.Any]: + if omit is None: + return original + new: typing.Dict[str, typing.Any] = {} + for key, value in original.items(): + if value is not omit: + new[key] = value + return new + + +def maybe_filter_request_body( + data: typing.Optional[typing.Any], + request_options: typing.Optional[RequestOptions], + omit: typing.Optional[typing.Any], +) -> typing.Optional[typing.Any]: + if data is None: + return ( + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} + if request_options is not None + else None + ) + elif not isinstance(data, typing.Mapping): + data_content = jsonable_encoder(data) + else: + data_content = { + **(jsonable_encoder(remove_omit_from_dict(data, omit))), # type: ignore + **( + jsonable_encoder(request_options.get("additional_body_parameters", {})) or {} + if request_options is not None + else {} + ), + } + return data_content + + +# Abstracted out for testing purposes +def get_request_body( + *, + json: typing.Optional[typing.Any], + data: typing.Optional[typing.Any], + request_options: typing.Optional[RequestOptions], + omit: typing.Optional[typing.Any], +) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]: + json_body = None + data_body = None + if data is not None: + data_body = maybe_filter_request_body(data, request_options, omit) + else: + # If both data and json are None, we send json data in the event extra properties are specified + json_body = maybe_filter_request_body(json, request_options, omit) + + has_additional_body_parameters = bool( + request_options is not None and request_options.get("additional_body_parameters") + ) + + # Only collapse empty dict to None when the body was not explicitly provided + # and there are no additional body parameters. This preserves explicit empty + # bodies (e.g., when an endpoint has a request body type but all fields are optional). 
+ if json_body == {} and json is None and not has_additional_body_parameters: + json_body = None + if data_body == {} and data is None and not has_additional_body_parameters: + data_body = None + + return json_body, data_body + + +class HttpClient: + def __init__( + self, + *, + httpx_client: httpx.Client, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: typing.Optional[typing.Callable[[], str]] = None, + ): + self.base_url = base_url + self.base_timeout = base_timeout + self.base_headers = base_headers + self.httpx_client = httpx_client + + def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + + if base_url is None: + raise ValueError("A base_url is required to make this request, please provide one and try again.") + return base_url + + def request( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 0, + omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, + ) -> httpx.Response: + base_url = self.get_base_url(base_url) + timeout = ( + request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + + data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart) + + # Compute encoded params separately to avoid passing empty list to httpx + # (httpx strips existing query params from URL when params=[] is passed) + _encoded_params = encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) or {} + if request_options is not None + else {} + ), + }, + omit, + ) + ) + ) + ) + + response = self.httpx_client.request( + method=method, + url=_build_url(base_url, path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **self.base_headers(), + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), + } + ) + ), + params=_encoded_params if _encoded_params else None, + json=json_body, + data=data_body, + content=content, + files=request_files, + timeout=timeout, + ) + + max_retries: int = request_options.get("max_retries", 2) 
if request_options is not None else 2 + if _should_retry(response=response): + if retries < max_retries: + time.sleep(_retry_timeout(response=response, retries=retries)) + return self.request( + path=path, + method=method, + base_url=base_url, + params=params, + json=json, + content=content, + files=files, + headers=headers, + request_options=request_options, + retries=retries + 1, + omit=omit, + ) + + return response + + @contextmanager + def stream( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 0, + omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, + ) -> typing.Iterator[httpx.Response]: + base_url = self.get_base_url(base_url) + timeout = ( + request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart) + + # Compute encoded params separately to avoid passing empty list to httpx + # (httpx strips existing query params from URL when params=[] is passed) + _encoded_params = encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + }, + omit, + ) + ) + ) + ) + + with self.httpx_client.stream( + method=method, + url=_build_url(base_url, path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **self.base_headers(), + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) if request_options is not None else {}), + } + ) + ), + params=_encoded_params if _encoded_params else None, + json=json_body, + data=data_body, + content=content, + files=request_files, + timeout=timeout, + ) as stream: + yield stream + + +class AsyncHttpClient: + def __init__( + self, + *, + httpx_client: httpx.AsyncClient, + base_timeout: typing.Callable[[], typing.Optional[float]], + base_headers: typing.Callable[[], typing.Dict[str, str]], + base_url: typing.Optional[typing.Callable[[], str]] = None, + async_base_headers: typing.Optional[typing.Callable[[], typing.Awaitable[typing.Dict[str, str]]]] = None, + ): + self.base_url = base_url + self.base_timeout = base_timeout + self.base_headers = base_headers + self.async_base_headers = async_base_headers + self.httpx_client = 
httpx_client + + async def _get_headers(self) -> typing.Dict[str, str]: + if self.async_base_headers is not None: + return await self.async_base_headers() + return self.base_headers() + + def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str: + base_url = maybe_base_url + if self.base_url is not None and base_url is None: + base_url = self.base_url() + + if base_url is None: + raise ValueError("A base_url is required to make this request, please provide one and try again.") + return base_url + + async def request( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 0, + omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, + ) -> httpx.Response: + base_url = self.get_base_url(base_url) + timeout = ( + request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart) + + # Get headers (supports async token providers) + _headers = await self._get_headers() + + # Compute encoded params separately to avoid passing empty list to httpx + # (httpx strips existing query params from URL when params=[] is passed) + _encoded_params = encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) or {} + if request_options is not None + else {} + ), + }, + omit, + ) + ) + ) + ) + + # Add the input to each of these and do None-safety checks + response = await self.httpx_client.request( + method=method, + url=_build_url(base_url, path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **_headers, + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}), + } + ) + ), + params=_encoded_params if _encoded_params else None, + json=json_body, + data=data_body, + content=content, + files=request_files, + timeout=timeout, + ) + + max_retries: int = request_options.get("max_retries", 2) if request_options is not None else 2 + if _should_retry(response=response): + if retries < max_retries: + await asyncio.sleep(_retry_timeout(response=response, retries=retries)) + return await self.request( + path=path, + method=method, + base_url=base_url, + params=params, + json=json, + 
content=content, + files=files, + headers=headers, + request_options=request_options, + retries=retries + 1, + omit=omit, + ) + return response + + @asynccontextmanager + async def stream( + self, + path: typing.Optional[str] = None, + *, + method: str, + base_url: typing.Optional[str] = None, + params: typing.Optional[typing.Dict[str, typing.Any]] = None, + json: typing.Optional[typing.Any] = None, + data: typing.Optional[typing.Any] = None, + content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None, + files: typing.Optional[ + typing.Union[ + typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]], + typing.List[typing.Tuple[str, File]], + ] + ] = None, + headers: typing.Optional[typing.Dict[str, typing.Any]] = None, + request_options: typing.Optional[RequestOptions] = None, + retries: int = 0, + omit: typing.Optional[typing.Any] = None, + force_multipart: typing.Optional[bool] = None, + ) -> typing.AsyncIterator[httpx.Response]: + base_url = self.get_base_url(base_url) + timeout = ( + request_options.get("timeout_in_seconds") + if request_options is not None and request_options.get("timeout_in_seconds") is not None + else self.base_timeout() + ) + + request_files: typing.Optional[RequestFiles] = ( + convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit)) + if (files is not None and files is not omit and isinstance(files, dict)) + else None + ) + + if (request_files is None or len(request_files) == 0) and force_multipart: + request_files = FORCE_MULTIPART + + json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit) + + data_body = _maybe_filter_none_from_multipart_data(data_body, request_files, force_multipart) + + # Get headers (supports async token providers) + _headers = await self._get_headers() + + # Compute encoded params separately to avoid passing empty list to httpx + # (httpx strips existing query params from URL when params=[] is passed) + _encoded_params = encode_query( + jsonable_encoder( + remove_none_from_dict( + remove_omit_from_dict( + { + **(params if params is not None else {}), + **( + request_options.get("additional_query_parameters", {}) + if request_options is not None + else {} + ), + }, + omit=omit, + ) + ) + ) + ) + + async with self.httpx_client.stream( + method=method, + url=_build_url(base_url, path), + headers=jsonable_encoder( + remove_none_from_dict( + { + **_headers, + **(headers if headers is not None else {}), + **(request_options.get("additional_headers", {}) if request_options is not None else {}), + } + ) + ), + params=_encoded_params if _encoded_params else None, + json=json_body, + data=data_body, + content=content, + files=request_files, + timeout=timeout, + ) as stream: + yield stream diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_response.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_response.py new file mode 100644 index 000000000000..2479747e8bb0 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_response.py @@ -0,0 +1,55 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import Dict, Generic, TypeVar + +import httpx + +# Generic to represent the underlying type of the data wrapped by the HTTP response. 
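# Editorial sketch of how the wrappers below are used (hypothetical values):
#
#     wrapped = HttpResponse(response=raw_httpx_response, data=parsed_body)
#     wrapped.data     # the decoded value
#     wrapped.headers  # dict of the underlying response headers
#     wrapped.close()  # closes the underlying httpx.Response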
+T = TypeVar("T") + + +class BaseHttpResponse: + """Minimalist HTTP response wrapper that exposes response headers.""" + + _response: httpx.Response + + def __init__(self, response: httpx.Response): + self._response = response + + @property + def headers(self) -> Dict[str, str]: + return dict(self._response.headers) + + +class HttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + def close(self) -> None: + self._response.close() + + +class AsyncHttpResponse(Generic[T], BaseHttpResponse): + """HTTP response wrapper that exposes response headers and data.""" + + _data: T + + def __init__(self, response: httpx.Response, data: T): + super().__init__(response) + self._data = data + + @property + def data(self) -> T: + return self._data + + async def close(self) -> None: + await self._response.aclose() diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/__init__.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/__init__.py new file mode 100644 index 000000000000..730e5a3382eb --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/__init__.py @@ -0,0 +1,42 @@ +# This file was auto-generated by Fern from our API Definition. + +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from ._api import EventSource, aconnect_sse, connect_sse + from ._exceptions import SSEError + from ._models import ServerSentEvent +_dynamic_imports: typing.Dict[str, str] = { + "EventSource": "._api", + "SSEError": "._exceptions", + "ServerSentEvent": "._models", + "aconnect_sse": "._api", + "connect_sse": "._api", +} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}") + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e + except AttributeError as e: + raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + + +__all__ = ["EventSource", "SSEError", "ServerSentEvent", "aconnect_sse", "connect_sse"] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_api.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_api.py new file mode 100644 index 000000000000..f900b3b686de --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_api.py @@ -0,0 +1,112 @@ +# This file was auto-generated by Fern from our API Definition. 
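# Editorial usage sketch for the helpers defined below (placeholder URL, not part
# of the generated module):
#
#     import httpx
#     from seed.core.http_sse import connect_sse
#
#     with httpx.Client() as client:
#         with connect_sse(client, "GET", "http://localhost:8080/stream") as source:
#             for sse in source.iter_sse():
#                 print(sse.event, sse.data)
#
# connect_sse sets the Accept and Cache-Control headers, opens a streaming request,
# and wraps the response in an EventSource; aconnect_sse is the async equivalent.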
+ +import re +from contextlib import asynccontextmanager, contextmanager +from typing import Any, AsyncGenerator, AsyncIterator, Iterator, cast + +import httpx +from ._decoders import SSEDecoder +from ._exceptions import SSEError +from ._models import ServerSentEvent + + +class EventSource: + def __init__(self, response: httpx.Response) -> None: + self._response = response + + def _check_content_type(self) -> None: + content_type = self._response.headers.get("content-type", "").partition(";")[0] + if "text/event-stream" not in content_type: + raise SSEError( + f"Expected response header Content-Type to contain 'text/event-stream', got {content_type!r}" + ) + + def _get_charset(self) -> str: + """Extract charset from Content-Type header, fallback to UTF-8.""" + content_type = self._response.headers.get("content-type", "") + + # Parse charset parameter using regex + charset_match = re.search(r"charset=([^;\s]+)", content_type, re.IGNORECASE) + if charset_match: + charset = charset_match.group(1).strip("\"'") + # Validate that it's a known encoding + try: + # Test if the charset is valid by trying to encode/decode + "test".encode(charset).decode(charset) + return charset + except (LookupError, UnicodeError): + # If charset is invalid, fall back to UTF-8 + pass + + # Default to UTF-8 if no charset specified or invalid charset + return "utf-8" + + @property + def response(self) -> httpx.Response: + return self._response + + def iter_sse(self) -> Iterator[ServerSentEvent]: + self._check_content_type() + decoder = SSEDecoder() + charset = self._get_charset() + + buffer = "" + for chunk in self._response.iter_bytes(): + # Decode chunk using detected charset + text_chunk = chunk.decode(charset, errors="replace") + buffer += text_chunk + + # Process complete lines + while "\n" in buffer: + line, buffer = buffer.split("\n", 1) + line = line.rstrip("\r") + sse = decoder.decode(line) + # when we reach a "\n\n" => line = '' + # => decoder will attempt to return an SSE Event + if sse is not None: + yield sse + + # Process any remaining data in buffer + if buffer.strip(): + line = buffer.rstrip("\r") + sse = decoder.decode(line) + if sse is not None: + yield sse + + async def aiter_sse(self) -> AsyncGenerator[ServerSentEvent, None]: + self._check_content_type() + decoder = SSEDecoder() + lines = cast(AsyncGenerator[str, None], self._response.aiter_lines()) + try: + async for line in lines: + line = line.rstrip("\n") + sse = decoder.decode(line) + if sse is not None: + yield sse + finally: + await lines.aclose() + + +@contextmanager +def connect_sse(client: httpx.Client, method: str, url: str, **kwargs: Any) -> Iterator[EventSource]: + headers = kwargs.pop("headers", {}) + headers["Accept"] = "text/event-stream" + headers["Cache-Control"] = "no-store" + + with client.stream(method, url, headers=headers, **kwargs) as response: + yield EventSource(response) + + +@asynccontextmanager +async def aconnect_sse( + client: httpx.AsyncClient, + method: str, + url: str, + **kwargs: Any, +) -> AsyncIterator[EventSource]: + headers = kwargs.pop("headers", {}) + headers["Accept"] = "text/event-stream" + headers["Cache-Control"] = "no-store" + + async with client.stream(method, url, headers=headers, **kwargs) as response: + yield EventSource(response) diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_decoders.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_decoders.py new file mode 100644 index 
000000000000..339b08901381 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_decoders.py @@ -0,0 +1,61 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import List, Optional + +from ._models import ServerSentEvent + + +class SSEDecoder: + def __init__(self) -> None: + self._event = "" + self._data: List[str] = [] + self._last_event_id = "" + self._retry: Optional[int] = None + + def decode(self, line: str) -> Optional[ServerSentEvent]: + # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 + + if not line: + if not self._event and not self._data and not self._last_event_id and self._retry is None: + return None + + sse = ServerSentEvent( + event=self._event, + data="\n".join(self._data), + id=self._last_event_id, + retry=self._retry, + ) + + # NOTE: as per the SSE spec, do not reset last_event_id. + self._event = "" + self._data = [] + self._retry = None + + return sse + + if line.startswith(":"): + return None + + fieldname, _, value = line.partition(":") + + if value.startswith(" "): + value = value[1:] + + if fieldname == "event": + self._event = value + elif fieldname == "data": + self._data.append(value) + elif fieldname == "id": + if "\0" in value: + pass + else: + self._last_event_id = value + elif fieldname == "retry": + try: + self._retry = int(value) + except (TypeError, ValueError): + pass + else: + pass # Field is ignored. + + return None diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_exceptions.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_exceptions.py new file mode 100644 index 000000000000..81605a8a65ed --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_exceptions.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +import httpx + + +class SSEError(httpx.TransportError): + pass diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_models.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_models.py new file mode 100644 index 000000000000..1af57f8fd0d2 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/http_sse/_models.py @@ -0,0 +1,17 @@ +# This file was auto-generated by Fern from our API Definition. + +import json +from dataclasses import dataclass +from typing import Any, Optional + + +@dataclass(frozen=True) +class ServerSentEvent: + event: str = "message" + data: str = "" + id: str = "" + retry: Optional[int] = None + + def json(self) -> Any: + """Parse the data field as JSON.""" + return json.loads(self.data) diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/jsonable_encoder.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/jsonable_encoder.py new file mode 100644 index 000000000000..f8beaeafb17f --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/jsonable_encoder.py @@ -0,0 +1,108 @@ +# This file was auto-generated by Fern from our API Definition. + +""" +jsonable_encoder converts a Python object to a JSON-friendly dict +(e.g. datetimes to strings, Pydantic models to dicts). 
+ +Taken from FastAPI, and made a bit simpler +https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py +""" + +import base64 +import dataclasses +import datetime as dt +from enum import Enum +from pathlib import PurePath +from types import GeneratorType +from typing import Any, Callable, Dict, List, Optional, Set, Union + +import pydantic +from .datetime_utils import serialize_datetime +from .pydantic_utilities import ( + IS_PYDANTIC_V2, + encode_by_type, + to_jsonable_with_fallback, +) + +SetIntStr = Set[Union[int, str]] +DictIntStrAny = Dict[Union[int, str], Any] + + +def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any: + custom_encoder = custom_encoder or {} + # Generated SDKs use Ellipsis (`...`) as the sentinel value for "OMIT". + # OMIT values should be excluded from serialized payloads. + if obj is Ellipsis: + return None + if custom_encoder: + if type(obj) in custom_encoder: + return custom_encoder[type(obj)](obj) + else: + for encoder_type, encoder_instance in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder_instance(obj) + if isinstance(obj, pydantic.BaseModel): + if IS_PYDANTIC_V2: + encoder = getattr(obj.model_config, "json_encoders", {}) # type: ignore # Pydantic v2 + else: + encoder = getattr(obj.__config__, "json_encoders", {}) # type: ignore # Pydantic v1 + if custom_encoder: + encoder.update(custom_encoder) + obj_dict = obj.dict(by_alias=True) + if "__root__" in obj_dict: + obj_dict = obj_dict["__root__"] + if "root" in obj_dict: + obj_dict = obj_dict["root"] + return jsonable_encoder(obj_dict, custom_encoder=encoder) + if dataclasses.is_dataclass(obj): + obj_dict = dataclasses.asdict(obj) # type: ignore + return jsonable_encoder(obj_dict, custom_encoder=custom_encoder) + if isinstance(obj, bytes): + return base64.b64encode(obj).decode("utf-8") + if isinstance(obj, Enum): + return obj.value + if isinstance(obj, PurePath): + return str(obj) + if isinstance(obj, (str, int, float, type(None))): + return obj + if isinstance(obj, dt.datetime): + return serialize_datetime(obj) + if isinstance(obj, dt.date): + return str(obj) + if isinstance(obj, dict): + encoded_dict = {} + allowed_keys = set(obj.keys()) + for key, value in obj.items(): + if key in allowed_keys: + if value is Ellipsis: + continue + encoded_key = jsonable_encoder(key, custom_encoder=custom_encoder) + encoded_value = jsonable_encoder(value, custom_encoder=custom_encoder) + encoded_dict[encoded_key] = encoded_value + return encoded_dict + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)): + encoded_list = [] + for item in obj: + if item is Ellipsis: + continue + encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder)) + return encoded_list + + def fallback_serializer(o: Any) -> Any: + attempt_encode = encode_by_type(o) + if attempt_encode is not None: + return attempt_encode + + try: + data = dict(o) + except Exception as e: + errors: List[Exception] = [] + errors.append(e) + try: + data = vars(o) + except Exception as e: + errors.append(e) + raise ValueError(errors) from e + return jsonable_encoder(data, custom_encoder=custom_encoder) + + return to_jsonable_with_fallback(obj, fallback_serializer) diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/pydantic_utilities.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/pydantic_utilities.py new file mode 100644 index 000000000000..12dc057bb6ae --- /dev/null +++ 
b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/pydantic_utilities.py @@ -0,0 +1,361 @@ +# This file was auto-generated by Fern from our API Definition. + +# nopycln: file +import datetime as dt +import inspect +from collections import defaultdict +from typing import Any, Callable, ClassVar, Dict, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, cast + +import pydantic + +IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.") + +if IS_PYDANTIC_V2: + from pydantic.v1.datetime_parse import parse_date as parse_date + from pydantic.v1.datetime_parse import parse_datetime as parse_datetime + from pydantic.v1.fields import ModelField as ModelField + from pydantic.v1.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[attr-defined] + from pydantic.v1.typing import get_args as get_args + from pydantic.v1.typing import get_origin as get_origin + from pydantic.v1.typing import is_literal_type as is_literal_type + from pydantic.v1.typing import is_union as is_union +else: + from pydantic.datetime_parse import parse_date as parse_date # type: ignore[no-redef] + from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore[no-redef] + from pydantic.fields import ModelField as ModelField # type: ignore[attr-defined, no-redef] + from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore[no-redef] + from pydantic.typing import get_args as get_args # type: ignore[no-redef] + from pydantic.typing import get_origin as get_origin # type: ignore[no-redef] + from pydantic.typing import is_literal_type as is_literal_type # type: ignore[no-redef] + from pydantic.typing import is_union as is_union # type: ignore[no-redef] + +from .datetime_utils import serialize_datetime +from .serialization import convert_and_respect_annotation_metadata +from typing_extensions import TypeAlias + +T = TypeVar("T") +Model = TypeVar("Model", bound=pydantic.BaseModel) + + +def parse_obj_as(type_: Type[T], object_: Any) -> T: + # convert_and_respect_annotation_metadata is required for TypedDict aliasing. + # + # For Pydantic models, whether we should pre-dealias depends on how the model encodes aliasing: + # - If the model uses real Pydantic aliases (pydantic.Field(alias=...)), then we must pass wire keys through + # unchanged so Pydantic can validate them. + # - If the model encodes aliasing only via FieldMetadata annotations, then we MUST pre-dealias because Pydantic + # will not recognize those aliases during validation. 
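    # Editorial illustration of the two cases above (hypothetical models):
    #
    #     class A(UniversalBaseModel):
    #         my_field: str = pydantic.Field(alias="myField")  # real Pydantic alias
    #
    #     class B(UniversalBaseModel):
    #         my_field: typing_extensions.Annotated[str, FieldMetadata(alias="myField")]
    #
    # For A, {"myField": "x"} must reach Pydantic unchanged; for B it must be
    # rewritten to {"my_field": "x"} first, which is what the branch below decides.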
+ if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): + has_pydantic_aliases = False + if IS_PYDANTIC_V2: + for field_name, field_info in getattr(type_, "model_fields", {}).items(): # type: ignore[attr-defined] + alias = getattr(field_info, "alias", None) + if alias is not None and alias != field_name: + has_pydantic_aliases = True + break + else: + for field in getattr(type_, "__fields__", {}).values(): + alias = getattr(field, "alias", None) + name = getattr(field, "name", None) + if alias is not None and name is not None and alias != name: + has_pydantic_aliases = True + break + + dealiased_object = ( + object_ + if has_pydantic_aliases + else convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read") + ) + else: + dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read") + if IS_PYDANTIC_V2: + adapter = pydantic.TypeAdapter(type_) # type: ignore[attr-defined] + return adapter.validate_python(dealiased_object) + return pydantic.parse_obj_as(type_, dealiased_object) + + +def to_jsonable_with_fallback(obj: Any, fallback_serializer: Callable[[Any], Any]) -> Any: + if IS_PYDANTIC_V2: + from pydantic_core import to_jsonable_python + + return to_jsonable_python(obj, fallback=fallback_serializer) + return fallback_serializer(obj) + + +class UniversalBaseModel(pydantic.BaseModel): + if IS_PYDANTIC_V2: + model_config: ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict( # type: ignore[typeddict-unknown-key] + # Allow fields beginning with `model_` to be used in the model + protected_namespaces=(), + ) + + @pydantic.model_validator(mode="before") # type: ignore[attr-defined] + @classmethod + def _coerce_field_names_to_aliases(cls, data: Any) -> Any: + """ + Accept Python field names in input by rewriting them to their Pydantic aliases, + while avoiding silent collisions when a key could refer to multiple fields. + """ + if not isinstance(data, Mapping): + return data + + fields = getattr(cls, "model_fields", {}) # type: ignore[attr-defined] + name_to_alias: Dict[str, str] = {} + alias_to_name: Dict[str, str] = {} + + for name, field_info in fields.items(): + alias = getattr(field_info, "alias", None) or name + name_to_alias[name] = alias + if alias != name: + alias_to_name[alias] = name + + # Detect ambiguous keys: a key that is an alias for one field and a name for another. + ambiguous_keys = set(alias_to_name.keys()).intersection(set(name_to_alias.keys())) + for key in ambiguous_keys: + if key in data and name_to_alias[key] not in data: + raise ValueError( + f"Ambiguous input key '{key}': it is both a field name and an alias. " + "Provide the explicit alias key to disambiguate." 
+ ) + + original_keys = set(data.keys()) + rewritten: Dict[str, Any] = dict(data) + for name, alias in name_to_alias.items(): + if alias != name and name in original_keys and alias not in rewritten: + rewritten[alias] = rewritten.pop(name) + + return rewritten + + @pydantic.model_serializer(mode="plain", when_used="json") # type: ignore[attr-defined] + def serialize_model(self) -> Any: # type: ignore[name-defined] + serialized = self.dict() # type: ignore[attr-defined] + data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()} + return data + + else: + + class Config: + smart_union = True + json_encoders = {dt.datetime: serialize_datetime} + + @pydantic.root_validator(pre=True) + def _coerce_field_names_to_aliases(cls, values: Any) -> Any: + """ + Pydantic v1 equivalent of _coerce_field_names_to_aliases. + """ + if not isinstance(values, Mapping): + return values + + fields = getattr(cls, "__fields__", {}) + name_to_alias: Dict[str, str] = {} + alias_to_name: Dict[str, str] = {} + + for name, field in fields.items(): + alias = getattr(field, "alias", None) or name + name_to_alias[name] = alias + if alias != name: + alias_to_name[alias] = name + + ambiguous_keys = set(alias_to_name.keys()).intersection(set(name_to_alias.keys())) + for key in ambiguous_keys: + if key in values and name_to_alias[key] not in values: + raise ValueError( + f"Ambiguous input key '{key}': it is both a field name and an alias. " + "Provide the explicit alias key to disambiguate." + ) + + original_keys = set(values.keys()) + rewritten: Dict[str, Any] = dict(values) + for name, alias in name_to_alias.items(): + if alias != name and name in original_keys and alias not in rewritten: + rewritten[alias] = rewritten.pop(name) + + return rewritten + + @classmethod + def model_construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": + dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") + return cls.construct(_fields_set, **dealiased_object) + + @classmethod + def construct(cls: Type["Model"], _fields_set: Optional[Set[str]] = None, **values: Any) -> "Model": + dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read") + if IS_PYDANTIC_V2: + return super().model_construct(_fields_set, **dealiased_object) # type: ignore[misc] + return super().construct(_fields_set, **dealiased_object) + + def json(self, **kwargs: Any) -> str: + kwargs_with_defaults = { + "by_alias": True, + "exclude_unset": True, + **kwargs, + } + if IS_PYDANTIC_V2: + return super().model_dump_json(**kwargs_with_defaults) # type: ignore[misc] + return super().json(**kwargs_with_defaults) + + def dict(self, **kwargs: Any) -> Dict[str, Any]: + """ + Override the default dict method to `exclude_unset` by default. This function patches + `exclude_unset` to work include fields within non-None default values. + """ + # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2 + # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice. + # + # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models + # that we have less control over, and this is less intrusive than custom serializers for now. 
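        # Editorial illustration of the default behaviour (hypothetical model):
        #
        #     class Page(UniversalBaseModel):
        #         limit: int = 10
        #         cursor: typing.Optional[str] = None
        #
        #     Page().dict()  ->  {"limit": 10}
        #
        # Non-None defaults are emitted even though they were never set, while the
        # None default for `cursor` is still dropped; both branches below yield the
        # same result for this example.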
+ if IS_PYDANTIC_V2: + kwargs_with_defaults_exclude_unset = { + **kwargs, + "by_alias": True, + "exclude_unset": True, + "exclude_none": False, + } + kwargs_with_defaults_exclude_none = { + **kwargs, + "by_alias": True, + "exclude_none": True, + "exclude_unset": False, + } + dict_dump = deep_union_pydantic_dicts( + super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore[misc] + super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore[misc] + ) + + else: + _fields_set = self.__fields_set__.copy() + + fields = _get_model_fields(self.__class__) + for name, field in fields.items(): + if name not in _fields_set: + default = _get_field_default(field) + + # If the default values are non-null act like they've been set + # This effectively allows exclude_unset to work like exclude_none where + # the latter passes through intentionally set none values. + if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]): + _fields_set.add(name) + + if default is not None: + self.__fields_set__.add(name) + + kwargs_with_defaults_exclude_unset_include_fields = { + "by_alias": True, + "exclude_unset": True, + "include": _fields_set, + **kwargs, + } + + dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields) + + return cast( + Dict[str, Any], + convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write"), + ) + + +def _union_list_of_pydantic_dicts(source: List[Any], destination: List[Any]) -> List[Any]: + converted_list: List[Any] = [] + for i, item in enumerate(source): + destination_value = destination[i] + if isinstance(item, dict): + converted_list.append(deep_union_pydantic_dicts(item, destination_value)) + elif isinstance(item, list): + converted_list.append(_union_list_of_pydantic_dicts(item, destination_value)) + else: + converted_list.append(item) + return converted_list + + +def deep_union_pydantic_dicts(source: Dict[str, Any], destination: Dict[str, Any]) -> Dict[str, Any]: + for key, value in source.items(): + node = destination.setdefault(key, {}) + if isinstance(value, dict): + deep_union_pydantic_dicts(value, node) + # Note: we do not do this same processing for sets given we do not have sets of models + # and given the sets are unordered, the processing of the set and matching objects would + # be non-trivial. 
+ elif isinstance(value, list): + destination[key] = _union_list_of_pydantic_dicts(value, node) + else: + destination[key] = value + + return destination + + +if IS_PYDANTIC_V2: + + class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore[misc, name-defined, type-arg] + pass + + UniversalRootModel: TypeAlias = V2RootModel # type: ignore[misc] +else: + UniversalRootModel: TypeAlias = UniversalBaseModel # type: ignore[misc, no-redef] + + +def encode_by_type(o: Any) -> Any: + encoders_by_class_tuples: Dict[Callable[[Any], Any], Tuple[Any, ...]] = defaultdict(tuple) + for type_, encoder in encoders_by_type.items(): + encoders_by_class_tuples[encoder] += (type_,) + + if type(o) in encoders_by_type: + return encoders_by_type[type(o)](o) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(o, classes_tuple): + return encoder(o) + + +def update_forward_refs(model: Type["Model"], **localns: Any) -> None: + if IS_PYDANTIC_V2: + model.model_rebuild(raise_errors=False) # type: ignore[attr-defined] + else: + model.update_forward_refs(**localns) + + +# Mirrors Pydantic's internal typing +AnyCallable = Callable[..., Any] + + +def universal_root_validator( + pre: bool = False, +) -> Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + # In Pydantic v2, for RootModel we always use "before" mode + # The custom validators transform the input value before the model is created + return cast(AnyCallable, pydantic.model_validator(mode="before")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.root_validator(pre=pre)(func)) # type: ignore[call-overload] + + return decorator + + +def universal_field_validator(field_name: str, pre: bool = False) -> Callable[[AnyCallable], AnyCallable]: + def decorator(func: AnyCallable) -> AnyCallable: + if IS_PYDANTIC_V2: + return cast(AnyCallable, pydantic.field_validator(field_name, mode="before" if pre else "after")(func)) # type: ignore[attr-defined] + return cast(AnyCallable, pydantic.validator(field_name, pre=pre)(func)) + + return decorator + + +PydanticField = Union[ModelField, pydantic.fields.FieldInfo] + + +def _get_model_fields(model: Type["Model"]) -> Mapping[str, PydanticField]: + if IS_PYDANTIC_V2: + return cast(Mapping[str, PydanticField], model.model_fields) # type: ignore[attr-defined] + return cast(Mapping[str, PydanticField], model.__fields__) + + +def _get_field_default(field: PydanticField) -> Any: + try: + value = field.get_default() # type: ignore[union-attr] + except: + value = field.default + if IS_PYDANTIC_V2: + from pydantic_core import PydanticUndefined + + if value == PydanticUndefined: + return None + return value + return value diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/query_encoder.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/query_encoder.py new file mode 100644 index 000000000000..3183001d4046 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/query_encoder.py @@ -0,0 +1,58 @@ +# This file was auto-generated by Fern from our API Definition. 
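# Editorial sketch of the encoding performed below:
#
#     encode_query({"filter": {"status": "active", "tags": ["a", "b"]}})
#     ->  [("filter[status]", "active"), ("filter[tags]", "a"), ("filter[tags]", "b")]
#
# Nested objects become bracketed keys and lists repeat the key once per value.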
+ +from typing import Any, Dict, List, Optional, Tuple + +import pydantic + + +# Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict +def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> List[Tuple[str, Any]]: + result = [] + for k, v in dict_flat.items(): + key = f"{key_prefix}[{k}]" if key_prefix is not None else k + if isinstance(v, dict): + result.extend(traverse_query_dict(v, key)) + elif isinstance(v, list): + for arr_v in v: + if isinstance(arr_v, dict): + result.extend(traverse_query_dict(arr_v, key)) + else: + result.append((key, arr_v)) + else: + result.append((key, v)) + return result + + +def single_query_encoder(query_key: str, query_value: Any) -> List[Tuple[str, Any]]: + if isinstance(query_value, pydantic.BaseModel) or isinstance(query_value, dict): + if isinstance(query_value, pydantic.BaseModel): + obj_dict = query_value.dict(by_alias=True) + else: + obj_dict = query_value + return traverse_query_dict(obj_dict, query_key) + elif isinstance(query_value, list): + encoded_values: List[Tuple[str, Any]] = [] + for value in query_value: + if isinstance(value, pydantic.BaseModel) or isinstance(value, dict): + if isinstance(value, pydantic.BaseModel): + obj_dict = value.dict(by_alias=True) + elif isinstance(value, dict): + obj_dict = value + + encoded_values.extend(single_query_encoder(query_key, obj_dict)) + else: + encoded_values.append((query_key, value)) + + return encoded_values + + return [(query_key, query_value)] + + +def encode_query(query: Optional[Dict[str, Any]]) -> Optional[List[Tuple[str, Any]]]: + if query is None: + return None + + encoded_query = [] + for k, v in query.items(): + encoded_query.extend(single_query_encoder(k, v)) + return encoded_query diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/remove_none_from_dict.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/remove_none_from_dict.py new file mode 100644 index 000000000000..c2298143f14a --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/remove_none_from_dict.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. + +from typing import Any, Dict, Mapping, Optional + + +def remove_none_from_dict(original: Mapping[str, Optional[Any]]) -> Dict[str, Any]: + new: Dict[str, Any] = {} + for key, value in original.items(): + if value is not None: + new[key] = value + return new diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/request_options.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/request_options.py new file mode 100644 index 000000000000..1b38804432ba --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/request_options.py @@ -0,0 +1,35 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +try: + from typing import NotRequired # type: ignore +except ImportError: + from typing_extensions import NotRequired + + +class RequestOptions(typing.TypedDict, total=False): + """ + Additional options for request-specific configuration when calling APIs via the SDK. + This is used primarily as an optional final parameter for service functions. + + Attributes: + - timeout_in_seconds: int. The number of seconds to await an API call before timing out. + + - max_retries: int. 
The max number of retries to attempt if the API call fails. + + - additional_headers: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's header dict + + - additional_query_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's query parameters dict + + - additional_body_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's body parameters dict + + - chunk_size: int. The size, in bytes, to process each chunk of data being streamed back within the response. This equates to leveraging `chunk_size` within `requests` or `httpx`, and is only leveraged for file downloads. + """ + + timeout_in_seconds: NotRequired[int] + max_retries: NotRequired[int] + additional_headers: NotRequired[typing.Dict[str, typing.Any]] + additional_query_parameters: NotRequired[typing.Dict[str, typing.Any]] + additional_body_parameters: NotRequired[typing.Dict[str, typing.Any]] + chunk_size: NotRequired[int] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/serialization.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/serialization.py new file mode 100644 index 000000000000..c36e865cc729 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/core/serialization.py @@ -0,0 +1,276 @@ +# This file was auto-generated by Fern from our API Definition. + +import collections +import inspect +import typing + +import pydantic +import typing_extensions + + +class FieldMetadata: + """ + Metadata class used to annotate fields to provide additional information. + + Example: + class MyDict(TypedDict): + field: typing.Annotated[str, FieldMetadata(alias="field_name")] + + Will serialize: `{"field": "value"}` + To: `{"field_name": "value"}` + """ + + alias: str + + def __init__(self, *, alias: str) -> None: + self.alias = alias + + +def convert_and_respect_annotation_metadata( + *, + object_: typing.Any, + annotation: typing.Any, + inner_type: typing.Optional[typing.Any] = None, + direction: typing.Literal["read", "write"], +) -> typing.Any: + """ + Respect the metadata annotations on a field, such as aliasing. This function effectively + manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for + TypedDicts, which cannot support aliasing out of the box, and can be extended for additional + utilities, such as defaults. 
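+
+    Illustrative example: given a field annotated as
+    ``typing.Annotated[str, FieldMetadata(alias="field_name")]``, a "write"
+    conversion maps ``{"field": "value"}`` to ``{"field_name": "value"}``,
+    while a "read" conversion maps the alias back to the field name.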
+ + Parameters + ---------- + object_ : typing.Any + + annotation : type + The type we're looking to apply typing annotations from + + inner_type : typing.Optional[type] + + Returns + ------- + typing.Any + """ + + if object_ is None: + return None + if inner_type is None: + inner_type = annotation + + clean_type = _remove_annotations(inner_type) + # Pydantic models + if ( + inspect.isclass(clean_type) + and issubclass(clean_type, pydantic.BaseModel) + and isinstance(object_, typing.Mapping) + ): + return _convert_mapping(object_, clean_type, direction) + # TypedDicts + if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping): + return _convert_mapping(object_, clean_type, direction) + + if ( + typing_extensions.get_origin(clean_type) == typing.Dict + or typing_extensions.get_origin(clean_type) == dict + or clean_type == typing.Dict + ) and isinstance(object_, typing.Dict): + key_type = typing_extensions.get_args(clean_type)[0] + value_type = typing_extensions.get_args(clean_type)[1] + + return { + key: convert_and_respect_annotation_metadata( + object_=value, + annotation=annotation, + inner_type=value_type, + direction=direction, + ) + for key, value in object_.items() + } + + # If you're iterating on a string, do not bother to coerce it to a sequence. + if not isinstance(object_, str): + if ( + typing_extensions.get_origin(clean_type) == typing.Set + or typing_extensions.get_origin(clean_type) == set + or clean_type == typing.Set + ) and isinstance(object_, typing.Set): + inner_type = typing_extensions.get_args(clean_type)[0] + return { + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + } + elif ( + ( + typing_extensions.get_origin(clean_type) == typing.List + or typing_extensions.get_origin(clean_type) == list + or clean_type == typing.List + ) + and isinstance(object_, typing.List) + ) or ( + ( + typing_extensions.get_origin(clean_type) == typing.Sequence + or typing_extensions.get_origin(clean_type) == collections.abc.Sequence + or clean_type == typing.Sequence + ) + and isinstance(object_, typing.Sequence) + ): + inner_type = typing_extensions.get_args(clean_type)[0] + return [ + convert_and_respect_annotation_metadata( + object_=item, + annotation=annotation, + inner_type=inner_type, + direction=direction, + ) + for item in object_ + ] + + if typing_extensions.get_origin(clean_type) == typing.Union: + # We should be able to ~relatively~ safely try to convert keys against all + # member types in the union, the edge case here is if one member aliases a field + # of the same name to a different name from another member + # Or if another member aliases a field of the same name that another member does not. + for member in typing_extensions.get_args(clean_type): + object_ = convert_and_respect_annotation_metadata( + object_=object_, + annotation=annotation, + inner_type=member, + direction=direction, + ) + return object_ + + annotated_type = _get_annotation(annotation) + if annotated_type is None: + return object_ + + # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.) + # Then we can safely call it on the recursive conversion. 
+ return object_ + + +def _convert_mapping( + object_: typing.Mapping[str, object], + expected_type: typing.Any, + direction: typing.Literal["read", "write"], +) -> typing.Mapping[str, object]: + converted_object: typing.Dict[str, object] = {} + try: + annotations = typing_extensions.get_type_hints(expected_type, include_extras=True) + except NameError: + # The TypedDict contains a circular reference, so + # we use the __annotations__ attribute directly. + annotations = getattr(expected_type, "__annotations__", {}) + aliases_to_field_names = _get_alias_to_field_name(annotations) + for key, value in object_.items(): + if direction == "read" and key in aliases_to_field_names: + dealiased_key = aliases_to_field_names.get(key) + if dealiased_key is not None: + type_ = annotations.get(dealiased_key) + else: + type_ = annotations.get(key) + # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map + # + # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias + # then we can just pass the value through as is + if type_ is None: + converted_object[key] = value + elif direction == "read" and key not in aliases_to_field_names: + converted_object[key] = convert_and_respect_annotation_metadata( + object_=value, annotation=type_, direction=direction + ) + else: + converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = ( + convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction) + ) + return converted_object + + +def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return None + + if maybe_annotated_type == typing_extensions.NotRequired: + type_ = typing_extensions.get_args(type_)[0] + maybe_annotated_type = typing_extensions.get_origin(type_) + + if maybe_annotated_type == typing_extensions.Annotated: + return type_ + + return None + + +def _remove_annotations(type_: typing.Any) -> typing.Any: + maybe_annotated_type = typing_extensions.get_origin(type_) + if maybe_annotated_type is None: + return type_ + + if maybe_annotated_type == typing_extensions.NotRequired: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + if maybe_annotated_type == typing_extensions.Annotated: + return _remove_annotations(typing_extensions.get_args(type_)[0]) + + return type_ + + +def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_alias_to_field_name(annotations) + + +def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]: + annotations = typing_extensions.get_type_hints(type_, include_extras=True) + return _get_field_to_alias_name(annotations) + + +def _get_alias_to_field_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[maybe_alias] = field + return aliases + + +def _get_field_to_alias_name( + field_to_hint: typing.Dict[str, typing.Any], +) -> typing.Dict[str, str]: + aliases = {} + for field, hint in field_to_hint.items(): + maybe_alias = _get_alias_from_type(hint) + if maybe_alias is not None: + aliases[field] = maybe_alias + return aliases + + +def _get_alias_from_type(type_: typing.Any) -> 
typing.Optional[str]: + maybe_annotated_type = _get_annotation(type_) + + if maybe_annotated_type is not None: + # The actual annotations are 1 onward, the first is the annotated type + annotations = typing_extensions.get_args(maybe_annotated_type)[1:] + + for annotation in annotations: + if isinstance(annotation, FieldMetadata) and annotation.alias is not None: + return annotation.alias + return None + + +def _alias_key( + key: str, + type_: typing.Any, + direction: typing.Literal["read", "write"], + aliases_to_field_names: typing.Dict[str, str], +) -> str: + if direction == "read": + return aliases_to_field_names.get(key, key) + return _get_alias_from_type(type_=type_) or key diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/py.typed b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/py.typed new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/raw_client.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/raw_client.py new file mode 100644 index 000000000000..0baaf3816e54 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/raw_client.py @@ -0,0 +1,262 @@ +# This file was auto-generated by Fern from our API Definition. + +import contextlib +import typing +from json.decoder import JSONDecodeError +from logging import error, warning + +from .core.api_error import ApiError +from .core.client_wrapper import AsyncClientWrapper, SyncClientWrapper +from .core.http_response import AsyncHttpResponse, HttpResponse +from .core.http_sse._api import EventSource +from .core.pydantic_utilities import parse_obj_as +from .core.request_options import RequestOptions +from .types.chat_response import ChatResponse +from .types.chat_stream_event import ChatStreamEvent + +# this is used as the default value for optional parameters +OMIT = typing.cast(typing.Any, ...) + + +class RawSeedApi: + def __init__(self, *, client_wrapper: SyncClientWrapper): + self._client_wrapper = client_wrapper + + @contextlib.contextmanager + def chat_stream( + self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None + ) -> typing.Iterator[HttpResponse[typing.Iterator[ChatStreamEvent]]]: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Yields + ------ + typing.Iterator[HttpResponse[typing.Iterator[ChatStreamEvent]]] + + """ + with self._client_wrapper.httpx_client.stream( + "chat", + method="POST", + json={ + "prompt": prompt, + "stream": True, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) as _response: + + def _stream() -> HttpResponse[typing.Iterator[ChatStreamEvent]]: + try: + if 200 <= _response.status_code < 300: + + def _iter(): + _event_source = EventSource(_response) + for _sse in _event_source.iter_sse(): + if _sse.data == None: + return + try: + yield typing.cast( + ChatStreamEvent, + parse_obj_as( + type_=ChatStreamEvent, # type: ignore + object_=_sse.json(), + ), + ) + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + return + + return HttpResponse(response=_response, data=_iter()) + _response.read() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield _stream() + + def chat( + self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None + ) -> HttpResponse[ChatResponse]: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + HttpResponse[ChatResponse] + + """ + _response = self._client_wrapper.httpx_client.request( + "chat", + method="POST", + json={ + "prompt": prompt, + "stream": False, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ChatResponse, + parse_obj_as( + type_=ChatResponse, # type: ignore + object_=_response.json(), + ), + ) + return HttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + +class AsyncRawSeedApi: + def __init__(self, *, client_wrapper: AsyncClientWrapper): + self._client_wrapper = client_wrapper + + @contextlib.asynccontextmanager + async def chat_stream( + self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None + ) -> typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[ChatStreamEvent]]]: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. 
+ + Yields + ------ + typing.AsyncIterator[AsyncHttpResponse[typing.AsyncIterator[ChatStreamEvent]]] + + """ + async with self._client_wrapper.httpx_client.stream( + "chat", + method="POST", + json={ + "prompt": prompt, + "stream": True, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) as _response: + + async def _stream() -> AsyncHttpResponse[typing.AsyncIterator[ChatStreamEvent]]: + try: + if 200 <= _response.status_code < 300: + + async def _iter(): + _event_source = EventSource(_response) + async for _sse in _event_source.aiter_sse(): + if _sse.data == None: + return + try: + yield typing.cast( + ChatStreamEvent, + parse_obj_as( + type_=ChatStreamEvent, # type: ignore + object_=_sse.json(), + ), + ) + except JSONDecodeError as e: + warning(f"Skipping SSE event with invalid JSON: {e}, sse: {_sse!r}") + except (TypeError, ValueError, KeyError, AttributeError) as e: + warning( + f"Skipping SSE event due to model construction error: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + except Exception as e: + error( + f"Unexpected error processing SSE event: {type(e).__name__}: {e}, sse: {_sse!r}" + ) + return + + return AsyncHttpResponse(response=_response, data=_iter()) + await _response.aread() + _response_json = _response.json() + except JSONDecodeError: + raise ApiError( + status_code=_response.status_code, headers=dict(_response.headers), body=_response.text + ) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) + + yield await _stream() + + async def chat( + self, *, prompt: str, request_options: typing.Optional[RequestOptions] = None + ) -> AsyncHttpResponse[ChatResponse]: + """ + Parameters + ---------- + prompt : str + The user's message + + request_options : typing.Optional[RequestOptions] + Request-specific configuration. + + Returns + ------- + AsyncHttpResponse[ChatResponse] + + """ + _response = await self._client_wrapper.httpx_client.request( + "chat", + method="POST", + json={ + "prompt": prompt, + "stream": False, + }, + headers={ + "content-type": "application/json", + }, + request_options=request_options, + omit=OMIT, + ) + try: + if 200 <= _response.status_code < 300: + _data = typing.cast( + ChatResponse, + parse_obj_as( + type_=ChatResponse, # type: ignore + object_=_response.json(), + ), + ) + return AsyncHttpResponse(response=_response, data=_data) + _response_json = _response.json() + except JSONDecodeError: + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response.text) + raise ApiError(status_code=_response.status_code, headers=dict(_response.headers), body=_response_json) diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/__init__.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/__init__.py new file mode 100644 index 000000000000..77fadcadef2f --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/__init__.py @@ -0,0 +1,35 @@ +# This file was auto-generated by Fern from our API Definition. 
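+#
+# Note: symbols are resolved lazily through the module-level __getattr__ below
+# (PEP 562), so importing this package does not eagerly import every type module.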
+ +# isort: skip_file + +import typing +from importlib import import_module + +if typing.TYPE_CHECKING: + from .chat_response import ChatResponse + from .chat_stream_event import ChatStreamEvent +_dynamic_imports: typing.Dict[str, str] = {"ChatResponse": ".chat_response", "ChatStreamEvent": ".chat_stream_event"} + + +def __getattr__(attr_name: str) -> typing.Any: + module_name = _dynamic_imports.get(attr_name) + if module_name is None: + raise AttributeError(f"No {attr_name} found in _dynamic_imports for module name -> {__name__}") + try: + module = import_module(module_name, __package__) + if module_name == f".{attr_name}": + return module + else: + return getattr(module, attr_name) + except ImportError as e: + raise ImportError(f"Failed to import {attr_name} from {module_name}: {e}") from e + except AttributeError as e: + raise AttributeError(f"Failed to get {attr_name} from {module_name}: {e}") from e + + +def __dir__(): + lazy_attrs = list(_dynamic_imports.keys()) + return sorted(lazy_attrs) + + +__all__ = ["ChatResponse", "ChatStreamEvent"] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/chat_response.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/chat_response.py new file mode 100644 index 000000000000..d2667015af05 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/chat_response.py @@ -0,0 +1,20 @@ +# This file was auto-generated by Fern from our API Definition. + +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class ChatResponse(UniversalBaseModel): + message: typing.Optional[str] = None + finish_reason: typing.Optional[str] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/chat_stream_event.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/chat_stream_event.py new file mode 100644 index 000000000000..a5d965676db0 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/types/chat_stream_event.py @@ -0,0 +1,20 @@ +# This file was auto-generated by Fern from our API Definition. 
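+#
+# Note: the model below uses extra="allow" and frozen=True, so unrecognized SSE
+# fields are kept on the parsed event rather than rejected, and instances are
+# immutable once constructed.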
+ +import typing + +import pydantic +from ..core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel + + +class ChatStreamEvent(UniversalBaseModel): + delta: typing.Optional[str] = None + tokens: typing.Optional[int] = None + + if IS_PYDANTIC_V2: + model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2 + else: + + class Config: + frozen = True + smart_union = True + extra = pydantic.Extra.allow diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/version.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/version.py new file mode 100644 index 000000000000..b227ec71c3fc --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/src/seed/version.py @@ -0,0 +1,3 @@ +from importlib import metadata + +__version__ = metadata.version("fern_python-streaming-parameter-openapi") diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/conftest.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/conftest.py new file mode 100644 index 000000000000..dd48d29f8ee3 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/conftest.py @@ -0,0 +1,133 @@ +""" +Pytest plugin that manages the WireMock container lifecycle for wire tests. + +This plugin is loaded globally for the test suite and is responsible for +starting and stopping the WireMock container exactly once per test run, +including when running with pytest-xdist over the entire project. + +It lives under tests/ (as tests/conftest.py) and is discovered automatically +by pytest's normal test collection rules. +""" + +import os +import subprocess + +import pytest + +_STARTED: bool = False +_WIREMOCK_PORT: str = "8080" # Default, will be updated after container starts + + +def _compose_file() -> str: + """Returns the path to the docker-compose file for WireMock.""" + # This file lives in tests/conftest.py, so the project root is the parent of tests. 
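+    # i.e. the compose file is expected at <project-root>/wiremock/docker-compose.test.yml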
+ tests_dir = os.path.dirname(__file__) + project_root = os.path.abspath(os.path.join(tests_dir, "..")) + wiremock_dir = os.path.join(project_root, "wiremock") + return os.path.join(wiremock_dir, "docker-compose.test.yml") + + +def _project_name() -> str: + """Returns a unique project name for this test fixture to avoid container name conflicts.""" + tests_dir = os.path.dirname(__file__) + project_root = os.path.abspath(os.path.join(tests_dir, "..")) + # Use the last two directory names to create a unique project name + # e.g., "python-streaming-parameter-openapi-with-wire-tests" + parent = os.path.basename(os.path.dirname(project_root)) + current = os.path.basename(project_root) + return f"{parent}-{current}".replace("_", "-").lower() + + +def _get_wiremock_port() -> str: + """Gets the dynamically assigned port for the WireMock container.""" + compose_file = _compose_file() + project = _project_name() + try: + result = subprocess.run( + ["docker", "compose", "-f", compose_file, "-p", project, "port", "wiremock", "8080"], + check=True, + capture_output=True, + text=True, + ) + # Output is like "0.0.0.0:32768" or "[::]:32768" + port = result.stdout.strip().split(":")[-1] + return port + except subprocess.CalledProcessError: + return "8080" # Fallback to default + + +def _start_wiremock() -> None: + """Starts the WireMock container using docker-compose.""" + global _STARTED, _WIREMOCK_PORT + if _STARTED: + return + + compose_file = _compose_file() + project = _project_name() + print(f"\nStarting WireMock container (project: {project})...") + try: + subprocess.run( + ["docker", "compose", "-f", compose_file, "-p", project, "up", "-d", "--wait"], + check=True, + capture_output=True, + text=True, + ) + _WIREMOCK_PORT = _get_wiremock_port() + os.environ["WIREMOCK_PORT"] = _WIREMOCK_PORT + print(f"WireMock container is ready on port {_WIREMOCK_PORT}") + _STARTED = True + except subprocess.CalledProcessError as e: + print(f"Failed to start WireMock: {e.stderr}") + raise + + +def _stop_wiremock() -> None: + """Stops and removes the WireMock container.""" + compose_file = _compose_file() + project = _project_name() + print("\nStopping WireMock container...") + subprocess.run( + ["docker", "compose", "-f", compose_file, "-p", project, "down", "-v"], + check=False, + capture_output=True, + ) + + +def _is_xdist_worker(config: pytest.Config) -> bool: + """ + Determines if the current process is an xdist worker. + + In pytest-xdist, worker processes have a 'workerinput' attribute + on the config object, while the controller process does not. + """ + return hasattr(config, "workerinput") + + +def pytest_configure(config: pytest.Config) -> None: + """ + Pytest hook that runs during test session setup. + + Starts WireMock container only from the controller process (xdist) + or the single process (non-xdist). This ensures only one container + is started regardless of the number of worker processes. + """ + if _is_xdist_worker(config): + # Workers never manage the container lifecycle. + return + + _start_wiremock() + + +def pytest_unconfigure(config: pytest.Config) -> None: + """ + Pytest hook that runs during test session teardown. + + Stops WireMock container only from the controller process (xdist) + or the single process (non-xdist). This ensures the container is + cleaned up after all workers have finished. + """ + if _is_xdist_worker(config): + # Workers never manage the container lifecycle. 
+ return + + _stop_wiremock() diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/custom/test_client.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/custom/test_client.py new file mode 100644 index 000000000000..ab04ce6393ef --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/custom/test_client.py @@ -0,0 +1,7 @@ +import pytest + + +# Get started with writing tests with pytest at https://docs.pytest.org +@pytest.mark.skip(reason="Unimplemented") +def test_client() -> None: + assert True diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/__init__.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/__init__.py new file mode 100644 index 000000000000..f3ea2659bb1c --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/__init__.py @@ -0,0 +1,2 @@ +# This file was auto-generated by Fern from our API Definition. + diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/__init__.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/__init__.py new file mode 100644 index 000000000000..2cf01263529d --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/__init__.py @@ -0,0 +1,21 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. + +from .circle import CircleParams +from .object_with_defaults import ObjectWithDefaultsParams +from .object_with_optional_field import ObjectWithOptionalFieldParams +from .shape import Shape_CircleParams, Shape_SquareParams, ShapeParams +from .square import SquareParams +from .undiscriminated_shape import UndiscriminatedShapeParams + +__all__ = [ + "CircleParams", + "ObjectWithDefaultsParams", + "ObjectWithOptionalFieldParams", + "ShapeParams", + "Shape_CircleParams", + "Shape_SquareParams", + "SquareParams", + "UndiscriminatedShapeParams", +] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/circle.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/circle.py new file mode 100644 index 000000000000..74ecf38c308b --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/circle.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. + +import typing_extensions + +from seed.core.serialization import FieldMetadata + + +class CircleParams(typing_extensions.TypedDict): + radius_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="radiusMeasurement")] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/color.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/color.py new file mode 100644 index 000000000000..2aa2c4c52f0c --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/color.py @@ -0,0 +1,7 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. 
+ +import typing + +Color = typing.Union[typing.Literal["red", "blue"], typing.Any] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/object_with_defaults.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/object_with_defaults.py new file mode 100644 index 000000000000..a977b1d2aa1c --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/object_with_defaults.py @@ -0,0 +1,15 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. + +import typing_extensions + + +class ObjectWithDefaultsParams(typing_extensions.TypedDict): + """ + Defines properties with default values and validation rules. + """ + + decimal: typing_extensions.NotRequired[float] + string: typing_extensions.NotRequired[str] + required_string: str diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/object_with_optional_field.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/object_with_optional_field.py new file mode 100644 index 000000000000..6b5608bc05b6 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/object_with_optional_field.py @@ -0,0 +1,35 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. + +import datetime as dt +import typing +import uuid + +import typing_extensions +from .color import Color +from .shape import ShapeParams +from .undiscriminated_shape import UndiscriminatedShapeParams + +from seed.core.serialization import FieldMetadata + + +class ObjectWithOptionalFieldParams(typing_extensions.TypedDict): + literal: typing.Literal["lit_one"] + string: typing_extensions.NotRequired[str] + integer: typing_extensions.NotRequired[int] + long_: typing_extensions.NotRequired[typing_extensions.Annotated[int, FieldMetadata(alias="long")]] + double: typing_extensions.NotRequired[float] + bool_: typing_extensions.NotRequired[typing_extensions.Annotated[bool, FieldMetadata(alias="bool")]] + datetime: typing_extensions.NotRequired[dt.datetime] + date: typing_extensions.NotRequired[dt.date] + uuid_: typing_extensions.NotRequired[typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="uuid")]] + base_64: typing_extensions.NotRequired[typing_extensions.Annotated[str, FieldMetadata(alias="base64")]] + list_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Sequence[str], FieldMetadata(alias="list")]] + set_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Set[str], FieldMetadata(alias="set")]] + map_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Dict[int, str], FieldMetadata(alias="map")]] + enum: typing_extensions.NotRequired[Color] + union: typing_extensions.NotRequired[ShapeParams] + second_union: typing_extensions.NotRequired[ShapeParams] + undiscriminated_union: typing_extensions.NotRequired[UndiscriminatedShapeParams] + any: typing.Optional[typing.Any] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/shape.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/shape.py new file mode 100644 index 000000000000..7e70010a251f --- /dev/null +++ 
b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/shape.py @@ -0,0 +1,28 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. + +from __future__ import annotations + +import typing + +import typing_extensions + +from seed.core.serialization import FieldMetadata + + +class Base(typing_extensions.TypedDict): + id: str + + +class Shape_CircleParams(Base): + shape_type: typing_extensions.Annotated[typing.Literal["circle"], FieldMetadata(alias="shapeType")] + radius_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="radiusMeasurement")] + + +class Shape_SquareParams(Base): + shape_type: typing_extensions.Annotated[typing.Literal["square"], FieldMetadata(alias="shapeType")] + length_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="lengthMeasurement")] + + +ShapeParams = typing.Union[Shape_CircleParams, Shape_SquareParams] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/square.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/square.py new file mode 100644 index 000000000000..71c7d25fd4ad --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/square.py @@ -0,0 +1,11 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. + +import typing_extensions + +from seed.core.serialization import FieldMetadata + + +class SquareParams(typing_extensions.TypedDict): + length_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="lengthMeasurement")] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/undiscriminated_shape.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/undiscriminated_shape.py new file mode 100644 index 000000000000..99f12b300d1d --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/assets/models/undiscriminated_shape.py @@ -0,0 +1,10 @@ +# This file was auto-generated by Fern from our API Definition. + +# This file was auto-generated by Fern from our API Definition. + +import typing + +from .circle import CircleParams +from .square import SquareParams + +UndiscriminatedShapeParams = typing.Union[CircleParams, SquareParams] diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_http_client.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_http_client.py new file mode 100644 index 000000000000..ea631e94edb8 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_http_client.py @@ -0,0 +1,300 @@ +# This file was auto-generated by Fern from our API Definition. 
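+#
+# These tests exercise the request-body helpers, query-parameter handling, and
+# URL building of HttpClient/AsyncHttpClient against minimal httpx stand-ins
+# defined below.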
+ +from typing import Any, Dict + +import pytest + +from seed.core.http_client import ( + AsyncHttpClient, + HttpClient, + _build_url, + get_request_body, + remove_none_from_dict, +) +from seed.core.request_options import RequestOptions + + +# Stub clients for testing HttpClient and AsyncHttpClient +class _DummySyncClient: + """A minimal stub for httpx.Client that records request arguments.""" + + def __init__(self) -> None: + self.last_request_kwargs: Dict[str, Any] = {} + + def request(self, **kwargs: Any) -> "_DummyResponse": + self.last_request_kwargs = kwargs + return _DummyResponse() + + +class _DummyAsyncClient: + """A minimal stub for httpx.AsyncClient that records request arguments.""" + + def __init__(self) -> None: + self.last_request_kwargs: Dict[str, Any] = {} + + async def request(self, **kwargs: Any) -> "_DummyResponse": + self.last_request_kwargs = kwargs + return _DummyResponse() + + +class _DummyResponse: + """A minimal stub for httpx.Response.""" + + status_code = 200 + headers: Dict[str, str] = {} + + +def get_request_options() -> RequestOptions: + return {"additional_body_parameters": {"see you": "later"}} + + +def get_request_options_with_none() -> RequestOptions: + return {"additional_body_parameters": {"see you": "later", "optional": None}} + + +def test_get_json_request_body() -> None: + json_body, data_body = get_request_body(json={"hello": "world"}, data=None, request_options=None, omit=None) + assert json_body == {"hello": "world"} + assert data_body is None + + json_body_extras, data_body_extras = get_request_body( + json={"goodbye": "world"}, data=None, request_options=get_request_options(), omit=None + ) + + assert json_body_extras == {"goodbye": "world", "see you": "later"} + assert data_body_extras is None + + +def test_get_files_request_body() -> None: + json_body, data_body = get_request_body(json=None, data={"hello": "world"}, request_options=None, omit=None) + assert data_body == {"hello": "world"} + assert json_body is None + + json_body_extras, data_body_extras = get_request_body( + json=None, data={"goodbye": "world"}, request_options=get_request_options(), omit=None + ) + + assert data_body_extras == {"goodbye": "world", "see you": "later"} + assert json_body_extras is None + + +def test_get_none_request_body() -> None: + json_body, data_body = get_request_body(json=None, data=None, request_options=None, omit=None) + assert data_body is None + assert json_body is None + + json_body_extras, data_body_extras = get_request_body( + json=None, data=None, request_options=get_request_options(), omit=None + ) + + assert json_body_extras == {"see you": "later"} + assert data_body_extras is None + + +def test_get_empty_json_request_body() -> None: + """Test that implicit empty bodies (json=None) are collapsed to None.""" + unrelated_request_options: RequestOptions = {"max_retries": 3} + json_body, data_body = get_request_body(json=None, data=None, request_options=unrelated_request_options, omit=None) + assert json_body is None + assert data_body is None + + +def test_explicit_empty_json_body_is_preserved() -> None: + """Test that explicit empty bodies (json={}) are preserved and sent as {}. + + This is important for endpoints where the request body is required but all + fields are optional. The server expects valid JSON ({}) not an empty body. 
+ """ + unrelated_request_options: RequestOptions = {"max_retries": 3} + + # Explicit json={} should be preserved + json_body, data_body = get_request_body(json={}, data=None, request_options=unrelated_request_options, omit=None) + assert json_body == {} + assert data_body is None + + # Explicit data={} should also be preserved + json_body2, data_body2 = get_request_body(json=None, data={}, request_options=unrelated_request_options, omit=None) + assert json_body2 is None + assert data_body2 == {} + + +def test_json_body_preserves_none_values() -> None: + """Test that JSON bodies preserve None values (they become JSON null).""" + json_body, data_body = get_request_body( + json={"hello": "world", "optional": None}, data=None, request_options=None, omit=None + ) + # JSON bodies should preserve None values + assert json_body == {"hello": "world", "optional": None} + assert data_body is None + + +def test_data_body_preserves_none_values_without_multipart() -> None: + """Test that data bodies preserve None values when not using multipart. + + The filtering of None values happens in HttpClient.request/stream methods, + not in get_request_body. This test verifies get_request_body doesn't filter None. + """ + json_body, data_body = get_request_body( + json=None, data={"hello": "world", "optional": None}, request_options=None, omit=None + ) + # get_request_body should preserve None values in data body + # The filtering happens later in HttpClient.request when multipart is detected + assert data_body == {"hello": "world", "optional": None} + assert json_body is None + + +def test_remove_none_from_dict_filters_none_values() -> None: + """Test that remove_none_from_dict correctly filters out None values.""" + original = {"hello": "world", "optional": None, "another": "value", "also_none": None} + filtered = remove_none_from_dict(original) + assert filtered == {"hello": "world", "another": "value"} + # Original should not be modified + assert original == {"hello": "world", "optional": None, "another": "value", "also_none": None} + + +def test_remove_none_from_dict_empty_dict() -> None: + """Test that remove_none_from_dict handles empty dict.""" + assert remove_none_from_dict({}) == {} + + +def test_remove_none_from_dict_all_none() -> None: + """Test that remove_none_from_dict handles dict with all None values.""" + assert remove_none_from_dict({"a": None, "b": None}) == {} + + +def test_http_client_does_not_pass_empty_params_list() -> None: + """Test that HttpClient passes params=None when params are empty. + + This prevents httpx from stripping existing query parameters from the URL, + which happens when params=[] or params={} is passed. 
+ """ + dummy_client = _DummySyncClient() + http_client = HttpClient( + httpx_client=dummy_client, # type: ignore[arg-type] + base_timeout=lambda: None, + base_headers=lambda: {}, + base_url=lambda: "https://example.com", + ) + + # Use a path with query params (e.g., pagination cursor URL) + http_client.request( + path="resource?after=123", + method="GET", + params=None, + request_options=None, + ) + + # We care that httpx receives params=None, not [] or {} + assert "params" in dummy_client.last_request_kwargs + assert dummy_client.last_request_kwargs["params"] is None + + # Verify the query string in the URL is preserved + url = str(dummy_client.last_request_kwargs["url"]) + assert "after=123" in url, f"Expected query param 'after=123' in URL, got: {url}" + + +def test_http_client_passes_encoded_params_when_present() -> None: + """Test that HttpClient passes encoded params when params are provided.""" + dummy_client = _DummySyncClient() + http_client = HttpClient( + httpx_client=dummy_client, # type: ignore[arg-type] + base_timeout=lambda: None, + base_headers=lambda: {}, + base_url=lambda: "https://example.com/resource", + ) + + http_client.request( + path="", + method="GET", + params={"after": "456"}, + request_options=None, + ) + + params = dummy_client.last_request_kwargs["params"] + # For a simple dict, encode_query should give a single (key, value) tuple + assert params == [("after", "456")] + + +@pytest.mark.asyncio +async def test_async_http_client_does_not_pass_empty_params_list() -> None: + """Test that AsyncHttpClient passes params=None when params are empty. + + This prevents httpx from stripping existing query parameters from the URL, + which happens when params=[] or params={} is passed. + """ + dummy_client = _DummyAsyncClient() + http_client = AsyncHttpClient( + httpx_client=dummy_client, # type: ignore[arg-type] + base_timeout=lambda: None, + base_headers=lambda: {}, + base_url=lambda: "https://example.com", + async_base_headers=None, + ) + + # Use a path with query params (e.g., pagination cursor URL) + await http_client.request( + path="resource?after=123", + method="GET", + params=None, + request_options=None, + ) + + # We care that httpx receives params=None, not [] or {} + assert "params" in dummy_client.last_request_kwargs + assert dummy_client.last_request_kwargs["params"] is None + + # Verify the query string in the URL is preserved + url = str(dummy_client.last_request_kwargs["url"]) + assert "after=123" in url, f"Expected query param 'after=123' in URL, got: {url}" + + +@pytest.mark.asyncio +async def test_async_http_client_passes_encoded_params_when_present() -> None: + """Test that AsyncHttpClient passes encoded params when params are provided.""" + dummy_client = _DummyAsyncClient() + http_client = AsyncHttpClient( + httpx_client=dummy_client, # type: ignore[arg-type] + base_timeout=lambda: None, + base_headers=lambda: {}, + base_url=lambda: "https://example.com/resource", + async_base_headers=None, + ) + + await http_client.request( + path="", + method="GET", + params={"after": "456"}, + request_options=None, + ) + + params = dummy_client.last_request_kwargs["params"] + # For a simple dict, encode_query should give a single (key, value) tuple + assert params == [("after", "456")] + + +def test_basic_url_joining() -> None: + """Test basic URL joining with a simple base URL and path.""" + result = _build_url("https://api.example.com", "/users") + assert result == "https://api.example.com/users" + + +def test_basic_url_joining_trailing_slash() -> None: + 
"""Test basic URL joining with a simple base URL and path.""" + result = _build_url("https://api.example.com/", "/users") + assert result == "https://api.example.com/users" + + +def test_preserves_base_url_path_prefix() -> None: + """Test that path prefixes in base URL are preserved. + + This is the critical bug fix - urllib.parse.urljoin() would strip + the path prefix when the path starts with '/'. + """ + result = _build_url("https://cloud.example.com/org/tenant/api", "/users") + assert result == "https://cloud.example.com/org/tenant/api/users" + + +def test_preserves_base_url_path_prefix_trailing_slash() -> None: + """Test that path prefixes in base URL are preserved.""" + result = _build_url("https://cloud.example.com/org/tenant/api/", "/users") + assert result == "https://cloud.example.com/org/tenant/api/users" diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_query_encoding.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_query_encoding.py new file mode 100644 index 000000000000..ef5fd7094f9b --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_query_encoding.py @@ -0,0 +1,36 @@ +# This file was auto-generated by Fern from our API Definition. + +from seed.core.query_encoder import encode_query + + +def test_query_encoding_deep_objects() -> None: + assert encode_query({"hello world": "hello world"}) == [("hello world", "hello world")] + assert encode_query({"hello_world": {"hello": "world"}}) == [("hello_world[hello]", "world")] + assert encode_query({"hello_world": {"hello": {"world": "today"}, "test": "this"}, "hi": "there"}) == [ + ("hello_world[hello][world]", "today"), + ("hello_world[test]", "this"), + ("hi", "there"), + ] + + +def test_query_encoding_deep_object_arrays() -> None: + assert encode_query({"objects": [{"key": "hello", "value": "world"}, {"key": "foo", "value": "bar"}]}) == [ + ("objects[key]", "hello"), + ("objects[value]", "world"), + ("objects[key]", "foo"), + ("objects[value]", "bar"), + ] + assert encode_query( + {"users": [{"name": "string", "tags": ["string"]}, {"name": "string2", "tags": ["string2", "string3"]}]} + ) == [ + ("users[name]", "string"), + ("users[tags]", "string"), + ("users[name]", "string2"), + ("users[tags]", "string2"), + ("users[tags]", "string3"), + ] + + +def test_encode_query_with_none() -> None: + encoded = encode_query(None) + assert encoded is None diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_serialization.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_serialization.py new file mode 100644 index 000000000000..b298db89c4bd --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/utils/test_serialization.py @@ -0,0 +1,72 @@ +# This file was auto-generated by Fern from our API Definition. 
+ +from typing import Any, List + +from .assets.models import ObjectWithOptionalFieldParams, ShapeParams + +from seed.core.serialization import convert_and_respect_annotation_metadata + +UNION_TEST: ShapeParams = {"radius_measurement": 1.0, "shape_type": "circle", "id": "1"} +UNION_TEST_CONVERTED = {"shapeType": "circle", "radiusMeasurement": 1.0, "id": "1"} + + +def test_convert_and_respect_annotation_metadata() -> None: + data: ObjectWithOptionalFieldParams = { + "string": "string", + "long_": 12345, + "bool_": True, + "literal": "lit_one", + "any": "any", + } + converted = convert_and_respect_annotation_metadata( + object_=data, annotation=ObjectWithOptionalFieldParams, direction="write" + ) + assert converted == {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"} + + +def test_convert_and_respect_annotation_metadata_in_list() -> None: + data: List[ObjectWithOptionalFieldParams] = [ + {"string": "string", "long_": 12345, "bool_": True, "literal": "lit_one", "any": "any"}, + {"string": "another string", "long_": 67890, "list_": [], "literal": "lit_one", "any": "any"}, + ] + converted = convert_and_respect_annotation_metadata( + object_=data, annotation=List[ObjectWithOptionalFieldParams], direction="write" + ) + + assert converted == [ + {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"}, + {"string": "another string", "long": 67890, "list": [], "literal": "lit_one", "any": "any"}, + ] + + +def test_convert_and_respect_annotation_metadata_in_nested_object() -> None: + data: ObjectWithOptionalFieldParams = { + "string": "string", + "long_": 12345, + "union": UNION_TEST, + "literal": "lit_one", + "any": "any", + } + converted = convert_and_respect_annotation_metadata( + object_=data, annotation=ObjectWithOptionalFieldParams, direction="write" + ) + + assert converted == { + "string": "string", + "long": 12345, + "union": UNION_TEST_CONVERTED, + "literal": "lit_one", + "any": "any", + } + + +def test_convert_and_respect_annotation_metadata_in_union() -> None: + converted = convert_and_respect_annotation_metadata(object_=UNION_TEST, annotation=ShapeParams, direction="write") + + assert converted == UNION_TEST_CONVERTED + + +def test_convert_and_respect_annotation_metadata_with_empty_object() -> None: + data: Any = {} + converted = convert_and_respect_annotation_metadata(object_=data, annotation=ShapeParams, direction="write") + assert converted == data diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/__init__.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/conftest.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/conftest.py new file mode 100644 index 000000000000..72eae059a383 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/conftest.py @@ -0,0 +1,79 @@ +""" +Pytest configuration for wire tests. + +This module provides helpers for creating a configured client that talks to +WireMock and for verifying requests in WireMock. + +The WireMock container lifecycle itself is managed by a top-level pytest +plugin (wiremock_pytest_plugin.py) so that the container is started exactly +once per test run, even when using pytest-xdist. 
+""" + +import inspect +import os +from typing import Any, Dict, Optional + +import requests + +from seed.client import SeedApi + + +def _get_wiremock_base_url() -> str: + """Returns the WireMock base URL using the dynamically assigned port.""" + port = os.environ.get("WIREMOCK_PORT", "8080") + return f"http://localhost:{port}" + + +def get_client(test_id: str) -> SeedApi: + """ + Creates a configured client instance for wire tests. + + Args: + test_id: Unique identifier for the test, used for request tracking. + + Returns: + A configured client instance with all required auth parameters. + """ + test_headers = {"X-Test-Id": test_id} + base_url = _get_wiremock_base_url() + + # Prefer passing headers directly if the client constructor supports it. + try: + if "headers" in inspect.signature(SeedApi).parameters: + return SeedApi( + base_url=base_url, + headers=test_headers, + ) + except (TypeError, ValueError): + pass + + import httpx + + return SeedApi( + base_url=base_url, + httpx_client=httpx.Client(headers=test_headers), + ) + + +def verify_request_count( + test_id: str, + method: str, + url_path: str, + query_params: Optional[Dict[str, str]], + expected: int, +) -> None: + """Verifies the number of requests made to WireMock filtered by test ID for concurrency safety""" + wiremock_admin_url = f"{_get_wiremock_base_url()}/__admin" + request_body: Dict[str, Any] = { + "method": method, + "urlPath": url_path, + "headers": {"X-Test-Id": {"equalTo": test_id}}, + } + if query_params: + query_parameters = {k: {"equalTo": v} for k, v in query_params.items()} + request_body["queryParameters"] = query_parameters + response = requests.post(f"{wiremock_admin_url}/requests/find", json=request_body) + assert response.status_code == 200, "Failed to query WireMock requests" + result = response.json() + requests_found = len(result.get("requests", [])) + assert requests_found == expected, f"Expected {expected} requests, found {requests_found}" diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/test_.py b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/test_.py new file mode 100644 index 000000000000..165eb70e4028 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/tests/wire/test_.py @@ -0,0 +1,18 @@ +from .conftest import get_client, verify_request_count + + +def test__chat_stream() -> None: + """Test chat_stream endpoint with WireMock""" + test_id = "chat_stream.0" + client = get_client(test_id) + for _ in client.chat_stream(prompt="prompt"): + pass + verify_request_count(test_id, "POST", "/chat", None, 1) + + +def test__chat() -> None: + """Test chat endpoint with WireMock""" + test_id = "chat.0" + client = get_client(test_id) + client.chat(prompt="Hello") + verify_request_count(test_id, "POST", "/chat", None, 1) diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/wiremock/docker-compose.test.yml b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/wiremock/docker-compose.test.yml new file mode 100644 index 000000000000..58747d54a46b --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/wiremock/docker-compose.test.yml @@ -0,0 +1,14 @@ +services: + wiremock: + image: wiremock/wiremock:3.9.1 + ports: + - "0:8080" # Use dynamic port to avoid conflicts with concurrent tests + volumes: + - ./wiremock-mappings.json:/home/wiremock/mappings/wiremock-mappings.json + command: ["--global-response-templating", "--verbose"] + 
healthcheck: + test: ["CMD", "curl", "-f", "http://localhost:8080/__admin/health"] + interval: 2s + timeout: 5s + retries: 15 + start_period: 5s diff --git a/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/wiremock/wiremock-mappings.json b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/wiremock/wiremock-mappings.json new file mode 100644 index 000000000000..c1790d76cc25 --- /dev/null +++ b/seed/python-sdk/python-streaming-parameter-openapi/with-wire-tests/wiremock/wiremock-mappings.json @@ -0,0 +1 @@ +{"mappings":[{"id":"4f5c0edf-44aa-4cf1-b7db-9eaad0d0f376","name":"Chat endpoint with streaming support - default","request":{"urlPathTemplate":"/chat","method":"POST","bodyPatterns":[{"matchesJsonPath":"$[?(@.stream == true)]"}]},"response":{"status":200,"body":"event: message\ndata: {\"delta\":\"delta\",\"tokens\":1}\n","headers":{"Content-Type":"text/event-stream"}},"uuid":"4f5c0edf-44aa-4cf1-b7db-9eaad0d0f376","persistent":true,"priority":2,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}},{"id":"3b14b290-771f-4f23-a7c0-030362b5cc32","name":"Chat endpoint with streaming support - default","request":{"urlPathTemplate":"/chat","method":"POST"},"response":{"status":200,"body":"{\n \"message\": \"Hello! How can I help you?\",\n \"finish_reason\": \"complete\"\n}","headers":{"Content-Type":"application/json"}},"uuid":"3b14b290-771f-4f23-a7c0-030362b5cc32","persistent":true,"priority":3,"metadata":{"mocklab":{"created":{"at":"2020-01-01T00:00:00.000Z","via":"SYSTEM"}}}}],"meta":{"total":2}} \ No newline at end of file diff --git a/seed/python-sdk/seed.yml b/seed/python-sdk/seed.yml index bed1033ec3f1..a0c5eec70e55 100644 --- a/seed/python-sdk/seed.yml +++ b/seed/python-sdk/seed.yml @@ -316,6 +316,10 @@ fixtures: pydantic_config: skip_validation: true outputFolder: skip-pydantic-validation + python-streaming-parameter-openapi: + - customConfig: + enable_wire_tests: true + outputFolder: with-wire-tests server-sent-events: - customConfig: enable_wire_tests: true diff --git a/seed/python-sdk/server-sent-events/with-wire-tests/tests/conftest.py b/seed/python-sdk/server-sent-events/with-wire-tests/tests/conftest.py index 2103b1beeca0..dd48d29f8ee3 100644 --- a/seed/python-sdk/server-sent-events/with-wire-tests/tests/conftest.py +++ b/seed/python-sdk/server-sent-events/with-wire-tests/tests/conftest.py @@ -15,6 +15,7 @@ import pytest _STARTED: bool = False +_WIREMOCK_PORT: str = "8080" # Default, will be updated after container starts def _compose_file() -> str: @@ -26,22 +27,54 @@ def _compose_file() -> str: return os.path.join(wiremock_dir, "docker-compose.test.yml") +def _project_name() -> str: + """Returns a unique project name for this test fixture to avoid container name conflicts.""" + tests_dir = os.path.dirname(__file__) + project_root = os.path.abspath(os.path.join(tests_dir, "..")) + # Use the last two directory names to create a unique project name + # e.g., "python-streaming-parameter-openapi-with-wire-tests" + parent = os.path.basename(os.path.dirname(project_root)) + current = os.path.basename(project_root) + return f"{parent}-{current}".replace("_", "-").lower() + + +def _get_wiremock_port() -> str: + """Gets the dynamically assigned port for the WireMock container.""" + compose_file = _compose_file() + project = _project_name() + try: + result = subprocess.run( + ["docker", "compose", "-f", compose_file, "-p", project, "port", "wiremock", "8080"], + check=True, + capture_output=True, + text=True, + ) + # Output 
is like "0.0.0.0:32768" or "[::]:32768" + port = result.stdout.strip().split(":")[-1] + return port + except subprocess.CalledProcessError: + return "8080" # Fallback to default + + def _start_wiremock() -> None: """Starts the WireMock container using docker-compose.""" - global _STARTED + global _STARTED, _WIREMOCK_PORT if _STARTED: return compose_file = _compose_file() - print("\nStarting WireMock container...") + project = _project_name() + print(f"\nStarting WireMock container (project: {project})...") try: subprocess.run( - ["docker", "compose", "-f", compose_file, "up", "-d", "--wait"], + ["docker", "compose", "-f", compose_file, "-p", project, "up", "-d", "--wait"], check=True, capture_output=True, text=True, ) - print("WireMock container is ready") + _WIREMOCK_PORT = _get_wiremock_port() + os.environ["WIREMOCK_PORT"] = _WIREMOCK_PORT + print(f"WireMock container is ready on port {_WIREMOCK_PORT}") _STARTED = True except subprocess.CalledProcessError as e: print(f"Failed to start WireMock: {e.stderr}") @@ -51,9 +84,10 @@ def _start_wiremock() -> None: def _stop_wiremock() -> None: """Stops and removes the WireMock container.""" compose_file = _compose_file() + project = _project_name() print("\nStopping WireMock container...") subprocess.run( - ["docker", "compose", "-f", compose_file, "down", "-v"], + ["docker", "compose", "-f", compose_file, "-p", project, "down", "-v"], check=False, capture_output=True, ) diff --git a/seed/python-sdk/server-sent-events/with-wire-tests/tests/wire/conftest.py b/seed/python-sdk/server-sent-events/with-wire-tests/tests/wire/conftest.py index 607f7a7bcd4f..fb416aa51230 100644 --- a/seed/python-sdk/server-sent-events/with-wire-tests/tests/wire/conftest.py +++ b/seed/python-sdk/server-sent-events/with-wire-tests/tests/wire/conftest.py @@ -10,6 +10,7 @@ """ import inspect +import os from typing import Any, Dict, Optional import requests @@ -17,6 +18,12 @@ from seed.client import SeedServerSentEvents +def _get_wiremock_base_url() -> str: + """Returns the WireMock base URL using the dynamically assigned port.""" + port = os.environ.get("WIREMOCK_PORT", "8080") + return f"http://localhost:{port}" + + def get_client(test_id: str) -> SeedServerSentEvents: """ Creates a configured client instance for wire tests. @@ -28,12 +35,13 @@ def get_client(test_id: str) -> SeedServerSentEvents: A configured client instance with all required auth parameters. """ test_headers = {"X-Test-Id": test_id} + base_url = _get_wiremock_base_url() # Prefer passing headers directly if the client constructor supports it. 
try: if "headers" in inspect.signature(SeedServerSentEvents).parameters: return SeedServerSentEvents( - base_url="http://localhost:8080", + base_url=base_url, headers=test_headers, ) except (TypeError, ValueError): @@ -42,7 +50,7 @@ def get_client(test_id: str) -> SeedServerSentEvents: import httpx return SeedServerSentEvents( - base_url="http://localhost:8080", + base_url=base_url, httpx_client=httpx.Client(headers=test_headers), ) @@ -55,7 +63,7 @@ def verify_request_count( expected: int, ) -> None: """Verifies the number of requests made to WireMock filtered by test ID for concurrency safety""" - wiremock_admin_url = "http://localhost:8080/__admin" + wiremock_admin_url = f"{_get_wiremock_base_url()}/__admin" request_body: Dict[str, Any] = { "method": method, "urlPath": url_path, diff --git a/seed/python-sdk/server-sent-events/with-wire-tests/wiremock/docker-compose.test.yml b/seed/python-sdk/server-sent-events/with-wire-tests/wiremock/docker-compose.test.yml index f80c6b0aab6a..58747d54a46b 100644 --- a/seed/python-sdk/server-sent-events/with-wire-tests/wiremock/docker-compose.test.yml +++ b/seed/python-sdk/server-sent-events/with-wire-tests/wiremock/docker-compose.test.yml @@ -2,7 +2,7 @@ services: wiremock: image: wiremock/wiremock:3.9.1 ports: - - "8080:8080" + - "0:8080" # Use dynamic port to avoid conflicts with concurrent tests volumes: - ./wiremock-mappings.json:/home/wiremock/mappings/wiremock-mappings.json command: ["--global-response-templating", "--verbose"] diff --git a/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/base_request.rb b/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/base_request.rb index 5f65f1327023..8f4728534866 100644 --- a/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/base_request.rb +++ b/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/base_request.rb @@ -22,6 +22,12 @@ def initialize(base_url:, path:, method:, headers: {}, query: {}, request_option @request_options = request_options end + # @return [Hash] The query parameters merged with additional query parameters from request options. + def encode_query + additional_query = @request_options&.dig(:additional_query_parameters) || @request_options&.dig("additional_query_parameters") || {} + @query.merge(additional_query) + end + # Child classes should implement: # - encode_headers: Returns the encoded HTTP request headers. # - encode_body: Returns the encoded HTTP request body. diff --git a/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/raw_client.rb b/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/raw_client.rb index fff82a2d3036..d29e32a44094 100644 --- a/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/raw_client.rb +++ b/seed/ruby-sdk-v2/imdb/lib/seed/internal/http/raw_client.rb @@ -47,17 +47,19 @@ def send(request) # @param request [Seed::Internal::Http::BaseRequest] The HTTP request. # @return [URI::Generic] The URL. def build_url(request) + encoded_query = request.encode_query + # If the path is already an absolute URL, use it directly if request.path.start_with?("http://", "https://") url = request.path - url = "#{url}?#{encode_query(request.query)}" if request.query&.any? + url = "#{url}?#{encode_query(encoded_query)}" if encoded_query&.any? return URI.parse(url) end path = request.path.start_with?("/") ? request.path[1..] : request.path base = request.base_url || @base_url url = "#{base.chomp("/")}/#{path}" - url = "#{url}?#{encode_query(request.query)}" if request.query&.any? + url = "#{url}?#{encode_query(encoded_query)}" if encoded_query&.any? 
URI.parse(url) end diff --git a/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/base_request.rb b/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/base_request.rb index 5f65f1327023..8f4728534866 100644 --- a/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/base_request.rb +++ b/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/base_request.rb @@ -22,6 +22,12 @@ def initialize(base_url:, path:, method:, headers: {}, query: {}, request_option @request_options = request_options end + # @return [Hash] The query parameters merged with additional query parameters from request options. + def encode_query + additional_query = @request_options&.dig(:additional_query_parameters) || @request_options&.dig("additional_query_parameters") || {} + @query.merge(additional_query) + end + # Child classes should implement: # - encode_headers: Returns the encoded HTTP request headers. # - encode_body: Returns the encoded HTTP request body. diff --git a/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/raw_client.rb b/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/raw_client.rb index fff82a2d3036..d29e32a44094 100644 --- a/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/raw_client.rb +++ b/seed/ruby-sdk-v2/query-parameters/lib/seed/internal/http/raw_client.rb @@ -47,17 +47,19 @@ def send(request) # @param request [Seed::Internal::Http::BaseRequest] The HTTP request. # @return [URI::Generic] The URL. def build_url(request) + encoded_query = request.encode_query + # If the path is already an absolute URL, use it directly if request.path.start_with?("http://", "https://") url = request.path - url = "#{url}?#{encode_query(request.query)}" if request.query&.any? + url = "#{url}?#{encode_query(encoded_query)}" if encoded_query&.any? return URI.parse(url) end path = request.path.start_with?("/") ? request.path[1..] : request.path base = request.base_url || @base_url url = "#{base.chomp("/")}/#{path}" - url = "#{url}?#{encode_query(request.query)}" if request.query&.any? + url = "#{url}?#{encode_query(encoded_query)}" if encoded_query&.any? URI.parse(url) end diff --git a/seed/ts-sdk/seed.yml b/seed/ts-sdk/seed.yml index 9ec870bc7c0d..6d21b5d14cb6 100644 --- a/seed/ts-sdk/seed.yml +++ b/seed/ts-sdk/seed.yml @@ -453,6 +453,10 @@ fixtures: - outputFolder: . customConfig: customPagerName: "MyPager" + ts-extra-properties: + - outputFolder: . 
+ customConfig: + noSerdeLayer: false scripts: - image: fernapi/ts-seed commands: diff --git a/seed/ts-sdk/ts-extra-properties/.fern/metadata.json b/seed/ts-sdk/ts-extra-properties/.fern/metadata.json new file mode 100644 index 000000000000..1ce60ac0c97a --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/.fern/metadata.json @@ -0,0 +1,9 @@ +{ + "cliVersion": "DUMMY", + "generatorName": "fernapi/fern-typescript-sdk", + "generatorVersion": "latest", + "generatorConfig": { + "noSerdeLayer": false + }, + "sdkVersion": "0.0.1" +} diff --git a/seed/ts-sdk/ts-extra-properties/.github/workflows/ci.yml b/seed/ts-sdk/ts-extra-properties/.github/workflows/ci.yml new file mode 100644 index 000000000000..a98d4d00ff0e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/.github/workflows/ci.yml @@ -0,0 +1,42 @@ +name: ci + +on: [push] + +jobs: + compile: + runs-on: ubuntu-latest + + steps: + - name: Checkout repo + uses: actions/checkout@v6 + + - name: Set up node + uses: actions/setup-node@v6 + + - name: Install pnpm + uses: pnpm/action-setup@v4 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Compile + run: pnpm build + + test: + runs-on: ubuntu-latest + + steps: + - name: Checkout repo + uses: actions/checkout@v6 + + - name: Set up node + uses: actions/setup-node@v6 + + - name: Install pnpm + uses: pnpm/action-setup@v4 + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Test + run: pnpm test diff --git a/seed/ts-sdk/ts-extra-properties/.gitignore b/seed/ts-sdk/ts-extra-properties/.gitignore new file mode 100644 index 000000000000..72271e049c02 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/.gitignore @@ -0,0 +1,3 @@ +node_modules +.DS_Store +/dist \ No newline at end of file diff --git a/seed/ts-sdk/ts-extra-properties/CONTRIBUTING.md b/seed/ts-sdk/ts-extra-properties/CONTRIBUTING.md new file mode 100644 index 000000000000..fe5bc2f77e0b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/CONTRIBUTING.md @@ -0,0 +1,133 @@ +# Contributing + +Thanks for your interest in contributing to this SDK! This document provides guidelines for contributing to the project. + +## Getting Started + +### Prerequisites + +- Node.js 20 or higher +- pnpm package manager + +### Installation + +Install the project dependencies: + +```bash +pnpm install +``` + +### Building + +Build the project: + +```bash +pnpm build +``` + +### Testing + +Run the test suite: + +```bash +pnpm test +``` + +Run specific test types: +- `pnpm test:unit` - Run unit tests +- `pnpm test:wire` - Run wire/integration tests + +### Linting and Formatting + +Check code style: + +```bash +pnpm run lint +pnpm run format:check +``` + +Fix code style issues: + +```bash +pnpm run lint:fix +pnpm run format:fix +``` + +Or use the combined check command: + +```bash +pnpm run check:fix +``` + +## About Generated Code + +**Important**: Most files in this SDK are automatically generated by [Fern](https://buildwithfern.com) from the API definition. Direct modifications to generated files will be overwritten the next time the SDK is generated. + +### Generated Files + +The following directories contain generated code: +- `src/api/` - API client classes and types +- `src/serialization/` - Serialization/deserialization logic +- Most TypeScript files in `src/` + +### How to Customize + +If you need to customize the SDK, you have two options: + +#### Option 1: Use `.fernignore` + +For custom code that should persist across SDK regenerations: + +1. Create a `.fernignore` file in the project root +2. 
Add file patterns for files you want to preserve (similar to `.gitignore` syntax) +3. Add your custom code to those files + +Files listed in `.fernignore` will not be overwritten when the SDK is regenerated. + +For more information, see the [Fern documentation on custom code](https://buildwithfern.com/learn/sdks/overview/custom-code). + +#### Option 2: Contribute to the Generator + +If you want to change how code is generated for all users of this SDK: + +1. The TypeScript SDK generator lives in the [Fern repository](https://github.com/fern-api/fern) +2. Generator code is located at `generators/typescript/sdk/` +3. Follow the [Fern contributing guidelines](https://github.com/fern-api/fern/blob/main/CONTRIBUTING.md) +4. Submit a pull request with your changes to the generator + +This approach is best for: +- Bug fixes in generated code +- New features that would benefit all users +- Improvements to code generation patterns + +## Making Changes + +### Workflow + +1. Create a new branch for your changes +2. Make your modifications +3. Run tests to ensure nothing breaks: `pnpm test` +4. Run linting and formatting: `pnpm run check:fix` +5. Build the project: `pnpm build` +6. Commit your changes with a clear commit message +7. Push your branch and create a pull request + +### Commit Messages + +Write clear, descriptive commit messages that explain what changed and why. + +### Code Style + +This project uses automated code formatting and linting. Run `pnpm run check:fix` before committing to ensure your code meets the project's style guidelines. + +## Questions or Issues? + +If you have questions or run into issues: + +1. Check the [Fern documentation](https://buildwithfern.com) +2. Search existing [GitHub issues](https://github.com/fern-api/fern/issues) +3. Open a new issue if your question hasn't been addressed + +## License + +By contributing to this project, you agree that your contributions will be licensed under the same license as the project. diff --git a/seed/ts-sdk/ts-extra-properties/README.md b/seed/ts-sdk/ts-extra-properties/README.md new file mode 100644 index 000000000000..62710c8e37d0 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/README.md @@ -0,0 +1,270 @@ +# Seed TypeScript Library + +[![fern shield](https://img.shields.io/badge/%F0%9F%8C%BF-Built%20with%20Fern-brightgreen)](https://buildwithfern.com?utm_source=github&utm_medium=github&utm_campaign=readme&utm_source=Seed%2FTypeScript) +[![npm shield](https://img.shields.io/npm/v/@fern/ts-extra-properties)](https://www.npmjs.com/package/@fern/ts-extra-properties) + +The Seed TypeScript library provides convenient access to the Seed APIs from TypeScript. + +## Table of Contents + +- [Installation](#installation) +- [Reference](#reference) +- [Usage](#usage) +- [Request and Response Types](#request-and-response-types) +- [Exception Handling](#exception-handling) +- [Advanced](#advanced) + - [Additional Headers](#additional-headers) + - [Additional Query String Parameters](#additional-query-string-parameters) + - [Retries](#retries) + - [Timeouts](#timeouts) + - [Aborting Requests](#aborting-requests) + - [Access Raw Response Data](#access-raw-response-data) + - [Logging](#logging) + - [Runtime Compatibility](#runtime-compatibility) +- [Contributing](#contributing) + +## Installation + +```sh +npm i -s @fern/ts-extra-properties +``` + +## Reference + +A full reference for this library is available [here](./reference.md). 
+ +## Usage + +Instantiate and use the client with the following: + +```typescript +import { SeedApiClient } from "@fern/ts-extra-properties"; + +const client = new SeedApiClient({ environment: "YOUR_BASE_URL" }); +await client.createUser({ + userName: "user_name" +}); +``` + +## Request and Response Types + +The SDK exports all request and response types as TypeScript interfaces. Simply import them with the +following namespace: + +```typescript +import { SeedApi } from "@fern/ts-extra-properties"; + +const request: SeedApi.CreateUserRequest = { + ... +}; +``` + +## Exception Handling + +When the API returns a non-success status code (4xx or 5xx response), a subclass of the following error +will be thrown. + +```typescript +import { SeedApiError } from "@fern/ts-extra-properties"; + +try { + await client.createUser(...); +} catch (err) { + if (err instanceof SeedApiError) { + console.log(err.statusCode); + console.log(err.message); + console.log(err.body); + console.log(err.rawResponse); + } +} +``` + +## Advanced + +### Additional Headers + +If you would like to send additional headers as part of the request, use the `headers` request option. + +```typescript +import { SeedApiClient } from "@fern/ts-extra-properties"; + +const client = new SeedApiClient({ + ... + headers: { + 'X-Custom-Header': 'custom value' + } +}); + +const response = await client.createUser(..., { + headers: { + 'X-Custom-Header': 'custom value' + } +}); +``` + +### Additional Query String Parameters + +If you would like to send additional query string parameters as part of the request, use the `queryParams` request option. + +```typescript +const response = await client.createUser(..., { + queryParams: { + 'customQueryParamKey': 'custom query param value' + } +}); +``` + +### Retries + +The SDK is instrumented with automatic retries with exponential backoff. A request will be retried as long +as the request is deemed retryable and the number of retry attempts has not grown larger than the configured +retry limit (default: 2). + +A request is deemed retryable when any of the following HTTP status codes is returned: + +- [408](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408) (Timeout) +- [429](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429) (Too Many Requests) +- [5XX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500) (Internal Server Errors) + +Use the `maxRetries` request option to configure this behavior. + +```typescript +const response = await client.createUser(..., { + maxRetries: 0 // override maxRetries at the request level +}); +``` + +### Timeouts + +The SDK defaults to a 60 second timeout. Use the `timeoutInSeconds` option to configure this behavior. + +```typescript +const response = await client.createUser(..., { + timeoutInSeconds: 30 // override timeout to 30s +}); +``` + +### Aborting Requests + +The SDK allows users to abort requests at any point by passing in an abort signal. + +```typescript +const controller = new AbortController(); +const response = await client.createUser(..., { + abortSignal: controller.signal +}); +controller.abort(); // aborts the request +``` + +### Access Raw Response Data + +The SDK provides access to raw response data, including headers, through the `.withRawResponse()` method. +The `.withRawResponse()` method returns a promise that results to an object with a `data` and a `rawResponse` property. 
+ +```typescript +const { data, rawResponse } = await client.createUser(...).withRawResponse(); + +console.log(data); +console.log(rawResponse.headers['X-My-Header']); +``` + +### Logging + +The SDK supports logging. You can configure the logger by passing in a `logging` object to the client options. + +```typescript +import { SeedApiClient, logging } from "@fern/ts-extra-properties"; + +const client = new SeedApiClient({ + ... + logging: { + level: logging.LogLevel.Debug, // defaults to logging.LogLevel.Info + logger: new logging.ConsoleLogger(), // defaults to ConsoleLogger + silent: false, // defaults to true, set to false to enable logging + } +}); +``` +The `logging` object can have the following properties: +- `level`: The log level to use. Defaults to `logging.LogLevel.Info`. +- `logger`: The logger to use. Defaults to a `logging.ConsoleLogger`. +- `silent`: Whether to silence the logger. Defaults to `true`. + +The `level` property can be one of the following values: +- `logging.LogLevel.Debug` +- `logging.LogLevel.Info` +- `logging.LogLevel.Warn` +- `logging.LogLevel.Error` + +To provide a custom logger, you can pass in an object that implements the `logging.ILogger` interface. + +
+Custom logger examples + +Here's an example using the popular `winston` logging library. +```ts +import winston from 'winston'; + +const winstonLogger = winston.createLogger({...}); + +const logger: logging.ILogger = { + debug: (msg, ...args) => winstonLogger.debug(msg, ...args), + info: (msg, ...args) => winstonLogger.info(msg, ...args), + warn: (msg, ...args) => winstonLogger.warn(msg, ...args), + error: (msg, ...args) => winstonLogger.error(msg, ...args), +}; +``` + +Here's an example using the popular `pino` logging library. + +```ts +import pino from 'pino'; + +const pinoLogger = pino({...}); + +const logger: logging.ILogger = { + debug: (msg, ...args) => pinoLogger.debug(args, msg), + info: (msg, ...args) => pinoLogger.info(args, msg), + warn: (msg, ...args) => pinoLogger.warn(args, msg), + error: (msg, ...args) => pinoLogger.error(args, msg), +}; +``` +
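Putting the pieces above together, a custom `logging.ILogger` can be handed to the client through the same `logging` option documented earlier in this README. The sketch below is illustrative only: it reuses the `@fern/ts-extra-properties` package name, the `logging` export, and the `ILogger` method shape shown above, and swaps in a plain console-backed logger in place of the winston/pino adapters.

```typescript
import { SeedApiClient, logging } from "@fern/ts-extra-properties";

// A console-backed logger implementing logging.ILogger, shaped like the
// winston/pino adapters above (illustrative only).
const logger: logging.ILogger = {
    debug: (msg, ...args) => console.debug(msg, ...args),
    info: (msg, ...args) => console.info(msg, ...args),
    warn: (msg, ...args) => console.warn(msg, ...args),
    error: (msg, ...args) => console.error(msg, ...args),
};

const client = new SeedApiClient({
    environment: "YOUR_BASE_URL",
    logging: {
        level: logging.LogLevel.Debug,
        logger,
        silent: false, // logging is silent by default; set to false to see output
    },
});
```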
+ + +### Runtime Compatibility + + +The SDK works in the following runtimes: + + + +- Node.js 18+ +- Vercel +- Cloudflare Workers +- Deno v1.25+ +- Bun 1.0+ +- React Native + +### Customizing Fetch Client + +The SDK provides a way for you to customize the underlying HTTP client / Fetch function. If you're running in an +unsupported environment, this provides a way for you to break glass and ensure the SDK works. + +```typescript +import { SeedApiClient } from "@fern/ts-extra-properties"; + +const client = new SeedApiClient({ + ... + fetcher: // provide your implementation here +}); +``` + +## Contributing + +While we value open-source contributions to this SDK, this library is generated programmatically. +Additions made directly to this library would have to be moved over to our generation code, +otherwise they would be overwritten upon the next generated release. Feel free to open a PR as +a proof of concept, but know that we will not be able to merge it as-is. We suggest opening +an issue first to discuss with us! + +On the other hand, contributions to the README are always very welcome! \ No newline at end of file diff --git a/seed/ts-sdk/ts-extra-properties/biome.json b/seed/ts-sdk/ts-extra-properties/biome.json new file mode 100644 index 000000000000..371d3650883e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/biome.json @@ -0,0 +1,74 @@ +{ + "$schema": "https://biomejs.dev/schemas/2.3.11/schema.json", + "root": true, + "vcs": { + "enabled": false + }, + "files": { + "ignoreUnknown": true, + "includes": [ + "**", + "!!dist", + "!!**/dist", + "!!lib", + "!!**/lib", + "!!_tmp_*", + "!!**/_tmp_*", + "!!*.tmp", + "!!**/*.tmp", + "!!.tmp/", + "!!**/.tmp/", + "!!*.log", + "!!**/*.log", + "!!**/.DS_Store", + "!!**/Thumbs.db" + ] + }, + "formatter": { + "enabled": true, + "indentStyle": "space", + "indentWidth": 4, + "lineWidth": 120 + }, + "javascript": { + "formatter": { + "quoteStyle": "double" + } + }, + "assist": { + "enabled": true, + "actions": { + "source": { + "organizeImports": "on" + } + } + }, + "linter": { + "rules": { + "style": { + "useNodejsImportProtocol": "off" + }, + "suspicious": { + "noAssignInExpressions": "warn", + "noUselessEscapeInString": { + "level": "warn", + "fix": "none", + "options": {} + }, + "noThenProperty": "warn", + "useIterableCallbackReturn": "warn", + "noShadowRestrictedNames": "warn", + "noTsIgnore": { + "level": "warn", + "fix": "none", + "options": {} + }, + "noConfusingVoidType": { + "level": "warn", + "fix": "none", + "options": {} + } + } + } + } +} diff --git a/seed/ts-sdk/ts-extra-properties/package.json b/seed/ts-sdk/ts-extra-properties/package.json new file mode 100644 index 000000000000..fcd145a9fe09 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/package.json @@ -0,0 +1,81 @@ +{ + "name": "@fern/ts-extra-properties", + "version": "0.0.1", + "private": false, + "repository": { + "type": "git", + "url": "git+https://github.com/ts-extra-properties/fern.git" + }, + "type": "commonjs", + "main": "./dist/cjs/index.js", + "module": "./dist/esm/index.mjs", + "types": "./dist/cjs/index.d.ts", + "exports": { + ".": { + "types": "./dist/cjs/index.d.ts", + "import": { + "types": "./dist/esm/index.d.mts", + "default": "./dist/esm/index.mjs" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + }, + "default": "./dist/cjs/index.js" + }, + "./serialization": { + "types": "./dist/cjs/serialization/index.d.ts", + "import": { + "types": "./dist/esm/serialization/index.d.mts", + "default": 
"./dist/esm/serialization/index.mjs" + }, + "require": { + "types": "./dist/cjs/serialization/index.d.ts", + "default": "./dist/cjs/serialization/index.js" + }, + "default": "./dist/cjs/serialization/index.js" + }, + "./package.json": "./package.json" + }, + "files": [ + "dist", + "reference.md", + "README.md", + "LICENSE" + ], + "scripts": { + "format": "biome format --write --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "format:check": "biome format --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "lint": "biome lint --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "lint:fix": "biome lint --fix --unsafe --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "check": "biome check --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "check:fix": "biome check --fix --unsafe --skip-parse-errors --no-errors-on-unmatched --max-diagnostics=none", + "build": "pnpm build:cjs && pnpm build:esm", + "build:cjs": "tsc --project ./tsconfig.cjs.json", + "build:esm": "tsc --project ./tsconfig.esm.json && node scripts/rename-to-esm-files.js dist/esm", + "test": "vitest", + "test:unit": "vitest --project unit", + "test:wire": "vitest --project wire" + }, + "dependencies": {}, + "devDependencies": { + "webpack": "^5.97.1", + "ts-loader": "^9.5.1", + "vitest": "^3.2.4", + "msw": "2.11.2", + "@types/node": "^18.19.70", + "typescript": "~5.7.2", + "@biomejs/biome": "2.3.11" + }, + "browser": { + "fs": false, + "os": false, + "path": false, + "stream": false + }, + "packageManager": "pnpm@10.20.0", + "engines": { + "node": ">=18.0.0" + }, + "sideEffects": false +} diff --git a/seed/ts-sdk/ts-extra-properties/pnpm-workspace.yaml b/seed/ts-sdk/ts-extra-properties/pnpm-workspace.yaml new file mode 100644 index 000000000000..6e4c395107df --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/pnpm-workspace.yaml @@ -0,0 +1 @@ +packages: ['.'] \ No newline at end of file diff --git a/seed/ts-sdk/ts-extra-properties/reference.md b/seed/ts-sdk/ts-extra-properties/reference.md new file mode 100644 index 000000000000..8ac87170b9eb --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/reference.md @@ -0,0 +1,92 @@ +# Reference +
+## client.getUser() -> SeedApi.User
+
+#### 🔌 Usage
+
+```typescript
+await client.getUser();
+```
+
+#### ⚙️ Parameters
+
+**requestOptions:** `SeedApiClient.RequestOptions`
+
+## client.createUser({ ...params }) -> SeedApi.User
+
+#### 🔌 Usage
+
+```typescript
+await client.createUser({
+    userName: "user_name"
+});
+```
+
+#### ⚙️ Parameters
+
+**request:** `SeedApi.CreateUserRequest`
+
+**requestOptions:** `SeedApiClient.RequestOptions`
diff --git a/seed/ts-sdk/ts-extra-properties/scripts/rename-to-esm-files.js b/seed/ts-sdk/ts-extra-properties/scripts/rename-to-esm-files.js new file mode 100644 index 000000000000..dc1df1cbbacb --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/scripts/rename-to-esm-files.js @@ -0,0 +1,123 @@ +#!/usr/bin/env node + +const fs = require("fs").promises; +const path = require("path"); + +const extensionMap = { + ".js": ".mjs", + ".d.ts": ".d.mts", +}; +const oldExtensions = Object.keys(extensionMap); + +async function findFiles(rootPath) { + const files = []; + + async function scan(directory) { + const entries = await fs.readdir(directory, { withFileTypes: true }); + + for (const entry of entries) { + const fullPath = path.join(directory, entry.name); + + if (entry.isDirectory()) { + if (entry.name !== "node_modules" && !entry.name.startsWith(".")) { + await scan(fullPath); + } + } else if (entry.isFile()) { + if (oldExtensions.some((ext) => entry.name.endsWith(ext))) { + files.push(fullPath); + } + } + } + } + + await scan(rootPath); + return files; +} + +async function updateFiles(files) { + const updatedFiles = []; + for (const file of files) { + const updated = await updateFileContents(file); + updatedFiles.push(updated); + } + + console.log(`Updated imports in ${updatedFiles.length} files.`); +} + +async function updateFileContents(file) { + const content = await fs.readFile(file, "utf8"); + + let newContent = content; + // Update each extension type defined in the map + for (const [oldExt, newExt] of Object.entries(extensionMap)) { + // Handle static imports/exports + const staticRegex = new RegExp(`(import|export)(.+from\\s+['"])(\\.\\.?\\/[^'"]+)(\\${oldExt})(['"])`, "g"); + newContent = newContent.replace(staticRegex, `$1$2$3${newExt}$5`); + + // Handle dynamic imports (yield import, await import, regular import()) + const dynamicRegex = new RegExp( + `(yield\\s+import|await\\s+import|import)\\s*\\(\\s*['"](\\.\\.\?\\/[^'"]+)(\\${oldExt})['"]\\s*\\)`, + "g", + ); + newContent = newContent.replace(dynamicRegex, `$1("$2${newExt}")`); + } + + if (content !== newContent) { + await fs.writeFile(file, newContent, "utf8"); + return true; + } + return false; +} + +async function renameFiles(files) { + let counter = 0; + for (const file of files) { + const ext = oldExtensions.find((ext) => file.endsWith(ext)); + const newExt = extensionMap[ext]; + + if (newExt) { + const newPath = file.slice(0, -ext.length) + newExt; + await fs.rename(file, newPath); + counter++; + } + } + + console.log(`Renamed ${counter} files.`); +} + +async function main() { + try { + const targetDir = process.argv[2]; + if (!targetDir) { + console.error("Please provide a target directory"); + process.exit(1); + } + + const targetPath = path.resolve(targetDir); + const targetStats = await fs.stat(targetPath); + + if (!targetStats.isDirectory()) { + console.error("The provided path is not a directory"); + process.exit(1); + } + + console.log(`Scanning directory: ${targetDir}`); + + const files = await findFiles(targetDir); + + if (files.length === 0) { + console.log("No matching files found."); + process.exit(0); + } + + console.log(`Found ${files.length} files.`); + await updateFiles(files); + await renameFiles(files); + console.log("\nDone!"); + } catch (error) { + console.error("An error occurred:", error.message); + process.exit(1); + } +} + +main(); diff --git a/seed/ts-sdk/ts-extra-properties/snippet.json b/seed/ts-sdk/ts-extra-properties/snippet.json new file mode 100644 index 000000000000..a5746d862e7c --- 
/dev/null +++ b/seed/ts-sdk/ts-extra-properties/snippet.json @@ -0,0 +1,27 @@ +{ + "endpoints": [ + { + "id": { + "path": "/user", + "method": "GET", + "identifier_override": "endpoint_.getUser" + }, + "snippet": { + "type": "typescript", + "client": "import { SeedApiClient } from \"@fern/ts-extra-properties\";\n\nconst client = new SeedApiClient({ environment: \"YOUR_BASE_URL\" });\nawait client.getUser();\n" + } + }, + { + "id": { + "path": "/user", + "method": "POST", + "identifier_override": "endpoint_.createUser" + }, + "snippet": { + "type": "typescript", + "client": "import { SeedApiClient } from \"@fern/ts-extra-properties\";\n\nconst client = new SeedApiClient({ environment: \"YOUR_BASE_URL\" });\nawait client.createUser({\n userName: \"user_name\"\n});\n" + } + } + ], + "types": {} +} \ No newline at end of file diff --git a/seed/ts-sdk/ts-extra-properties/src/BaseClient.ts b/seed/ts-sdk/ts-extra-properties/src/BaseClient.ts new file mode 100644 index 000000000000..7c0a997f8c1f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/BaseClient.ts @@ -0,0 +1,59 @@ +// This file was auto-generated by Fern from our API Definition. + +import { mergeHeaders } from "./core/headers.js"; +import * as core from "./core/index.js"; + +export interface BaseClientOptions { + environment: core.Supplier; + /** Specify a custom URL to connect the client to. */ + baseUrl?: core.Supplier; + /** Additional headers to include in requests. */ + headers?: Record | null | undefined>; + /** The default maximum time to wait for a response in seconds. */ + timeoutInSeconds?: number; + /** The default number of times to retry the request. Defaults to 2. */ + maxRetries?: number; + /** Provide a custom fetch implementation. Useful for platforms that don't have a built-in fetch or need a custom implementation. */ + fetch?: typeof fetch; + /** Configure logging for the client. */ + logging?: core.logging.LogConfig | core.logging.Logger; +} + +export interface BaseRequestOptions { + /** The maximum time to wait for a response in seconds. */ + timeoutInSeconds?: number; + /** The number of times to retry the request. Defaults to 2. */ + maxRetries?: number; + /** A hook to abort the request. */ + abortSignal?: AbortSignal; + /** Additional query string parameters to include in the request. */ + queryParams?: Record; + /** Additional headers to include in the request. */ + headers?: Record | null | undefined>; +} + +export type NormalizedClientOptions = T & { + logging: core.logging.Logger; +}; + +export function normalizeClientOptions( + options: T, +): NormalizedClientOptions { + const headers = mergeHeaders( + { + "X-Fern-Language": "JavaScript", + "X-Fern-SDK-Name": "@fern/ts-extra-properties", + "X-Fern-SDK-Version": "0.0.1", + "User-Agent": "@fern/ts-extra-properties/0.0.1", + "X-Fern-Runtime": core.RUNTIME.type, + "X-Fern-Runtime-Version": core.RUNTIME.version, + }, + options?.headers, + ); + + return { + ...options, + logging: core.logging.createLogger(options?.logging), + headers, + } as NormalizedClientOptions; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/Client.ts b/seed/ts-sdk/ts-extra-properties/src/Client.ts new file mode 100644 index 000000000000..c192e0306072 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/Client.ts @@ -0,0 +1,143 @@ +// This file was auto-generated by Fern from our API Definition. 
+ +import type * as SeedApi from "./api/index.js"; +import type { BaseClientOptions, BaseRequestOptions } from "./BaseClient.js"; +import { type NormalizedClientOptions, normalizeClientOptions } from "./BaseClient.js"; +import { mergeHeaders } from "./core/headers.js"; +import * as core from "./core/index.js"; +import { handleNonStatusCodeError } from "./errors/handleNonStatusCodeError.js"; +import * as errors from "./errors/index.js"; +import * as serializers from "./serialization/index.js"; + +export declare namespace SeedApiClient { + export type Options = BaseClientOptions; + + export interface RequestOptions extends BaseRequestOptions {} +} + +export class SeedApiClient { + protected readonly _options: NormalizedClientOptions; + + constructor(options: SeedApiClient.Options) { + this._options = normalizeClientOptions(options); + } + + /** + * @param {SeedApiClient.RequestOptions} requestOptions - Request-specific configuration. + * + * @example + * await client.getUser() + */ + public getUser(requestOptions?: SeedApiClient.RequestOptions): core.HttpResponsePromise { + return core.HttpResponsePromise.fromPromise(this.__getUser(requestOptions)); + } + + private async __getUser( + requestOptions?: SeedApiClient.RequestOptions, + ): Promise> { + const _headers: core.Fetcher.Args["headers"] = mergeHeaders(this._options?.headers, requestOptions?.headers); + const _response = await core.fetcher({ + url: core.url.join( + (await core.Supplier.get(this._options.baseUrl)) ?? + (await core.Supplier.get(this._options.environment)), + "user", + ), + method: "GET", + headers: _headers, + queryParameters: requestOptions?.queryParams, + timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, + maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, + abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, + }); + if (_response.ok) { + return { + data: serializers.User.parseOrThrow(_response.body, { + unrecognizedObjectKeys: "passthrough", + allowUnrecognizedUnionMembers: true, + allowUnrecognizedEnumValues: true, + skipValidation: true, + breadcrumbsPrefix: ["response"], + }), + rawResponse: _response.rawResponse, + }; + } + + if (_response.error.reason === "status-code") { + throw new errors.SeedApiError({ + statusCode: _response.error.statusCode, + body: _response.error.body, + rawResponse: _response.rawResponse, + }); + } + + return handleNonStatusCodeError(_response.error, _response.rawResponse, "GET", "/user"); + } + + /** + * @param {SeedApi.CreateUserRequest} request + * @param {SeedApiClient.RequestOptions} requestOptions - Request-specific configuration. + * + * @example + * await client.createUser({ + * userName: "user_name" + * }) + */ + public createUser( + request: SeedApi.CreateUserRequest, + requestOptions?: SeedApiClient.RequestOptions, + ): core.HttpResponsePromise { + return core.HttpResponsePromise.fromPromise(this.__createUser(request, requestOptions)); + } + + private async __createUser( + request: SeedApi.CreateUserRequest, + requestOptions?: SeedApiClient.RequestOptions, + ): Promise> { + const _headers: core.Fetcher.Args["headers"] = mergeHeaders(this._options?.headers, requestOptions?.headers); + const _response = await core.fetcher({ + url: core.url.join( + (await core.Supplier.get(this._options.baseUrl)) ?? 
+ (await core.Supplier.get(this._options.environment)), + "user", + ), + method: "POST", + headers: _headers, + contentType: "application/json", + queryParameters: requestOptions?.queryParams, + requestType: "json", + body: serializers.CreateUserRequest.jsonOrThrow(request, { + unrecognizedObjectKeys: "strip", + omitUndefined: true, + }), + timeoutMs: (requestOptions?.timeoutInSeconds ?? this._options?.timeoutInSeconds ?? 60) * 1000, + maxRetries: requestOptions?.maxRetries ?? this._options?.maxRetries, + abortSignal: requestOptions?.abortSignal, + fetchFn: this._options?.fetch, + logging: this._options.logging, + }); + if (_response.ok) { + return { + data: serializers.User.parseOrThrow(_response.body, { + unrecognizedObjectKeys: "passthrough", + allowUnrecognizedUnionMembers: true, + allowUnrecognizedEnumValues: true, + skipValidation: true, + breadcrumbsPrefix: ["response"], + }), + rawResponse: _response.rawResponse, + }; + } + + if (_response.error.reason === "status-code") { + throw new errors.SeedApiError({ + statusCode: _response.error.statusCode, + body: _response.error.body, + rawResponse: _response.rawResponse, + }); + } + + return handleNonStatusCodeError(_response.error, _response.rawResponse, "POST", "/user"); + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/api/client/index.ts b/seed/ts-sdk/ts-extra-properties/src/api/client/index.ts new file mode 100644 index 000000000000..195f9aa8a846 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/api/client/index.ts @@ -0,0 +1 @@ +export * from "./requests/index.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/api/client/requests/CreateUserRequest.ts b/seed/ts-sdk/ts-extra-properties/src/api/client/requests/CreateUserRequest.ts new file mode 100644 index 000000000000..61bf47f5dd88 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/api/client/requests/CreateUserRequest.ts @@ -0,0 +1,12 @@ +// This file was auto-generated by Fern from our API Definition. + +/** + * @example + * { + * userName: "user_name" + * } + */ +export interface CreateUserRequest { + userName: string; + metaData?: Record; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/api/client/requests/index.ts b/seed/ts-sdk/ts-extra-properties/src/api/client/requests/index.ts new file mode 100644 index 000000000000..d66efc133063 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/api/client/requests/index.ts @@ -0,0 +1 @@ +export type { CreateUserRequest } from "./CreateUserRequest.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/api/index.ts b/seed/ts-sdk/ts-extra-properties/src/api/index.ts new file mode 100644 index 000000000000..d9adb1af9a93 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/api/index.ts @@ -0,0 +1,2 @@ +export * from "./client/index.js"; +export * from "./types/index.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/api/types/User.ts b/seed/ts-sdk/ts-extra-properties/src/api/types/User.ts new file mode 100644 index 000000000000..21f9d79287e4 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/api/types/User.ts @@ -0,0 +1,10 @@ +// This file was auto-generated by Fern from our API Definition. 
+ +export interface User { + id: string; + userName: string; + createdAt: Date; + updatedAt?: Date; + /** Accepts any additional properties */ + [key: string]: any; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/api/types/index.ts b/seed/ts-sdk/ts-extra-properties/src/api/types/index.ts new file mode 100644 index 000000000000..169437c217d9 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/api/types/index.ts @@ -0,0 +1 @@ +export * from "./User.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/exports.ts b/seed/ts-sdk/ts-extra-properties/src/core/exports.ts new file mode 100644 index 000000000000..69296d7100d6 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/exports.ts @@ -0,0 +1 @@ +export * from "./logging/exports.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/APIResponse.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/APIResponse.ts new file mode 100644 index 000000000000..97ab83c2b195 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/APIResponse.ts @@ -0,0 +1,23 @@ +import type { RawResponse } from "./RawResponse.js"; + +/** + * The response of an API call. + * It is a successful response or a failed response. + */ +export type APIResponse = SuccessfulResponse | FailedResponse; + +export interface SuccessfulResponse { + ok: true; + body: T; + /** + * @deprecated Use `rawResponse` instead + */ + headers?: Record; + rawResponse: RawResponse; +} + +export interface FailedResponse { + ok: false; + error: T; + rawResponse: RawResponse; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/BinaryResponse.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/BinaryResponse.ts new file mode 100644 index 000000000000..bca7f4c77981 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/BinaryResponse.ts @@ -0,0 +1,34 @@ +export type BinaryResponse = { + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bodyUsed) */ + bodyUsed: Response["bodyUsed"]; + /** + * Returns a ReadableStream of the response body. + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/body) + */ + stream: () => Response["body"]; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/arrayBuffer) */ + arrayBuffer: () => ReturnType; + /** [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/blob) */ + blob: () => ReturnType; + /** + * [MDN Reference](https://developer.mozilla.org/docs/Web/API/Request/bytes) + * Some versions of the Fetch API may not support this method. + */ + bytes?(): ReturnType; +}; + +export function getBinaryResponse(response: Response): BinaryResponse { + const binaryResponse: BinaryResponse = { + get bodyUsed() { + return response.bodyUsed; + }, + stream: () => response.body, + arrayBuffer: response.arrayBuffer.bind(response), + blob: response.blob.bind(response), + }; + if ("bytes" in response && typeof response.bytes === "function") { + binaryResponse.bytes = response.bytes.bind(response); + } + + return binaryResponse; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/EndpointMetadata.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/EndpointMetadata.ts new file mode 100644 index 000000000000..998d68f5c20c --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/EndpointMetadata.ts @@ -0,0 +1,13 @@ +export type SecuritySchemeKey = string; +/** + * A collection of security schemes, where the key is the name of the security scheme and the value is the list of scopes required for that scheme. 
+ * All schemes in the collection must be satisfied for authentication to be successful. + */ +export type SecuritySchemeCollection = Record; +export type AuthScope = string; +export type EndpointMetadata = { + /** + * An array of security scheme collections. Each collection represents an alternative way to authenticate. + */ + security?: SecuritySchemeCollection[]; +}; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/EndpointSupplier.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/EndpointSupplier.ts new file mode 100644 index 000000000000..aad81f0d9040 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/EndpointSupplier.ts @@ -0,0 +1,14 @@ +import type { EndpointMetadata } from "./EndpointMetadata.js"; +import type { Supplier } from "./Supplier.js"; + +type EndpointSupplierFn = (arg: { endpointMetadata?: EndpointMetadata }) => T | Promise; +export type EndpointSupplier = Supplier | EndpointSupplierFn; +export const EndpointSupplier = { + get: async (supplier: EndpointSupplier, arg: { endpointMetadata?: EndpointMetadata }): Promise => { + if (typeof supplier === "function") { + return (supplier as EndpointSupplierFn)(arg); + } else { + return supplier; + } + }, +}; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Fetcher.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Fetcher.ts new file mode 100644 index 000000000000..45cae32b23c1 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Fetcher.ts @@ -0,0 +1,391 @@ +import { toJson } from "../json.js"; +import { createLogger, type LogConfig, type Logger } from "../logging/logger.js"; +import type { APIResponse } from "./APIResponse.js"; +import { createRequestUrl } from "./createRequestUrl.js"; +import type { EndpointMetadata } from "./EndpointMetadata.js"; +import { EndpointSupplier } from "./EndpointSupplier.js"; +import { getErrorResponseBody } from "./getErrorResponseBody.js"; +import { getFetchFn } from "./getFetchFn.js"; +import { getRequestBody } from "./getRequestBody.js"; +import { getResponseBody } from "./getResponseBody.js"; +import { Headers } from "./Headers.js"; +import { makeRequest } from "./makeRequest.js"; +import { abortRawResponse, toRawResponse, unknownRawResponse } from "./RawResponse.js"; +import { requestWithRetries } from "./requestWithRetries.js"; + +export type FetchFunction = (args: Fetcher.Args) => Promise>; + +export declare namespace Fetcher { + export interface Args { + url: string; + method: string; + contentType?: string; + headers?: Record; + queryParameters?: Record; + body?: unknown; + timeoutMs?: number; + maxRetries?: number; + withCredentials?: boolean; + abortSignal?: AbortSignal; + requestType?: "json" | "file" | "bytes" | "form" | "other"; + responseType?: "json" | "blob" | "sse" | "streaming" | "text" | "arrayBuffer" | "binary-response"; + duplex?: "half"; + endpointMetadata?: EndpointMetadata; + fetchFn?: typeof fetch; + logging?: LogConfig | Logger; + } + + export type Error = FailedStatusCodeError | NonJsonError | BodyIsNullError | TimeoutError | UnknownError; + + export interface FailedStatusCodeError { + reason: "status-code"; + statusCode: number; + body: unknown; + } + + export interface NonJsonError { + reason: "non-json"; + statusCode: number; + rawBody: string; + } + + export interface BodyIsNullError { + reason: "body-is-null"; + statusCode: number; + } + + export interface TimeoutError { + reason: "timeout"; + } + + export interface UnknownError { + reason: "unknown"; + errorMessage: string; + } +} + +const 
SENSITIVE_HEADERS = new Set([ + "authorization", + "www-authenticate", + "x-api-key", + "api-key", + "apikey", + "x-api-token", + "x-auth-token", + "auth-token", + "cookie", + "set-cookie", + "proxy-authorization", + "proxy-authenticate", + "x-csrf-token", + "x-xsrf-token", + "x-session-token", + "x-access-token", +]); + +function redactHeaders(headers: Headers | Record): Record { + const filtered: Record = {}; + for (const [key, value] of headers instanceof Headers ? headers.entries() : Object.entries(headers)) { + if (SENSITIVE_HEADERS.has(key.toLowerCase())) { + filtered[key] = "[REDACTED]"; + } else { + filtered[key] = value; + } + } + return filtered; +} + +const SENSITIVE_QUERY_PARAMS = new Set([ + "api_key", + "api-key", + "apikey", + "token", + "access_token", + "access-token", + "auth_token", + "auth-token", + "password", + "passwd", + "secret", + "api_secret", + "api-secret", + "apisecret", + "key", + "session", + "session_id", + "session-id", +]); + +function redactQueryParameters(queryParameters?: Record): Record | undefined { + if (queryParameters == null) { + return queryParameters; + } + const redacted: Record = {}; + for (const [key, value] of Object.entries(queryParameters)) { + if (SENSITIVE_QUERY_PARAMS.has(key.toLowerCase())) { + redacted[key] = "[REDACTED]"; + } else { + redacted[key] = value; + } + } + return redacted; +} + +function redactUrl(url: string): string { + const protocolIndex = url.indexOf("://"); + if (protocolIndex === -1) return url; + + const afterProtocol = protocolIndex + 3; + + // Find the first delimiter that marks the end of the authority section + const pathStart = url.indexOf("/", afterProtocol); + let queryStart = url.indexOf("?", afterProtocol); + let fragmentStart = url.indexOf("#", afterProtocol); + + const firstDelimiter = Math.min( + pathStart === -1 ? url.length : pathStart, + queryStart === -1 ? url.length : queryStart, + fragmentStart === -1 ? url.length : fragmentStart, + ); + + // Find the LAST @ before the delimiter (handles multiple @ in credentials) + let atIndex = -1; + for (let i = afterProtocol; i < firstDelimiter; i++) { + if (url[i] === "@") { + atIndex = i; + } + } + + if (atIndex !== -1) { + url = `${url.slice(0, afterProtocol)}[REDACTED]@${url.slice(atIndex + 1)}`; + } + + // Recalculate queryStart since url might have changed + queryStart = url.indexOf("?"); + if (queryStart === -1) return url; + + fragmentStart = url.indexOf("#", queryStart); + const queryEnd = fragmentStart !== -1 ? 
fragmentStart : url.length; + const queryString = url.slice(queryStart + 1, queryEnd); + + if (queryString.length === 0) return url; + + // FAST PATH: Quick check if any sensitive keywords present + // Using indexOf is faster than regex for simple substring matching + const lower = queryString.toLowerCase(); + const hasSensitive = + lower.includes("token") || + lower.includes("key") || + lower.includes("password") || + lower.includes("passwd") || + lower.includes("secret") || + lower.includes("session") || + lower.includes("auth"); + + if (!hasSensitive) { + return url; + } + + // SLOW PATH: Parse and redact + const redactedParams: string[] = []; + const params = queryString.split("&"); + + for (const param of params) { + const equalIndex = param.indexOf("="); + if (equalIndex === -1) { + redactedParams.push(param); + continue; + } + + const key = param.slice(0, equalIndex); + let shouldRedact = SENSITIVE_QUERY_PARAMS.has(key.toLowerCase()); + + if (!shouldRedact && key.includes("%")) { + try { + const decodedKey = decodeURIComponent(key); + shouldRedact = SENSITIVE_QUERY_PARAMS.has(decodedKey.toLowerCase()); + } catch {} + } + + redactedParams.push(shouldRedact ? `${key}=[REDACTED]` : param); + } + + return url.slice(0, queryStart + 1) + redactedParams.join("&") + url.slice(queryEnd); +} + +async function getHeaders(args: Fetcher.Args): Promise { + const newHeaders: Headers = new Headers(); + + newHeaders.set( + "Accept", + args.responseType === "json" ? "application/json" : args.responseType === "text" ? "text/plain" : "*/*", + ); + if (args.body !== undefined && args.contentType != null) { + newHeaders.set("Content-Type", args.contentType); + } + + if (args.headers == null) { + return newHeaders; + } + + for (const [key, value] of Object.entries(args.headers)) { + const result = await EndpointSupplier.get(value, { endpointMetadata: args.endpointMetadata ?? {} }); + if (typeof result === "string") { + newHeaders.set(key, result); + continue; + } + if (result == null) { + continue; + } + newHeaders.set(key, `${result}`); + } + return newHeaders; +} + +export async function fetcherImpl(args: Fetcher.Args): Promise> { + const url = createRequestUrl(args.url, args.queryParameters); + const requestBody: BodyInit | undefined = await getRequestBody({ + body: args.body, + type: args.requestType ?? "other", + }); + const fetchFn = args.fetchFn ?? 
(await getFetchFn()); + const headers = await getHeaders(args); + const logger = createLogger(args.logging); + + if (logger.isDebug()) { + const metadata = { + method: args.method, + url: redactUrl(url), + headers: redactHeaders(headers), + queryParameters: redactQueryParameters(args.queryParameters), + hasBody: requestBody != null, + }; + logger.debug("Making HTTP request", metadata); + } + + try { + const response = await requestWithRetries( + async () => + makeRequest( + fetchFn, + url, + args.method, + headers, + requestBody, + args.timeoutMs, + args.abortSignal, + args.withCredentials, + args.duplex, + ), + args.maxRetries, + ); + + if (response.status >= 200 && response.status < 400) { + if (logger.isDebug()) { + const metadata = { + method: args.method, + url: redactUrl(url), + statusCode: response.status, + responseHeaders: redactHeaders(response.headers), + }; + logger.debug("HTTP request succeeded", metadata); + } + const body = await getResponseBody(response, args.responseType); + return { + ok: true, + body: body as R, + headers: response.headers, + rawResponse: toRawResponse(response), + }; + } else { + if (logger.isError()) { + const metadata = { + method: args.method, + url: redactUrl(url), + statusCode: response.status, + responseHeaders: redactHeaders(Object.fromEntries(response.headers.entries())), + }; + logger.error("HTTP request failed with error status", metadata); + } + return { + ok: false, + error: { + reason: "status-code", + statusCode: response.status, + body: await getErrorResponseBody(response), + }, + rawResponse: toRawResponse(response), + }; + } + } catch (error) { + if (args.abortSignal?.aborted) { + if (logger.isError()) { + const metadata = { + method: args.method, + url: redactUrl(url), + }; + logger.error("HTTP request was aborted", metadata); + } + return { + ok: false, + error: { + reason: "unknown", + errorMessage: "The user aborted a request", + }, + rawResponse: abortRawResponse, + }; + } else if (error instanceof Error && error.name === "AbortError") { + if (logger.isError()) { + const metadata = { + method: args.method, + url: redactUrl(url), + timeoutMs: args.timeoutMs, + }; + logger.error("HTTP request timed out", metadata); + } + return { + ok: false, + error: { + reason: "timeout", + }, + rawResponse: abortRawResponse, + }; + } else if (error instanceof Error) { + if (logger.isError()) { + const metadata = { + method: args.method, + url: redactUrl(url), + errorMessage: error.message, + }; + logger.error("HTTP request failed with error", metadata); + } + return { + ok: false, + error: { + reason: "unknown", + errorMessage: error.message, + }, + rawResponse: unknownRawResponse, + }; + } + + if (logger.isError()) { + const metadata = { + method: args.method, + url: redactUrl(url), + error: toJson(error), + }; + logger.error("HTTP request failed with unknown error", metadata); + } + return { + ok: false, + error: { + reason: "unknown", + errorMessage: toJson(error), + }, + rawResponse: unknownRawResponse, + }; + } +} + +export const fetcher: FetchFunction = fetcherImpl; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Headers.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Headers.ts new file mode 100644 index 000000000000..af841aa24f55 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Headers.ts @@ -0,0 +1,93 @@ +let Headers: typeof globalThis.Headers; + +if (typeof globalThis.Headers !== "undefined") { + Headers = globalThis.Headers; +} else { + Headers = class Headers implements Headers { + private 
headers: Map; + + constructor(init?: HeadersInit) { + this.headers = new Map(); + + if (init) { + if (init instanceof Headers) { + init.forEach((value, key) => this.append(key, value)); + } else if (Array.isArray(init)) { + for (const [key, value] of init) { + if (typeof key === "string" && typeof value === "string") { + this.append(key, value); + } else { + throw new TypeError("Each header entry must be a [string, string] tuple"); + } + } + } else { + for (const [key, value] of Object.entries(init)) { + if (typeof value === "string") { + this.append(key, value); + } else { + throw new TypeError("Header values must be strings"); + } + } + } + } + } + + append(name: string, value: string): void { + const key = name.toLowerCase(); + const existing = this.headers.get(key) || []; + this.headers.set(key, [...existing, value]); + } + + delete(name: string): void { + const key = name.toLowerCase(); + this.headers.delete(key); + } + + get(name: string): string | null { + const key = name.toLowerCase(); + const values = this.headers.get(key); + return values ? values.join(", ") : null; + } + + has(name: string): boolean { + const key = name.toLowerCase(); + return this.headers.has(key); + } + + set(name: string, value: string): void { + const key = name.toLowerCase(); + this.headers.set(key, [value]); + } + + forEach(callbackfn: (value: string, key: string, parent: Headers) => void, thisArg?: unknown): void { + const boundCallback = thisArg ? callbackfn.bind(thisArg) : callbackfn; + this.headers.forEach((values, key) => boundCallback(values.join(", "), key, this)); + } + + getSetCookie(): string[] { + return this.headers.get("set-cookie") || []; + } + + *entries(): HeadersIterator<[string, string]> { + for (const [key, values] of this.headers.entries()) { + yield [key, values.join(", ")]; + } + } + + *keys(): HeadersIterator { + yield* this.headers.keys(); + } + + *values(): HeadersIterator { + for (const values of this.headers.values()) { + yield values.join(", "); + } + } + + [Symbol.iterator](): HeadersIterator<[string, string]> { + return this.entries(); + } + }; +} + +export { Headers }; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/HttpResponsePromise.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/HttpResponsePromise.ts new file mode 100644 index 000000000000..692ca7d795f0 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/HttpResponsePromise.ts @@ -0,0 +1,116 @@ +import type { WithRawResponse } from "./RawResponse.js"; + +/** + * A promise that returns the parsed response and lets you retrieve the raw response too. + */ +export class HttpResponsePromise extends Promise { + private innerPromise: Promise>; + private unwrappedPromise: Promise | undefined; + + private constructor(promise: Promise>) { + // Initialize with a no-op to avoid premature parsing + super((resolve) => { + resolve(undefined as unknown as T); + }); + this.innerPromise = promise; + } + + /** + * Creates an `HttpResponsePromise` from a function that returns a promise. + * + * @param fn - A function that returns a promise resolving to a `WithRawResponse` object. + * @param args - Arguments to pass to the function. + * @returns An `HttpResponsePromise` instance. + */ + public static fromFunction Promise>, T>( + fn: F, + ...args: Parameters + ): HttpResponsePromise { + return new HttpResponsePromise(fn(...args)); + } + + /** + * Creates a function that returns an `HttpResponsePromise` from a function that returns a promise. 
+ * + * @param fn - A function that returns a promise resolving to a `WithRawResponse` object. + * @returns A function that returns an `HttpResponsePromise` instance. + */ + public static interceptFunction< + F extends (...args: never[]) => Promise>, + T = Awaited>["data"], + >(fn: F): (...args: Parameters) => HttpResponsePromise { + return (...args: Parameters): HttpResponsePromise => { + return HttpResponsePromise.fromPromise(fn(...args)); + }; + } + + /** + * Creates an `HttpResponsePromise` from an existing promise. + * + * @param promise - A promise resolving to a `WithRawResponse` object. + * @returns An `HttpResponsePromise` instance. + */ + public static fromPromise(promise: Promise>): HttpResponsePromise { + return new HttpResponsePromise(promise); + } + + /** + * Creates an `HttpResponsePromise` from an executor function. + * + * @param executor - A function that takes resolve and reject callbacks to create a promise. + * @returns An `HttpResponsePromise` instance. + */ + public static fromExecutor( + executor: (resolve: (value: WithRawResponse) => void, reject: (reason?: unknown) => void) => void, + ): HttpResponsePromise { + const promise = new Promise>(executor); + return new HttpResponsePromise(promise); + } + + /** + * Creates an `HttpResponsePromise` from a resolved result. + * + * @param result - A `WithRawResponse` object to resolve immediately. + * @returns An `HttpResponsePromise` instance. + */ + public static fromResult(result: WithRawResponse): HttpResponsePromise { + const promise = Promise.resolve(result); + return new HttpResponsePromise(promise); + } + + private unwrap(): Promise { + if (!this.unwrappedPromise) { + this.unwrappedPromise = this.innerPromise.then(({ data }) => data); + } + return this.unwrappedPromise; + } + + /** @inheritdoc */ + public override then( + onfulfilled?: ((value: T) => TResult1 | PromiseLike) | null, + onrejected?: ((reason: unknown) => TResult2 | PromiseLike) | null, + ): Promise { + return this.unwrap().then(onfulfilled, onrejected); + } + + /** @inheritdoc */ + public override catch( + onrejected?: ((reason: unknown) => TResult | PromiseLike) | null, + ): Promise { + return this.unwrap().catch(onrejected); + } + + /** @inheritdoc */ + public override finally(onfinally?: (() => void) | null): Promise { + return this.unwrap().finally(onfinally); + } + + /** + * Retrieves the data and raw response. + * + * @returns A promise resolving to a `WithRawResponse` object. + */ + public async withRawResponse(): Promise> { + return await this.innerPromise; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/RawResponse.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/RawResponse.ts new file mode 100644 index 000000000000..37fb44e2aa99 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/RawResponse.ts @@ -0,0 +1,61 @@ +import { Headers } from "./Headers.js"; + +/** + * The raw response from the fetch call excluding the body. + */ +export type RawResponse = Omit< + { + [K in keyof Response as Response[K] extends Function ? never : K]: Response[K]; // strips out functions + }, + "ok" | "body" | "bodyUsed" +>; // strips out body and bodyUsed + +/** + * A raw response indicating that the request was aborted. + */ +export const abortRawResponse: RawResponse = { + headers: new Headers(), + redirected: false, + status: 499, + statusText: "Client Closed Request", + type: "error", + url: "", +} as const; + +/** + * A raw response indicating an unknown error. 
+ */ +export const unknownRawResponse: RawResponse = { + headers: new Headers(), + redirected: false, + status: 0, + statusText: "Unknown Error", + type: "error", + url: "", +} as const; + +/** + * Converts a `Response` object into a `RawResponse` by extracting its properties, + * excluding the `body` and `bodyUsed` fields. + * + * @param response - The `Response` object to convert. + * @returns A `RawResponse` object containing the extracted properties of the input response. + */ +export function toRawResponse(response: Response): RawResponse { + return { + headers: response.headers, + redirected: response.redirected, + status: response.status, + statusText: response.statusText, + type: response.type, + url: response.url, + }; +} + +/** + * A parsed value of type `T` paired with the raw response it was extracted from. + */ +export interface WithRawResponse<T> { + readonly data: T; + readonly rawResponse: RawResponse; +}
diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Supplier.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Supplier.ts new file mode 100644 index 000000000000..867c931c02f4 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/Supplier.ts @@ -0,0 +1,11 @@ +export type Supplier<T> = T | Promise<T> | (() => T | Promise<T>); + +export const Supplier = { + get: async <T>(supplier: Supplier<T>): Promise<T> => { + if (typeof supplier === "function") { + return (supplier as () => T)(); + } else { + return supplier; + } + }, +};
diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/createRequestUrl.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/createRequestUrl.ts new file mode 100644 index 000000000000..88e13265e112 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/createRequestUrl.ts @@ -0,0 +1,6 @@ +import { toQueryString } from "../url/qs.js"; + +export function createRequestUrl(baseUrl: string, queryParameters?: Record<string, unknown>): string { + const queryString = toQueryString(queryParameters, { arrayFormat: "repeat" }); + return queryString ? `${baseUrl}?${queryString}` : baseUrl; +}
diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getErrorResponseBody.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getErrorResponseBody.ts new file mode 100644 index 000000000000..7cf4e623c2f5 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getErrorResponseBody.ts @@ -0,0 +1,33 @@ +import { fromJson } from "../json.js"; +import { getResponseBody } from "./getResponseBody.js"; + +export async function getErrorResponseBody(response: Response): Promise<unknown> { + let contentType = response.headers.get("Content-Type")?.toLowerCase(); + if (contentType == null || contentType.length === 0) { + return getResponseBody(response); + } + + if (contentType.indexOf(";") !== -1) { + contentType = contentType.split(";")[0]?.trim() ?? ""; + } + switch (contentType) { + case "application/hal+json": + case "application/json": + case "application/ld+json": + case "application/problem+json": + case "application/vnd.api+json": + case "text/json": { + const text = await response.text(); + return text.length > 0 ? fromJson(text) : undefined; + } + default: + if (contentType.startsWith("application/vnd.") && contentType.endsWith("+json")) { + const text = await response.text(); + return text.length > 0 ?
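// Illustrative sketch of the two helpers above (values are examples only):
//
//   await Supplier.get("static-token");        // -> "static-token"
//   await Supplier.get(async () => "fresh");   // -> "fresh" (re-evaluated on each call)
//
//   createRequestUrl("https://x.test/items", { tag: ["a", "b"], q: "fern" });
//   // -> "https://x.test/items?tag=a&tag=b&q=fern" with the "repeat" array format,
//   //    assuming `toQueryString` follows the usual qs-style encoding.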
fromJson(text) : undefined; + } + + // Fallback to plain text if content type is not recognized + // Even if no body is present, the response will be an empty string + return await response.text(); + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getFetchFn.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getFetchFn.ts new file mode 100644 index 000000000000..9f845b956392 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getFetchFn.ts @@ -0,0 +1,3 @@ +export async function getFetchFn(): Promise { + return fetch; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getHeader.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getHeader.ts new file mode 100644 index 000000000000..50f922b0e87f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getHeader.ts @@ -0,0 +1,8 @@ +export function getHeader(headers: Record, header: string): string | undefined { + for (const [headerKey, headerValue] of Object.entries(headers)) { + if (headerKey.toLowerCase() === header.toLowerCase()) { + return headerValue; + } + } + return undefined; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getRequestBody.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getRequestBody.ts new file mode 100644 index 000000000000..91d9d81f50e5 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getRequestBody.ts @@ -0,0 +1,20 @@ +import { toJson } from "../json.js"; +import { toQueryString } from "../url/qs.js"; + +export declare namespace GetRequestBody { + interface Args { + body: unknown; + type: "json" | "file" | "bytes" | "form" | "other"; + } +} + +export async function getRequestBody({ body, type }: GetRequestBody.Args): Promise { + if (type === "form") { + return toQueryString(body, { arrayFormat: "repeat", encode: true }); + } + if (type.includes("json")) { + return toJson(body); + } else { + return body as BodyInit; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getResponseBody.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getResponseBody.ts new file mode 100644 index 000000000000..708d55728f2b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/getResponseBody.ts @@ -0,0 +1,58 @@ +import { fromJson } from "../json.js"; +import { getBinaryResponse } from "./BinaryResponse.js"; + +export async function getResponseBody(response: Response, responseType?: string): Promise { + switch (responseType) { + case "binary-response": + return getBinaryResponse(response); + case "blob": + return await response.blob(); + case "arrayBuffer": + return await response.arrayBuffer(); + case "sse": + if (response.body == null) { + return { + ok: false, + error: { + reason: "body-is-null", + statusCode: response.status, + }, + }; + } + return response.body; + case "streaming": + if (response.body == null) { + return { + ok: false, + error: { + reason: "body-is-null", + statusCode: response.status, + }, + }; + } + + return response.body; + + case "text": + return await response.text(); + } + + // if responseType is "json" or not specified, try to parse as JSON + const text = await response.text(); + if (text.length > 0) { + try { + const responseBody = fromJson(text); + return responseBody; + } catch (_err) { + return { + ok: false, + error: { + reason: "non-json", + statusCode: response.status, + rawBody: text, + }, + }; + } + } + return undefined; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/index.ts new 
file mode 100644 index 000000000000..c3bc6da20f49 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/index.ts @@ -0,0 +1,11 @@ +export type { APIResponse } from "./APIResponse.js"; +export type { BinaryResponse } from "./BinaryResponse.js"; +export type { EndpointMetadata } from "./EndpointMetadata.js"; +export { EndpointSupplier } from "./EndpointSupplier.js"; +export type { Fetcher, FetchFunction } from "./Fetcher.js"; +export { fetcher } from "./Fetcher.js"; +export { getHeader } from "./getHeader.js"; +export { HttpResponsePromise } from "./HttpResponsePromise.js"; +export type { RawResponse, WithRawResponse } from "./RawResponse.js"; +export { abortRawResponse, toRawResponse, unknownRawResponse } from "./RawResponse.js"; +export { Supplier } from "./Supplier.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/makeRequest.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/makeRequest.ts new file mode 100644 index 000000000000..921565eb0063 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/makeRequest.ts @@ -0,0 +1,42 @@ +import { anySignal, getTimeoutSignal } from "./signals.js"; + +export const makeRequest = async ( + fetchFn: (url: string, init: RequestInit) => Promise, + url: string, + method: string, + headers: Headers | Record, + requestBody: BodyInit | undefined, + timeoutMs?: number, + abortSignal?: AbortSignal, + withCredentials?: boolean, + duplex?: "half", +): Promise => { + const signals: AbortSignal[] = []; + + let timeoutAbortId: ReturnType | undefined; + if (timeoutMs != null) { + const { signal, abortId } = getTimeoutSignal(timeoutMs); + timeoutAbortId = abortId; + signals.push(signal); + } + + if (abortSignal != null) { + signals.push(abortSignal); + } + const newSignals = anySignal(signals); + const response = await fetchFn(url, { + method: method, + headers, + body: requestBody, + signal: newSignals, + credentials: withCredentials ? 
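// Illustrative sketch: `makeRequest` above merges an optional timeout signal with any
// caller-supplied AbortSignal via `anySignal`, so either source can cancel the fetch:
//
//   const controller = new AbortController();
//   void makeRequest(fetch, "https://x.test", "GET", {}, undefined, 5_000, controller.signal);
//   controller.abort(); // cancels immediately; otherwise the 5s timeout signal would fire
//
// The URL and timeout are examples; only the parameter order mirrors the signature above.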
"include" : undefined, + // @ts-ignore + duplex, + }); + + if (timeoutAbortId != null) { + clearTimeout(timeoutAbortId); + } + + return response; +}; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/requestWithRetries.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/requestWithRetries.ts new file mode 100644 index 000000000000..1f689688c4b2 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/requestWithRetries.ts @@ -0,0 +1,64 @@ +const INITIAL_RETRY_DELAY = 1000; // in milliseconds +const MAX_RETRY_DELAY = 60000; // in milliseconds +const DEFAULT_MAX_RETRIES = 2; +const JITTER_FACTOR = 0.2; // 20% random jitter + +function addPositiveJitter(delay: number): number { + const jitterMultiplier = 1 + Math.random() * JITTER_FACTOR; + return delay * jitterMultiplier; +} + +function addSymmetricJitter(delay: number): number { + const jitterMultiplier = 1 + (Math.random() - 0.5) * JITTER_FACTOR; + return delay * jitterMultiplier; +} + +function getRetryDelayFromHeaders(response: Response, retryAttempt: number): number { + const retryAfter = response.headers.get("Retry-After"); + if (retryAfter) { + const retryAfterSeconds = parseInt(retryAfter, 10); + if (!Number.isNaN(retryAfterSeconds) && retryAfterSeconds > 0) { + return Math.min(retryAfterSeconds * 1000, MAX_RETRY_DELAY); + } + + const retryAfterDate = new Date(retryAfter); + if (!Number.isNaN(retryAfterDate.getTime())) { + const delay = retryAfterDate.getTime() - Date.now(); + if (delay > 0) { + return Math.min(Math.max(delay, 0), MAX_RETRY_DELAY); + } + } + } + + const rateLimitReset = response.headers.get("X-RateLimit-Reset"); + if (rateLimitReset) { + const resetTime = parseInt(rateLimitReset, 10); + if (!Number.isNaN(resetTime)) { + const delay = resetTime * 1000 - Date.now(); + if (delay > 0) { + return addPositiveJitter(Math.min(delay, MAX_RETRY_DELAY)); + } + } + } + + return addSymmetricJitter(Math.min(INITIAL_RETRY_DELAY * 2 ** retryAttempt, MAX_RETRY_DELAY)); +} + +export async function requestWithRetries( + requestFn: () => Promise, + maxRetries: number = DEFAULT_MAX_RETRIES, +): Promise { + let response: Response = await requestFn(); + + for (let i = 0; i < maxRetries; ++i) { + if ([408, 429].includes(response.status) || response.status >= 500) { + const delay = getRetryDelayFromHeaders(response, i); + + await new Promise((resolve) => setTimeout(resolve, delay)); + response = await requestFn(); + } else { + break; + } + } + return response!; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/fetcher/signals.ts b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/signals.ts new file mode 100644 index 000000000000..7bd3757ec3a7 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/fetcher/signals.ts @@ -0,0 +1,26 @@ +const TIMEOUT = "timeout"; + +export function getTimeoutSignal(timeoutMs: number): { signal: AbortSignal; abortId: ReturnType } { + const controller = new AbortController(); + const abortId = setTimeout(() => controller.abort(TIMEOUT), timeoutMs); + return { signal: controller.signal, abortId }; +} + +export function anySignal(...args: AbortSignal[] | [AbortSignal[]]): AbortSignal { + const signals = (args.length === 1 && Array.isArray(args[0]) ? 
args[0] : args) as AbortSignal[]; + + const controller = new AbortController(); + + for (const signal of signals) { + if (signal.aborted) { + controller.abort((signal as any)?.reason); + break; + } + + signal.addEventListener("abort", () => controller.abort((signal as any)?.reason), { + signal: controller.signal, + }); + } + + return controller.signal; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/headers.ts b/seed/ts-sdk/ts-extra-properties/src/core/headers.ts new file mode 100644 index 000000000000..be45c4552a35 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/headers.ts @@ -0,0 +1,33 @@ +export function mergeHeaders(...headersArray: (Record | null | undefined)[]): Record { + const result: Record = {}; + + for (const [key, value] of headersArray + .filter((headers) => headers != null) + .flatMap((headers) => Object.entries(headers))) { + const insensitiveKey = key.toLowerCase(); + if (value != null) { + result[insensitiveKey] = value; + } else if (insensitiveKey in result) { + delete result[insensitiveKey]; + } + } + + return result; +} + +export function mergeOnlyDefinedHeaders( + ...headersArray: (Record | null | undefined)[] +): Record { + const result: Record = {}; + + for (const [key, value] of headersArray + .filter((headers) => headers != null) + .flatMap((headers) => Object.entries(headers))) { + const insensitiveKey = key.toLowerCase(); + if (value != null) { + result[insensitiveKey] = value; + } + } + + return result; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/index.ts new file mode 100644 index 000000000000..8e3274c01c6f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/index.ts @@ -0,0 +1,5 @@ +export * from "./fetcher/index.js"; +export * as logging from "./logging/index.js"; +export * from "./runtime/index.js"; +export * as serialization from "./schemas/index.js"; +export * as url from "./url/index.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/json.ts b/seed/ts-sdk/ts-extra-properties/src/core/json.ts new file mode 100644 index 000000000000..c052f3249f4f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/json.ts @@ -0,0 +1,27 @@ +/** + * Serialize a value to JSON + * @param value A JavaScript value, usually an object or array, to be converted. + * @param replacer A function that transforms the results. + * @param space Adds indentation, white space, and line break characters to the return-value JSON text to make it easier to read. + * @returns JSON string + */ +export const toJson = ( + value: unknown, + replacer?: (this: unknown, key: string, value: unknown) => unknown, + space?: string | number, +): string => { + return JSON.stringify(value, replacer, space); +}; + +/** + * Parse JSON string to object, array, or other type + * @param text A valid JSON string. + * @param reviver A function that transforms the results. This function is called for each member of the object. If a member contains nested objects, the nested objects are transformed before the parent object is. 
+ * @returns Parsed object, array, or other type + */ +export function fromJson( + text: string, + reviver?: (this: unknown, key: string, value: unknown) => unknown, +): T { + return JSON.parse(text, reviver); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/logging/exports.ts b/seed/ts-sdk/ts-extra-properties/src/core/logging/exports.ts new file mode 100644 index 000000000000..88f6c00db0cf --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/logging/exports.ts @@ -0,0 +1,19 @@ +import * as logger from "./logger.js"; + +export namespace logging { + /** + * Configuration for logger instances. + */ + export type LogConfig = logger.LogConfig; + export type LogLevel = logger.LogLevel; + export const LogLevel: typeof logger.LogLevel = logger.LogLevel; + export type ILogger = logger.ILogger; + /** + * Console logger implementation that outputs to the console. + */ + export type ConsoleLogger = logger.ConsoleLogger; + /** + * Console logger implementation that outputs to the console. + */ + export const ConsoleLogger: typeof logger.ConsoleLogger = logger.ConsoleLogger; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/logging/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/logging/index.ts new file mode 100644 index 000000000000..d81cc32c40f9 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/logging/index.ts @@ -0,0 +1 @@ +export * from "./logger.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/logging/logger.ts b/seed/ts-sdk/ts-extra-properties/src/core/logging/logger.ts new file mode 100644 index 000000000000..a3f3673cda93 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/logging/logger.ts @@ -0,0 +1,203 @@ +export const LogLevel = { + Debug: "debug", + Info: "info", + Warn: "warn", + Error: "error", +} as const; +export type LogLevel = (typeof LogLevel)[keyof typeof LogLevel]; +const logLevelMap: Record = { + [LogLevel.Debug]: 1, + [LogLevel.Info]: 2, + [LogLevel.Warn]: 3, + [LogLevel.Error]: 4, +}; + +export interface ILogger { + /** + * Logs a debug message. + * @param message - The message to log + * @param args - Additional arguments to log + */ + debug(message: string, ...args: unknown[]): void; + /** + * Logs an info message. + * @param message - The message to log + * @param args - Additional arguments to log + */ + info(message: string, ...args: unknown[]): void; + /** + * Logs a warning message. + * @param message - The message to log + * @param args - Additional arguments to log + */ + warn(message: string, ...args: unknown[]): void; + /** + * Logs an error message. + * @param message - The message to log + * @param args - Additional arguments to log + */ + error(message: string, ...args: unknown[]): void; +} + +/** + * Configuration for logger initialization. + */ +export interface LogConfig { + /** + * Minimum log level to output. + * @default LogLevel.Info + */ + level?: LogLevel; + /** + * Logger implementation to use. + * @default new ConsoleLogger() + */ + logger?: ILogger; + /** + * Whether logging should be silenced. + * @default true + */ + silent?: boolean; +} + +/** + * Default console-based logger implementation. 
+ */ +export class ConsoleLogger implements ILogger { + debug(message: string, ...args: unknown[]): void { + console.debug(message, ...args); + } + info(message: string, ...args: unknown[]): void { + console.info(message, ...args); + } + warn(message: string, ...args: unknown[]): void { + console.warn(message, ...args); + } + error(message: string, ...args: unknown[]): void { + console.error(message, ...args); + } +} + +/** + * Logger class that provides level-based logging functionality. + */ +export class Logger { + private readonly level: number; + private readonly logger: ILogger; + private readonly silent: boolean; + + /** + * Creates a new logger instance. + * @param config - Logger configuration + */ + constructor(config: Required) { + this.level = logLevelMap[config.level]; + this.logger = config.logger; + this.silent = config.silent; + } + + /** + * Checks if a log level should be output based on configuration. + * @param level - The log level to check + * @returns True if the level should be logged + */ + public shouldLog(level: LogLevel): boolean { + return !this.silent && this.level <= logLevelMap[level]; + } + + /** + * Checks if debug logging is enabled. + * @returns True if debug logs should be output + */ + public isDebug(): boolean { + return this.shouldLog(LogLevel.Debug); + } + + /** + * Logs a debug message if debug logging is enabled. + * @param message - The message to log + * @param args - Additional arguments to log + */ + public debug(message: string, ...args: unknown[]): void { + if (this.isDebug()) { + this.logger.debug(message, ...args); + } + } + + /** + * Checks if info logging is enabled. + * @returns True if info logs should be output + */ + public isInfo(): boolean { + return this.shouldLog(LogLevel.Info); + } + + /** + * Logs an info message if info logging is enabled. + * @param message - The message to log + * @param args - Additional arguments to log + */ + public info(message: string, ...args: unknown[]): void { + if (this.isInfo()) { + this.logger.info(message, ...args); + } + } + + /** + * Checks if warning logging is enabled. + * @returns True if warning logs should be output + */ + public isWarn(): boolean { + return this.shouldLog(LogLevel.Warn); + } + + /** + * Logs a warning message if warning logging is enabled. + * @param message - The message to log + * @param args - Additional arguments to log + */ + public warn(message: string, ...args: unknown[]): void { + if (this.isWarn()) { + this.logger.warn(message, ...args); + } + } + + /** + * Checks if error logging is enabled. + * @returns True if error logs should be output + */ + public isError(): boolean { + return this.shouldLog(LogLevel.Error); + } + + /** + * Logs an error message if error logging is enabled. + * @param message - The message to log + * @param args - Additional arguments to log + */ + public error(message: string, ...args: unknown[]): void { + if (this.isError()) { + this.logger.error(message, ...args); + } + } +} + +export function createLogger(config?: LogConfig | Logger): Logger { + if (config == null) { + return defaultLogger; + } + if (config instanceof Logger) { + return config; + } + config = config ?? 
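// Illustrative sketch: logging defaults to silent (`silent: true`), so the debug/error calls
// in Fetcher.ts emit nothing unless a caller opts in, e.g.:
//
//   const logger = createLogger({ level: LogLevel.Debug, silent: false });
//   logger.debug("emitted because silent=false and the level threshold is Debug");
//
// Passing an existing Logger returns it unchanged; passing nothing returns the shared
// default (silent) logger.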
{}; + config.level ??= LogLevel.Info; + config.logger ??= new ConsoleLogger(); + config.silent ??= true; + return new Logger(config as Required); +} + +const defaultLogger: Logger = new Logger({ + level: LogLevel.Info, + logger: new ConsoleLogger(), + silent: true, +}); diff --git a/seed/ts-sdk/ts-extra-properties/src/core/runtime/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/runtime/index.ts new file mode 100644 index 000000000000..cfab23f9a834 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/runtime/index.ts @@ -0,0 +1 @@ +export { RUNTIME } from "./runtime.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/runtime/runtime.ts b/seed/ts-sdk/ts-extra-properties/src/core/runtime/runtime.ts new file mode 100644 index 000000000000..56ebbb87c4d3 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/runtime/runtime.ts @@ -0,0 +1,134 @@ +interface DenoGlobal { + version: { + deno: string; + }; +} + +interface BunGlobal { + version: string; +} + +declare const Deno: DenoGlobal | undefined; +declare const Bun: BunGlobal | undefined; +declare const EdgeRuntime: string | undefined; +declare const self: typeof globalThis.self & { + importScripts?: unknown; +}; + +/** + * A constant that indicates which environment and version the SDK is running in. + */ +export const RUNTIME: Runtime = evaluateRuntime(); + +export interface Runtime { + type: "browser" | "web-worker" | "deno" | "bun" | "node" | "react-native" | "unknown" | "workerd" | "edge-runtime"; + version?: string; + parsedVersion?: number; +} + +function evaluateRuntime(): Runtime { + /** + * A constant that indicates whether the environment the code is running is a Web Browser. + */ + const isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; + if (isBrowser) { + return { + type: "browser", + version: window.navigator.userAgent, + }; + } + + /** + * A constant that indicates whether the environment the code is running is Cloudflare. + * https://developers.cloudflare.com/workers/runtime-apis/web-standards/#navigatoruseragent + */ + const isCloudflare = typeof globalThis !== "undefined" && globalThis?.navigator?.userAgent === "Cloudflare-Workers"; + if (isCloudflare) { + return { + type: "workerd", + }; + } + + /** + * A constant that indicates whether the environment the code is running is Edge Runtime. + * https://vercel.com/docs/functions/runtimes/edge-runtime#check-if-you're-running-on-the-edge-runtime + */ + const isEdgeRuntime = typeof EdgeRuntime === "string"; + if (isEdgeRuntime) { + return { + type: "edge-runtime", + }; + } + + /** + * A constant that indicates whether the environment the code is running is a Web Worker. + */ + const isWebWorker = + typeof self === "object" && + typeof self?.importScripts === "function" && + (self.constructor?.name === "DedicatedWorkerGlobalScope" || + self.constructor?.name === "ServiceWorkerGlobalScope" || + self.constructor?.name === "SharedWorkerGlobalScope"); + if (isWebWorker) { + return { + type: "web-worker", + }; + } + + /** + * A constant that indicates whether the environment the code is running is Deno. + * FYI Deno spoofs process.versions.node, see https://deno.land/std@0.177.0/node/process.ts?s=versions + */ + const isDeno = + typeof Deno !== "undefined" && typeof Deno.version !== "undefined" && typeof Deno.version.deno !== "undefined"; + if (isDeno) { + return { + type: "deno", + version: Deno.version.deno, + }; + } + + /** + * A constant that indicates whether the environment the code is running is Bun.sh. 
+ */ + const isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; + if (isBun) { + return { + type: "bun", + version: Bun.version, + }; + } + + /** + * A constant that indicates whether the environment the code is running is in React-Native. + * This check should come before Node.js detection since React Native may have a process polyfill. + * https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js + */ + const isReactNative = typeof navigator !== "undefined" && navigator?.product === "ReactNative"; + if (isReactNative) { + return { + type: "react-native", + }; + } + + /** + * A constant that indicates whether the environment the code is running is Node.JS. + */ + const isNode = + typeof process !== "undefined" && + "version" in process && + !!process.version && + "versions" in process && + !!process.versions?.node; + if (isNode) { + return { + type: "node", + version: process.versions.node, + parsedVersion: Number(process.versions.node.split(".")[0]), + }; + } + + return { + type: "unknown", + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/Schema.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/Schema.ts new file mode 100644 index 000000000000..4cd8b1d95934 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/Schema.ts @@ -0,0 +1,103 @@ +import type { SchemaUtils } from "./builders/index.js"; + +export type Schema = BaseSchema & SchemaUtils; + +export type inferRaw = S extends Schema ? Raw : never; +export type inferParsed = S extends Schema ? Parsed : never; + +export interface BaseSchema { + parse: (raw: unknown, opts?: SchemaOptions) => MaybeValid; + json: (parsed: unknown, opts?: SchemaOptions) => MaybeValid; + getType: () => SchemaType | SchemaType; +} + +export const SchemaType = { + BIGINT: "bigint", + DATE: "date", + ENUM: "enum", + LIST: "list", + STRING_LITERAL: "stringLiteral", + BOOLEAN_LITERAL: "booleanLiteral", + OBJECT: "object", + ANY: "any", + BOOLEAN: "boolean", + NUMBER: "number", + STRING: "string", + UNKNOWN: "unknown", + NEVER: "never", + RECORD: "record", + SET: "set", + UNION: "union", + UNDISCRIMINATED_UNION: "undiscriminatedUnion", + NULLABLE: "nullable", + OPTIONAL: "optional", + OPTIONAL_NULLABLE: "optionalNullable", +} as const; + +export type SchemaType = (typeof SchemaType)[keyof typeof SchemaType]; + +export type MaybeValid = Valid | Invalid; + +export interface Valid { + ok: true; + value: T; +} + +export interface Invalid { + ok: false; + errors: ValidationError[]; +} + +export interface ValidationError { + path: string[]; + message: string; +} + +export interface SchemaOptions { + /** + * how to handle unrecognized keys in objects + * + * @default "fail" + */ + unrecognizedObjectKeys?: "fail" | "passthrough" | "strip"; + + /** + * whether to fail when an unrecognized discriminant value is + * encountered in a union + * + * @default false + */ + allowUnrecognizedUnionMembers?: boolean; + + /** + * whether to fail when an unrecognized enum value is encountered + * + * @default false + */ + allowUnrecognizedEnumValues?: boolean; + + /** + * whether to allow data that doesn't conform to the schema. + * invalid data is passed through without transformation. + * + * when this is enabled, .parse() and .json() will always + * return `ok: true`. `.parseOrThrow()` and `.jsonOrThrow()` + * will never fail. 
+ * + * @default false + */ + skipValidation?: boolean; + + /** + * each validation failure contains a "path" property, which is + * the breadcrumbs to the offending node in the JSON. you can supply + * a prefix that is prepended to all the errors' paths. this can be + * helpful for zurg's internal debug logging. + */ + breadcrumbsPrefix?: string[]; + + /** + * whether to send 'null' for optional properties explicitly set to 'undefined'. + */ + omitUndefined?: boolean; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/bigint/bigint.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/bigint/bigint.ts new file mode 100644 index 000000000000..2c7c74c54a37 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/bigint/bigint.ts @@ -0,0 +1,55 @@ +import { type BaseSchema, type Schema, SchemaType } from "../../Schema.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; + +export function bigint(): Schema { + const baseSchema: BaseSchema = { + parse: (raw, { breadcrumbsPrefix = [] } = {}) => { + if (typeof raw === "bigint") { + return { + ok: true, + value: raw, + }; + } + if (typeof raw === "number") { + return { + ok: true, + value: BigInt(raw), + }; + } + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(raw, "bigint | number"), + }, + ], + }; + }, + json: (bigint, { breadcrumbsPrefix = [] } = {}) => { + if (typeof bigint !== "bigint") { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(bigint, "bigint"), + }, + ], + }; + } + return { + ok: true, + value: bigint, + }; + }, + getType: () => SchemaType.BIGINT, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/bigint/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/bigint/index.ts new file mode 100644 index 000000000000..13cc76e25b2a --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/bigint/index.ts @@ -0,0 +1 @@ +export { bigint } from "./bigint.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/date/date.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/date/date.ts new file mode 100644 index 000000000000..f02e3367f88f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/date/date.ts @@ -0,0 +1,65 @@ +import { type BaseSchema, type Schema, SchemaType } from "../../Schema.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; + +// https://stackoverflow.com/questions/12756159/regex-and-iso8601-formatted-datetime +const ISO_8601_REGEX = + /^([+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24:?00)([.,]\d+(?!:))?)?(\17[0-5]\d([.,]\d+)?)?([zZ]|([+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)?$/; + +export function date(): Schema { + const baseSchema: BaseSchema = { + parse: (raw, { breadcrumbsPrefix = [] } = {}) => { + if (typeof raw !== "string") { + return { + ok: 
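// Illustrative sketch: like the other builders in this package, `bigint()` above reports
// failures through a MaybeValid result instead of throwing (the *OrThrow helpers come from
// the schema-utils referenced here, not shown in this excerpt):
//
//   bigint().parse(42);    // -> { ok: true, value: 42n }
//   bigint().parse("42");  // -> { ok: false, errors: [{ path: [], message: "..." }] }
//   bigint().json(42n);    // -> { ok: true, value: 42n }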
false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(raw, "string"), + }, + ], + }; + } + if (!ISO_8601_REGEX.test(raw)) { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(raw, "ISO 8601 date string"), + }, + ], + }; + } + return { + ok: true, + value: new Date(raw), + }; + }, + json: (date, { breadcrumbsPrefix = [] } = {}) => { + if (date instanceof Date) { + return { + ok: true, + value: date.toISOString(), + }; + } else { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(date, "Date object"), + }, + ], + }; + } + }, + getType: () => SchemaType.DATE, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/date/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/date/index.ts new file mode 100644 index 000000000000..e22a2f16bfc9 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/date/index.ts @@ -0,0 +1 @@ +export { date } from "./date.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/enum/enum.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/enum/enum.ts new file mode 100644 index 000000000000..ccae24bcf200 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/enum/enum.ts @@ -0,0 +1,43 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; + +export function enum_(values: E): Schema { + const validValues = new Set(values); + + const schemaCreator = createIdentitySchemaCreator( + SchemaType.ENUM, + (value, { allowUnrecognizedEnumValues, breadcrumbsPrefix = [] } = {}) => { + if (typeof value !== "string") { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "string"), + }, + ], + }; + } + + if (!validValues.has(value) && !allowUnrecognizedEnumValues) { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "enum"), + }, + ], + }; + } + + return { + ok: true, + value: value as U, + }; + }, + ); + + return schemaCreator(); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/enum/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/enum/index.ts new file mode 100644 index 000000000000..ff3bee3bf653 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/enum/index.ts @@ -0,0 +1 @@ +export { enum_ } from "./enum.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/index.ts new file mode 100644 index 000000000000..ddb9b3c94555 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/index.ts @@ -0,0 +1,14 @@ +export * from "./bigint/index.js"; +export * from "./date/index.js"; +export * from "./enum/index.js"; +export * from "./lazy/index.js"; +export * from "./list/index.js"; +export * from "./literals/index.js"; +export * from "./object/index.js"; +export * from "./object-like/index.js"; +export * from "./primitives/index.js"; +export * from "./record/index.js"; +export * from "./schema-utils/index.js"; +export * from 
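// Illustrative sketch for the `enum_` builder exported above; unrecognized values fail by
// default and can be allowed via SchemaOptions:
//
//   const color = enum_(["red", "green"]);
//   color.parse("red");                                           // ok: true
//   color.parse("blue");                                          // ok: false
//   color.parse("blue", { allowUnrecognizedEnumValues: true });   // ok: true, value "blue"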
"./set/index.js"; +export * from "./undiscriminated-union/index.js"; +export * from "./union/index.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/index.ts new file mode 100644 index 000000000000..e8ca40992061 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/index.ts @@ -0,0 +1,3 @@ +export type { SchemaGetter } from "./lazy.js"; +export { lazy } from "./lazy.js"; +export { lazyObject } from "./lazyObject.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/lazy.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/lazy.ts new file mode 100644 index 000000000000..37f28871683a --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/lazy.ts @@ -0,0 +1,32 @@ +import type { BaseSchema, Schema } from "../../Schema.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; + +export type SchemaGetter> = () => SchemaType; + +export function lazy(getter: SchemaGetter>): Schema { + const baseSchema = constructLazyBaseSchema(getter); + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + }; +} + +export function constructLazyBaseSchema( + getter: SchemaGetter>, +): BaseSchema { + return { + parse: (raw, opts) => getMemoizedSchema(getter).parse(raw, opts), + json: (parsed, opts) => getMemoizedSchema(getter).json(parsed, opts), + getType: () => getMemoizedSchema(getter).getType(), + }; +} + +type MemoizedGetter> = SchemaGetter & { __zurg_memoized?: SchemaType }; + +export function getMemoizedSchema>(getter: SchemaGetter): SchemaType { + const castedGetter = getter as MemoizedGetter; + if (castedGetter.__zurg_memoized == null) { + castedGetter.__zurg_memoized = getter(); + } + return castedGetter.__zurg_memoized; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/lazyObject.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/lazyObject.ts new file mode 100644 index 000000000000..192c90e5c83e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/lazy/lazyObject.ts @@ -0,0 +1,20 @@ +import { getObjectUtils } from "../object/index.js"; +import type { BaseObjectSchema, ObjectSchema } from "../object/types.js"; +import { getObjectLikeUtils } from "../object-like/index.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; +import { constructLazyBaseSchema, getMemoizedSchema, type SchemaGetter } from "./lazy.js"; + +export function lazyObject(getter: SchemaGetter>): ObjectSchema { + const baseSchema: BaseObjectSchema = { + ...constructLazyBaseSchema(getter), + _getRawProperties: () => getMemoizedSchema(getter)._getRawProperties(), + _getParsedProperties: () => getMemoizedSchema(getter)._getParsedProperties(), + }; + + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + ...getObjectLikeUtils(baseSchema), + ...getObjectUtils(baseSchema), + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/list/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/list/index.ts new file mode 100644 index 000000000000..021f1e4df1ff --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/list/index.ts @@ -0,0 +1 @@ +export { list } from "./list.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/list/list.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/list/list.ts new file mode 100644 index 000000000000..4f8c10ba483a --- 
/dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/list/list.ts @@ -0,0 +1,73 @@ +import { type BaseSchema, type MaybeValid, type Schema, SchemaType, type ValidationError } from "../../Schema.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; + +export function list(schema: Schema): Schema { + const baseSchema: BaseSchema = { + parse: (raw, opts) => + validateAndTransformArray(raw, (item, index) => + schema.parse(item, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), `[${index}]`], + }), + ), + json: (parsed, opts) => + validateAndTransformArray(parsed, (item, index) => + schema.json(item, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), `[${index}]`], + }), + ), + getType: () => SchemaType.LIST, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + }; +} + +function validateAndTransformArray( + value: unknown, + transformItem: (item: Raw, index: number) => MaybeValid, +): MaybeValid { + if (!Array.isArray(value)) { + return { + ok: false, + errors: [ + { + message: getErrorMessageForIncorrectType(value, "list"), + path: [], + }, + ], + }; + } + + const maybeValidItems = value.map((item, index) => transformItem(item, index)); + + return maybeValidItems.reduce>( + (acc, item) => { + if (acc.ok && item.ok) { + return { + ok: true, + value: [...acc.value, item.value], + }; + } + + const errors: ValidationError[] = []; + if (!acc.ok) { + errors.push(...acc.errors); + } + if (!item.ok) { + errors.push(...item.errors); + } + + return { + ok: false, + errors, + }; + }, + { ok: true, value: [] }, + ); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/booleanLiteral.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/booleanLiteral.ts new file mode 100644 index 000000000000..db5d2c7a7313 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/booleanLiteral.ts @@ -0,0 +1,29 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; + +export function booleanLiteral(literal: V): Schema { + const schemaCreator = createIdentitySchemaCreator( + SchemaType.BOOLEAN_LITERAL, + (value, { breadcrumbsPrefix = [] } = {}) => { + if (value === literal) { + return { + ok: true, + value: literal, + }; + } else { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, `${literal.toString()}`), + }, + ], + }; + } + }, + ); + + return schemaCreator(); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/index.ts new file mode 100644 index 000000000000..4a4ab39d91a7 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/index.ts @@ -0,0 +1,2 @@ +export { booleanLiteral } from "./booleanLiteral.js"; +export { stringLiteral } from "./stringLiteral.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/stringLiteral.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/stringLiteral.ts new file mode 100644 index 
000000000000..ce6e20caf8d6 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/literals/stringLiteral.ts @@ -0,0 +1,29 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; + +export function stringLiteral(literal: V): Schema { + const schemaCreator = createIdentitySchemaCreator( + SchemaType.STRING_LITERAL, + (value, { breadcrumbsPrefix = [] } = {}) => { + if (value === literal) { + return { + ok: true, + value: literal, + }; + } else { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, `"${literal}"`), + }, + ], + }; + } + }, + ); + + return schemaCreator(); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/getObjectLikeUtils.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/getObjectLikeUtils.ts new file mode 100644 index 000000000000..af69acb01dc1 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/getObjectLikeUtils.ts @@ -0,0 +1,79 @@ +import type { BaseSchema } from "../../Schema.js"; +import { filterObject } from "../../utils/filterObject.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { isPlainObject } from "../../utils/isPlainObject.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; +import type { ObjectLikeSchema, ObjectLikeUtils } from "./types.js"; + +export function getObjectLikeUtils(schema: BaseSchema): ObjectLikeUtils { + return { + withParsedProperties: (properties) => withParsedProperties(schema, properties), + }; +} + +/** + * object-like utils are defined in one file to resolve issues with circular imports + */ + +export function withParsedProperties( + objectLike: BaseSchema, + properties: { [K in keyof Properties]: Properties[K] | ((parsed: ParsedObjectShape) => Properties[K]) }, +): ObjectLikeSchema { + const objectSchema: BaseSchema = { + parse: (raw, opts) => { + const parsedObject = objectLike.parse(raw, opts); + if (!parsedObject.ok) { + return parsedObject; + } + + const additionalProperties = Object.entries(properties).reduce>( + (processed, [key, value]) => { + return { + ...processed, + [key]: typeof value === "function" ? value(parsedObject.value) : value, + }; + }, + {}, + ); + + return { + ok: true, + value: { + ...parsedObject.value, + ...(additionalProperties as Properties), + }, + }; + }, + + json: (parsed, opts) => { + if (!isPlainObject(parsed)) { + return { + ok: false, + errors: [ + { + path: opts?.breadcrumbsPrefix ?? 
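// Illustrative sketch for `withParsedProperties` (defined here): it adds computed,
// parse-only fields and strips them back out before serialization. Assumes the `object` and
// `string` builders exported elsewhere in this package:
//
//   const schema = object({ name: string() }).withParsedProperties({
//       greeting: (parsed) => `hello ${parsed.name}`,
//   });
//   schema.parse({ name: "fern" });               // -> { name: "fern", greeting: "hello fern" }
//   schema.json({ name: "fern", greeting: "x" }); // "greeting" is removed before json()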
[], + message: getErrorMessageForIncorrectType(parsed, "object"), + }, + ], + }; + } + + // strip out added properties + const addedPropertyKeys = new Set(Object.keys(properties)); + const parsedWithoutAddedProperties = filterObject( + parsed, + Object.keys(parsed).filter((key) => !addedPropertyKeys.has(key)), + ); + + return objectLike.json(parsedWithoutAddedProperties as ParsedObjectShape, opts); + }, + + getType: () => objectLike.getType(), + }; + + return { + ...objectSchema, + ...getSchemaUtils(objectSchema), + ...getObjectLikeUtils(objectSchema), + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/index.ts new file mode 100644 index 000000000000..2451ef7d0e50 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/index.ts @@ -0,0 +1,2 @@ +export { getObjectLikeUtils, withParsedProperties } from "./getObjectLikeUtils.js"; +export type { ObjectLikeSchema, ObjectLikeUtils } from "./types.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/types.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/types.ts new file mode 100644 index 000000000000..44b9669108cb --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object-like/types.ts @@ -0,0 +1,13 @@ +import type { BaseSchema, Schema } from "../../Schema.js"; + +export type ObjectLikeSchema = Schema & + BaseSchema & + ObjectLikeUtils; + +export interface ObjectLikeUtils { + withParsedProperties: >( + properties: { + [K in keyof T]: T[K] | ((parsed: Parsed) => T[K]); + }, + ) => ObjectLikeSchema; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/index.ts new file mode 100644 index 000000000000..c6611aaacd3b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/index.ts @@ -0,0 +1,22 @@ +export { getObjectUtils, object } from "./object.js"; +export type { + inferObjectWithoutOptionalPropertiesSchemaFromPropertySchemas, + inferParsedObjectWithoutOptionalPropertiesFromPropertySchemas, +} from "./objectWithoutOptionalProperties.js"; +export { objectWithoutOptionalProperties } from "./objectWithoutOptionalProperties.js"; +export type { Property } from "./property.js"; +export { isProperty, property } from "./property.js"; +export type { + BaseObjectSchema, + inferObjectSchemaFromPropertySchemas, + inferParsedObject, + inferParsedObjectFromPropertySchemas, + inferParsedPropertySchema, + inferRawKey, + inferRawObject, + inferRawObjectFromPropertySchemas, + inferRawPropertySchema, + ObjectSchema, + ObjectUtils, + PropertySchemas, +} from "./types.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/object.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/object.ts new file mode 100644 index 000000000000..9ec570a15d77 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/object.ts @@ -0,0 +1,400 @@ +import { type MaybeValid, type Schema, SchemaType, type ValidationError } from "../../Schema.js"; +import { entries } from "../../utils/entries.js"; +import { filterObject } from "../../utils/filterObject.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { isPlainObject } from "../../utils/isPlainObject.js"; +import { keys } from 
"../../utils/keys.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { partition } from "../../utils/partition.js"; +import { getObjectLikeUtils } from "../object-like/index.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; +import { isProperty } from "./property.js"; +import type { + BaseObjectSchema, + inferObjectSchemaFromPropertySchemas, + inferParsedObjectFromPropertySchemas, + inferRawObjectFromPropertySchemas, + ObjectSchema, + ObjectUtils, + PropertySchemas, +} from "./types.js"; + +interface ObjectPropertyWithRawKey { + rawKey: string; + parsedKey: string; + valueSchema: Schema; +} + +export function object>( + schemas: T, +): inferObjectSchemaFromPropertySchemas { + const baseSchema: BaseObjectSchema< + inferRawObjectFromPropertySchemas, + inferParsedObjectFromPropertySchemas + > = { + _getRawProperties: () => + Object.entries(schemas).map(([parsedKey, propertySchema]) => + isProperty(propertySchema) ? propertySchema.rawKey : parsedKey, + ) as unknown as (keyof inferRawObjectFromPropertySchemas)[], + _getParsedProperties: () => keys(schemas) as unknown as (keyof inferParsedObjectFromPropertySchemas)[], + + parse: (raw, opts) => { + const rawKeyToProperty: Record = {}; + const requiredKeys: string[] = []; + + for (const [parsedKey, schemaOrObjectProperty] of entries(schemas)) { + const rawKey = isProperty(schemaOrObjectProperty) ? schemaOrObjectProperty.rawKey : parsedKey; + const valueSchema: Schema = isProperty(schemaOrObjectProperty) + ? schemaOrObjectProperty.valueSchema + : schemaOrObjectProperty; + + const property: ObjectPropertyWithRawKey = { + rawKey, + parsedKey: parsedKey as string, + valueSchema, + }; + + rawKeyToProperty[rawKey] = property; + + if (isSchemaRequired(valueSchema)) { + requiredKeys.push(rawKey); + } + } + + return validateAndTransformObject({ + value: raw, + requiredKeys, + getProperty: (rawKey) => { + const property = rawKeyToProperty[rawKey]; + if (property == null) { + return undefined; + } + return { + transformedKey: property.parsedKey, + transform: (propertyValue) => + property.valueSchema.parse(propertyValue, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), rawKey], + }), + }; + }, + unrecognizedObjectKeys: opts?.unrecognizedObjectKeys, + skipValidation: opts?.skipValidation, + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + omitUndefined: opts?.omitUndefined, + }); + }, + + json: (parsed, opts) => { + const requiredKeys: string[] = []; + + for (const [parsedKey, schemaOrObjectProperty] of entries(schemas)) { + const valueSchema: Schema = isProperty(schemaOrObjectProperty) + ? schemaOrObjectProperty.valueSchema + : schemaOrObjectProperty; + + if (isSchemaRequired(valueSchema)) { + requiredKeys.push(parsedKey as string); + } + } + + return validateAndTransformObject({ + value: parsed, + requiredKeys, + getProperty: ( + parsedKey, + ): { transformedKey: string; transform: (propertyValue: object) => MaybeValid } | undefined => { + const property = schemas[parsedKey as keyof T]; + + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (property == null) { + return undefined; + } + + if (isProperty(property)) { + return { + transformedKey: property.rawKey, + transform: (propertyValue) => + property.valueSchema.json(propertyValue, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? 
[]), parsedKey], + }), + }; + } else { + return { + transformedKey: parsedKey, + transform: (propertyValue) => + property.json(propertyValue, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), parsedKey], + }), + }; + } + }, + unrecognizedObjectKeys: opts?.unrecognizedObjectKeys, + skipValidation: opts?.skipValidation, + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + omitUndefined: opts?.omitUndefined, + }); + }, + + getType: () => SchemaType.OBJECT, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + ...getObjectLikeUtils(baseSchema), + ...getObjectUtils(baseSchema), + }; +} + +function validateAndTransformObject({ + value, + requiredKeys, + getProperty, + unrecognizedObjectKeys = "fail", + skipValidation = false, + breadcrumbsPrefix = [], +}: { + value: unknown; + requiredKeys: string[]; + getProperty: ( + preTransformedKey: string, + ) => { transformedKey: string; transform: (propertyValue: object) => MaybeValid } | undefined; + unrecognizedObjectKeys: "fail" | "passthrough" | "strip" | undefined; + skipValidation: boolean | undefined; + breadcrumbsPrefix: string[] | undefined; + omitUndefined: boolean | undefined; +}): MaybeValid { + if (!isPlainObject(value)) { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "object"), + }, + ], + }; + } + + const missingRequiredKeys = new Set(requiredKeys); + const errors: ValidationError[] = []; + const transformed: Record = {}; + + for (const [preTransformedKey, preTransformedItemValue] of Object.entries(value)) { + const property = getProperty(preTransformedKey); + + if (property != null) { + missingRequiredKeys.delete(preTransformedKey); + + const value = property.transform(preTransformedItemValue as object); + if (value.ok) { + transformed[property.transformedKey] = value.value; + } else { + transformed[preTransformedKey] = preTransformedItemValue; + errors.push(...value.errors); + } + } else { + switch (unrecognizedObjectKeys) { + case "fail": + errors.push({ + path: [...breadcrumbsPrefix, preTransformedKey], + message: `Unexpected key "${preTransformedKey}"`, + }); + break; + case "strip": + break; + case "passthrough": + transformed[preTransformedKey] = preTransformedItemValue; + break; + } + } + } + + errors.push( + ...requiredKeys + .filter((key) => missingRequiredKeys.has(key)) + .map((key) => ({ + path: breadcrumbsPrefix, + message: `Missing required key "${key}"`, + })), + ); + + if (errors.length === 0 || skipValidation) { + return { + ok: true, + value: transformed as Transformed, + }; + } else { + return { + ok: false, + errors, + }; + } +} + +export function getObjectUtils(schema: BaseObjectSchema): ObjectUtils { + return { + extend: (extension: ObjectSchema) => { + const baseSchema: BaseObjectSchema = { + _getParsedProperties: () => [...schema._getParsedProperties(), ...extension._getParsedProperties()], + _getRawProperties: () => [...schema._getRawProperties(), ...extension._getRawProperties()], + parse: (raw, opts) => { + return validateAndTransformExtendedObject({ + extensionKeys: extension._getRawProperties(), + value: raw, + transformBase: (rawBase) => schema.parse(rawBase, opts), + transformExtension: (rawExtension) => extension.parse(rawExtension, opts), + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + }); + }, + json: (parsed, opts) => { + return validateAndTransformExtendedObject({ + extensionKeys: extension._getParsedProperties(), + value: parsed, + transformBase: (parsedBase) => 
schema.json(parsedBase, opts), + transformExtension: (parsedExtension) => extension.json(parsedExtension, opts), + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + }); + }, + getType: () => SchemaType.OBJECT, + }; + + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + ...getObjectLikeUtils(baseSchema), + ...getObjectUtils(baseSchema), + }; + }, + passthrough: () => { + const knownRawKeys = new Set(schema._getRawProperties() as string[]); + const knownParsedKeys = new Set(schema._getParsedProperties() as string[]); + const baseSchema: BaseObjectSchema = + { + _getParsedProperties: () => schema._getParsedProperties(), + _getRawProperties: () => schema._getRawProperties(), + parse: (raw, opts) => { + const transformed = schema.parse(raw, { ...opts, unrecognizedObjectKeys: "passthrough" }); + if (!transformed.ok) { + return transformed; + } + const extraProperties: Record = {}; + if (typeof raw === "object" && raw != null) { + for (const [key, value] of Object.entries(raw)) { + if (!knownRawKeys.has(key)) { + extraProperties[key] = value; + } + } + } + return { + ok: true, + value: { + ...extraProperties, + ...transformed.value, + }, + }; + }, + json: (parsed, opts) => { + const transformed = schema.json(parsed, { ...opts, unrecognizedObjectKeys: "passthrough" }); + if (!transformed.ok) { + return transformed; + } + const extraProperties: Record = {}; + if (typeof parsed === "object" && parsed != null) { + for (const [key, value] of Object.entries(parsed)) { + if (!knownParsedKeys.has(key)) { + extraProperties[key] = value; + } + } + } + return { + ok: true, + value: { + ...extraProperties, + ...transformed.value, + }, + }; + }, + getType: () => SchemaType.OBJECT, + }; + + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + ...getObjectLikeUtils(baseSchema), + ...getObjectUtils(baseSchema), + }; + }, + }; +} + +function validateAndTransformExtendedObject({ + extensionKeys, + value, + transformBase, + transformExtension, + breadcrumbsPrefix = [], +}: { + extensionKeys: (keyof PreTransformedExtension)[]; + value: unknown; + transformBase: (value: object) => MaybeValid; + transformExtension: (value: object) => MaybeValid; + breadcrumbsPrefix?: string[]; +}): MaybeValid { + if (!isPlainObject(value)) { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "object"), + }, + ], + }; + } + + const extensionPropertiesSet = new Set(extensionKeys); + const [extensionProperties, baseProperties] = partition(keys(value), (key) => + extensionPropertiesSet.has(key as keyof PreTransformedExtension), + ); + + const transformedBase = transformBase(filterObject(value, baseProperties)); + const transformedExtension = transformExtension(filterObject(value, extensionProperties)); + + if (transformedBase.ok && transformedExtension.ok) { + return { + ok: true, + value: { + ...transformedBase.value, + ...transformedExtension.value, + }, + }; + } else { + return { + ok: false, + errors: [ + ...(transformedBase.ok ? [] : transformedBase.errors), + ...(transformedExtension.ok ? 
[] : transformedExtension.errors), + ], + }; + } +} + +function isSchemaRequired(schema: Schema): boolean { + return !isSchemaOptional(schema); +} + +function isSchemaOptional(schema: Schema): boolean { + switch (schema.getType()) { + case SchemaType.ANY: + case SchemaType.UNKNOWN: + case SchemaType.OPTIONAL: + case SchemaType.OPTIONAL_NULLABLE: + return true; + default: + return false; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/objectWithoutOptionalProperties.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/objectWithoutOptionalProperties.ts new file mode 100644 index 000000000000..4d39c862f0c4 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/objectWithoutOptionalProperties.ts @@ -0,0 +1,23 @@ +import { object } from "./object.js"; +import type { + inferParsedPropertySchema, + inferRawObjectFromPropertySchemas, + ObjectSchema, + PropertySchemas, +} from "./types.js"; + +export function objectWithoutOptionalProperties>( + schemas: T, +): inferObjectWithoutOptionalPropertiesSchemaFromPropertySchemas { + return object(schemas) as unknown as inferObjectWithoutOptionalPropertiesSchemaFromPropertySchemas; +} + +export type inferObjectWithoutOptionalPropertiesSchemaFromPropertySchemas> = + ObjectSchema< + inferRawObjectFromPropertySchemas, + inferParsedObjectWithoutOptionalPropertiesFromPropertySchemas + >; + +export type inferParsedObjectWithoutOptionalPropertiesFromPropertySchemas> = { + [K in keyof T]: inferParsedPropertySchema; +}; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/property.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/property.ts new file mode 100644 index 000000000000..d1f9f386aa64 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/property.ts @@ -0,0 +1,23 @@ +import type { Schema } from "../../Schema.js"; + +export function property( + rawKey: RawKey, + valueSchema: Schema, +): Property { + return { + rawKey, + valueSchema, + isProperty: true, + }; +} + +export interface Property { + rawKey: RawKey; + valueSchema: Schema; + isProperty: true; +} + +export function isProperty>(maybeProperty: unknown): maybeProperty is O { + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + return (maybeProperty as O).isProperty; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/types.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/types.ts new file mode 100644 index 000000000000..384ae873f5af --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/object/types.ts @@ -0,0 +1,58 @@ +import type { BaseSchema, inferParsed, inferRaw, Schema } from "../../Schema.js"; +import type { addQuestionMarksToNullableProperties } from "../../utils/addQuestionMarksToNullableProperties.js"; +import type { ObjectLikeUtils } from "../object-like/index.js"; +import type { SchemaUtils } from "../schema-utils/index.js"; +import type { Property } from "./property.js"; + +export type ObjectSchema = BaseObjectSchema & + ObjectLikeUtils & + ObjectUtils & + SchemaUtils; + +export interface BaseObjectSchema extends BaseSchema { + _getRawProperties: () => (keyof Raw)[]; + _getParsedProperties: () => (keyof Parsed)[]; +} + +export interface ObjectUtils { + extend: ( + schemas: ObjectSchema, + ) => ObjectSchema; + passthrough: () => ObjectSchema; +} + +export type inferRawObject> = O extends ObjectSchema ? 
Raw : never; + +export type inferParsedObject> = + O extends ObjectSchema ? Parsed : never; + +export type inferObjectSchemaFromPropertySchemas> = ObjectSchema< + inferRawObjectFromPropertySchemas, + inferParsedObjectFromPropertySchemas +>; + +export type inferRawObjectFromPropertySchemas> = + addQuestionMarksToNullableProperties<{ + [ParsedKey in keyof T as inferRawKey]: inferRawPropertySchema; + }>; + +export type inferParsedObjectFromPropertySchemas> = + addQuestionMarksToNullableProperties<{ + [K in keyof T]: inferParsedPropertySchema; + }>; + +export type PropertySchemas = Record< + ParsedKeys, + Property | Schema +>; + +export type inferRawPropertySchema

<P extends Property<any, any, any> | Schema<any, any>> = + P extends Property<any, infer Raw, any> ? Raw : P extends Schema<any, any> ? inferRaw<P>
: never; + +export type inferParsedPropertySchema<P extends Property<any, any, any> | Schema<any, any>> = + P extends Property<any, any, infer Parsed> ? Parsed : P extends Schema<any, any> ? inferParsed<P>
: never; + +export type inferRawKey< + ParsedKey extends string | number | symbol, + P extends Property | Schema, +> = P extends Property ? Raw : ParsedKey; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/any.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/any.ts new file mode 100644 index 000000000000..bc4d47fab56e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/any.ts @@ -0,0 +1,7 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; + +export const any: () => Schema = createIdentitySchemaCreator(SchemaType.ANY, (value) => ({ + ok: true, + value, +})); diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/boolean.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/boolean.ts new file mode 100644 index 000000000000..78c3c36284c7 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/boolean.ts @@ -0,0 +1,25 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; + +export const boolean: () => Schema = createIdentitySchemaCreator( + SchemaType.BOOLEAN, + (value, { breadcrumbsPrefix = [] } = {}) => { + if (typeof value === "boolean") { + return { + ok: true, + value, + }; + } else { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "boolean"), + }, + ], + }; + } + }, +); diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/index.ts new file mode 100644 index 000000000000..7a3ee0154829 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/index.ts @@ -0,0 +1,6 @@ +export { any } from "./any.js"; +export { boolean } from "./boolean.js"; +export { never } from "./never.js"; +export { number } from "./number.js"; +export { string } from "./string.js"; +export { unknown } from "./unknown.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/never.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/never.ts new file mode 100644 index 000000000000..91f85d74c01b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/never.ts @@ -0,0 +1,15 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; + +export const never: () => Schema = createIdentitySchemaCreator( + SchemaType.NEVER, + (_value, { breadcrumbsPrefix = [] } = {}) => ({ + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: "Expected never", + }, + ], + }), +); diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/number.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/number.ts new file mode 100644 index 000000000000..6f16cd462a1b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/number.ts @@ -0,0 +1,25 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; +import { 
getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; + +export const number: () => Schema = createIdentitySchemaCreator( + SchemaType.NUMBER, + (value, { breadcrumbsPrefix = [] } = {}) => { + if (typeof value === "number") { + return { + ok: true, + value, + }; + } else { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "number"), + }, + ], + }; + } + }, +); diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/string.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/string.ts new file mode 100644 index 000000000000..b29d72ae7ef1 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/string.ts @@ -0,0 +1,25 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; + +export const string: () => Schema = createIdentitySchemaCreator( + SchemaType.STRING, + (value, { breadcrumbsPrefix = [] } = {}) => { + if (typeof value === "string") { + return { + ok: true, + value, + }; + } else { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "string"), + }, + ], + }; + } + }, +); diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/unknown.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/unknown.ts new file mode 100644 index 000000000000..04514160366f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/primitives/unknown.ts @@ -0,0 +1,7 @@ +import { type Schema, SchemaType } from "../../Schema.js"; +import { createIdentitySchemaCreator } from "../../utils/createIdentitySchemaCreator.js"; + +export const unknown: () => Schema = createIdentitySchemaCreator( + SchemaType.UNKNOWN, + (value) => ({ ok: true, value }), +); diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/index.ts new file mode 100644 index 000000000000..b17997f7bf84 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/index.ts @@ -0,0 +1,2 @@ +export { record } from "./record.js"; +export type { BaseRecordSchema, RecordSchema } from "./types.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/record.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/record.ts new file mode 100644 index 000000000000..a489660399b7 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/record.ts @@ -0,0 +1,129 @@ +import { type MaybeValid, type Schema, SchemaType, type ValidationError } from "../../Schema.js"; +import { entries } from "../../utils/entries.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { isPlainObject } from "../../utils/isPlainObject.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; +import type { BaseRecordSchema, RecordSchema } from "./types.js"; + +export function record( + keySchema: Schema, + valueSchema: Schema, +): RecordSchema { + const baseSchema: BaseRecordSchema = { + parse: (raw, opts) => { + return validateAndTransformRecord({ + value: raw, + 
isKeyNumeric: keySchema.getType() === SchemaType.NUMBER, + transformKey: (key) => + keySchema.parse(key, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), `${key} (key)`], + }), + transformValue: (value, key) => + valueSchema.parse(value, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), `${key}`], + }), + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + }); + }, + json: (parsed, opts) => { + return validateAndTransformRecord({ + value: parsed, + isKeyNumeric: keySchema.getType() === SchemaType.NUMBER, + transformKey: (key) => + keySchema.json(key, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), `${key} (key)`], + }), + transformValue: (value, key) => + valueSchema.json(value, { + ...opts, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), `${key}`], + }), + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + }); + }, + getType: () => SchemaType.RECORD, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + }; +} + +function validateAndTransformRecord({ + value, + isKeyNumeric, + transformKey, + transformValue, + breadcrumbsPrefix = [], +}: { + value: unknown; + isKeyNumeric: boolean; + transformKey: (key: string | number) => MaybeValid; + transformValue: (value: unknown, key: string | number) => MaybeValid; + breadcrumbsPrefix: string[] | undefined; +}): MaybeValid> { + if (!isPlainObject(value)) { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "object"), + }, + ], + }; + } + + return entries(value).reduce>>( + (accPromise, [stringKey, value]) => { + if (value === undefined) { + return accPromise; + } + + const acc = accPromise; + + let key: string | number = stringKey; + if (isKeyNumeric) { + const numberKey = stringKey.length > 0 ? 
Number(stringKey) : NaN; + if (!Number.isNaN(numberKey)) { + key = numberKey; + } + } + const transformedKey = transformKey(key); + + const transformedValue = transformValue(value, key); + + if (acc.ok && transformedKey.ok && transformedValue.ok) { + return { + ok: true, + value: { + ...acc.value, + [transformedKey.value]: transformedValue.value, + }, + }; + } + + const errors: ValidationError[] = []; + if (!acc.ok) { + errors.push(...acc.errors); + } + if (!transformedKey.ok) { + errors.push(...transformedKey.errors); + } + if (!transformedValue.ok) { + errors.push(...transformedValue.errors); + } + + return { + ok: false, + errors, + }; + }, + { ok: true, value: {} as Record }, + ); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/types.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/types.ts new file mode 100644 index 000000000000..5950b4cbde1e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/record/types.ts @@ -0,0 +1,17 @@ +import type { BaseSchema } from "../../Schema.js"; +import type { SchemaUtils } from "../schema-utils/index.js"; + +export type RecordSchema< + RawKey extends string | number, + RawValue, + ParsedKey extends string | number, + ParsedValue, +> = BaseRecordSchema & + SchemaUtils, Record>; + +export type BaseRecordSchema< + RawKey extends string | number, + RawValue, + ParsedKey extends string | number, + ParsedValue, +> = BaseSchema, Record>; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/JsonError.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/JsonError.ts new file mode 100644 index 000000000000..daee3dc79184 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/JsonError.ts @@ -0,0 +1,9 @@ +import type { ValidationError } from "../../Schema.js"; +import { stringifyValidationError } from "./stringifyValidationErrors.js"; + +export class JsonError extends Error { + constructor(public readonly errors: ValidationError[]) { + super(errors.map(stringifyValidationError).join("; ")); + Object.setPrototypeOf(this, JsonError.prototype); + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/ParseError.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/ParseError.ts new file mode 100644 index 000000000000..9facf06159b2 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/ParseError.ts @@ -0,0 +1,9 @@ +import type { ValidationError } from "../../Schema.js"; +import { stringifyValidationError } from "./stringifyValidationErrors.js"; + +export class ParseError extends Error { + constructor(public readonly errors: ValidationError[]) { + super(errors.map(stringifyValidationError).join("; ")); + Object.setPrototypeOf(this, ParseError.prototype); + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/getSchemaUtils.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/getSchemaUtils.ts new file mode 100644 index 000000000000..3ceaf4e011f0 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/getSchemaUtils.ts @@ -0,0 +1,181 @@ +import { type BaseSchema, type Schema, type SchemaOptions, SchemaType } from "../../Schema.js"; +import { JsonError } from "./JsonError.js"; +import { ParseError } from "./ParseError.js"; + +export interface SchemaUtils { + nullable: () => Schema; + optional: () => Schema; + 
optionalNullable: () => Schema; + transform: (transformer: SchemaTransformer) => Schema; + parseOrThrow: (raw: unknown, opts?: SchemaOptions) => Parsed; + jsonOrThrow: (raw: unknown, opts?: SchemaOptions) => Raw; +} + +export interface SchemaTransformer { + transform: (parsed: Parsed) => Transformed; + untransform: (transformed: any) => Parsed; +} + +export function getSchemaUtils(schema: BaseSchema): SchemaUtils { + return { + nullable: () => nullable(schema), + optional: () => optional(schema), + optionalNullable: () => optionalNullable(schema), + transform: (transformer) => transform(schema, transformer), + parseOrThrow: (raw, opts) => { + const parsed = schema.parse(raw, opts); + if (parsed.ok) { + return parsed.value; + } + throw new ParseError(parsed.errors); + }, + jsonOrThrow: (parsed, opts) => { + const raw = schema.json(parsed, opts); + if (raw.ok) { + return raw.value; + } + throw new JsonError(raw.errors); + }, + }; +} + +/** + * schema utils are defined in one file to resolve issues with circular imports + */ + +export function nullable(schema: BaseSchema): Schema { + const baseSchema: BaseSchema = { + parse: (raw, opts) => { + if (raw == null) { + return { + ok: true, + value: null, + }; + } + return schema.parse(raw, opts); + }, + json: (parsed, opts) => { + if (parsed == null) { + return { + ok: true, + value: null, + }; + } + return schema.json(parsed, opts); + }, + getType: () => SchemaType.NULLABLE, + }; + + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + }; +} + +export function optional( + schema: BaseSchema, +): Schema { + const baseSchema: BaseSchema = { + parse: (raw, opts) => { + if (raw == null) { + return { + ok: true, + value: undefined, + }; + } + return schema.parse(raw, opts); + }, + json: (parsed, opts) => { + if (opts?.omitUndefined && parsed === undefined) { + return { + ok: true, + value: undefined, + }; + } + if (parsed == null) { + return { + ok: true, + value: null, + }; + } + return schema.json(parsed, opts); + }, + getType: () => SchemaType.OPTIONAL, + }; + + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + }; +} + +export function optionalNullable( + schema: BaseSchema, +): Schema { + const baseSchema: BaseSchema = { + parse: (raw, opts) => { + if (raw === undefined) { + return { + ok: true, + value: undefined, + }; + } + if (raw === null) { + return { + ok: true, + value: null, + }; + } + return schema.parse(raw, opts); + }, + json: (parsed, opts) => { + if (parsed === undefined) { + return { + ok: true, + value: undefined, + }; + } + if (parsed === null) { + return { + ok: true, + value: null, + }; + } + return schema.json(parsed, opts); + }, + getType: () => SchemaType.OPTIONAL_NULLABLE, + }; + + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + }; +} + +export function transform( + schema: BaseSchema, + transformer: SchemaTransformer, +): Schema { + const baseSchema: BaseSchema = { + parse: (raw, opts) => { + const parsed = schema.parse(raw, opts); + if (!parsed.ok) { + return parsed; + } + return { + ok: true, + value: transformer.transform(parsed.value), + }; + }, + json: (transformed, opts) => { + const parsed = transformer.untransform(transformed); + return schema.json(parsed, opts); + }, + getType: () => schema.getType(), + }; + + return { + ...baseSchema, + ...getSchemaUtils(baseSchema), + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/index.ts new file mode 100644 index 
000000000000..efb3b0c46288 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/index.ts @@ -0,0 +1,4 @@ +export type { SchemaUtils } from "./getSchemaUtils.js"; +export { getSchemaUtils, optional, transform } from "./getSchemaUtils.js"; +export { JsonError } from "./JsonError.js"; +export { ParseError } from "./ParseError.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/stringifyValidationErrors.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/stringifyValidationErrors.ts new file mode 100644 index 000000000000..d36a4900c6e6 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/schema-utils/stringifyValidationErrors.ts @@ -0,0 +1,8 @@ +import type { ValidationError } from "../../Schema.js"; + +export function stringifyValidationError(error: ValidationError): string { + if (error.path.length === 0) { + return error.message; + } + return `${error.path.join(" -> ")}: ${error.message}`; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/set/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/set/index.ts new file mode 100644 index 000000000000..c72be55e9406 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/set/index.ts @@ -0,0 +1 @@ +export { set } from "./set.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/set/set.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/set/set.ts new file mode 100644 index 000000000000..2013cdb4760f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/set/set.ts @@ -0,0 +1,43 @@ +import { type BaseSchema, type Schema, SchemaType } from "../../Schema.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { list } from "../list/index.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; + +export function set(schema: Schema): Schema> { + const listSchema = list(schema); + const baseSchema: BaseSchema> = { + parse: (raw, opts) => { + const parsedList = listSchema.parse(raw, opts); + if (parsedList.ok) { + return { + ok: true, + value: new Set(parsedList.value), + }; + } else { + return parsedList; + } + }, + json: (parsed, opts) => { + if (!(parsed instanceof Set)) { + return { + ok: false, + errors: [ + { + path: opts?.breadcrumbsPrefix ?? 
[], + message: getErrorMessageForIncorrectType(parsed, "Set"), + }, + ], + }; + } + const jsonList = listSchema.json([...parsed], opts); + return jsonList; + }, + getType: () => SchemaType.SET, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/index.ts new file mode 100644 index 000000000000..c8318222b4f7 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/index.ts @@ -0,0 +1,6 @@ +export type { + inferParsedUnidiscriminatedUnionSchema, + inferRawUnidiscriminatedUnionSchema, + UndiscriminatedUnionSchema, +} from "./types.js"; +export { undiscriminatedUnion } from "./undiscriminatedUnion.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/types.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/types.ts new file mode 100644 index 000000000000..0d5096fab913 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/types.ts @@ -0,0 +1,10 @@ +import type { inferParsed, inferRaw, Schema } from "../../Schema.js"; + +export type UndiscriminatedUnionSchema = Schema< + inferRawUnidiscriminatedUnionSchema, + inferParsedUnidiscriminatedUnionSchema +>; + +export type inferRawUnidiscriminatedUnionSchema = inferRaw; + +export type inferParsedUnidiscriminatedUnionSchema = inferParsed; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/undiscriminatedUnion.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/undiscriminatedUnion.ts new file mode 100644 index 000000000000..07591b4d3e63 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/undiscriminated-union/undiscriminatedUnion.ts @@ -0,0 +1,67 @@ +import { + type BaseSchema, + type MaybeValid, + type Schema, + type SchemaOptions, + SchemaType, + type ValidationError, +} from "../../Schema.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; +import type { inferParsedUnidiscriminatedUnionSchema, inferRawUnidiscriminatedUnionSchema } from "./types.js"; + +export function undiscriminatedUnion, ...Schema[]]>( + schemas: Schemas, +): Schema, inferParsedUnidiscriminatedUnionSchema> { + const baseSchema: BaseSchema< + inferRawUnidiscriminatedUnionSchema, + inferParsedUnidiscriminatedUnionSchema + > = { + parse: (raw, opts) => { + return validateAndTransformUndiscriminatedUnion>( + (schema, opts) => schema.parse(raw, opts), + schemas, + opts, + ); + }, + json: (parsed, opts) => { + return validateAndTransformUndiscriminatedUnion>( + (schema, opts) => schema.json(parsed, opts), + schemas, + opts, + ); + }, + getType: () => SchemaType.UNDISCRIMINATED_UNION, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + }; +} + +function validateAndTransformUndiscriminatedUnion( + transform: (schema: Schema, opts: SchemaOptions) => MaybeValid, + schemas: Schema[], + opts: SchemaOptions | undefined, +): MaybeValid { + const errors: ValidationError[] = []; + for (const [index, schema] of schemas.entries()) { + const transformed = transform(schema, { ...opts, skipValidation: false }); + if (transformed.ok) { + return transformed; + } else { + for (const error of 
transformed.errors) { + errors.push({ + path: error.path, + message: `[Variant ${index}] ${error.message}`, + }); + } + } + } + + return { + ok: false, + errors, + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/discriminant.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/discriminant.ts new file mode 100644 index 000000000000..73cd62adeba5 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/discriminant.ts @@ -0,0 +1,14 @@ +export function discriminant( + parsedDiscriminant: ParsedDiscriminant, + rawDiscriminant: RawDiscriminant, +): Discriminant { + return { + parsedDiscriminant, + rawDiscriminant, + }; +} + +export interface Discriminant { + parsedDiscriminant: ParsedDiscriminant; + rawDiscriminant: RawDiscriminant; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/index.ts new file mode 100644 index 000000000000..6bc29ba9ed38 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/index.ts @@ -0,0 +1,10 @@ +export type { Discriminant } from "./discriminant.js"; +export { discriminant } from "./discriminant.js"; +export type { + inferParsedDiscriminant, + inferParsedUnion, + inferRawDiscriminant, + inferRawUnion, + UnionSubtypes, +} from "./types.js"; +export { union } from "./union.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/types.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/types.ts new file mode 100644 index 000000000000..7bfdd636d8d0 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/types.ts @@ -0,0 +1,26 @@ +import type { inferParsedObject, inferRawObject, ObjectSchema } from "../object/index.js"; +import type { Discriminant } from "./discriminant.js"; + +export type UnionSubtypes = { + [K in DiscriminantValues]: ObjectSchema; +}; + +export type inferRawUnion, U extends UnionSubtypes> = { + [K in keyof U]: Record, K> & inferRawObject; +}[keyof U]; + +export type inferParsedUnion, U extends UnionSubtypes> = { + [K in keyof U]: Record, K> & inferParsedObject; +}[keyof U]; + +export type inferRawDiscriminant> = D extends string + ? D + : D extends Discriminant + ? Raw + : never; + +export type inferParsedDiscriminant> = D extends string + ? D + : D extends Discriminant + ? 
Parsed + : never; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/union.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/union.ts new file mode 100644 index 000000000000..509658e0eb3d --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/builders/union/union.ts @@ -0,0 +1,176 @@ +import { type BaseSchema, type MaybeValid, SchemaType } from "../../Schema.js"; +import { getErrorMessageForIncorrectType } from "../../utils/getErrorMessageForIncorrectType.js"; +import { isPlainObject } from "../../utils/isPlainObject.js"; +import { keys } from "../../utils/keys.js"; +import { maybeSkipValidation } from "../../utils/maybeSkipValidation.js"; +import { enum_ } from "../enum/index.js"; +import type { ObjectSchema } from "../object/index.js"; +import { getObjectLikeUtils, type ObjectLikeSchema } from "../object-like/index.js"; +import { getSchemaUtils } from "../schema-utils/index.js"; +import type { Discriminant } from "./discriminant.js"; +import type { + inferParsedDiscriminant, + inferParsedUnion, + inferRawDiscriminant, + inferRawUnion, + UnionSubtypes, +} from "./types.js"; + +export function union, U extends UnionSubtypes>( + discriminant: D, + union: U, +): ObjectLikeSchema, inferParsedUnion> { + const rawDiscriminant = + typeof discriminant === "string" ? discriminant : (discriminant.rawDiscriminant as inferRawDiscriminant); + const parsedDiscriminant = + typeof discriminant === "string" + ? discriminant + : (discriminant.parsedDiscriminant as inferParsedDiscriminant); + + const discriminantValueSchema = enum_(keys(union) as string[]); + + const baseSchema: BaseSchema, inferParsedUnion> = { + parse: (raw, opts) => { + return transformAndValidateUnion({ + value: raw, + discriminant: rawDiscriminant, + transformedDiscriminant: parsedDiscriminant, + transformDiscriminantValue: (discriminantValue) => + discriminantValueSchema.parse(discriminantValue, { + allowUnrecognizedEnumValues: opts?.allowUnrecognizedUnionMembers, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? []), rawDiscriminant], + }), + getAdditionalPropertiesSchema: (discriminantValue) => union[discriminantValue], + allowUnrecognizedUnionMembers: opts?.allowUnrecognizedUnionMembers, + transformAdditionalProperties: (additionalProperties, additionalPropertiesSchema) => + additionalPropertiesSchema.parse(additionalProperties, opts), + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + }); + }, + json: (parsed, opts) => { + return transformAndValidateUnion({ + value: parsed, + discriminant: parsedDiscriminant, + transformedDiscriminant: rawDiscriminant, + transformDiscriminantValue: (discriminantValue) => + discriminantValueSchema.json(discriminantValue, { + allowUnrecognizedEnumValues: opts?.allowUnrecognizedUnionMembers, + breadcrumbsPrefix: [...(opts?.breadcrumbsPrefix ?? 
[]), parsedDiscriminant], + }), + getAdditionalPropertiesSchema: (discriminantValue) => union[discriminantValue], + allowUnrecognizedUnionMembers: opts?.allowUnrecognizedUnionMembers, + transformAdditionalProperties: (additionalProperties, additionalPropertiesSchema) => + additionalPropertiesSchema.json(additionalProperties, opts), + breadcrumbsPrefix: opts?.breadcrumbsPrefix, + }); + }, + getType: () => SchemaType.UNION, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + ...getObjectLikeUtils(baseSchema), + }; +} + +function transformAndValidateUnion< + TransformedDiscriminant extends string, + TransformedDiscriminantValue extends string, + TransformedAdditionalProperties, +>({ + value, + discriminant, + transformedDiscriminant, + transformDiscriminantValue, + getAdditionalPropertiesSchema, + allowUnrecognizedUnionMembers = false, + transformAdditionalProperties, + breadcrumbsPrefix = [], +}: { + value: unknown; + discriminant: string; + transformedDiscriminant: TransformedDiscriminant; + transformDiscriminantValue: (discriminantValue: unknown) => MaybeValid; + getAdditionalPropertiesSchema: (discriminantValue: string) => ObjectSchema | undefined; + allowUnrecognizedUnionMembers: boolean | undefined; + transformAdditionalProperties: ( + additionalProperties: unknown, + additionalPropertiesSchema: ObjectSchema, + ) => MaybeValid; + breadcrumbsPrefix: string[] | undefined; +}): MaybeValid & TransformedAdditionalProperties> { + if (!isPlainObject(value)) { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: getErrorMessageForIncorrectType(value, "object"), + }, + ], + }; + } + + const { [discriminant]: discriminantValue, ...additionalProperties } = value; + + if (discriminantValue == null) { + return { + ok: false, + errors: [ + { + path: breadcrumbsPrefix, + message: `Missing discriminant ("${discriminant}")`, + }, + ], + }; + } + + const transformedDiscriminantValue = transformDiscriminantValue(discriminantValue); + if (!transformedDiscriminantValue.ok) { + return { + ok: false, + errors: transformedDiscriminantValue.errors, + }; + } + + const additionalPropertiesSchema = getAdditionalPropertiesSchema(transformedDiscriminantValue.value); + + if (additionalPropertiesSchema == null) { + if (allowUnrecognizedUnionMembers) { + return { + ok: true, + value: { + [transformedDiscriminant]: transformedDiscriminantValue.value, + ...additionalProperties, + } as Record & TransformedAdditionalProperties, + }; + } else { + return { + ok: false, + errors: [ + { + path: [...breadcrumbsPrefix, discriminant], + message: "Unexpected discriminant value", + }, + ], + }; + } + } + + const transformedAdditionalProperties = transformAdditionalProperties( + additionalProperties, + additionalPropertiesSchema, + ); + if (!transformedAdditionalProperties.ok) { + return transformedAdditionalProperties; + } + + return { + ok: true, + value: { + [transformedDiscriminant]: discriminantValue, + ...transformedAdditionalProperties.value, + } as Record & TransformedAdditionalProperties, + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/index.ts new file mode 100644 index 000000000000..befac2e3bebc --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/index.ts @@ -0,0 +1,2 @@ +export * from "./builders/index.js"; +export type { inferParsed, inferRaw, Schema, SchemaOptions } from "./Schema.js"; diff --git 
a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/MaybePromise.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/MaybePromise.ts new file mode 100644 index 000000000000..9cd354b3418e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/MaybePromise.ts @@ -0,0 +1 @@ +export type MaybePromise = T | Promise; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/addQuestionMarksToNullableProperties.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/addQuestionMarksToNullableProperties.ts new file mode 100644 index 000000000000..59f9e658867b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/addQuestionMarksToNullableProperties.ts @@ -0,0 +1,9 @@ +export type addQuestionMarksToNullableProperties = { + [K in OptionalKeys]?: T[K]; +} & Pick>; + +export type OptionalKeys = { + [K in keyof T]-?: undefined extends T[K] ? K : never; +}[keyof T]; + +export type RequiredKeys = Exclude>; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/createIdentitySchemaCreator.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/createIdentitySchemaCreator.ts new file mode 100644 index 000000000000..9aa4ed5029a0 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/createIdentitySchemaCreator.ts @@ -0,0 +1,21 @@ +import { getSchemaUtils } from "../builders/schema-utils/index.js"; +import type { BaseSchema, MaybeValid, Schema, SchemaOptions, SchemaType } from "../Schema.js"; +import { maybeSkipValidation } from "./maybeSkipValidation.js"; + +export function createIdentitySchemaCreator( + schemaType: SchemaType, + validate: (value: unknown, opts?: SchemaOptions) => MaybeValid, +): () => Schema { + return () => { + const baseSchema: BaseSchema = { + parse: validate, + json: validate, + getType: () => schemaType, + }; + + return { + ...maybeSkipValidation(baseSchema), + ...getSchemaUtils(baseSchema), + }; + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/entries.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/entries.ts new file mode 100644 index 000000000000..2d5c93d657ce --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/entries.ts @@ -0,0 +1,3 @@ +export function entries(object: T): [keyof T, T[keyof T]][] { + return Object.entries(object) as [keyof T, T[keyof T]][]; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/filterObject.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/filterObject.ts new file mode 100644 index 000000000000..70527d10013b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/filterObject.ts @@ -0,0 +1,13 @@ +export function filterObject(obj: T, keysToInclude: K[]): Pick { + const keysToIncludeSet = new Set(keysToInclude); + return Object.entries(obj).reduce( + (acc, [key, value]) => { + if (keysToIncludeSet.has(key as K)) { + acc[key as K] = value as T[K]; + } + return acc; + // eslint-disable-next-line @typescript-eslint/prefer-reduce-type-parameter + }, + {} as Pick, + ); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/getErrorMessageForIncorrectType.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/getErrorMessageForIncorrectType.ts new file mode 100644 index 000000000000..1a5c31027ce9 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/getErrorMessageForIncorrectType.ts @@ -0,0 +1,25 @@ +export function getErrorMessageForIncorrectType(value: unknown, expectedType: string): string { + return 
`Expected ${expectedType}. Received ${getTypeAsString(value)}.`; +} + +function getTypeAsString(value: unknown): string { + if (Array.isArray(value)) { + return "list"; + } + if (value === null) { + return "null"; + } + if (value instanceof BigInt) { + return "BigInt"; + } + switch (typeof value) { + case "string": + return `"${value}"`; + case "bigint": + case "number": + case "boolean": + case "undefined": + return `${value}`; + } + return typeof value; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/isPlainObject.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/isPlainObject.ts new file mode 100644 index 000000000000..db82a722c35b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/isPlainObject.ts @@ -0,0 +1,17 @@ +// borrowed from https://github.com/lodash/lodash/blob/master/isPlainObject.js +export function isPlainObject(value: unknown): value is Record { + if (typeof value !== "object" || value === null) { + return false; + } + + if (Object.getPrototypeOf(value) === null) { + return true; + } + + let proto = value; + while (Object.getPrototypeOf(proto) !== null) { + proto = Object.getPrototypeOf(proto); + } + + return Object.getPrototypeOf(value) === proto; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/keys.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/keys.ts new file mode 100644 index 000000000000..2e0930e2d70b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/keys.ts @@ -0,0 +1,3 @@ +export function keys(object: T): (keyof T)[] { + return Object.keys(object) as (keyof T)[]; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/maybeSkipValidation.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/maybeSkipValidation.ts new file mode 100644 index 000000000000..f32d4525136d --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/maybeSkipValidation.ts @@ -0,0 +1,38 @@ +import type { BaseSchema, MaybeValid, SchemaOptions } from "../Schema.js"; + +export function maybeSkipValidation, Raw, Parsed>(schema: S): S { + return { + ...schema, + json: transformAndMaybeSkipValidation(schema.json), + parse: transformAndMaybeSkipValidation(schema.parse), + }; +} + +function transformAndMaybeSkipValidation( + transform: (value: unknown, opts?: SchemaOptions) => MaybeValid, +): (value: unknown, opts?: SchemaOptions) => MaybeValid { + return (value, opts): MaybeValid => { + const transformed = transform(value, opts); + const { skipValidation = false } = opts ?? {}; + if (!transformed.ok && skipValidation) { + // biome-ignore lint/suspicious/noConsole: allow console + console.warn( + [ + "Failed to validate.", + ...transformed.errors.map( + (error) => + " - " + + (error.path.length > 0 ? 
`${error.path.join(".")}: ${error.message}` : error.message), + ), + ].join("\n"), + ); + + return { + ok: true, + value: value as T, + }; + } else { + return transformed; + } + }; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/partition.ts b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/partition.ts new file mode 100644 index 000000000000..f58d6f3d35f3 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/schemas/utils/partition.ts @@ -0,0 +1,12 @@ +export function partition(items: readonly T[], predicate: (item: T) => boolean): [T[], T[]] { + const trueItems: T[] = [], + falseItems: T[] = []; + for (const item of items) { + if (predicate(item)) { + trueItems.push(item); + } else { + falseItems.push(item); + } + } + return [trueItems, falseItems]; +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/url/encodePathParam.ts b/seed/ts-sdk/ts-extra-properties/src/core/url/encodePathParam.ts new file mode 100644 index 000000000000..19b901244218 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/url/encodePathParam.ts @@ -0,0 +1,18 @@ +export function encodePathParam(param: unknown): string { + if (param === null) { + return "null"; + } + const typeofParam = typeof param; + switch (typeofParam) { + case "undefined": + return "undefined"; + case "string": + case "number": + case "boolean": + break; + default: + param = String(param); + break; + } + return encodeURIComponent(param as string | number | boolean); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/url/index.ts b/seed/ts-sdk/ts-extra-properties/src/core/url/index.ts new file mode 100644 index 000000000000..f2e0fa2d2221 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/url/index.ts @@ -0,0 +1,3 @@ +export { encodePathParam } from "./encodePathParam.js"; +export { join } from "./join.js"; +export { toQueryString } from "./qs.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/core/url/join.ts b/seed/ts-sdk/ts-extra-properties/src/core/url/join.ts new file mode 100644 index 000000000000..7ca7daef094d --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/url/join.ts @@ -0,0 +1,79 @@ +export function join(base: string, ...segments: string[]): string { + if (!base) { + return ""; + } + + if (segments.length === 0) { + return base; + } + + if (base.includes("://")) { + let url: URL; + try { + url = new URL(base); + } catch { + return joinPath(base, ...segments); + } + + const lastSegment = segments[segments.length - 1]; + const shouldPreserveTrailingSlash = lastSegment?.endsWith("/"); + + for (const segment of segments) { + const cleanSegment = trimSlashes(segment); + if (cleanSegment) { + url.pathname = joinPathSegments(url.pathname, cleanSegment); + } + } + + if (shouldPreserveTrailingSlash && !url.pathname.endsWith("/")) { + url.pathname += "/"; + } + + return url.toString(); + } + + return joinPath(base, ...segments); +} + +function joinPath(base: string, ...segments: string[]): string { + if (segments.length === 0) { + return base; + } + + let result = base; + + const lastSegment = segments[segments.length - 1]; + const shouldPreserveTrailingSlash = lastSegment?.endsWith("/"); + + for (const segment of segments) { + const cleanSegment = trimSlashes(segment); + if (cleanSegment) { + result = joinPathSegments(result, cleanSegment); + } + } + + if (shouldPreserveTrailingSlash && !result.endsWith("/")) { + result += "/"; + } + + return result; +} + +function joinPathSegments(left: string, right: string): string { + if (left.endsWith("/")) { + return left + 
right; + } + return `${left}/${right}`; +} + +function trimSlashes(str: string): string { + if (!str) return str; + + let start = 0; + let end = str.length; + + if (str.startsWith("/")) start = 1; + if (str.endsWith("/")) end = str.length - 1; + + return start === 0 && end === str.length ? str : str.slice(start, end); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/core/url/qs.ts b/seed/ts-sdk/ts-extra-properties/src/core/url/qs.ts new file mode 100644 index 000000000000..13e89be9d9a6 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/core/url/qs.ts @@ -0,0 +1,74 @@ +interface QueryStringOptions { + arrayFormat?: "indices" | "repeat"; + encode?: boolean; +} + +const defaultQsOptions: Required = { + arrayFormat: "indices", + encode: true, +} as const; + +function encodeValue(value: unknown, shouldEncode: boolean): string { + if (value === undefined) { + return ""; + } + if (value === null) { + return ""; + } + const stringValue = String(value); + return shouldEncode ? encodeURIComponent(stringValue) : stringValue; +} + +function stringifyObject(obj: Record, prefix = "", options: Required): string[] { + const parts: string[] = []; + + for (const [key, value] of Object.entries(obj)) { + const fullKey = prefix ? `${prefix}[${key}]` : key; + + if (value === undefined) { + continue; + } + + if (Array.isArray(value)) { + if (value.length === 0) { + continue; + } + for (let i = 0; i < value.length; i++) { + const item = value[i]; + if (item === undefined) { + continue; + } + if (typeof item === "object" && !Array.isArray(item) && item !== null) { + const arrayKey = options.arrayFormat === "indices" ? `${fullKey}[${i}]` : fullKey; + parts.push(...stringifyObject(item as Record, arrayKey, options)); + } else { + const arrayKey = options.arrayFormat === "indices" ? `${fullKey}[${i}]` : fullKey; + const encodedKey = options.encode ? encodeURIComponent(arrayKey) : arrayKey; + parts.push(`${encodedKey}=${encodeValue(item, options.encode)}`); + } + } + } else if (typeof value === "object" && value !== null) { + if (Object.keys(value as Record).length === 0) { + continue; + } + parts.push(...stringifyObject(value as Record, fullKey, options)); + } else { + const encodedKey = options.encode ? encodeURIComponent(fullKey) : fullKey; + parts.push(`${encodedKey}=${encodeValue(value, options.encode)}`); + } + } + + return parts; +} + +export function toQueryString(obj: unknown, options?: QueryStringOptions): string { + if (obj == null || typeof obj !== "object") { + return ""; + } + + const parts = stringifyObject(obj as Record, "", { + ...defaultQsOptions, + ...options, + }); + return parts.join("&"); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/errors/SeedApiError.ts b/seed/ts-sdk/ts-extra-properties/src/errors/SeedApiError.ts new file mode 100644 index 000000000000..feb7d3461003 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/errors/SeedApiError.ts @@ -0,0 +1,58 @@ +// This file was auto-generated by Fern from our API Definition. 
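// --- Illustrative sketch (not part of the generated diff) ---------------------
// The url helpers above (`join` and `toQueryString`) are easiest to understand by
// example. Import paths are assumed from this seed package's layout.
import { join, toQueryString } from "./src/core/url/index.js";

// join() keeps the origin of an absolute base URL and normalizes the slashes
// between path segments.
console.log(join("https://api.example.com/base/", "/users/", "123"));
// -> "https://api.example.com/base/users/123"

// toQueryString() defaults to arrayFormat "indices" and percent-encodes keys and values.
console.log(toQueryString({ page: 1, tags: ["a", "b"] }));
// -> "page=1&tags%5B0%5D=a&tags%5B1%5D=b"

// With arrayFormat "repeat" the key is emitted once per array element; encode: false
// keeps the brackets and values readable.
console.log(toQueryString({ tags: ["a", "b"] }, { arrayFormat: "repeat", encode: false }));
// -> "tags=a&tags=b"
// ------------------------------------------------------------------------------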
+ +import type * as core from "../core/index.js"; +import { toJson } from "../core/json.js"; + +export class SeedApiError extends Error { + public readonly statusCode?: number; + public readonly body?: unknown; + public readonly rawResponse?: core.RawResponse; + + constructor({ + message, + statusCode, + body, + rawResponse, + }: { + message?: string; + statusCode?: number; + body?: unknown; + rawResponse?: core.RawResponse; + }) { + super(buildMessage({ message, statusCode, body })); + Object.setPrototypeOf(this, new.target.prototype); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = this.constructor.name; + this.statusCode = statusCode; + this.body = body; + this.rawResponse = rawResponse; + } +} + +function buildMessage({ + message, + statusCode, + body, +}: { + message: string | undefined; + statusCode: number | undefined; + body: unknown | undefined; +}): string { + const lines: string[] = []; + if (message != null) { + lines.push(message); + } + + if (statusCode != null) { + lines.push(`Status code: ${statusCode.toString()}`); + } + + if (body != null) { + lines.push(`Body: ${toJson(body, undefined, 2)}`); + } + + return lines.join("\n"); +} diff --git a/seed/ts-sdk/ts-extra-properties/src/errors/SeedApiTimeoutError.ts b/seed/ts-sdk/ts-extra-properties/src/errors/SeedApiTimeoutError.ts new file mode 100644 index 000000000000..5f451edf37bb --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/errors/SeedApiTimeoutError.ts @@ -0,0 +1,13 @@ +// This file was auto-generated by Fern from our API Definition. + +export class SeedApiTimeoutError extends Error { + constructor(message: string) { + super(message); + Object.setPrototypeOf(this, new.target.prototype); + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = this.constructor.name; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/errors/handleNonStatusCodeError.ts b/seed/ts-sdk/ts-extra-properties/src/errors/handleNonStatusCodeError.ts new file mode 100644 index 000000000000..fdb7a48879da --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/errors/handleNonStatusCodeError.ts @@ -0,0 +1,37 @@ +// This file was auto-generated by Fern from our API Definition. 
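// --- Illustrative sketch (not part of the generated diff) ---------------------
// How the error classes above behave. Import paths are assumed from this seed
// package's layout; `toJson` is assumed to behave like JSON.stringify here.
import { SeedApiError } from "./src/errors/SeedApiError.js";
import { SeedApiTimeoutError } from "./src/errors/SeedApiTimeoutError.js";

const error = new SeedApiError({
    message: "Request failed",
    statusCode: 400,
    body: { reason: "user_name is required" },
});

// buildMessage() joins the message, "Status code: ...", and the serialized body
// with newlines, so error.message is a small multi-line report.
console.log(error.message);

// Object.setPrototypeOf(...) in the constructors keeps instanceof checks reliable.
console.log(error instanceof SeedApiError); // true
console.log(new SeedApiTimeoutError("Timed out") instanceof SeedApiTimeoutError); // true
// ------------------------------------------------------------------------------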
+ +import type * as core from "../core/index.js"; +import * as errors from "./index.js"; + +export function handleNonStatusCodeError( + error: core.Fetcher.Error, + rawResponse: core.RawResponse, + method: string, + path: string, +): never { + switch (error.reason) { + case "non-json": + throw new errors.SeedApiError({ + statusCode: error.statusCode, + body: error.rawBody, + rawResponse: rawResponse, + }); + case "body-is-null": + throw new errors.SeedApiError({ + statusCode: error.statusCode, + rawResponse: rawResponse, + }); + case "timeout": + throw new errors.SeedApiTimeoutError(`Timeout exceeded when calling ${method} ${path}.`); + case "unknown": + throw new errors.SeedApiError({ + message: error.errorMessage, + rawResponse: rawResponse, + }); + default: + throw new errors.SeedApiError({ + message: "Unknown error", + rawResponse: rawResponse, + }); + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/errors/index.ts b/seed/ts-sdk/ts-extra-properties/src/errors/index.ts new file mode 100644 index 000000000000..09e82b954c26 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/errors/index.ts @@ -0,0 +1,2 @@ +export { SeedApiError } from "./SeedApiError.js"; +export { SeedApiTimeoutError } from "./SeedApiTimeoutError.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/exports.ts b/seed/ts-sdk/ts-extra-properties/src/exports.ts new file mode 100644 index 000000000000..7b70ee14fc02 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/exports.ts @@ -0,0 +1 @@ +export * from "./core/exports.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/index.ts b/seed/ts-sdk/ts-extra-properties/src/index.ts new file mode 100644 index 000000000000..5a2ac4b417c8 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/index.ts @@ -0,0 +1,6 @@ +export * as SeedApi from "./api/index.js"; +export type { BaseClientOptions, BaseRequestOptions } from "./BaseClient.js"; +export { SeedApiClient } from "./Client.js"; +export { SeedApiError, SeedApiTimeoutError } from "./errors/index.js"; +export * from "./exports.js"; +export * as serialization from "./serialization/index.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/serialization/client/index.ts b/seed/ts-sdk/ts-extra-properties/src/serialization/client/index.ts new file mode 100644 index 000000000000..195f9aa8a846 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/serialization/client/index.ts @@ -0,0 +1 @@ +export * from "./requests/index.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/serialization/client/requests/CreateUserRequest.ts b/seed/ts-sdk/ts-extra-properties/src/serialization/client/requests/CreateUserRequest.ts new file mode 100644 index 000000000000..a5180644bdc6 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/serialization/client/requests/CreateUserRequest.ts @@ -0,0 +1,23 @@ +// This file was auto-generated by Fern from our API Definition. 
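// --- Illustrative sketch (not part of the generated diff) ---------------------
// The serialization schemas that follow are generated from the object/property/
// primitive builders defined earlier in this diff. A minimal hand-written
// composition, with import paths assumed from this seed package's layout:
import { object } from "./src/core/schemas/builders/object/object.js";
import { property } from "./src/core/schemas/builders/object/property.js";
import { number } from "./src/core/schemas/builders/primitives/number.js";
import { string } from "./src/core/schemas/builders/primitives/string.js";

// property() maps the wire key "user_name" to the parsed key userName;
// .optional() wraps a schema so the key may be omitted.
const UserSchema = object({
    userName: property("user_name", string()),
    age: number().optional(),
});

// parse*: wire JSON -> parsed shape; json*: parsed shape -> wire JSON.
const parsed = UserSchema.parseOrThrow({ user_name: "ada", age: 36 });
console.log(parsed); // -> { userName: "ada", age: 36 }
console.log(UserSchema.jsonOrThrow(parsed)); // -> { user_name: "ada", age: 36 }

// passthrough() (used by the generated User schema below) keeps unrecognized wire
// keys instead of rejecting them.
console.log(UserSchema.passthrough().parseOrThrow({ user_name: "ada", age: 36, plan: "pro" }));
// -> { userName: "ada", age: 36, plan: "pro" }
// ------------------------------------------------------------------------------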
+ +import type * as SeedApi from "../../../api/index.js"; +import * as core from "../../../core/index.js"; +import type * as serializers from "../../index.js"; + +export const CreateUserRequest: core.serialization.Schema< + serializers.CreateUserRequest.Raw, + SeedApi.CreateUserRequest +> = core.serialization.object({ + userName: core.serialization.property("user_name", core.serialization.string()), + metaData: core.serialization.property( + "meta_data", + core.serialization.record(core.serialization.string(), core.serialization.unknown()).optional(), + ), +}); + +export declare namespace CreateUserRequest { + export interface Raw { + user_name: string; + meta_data?: Record | null; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/serialization/client/requests/index.ts b/seed/ts-sdk/ts-extra-properties/src/serialization/client/requests/index.ts new file mode 100644 index 000000000000..13ed69b97ed8 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/serialization/client/requests/index.ts @@ -0,0 +1 @@ +export { CreateUserRequest } from "./CreateUserRequest.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/serialization/index.ts b/seed/ts-sdk/ts-extra-properties/src/serialization/index.ts new file mode 100644 index 000000000000..d9adb1af9a93 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/serialization/index.ts @@ -0,0 +1,2 @@ +export * from "./client/index.js"; +export * from "./types/index.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/serialization/types/User.ts b/seed/ts-sdk/ts-extra-properties/src/serialization/types/User.ts new file mode 100644 index 000000000000..bc4f0a00f2d1 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/serialization/types/User.ts @@ -0,0 +1,24 @@ +// This file was auto-generated by Fern from our API Definition. + +import type * as SeedApi from "../../api/index.js"; +import * as core from "../../core/index.js"; +import type * as serializers from "../index.js"; + +export const User: core.serialization.ObjectSchema = core.serialization + .object({ + id: core.serialization.string(), + userName: core.serialization.property("user_name", core.serialization.string()), + createdAt: core.serialization.property("created_at", core.serialization.date()), + updatedAt: core.serialization.property("updated_at", core.serialization.date().optional()), + }) + .passthrough(); + +export declare namespace User { + export interface Raw { + id: string; + user_name: string; + created_at: string; + updated_at?: string | null; + [key: string]: any; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/src/serialization/types/index.ts b/seed/ts-sdk/ts-extra-properties/src/serialization/types/index.ts new file mode 100644 index 000000000000..169437c217d9 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/serialization/types/index.ts @@ -0,0 +1 @@ +export * from "./User.js"; diff --git a/seed/ts-sdk/ts-extra-properties/src/version.ts b/seed/ts-sdk/ts-extra-properties/src/version.ts new file mode 100644 index 000000000000..b643a3e3ea27 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/src/version.ts @@ -0,0 +1 @@ +export const SDK_VERSION = "0.0.1"; diff --git a/seed/ts-sdk/ts-extra-properties/tests/custom.test.ts b/seed/ts-sdk/ts-extra-properties/tests/custom.test.ts new file mode 100644 index 000000000000..7f5e031c8396 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/custom.test.ts @@ -0,0 +1,13 @@ +/** + * This is a custom test file, if you wish to add more tests + * to your SDK. + * Be sure to mark this file in `.fernignore`. 
+ * + * If you include example requests/responses in your fern definition, + * you will have tests automatically generated for you. + */ +describe("test", () => { + it("default", () => { + expect(true).toBe(true); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/MockServer.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/MockServer.ts new file mode 100644 index 000000000000..954872157d52 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/MockServer.ts @@ -0,0 +1,29 @@ +import type { RequestHandlerOptions } from "msw"; +import type { SetupServer } from "msw/node"; + +import { mockEndpointBuilder } from "./mockEndpointBuilder"; + +export interface MockServerOptions { + baseUrl: string; + server: SetupServer; +} + +export class MockServer { + private readonly server: SetupServer; + public readonly baseUrl: string; + + constructor({ baseUrl, server }: MockServerOptions) { + this.baseUrl = baseUrl.endsWith("/") ? baseUrl.slice(0, -1) : baseUrl; + this.server = server; + } + + public mockEndpoint(options?: RequestHandlerOptions): ReturnType { + const builder = mockEndpointBuilder({ + once: options?.once ?? true, + onBuild: (handler) => { + this.server.use(handler); + }, + }).baseUrl(this.baseUrl); + return builder; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/MockServerPool.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/MockServerPool.ts new file mode 100644 index 000000000000..e1a90f7fb2e3 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/MockServerPool.ts @@ -0,0 +1,106 @@ +import { setupServer } from "msw/node"; + +import { fromJson, toJson } from "../../src/core/json"; +import { MockServer } from "./MockServer"; +import { randomBaseUrl } from "./randomBaseUrl"; + +const mswServer = setupServer(); +interface MockServerOptions { + baseUrl?: string; +} + +async function formatHttpRequest(request: Request, id?: string): Promise { + try { + const clone = request.clone(); + const headers = [...clone.headers.entries()].map(([k, v]) => `${k}: ${v}`).join("\n"); + + let body = ""; + try { + const contentType = clone.headers.get("content-type"); + if (contentType?.includes("application/json")) { + body = toJson(fromJson(await clone.text()), undefined, 2); + } else if (clone.body) { + body = await clone.text(); + } + } catch (_e) { + body = "(unable to parse body)"; + } + + const title = id ? `### Request ${id} ###\n` : ""; + const firstLine = `${title}${request.method} ${request.url.toString()} HTTP/1.1`; + + return `\n${firstLine}\n${headers}\n\n${body || "(no body)"}\n`; + } catch (e) { + return `Error formatting request: ${e}`; + } +} + +async function formatHttpResponse(response: Response, id?: string): Promise { + try { + const clone = response.clone(); + const headers = [...clone.headers.entries()].map(([k, v]) => `${k}: ${v}`).join("\n"); + + let body = ""; + try { + const contentType = clone.headers.get("content-type"); + if (contentType?.includes("application/json")) { + body = toJson(fromJson(await clone.text()), undefined, 2); + } else if (clone.body) { + body = await clone.text(); + } + } catch (_e) { + body = "(unable to parse body)"; + } + + const title = id ? 
`### Response for ${id} ###\n` : ""; + const firstLine = `${title}HTTP/1.1 ${response.status} ${response.statusText}`; + + return `\n${firstLine}\n${headers}\n\n${body || "(no body)"}\n`; + } catch (e) { + return `Error formatting response: ${e}`; + } +} + +class MockServerPool { + private servers: MockServer[] = []; + + public createServer(options?: Partial): MockServer { + const baseUrl = options?.baseUrl || randomBaseUrl(); + const server = new MockServer({ baseUrl, server: mswServer }); + this.servers.push(server); + return server; + } + + public getServers(): MockServer[] { + return [...this.servers]; + } + + public listen(): void { + const onUnhandledRequest = process.env.LOG_LEVEL === "debug" ? "warn" : "bypass"; + mswServer.listen({ onUnhandledRequest }); + + if (process.env.LOG_LEVEL === "debug") { + mswServer.events.on("request:start", async ({ request, requestId }) => { + const formattedRequest = await formatHttpRequest(request, requestId); + console.debug(`request:start\n${formattedRequest}`); + }); + + mswServer.events.on("request:unhandled", async ({ request, requestId }) => { + const formattedRequest = await formatHttpRequest(request, requestId); + console.debug(`request:unhandled\n${formattedRequest}`); + }); + + mswServer.events.on("response:mocked", async ({ request, response, requestId }) => { + const formattedResponse = await formatHttpResponse(response, requestId); + console.debug(`response:mocked\n${formattedResponse}`); + }); + } + } + + public close(): void { + this.servers = []; + mswServer.close(); + } +} + +export const mockServerPool = new MockServerPool(); diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/mockEndpointBuilder.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/mockEndpointBuilder.ts new file mode 100644 index 000000000000..78985e7211b4 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/mockEndpointBuilder.ts @@ -0,0 +1,227 @@ +import { type DefaultBodyType, type HttpHandler, HttpResponse, type HttpResponseResolver, http } from "msw"; + +import { url } from "../../src/core"; +import { toJson } from "../../src/core/json"; +import { withFormUrlEncoded } from "./withFormUrlEncoded"; +import { withHeaders } from "./withHeaders"; +import { type WithJsonOptions, withJson } from "./withJson"; + +type HttpMethod = "all" | "get" | "post" | "put" | "delete" | "patch" | "options" | "head"; + +interface MethodStage { + baseUrl(baseUrl: string): MethodStage; + all(path: string): RequestHeadersStage; + get(path: string): RequestHeadersStage; + post(path: string): RequestHeadersStage; + put(path: string): RequestHeadersStage; + delete(path: string): RequestHeadersStage; + patch(path: string): RequestHeadersStage; + options(path: string): RequestHeadersStage; + head(path: string): RequestHeadersStage; +} + +interface RequestHeadersStage extends RequestBodyStage, ResponseStage { + header(name: string, value: string): RequestHeadersStage; + headers(headers: Record): RequestBodyStage; +} + +interface RequestBodyStage extends ResponseStage { + jsonBody(body: unknown, options?: WithJsonOptions): ResponseStage; + formUrlEncodedBody(body: unknown): ResponseStage; +} + +interface ResponseStage { + respondWith(): ResponseStatusStage; +} +interface ResponseStatusStage { + statusCode(statusCode: number): ResponseHeaderStage; +} + +interface ResponseHeaderStage extends ResponseBodyStage, BuildStage { + header(name: string, value: string): ResponseHeaderStage; + headers(headers: Record): ResponseHeaderStage; +} + +interface 
ResponseBodyStage { + jsonBody(body: unknown): BuildStage; +} + +interface BuildStage { + build(): HttpHandler; +} + +export interface HttpHandlerBuilderOptions { + onBuild?: (handler: HttpHandler) => void; + once?: boolean; +} + +class RequestBuilder implements MethodStage, RequestHeadersStage, RequestBodyStage, ResponseStage { + private method: HttpMethod = "get"; + private _baseUrl: string = ""; + private path: string = "/"; + private readonly predicates: ((resolver: HttpResponseResolver) => HttpResponseResolver)[] = []; + private readonly handlerOptions?: HttpHandlerBuilderOptions; + + constructor(options?: HttpHandlerBuilderOptions) { + this.handlerOptions = options; + } + + baseUrl(baseUrl: string): MethodStage { + this._baseUrl = baseUrl; + return this; + } + + all(path: string): RequestHeadersStage { + this.method = "all"; + this.path = path; + return this; + } + + get(path: string): RequestHeadersStage { + this.method = "get"; + this.path = path; + return this; + } + + post(path: string): RequestHeadersStage { + this.method = "post"; + this.path = path; + return this; + } + + put(path: string): RequestHeadersStage { + this.method = "put"; + this.path = path; + return this; + } + + delete(path: string): RequestHeadersStage { + this.method = "delete"; + this.path = path; + return this; + } + + patch(path: string): RequestHeadersStage { + this.method = "patch"; + this.path = path; + return this; + } + + options(path: string): RequestHeadersStage { + this.method = "options"; + this.path = path; + return this; + } + + head(path: string): RequestHeadersStage { + this.method = "head"; + this.path = path; + return this; + } + + header(name: string, value: string): RequestHeadersStage { + this.predicates.push((resolver) => withHeaders({ [name]: value }, resolver)); + return this; + } + + headers(headers: Record): RequestBodyStage { + this.predicates.push((resolver) => withHeaders(headers, resolver)); + return this; + } + + jsonBody(body: unknown, options?: WithJsonOptions): ResponseStage { + if (body === undefined) { + throw new Error("Undefined is not valid JSON. Do not call jsonBody if you want an empty body."); + } + this.predicates.push((resolver) => withJson(body, resolver, options)); + return this; + } + + formUrlEncodedBody(body: unknown): ResponseStage { + if (body === undefined) { + throw new Error( + "Undefined is not valid for form-urlencoded. 
Do not call formUrlEncodedBody if you want an empty body.", + ); + } + this.predicates.push((resolver) => withFormUrlEncoded(body, resolver)); + return this; + } + + respondWith(): ResponseStatusStage { + return new ResponseBuilder(this.method, this.buildUrl(), this.predicates, this.handlerOptions); + } + + private buildUrl(): string { + return url.join(this._baseUrl, this.path); + } +} + +class ResponseBuilder implements ResponseStatusStage, ResponseHeaderStage, ResponseBodyStage, BuildStage { + private readonly method: HttpMethod; + private readonly url: string; + private readonly requestPredicates: ((resolver: HttpResponseResolver) => HttpResponseResolver)[]; + private readonly handlerOptions?: HttpHandlerBuilderOptions; + + private responseStatusCode: number = 200; + private responseHeaders: Record = {}; + private responseBody: DefaultBodyType = undefined; + + constructor( + method: HttpMethod, + url: string, + requestPredicates: ((resolver: HttpResponseResolver) => HttpResponseResolver)[], + options?: HttpHandlerBuilderOptions, + ) { + this.method = method; + this.url = url; + this.requestPredicates = requestPredicates; + this.handlerOptions = options; + } + + public statusCode(code: number): ResponseHeaderStage { + this.responseStatusCode = code; + return this; + } + + public header(name: string, value: string): ResponseHeaderStage { + this.responseHeaders[name] = value; + return this; + } + + public headers(headers: Record): ResponseHeaderStage { + this.responseHeaders = { ...this.responseHeaders, ...headers }; + return this; + } + + public jsonBody(body: unknown): BuildStage { + if (body === undefined) { + throw new Error("Undefined is not valid JSON. Do not call jsonBody if you expect an empty body."); + } + this.responseBody = toJson(body); + return this; + } + + public build(): HttpHandler { + const responseResolver: HttpResponseResolver = () => { + const response = new HttpResponse(this.responseBody, { + status: this.responseStatusCode, + headers: this.responseHeaders, + }); + // if no Content-Type header is set, delete the default text content type that is set + if (Object.keys(this.responseHeaders).some((key) => key.toLowerCase() === "content-type") === false) { + response.headers.delete("Content-Type"); + } + return response; + }; + + const finalResolver = this.requestPredicates.reduceRight((acc, predicate) => predicate(acc), responseResolver); + + const handler = http[this.method](this.url, finalResolver, this.handlerOptions); + this.handlerOptions?.onBuild?.(handler); + return handler; + } +} + +export function mockEndpointBuilder(options?: HttpHandlerBuilderOptions): MethodStage { + return new RequestBuilder(options); +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/randomBaseUrl.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/randomBaseUrl.ts new file mode 100644 index 000000000000..031aa6408aca --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/randomBaseUrl.ts @@ -0,0 +1,4 @@ +export function randomBaseUrl(): string { + const randomString = Math.random().toString(36).substring(2, 15); + return `http://${randomString}.localhost`; +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/setup.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/setup.ts new file mode 100644 index 000000000000..aeb3a95af7dc --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/setup.ts @@ -0,0 +1,10 @@ +import { afterAll, beforeAll } from "vitest"; + +import { mockServerPool } from "./MockServerPool"; + +beforeAll(() => 
{ + mockServerPool.listen(); +}); +afterAll(() => { + mockServerPool.close(); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/withFormUrlEncoded.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/withFormUrlEncoded.ts new file mode 100644 index 000000000000..e250cb3c0f61 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/withFormUrlEncoded.ts @@ -0,0 +1,89 @@ +import { type HttpResponseResolver, passthrough } from "msw"; + +import { toJson } from "../../src/core/json"; + +/** + * Creates a request matcher that validates if the request form-urlencoded body exactly matches the expected object + * @param expectedBody - The exact body object to match against + * @param resolver - Response resolver to execute if body matches + */ +export function withFormUrlEncoded(expectedBody: unknown, resolver: HttpResponseResolver): HttpResponseResolver { + return async (args) => { + const { request } = args; + + let clonedRequest: Request; + let bodyText: string | undefined; + let actualBody: Record; + try { + clonedRequest = request.clone(); + bodyText = await clonedRequest.text(); + if (bodyText === "") { + // Empty body is valid if expected body is also empty + const isExpectedEmpty = + expectedBody != null && + typeof expectedBody === "object" && + Object.keys(expectedBody as Record).length === 0; + if (!isExpectedEmpty) { + console.error("Request body is empty, expected a form-urlencoded body."); + return passthrough(); + } + actualBody = {}; + } else { + const params = new URLSearchParams(bodyText); + actualBody = {}; + for (const [key, value] of params.entries()) { + actualBody[key] = value; + } + } + } catch (error) { + console.error(`Error processing form-urlencoded request body:\n\tError: ${error}\n\tBody: ${bodyText}`); + return passthrough(); + } + + const mismatches = findMismatches(actualBody, expectedBody); + if (Object.keys(mismatches).length > 0) { + console.error("Form-urlencoded body mismatch:", toJson(mismatches, undefined, 2)); + return passthrough(); + } + + return resolver(args); + }; +} + +function findMismatches(actual: any, expected: any): Record { + const mismatches: Record = {}; + + if (typeof actual !== typeof expected) { + return { value: { actual, expected } }; + } + + if (typeof actual !== "object" || actual === null || expected === null) { + if (actual !== expected) { + return { value: { actual, expected } }; + } + return {}; + } + + const actualKeys = Object.keys(actual); + const expectedKeys = Object.keys(expected); + + const allKeys = new Set([...actualKeys, ...expectedKeys]); + + for (const key of allKeys) { + if (!expectedKeys.includes(key)) { + if (actual[key] === undefined) { + continue; + } + mismatches[key] = { actual: actual[key], expected: undefined }; + } else if (!actualKeys.includes(key)) { + if (expected[key] === undefined) { + continue; + } + mismatches[key] = { actual: undefined, expected: expected[key] }; + } else if (actual[key] !== expected[key]) { + mismatches[key] = { actual: actual[key], expected: expected[key] }; + } + } + + return mismatches; +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/withHeaders.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/withHeaders.ts new file mode 100644 index 000000000000..6599d2b4a92d --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/withHeaders.ts @@ -0,0 +1,70 @@ +import { type HttpResponseResolver, passthrough } from "msw"; + +/** + * Creates a request matcher that validates if request headers match specified criteria + * @param 
expectedHeaders - Headers to match against
+ * @param resolver - Response resolver to execute if headers match
+ */
+export function withHeaders(
+    expectedHeaders: Record<string, string | RegExp | ((value: string) => boolean)>,
+    resolver: HttpResponseResolver,
+): HttpResponseResolver {
+    return (args) => {
+        const { request } = args;
+        const { headers } = request;
+
+        const mismatches: Record<
+            string,
+            { actual: string | null; expected: string | RegExp | ((value: string) => boolean) }
+        > = {};
+
+        for (const [key, expectedValue] of Object.entries(expectedHeaders)) {
+            const actualValue = headers.get(key);
+
+            if (actualValue === null) {
+                mismatches[key] = { actual: null, expected: expectedValue };
+                continue;
+            }
+
+            if (typeof expectedValue === "function") {
+                if (!expectedValue(actualValue)) {
+                    mismatches[key] = { actual: actualValue, expected: expectedValue };
+                }
+            } else if (expectedValue instanceof RegExp) {
+                if (!expectedValue.test(actualValue)) {
+                    mismatches[key] = { actual: actualValue, expected: expectedValue };
+                }
+            } else if (expectedValue !== actualValue) {
+                mismatches[key] = { actual: actualValue, expected: expectedValue };
+            }
+        }
+
+        if (Object.keys(mismatches).length > 0) {
+            const formattedMismatches = formatHeaderMismatches(mismatches);
+            console.error("Header mismatch:", formattedMismatches);
+            return passthrough();
+        }
+
+        return resolver(args);
+    };
+}
+
+function formatHeaderMismatches(
+    mismatches: Record<string, { actual: string | null; expected: string | RegExp | ((value: string) => boolean) }>,
+): Record<string, { actual: string | null; expected: string }> {
+    const formatted: Record<string, { actual: string | null; expected: string }> = {};
+
+    for (const [key, { actual, expected }] of Object.entries(mismatches)) {
+        formatted[key] = {
+            actual,
+            expected:
+                expected instanceof RegExp
+                    ? expected.toString()
+                    : typeof expected === "function"
+                      ? "[Function]"
+                      : expected,
+        };
+    }
+
+    return formatted;
+}
diff --git a/seed/ts-sdk/ts-extra-properties/tests/mock-server/withJson.ts b/seed/ts-sdk/ts-extra-properties/tests/mock-server/withJson.ts
new file mode 100644
index 000000000000..3e8800a0c374
--- /dev/null
+++ b/seed/ts-sdk/ts-extra-properties/tests/mock-server/withJson.ts
@@ -0,0 +1,173 @@
+import { type HttpResponseResolver, passthrough } from "msw";
+
+import { fromJson, toJson } from "../../src/core/json";
+
+export interface WithJsonOptions {
+    /**
+     * List of field names to ignore when comparing request bodies.
+     * This is useful for pagination cursor fields that change between requests.
+     */
+    ignoredFields?: string[];
+}
+
+/**
+ * Creates a request matcher that validates if the request JSON body exactly matches the expected object
+ * @param expectedBody - The exact body object to match against
+ * @param resolver - Response resolver to execute if body matches
+ * @param options - Optional configuration including fields to ignore
+ */
+export function withJson(
+    expectedBody: unknown,
+    resolver: HttpResponseResolver,
+    options?: WithJsonOptions,
+): HttpResponseResolver {
+    const ignoredFields = options?.ignoredFields ??
[]; + return async (args) => { + const { request } = args; + + let clonedRequest: Request; + let bodyText: string | undefined; + let actualBody: unknown; + try { + clonedRequest = request.clone(); + bodyText = await clonedRequest.text(); + if (bodyText === "") { + console.error("Request body is empty, expected a JSON object."); + return passthrough(); + } + actualBody = fromJson(bodyText); + } catch (error) { + console.error(`Error processing request body:\n\tError: ${error}\n\tBody: ${bodyText}`); + return passthrough(); + } + + const mismatches = findMismatches(actualBody, expectedBody); + const filteredMismatches = Object.keys(mismatches).filter((key) => !ignoredFields.includes(key)); + if (filteredMismatches.length > 0) { + console.error("JSON body mismatch:", toJson(mismatches, undefined, 2)); + return passthrough(); + } + + return resolver(args); + }; +} + +function findMismatches(actual: any, expected: any): Record { + const mismatches: Record = {}; + + if (typeof actual !== typeof expected) { + if (areEquivalent(actual, expected)) { + return {}; + } + return { value: { actual, expected } }; + } + + if (typeof actual !== "object" || actual === null || expected === null) { + if (actual !== expected) { + if (areEquivalent(actual, expected)) { + return {}; + } + return { value: { actual, expected } }; + } + return {}; + } + + if (Array.isArray(actual) && Array.isArray(expected)) { + if (actual.length !== expected.length) { + return { length: { actual: actual.length, expected: expected.length } }; + } + + const arrayMismatches: Record = {}; + for (let i = 0; i < actual.length; i++) { + const itemMismatches = findMismatches(actual[i], expected[i]); + if (Object.keys(itemMismatches).length > 0) { + for (const [mismatchKey, mismatchValue] of Object.entries(itemMismatches)) { + arrayMismatches[`[${i}]${mismatchKey === "value" ? "" : `.${mismatchKey}`}`] = mismatchValue; + } + } + } + return arrayMismatches; + } + + const actualKeys = Object.keys(actual); + const expectedKeys = Object.keys(expected); + + const allKeys = new Set([...actualKeys, ...expectedKeys]); + + for (const key of allKeys) { + if (!expectedKeys.includes(key)) { + if (actual[key] === undefined) { + continue; // Skip undefined values in actual + } + mismatches[key] = { actual: actual[key], expected: undefined }; + } else if (!actualKeys.includes(key)) { + if (expected[key] === undefined) { + continue; // Skip undefined values in expected + } + mismatches[key] = { actual: undefined, expected: expected[key] }; + } else if ( + typeof actual[key] === "object" && + actual[key] !== null && + typeof expected[key] === "object" && + expected[key] !== null + ) { + const nestedMismatches = findMismatches(actual[key], expected[key]); + if (Object.keys(nestedMismatches).length > 0) { + for (const [nestedKey, nestedValue] of Object.entries(nestedMismatches)) { + mismatches[`${key}${nestedKey === "value" ? 
"" : `.${nestedKey}`}`] = nestedValue; + } + } + } else if (actual[key] !== expected[key]) { + if (areEquivalent(actual[key], expected[key])) { + continue; + } + mismatches[key] = { actual: actual[key], expected: expected[key] }; + } + } + + return mismatches; +} + +function areEquivalent(actual: unknown, expected: unknown): boolean { + if (actual === expected) { + return true; + } + if (isEquivalentBigInt(actual, expected)) { + return true; + } + if (isEquivalentDatetime(actual, expected)) { + return true; + } + return false; +} + +function isEquivalentBigInt(actual: unknown, expected: unknown) { + if (typeof actual === "number") { + actual = BigInt(actual); + } + if (typeof expected === "number") { + expected = BigInt(expected); + } + if (typeof actual === "bigint" && typeof expected === "bigint") { + return actual === expected; + } + return false; +} + +function isEquivalentDatetime(str1: unknown, str2: unknown): boolean { + if (typeof str1 !== "string" || typeof str2 !== "string") { + return false; + } + const isoDatePattern = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(?:\.\d{3})?Z$/; + if (!isoDatePattern.test(str1) || !isoDatePattern.test(str2)) { + return false; + } + + try { + const date1 = new Date(str1).getTime(); + const date2 = new Date(str2).getTime(); + return date1 === date2; + } catch { + return false; + } +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/setup.ts b/seed/ts-sdk/ts-extra-properties/tests/setup.ts new file mode 100644 index 000000000000..a5651f81ba10 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/setup.ts @@ -0,0 +1,80 @@ +import { expect } from "vitest"; + +interface CustomMatchers { + toContainHeaders(expectedHeaders: Record): R; +} + +declare module "vitest" { + interface Assertion extends CustomMatchers {} + interface AsymmetricMatchersContaining extends CustomMatchers {} +} + +expect.extend({ + toContainHeaders(actual: unknown, expectedHeaders: Record) { + const isHeaders = actual instanceof Headers; + const isPlainObject = typeof actual === "object" && actual !== null && !Array.isArray(actual); + + if (!isHeaders && !isPlainObject) { + throw new TypeError("Received value must be an instance of Headers or a plain object!"); + } + + if (typeof expectedHeaders !== "object" || expectedHeaders === null || Array.isArray(expectedHeaders)) { + throw new TypeError("Expected headers must be a plain object!"); + } + + const missingHeaders: string[] = []; + const mismatchedHeaders: Array<{ key: string; expected: string; actual: string | null }> = []; + + for (const [key, value] of Object.entries(expectedHeaders)) { + let actualValue: string | null = null; + + if (isHeaders) { + // Headers.get() is already case-insensitive + actualValue = (actual as Headers).get(key); + } else { + // For plain objects, do case-insensitive lookup + const actualObj = actual as Record; + const lowerKey = key.toLowerCase(); + const foundKey = Object.keys(actualObj).find((k) => k.toLowerCase() === lowerKey); + actualValue = foundKey ? actualObj[foundKey] : null; + } + + if (actualValue === null || actualValue === undefined) { + missingHeaders.push(key); + } else if (actualValue !== value) { + mismatchedHeaders.push({ key, expected: value, actual: actualValue }); + } + } + + const pass = missingHeaders.length === 0 && mismatchedHeaders.length === 0; + + const actualType = isHeaders ? 
"Headers" : "object"; + + if (pass) { + return { + message: () => `expected ${actualType} not to contain ${this.utils.printExpected(expectedHeaders)}`, + pass: true, + }; + } else { + const messages: string[] = []; + + if (missingHeaders.length > 0) { + messages.push(`Missing headers: ${this.utils.printExpected(missingHeaders.join(", "))}`); + } + + if (mismatchedHeaders.length > 0) { + const mismatches = mismatchedHeaders.map( + ({ key, expected, actual }) => + `${key}: expected ${this.utils.printExpected(expected)} but got ${this.utils.printReceived(actual)}`, + ); + messages.push(mismatches.join("\n")); + } + + return { + message: () => + `expected ${actualType} to contain ${this.utils.printExpected(expectedHeaders)}\n\n${messages.join("\n")}`, + pass: false, + }; + } + }, +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/tsconfig.json b/seed/ts-sdk/ts-extra-properties/tests/tsconfig.json new file mode 100644 index 000000000000..a477df47920c --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/tsconfig.json @@ -0,0 +1,11 @@ +{ + "extends": "../tsconfig.base.json", + "compilerOptions": { + "outDir": null, + "rootDir": "..", + "baseUrl": "..", + "types": ["vitest/globals"] + }, + "include": ["../src", "../tests"], + "exclude": [] +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/Fetcher.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/Fetcher.test.ts new file mode 100644 index 000000000000..6c17624228bb --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/Fetcher.test.ts @@ -0,0 +1,262 @@ +import fs from "fs"; +import { join } from "path"; +import stream from "stream"; +import type { BinaryResponse } from "../../../src/core"; +import { type Fetcher, fetcherImpl } from "../../../src/core/fetcher/Fetcher"; + +describe("Test fetcherImpl", () => { + it("should handle successful request", async () => { + const mockArgs: Fetcher.Args = { + url: "https://httpbin.org/post", + method: "POST", + headers: { "X-Test": "x-test-header" }, + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + maxRetries: 0, + responseType: "json", + }; + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ data: "test" }), { + status: 200, + statusText: "OK", + }), + ); + + const result = await fetcherImpl(mockArgs); + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.body).toEqual({ data: "test" }); + } + + expect(global.fetch).toHaveBeenCalledWith( + "https://httpbin.org/post", + expect.objectContaining({ + method: "POST", + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), + body: JSON.stringify({ data: "test" }), + }), + ); + }); + + it("should send octet stream", async () => { + const url = "https://httpbin.org/post/file"; + const mockArgs: Fetcher.Args = { + url, + method: "POST", + headers: { "X-Test": "x-test-header" }, + contentType: "application/octet-stream", + requestType: "bytes", + maxRetries: 0, + responseType: "json", + body: fs.createReadStream(join(__dirname, "test-file.txt")), + }; + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ data: "test" }), { + status: 200, + statusText: "OK", + }), + ); + + const result = await fetcherImpl(mockArgs); + + expect(global.fetch).toHaveBeenCalledWith( + url, + expect.objectContaining({ + method: "POST", + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), + body: expect.any(fs.ReadStream), + }), + ); + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.body).toEqual({ 
data: "test" }); + } + }); + + it("should receive file as stream", async () => { + const url = "https://httpbin.org/post/file"; + const mockArgs: Fetcher.Args = { + url, + method: "GET", + headers: { "X-Test": "x-test-header" }, + maxRetries: 0, + responseType: "binary-response", + }; + + global.fetch = vi.fn().mockResolvedValue( + new Response( + stream.Readable.toWeb(fs.createReadStream(join(__dirname, "test-file.txt"))) as ReadableStream, + { + status: 200, + statusText: "OK", + }, + ), + ); + + const result = await fetcherImpl(mockArgs); + + expect(global.fetch).toHaveBeenCalledWith( + url, + expect.objectContaining({ + method: "GET", + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), + }), + ); + expect(result.ok).toBe(true); + if (result.ok) { + const body = result.body as BinaryResponse; + expect(body).toBeDefined(); + expect(body.bodyUsed).toBe(false); + expect(typeof body.stream).toBe("function"); + const stream = body.stream(); + expect(stream).toBeInstanceOf(ReadableStream); + const readableStream = stream as ReadableStream; + const reader = readableStream.getReader(); + const { value } = await reader.read(); + const decoder = new TextDecoder(); + const streamContent = decoder.decode(value); + expect(streamContent.trim()).toBe("This is a test file!"); + expect(body.bodyUsed).toBe(true); + } + }); + + it("should receive file as blob", async () => { + const url = "https://httpbin.org/post/file"; + const mockArgs: Fetcher.Args = { + url, + method: "GET", + headers: { "X-Test": "x-test-header" }, + maxRetries: 0, + responseType: "binary-response", + }; + + global.fetch = vi.fn().mockResolvedValue( + new Response( + stream.Readable.toWeb(fs.createReadStream(join(__dirname, "test-file.txt"))) as ReadableStream, + { + status: 200, + statusText: "OK", + }, + ), + ); + + const result = await fetcherImpl(mockArgs); + + expect(global.fetch).toHaveBeenCalledWith( + url, + expect.objectContaining({ + method: "GET", + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), + }), + ); + expect(result.ok).toBe(true); + if (result.ok) { + const body = result.body as BinaryResponse; + expect(body).toBeDefined(); + expect(body.bodyUsed).toBe(false); + expect(typeof body.blob).toBe("function"); + const blob = await body.blob(); + expect(blob).toBeInstanceOf(Blob); + const reader = blob.stream().getReader(); + const { value } = await reader.read(); + const decoder = new TextDecoder(); + const streamContent = decoder.decode(value); + expect(streamContent.trim()).toBe("This is a test file!"); + expect(body.bodyUsed).toBe(true); + } + }); + + it("should receive file as arraybuffer", async () => { + const url = "https://httpbin.org/post/file"; + const mockArgs: Fetcher.Args = { + url, + method: "GET", + headers: { "X-Test": "x-test-header" }, + maxRetries: 0, + responseType: "binary-response", + }; + + global.fetch = vi.fn().mockResolvedValue( + new Response( + stream.Readable.toWeb(fs.createReadStream(join(__dirname, "test-file.txt"))) as ReadableStream, + { + status: 200, + statusText: "OK", + }, + ), + ); + + const result = await fetcherImpl(mockArgs); + + expect(global.fetch).toHaveBeenCalledWith( + url, + expect.objectContaining({ + method: "GET", + headers: expect.toContainHeaders({ "X-Test": "x-test-header" }), + }), + ); + expect(result.ok).toBe(true); + if (result.ok) { + const body = result.body as BinaryResponse; + expect(body).toBeDefined(); + expect(body.bodyUsed).toBe(false); + expect(typeof body.arrayBuffer).toBe("function"); + const arrayBuffer = await 
body.arrayBuffer();
+            expect(arrayBuffer).toBeInstanceOf(ArrayBuffer);
+            const decoder = new TextDecoder();
+            const streamContent = decoder.decode(new Uint8Array(arrayBuffer));
+            expect(streamContent.trim()).toBe("This is a test file!");
+            expect(body.bodyUsed).toBe(true);
+        }
+    });
+
+    it("should receive file as bytes", async () => {
+        const url = "https://httpbin.org/post/file";
+        const mockArgs: Fetcher.Args = {
+            url,
+            method: "GET",
+            headers: { "X-Test": "x-test-header" },
+            maxRetries: 0,
+            responseType: "binary-response",
+        };
+
+        global.fetch = vi.fn().mockResolvedValue(
+            new Response(
+                stream.Readable.toWeb(fs.createReadStream(join(__dirname, "test-file.txt"))) as ReadableStream,
+                {
+                    status: 200,
+                    statusText: "OK",
+                },
+            ),
+        );
+
+        const result = await fetcherImpl(mockArgs);
+
+        expect(global.fetch).toHaveBeenCalledWith(
+            url,
+            expect.objectContaining({
+                method: "GET",
+                headers: expect.toContainHeaders({ "X-Test": "x-test-header" }),
+            }),
+        );
+        expect(result.ok).toBe(true);
+        if (result.ok) {
+            const body = result.body as BinaryResponse;
+            expect(body).toBeDefined();
+            expect(body.bodyUsed).toBe(false);
+            expect(typeof body.bytes).toBe("function");
+            if (!body.bytes) {
+                return;
+            }
+            const bytes = await body.bytes();
+            expect(bytes).toBeInstanceOf(Uint8Array);
+            const decoder = new TextDecoder();
+            const streamContent = decoder.decode(bytes);
+            expect(streamContent.trim()).toBe("This is a test file!");
+            expect(body.bodyUsed).toBe(true);
+        }
+    });
+});
diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/HttpResponsePromise.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/HttpResponsePromise.test.ts
new file mode 100644
index 000000000000..2ec008e581d8
--- /dev/null
+++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/HttpResponsePromise.test.ts
@@ -0,0 +1,143 @@
+import { beforeEach, describe, expect, it, vi } from "vitest";
+
+import { HttpResponsePromise } from "../../../src/core/fetcher/HttpResponsePromise";
+import type { RawResponse, WithRawResponse } from "../../../src/core/fetcher/RawResponse";
+
+describe("HttpResponsePromise", () => {
+    const mockRawResponse: RawResponse = {
+        headers: new Headers(),
+        redirected: false,
+        status: 200,
+        statusText: "OK",
+        type: "basic" as ResponseType,
+        url: "https://example.com",
+    };
+    const mockData = { id: "123", name: "test" };
+    const mockWithRawResponse: WithRawResponse<typeof mockData> = {
+        data: mockData,
+        rawResponse: mockRawResponse,
+    };
+
+    describe("fromFunction", () => {
+        it("should create an HttpResponsePromise from a function", async () => {
+            const mockFn = vi
+                .fn<(arg1: string, arg2: string) => Promise<WithRawResponse<typeof mockData>>>()
+                .mockResolvedValue(mockWithRawResponse);
+
+            const responsePromise = HttpResponsePromise.fromFunction(mockFn, "arg1", "arg2");
+
+            const result = await responsePromise;
+            expect(result).toEqual(mockData);
+            expect(mockFn).toHaveBeenCalledWith("arg1", "arg2");
+
+            const resultWithRawResponse = await responsePromise.withRawResponse();
+            expect(resultWithRawResponse).toEqual({
+                data: mockData,
+                rawResponse: mockRawResponse,
+            });
+        });
+    });
+
+    describe("fromPromise", () => {
+        it("should create an HttpResponsePromise from a promise", async () => {
+            const promise = Promise.resolve(mockWithRawResponse);
+
+            const responsePromise = HttpResponsePromise.fromPromise(promise);
+
+            const result = await responsePromise;
+            expect(result).toEqual(mockData);
+
+            const resultWithRawResponse = await responsePromise.withRawResponse();
+            expect(resultWithRawResponse).toEqual({
+                data: mockData,
+ rawResponse: mockRawResponse, + }); + }); + }); + + describe("fromExecutor", () => { + it("should create an HttpResponsePromise from an executor function", async () => { + const responsePromise = HttpResponsePromise.fromExecutor((resolve) => { + resolve(mockWithRawResponse); + }); + + const result = await responsePromise; + expect(result).toEqual(mockData); + + const resultWithRawResponse = await responsePromise.withRawResponse(); + expect(resultWithRawResponse).toEqual({ + data: mockData, + rawResponse: mockRawResponse, + }); + }); + }); + + describe("fromResult", () => { + it("should create an HttpResponsePromise from a result", async () => { + const responsePromise = HttpResponsePromise.fromResult(mockWithRawResponse); + + const result = await responsePromise; + expect(result).toEqual(mockData); + + const resultWithRawResponse = await responsePromise.withRawResponse(); + expect(resultWithRawResponse).toEqual({ + data: mockData, + rawResponse: mockRawResponse, + }); + }); + }); + + describe("Promise methods", () => { + let responsePromise: HttpResponsePromise; + + beforeEach(() => { + responsePromise = HttpResponsePromise.fromResult(mockWithRawResponse); + }); + + it("should support then() method", async () => { + const result = await responsePromise.then((data) => ({ + ...data, + modified: true, + })); + + expect(result).toEqual({ + ...mockData, + modified: true, + }); + }); + + it("should support catch() method", async () => { + const errorResponsePromise = HttpResponsePromise.fromExecutor((_, reject) => { + reject(new Error("Test error")); + }); + + const catchSpy = vi.fn(); + await errorResponsePromise.catch(catchSpy); + + expect(catchSpy).toHaveBeenCalled(); + const error = catchSpy.mock.calls[0]?.[0]; + expect(error).toBeInstanceOf(Error); + expect((error as Error).message).toBe("Test error"); + }); + + it("should support finally() method", async () => { + const finallySpy = vi.fn(); + await responsePromise.finally(finallySpy); + + expect(finallySpy).toHaveBeenCalled(); + }); + }); + + describe("withRawResponse", () => { + it("should return both data and raw response", async () => { + const responsePromise = HttpResponsePromise.fromResult(mockWithRawResponse); + + const result = await responsePromise.withRawResponse(); + + expect(result).toEqual({ + data: mockData, + rawResponse: mockRawResponse, + }); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/RawResponse.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/RawResponse.test.ts new file mode 100644 index 000000000000..375ee3f38064 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/RawResponse.test.ts @@ -0,0 +1,34 @@ +import { describe, expect, it } from "vitest"; + +import { toRawResponse } from "../../../src/core/fetcher/RawResponse"; + +describe("RawResponse", () => { + describe("toRawResponse", () => { + it("should convert Response to RawResponse by removing body, bodyUsed, and ok properties", () => { + const mockHeaders = new Headers({ "content-type": "application/json" }); + const mockResponse = { + body: "test body", + bodyUsed: false, + ok: true, + headers: mockHeaders, + redirected: false, + status: 200, + statusText: "OK", + type: "basic" as ResponseType, + url: "https://example.com", + }; + + const result = toRawResponse(mockResponse as unknown as Response); + + expect("body" in result).toBe(false); + expect("bodyUsed" in result).toBe(false); + expect("ok" in result).toBe(false); + expect(result.headers).toBe(mockHeaders); + 
expect(result.redirected).toBe(false); + expect(result.status).toBe(200); + expect(result.statusText).toBe("OK"); + expect(result.type).toBe("basic"); + expect(result.url).toBe("https://example.com"); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/createRequestUrl.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/createRequestUrl.test.ts new file mode 100644 index 000000000000..a92f1b5e81d1 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/createRequestUrl.test.ts @@ -0,0 +1,163 @@ +import { createRequestUrl } from "../../../src/core/fetcher/createRequestUrl"; + +describe("Test createRequestUrl", () => { + const BASE_URL = "https://api.example.com"; + + interface TestCase { + description: string; + baseUrl: string; + queryParams?: Record; + expected: string; + } + + const testCases: TestCase[] = [ + { + description: "should return the base URL when no query parameters are provided", + baseUrl: BASE_URL, + expected: BASE_URL, + }, + { + description: "should append simple query parameters", + baseUrl: BASE_URL, + queryParams: { key: "value", another: "param" }, + expected: "https://api.example.com?key=value&another=param", + }, + { + description: "should handle array query parameters", + baseUrl: BASE_URL, + queryParams: { items: ["a", "b", "c"] }, + expected: "https://api.example.com?items=a&items=b&items=c", + }, + { + description: "should handle object query parameters", + baseUrl: BASE_URL, + queryParams: { filter: { name: "John", age: 30 } }, + expected: "https://api.example.com?filter%5Bname%5D=John&filter%5Bage%5D=30", + }, + { + description: "should handle mixed types of query parameters", + baseUrl: BASE_URL, + queryParams: { + simple: "value", + array: ["x", "y"], + object: { key: "value" }, + }, + expected: "https://api.example.com?simple=value&array=x&array=y&object%5Bkey%5D=value", + }, + { + description: "should handle empty query parameters object", + baseUrl: BASE_URL, + queryParams: {}, + expected: BASE_URL, + }, + { + description: "should encode special characters in query parameters", + baseUrl: BASE_URL, + queryParams: { special: "a&b=c d" }, + expected: "https://api.example.com?special=a%26b%3Dc%20d", + }, + { + description: "should handle numeric values", + baseUrl: BASE_URL, + queryParams: { count: 42, price: 19.99, active: 1, inactive: 0 }, + expected: "https://api.example.com?count=42&price=19.99&active=1&inactive=0", + }, + { + description: "should handle boolean values", + baseUrl: BASE_URL, + queryParams: { enabled: true, disabled: false }, + expected: "https://api.example.com?enabled=true&disabled=false", + }, + { + description: "should handle null and undefined values", + baseUrl: BASE_URL, + queryParams: { + valid: "value", + nullValue: null, + undefinedValue: undefined, + emptyString: "", + }, + expected: "https://api.example.com?valid=value&nullValue=&emptyString=", + }, + { + description: "should handle deeply nested objects", + baseUrl: BASE_URL, + queryParams: { + user: { + profile: { + name: "John", + settings: { theme: "dark" }, + }, + }, + }, + expected: + "https://api.example.com?user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark", + }, + { + description: "should handle arrays of objects", + baseUrl: BASE_URL, + queryParams: { + users: [ + { name: "John", age: 30 }, + { name: "Jane", age: 25 }, + ], + }, + expected: + "https://api.example.com?users%5Bname%5D=John&users%5Bage%5D=30&users%5Bname%5D=Jane&users%5Bage%5D=25", + }, + { + description: "should 
handle mixed arrays", + baseUrl: BASE_URL, + queryParams: { + mixed: ["string", 42, true, { key: "value" }], + }, + expected: "https://api.example.com?mixed=string&mixed=42&mixed=true&mixed%5Bkey%5D=value", + }, + { + description: "should handle empty arrays", + baseUrl: BASE_URL, + queryParams: { emptyArray: [] }, + expected: BASE_URL, + }, + { + description: "should handle empty objects", + baseUrl: BASE_URL, + queryParams: { emptyObject: {} }, + expected: BASE_URL, + }, + { + description: "should handle special characters in keys", + baseUrl: BASE_URL, + queryParams: { "key with spaces": "value", "key[with]brackets": "value" }, + expected: "https://api.example.com?key%20with%20spaces=value&key%5Bwith%5Dbrackets=value", + }, + { + description: "should handle URL with existing query parameters", + baseUrl: "https://api.example.com?existing=param", + queryParams: { new: "value" }, + expected: "https://api.example.com?existing=param?new=value", + }, + { + description: "should handle complex nested structures", + baseUrl: BASE_URL, + queryParams: { + filters: { + status: ["active", "pending"], + category: { + type: "electronics", + subcategories: ["phones", "laptops"], + }, + }, + sort: { field: "name", direction: "asc" }, + }, + expected: + "https://api.example.com?filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", + }, + ]; + + testCases.forEach(({ description, baseUrl, queryParams, expected }) => { + it(description, () => { + expect(createRequestUrl(baseUrl, queryParams)).toBe(expected); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/getRequestBody.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/getRequestBody.test.ts new file mode 100644 index 000000000000..8a6c3a57e211 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/getRequestBody.test.ts @@ -0,0 +1,129 @@ +import { getRequestBody } from "../../../src/core/fetcher/getRequestBody"; +import { RUNTIME } from "../../../src/core/runtime"; + +describe("Test getRequestBody", () => { + interface TestCase { + description: string; + input: any; + type: "json" | "form" | "file" | "bytes" | "other"; + expected: any; + skipCondition?: () => boolean; + } + + const testCases: TestCase[] = [ + { + description: "should stringify body if not FormData in Node environment", + input: { key: "value" }, + type: "json", + expected: '{"key":"value"}', + skipCondition: () => RUNTIME.type !== "node", + }, + { + description: "should stringify body if not FormData in browser environment", + input: { key: "value" }, + type: "json", + expected: '{"key":"value"}', + skipCondition: () => RUNTIME.type !== "browser", + }, + { + description: "should return the Uint8Array", + input: new Uint8Array([1, 2, 3]), + type: "bytes", + expected: new Uint8Array([1, 2, 3]), + }, + { + description: "should serialize objects for form-urlencoded content type", + input: { username: "johndoe", email: "john@example.com" }, + type: "form", + expected: "username=johndoe&email=john%40example.com", + }, + { + description: "should serialize complex nested objects and arrays for form-urlencoded content type", + input: { + user: { + profile: { + name: "John Doe", + settings: { + theme: "dark", + notifications: true, + }, + }, + tags: ["admin", "user"], + contacts: [ + { type: "email", value: "john@example.com" }, + { type: "phone", value: 
"+1234567890" }, + ], + }, + filters: { + status: ["active", "pending"], + metadata: { + created: "2024-01-01", + categories: ["electronics", "books"], + }, + }, + preferences: ["notifications", "updates"], + }, + type: "form", + expected: + "user%5Bprofile%5D%5Bname%5D=John%20Doe&" + + "user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark&" + + "user%5Bprofile%5D%5Bsettings%5D%5Bnotifications%5D=true&" + + "user%5Btags%5D=admin&" + + "user%5Btags%5D=user&" + + "user%5Bcontacts%5D%5Btype%5D=email&" + + "user%5Bcontacts%5D%5Bvalue%5D=john%40example.com&" + + "user%5Bcontacts%5D%5Btype%5D=phone&" + + "user%5Bcontacts%5D%5Bvalue%5D=%2B1234567890&" + + "filters%5Bstatus%5D=active&" + + "filters%5Bstatus%5D=pending&" + + "filters%5Bmetadata%5D%5Bcreated%5D=2024-01-01&" + + "filters%5Bmetadata%5D%5Bcategories%5D=electronics&" + + "filters%5Bmetadata%5D%5Bcategories%5D=books&" + + "preferences=notifications&" + + "preferences=updates", + }, + { + description: "should return the input for pre-serialized form-urlencoded strings", + input: "key=value&another=param", + type: "other", + expected: "key=value&another=param", + }, + { + description: "should JSON stringify objects", + input: { key: "value" }, + type: "json", + expected: '{"key":"value"}', + }, + ]; + + testCases.forEach(({ description, input, type, expected, skipCondition }) => { + it(description, async () => { + if (skipCondition?.()) { + return; + } + + const result = await getRequestBody({ + body: input, + type, + }); + + if (input instanceof Uint8Array) { + expect(result).toBe(input); + } else { + expect(result).toBe(expected); + } + }); + }); + + it("should return FormData in browser environment", async () => { + if (RUNTIME.type === "browser") { + const formData = new FormData(); + formData.append("key", "value"); + const result = await getRequestBody({ + body: formData, + type: "file", + }); + expect(result).toBe(formData); + } + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/getResponseBody.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/getResponseBody.test.ts new file mode 100644 index 000000000000..ad6be7fc2c9b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/getResponseBody.test.ts @@ -0,0 +1,97 @@ +import { getResponseBody } from "../../../src/core/fetcher/getResponseBody"; + +import { RUNTIME } from "../../../src/core/runtime"; + +describe("Test getResponseBody", () => { + interface SimpleTestCase { + description: string; + responseData: string | Record; + responseType?: "blob" | "sse" | "streaming" | "text"; + expected: any; + skipCondition?: () => boolean; + } + + const simpleTestCases: SimpleTestCase[] = [ + { + description: "should handle text response type", + responseData: "test text", + responseType: "text", + expected: "test text", + }, + { + description: "should handle JSON response", + responseData: { key: "value" }, + expected: { key: "value" }, + }, + { + description: "should handle empty response", + responseData: "", + expected: undefined, + }, + { + description: "should handle non-JSON response", + responseData: "invalid json", + expected: { + ok: false, + error: { + reason: "non-json", + statusCode: 200, + rawBody: "invalid json", + }, + }, + }, + ]; + + simpleTestCases.forEach(({ description, responseData, responseType, expected, skipCondition }) => { + it(description, async () => { + if (skipCondition?.()) { + return; + } + + const mockResponse = new Response( + typeof responseData === "string" ? 
responseData : JSON.stringify(responseData), + ); + const result = await getResponseBody(mockResponse, responseType); + expect(result).toEqual(expected); + }); + }); + + it("should handle blob response type", async () => { + const mockBlob = new Blob(["test"], { type: "text/plain" }); + const mockResponse = new Response(mockBlob); + const result = await getResponseBody(mockResponse, "blob"); + // @ts-expect-error + expect(result.constructor.name).toBe("Blob"); + }); + + it("should handle sse response type", async () => { + if (RUNTIME.type === "node") { + const mockStream = new ReadableStream(); + const mockResponse = new Response(mockStream); + const result = await getResponseBody(mockResponse, "sse"); + expect(result).toBe(mockStream); + } + }); + + it("should handle streaming response type", async () => { + const encoder = new TextEncoder(); + const testData = "test stream data"; + const mockStream = new ReadableStream({ + start(controller) { + controller.enqueue(encoder.encode(testData)); + controller.close(); + }, + }); + + const mockResponse = new Response(mockStream); + const result = (await getResponseBody(mockResponse, "streaming")) as ReadableStream; + + expect(result).toBeInstanceOf(ReadableStream); + + const reader = result.getReader(); + const decoder = new TextDecoder(); + const { value } = await reader.read(); + const streamContent = decoder.decode(value); + expect(streamContent).toBe(testData); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/logging.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/logging.test.ts new file mode 100644 index 000000000000..366c9b6ced61 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/logging.test.ts @@ -0,0 +1,517 @@ +import { fetcherImpl } from "../../../src/core/fetcher/Fetcher"; + +function createMockLogger() { + return { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }; +} + +function mockSuccessResponse(data: unknown = { data: "test" }, status = 200, statusText = "OK") { + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify(data), { + status, + statusText, + }), + ); +} + +function mockErrorResponse(data: unknown = { error: "Error" }, status = 404, statusText = "Not Found") { + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify(data), { + status, + statusText, + }), + ); +} + +describe("Fetcher Logging Integration", () => { + describe("Request Logging", () => { + it("should log successful request at debug level", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + headers: { "Content-Type": "application/json" }, + body: { test: "data" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "POST", + url: "https://example.com/api", + headers: expect.toContainHeaders({ + "Content-Type": "application/json", + }), + hasBody: true, + }), + ); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + method: "POST", + url: "https://example.com/api", + statusCode: 200, + }), + ); + }); + + it("should not log debug messages at info level for successful requests", async () => { + const mockLogger = createMockLogger(); + 
mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "info", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + }); + + it("should log request with body flag", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + hasBody: true, + }), + ); + }); + + it("should log request without body flag", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + hasBody: false, + }), + ); + }); + + it("should not log when silent mode is enabled", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: true, + }, + }); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).not.toHaveBeenCalled(); + expect(mockLogger.error).not.toHaveBeenCalled(); + }); + + it("should not log when no logging config is provided", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + }); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + }); + }); + + describe("Error Logging", () => { + it("should log 4xx errors at error level", async () => { + const mockLogger = createMockLogger(); + mockErrorResponse({ error: "Not found" }, 404, "Not Found"); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + statusCode: 404, + }), + ); + }); + + it("should log 5xx errors at error level", async () => { + const mockLogger = createMockLogger(); + mockErrorResponse({ error: "Internal error" }, 500, "Internal Server Error"); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + statusCode: 500, + }), + ); + }); + + it("should log aborted request errors", async () => { + 
const mockLogger = createMockLogger(); + + const abortController = new AbortController(); + abortController.abort(); + + global.fetch = vi.fn().mockRejectedValue(new Error("Aborted")); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + abortSignal: abortController.signal, + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request was aborted", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + }), + ); + }); + + it("should log timeout errors", async () => { + const mockLogger = createMockLogger(); + + const timeoutError = new Error("Request timeout"); + timeoutError.name = "AbortError"; + + global.fetch = vi.fn().mockRejectedValue(timeoutError); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request timed out", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + timeoutMs: undefined, + }), + ); + }); + + it("should log unknown errors", async () => { + const mockLogger = createMockLogger(); + + const unknownError = new Error("Unknown error"); + + global.fetch = vi.fn().mockRejectedValue(unknownError); + + const result = await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(result.ok).toBe(false); + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error", + expect.objectContaining({ + method: "GET", + url: "https://example.com/api", + errorMessage: "Unknown error", + }), + ); + }); + }); + + describe("Logging with Redaction", () => { + it("should redact sensitive data in error logs", async () => { + const mockLogger = createMockLogger(); + mockErrorResponse({ error: "Unauthorized" }, 401, "Unauthorized"); + + await fetcherImpl({ + url: "https://example.com/api?api_key=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + url: "https://example.com/api?api_key=[REDACTED]", + }), + ); + }); + }); + + describe("Different HTTP Methods", () => { + it("should log GET requests", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "GET", + }), + ); + }); + + it("should log POST requests", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse({ data: "test" }, 201, "Created"); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + 
}); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "POST", + }), + ); + }); + + it("should log PUT requests", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "PUT", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "PUT", + }), + ); + }); + + it("should log DELETE requests", async () => { + const mockLogger = createMockLogger(); + global.fetch = vi.fn().mockResolvedValue( + new Response(null, { + status: 200, + statusText: "OK", + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "DELETE", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + method: "DELETE", + }), + ); + }); + }); + + describe("Status Code Logging", () => { + it("should log 2xx success status codes", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse({ data: "test" }, 201, "Created"); + + await fetcherImpl({ + url: "https://example.com/api", + method: "POST", + body: { data: "test" }, + contentType: "application/json", + requestType: "json", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + statusCode: 201, + }), + ); + }); + + it("should log 3xx redirect status codes as success", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse({ data: "test" }, 301, "Moved Permanently"); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + statusCode: 301, + }), + ); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/makeRequest.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/makeRequest.test.ts new file mode 100644 index 000000000000..ea49466a55fc --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/makeRequest.test.ts @@ -0,0 +1,54 @@ +import type { Mock } from "vitest"; +import { makeRequest } from "../../../src/core/fetcher/makeRequest"; + +describe("Test makeRequest", () => { + const mockPostUrl = "https://httpbin.org/post"; + const mockGetUrl = "https://httpbin.org/get"; + const mockHeaders = { "Content-Type": "application/json" }; + const mockBody = JSON.stringify({ key: "value" }); + + let mockFetch: Mock; + + beforeEach(() => { + mockFetch = vi.fn(); + mockFetch.mockResolvedValue(new Response(JSON.stringify({ test: "successful" }), { status: 200 })); + }); + + it("should handle POST request correctly", async () => { + const response = await makeRequest(mockFetch, mockPostUrl, "POST", mockHeaders, mockBody); + const responseBody = await response.json(); + expect(responseBody).toEqual({ test: "successful" }); + expect(mockFetch).toHaveBeenCalledTimes(1); + const 
[calledUrl, calledOptions] = mockFetch.mock.calls[0]; + expect(calledUrl).toBe(mockPostUrl); + expect(calledOptions).toEqual( + expect.objectContaining({ + method: "POST", + headers: mockHeaders, + body: mockBody, + credentials: undefined, + }), + ); + expect(calledOptions.signal).toBeDefined(); + expect(calledOptions.signal).toBeInstanceOf(AbortSignal); + }); + + it("should handle GET request correctly", async () => { + const response = await makeRequest(mockFetch, mockGetUrl, "GET", mockHeaders, undefined); + const responseBody = await response.json(); + expect(responseBody).toEqual({ test: "successful" }); + expect(mockFetch).toHaveBeenCalledTimes(1); + const [calledUrl, calledOptions] = mockFetch.mock.calls[0]; + expect(calledUrl).toBe(mockGetUrl); + expect(calledOptions).toEqual( + expect.objectContaining({ + method: "GET", + headers: mockHeaders, + body: undefined, + credentials: undefined, + }), + ); + expect(calledOptions.signal).toBeDefined(); + expect(calledOptions.signal).toBeInstanceOf(AbortSignal); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/redacting.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/redacting.test.ts new file mode 100644 index 000000000000..d599376b9bcf --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/redacting.test.ts @@ -0,0 +1,1115 @@ +import { fetcherImpl } from "../../../src/core/fetcher/Fetcher"; + +function createMockLogger() { + return { + debug: vi.fn(), + info: vi.fn(), + warn: vi.fn(), + error: vi.fn(), + }; +} + +function mockSuccessResponse(data: unknown = { data: "test" }, status = 200, statusText = "OK") { + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify(data), { + status, + statusText, + }), + ); +} + +describe("Redacting Logic", () => { + describe("Header Redaction", () => { + it("should redact authorization header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { Authorization: "Bearer secret-token-12345" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + Authorization: "[REDACTED]", + }), + }), + ); + }); + + it("should redact api-key header (case-insensitive)", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "X-API-KEY": "secret-api-key" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "X-API-KEY": "[REDACTED]", + }), + }), + ); + }); + + it("should redact cookie header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { Cookie: "session=abc123; token=xyz789" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + Cookie: "[REDACTED]", + }), + }), + ); + }); + + 
it("should redact x-auth-token header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "x-auth-token": "auth-token-12345" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "x-auth-token": "[REDACTED]", + }), + }), + ); + }); + + it("should redact proxy-authorization header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "Proxy-Authorization": "Basic credentials" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "Proxy-Authorization": "[REDACTED]", + }), + }), + ); + }); + + it("should redact x-csrf-token header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "X-CSRF-Token": "csrf-token-abc" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "X-CSRF-Token": "[REDACTED]", + }), + }), + ); + }); + + it("should redact www-authenticate header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "WWW-Authenticate": "Bearer realm=example" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "WWW-Authenticate": "[REDACTED]", + }), + }), + ); + }); + + it("should redact x-session-token header", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { "X-Session-Token": "session-token-xyz" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "X-Session-Token": "[REDACTED]", + }), + }), + ); + }); + + it("should not redact non-sensitive headers", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { + "Content-Type": "application/json", + "User-Agent": "Test/1.0", + Accept: "application/json", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + "Content-Type": "application/json", + "User-Agent": "Test/1.0", + Accept: 
"application/json", + }), + }), + ); + }); + + it("should redact multiple sensitive headers at once", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + headers: { + Authorization: "Bearer token", + "X-API-Key": "api-key", + Cookie: "session=123", + "Content-Type": "application/json", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + headers: expect.toContainHeaders({ + Authorization: "[REDACTED]", + "X-API-Key": "[REDACTED]", + Cookie: "[REDACTED]", + "Content-Type": "application/json", + }), + }), + ); + }); + }); + + describe("Response Header Redaction", () => { + it("should redact Set-Cookie in response headers", async () => { + const mockLogger = createMockLogger(); + + const mockHeaders = new Headers(); + mockHeaders.set("Set-Cookie", "session=abc123; HttpOnly; Secure"); + mockHeaders.set("Content-Type", "application/json"); + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ data: "test" }), { + status: 200, + statusText: "OK", + headers: mockHeaders, + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + responseHeaders: expect.toContainHeaders({ + "set-cookie": "[REDACTED]", + "content-type": "application/json", + }), + }), + ); + }); + + it("should redact authorization in response headers", async () => { + const mockLogger = createMockLogger(); + + const mockHeaders = new Headers(); + mockHeaders.set("Authorization", "Bearer token-123"); + mockHeaders.set("Content-Type", "application/json"); + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ data: "test" }), { + status: 200, + statusText: "OK", + headers: mockHeaders, + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "HTTP request succeeded", + expect.objectContaining({ + responseHeaders: expect.toContainHeaders({ + authorization: "[REDACTED]", + "content-type": "application/json", + }), + }), + ); + }); + + it("should redact response headers in error responses", async () => { + const mockLogger = createMockLogger(); + + const mockHeaders = new Headers(); + mockHeaders.set("WWW-Authenticate", "Bearer realm=example"); + mockHeaders.set("Content-Type", "application/json"); + + global.fetch = vi.fn().mockResolvedValue( + new Response(JSON.stringify({ error: "Unauthorized" }), { + status: 401, + statusText: "Unauthorized", + headers: mockHeaders, + }), + ); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "error", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.error).toHaveBeenCalledWith( + "HTTP request failed with error status", + expect.objectContaining({ + responseHeaders: expect.toContainHeaders({ + "www-authenticate": "[REDACTED]", + "content-type": "application/json", + }), + }), + ); + }); + }); + + describe("Query 
Parameter Redaction", () => { + it("should redact api_key query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { api_key: "secret-key" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + api_key: "[REDACTED]", + }), + }), + ); + }); + + it("should redact token query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { token: "secret-token" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + token: "[REDACTED]", + }), + }), + ); + }); + + it("should redact access_token query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { access_token: "secret-access-token" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + access_token: "[REDACTED]", + }), + }), + ); + }); + + it("should redact password query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { password: "secret-password" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + password: "[REDACTED]", + }), + }), + ); + }); + + it("should redact secret query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { secret: "secret-value" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + secret: "[REDACTED]", + }), + }), + ); + }); + + it("should redact session_id query parameter", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { session_id: "session-123" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + session_id: "[REDACTED]", + }), + }), + ); + }); + + it("should not redact non-sensitive query parameters", async () => { + const 
mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { + page: "1", + limit: "10", + sort: "name", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + page: "1", + limit: "10", + sort: "name", + }), + }), + ); + }); + + it("should not redact parameters containing 'auth' substring like 'author'", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { + author: "john", + authenticate: "false", + authorization_level: "user", + }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + author: "john", + authenticate: "false", + authorization_level: "user", + }), + }), + ); + }); + + it("should handle undefined query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: undefined, + }), + ); + }); + + it("should redact case-insensitive query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + queryParameters: { API_KEY: "secret-key", Token: "secret-token" }, + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + queryParameters: expect.objectContaining({ + API_KEY: "[REDACTED]", + Token: "[REDACTED]", + }), + }), + ); + }); + }); + + describe("URL Redaction", () => { + it("should redact credentials in URL", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user:password@example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@example.com/api", + }), + ); + }); + + it("should redact api_key in query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?api_key=secret-key&page=1", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?api_key=[REDACTED]&page=1", + }), + ); + }); + + it("should redact token in query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + 
await fetcherImpl({ + url: "https://example.com/api?token=secret-token", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?token=[REDACTED]", + }), + ); + }); + + it("should redact password in query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?username=user&password=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?username=user&password=[REDACTED]", + }), + ); + }); + + it("should not redact non-sensitive query strings", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?page=1&limit=10&sort=name", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?page=1&limit=10&sort=name", + }), + ); + }); + + it("should not redact URL parameters containing 'auth' substring like 'author'", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?author=john&authenticate=false&page=1", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?author=john&authenticate=false&page=1", + }), + ); + }); + + it("should handle URL with fragment", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?token=secret#section", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?token=[REDACTED]#section", + }), + ); + }); + + it("should redact URL-encoded query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?api%5Fkey=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?api%5Fkey=[REDACTED]", + }), + ); + }); + + it("should handle URL without query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: 
"https://example.com/api", + }), + ); + }); + + it("should handle empty query string", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?", + }), + ); + }); + + it("should redact multiple sensitive parameters in URL", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?api_key=secret1&token=secret2&page=1", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?api_key=[REDACTED]&token=[REDACTED]&page=1", + }), + ); + }); + + it("should redact both credentials and query parameters", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user:pass@example.com/api?token=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@example.com/api?token=[REDACTED]", + }), + ); + }); + + it("should use fast path for URLs without sensitive keywords", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?page=1&limit=10&sort=name&filter=value", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?page=1&limit=10&sort=name&filter=value", + }), + ); + }); + + it("should handle query parameter without value", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?flag&token=secret", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?flag&token=[REDACTED]", + }), + ); + }); + + it("should handle URL with multiple @ symbols in credentials", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user@example.com:pass@host.com/api", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@host.com/api", + }), + ); + }); + + it("should handle URL with @ in query parameter but not in credentials", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://example.com/api?email=user@example.com", + method: "GET", + responseType: "json", + 
maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://example.com/api?email=user@example.com", + }), + ); + }); + + it("should handle URL with both credentials and @ in path", async () => { + const mockLogger = createMockLogger(); + mockSuccessResponse(); + + await fetcherImpl({ + url: "https://user:pass@example.com/users/@username", + method: "GET", + responseType: "json", + maxRetries: 0, + logging: { + level: "debug", + logger: mockLogger, + silent: false, + }, + }); + + expect(mockLogger.debug).toHaveBeenCalledWith( + "Making HTTP request", + expect.objectContaining({ + url: "https://[REDACTED]@example.com/users/@username", + }), + ); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/requestWithRetries.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/requestWithRetries.test.ts new file mode 100644 index 000000000000..d22661367f4e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/requestWithRetries.test.ts @@ -0,0 +1,230 @@ +import type { Mock, MockInstance } from "vitest"; +import { requestWithRetries } from "../../../src/core/fetcher/requestWithRetries"; + +describe("requestWithRetries", () => { + let mockFetch: Mock; + let originalMathRandom: typeof Math.random; + let setTimeoutSpy: MockInstance; + + beforeEach(() => { + mockFetch = vi.fn(); + originalMathRandom = Math.random; + + Math.random = vi.fn(() => 0.5); + + vi.useFakeTimers({ + toFake: [ + "setTimeout", + "clearTimeout", + "setInterval", + "clearInterval", + "setImmediate", + "clearImmediate", + "Date", + "performance", + "requestAnimationFrame", + "cancelAnimationFrame", + "requestIdleCallback", + "cancelIdleCallback", + ], + }); + }); + + afterEach(() => { + Math.random = originalMathRandom; + vi.clearAllMocks(); + vi.clearAllTimers(); + }); + + it("should retry on retryable status codes", async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as any; + }); + + const retryableStatuses = [408, 429, 500, 502]; + let callCount = 0; + + mockFetch.mockImplementation(async () => { + if (callCount < retryableStatuses.length) { + return new Response("", { status: retryableStatuses[callCount++] }); + } + return new Response("", { status: 200 }); + }); + + const responsePromise = requestWithRetries(() => mockFetch(), retryableStatuses.length); + await vi.runAllTimersAsync(); + const response = await responsePromise; + + expect(mockFetch).toHaveBeenCalledTimes(retryableStatuses.length + 1); + expect(response.status).toBe(200); + }); + + it("should respect maxRetries limit", async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as any; + }); + + const maxRetries = 2; + mockFetch.mockResolvedValue(new Response("", { status: 500 })); + + const responsePromise = requestWithRetries(() => mockFetch(), maxRetries); + await vi.runAllTimersAsync(); + const response = await responsePromise; + + expect(mockFetch).toHaveBeenCalledTimes(maxRetries + 1); + expect(response.status).toBe(500); + }); + + it("should not retry on success status codes", async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as 
any; + }); + + const successStatuses = [200, 201, 202]; + + for (const status of successStatuses) { + mockFetch.mockReset(); + setTimeoutSpy.mockClear(); + mockFetch.mockResolvedValueOnce(new Response("", { status })); + + const responsePromise = requestWithRetries(() => mockFetch(), 3); + await vi.runAllTimersAsync(); + await responsePromise; + + expect(mockFetch).toHaveBeenCalledTimes(1); + expect(setTimeoutSpy).not.toHaveBeenCalled(); + } + }); + + interface RetryHeaderTestCase { + description: string; + headerName: string; + headerValue: string | (() => string); + expectedDelayMin: number; + expectedDelayMax: number; + } + + const retryHeaderTests: RetryHeaderTestCase[] = [ + { + description: "should respect retry-after header with seconds value", + headerName: "retry-after", + headerValue: "5", + expectedDelayMin: 4000, + expectedDelayMax: 6000, + }, + { + description: "should respect retry-after header with HTTP date value", + headerName: "retry-after", + headerValue: () => new Date(Date.now() + 3000).toUTCString(), + expectedDelayMin: 2000, + expectedDelayMax: 4000, + }, + { + description: "should respect x-ratelimit-reset header", + headerName: "x-ratelimit-reset", + headerValue: () => Math.floor((Date.now() + 4000) / 1000).toString(), + expectedDelayMin: 3000, + expectedDelayMax: 6000, + }, + ]; + + retryHeaderTests.forEach(({ description, headerName, headerValue, expectedDelayMin, expectedDelayMax }) => { + it(description, async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as any; + }); + + const value = typeof headerValue === "function" ? headerValue() : headerValue; + mockFetch + .mockResolvedValueOnce( + new Response("", { + status: 429, + headers: new Headers({ [headerName]: value }), + }), + ) + .mockResolvedValueOnce(new Response("", { status: 200 })); + + const responsePromise = requestWithRetries(() => mockFetch(), 1); + await vi.runAllTimersAsync(); + const response = await responsePromise; + + expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), expect.any(Number)); + const actualDelay = setTimeoutSpy.mock.calls[0][1]; + expect(actualDelay).toBeGreaterThan(expectedDelayMin); + expect(actualDelay).toBeLessThan(expectedDelayMax); + expect(response.status).toBe(200); + }); + }); + + it("should apply correct exponential backoff with jitter", async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as any; + }); + + mockFetch.mockResolvedValue(new Response("", { status: 500 })); + const maxRetries = 3; + const expectedDelays = [1000, 2000, 4000]; + + const responsePromise = requestWithRetries(() => mockFetch(), maxRetries); + await vi.runAllTimersAsync(); + await responsePromise; + + expect(setTimeoutSpy).toHaveBeenCalledTimes(expectedDelays.length); + + expectedDelays.forEach((delay, index) => { + expect(setTimeoutSpy).toHaveBeenNthCalledWith(index + 1, expect.any(Function), delay); + }); + + expect(mockFetch).toHaveBeenCalledTimes(maxRetries + 1); + }); + + it("should handle concurrent retries independently", async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as any; + }); + + mockFetch + .mockResolvedValueOnce(new Response("", { status: 500 })) + .mockResolvedValueOnce(new Response("", { status: 500 })) + .mockResolvedValueOnce(new 
Response("", { status: 200 })) + .mockResolvedValueOnce(new Response("", { status: 200 })); + + const promise1 = requestWithRetries(() => mockFetch(), 1); + const promise2 = requestWithRetries(() => mockFetch(), 1); + + await vi.runAllTimersAsync(); + const [response1, response2] = await Promise.all([promise1, promise2]); + + expect(response1.status).toBe(200); + expect(response2.status).toBe(200); + }); + + it("should cap delay at MAX_RETRY_DELAY for large header values", async () => { + setTimeoutSpy = vi.spyOn(global, "setTimeout").mockImplementation((callback: (args: void) => void) => { + process.nextTick(callback); + return null as any; + }); + + mockFetch + .mockResolvedValueOnce( + new Response("", { + status: 429, + headers: new Headers({ "retry-after": "120" }), // 120 seconds = 120000ms > MAX_RETRY_DELAY (60000ms) + }), + ) + .mockResolvedValueOnce(new Response("", { status: 200 })); + + const responsePromise = requestWithRetries(() => mockFetch(), 1); + await vi.runAllTimersAsync(); + const response = await responsePromise; + + expect(setTimeoutSpy).toHaveBeenCalledWith(expect.any(Function), 60000); + expect(response.status).toBe(200); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/signals.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/signals.test.ts new file mode 100644 index 000000000000..d7b6d1e63caa --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/signals.test.ts @@ -0,0 +1,69 @@ +import { anySignal, getTimeoutSignal } from "../../../src/core/fetcher/signals"; + +describe("Test getTimeoutSignal", () => { + beforeEach(() => { + vi.useFakeTimers(); + }); + + afterEach(() => { + vi.useRealTimers(); + }); + + it("should return an object with signal and abortId", () => { + const { signal, abortId } = getTimeoutSignal(1000); + + expect(signal).toBeDefined(); + expect(abortId).toBeDefined(); + expect(signal).toBeInstanceOf(AbortSignal); + expect(signal.aborted).toBe(false); + }); + + it("should create a signal that aborts after the specified timeout", () => { + const timeoutMs = 5000; + const { signal } = getTimeoutSignal(timeoutMs); + + expect(signal.aborted).toBe(false); + + vi.advanceTimersByTime(timeoutMs - 1); + expect(signal.aborted).toBe(false); + + vi.advanceTimersByTime(1); + expect(signal.aborted).toBe(true); + }); +}); + +describe("Test anySignal", () => { + it("should return an AbortSignal", () => { + const signal = anySignal(new AbortController().signal); + expect(signal).toBeInstanceOf(AbortSignal); + }); + + it("should abort when any of the input signals is aborted", () => { + const controller1 = new AbortController(); + const controller2 = new AbortController(); + const signal = anySignal(controller1.signal, controller2.signal); + + expect(signal.aborted).toBe(false); + controller1.abort(); + expect(signal.aborted).toBe(true); + }); + + it("should handle an array of signals", () => { + const controller1 = new AbortController(); + const controller2 = new AbortController(); + const signal = anySignal([controller1.signal, controller2.signal]); + + expect(signal.aborted).toBe(false); + controller2.abort(); + expect(signal.aborted).toBe(true); + }); + + it("should abort immediately if one of the input signals is already aborted", () => { + const controller1 = new AbortController(); + const controller2 = new AbortController(); + controller1.abort(); + + const signal = anySignal(controller1.signal, controller2.signal); + expect(signal.aborted).toBe(true); + }); +}); diff --git 
a/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/test-file.txt b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/test-file.txt
new file mode 100644
index 000000000000..c66d471e359c
--- /dev/null
+++ b/seed/ts-sdk/ts-extra-properties/tests/unit/fetcher/test-file.txt
@@ -0,0 +1 @@
+This is a test file!
diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/logging/logger.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/logging/logger.test.ts
new file mode 100644
index 000000000000..2e0b5fe5040c
--- /dev/null
+++ b/seed/ts-sdk/ts-extra-properties/tests/unit/logging/logger.test.ts
@@ -0,0 +1,454 @@
+import { ConsoleLogger, createLogger, Logger, LogLevel } from "../../../src/core/logging/logger";
+
+function createMockLogger() {
+    return {
+        debug: vi.fn(),
+        info: vi.fn(),
+        warn: vi.fn(),
+        error: vi.fn(),
+    };
+}
+
+describe("Logger", () => {
+    describe("LogLevel", () => {
+        it("should have correct log levels", () => {
+            expect(LogLevel.Debug).toBe("debug");
+            expect(LogLevel.Info).toBe("info");
+            expect(LogLevel.Warn).toBe("warn");
+            expect(LogLevel.Error).toBe("error");
+        });
+    });
+
+    describe("ConsoleLogger", () => {
+        let consoleLogger: ConsoleLogger;
+        let consoleSpy: {
+            debug: ReturnType<typeof vi.spyOn>;
+            info: ReturnType<typeof vi.spyOn>;
+            warn: ReturnType<typeof vi.spyOn>;
+            error: ReturnType<typeof vi.spyOn>;
+        };
+
+        beforeEach(() => {
+            consoleLogger = new ConsoleLogger();
+            consoleSpy = {
+                debug: vi.spyOn(console, "debug").mockImplementation(() => {}),
+                info: vi.spyOn(console, "info").mockImplementation(() => {}),
+                warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
+                error: vi.spyOn(console, "error").mockImplementation(() => {}),
+            };
+        });
+
+        afterEach(() => {
+            consoleSpy.debug.mockRestore();
+            consoleSpy.info.mockRestore();
+            consoleSpy.warn.mockRestore();
+            consoleSpy.error.mockRestore();
+        });
+
+        it("should log debug messages", () => {
+            consoleLogger.debug("debug message", { data: "test" });
+            expect(consoleSpy.debug).toHaveBeenCalledWith("debug message", { data: "test" });
+        });
+
+        it("should log info messages", () => {
+            consoleLogger.info("info message", { data: "test" });
+            expect(consoleSpy.info).toHaveBeenCalledWith("info message", { data: "test" });
+        });
+
+        it("should log warn messages", () => {
+            consoleLogger.warn("warn message", { data: "test" });
+            expect(consoleSpy.warn).toHaveBeenCalledWith("warn message", { data: "test" });
+        });
+
+        it("should log error messages", () => {
+            consoleLogger.error("error message", { data: "test" });
+            expect(consoleSpy.error).toHaveBeenCalledWith("error message", { data: "test" });
+        });
+
+        it("should handle multiple arguments", () => {
+            consoleLogger.debug("message", "arg1", "arg2", { key: "value" });
+            expect(consoleSpy.debug).toHaveBeenCalledWith("message", "arg1", "arg2", { key: "value" });
+        });
+    });
+
+    describe("Logger with level filtering", () => {
+        let mockLogger: {
+            debug: ReturnType<typeof vi.fn>;
+            info: ReturnType<typeof vi.fn>;
+            warn: ReturnType<typeof vi.fn>;
+            error: ReturnType<typeof vi.fn>;
+        };
+
+        beforeEach(() => {
+            mockLogger = createMockLogger();
+        });
+
+        describe("Debug level", () => {
+            it("should log all levels when set to debug", () => {
+                const logger = new Logger({
+                    level: LogLevel.Debug,
+                    logger: mockLogger,
+                    silent: false,
+                });
+
+                logger.debug("debug");
+                logger.info("info");
+                logger.warn("warn");
+                logger.error("error");
+
+                expect(mockLogger.debug).toHaveBeenCalledWith("debug");
+                expect(mockLogger.info).toHaveBeenCalledWith("info");
+                expect(mockLogger.warn).toHaveBeenCalledWith("warn");
+                expect(mockLogger.error).toHaveBeenCalledWith("error");
+            });
+
+            it("should 
report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(true); + expect(logger.isInfo()).toBe(true); + expect(logger.isWarn()).toBe(true); + expect(logger.isError()).toBe(true); + }); + }); + + describe("Info level", () => { + it("should log info, warn, and error when set to info", () => { + const logger = new Logger({ + level: LogLevel.Info, + logger: mockLogger, + silent: false, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).toHaveBeenCalledWith("info"); + expect(mockLogger.warn).toHaveBeenCalledWith("warn"); + expect(mockLogger.error).toHaveBeenCalledWith("error"); + }); + + it("should report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Info, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(false); + expect(logger.isInfo()).toBe(true); + expect(logger.isWarn()).toBe(true); + expect(logger.isError()).toBe(true); + }); + }); + + describe("Warn level", () => { + it("should log warn and error when set to warn", () => { + const logger = new Logger({ + level: LogLevel.Warn, + logger: mockLogger, + silent: false, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).toHaveBeenCalledWith("warn"); + expect(mockLogger.error).toHaveBeenCalledWith("error"); + }); + + it("should report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Warn, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(false); + expect(logger.isInfo()).toBe(false); + expect(logger.isWarn()).toBe(true); + expect(logger.isError()).toBe(true); + }); + }); + + describe("Error level", () => { + it("should only log error when set to error", () => { + const logger = new Logger({ + level: LogLevel.Error, + logger: mockLogger, + silent: false, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).not.toHaveBeenCalled(); + expect(mockLogger.error).toHaveBeenCalledWith("error"); + }); + + it("should report correct level checks", () => { + const logger = new Logger({ + level: LogLevel.Error, + logger: mockLogger, + silent: false, + }); + + expect(logger.isDebug()).toBe(false); + expect(logger.isInfo()).toBe(false); + expect(logger.isWarn()).toBe(false); + expect(logger.isError()).toBe(true); + }); + }); + + describe("Silent mode", () => { + it("should not log anything when silent is true", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: true, + }); + + logger.debug("debug"); + logger.info("info"); + logger.warn("warn"); + logger.error("error"); + + expect(mockLogger.debug).not.toHaveBeenCalled(); + expect(mockLogger.info).not.toHaveBeenCalled(); + expect(mockLogger.warn).not.toHaveBeenCalled(); + expect(mockLogger.error).not.toHaveBeenCalled(); + }); + + it("should report all level checks as false when silent", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: true, + }); + + expect(logger.isDebug()).toBe(false); + 
expect(logger.isInfo()).toBe(false); + expect(logger.isWarn()).toBe(false); + expect(logger.isError()).toBe(false); + }); + }); + + describe("shouldLog", () => { + it("should correctly determine if level should be logged", () => { + const logger = new Logger({ + level: LogLevel.Info, + logger: mockLogger, + silent: false, + }); + + expect(logger.shouldLog(LogLevel.Debug)).toBe(false); + expect(logger.shouldLog(LogLevel.Info)).toBe(true); + expect(logger.shouldLog(LogLevel.Warn)).toBe(true); + expect(logger.shouldLog(LogLevel.Error)).toBe(true); + }); + + it("should return false for all levels when silent", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: true, + }); + + expect(logger.shouldLog(LogLevel.Debug)).toBe(false); + expect(logger.shouldLog(LogLevel.Info)).toBe(false); + expect(logger.shouldLog(LogLevel.Warn)).toBe(false); + expect(logger.shouldLog(LogLevel.Error)).toBe(false); + }); + }); + + describe("Multiple arguments", () => { + it("should pass multiple arguments to logger", () => { + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug("message", "arg1", { key: "value" }, 123); + expect(mockLogger.debug).toHaveBeenCalledWith("message", "arg1", { key: "value" }, 123); + }); + }); + }); + + describe("createLogger", () => { + it("should return default logger when no config provided", () => { + const logger = createLogger(); + expect(logger).toBeInstanceOf(Logger); + }); + + it("should return same logger instance when Logger is passed", () => { + const customLogger = new Logger({ + level: LogLevel.Debug, + logger: new ConsoleLogger(), + silent: false, + }); + + const result = createLogger(customLogger); + expect(result).toBe(customLogger); + }); + + it("should create logger with custom config", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + level: LogLevel.Warn, + logger: mockLogger, + silent: false, + }); + + expect(logger).toBeInstanceOf(Logger); + logger.warn("test"); + expect(mockLogger.warn).toHaveBeenCalledWith("test"); + }); + + it("should use default values for missing config", () => { + const logger = createLogger({}); + expect(logger).toBeInstanceOf(Logger); + }); + + it("should override default level", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug("test"); + expect(mockLogger.debug).toHaveBeenCalledWith("test"); + }); + + it("should override default silent mode", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + logger: mockLogger, + silent: false, + }); + + logger.info("test"); + expect(mockLogger.info).toHaveBeenCalledWith("test"); + }); + + it("should use provided logger implementation", () => { + const customLogger = createMockLogger(); + + const logger = createLogger({ + logger: customLogger, + level: LogLevel.Debug, + silent: false, + }); + + logger.debug("test"); + expect(customLogger.debug).toHaveBeenCalledWith("test"); + }); + + it("should default to silent: true", () => { + const mockLogger = createMockLogger(); + + const logger = createLogger({ + logger: mockLogger, + level: LogLevel.Debug, + }); + + logger.debug("test"); + expect(mockLogger.debug).not.toHaveBeenCalled(); + }); + }); + + describe("Default logger", () => { + it("should have silent: true by default", () => { + const logger = createLogger(); + expect(logger.shouldLog(LogLevel.Info)).toBe(false); 
+ }); + + it("should not log when using default logger", () => { + const logger = createLogger(); + + logger.info("test"); + expect(logger.isInfo()).toBe(false); + }); + }); + + describe("Edge cases", () => { + it("should handle empty message", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug(""); + expect(mockLogger.debug).toHaveBeenCalledWith(""); + }); + + it("should handle no arguments", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + logger.debug("message"); + expect(mockLogger.debug).toHaveBeenCalledWith("message"); + }); + + it("should handle complex objects", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Debug, + logger: mockLogger, + silent: false, + }); + + const complexObject = { + nested: { key: "value" }, + array: [1, 2, 3], + fn: () => "test", + }; + + logger.debug("message", complexObject); + expect(mockLogger.debug).toHaveBeenCalledWith("message", complexObject); + }); + + it("should handle errors as arguments", () => { + const mockLogger = createMockLogger(); + + const logger = new Logger({ + level: LogLevel.Error, + logger: mockLogger, + silent: false, + }); + + const error = new Error("Test error"); + logger.error("Error occurred", error); + expect(mockLogger.error).toHaveBeenCalledWith("Error occurred", error); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/bigint/bigint.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/bigint/bigint.test.ts new file mode 100644 index 000000000000..498f143c7283 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/bigint/bigint.test.ts @@ -0,0 +1,46 @@ +import { bigint } from "../../../../src/core/schemas/builders/bigint"; +import { itJson, itParse, itSchema } from "../utils/itSchema"; +import { itValidateJson, itValidateParse } from "../utils/itValidate"; + +describe("bigint", () => { + itSchema("converts between raw bigint and parsed bigint", bigint(), { + raw: BigInt("9007199254740992"), + parsed: BigInt("9007199254740992"), + }); + + itParse("converts between raw number and parsed bigint", bigint(), { + raw: 10, + parsed: BigInt("10"), + }); + + itParse("converts between raw number and parsed bigint", bigint(), { + raw: BigInt("10"), + parsed: BigInt("10"), + }); + + itJson("converts raw bigint to parsed bigint", bigint(), { + parsed: BigInt("10"), + raw: BigInt("10"), + }); + + itValidateParse("string", bigint(), "42", [ + { + message: 'Expected bigint | number. Received "42".', + path: [], + }, + ]); + + itValidateJson("number", bigint(), 42, [ + { + message: "Expected bigint. Received 42.", + path: [], + }, + ]); + + itValidateJson("string", bigint(), "42", [ + { + message: 'Expected bigint. 
Received "42".', + path: [], + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/date/date.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/date/date.test.ts new file mode 100644 index 000000000000..2790268a09c6 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/date/date.test.ts @@ -0,0 +1,31 @@ +import { date } from "../../../../src/core/schemas/builders/date"; +import { itSchema } from "../utils/itSchema"; +import { itValidateJson, itValidateParse } from "../utils/itValidate"; + +describe("date", () => { + itSchema("converts between raw ISO string and parsed Date", date(), { + raw: "2022-09-29T05:41:21.939Z", + parsed: new Date("2022-09-29T05:41:21.939Z"), + }); + + itValidateParse("non-string", date(), 42, [ + { + message: "Expected string. Received 42.", + path: [], + }, + ]); + + itValidateParse("non-ISO", date(), "hello world", [ + { + message: 'Expected ISO 8601 date string. Received "hello world".', + path: [], + }, + ]); + + itValidateJson("non-Date", date(), "hello", [ + { + message: 'Expected Date object. Received "hello".', + path: [], + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/enum/enum.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/enum/enum.test.ts new file mode 100644 index 000000000000..d1707325b29b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/enum/enum.test.ts @@ -0,0 +1,30 @@ +import { enum_ } from "../../../../src/core/schemas/builders/enum"; +import { itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("enum", () => { + itSchemaIdentity(enum_(["A", "B", "C"]), "A"); + + itSchemaIdentity(enum_(["A", "B", "C"]), "D" as any, { + opts: { allowUnrecognizedEnumValues: true }, + }); + + itValidate("invalid enum", enum_(["A", "B", "C"]), "D", [ + { + message: 'Expected enum. Received "D".', + path: [], + }, + ]); + + itValidate( + "non-string", + enum_(["A", "B", "C"]), + [], + [ + { + message: "Expected string. 
Received list.", + path: [], + }, + ], + ); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/lazy.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/lazy.test.ts new file mode 100644 index 000000000000..a82ace4a08c0 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/lazy.test.ts @@ -0,0 +1,57 @@ +import { lazy, list, object, string } from "../../../../src/core/schemas/builders"; +import type { Schema } from "../../../../src/core/schemas/Schema"; +import { itSchemaIdentity } from "../utils/itSchema"; + +describe("lazy", () => { + it("doesn't run immediately", () => { + let wasRun = false; + lazy(() => { + wasRun = true; + return string(); + }); + expect(wasRun).toBe(false); + }); + + it("only runs first time", async () => { + let count = 0; + const schema = lazy(() => { + count++; + return string(); + }); + await schema.parse("hello"); + await schema.json("world"); + expect(count).toBe(1); + }); + + itSchemaIdentity( + lazy(() => object({})), + { foo: "hello" }, + { + title: "passes opts through", + opts: { unrecognizedObjectKeys: "passthrough" }, + }, + ); + + itSchemaIdentity( + lazy(() => object({ foo: string() })), + { foo: "hello" }, + ); + + // eslint-disable-next-line vi/expect-expect + it("self-referencial schema doesn't compile", () => { + () => { + // @ts-expect-error + const a = lazy(() => object({ foo: a })); + }; + }); + + // eslint-disable-next-line vi/expect-expect + it("self-referencial compiles with explicit type", () => { + () => { + interface TreeNode { + children: TreeNode[]; + } + const TreeNode: Schema = lazy(() => object({ children: list(TreeNode) })); + }; + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/lazyObject.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/lazyObject.test.ts new file mode 100644 index 000000000000..9b443671a71f --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/lazyObject.test.ts @@ -0,0 +1,18 @@ +import { lazyObject, number, object, string } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; + +describe("lazy", () => { + itSchemaIdentity( + lazyObject(() => object({ foo: string() })), + { foo: "hello" }, + ); + + itSchemaIdentity( + lazyObject(() => object({ foo: string() })).extend(object({ bar: number() })), + { + foo: "hello", + bar: 42, + }, + { title: "returned schema has object utils" }, + ); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/recursive/a.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/recursive/a.ts new file mode 100644 index 000000000000..8b7d5e40cfaf --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/recursive/a.ts @@ -0,0 +1,7 @@ +import { object } from "../../../../../src/core/schemas/builders/object"; +import { schemaB } from "./b"; + +// @ts-expect-error +export const schemaA = object({ + b: schemaB, +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/recursive/b.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/recursive/b.ts new file mode 100644 index 000000000000..fb219d54c8e5 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/lazy/recursive/b.ts @@ -0,0 +1,8 @@ +import { object } from "../../../../../src/core/schemas/builders/object"; +import { optional } from "../../../../../src/core/schemas/builders/schema-utils"; +import { schemaA } from "./a"; + +// @ts-expect-error +export const schemaB = object({ + a: 
optional(schemaA), +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/list/list.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/list/list.test.ts new file mode 100644 index 000000000000..108789b7317d --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/list/list.test.ts @@ -0,0 +1,41 @@ +import { list, object, property, string } from "../../../../src/core/schemas/builders"; +import { itSchema, itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("list", () => { + itSchemaIdentity(list(string()), ["hello", "world"], { + title: "functions as identity when item type is primitive", + }); + + itSchema( + "converts objects correctly", + list( + object({ + helloWorld: property("hello_world", string()), + }), + ), + { + raw: [{ hello_world: "123" }], + parsed: [{ helloWorld: "123" }], + }, + ); + + itValidate("not a list", list(string()), 42, [ + { + path: [], + message: "Expected list. Received 42.", + }, + ]); + + itValidate( + "invalid item type", + list(string()), + [42], + [ + { + path: ["[0]"], + message: "Expected string. Received 42.", + }, + ], + ); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/literals/stringLiteral.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/literals/stringLiteral.test.ts new file mode 100644 index 000000000000..fa6c88873c61 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/literals/stringLiteral.test.ts @@ -0,0 +1,21 @@ +import { stringLiteral } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("stringLiteral", () => { + itSchemaIdentity(stringLiteral("A"), "A"); + + itValidate("incorrect string", stringLiteral("A"), "B", [ + { + path: [], + message: 'Expected "A". Received "B".', + }, + ]); + + itValidate("non-string", stringLiteral("A"), 42, [ + { + path: [], + message: 'Expected "A". 
Received 42.', + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object-like/withParsedProperties.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object-like/withParsedProperties.test.ts new file mode 100644 index 000000000000..b18bc9d3e5df --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object-like/withParsedProperties.test.ts @@ -0,0 +1,57 @@ +import { object, property, string, stringLiteral } from "../../../../src/core/schemas/builders"; + +describe("withParsedProperties", () => { + it("Added properties included on parsed object", async () => { + const schema = object({ + foo: property("raw_foo", string()), + bar: stringLiteral("bar"), + }).withParsedProperties({ + printFoo: (parsed) => () => parsed.foo, + printHelloWorld: () => () => "Hello world", + helloWorld: "Hello world", + }); + + const parsed = await schema.parse({ raw_foo: "value of foo", bar: "bar" }); + if (!parsed.ok) { + throw new Error("Failed to parse"); + } + expect(parsed.value.printFoo()).toBe("value of foo"); + expect(parsed.value.printHelloWorld()).toBe("Hello world"); + expect(parsed.value.helloWorld).toBe("Hello world"); + }); + + it("Added property is removed on raw object", async () => { + const schema = object({ + foo: property("raw_foo", string()), + bar: stringLiteral("bar"), + }).withParsedProperties({ + printFoo: (parsed) => () => parsed.foo, + }); + + const original = { raw_foo: "value of foo", bar: "bar" } as const; + const parsed = await schema.parse(original); + if (!parsed.ok) { + throw new Error("Failed to parse()"); + } + + const raw = await schema.json(parsed.value); + + if (!raw.ok) { + throw new Error("Failed to json()"); + } + + expect(raw.value).toEqual(original); + }); + + describe("compile", () => { + // eslint-disable-next-line vi/expect-expect + it("doesn't compile with non-object schema", () => { + () => + object({ + foo: string(), + }) + // @ts-expect-error + .withParsedProperties(42); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/extend.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/extend.test.ts new file mode 100644 index 000000000000..b6c2920f4d3e --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/extend.test.ts @@ -0,0 +1,89 @@ +import { boolean, object, property, string, stringLiteral } from "../../../../src/core/schemas/builders"; +import { itSchema, itSchemaIdentity } from "../utils/itSchema"; + +describe("extend", () => { + itSchemaIdentity( + object({ + foo: string(), + }).extend( + object({ + bar: stringLiteral("bar"), + }), + ), + { + foo: "", + bar: "bar", + } as const, + { + title: "extended properties are included in schema", + }, + ); + + itSchemaIdentity( + object({ + foo: string(), + }) + .extend( + object({ + bar: stringLiteral("bar"), + }), + ) + .extend( + object({ + baz: boolean(), + }), + ), + { + foo: "", + bar: "bar", + baz: true, + } as const, + { + title: "extensions can be extended", + }, + ); + + itSchema( + "converts nested object", + object({ + item: object({ + helloWorld: property("hello_world", string()), + }), + }).extend( + object({ + goodbye: property("goodbye_raw", string()), + }), + ), + { + raw: { item: { hello_world: "yo" }, goodbye_raw: "peace" }, + parsed: { item: { helloWorld: "yo" }, goodbye: "peace" }, + }, + ); + + itSchema( + "extensions work with raw/parsed property name conversions", + object({ + item: property("item_raw", string()), + }).extend( + object({ + goodbye: 
property("goodbye_raw", string()), + }), + ), + { + raw: { item_raw: "hi", goodbye_raw: "peace" }, + parsed: { item: "hi", goodbye: "peace" }, + }, + ); + + describe("compile", () => { + // eslint-disable-next-line vi/expect-expect + it("doesn't compile with non-object schema", () => { + () => + object({ + foo: string(), + }) + // @ts-expect-error + .extend([]); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/object.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/object.test.ts new file mode 100644 index 000000000000..a8d9fe0a1359 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/object.test.ts @@ -0,0 +1,255 @@ +import { any, number, object, property, string, stringLiteral, unknown } from "../../../../src/core/schemas/builders"; +import { itJson, itParse, itSchema, itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("object", () => { + itSchemaIdentity( + object({ + foo: string(), + bar: stringLiteral("bar"), + }), + { + foo: "", + bar: "bar", + }, + { + title: "functions as identity when values are primitives and property() isn't used", + }, + ); + + itSchema( + "uses raw key from property()", + object({ + foo: property("raw_foo", string()), + bar: stringLiteral("bar"), + }), + { + raw: { raw_foo: "foo", bar: "bar" }, + parsed: { foo: "foo", bar: "bar" }, + }, + ); + + itSchema( + "keys with unknown type can be omitted", + object({ + foo: unknown(), + }), + { + raw: {}, + parsed: {}, + }, + ); + + itSchema( + "keys with any type can be omitted", + object({ + foo: any(), + }), + { + raw: {}, + parsed: {}, + }, + ); + + describe("unrecognizedObjectKeys", () => { + describe("parse", () => { + itParse( + 'includes unknown values when unrecognizedObjectKeys === "passthrough"', + object({ + foo: property("raw_foo", string()), + bar: stringLiteral("bar"), + }), + { + raw: { + raw_foo: "foo", + bar: "bar", + // @ts-expect-error + baz: "yoyo", + }, + parsed: { + foo: "foo", + bar: "bar", + // @ts-expect-error + baz: "yoyo", + }, + opts: { + unrecognizedObjectKeys: "passthrough", + }, + }, + ); + + itParse( + 'strips unknown values when unrecognizedObjectKeys === "strip"', + object({ + foo: property("raw_foo", string()), + bar: stringLiteral("bar"), + }), + { + raw: { + raw_foo: "foo", + bar: "bar", + // @ts-expect-error + baz: "yoyo", + }, + parsed: { + foo: "foo", + bar: "bar", + }, + opts: { + unrecognizedObjectKeys: "strip", + }, + }, + ); + }); + + describe("json", () => { + itJson( + 'includes unknown values when unrecognizedObjectKeys === "passthrough"', + object({ + foo: property("raw_foo", string()), + bar: stringLiteral("bar"), + }), + { + raw: { + raw_foo: "foo", + bar: "bar", + // @ts-expect-error + baz: "yoyo", + }, + parsed: { + foo: "foo", + bar: "bar", + // @ts-expect-error + baz: "yoyo", + }, + opts: { + unrecognizedObjectKeys: "passthrough", + }, + }, + ); + + itJson( + 'strips unknown values when unrecognizedObjectKeys === "strip"', + object({ + foo: property("raw_foo", string()), + bar: stringLiteral("bar"), + }), + { + raw: { + raw_foo: "foo", + bar: "bar", + }, + parsed: { + foo: "foo", + bar: "bar", + // @ts-expect-error + baz: "yoyo", + }, + opts: { + unrecognizedObjectKeys: "strip", + }, + }, + ); + }); + }); + + describe("nullish properties", () => { + itSchema("missing properties are not added", object({ foo: property("raw_foo", string().optional()) }), { + raw: {}, + parsed: {}, + }); + + itSchema("undefined properties are not 
dropped", object({ foo: property("raw_foo", string().optional()) }), { + raw: { raw_foo: null }, + parsed: { foo: undefined }, + }); + + itSchema("null properties are not dropped", object({ foo: property("raw_foo", string().optional()) }), { + raw: { raw_foo: null }, + parsed: { foo: undefined }, + }); + + describe("extensions", () => { + itSchema( + "undefined properties are not dropped", + object({}).extend(object({ foo: property("raw_foo", string().optional()) })), + { + raw: { raw_foo: null }, + parsed: { foo: undefined }, + }, + ); + + describe("parse()", () => { + itParse( + "null properties are not dropped", + object({}).extend(object({ foo: property("raw_foo", string().optional()) })), + { + raw: { raw_foo: null }, + parsed: { foo: undefined }, + }, + ); + }); + }); + }); + + itValidate( + "missing property", + object({ + foo: string(), + bar: stringLiteral("bar"), + }), + { foo: "hello" }, + [ + { + path: [], + message: 'Missing required key "bar"', + }, + ], + ); + + itValidate( + "extra property", + object({ + foo: string(), + bar: stringLiteral("bar"), + }), + { foo: "hello", bar: "bar", baz: 42 }, + [ + { + path: ["baz"], + message: 'Unexpected key "baz"', + }, + ], + ); + + itValidate( + "not an object", + object({ + foo: string(), + bar: stringLiteral("bar"), + }), + [], + [ + { + path: [], + message: "Expected object. Received list.", + }, + ], + ); + + itValidate( + "nested validation error", + object({ + foo: object({ + bar: number(), + }), + }), + { foo: { bar: "hello" } }, + [ + { + path: ["foo", "bar"], + message: 'Expected number. Received "hello".', + }, + ], + ); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/objectWithoutOptionalProperties.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/objectWithoutOptionalProperties.test.ts new file mode 100644 index 000000000000..efcd83afae79 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/objectWithoutOptionalProperties.test.ts @@ -0,0 +1,21 @@ +import { objectWithoutOptionalProperties, string, stringLiteral } from "../../../../src/core/schemas/builders"; +import { itSchema } from "../utils/itSchema"; + +describe("objectWithoutOptionalProperties", () => { + itSchema( + "all properties are required", + objectWithoutOptionalProperties({ + foo: string(), + bar: stringLiteral("bar").optional(), + }), + { + raw: { + foo: "hello", + }, + // @ts-expect-error + parsed: { + foo: "hello", + }, + }, + ); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/passthrough.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/passthrough.test.ts new file mode 100644 index 000000000000..c8770fca17dc --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/object/passthrough.test.ts @@ -0,0 +1,87 @@ +import { object, string, stringLiteral } from "../../../../src/core/schemas/builders"; +import { itJson, itParse, itSchema } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("passthrough", () => { + const baseSchema = object({ + foo: string(), + bar: stringLiteral("bar"), + }); + + describe("parse", () => { + itParse("includes unknown values", baseSchema.passthrough(), { + raw: { + foo: "hello", + bar: "bar", + baz: "extra", + }, + parsed: { + foo: "hello", + bar: "bar", + baz: "extra", + }, + }); + + itValidate( + "preserves schema validation", + baseSchema.passthrough(), + { + foo: 123, + bar: "bar", + baz: "extra", + }, + [ + { + path: ["foo"], + message: "Expected string. 
Received 123.", + }, + ], + ); + }); + + describe("json", () => { + itJson("includes unknown values", baseSchema.passthrough(), { + raw: { + foo: "hello", + bar: "bar", + + baz: "extra", + }, + parsed: { + foo: "hello", + bar: "bar", + + baz: "extra", + }, + }); + + itValidate( + "preserves schema validation", + baseSchema.passthrough(), + { + foo: "hello", + bar: "wrong", + baz: "extra", + }, + [ + { + path: ["bar"], + message: 'Expected "bar". Received "wrong".', + }, + ], + ); + }); + + itSchema("preserves schema validation in both directions", baseSchema.passthrough(), { + raw: { + foo: "hello", + bar: "bar", + extra: 42, + }, + parsed: { + foo: "hello", + bar: "bar", + extra: 42, + }, + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/any.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/any.test.ts new file mode 100644 index 000000000000..1adbbe2a8380 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/any.test.ts @@ -0,0 +1,6 @@ +import { any } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; + +describe("any", () => { + itSchemaIdentity(any(), true); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/boolean.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/boolean.test.ts new file mode 100644 index 000000000000..897a8295dca7 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/boolean.test.ts @@ -0,0 +1,14 @@ +import { boolean } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("boolean", () => { + itSchemaIdentity(boolean(), true); + + itValidate("non-boolean", boolean(), {}, [ + { + path: [], + message: "Expected boolean. 
Received object.", + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/never.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/never.test.ts new file mode 100644 index 000000000000..1d18eba052ab --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/never.test.ts @@ -0,0 +1,54 @@ +import { never } from "../../../../src/core/schemas/builders"; + +describe("never", () => { + it("always fails to parse", () => { + const schema = never(); + const result = schema.parse("test"); + expect(result.ok).toBe(false); + if (!result.ok) { + expect(result.errors).toHaveLength(1); + expect(result.errors[0]?.message).toBe("Expected never"); + } + }); + + it("always fails to json", () => { + const schema = never(); + const result = schema.json("test"); + expect(result.ok).toBe(false); + if (!result.ok) { + expect(result.errors).toHaveLength(1); + expect(result.errors[0]?.message).toBe("Expected never"); + } + }); + + it("fails with any value including undefined", () => { + const schema = never(); + expect(schema.parse(undefined).ok).toBe(false); + expect(schema.parse(null).ok).toBe(false); + expect(schema.parse(0).ok).toBe(false); + expect(schema.parse("").ok).toBe(false); + expect(schema.parse({}).ok).toBe(false); + expect(schema.parse([]).ok).toBe(false); + }); + + it("works when called without options parameter", () => { + const schema = never(); + // This tests that the default = {} parameter works correctly + const result = schema.parse("test"); + expect(result.ok).toBe(false); + if (!result.ok) { + expect(result.errors).toHaveLength(1); + expect(result.errors[0]?.message).toBe("Expected never"); + expect(result.errors[0]?.path).toEqual([]); + } + }); + + it("succeeds with skipValidation", () => { + const schema = never(); + const result = schema.parse("test", { skipValidation: true }); + expect(result.ok).toBe(true); + if (result.ok) { + expect(result.value).toBe("test"); + } + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/number.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/number.test.ts new file mode 100644 index 000000000000..2d01415a60ba --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/number.test.ts @@ -0,0 +1,14 @@ +import { number } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("number", () => { + itSchemaIdentity(number(), 42); + + itValidate("non-number", number(), "hello", [ + { + path: [], + message: 'Expected number. Received "hello".', + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/string.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/string.test.ts new file mode 100644 index 000000000000..57b2368784ab --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/string.test.ts @@ -0,0 +1,14 @@ +import { string } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("string", () => { + itSchemaIdentity(string(), "hello"); + + itValidate("non-string", string(), 42, [ + { + path: [], + message: "Expected string. 
Received 42.", + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/unknown.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/unknown.test.ts new file mode 100644 index 000000000000..4d17a7dbd005 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/primitives/unknown.test.ts @@ -0,0 +1,6 @@ +import { unknown } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; + +describe("unknown", () => { + itSchemaIdentity(unknown(), true); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/record/record.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/record/record.test.ts new file mode 100644 index 000000000000..e07f3e7cb00d --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/record/record.test.ts @@ -0,0 +1,34 @@ +import { number, record, string } from "../../../../src/core/schemas/builders"; +import { itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("record", () => { + itSchemaIdentity(record(string(), string()), { hello: "world" }); + itSchemaIdentity(record(number(), string()), { 42: "world" }); + + itValidate( + "non-record", + record(number(), string()), + [], + [ + { + path: [], + message: "Expected object. Received list.", + }, + ], + ); + + itValidate("invalid key type", record(number(), string()), { hello: "world" }, [ + { + path: ["hello (key)"], + message: 'Expected number. Received "hello".', + }, + ]); + + itValidate("invalid value type", record(string(), number()), { hello: "world" }, [ + { + path: ["hello"], + message: 'Expected number. Received "world".', + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/schema-utils/getSchemaUtils.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/schema-utils/getSchemaUtils.test.ts new file mode 100644 index 000000000000..822c3ca4e5a4 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/schema-utils/getSchemaUtils.test.ts @@ -0,0 +1,83 @@ +import { object, string } from "../../../../src/core/schemas/builders"; +import { itSchema } from "../utils/itSchema"; + +describe("getSchemaUtils", () => { + describe("optional()", () => { + itSchema("optional fields allow original schema", string().optional(), { + raw: "hello", + parsed: "hello", + }); + + itSchema("optional fields are not required", string().optional(), { + raw: null, + parsed: undefined, + }); + }); + + describe("transform()", () => { + itSchema( + "transform and untransform run correctly", + string().transform({ + transform: (x) => `${x}X`, + untransform: (x) => (x as string).slice(0, -1), + }), + { + raw: "hello", + parsed: "helloX", + }, + ); + }); + + describe("parseOrThrow()", () => { + it("parses valid value", async () => { + const value = string().parseOrThrow("hello"); + expect(value).toBe("hello"); + }); + + it("throws on invalid value", async () => { + const value = () => object({ a: string(), b: string() }).parseOrThrow({ a: 24 }); + expect(value).toThrowError('a: Expected string. Received 24.; Missing required key "b"'); + }); + }); + + describe("jsonOrThrow()", () => { + it("serializes valid value", async () => { + const value = string().jsonOrThrow("hello"); + expect(value).toBe("hello"); + }); + + it("throws on invalid value", async () => { + const value = () => object({ a: string(), b: string() }).jsonOrThrow({ a: 24 }); + expect(value).toThrowError('a: Expected string. 
Received 24.; Missing required key "b"'); + }); + }); + + describe("omitUndefined", () => { + it("serializes undefined as null", async () => { + const value = object({ + a: string().optional(), + b: string().optional(), + }).jsonOrThrow({ + a: "hello", + b: undefined, + }); + expect(value).toEqual({ a: "hello", b: null }); + }); + + it("omits undefined values", async () => { + const value = object({ + a: string().optional(), + b: string().optional(), + }).jsonOrThrow( + { + a: "hello", + b: undefined, + }, + { + omitUndefined: true, + }, + ); + expect(value).toEqual({ a: "hello" }); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/schema.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/schema.test.ts new file mode 100644 index 000000000000..13842ff40157 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/schema.test.ts @@ -0,0 +1,78 @@ +import { + boolean, + discriminant, + list, + number, + object, + string, + stringLiteral, + union, +} from "../../../src/core/schemas/builders"; +import { booleanLiteral } from "../../../src/core/schemas/builders/literals/booleanLiteral"; +import { property } from "../../../src/core/schemas/builders/object/property"; +import { itSchema } from "./utils/itSchema"; + +describe("Schema", () => { + itSchema( + "large nested object", + object({ + a: string(), + b: stringLiteral("b value"), + c: property( + "raw_c", + list( + object({ + animal: union(discriminant("type", "_type"), { + dog: object({ value: boolean() }), + cat: object({ value: property("raw_cat", number()) }), + }), + }), + ), + ), + d: property("raw_d", boolean()), + e: booleanLiteral(true), + }), + { + raw: { + a: "hello", + b: "b value", + raw_c: [ + { + animal: { + _type: "dog", + value: true, + }, + }, + { + animal: { + _type: "cat", + raw_cat: 42, + }, + }, + ], + raw_d: false, + e: true, + }, + parsed: { + a: "hello", + b: "b value", + c: [ + { + animal: { + type: "dog", + value: true, + }, + }, + { + animal: { + type: "cat", + value: 42, + }, + }, + ], + d: false, + e: true, + }, + }, + ); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/set/set.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/set/set.test.ts new file mode 100644 index 000000000000..53a1652c8bbb --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/set/set.test.ts @@ -0,0 +1,48 @@ +import { set, string } from "../../../../src/core/schemas/builders"; +import { itSchema } from "../utils/itSchema"; +import { itValidateJson, itValidateParse } from "../utils/itValidate"; + +describe("set", () => { + itSchema("converts between raw list and parsed Set", set(string()), { + raw: ["A", "B"], + parsed: new Set(["A", "B"]), + }); + + itValidateParse("not a list", set(string()), 42, [ + { + path: [], + message: "Expected list. Received 42.", + }, + ]); + + itValidateJson( + "not a Set", + set(string()), + [], + [ + { + path: [], + message: "Expected Set. Received list.", + }, + ], + ); + + itValidateParse( + "invalid item type", + set(string()), + [42], + [ + { + path: ["[0]"], + message: "Expected string. Received 42.", + }, + ], + ); + + itValidateJson("invalid item type", set(string()), new Set([42]), [ + { + path: ["[0]"], + message: "Expected string. 
Received 42.", + }, + ]); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/skipValidation.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/skipValidation.test.ts new file mode 100644 index 000000000000..3283555949ab --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/skipValidation.test.ts @@ -0,0 +1,44 @@ +/* eslint-disable no-console */ +import { boolean, number, object, property, string, undiscriminatedUnion } from "../../../src/core/schemas/builders"; + +describe("skipValidation", () => { + it("allows data that doesn't conform to the schema", async () => { + const warningLogs: string[] = []; + const originalConsoleWarn = console.warn; + console.warn = (...args) => warningLogs.push(args.join(" ")); + + const schema = object({ + camelCase: property("snake_case", string()), + numberProperty: number(), + requiredProperty: boolean(), + anyPrimitive: undiscriminatedUnion([string(), number(), boolean()]), + }); + + const parsed = await schema.parse( + { + snake_case: "hello", + numberProperty: "oops", + anyPrimitive: true, + }, + { + skipValidation: true, + }, + ); + + expect(parsed).toEqual({ + ok: true, + value: { + camelCase: "hello", + numberProperty: "oops", + anyPrimitive: true, + }, + }); + + expect(warningLogs).toEqual([ + `Failed to validate. + - numberProperty: Expected number. Received "oops".`, + ]); + + console.warn = originalConsoleWarn; + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/undiscriminated-union/undiscriminatedUnion.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/undiscriminated-union/undiscriminatedUnion.test.ts new file mode 100644 index 000000000000..01dcadbba37b --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/undiscriminated-union/undiscriminatedUnion.test.ts @@ -0,0 +1,44 @@ +import { number, object, property, string, undiscriminatedUnion } from "../../../../src/core/schemas/builders"; +import { itSchema, itSchemaIdentity } from "../utils/itSchema"; + +describe("undiscriminatedUnion", () => { + itSchemaIdentity(undiscriminatedUnion([string(), number()]), "hello world"); + + itSchemaIdentity(undiscriminatedUnion([object({ hello: string() }), object({ goodbye: string() })]), { + goodbye: "foo", + }); + + itSchema( + "Correctly transforms", + undiscriminatedUnion([object({ hello: string() }), object({ helloWorld: property("hello_world", string()) })]), + { + raw: { hello_world: "foo " }, + parsed: { helloWorld: "foo " }, + }, + ); + + it("Returns errors for all variants", async () => { + const result = await undiscriminatedUnion([string(), number()]).parse(true); + if (result.ok) { + throw new Error("Unexpectedly passed validation"); + } + expect(result.errors).toEqual([ + { + message: "[Variant 0] Expected string. Received true.", + path: [], + }, + { + message: "[Variant 1] Expected number. 
Received true.", + path: [], + }, + ]); + }); + + describe("compile", () => { + // eslint-disable-next-line vi/expect-expect + it("doesn't compile with zero members", () => { + // @ts-expect-error + () => undiscriminatedUnion([]); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/union/union.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/union/union.test.ts new file mode 100644 index 000000000000..1f5d7a8fad5c --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/union/union.test.ts @@ -0,0 +1,113 @@ +import { boolean, discriminant, number, object, string, union } from "../../../../src/core/schemas/builders"; +import { itSchema, itSchemaIdentity } from "../utils/itSchema"; +import { itValidate } from "../utils/itValidate"; + +describe("union", () => { + itSchemaIdentity( + union("type", { + lion: object({ + meows: boolean(), + }), + giraffe: object({ + heightInInches: number(), + }), + }), + { type: "lion", meows: true }, + { title: "doesn't transform discriminant when it's a string" }, + ); + + itSchema( + "transforms discriminant when it's a discriminant()", + union(discriminant("type", "_type"), { + lion: object({ meows: boolean() }), + giraffe: object({ heightInInches: number() }), + }), + { + raw: { _type: "lion", meows: true }, + parsed: { type: "lion", meows: true }, + }, + ); + + describe("allowUnrecognizedUnionMembers", () => { + itSchema( + "transforms discriminant & passes through values when discriminant value is unrecognized", + union(discriminant("type", "_type"), { + lion: object({ meows: boolean() }), + giraffe: object({ heightInInches: number() }), + }), + { + // @ts-expect-error + raw: { _type: "moose", isAMoose: true }, + // @ts-expect-error + parsed: { type: "moose", isAMoose: true }, + opts: { + allowUnrecognizedUnionMembers: true, + }, + }, + ); + }); + + describe("withParsedProperties", () => { + it("Added property is included on parsed object", async () => { + const schema = union("type", { + lion: object({}), + tiger: object({ value: string() }), + }).withParsedProperties({ + printType: (parsed) => () => parsed.type, + }); + + const parsed = await schema.parse({ type: "lion" }); + if (!parsed.ok) { + throw new Error("Failed to parse"); + } + expect(parsed.value.printType()).toBe("lion"); + }); + }); + + itValidate( + "non-object", + union("type", { + lion: object({}), + tiger: object({ value: string() }), + }), + [], + [ + { + path: [], + message: "Expected object. Received list.", + }, + ], + ); + + itValidate( + "missing discriminant", + union("type", { + lion: object({}), + tiger: object({ value: string() }), + }), + {}, + [ + { + path: [], + message: 'Missing discriminant ("type")', + }, + ], + ); + + itValidate( + "unrecognized discriminant value", + union("type", { + lion: object({}), + tiger: object({ value: string() }), + }), + { + type: "bear", + }, + [ + { + path: ["type"], + message: 'Expected enum. 
Received "bear".', + }, + ], + ); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/utils/itSchema.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/utils/itSchema.ts new file mode 100644 index 000000000000..25b13e643207 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/utils/itSchema.ts @@ -0,0 +1,78 @@ +/* eslint-disable vi/no-export */ +import type { Schema, SchemaOptions } from "../../../../src/core/schemas/Schema"; + +export function itSchemaIdentity( + schema: Schema, + value: T, + { title = "functions as identity", opts }: { title?: string; opts?: SchemaOptions } = {}, +): void { + itSchema(title, schema, { raw: value, parsed: value, opts }); +} + +export function itSchema( + title: string, + schema: Schema, + { + raw, + parsed, + opts, + only = false, + }: { + raw: Raw; + parsed: Parsed; + opts?: SchemaOptions; + only?: boolean; + }, +): void { + // eslint-disable-next-line vi/valid-title + (only ? describe.only : describe)(title, () => { + itParse("parse()", schema, { raw, parsed, opts }); + itJson("json()", schema, { raw, parsed, opts }); + }); +} + +export function itParse( + title: string, + schema: Schema, + { + raw, + parsed, + opts, + }: { + raw: Raw; + parsed: Parsed; + opts?: SchemaOptions; + }, +): void { + // eslint-disable-next-line vi/valid-title + it(title, () => { + const maybeValid = schema.parse(raw, opts); + if (!maybeValid.ok) { + throw new Error(`Failed to parse() ${JSON.stringify(maybeValid.errors, undefined, 4)}`); + } + expect(maybeValid.value).toStrictEqual(parsed); + }); +} + +export function itJson( + title: string, + schema: Schema, + { + raw, + parsed, + opts, + }: { + raw: Raw; + parsed: Parsed; + opts?: SchemaOptions; + }, +): void { + // eslint-disable-next-line vi/valid-title + it(title, () => { + const maybeValid = schema.json(parsed, opts); + if (!maybeValid.ok) { + throw new Error(`Failed to json() ${JSON.stringify(maybeValid.errors, undefined, 4)}`); + } + expect(maybeValid.value).toStrictEqual(raw); + }); +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/utils/itValidate.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/utils/itValidate.ts new file mode 100644 index 000000000000..60bc56c123cf --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/schemas/utils/itValidate.ts @@ -0,0 +1,56 @@ +/* eslint-disable vi/no-export */ +import type { Schema, SchemaOptions, ValidationError } from "../../../../src/core/schemas/Schema"; + +export function itValidate( + title: string, + schema: Schema, + input: unknown, + errors: ValidationError[], + opts?: SchemaOptions, +): void { + // eslint-disable-next-line vi/valid-title + describe("parse()", () => { + itValidateParse(title, schema, input, errors, opts); + }); + describe("json()", () => { + itValidateJson(title, schema, input, errors, opts); + }); +} + +export function itValidateParse( + title: string, + schema: Schema, + raw: unknown, + errors: ValidationError[], + opts?: SchemaOptions, +): void { + describe("parse", () => { + // eslint-disable-next-line vi/valid-title + it(title, async () => { + const maybeValid = await schema.parse(raw, opts); + if (maybeValid.ok) { + throw new Error("Value passed validation"); + } + expect(maybeValid.errors).toStrictEqual(errors); + }); + }); +} + +export function itValidateJson( + title: string, + schema: Schema, + parsed: unknown, + errors: ValidationError[], + opts?: SchemaOptions, +): void { + describe("json", () => { + // eslint-disable-next-line vi/valid-title + it(title, async () => { 
+ const maybeValid = await schema.json(parsed, opts); + if (maybeValid.ok) { + throw new Error("Value passed validation"); + } + expect(maybeValid.errors).toStrictEqual(errors); + }); + }); +} diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/url/join.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/url/join.test.ts new file mode 100644 index 000000000000..123488f084ea --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/url/join.test.ts @@ -0,0 +1,284 @@ +import { join } from "../../../src/core/url/index"; + +describe("join", () => { + interface TestCase { + description: string; + base: string; + segments: string[]; + expected: string; + } + + describe("basic functionality", () => { + const basicTests: TestCase[] = [ + { description: "should return empty string for empty base", base: "", segments: [], expected: "" }, + { + description: "should return empty string for empty base with path", + base: "", + segments: ["path"], + expected: "", + }, + { + description: "should handle single segment", + base: "base", + segments: ["segment"], + expected: "base/segment", + }, + { + description: "should handle single segment with trailing slash on base", + base: "base/", + segments: ["segment"], + expected: "base/segment", + }, + { + description: "should handle single segment with leading slash", + base: "base", + segments: ["/segment"], + expected: "base/segment", + }, + { + description: "should handle single segment with both slashes", + base: "base/", + segments: ["/segment"], + expected: "base/segment", + }, + { + description: "should handle multiple segments", + base: "base", + segments: ["path1", "path2", "path3"], + expected: "base/path1/path2/path3", + }, + { + description: "should handle multiple segments with slashes", + base: "base/", + segments: ["/path1/", "/path2/", "/path3/"], + expected: "base/path1/path2/path3/", + }, + ]; + + basicTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); + }); + }); + + describe("URL handling", () => { + const urlTests: TestCase[] = [ + { + description: "should handle absolute URLs", + base: "https://example.com", + segments: ["api", "v1"], + expected: "https://example.com/api/v1", + }, + { + description: "should handle absolute URLs with slashes", + base: "https://example.com/", + segments: ["/api/", "/v1/"], + expected: "https://example.com/api/v1/", + }, + { + description: "should handle absolute URLs with base path", + base: "https://example.com/base", + segments: ["api", "v1"], + expected: "https://example.com/base/api/v1", + }, + { + description: "should preserve URL query parameters", + base: "https://example.com?query=1", + segments: ["api"], + expected: "https://example.com/api?query=1", + }, + { + description: "should preserve URL fragments", + base: "https://example.com#fragment", + segments: ["api"], + expected: "https://example.com/api#fragment", + }, + { + description: "should preserve URL query and fragments", + base: "https://example.com?query=1#fragment", + segments: ["api"], + expected: "https://example.com/api?query=1#fragment", + }, + { + description: "should handle http protocol", + base: "http://example.com", + segments: ["api"], + expected: "http://example.com/api", + }, + { + description: "should handle ftp protocol", + base: "ftp://example.com", + segments: ["files"], + expected: "ftp://example.com/files", + }, + { + description: "should handle ws protocol", + base: "ws://example.com", + segments: ["socket"], + 
expected: "ws://example.com/socket", + }, + { + description: "should fallback to path joining for malformed URLs", + base: "not-a-url://", + segments: ["path"], + expected: "not-a-url:///path", + }, + ]; + + urlTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); + }); + }); + + describe("edge cases", () => { + const edgeCaseTests: TestCase[] = [ + { + description: "should handle empty segments", + base: "base", + segments: ["", "path"], + expected: "base/path", + }, + { + description: "should handle null segments", + base: "base", + segments: [null as any, "path"], + expected: "base/path", + }, + { + description: "should handle undefined segments", + base: "base", + segments: [undefined as any, "path"], + expected: "base/path", + }, + { + description: "should handle segments with only single slash", + base: "base", + segments: ["/", "path"], + expected: "base/path", + }, + { + description: "should handle segments with only double slash", + base: "base", + segments: ["//", "path"], + expected: "base/path", + }, + { + description: "should handle base paths with trailing slashes", + base: "base/", + segments: ["path"], + expected: "base/path", + }, + { + description: "should handle complex nested paths", + base: "api/v1/", + segments: ["/users/", "/123/", "/profile"], + expected: "api/v1/users/123/profile", + }, + ]; + + edgeCaseTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); + }); + }); + + describe("real-world scenarios", () => { + const realWorldTests: TestCase[] = [ + { + description: "should handle API endpoint construction", + base: "https://api.example.com/v1", + segments: ["users", "123", "posts"], + expected: "https://api.example.com/v1/users/123/posts", + }, + { + description: "should handle file path construction", + base: "/var/www", + segments: ["html", "assets", "images"], + expected: "/var/www/html/assets/images", + }, + { + description: "should handle relative path construction", + base: "../parent", + segments: ["child", "grandchild"], + expected: "../parent/child/grandchild", + }, + { + description: "should handle Windows-style paths", + base: "C:\\Users", + segments: ["Documents", "file.txt"], + expected: "C:\\Users/Documents/file.txt", + }, + ]; + + realWorldTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); + }); + }); + + describe("performance scenarios", () => { + it("should handle many segments efficiently", () => { + const segments = Array(100).fill("segment"); + const result = join("base", ...segments); + expect(result).toBe(`base/${segments.join("/")}`); + }); + + it("should handle long URLs", () => { + const longPath = "a".repeat(1000); + expect(join("https://example.com", longPath)).toBe(`https://example.com/${longPath}`); + }); + }); + + describe("trailing slash preservation", () => { + const trailingSlashTests: TestCase[] = [ + { + description: + "should preserve trailing slash on final result when base has trailing slash and no segments", + base: "https://api.example.com/", + segments: [], + expected: "https://api.example.com/", + }, + { + description: "should preserve trailing slash on v1 path", + base: "https://api.example.com/v1/", + segments: [], + expected: "https://api.example.com/v1/", + }, + { + description: "should preserve trailing slash when last segment has trailing 
slash", + base: "https://api.example.com", + segments: ["users/"], + expected: "https://api.example.com/users/", + }, + { + description: "should preserve trailing slash with relative path", + base: "api/v1", + segments: ["users/"], + expected: "api/v1/users/", + }, + { + description: "should preserve trailing slash with multiple segments", + base: "https://api.example.com", + segments: ["v1", "collections/"], + expected: "https://api.example.com/v1/collections/", + }, + { + description: "should preserve trailing slash with base path", + base: "base", + segments: ["path1", "path2/"], + expected: "base/path1/path2/", + }, + ]; + + trailingSlashTests.forEach(({ description, base, segments, expected }) => { + it(description, () => { + expect(join(base, ...segments)).toBe(expected); + }); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/unit/url/qs.test.ts b/seed/ts-sdk/ts-extra-properties/tests/unit/url/qs.test.ts new file mode 100644 index 000000000000..42cdffb9e5ea --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/unit/url/qs.test.ts @@ -0,0 +1,278 @@ +import { toQueryString } from "../../../src/core/url/index"; + +describe("Test qs toQueryString", () => { + interface BasicTestCase { + description: string; + input: any; + expected: string; + } + + describe("Basic functionality", () => { + const basicTests: BasicTestCase[] = [ + { description: "should return empty string for null", input: null, expected: "" }, + { description: "should return empty string for undefined", input: undefined, expected: "" }, + { description: "should return empty string for string primitive", input: "hello", expected: "" }, + { description: "should return empty string for number primitive", input: 42, expected: "" }, + { description: "should return empty string for true boolean", input: true, expected: "" }, + { description: "should return empty string for false boolean", input: false, expected: "" }, + { description: "should handle empty objects", input: {}, expected: "" }, + { + description: "should handle simple key-value pairs", + input: { name: "John", age: 30 }, + expected: "name=John&age=30", + }, + ]; + + basicTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(toQueryString(input)).toBe(expected); + }); + }); + }); + + describe("Array handling", () => { + interface ArrayTestCase { + description: string; + input: any; + options?: { arrayFormat?: "repeat" | "indices" }; + expected: string; + } + + const arrayTests: ArrayTestCase[] = [ + { + description: "should handle arrays with indices format (default)", + input: { items: ["a", "b", "c"] }, + expected: "items%5B0%5D=a&items%5B1%5D=b&items%5B2%5D=c", + }, + { + description: "should handle arrays with repeat format", + input: { items: ["a", "b", "c"] }, + options: { arrayFormat: "repeat" }, + expected: "items=a&items=b&items=c", + }, + { + description: "should handle empty arrays", + input: { items: [] }, + expected: "", + }, + { + description: "should handle arrays with mixed types", + input: { mixed: ["string", 42, true, false] }, + expected: "mixed%5B0%5D=string&mixed%5B1%5D=42&mixed%5B2%5D=true&mixed%5B3%5D=false", + }, + { + description: "should handle arrays with objects", + input: { users: [{ name: "John" }, { name: "Jane" }] }, + expected: "users%5B0%5D%5Bname%5D=John&users%5B1%5D%5Bname%5D=Jane", + }, + { + description: "should handle arrays with objects in repeat format", + input: { users: [{ name: "John" }, { name: "Jane" }] }, + options: { arrayFormat: "repeat" }, + expected: 
"users%5Bname%5D=John&users%5Bname%5D=Jane", + }, + ]; + + arrayTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); + }); + }); + + describe("Nested objects", () => { + const nestedTests: BasicTestCase[] = [ + { + description: "should handle nested objects", + input: { user: { name: "John", age: 30 } }, + expected: "user%5Bname%5D=John&user%5Bage%5D=30", + }, + { + description: "should handle deeply nested objects", + input: { user: { profile: { name: "John", settings: { theme: "dark" } } } }, + expected: "user%5Bprofile%5D%5Bname%5D=John&user%5Bprofile%5D%5Bsettings%5D%5Btheme%5D=dark", + }, + { + description: "should handle empty nested objects", + input: { user: {} }, + expected: "", + }, + ]; + + nestedTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(toQueryString(input)).toBe(expected); + }); + }); + }); + + describe("Encoding", () => { + interface EncodingTestCase { + description: string; + input: any; + options?: { encode?: boolean }; + expected: string; + } + + const encodingTests: EncodingTestCase[] = [ + { + description: "should encode by default", + input: { name: "John Doe", email: "john@example.com" }, + expected: "name=John%20Doe&email=john%40example.com", + }, + { + description: "should not encode when encode is false", + input: { name: "John Doe", email: "john@example.com" }, + options: { encode: false }, + expected: "name=John Doe&email=john@example.com", + }, + { + description: "should encode special characters in keys", + input: { "user name": "John", "email[primary]": "john@example.com" }, + expected: "user%20name=John&email%5Bprimary%5D=john%40example.com", + }, + { + description: "should not encode special characters in keys when encode is false", + input: { "user name": "John", "email[primary]": "john@example.com" }, + options: { encode: false }, + expected: "user name=John&email[primary]=john@example.com", + }, + ]; + + encodingTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); + }); + }); + + describe("Mixed scenarios", () => { + interface MixedTestCase { + description: string; + input: any; + options?: { arrayFormat?: "repeat" | "indices" }; + expected: string; + } + + const mixedTests: MixedTestCase[] = [ + { + description: "should handle complex nested structures", + input: { + filters: { + status: ["active", "pending"], + category: { + type: "electronics", + subcategories: ["phones", "laptops"], + }, + }, + sort: { field: "name", direction: "asc" }, + }, + expected: + "filters%5Bstatus%5D%5B0%5D=active&filters%5Bstatus%5D%5B1%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D%5B0%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D%5B1%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", + }, + { + description: "should handle complex nested structures with repeat format", + input: { + filters: { + status: ["active", "pending"], + category: { + type: "electronics", + subcategories: ["phones", "laptops"], + }, + }, + sort: { field: "name", direction: "asc" }, + }, + options: { arrayFormat: "repeat" }, + expected: + "filters%5Bstatus%5D=active&filters%5Bstatus%5D=pending&filters%5Bcategory%5D%5Btype%5D=electronics&filters%5Bcategory%5D%5Bsubcategories%5D=phones&filters%5Bcategory%5D%5Bsubcategories%5D=laptops&sort%5Bfield%5D=name&sort%5Bdirection%5D=asc", + }, + { + description: 
"should handle arrays with null/undefined values", + input: { items: ["a", null, "c", undefined, "e"] }, + expected: "items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c&items%5B4%5D=e", + }, + { + description: "should handle objects with null/undefined values", + input: { name: "John", age: null, email: undefined, active: true }, + expected: "name=John&age=&active=true", + }, + ]; + + mixedTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); + }); + }); + + describe("Edge cases", () => { + const edgeCaseTests: BasicTestCase[] = [ + { + description: "should handle numeric keys", + input: { "0": "zero", "1": "one" }, + expected: "0=zero&1=one", + }, + { + description: "should handle boolean values in objects", + input: { enabled: true, disabled: false }, + expected: "enabled=true&disabled=false", + }, + { + description: "should handle empty strings", + input: { name: "", description: "test" }, + expected: "name=&description=test", + }, + { + description: "should handle zero values", + input: { count: 0, price: 0.0 }, + expected: "count=0&price=0", + }, + { + description: "should handle arrays with empty strings", + input: { items: ["a", "", "c"] }, + expected: "items%5B0%5D=a&items%5B1%5D=&items%5B2%5D=c", + }, + ]; + + edgeCaseTests.forEach(({ description, input, expected }) => { + it(description, () => { + expect(toQueryString(input)).toBe(expected); + }); + }); + }); + + describe("Options combinations", () => { + interface OptionsTestCase { + description: string; + input: any; + options?: { arrayFormat?: "repeat" | "indices"; encode?: boolean }; + expected: string; + } + + const optionsTests: OptionsTestCase[] = [ + { + description: "should respect both arrayFormat and encode options", + input: { items: ["a & b", "c & d"] }, + options: { arrayFormat: "repeat", encode: false }, + expected: "items=a & b&items=c & d", + }, + { + description: "should use default options when none provided", + input: { items: ["a", "b"] }, + expected: "items%5B0%5D=a&items%5B1%5D=b", + }, + { + description: "should merge provided options with defaults", + input: { items: ["a", "b"], name: "John Doe" }, + options: { encode: false }, + expected: "items[0]=a&items[1]=b&name=John Doe", + }, + ]; + + optionsTests.forEach(({ description, input, options, expected }) => { + it(description, () => { + expect(toQueryString(input, options)).toBe(expected); + }); + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tests/wire/.gitkeep b/seed/ts-sdk/ts-extra-properties/tests/wire/.gitkeep new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/seed/ts-sdk/ts-extra-properties/tests/wire/main.test.ts b/seed/ts-sdk/ts-extra-properties/tests/wire/main.test.ts new file mode 100644 index 000000000000..ab2c105c5623 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tests/wire/main.test.ts @@ -0,0 +1,57 @@ +// This file was auto-generated by Fern from our API Definition. 
+ +import { SeedApiClient } from "../../src/Client"; +import { mockServerPool } from "../mock-server/MockServerPool"; + +describe("SeedApiClient", () => { + test("getUser", async () => { + const server = mockServerPool.createServer(); + const client = new SeedApiClient({ maxRetries: 0, environment: server.baseUrl }); + + const rawResponseBody = { + id: "id", + user_name: "user_name", + created_at: "2024-01-15T09:30:00Z", + updated_at: "2024-01-15T09:30:00Z", + }; + server.mockEndpoint().get("/user").respondWith().statusCode(200).jsonBody(rawResponseBody).build(); + + const response = await client.getUser(); + expect(response).toEqual({ + id: "id", + userName: "user_name", + createdAt: new Date("2024-01-15T09:30:00.000Z"), + updatedAt: new Date("2024-01-15T09:30:00.000Z"), + }); + }); + + test("createUser", async () => { + const server = mockServerPool.createServer(); + const client = new SeedApiClient({ maxRetries: 0, environment: server.baseUrl }); + const rawRequestBody = { user_name: "user_name" }; + const rawResponseBody = { + id: "id", + user_name: "user_name", + created_at: "2024-01-15T09:30:00Z", + updated_at: "2024-01-15T09:30:00Z", + }; + server + .mockEndpoint() + .post("/user") + .jsonBody(rawRequestBody) + .respondWith() + .statusCode(200) + .jsonBody(rawResponseBody) + .build(); + + const response = await client.createUser({ + userName: "user_name", + }); + expect(response).toEqual({ + id: "id", + userName: "user_name", + createdAt: new Date("2024-01-15T09:30:00.000Z"), + updatedAt: new Date("2024-01-15T09:30:00.000Z"), + }); + }); +}); diff --git a/seed/ts-sdk/ts-extra-properties/tsconfig.base.json b/seed/ts-sdk/ts-extra-properties/tsconfig.base.json new file mode 100644 index 000000000000..d7627675de20 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tsconfig.base.json @@ -0,0 +1,18 @@ +{ + "compilerOptions": { + "extendedDiagnostics": true, + "strict": true, + "target": "ES6", + "moduleResolution": "node", + "esModuleInterop": true, + "skipLibCheck": true, + "declaration": true, + "outDir": "dist", + "rootDir": "src", + "baseUrl": "src", + "isolatedModules": true, + "isolatedDeclarations": true + }, + "include": ["src"], + "exclude": [] +} diff --git a/seed/ts-sdk/ts-extra-properties/tsconfig.cjs.json b/seed/ts-sdk/ts-extra-properties/tsconfig.cjs.json new file mode 100644 index 000000000000..5c11446f5984 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tsconfig.cjs.json @@ -0,0 +1,9 @@ +{ + "extends": "./tsconfig.base.json", + "compilerOptions": { + "module": "CommonJS", + "outDir": "dist/cjs" + }, + "include": ["src"], + "exclude": [] +} diff --git a/seed/ts-sdk/ts-extra-properties/tsconfig.esm.json b/seed/ts-sdk/ts-extra-properties/tsconfig.esm.json new file mode 100644 index 000000000000..6ce909748b2c --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tsconfig.esm.json @@ -0,0 +1,10 @@ +{ + "extends": "./tsconfig.base.json", + "compilerOptions": { + "module": "esnext", + "outDir": "dist/esm", + "verbatimModuleSyntax": true + }, + "include": ["src"], + "exclude": [] +} diff --git a/seed/ts-sdk/ts-extra-properties/tsconfig.json b/seed/ts-sdk/ts-extra-properties/tsconfig.json new file mode 100644 index 000000000000..d77fdf00d259 --- /dev/null +++ b/seed/ts-sdk/ts-extra-properties/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "./tsconfig.cjs.json" +} diff --git a/seed/ts-sdk/ts-extra-properties/vitest.config.mts b/seed/ts-sdk/ts-extra-properties/vitest.config.mts new file mode 100644 index 000000000000..ba2ec4f9d45a --- /dev/null +++ 
b/seed/ts-sdk/ts-extra-properties/vitest.config.mts @@ -0,0 +1,28 @@ +import { defineConfig } from "vitest/config"; +export default defineConfig({ + test: { + projects: [ + { + test: { + globals: true, + name: "unit", + environment: "node", + root: "./tests", + include: ["**/*.test.{js,ts,jsx,tsx}"], + exclude: ["wire/**"], + setupFiles: ["./setup.ts"], + }, + }, + { + test: { + globals: true, + name: "wire", + environment: "node", + root: "./tests/wire", + setupFiles: ["../setup.ts", "../mock-server/setup.ts"], + }, + }, + ], + passWithNoTests: true, + }, +}); diff --git a/test-definitions/fern/apis/python-streaming-parameter-openapi/generators.yml b/test-definitions/fern/apis/python-streaming-parameter-openapi/generators.yml new file mode 100644 index 000000000000..8762d29b8370 --- /dev/null +++ b/test-definitions/fern/apis/python-streaming-parameter-openapi/generators.yml @@ -0,0 +1,4 @@ +# yaml-language-server: $schema=https://schema.buildwithfern.dev/generators-yml.json +api: + specs: + - openapi: ./openapi.yml diff --git a/test-definitions/fern/apis/python-streaming-parameter-openapi/openapi.yml b/test-definitions/fern/apis/python-streaming-parameter-openapi/openapi.yml new file mode 100644 index 000000000000..5c489647b5fe --- /dev/null +++ b/test-definitions/fern/apis/python-streaming-parameter-openapi/openapi.yml @@ -0,0 +1,64 @@ +openapi: 3.0.3 +info: + title: Streaming Parameter API + version: 1.0.0 +paths: + /chat: + post: + operationId: chat + summary: Chat endpoint with streaming support + requestBody: + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ChatRequest' + responses: + '200': + description: Successful response + content: + application/json: + schema: + $ref: '#/components/schemas/ChatResponse' + x-fern-streaming: + format: sse + stream-condition: $request.stream + response: + $ref: '#/components/schemas/ChatResponse' + response-stream: + $ref: '#/components/schemas/ChatStreamEvent' + x-fern-examples: + - name: default + request: + prompt: "Hello" + response: + body: + message: "Hello! How can I help you?" 
+ finish_reason: complete +components: + schemas: + ChatRequest: + type: object + required: + - prompt + properties: + prompt: + type: string + description: The user's message + stream: + type: boolean + default: false + ChatResponse: + type: object + properties: + message: + type: string + finish_reason: + type: string + ChatStreamEvent: + type: object + properties: + delta: + type: string + tokens: + type: integer diff --git a/test-definitions/fern/apis/ts-extra-properties/generators.yml b/test-definitions/fern/apis/ts-extra-properties/generators.yml new file mode 100644 index 000000000000..30c009de16fd --- /dev/null +++ b/test-definitions/fern/apis/ts-extra-properties/generators.yml @@ -0,0 +1,13 @@ +# yaml-language-server: $schema=https://schema.buildwithfern.dev/generators-yml.json +api: + specs: + - openapi: ./openapi.yml +groups: + ts-sdk: + generators: + - name: fernapi/fern-typescript-sdk + version: latest + config: + generateWireTests: true + noSerdeLayer: false + enableInlineTypes: false diff --git a/test-definitions/fern/apis/ts-extra-properties/openapi.yml b/test-definitions/fern/apis/ts-extra-properties/openapi.yml new file mode 100644 index 000000000000..eebaaf28580b --- /dev/null +++ b/test-definitions/fern/apis/ts-extra-properties/openapi.yml @@ -0,0 +1,63 @@ +openapi: 3.0.3 +info: + title: Extra Properties Test API + version: 1.0.0 +paths: + /user: + get: + operationId: getUser + summary: Get a user with extra properties + responses: + "200": + description: Successful response + content: + application/json: + schema: + $ref: "#/components/schemas/User" + post: + operationId: createUser + summary: Create a user with extra properties + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/CreateUserRequest" + responses: + "200": + description: Successful response + content: + application/json: + schema: + $ref: "#/components/schemas/User" +components: + schemas: + User: + type: object + additionalProperties: true + required: + - id + - user_name + - created_at + properties: + id: + type: string + user_name: + type: string + created_at: + type: string + format: date-time + updated_at: + type: string + format: date-time + CreateUserRequest: + type: object + additionalProperties: true + required: + - user_name + properties: + user_name: + type: string + meta_data: + type: object + additionalProperties: true diff --git a/version-yml.schema.json b/version-yml.schema.json index 5eac08721a5e..b7482650050d 100644 --- a/version-yml.schema.json +++ b/version-yml.schema.json @@ -621,6 +621,16 @@ "docs.PlaygroundSettings": { "type": "object", "properties": { + "hidden": { + "oneOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, "environments": { "oneOf": [ {