diff --git a/docs/classes/BaseModelProvider.html b/docs/classes/BaseModelProvider.html
index f6a56dd..4773fde 100644
--- a/docs/classes/BaseModelProvider.html
+++ b/docs/classes/BaseModelProvider.html
@@ -1,7 +1,7 @@
-BaseModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Class BaseModelProvider<TRequestOptions, TResponse, TModelProviderConfig, TMetaOptions>Abstract

Type Parameters

  • TRequestOptions extends ModelRequestOptions
  • TResponse = unknown
  • TModelProviderConfig extends BaseModelProviderConfig = BaseModelProviderConfig
  • TMetaOptions = unknown

Hierarchy (view full)

Implements

Constructors

constructor
+BaseModelProvider | generative-ts - v0.1.0-alpha.7

Class BaseModelProvider<TRequestOptions, TResponse, TModelProviderConfig, TMetaOptions>Abstract

Type Parameters

  • TRequestOptions extends ModelRequestOptions
  • TResponse = unknown
  • TModelProviderConfig extends BaseModelProviderConfig = BaseModelProviderConfig
  • TMetaOptions = unknown

Hierarchy (view full)

Implements

Constructors

Properties

Methods

Constructors

Properties

api: ModelApi<TRequestOptions, TResponse>
history: {
    meta: undefined | TMetaOptions;
    options: TRequestOptions;
    response: undefined | TResponse;
}[]

Type declaration

Methods

  • Parameters

    Returns Promise<unknown>

  • Parameters

    Returns Promise<TResponse>

\ No newline at end of file +

Constructors

Properties

api: ModelApi<TRequestOptions, TResponse>
history: {
    meta: undefined | TMetaOptions;
    options: TRequestOptions;
    response: undefined | TResponse;
}[]

Type declaration

Methods

  • Parameters

    Returns Promise<unknown>

  • Parameters

    Returns Promise<TResponse>
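
As a rough usage sketch (assuming the Promise<TResponse>-returning method is sendRequest, as in the factory examples later in this diff, and that history is readable after a request), a concrete provider built on this class can be used like this:

import { createLmStudioModelProvider } from "generative-ts";

// any concrete provider (here the LMStudio one) ultimately extends BaseModelProvider
const provider = createLmStudioModelProvider({
  modelId: "lmstudio-community/Meta-Llama-3-70B-Instruct-GGUF",
});

const response = await provider.sendRequest({ $prompt: "Brief History of NY Mets:" });
console.log(response);

// each request is recorded in `history` as { meta, options, response } entries
for (const entry of provider.history) {
  console.log(entry.options.$prompt, entry.response);
}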

\ No newline at end of file
diff --git a/docs/classes/FnTemplate.html b/docs/classes/FnTemplate.html
index a951cce..da0c75b 100644
--- a/docs/classes/FnTemplate.html
+++ b/docs/classes/FnTemplate.html
@@ -1,5 +1,5 @@
-FnTemplate | generative-ts - v0.1.0-alpha.6
generative-ts

Class FnTemplate<TVars>

Implementation of the Template interface using a TypeScript function

+FnTemplate | generative-ts - v0.1.0-alpha.7

Class FnTemplate<TVars>

Implementation of the Template interface using a TypeScript function

Type Parameters

  • TVars extends object

Implements

Constructors

Properties

Methods

Constructors

Properties

source: RenderFn<TVars>

Methods

  • Parameters

    Returns string

\ No newline at end of file +

Constructors

Properties

source: RenderFn<TVars>

Methods

  • Parameters

    Returns string
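
A minimal sketch of the intended usage, assuming the constructor takes the RenderFn that becomes source and that the string-returning method is named render (neither name is spelled out in this diff):

import { FnTemplate } from "generative-ts";

// a Template whose rendering logic is a plain TypeScript function
const promptTemplate = new FnTemplate(
  ({ topic }: { topic: string }) => `Briefly summarize: ${topic}`,
);

// render (assumed name) applies the source function to the supplied variables
console.log(promptTemplate.render({ topic: "the NY Mets" }));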

\ No newline at end of file
diff --git a/docs/classes/HttpModelProvider.html b/docs/classes/HttpModelProvider.html
index bc56210..7cf3b47 100644
--- a/docs/classes/HttpModelProvider.html
+++ b/docs/classes/HttpModelProvider.html
@@ -1,4 +1,4 @@
-HttpModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Class HttpModelProvider<TRequestOptions, TResponse, THttpClientOptions, TModelProviderConfig>

Type Parameters

  • TRequestOptions extends ModelRequestOptions = ModelRequestOptions
  • TResponse = unknown
  • THttpClientOptions = HttpClientOptions
  • TModelProviderConfig extends BaseModelProviderConfig = BaseModelProviderConfig

Hierarchy (view full)

Constructors

constructor
+HttpModelProvider | generative-ts - v0.1.0-alpha.7

Class HttpModelProvider<TRequestOptions, TResponse, THttpClientOptions, TModelProviderConfig>

Type Parameters

  • TRequestOptions extends ModelRequestOptions = ModelRequestOptions
  • TResponse = unknown
  • THttpClientOptions = HttpClientOptions
  • TModelProviderConfig extends BaseModelProviderConfig = BaseModelProviderConfig

Hierarchy (view full)

Constructors

Properties

Constructors

Properties

api: ModelApi<TRequestOptions, TResponse>
auth: any
client: HttpClient<THttpClientOptions>
endpoint: any
headers: any
history: {
    meta: undefined | THttpClientOptions;
    options: TRequestOptions;
    response: undefined | TResponse;
}[]

Type declaration

Methods

  • Parameters

    Returns Promise<{
        body: string;
        endpoint: string;
        headers: Headers;
    }> | {
        body: string;
        endpoint: string;
        headers: Headers;
    }

  • Parameters

    Returns string

  • Parameters

    Returns string | Promise<string>

  • Parameters

    Returns Headers | Promise<Headers>

\ No newline at end of file +

Constructors

Properties

api: ModelApi<TRequestOptions, TResponse>
auth: any
client: HttpClient<THttpClientOptions>
endpoint: any
headers: any
history: {
    meta: undefined | THttpClientOptions;
    options: TRequestOptions;
    response: undefined | TResponse;
}[]

Type declaration

Methods

  • Parameters

    Returns Promise<{
        body: string;
        endpoint: string;
        headers: Headers;
    }> | {
        body: string;
        endpoint: string;
        headers: Headers;
    }

  • Parameters

    Returns string

  • Parameters

    Returns string | Promise<string>

  • Parameters

    Returns Headers | Promise<Headers>

\ No newline at end of file
diff --git a/docs/functions/createAwsBedrockModelProvider.html b/docs/functions/createAwsBedrockModelProvider.html
index c2e9c84..5941aae 100644
--- a/docs/functions/createAwsBedrockModelProvider.html
+++ b/docs/functions/createAwsBedrockModelProvider.html
@@ -1,4 +1,4 @@
-createAwsBedrockModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createAwsBedrockModelProvider
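
The body of this page is not part of the diff. Purely as a hypothetical sketch, assuming the factory follows the ({ modelId }) pattern of the other create* functions, accepts one of the Bedrock model APIs documented below (here AmazonTitanTextApi), and reads AWS credentials from the environment per AwsBedrockAuthConfig:

import { createAwsBedrockModelProvider, AmazonTitanTextApi } from "generative-ts";

// the `api` parameter, model ID, and env-based credentials are assumptions
const titanText = createAwsBedrockModelProvider({
  api: AmazonTitanTextApi,
  modelId: "amazon.titan-text-express-v1",
});

const response = await titanText.sendRequest({ $prompt: "Brief History of NY Mets:" });

// AmazonTitanTextResponse (documented below) exposes results[].outputText
console.log(response.results[0]?.outputText);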

\ No newline at end of file
diff --git a/docs/functions/createCohereModelProvider.html b/docs/functions/createCohereModelProvider.html
index 377f0b1..c62cdd4 100644
--- a/docs/functions/createCohereModelProvider.html
+++ b/docs/functions/createCohereModelProvider.html
@@ -1,4 +1,4 @@
-createCohereModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createCohereModelProvider
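
The page body is not included in the diff. A hypothetical sketch, assuming the ({ modelId }) pattern used by the other factories, a COHERE_API_KEY read from the environment per CohereAuthConfig, and the CohereChatResponse shape documented below:

import { createCohereModelProvider } from "generative-ts";

// the model ID is illustrative; COHERE_API_KEY is assumed to come from the environment
const commandR = createCohereModelProvider({
  modelId: "command-r-plus",
});

const response = await commandR.sendRequest({
  $prompt: "Brief History of NY Mets:",
  preamble: "Answer like a concise baseball historian.", // a documented CohereChatOptions field
});

// CohereChatResponse (documented below) exposes the generated text as `text`
console.log(response.text);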

\ No newline at end of file
diff --git a/docs/functions/createGroqModelProvider.html b/docs/functions/createGroqModelProvider.html
index 1f5128a..199c0a2 100644
--- a/docs/functions/createGroqModelProvider.html
+++ b/docs/functions/createGroqModelProvider.html
@@ -1,4 +1,4 @@
-createGroqModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createGroqModelProvider
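
Another page whose body is not in the diff. A hypothetical sketch, assuming the same ({ modelId }) pattern, a Groq API key taken from the environment, and an OpenAI-ChatCompletion-shaped response as in the LMStudio example below:

import { createGroqModelProvider } from "generative-ts";

// the model ID is illustrative; the Groq API key is assumed to come from the environment
const llama3 = createGroqModelProvider({
  modelId: "llama3-70b-8192",
});

const response = await llama3.sendRequest({ $prompt: "Brief History of NY Mets:" });
console.log(response.choices[0]?.message.content);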

\ No newline at end of file
diff --git a/docs/functions/createHuggingfaceInferenceModelProvider.html b/docs/functions/createHuggingfaceInferenceModelProvider.html
index 0d230c8..3e002ed 100644
--- a/docs/functions/createHuggingfaceInferenceModelProvider.html
+++ b/docs/functions/createHuggingfaceInferenceModelProvider.html
@@ -1,4 +1,4 @@
-createHuggingfaceInferenceModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createHuggingfaceInferenceModelProvider
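
The page body is likewise absent from the diff. A hypothetical sketch, assuming the ({ modelId }) pattern and a Huggingface API token read from the environment; since the Inference API's response shape depends on the hosted model and task, the response is simply inspected:

import { createHuggingfaceInferenceModelProvider } from "generative-ts";

// the model ID is illustrative; the Huggingface token is assumed to come from the environment
const gpt2 = createHuggingfaceInferenceModelProvider({
  modelId: "gpt2",
});

const response = await gpt2.sendRequest({ $prompt: "Brief History of NY Mets:" });
console.log(JSON.stringify(response, null, 2));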

\ No newline at end of file
diff --git a/docs/functions/createLmStudioModelProvider.html b/docs/functions/createLmStudioModelProvider.html
index 7b0b391..965f71a 100644
--- a/docs/functions/createLmStudioModelProvider.html
+++ b/docs/functions/createLmStudioModelProvider.html
@@ -1,4 +1,4 @@
-createLmStudioModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createLmStudioModelProvider

  • Creates an LMStudio ModelProvider with the OpenAiChatApi

    +createLmStudioModelProvider | generative-ts - v0.1.0-alpha.7

    Function createLmStudioModelProvider

    • Creates an LMStudio ModelProvider with the OpenAiChatApi

      import { createLmStudioModelProvider } from "generative-ts";

      const llama3 = createLmStudioModelProvider({
      modelId: "lmstudio-community/Meta-Llama-3-70B-Instruct-GGUF", // an ID of a model you have downloaded in LMStudio
      });

      const response = await llama3.sendRequest({
      $prompt: "Brief History of NY Mets:"
      // all other OpenAI ChatCompletion options available here (LMStudio uses the OpenAI ChatCompletion API for all the models it hosts)
      });

      console.log(response.choices[0]?.message.content);

      Provider Setup and Notes

      Follow LMStudio's instructions to set up the LMStudio local server.

      @@ -19,4 +19,4 @@

    Example: Usage

    import { createLmStudioModelProvider } from "generative-ts";

    const llama3 = createLmStudioModelProvider({
    modelId: "lmstudio-community/Meta-Llama-3-70B-Instruct-GGUF", // an ID of a model you have downloaded in LMStudio
    });

    const response = await llama3.sendRequest({
    $prompt: "Brief History of NY Mets:"
    // all other OpenAI ChatCompletion options available here (LMStudio uses the OpenAI ChatCompletion API for all the models it hosts)
    });

    console.log(response.choices[0]?.message.content);
    -
\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/functions/createMistralModelProvider.html b/docs/functions/createMistralModelProvider.html
index 8a4323d..30185e9 100644
--- a/docs/functions/createMistralModelProvider.html
+++ b/docs/functions/createMistralModelProvider.html
@@ -1,4 +1,4 @@
-createMistralModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createMistralModelProvider
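
The page body is not shown in the diff. A hypothetical sketch, assuming the ({ modelId }) pattern, a MISTRAL_API_KEY read from the environment, and a ChatCompletion-style response (Mistral's chat API mirrors OpenAI's):

import { createMistralModelProvider } from "generative-ts";

// the model ID is illustrative; MISTRAL_API_KEY is assumed to come from the environment
const mistralLarge = createMistralModelProvider({
  modelId: "mistral-large-latest",
});

const response = await mistralLarge.sendRequest({ $prompt: "Brief History of NY Mets:" });
console.log(response.choices[0]?.message.content);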

\ No newline at end of file
diff --git a/docs/functions/createOpenAiChatModelProvider.html b/docs/functions/createOpenAiChatModelProvider.html
index 98475db..33e05ed 100644
--- a/docs/functions/createOpenAiChatModelProvider.html
+++ b/docs/functions/createOpenAiChatModelProvider.html
@@ -1,4 +1,4 @@
-createOpenAiChatModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createOpenAiChatModelProvider
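
The page body is not shown in the diff. A hypothetical sketch, assuming it mirrors the LMStudio factory (which wraps the same OpenAiChatApi) and that OPENAI_API_KEY is read from the environment:

import { createOpenAiChatModelProvider } from "generative-ts";

// the model ID is illustrative; OPENAI_API_KEY is assumed to come from the environment
const gpt4 = createOpenAiChatModelProvider({
  modelId: "gpt-4-turbo",
});

const response = await gpt4.sendRequest({
  $prompt: "Brief History of NY Mets:",
  max_tokens: 100, // other OpenAI ChatCompletion options should be accepted too
});

console.log(response.choices[0]?.message.content);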

\ No newline at end of file
diff --git a/docs/functions/createVertexAiModelProvider.html b/docs/functions/createVertexAiModelProvider.html
index a260622..7e8c5a4 100644
--- a/docs/functions/createVertexAiModelProvider.html
+++ b/docs/functions/createVertexAiModelProvider.html
@@ -1,4 +1,4 @@
-createVertexAiModelProvider | generative-ts - v0.1.0-alpha.6
generative-ts

Function createVertexAiModelProvider

  • Creates a Google Cloud VertexAI ModelProvider with the GoogleGeminiApi.

    +createVertexAiModelProvider | generative-ts - v0.1.0-alpha.7

    Function createVertexAiModelProvider

    • Creates a Google Cloud VertexAI ModelProvider with the GoogleGeminiApi.

      import { createVertexAiModelProvider } from "@packages/gcloud-vertex-ai";

      const gemini = await createVertexAiModelProvider({
      modelId: "gemini-1.0-pro", // VertexAI defined model ID
      // you can explicitly pass auth here, otherwise by default it is read from process.env
      });

      const response = await gemini.sendRequest({
      $prompt: "Brief History of NY Mets:",
      // all other Gemini options available here
      });

      console.log(response.data.candidates[0]);

      Provider Setup and Notes

      Enable VertexAI in your Google Cloud Console. Note: VertexAI is currently only available in certain regions.

      @@ -16,7 +16,7 @@

      Type Parameters

      • THttpClientOptions = HttpClientOptions

      Parameters

      • params: {
            auth?: VertexAiAuthConfig;
            client?: HttpClient<THttpClientOptions>;
            modelId: string;
        }
        • Optional auth?: VertexAiAuthConfig

          Authorization configuration for VertexAI. If not supplied, it will be loaded from the environment.

        • Optional client?: HttpClient<THttpClientOptions>

          HTTP client to use for requests. If not supplied, a client implementing Google Cloud Application Default Credentials will be used.

        • modelId: string

          The model ID as defined by Google Cloud VertexAI.

          -

      Returns Promise<HttpModelProvider<GoogleGeminiOptions, GoogleGeminiResponse, THttpClientOptions, {
          modelId: string;
      }>>

      The VertexAI Model Provider

      +

Returns Promise<any>

The VertexAI Model Provider

See

\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/hierarchy.html b/docs/hierarchy.html
index 959daf1..660b336 100644
--- a/docs/hierarchy.html
+++ b/docs/hierarchy.html
@@ -1 +1 @@
-generative-ts - v0.1.0-alpha.6
generative-ts

generative-ts - v0.1.0-alpha.6

Class Hierarchy

\ No newline at end of file
+generative-ts - v0.1.0-alpha.7
generative-ts

generative-ts - v0.1.0-alpha.7

Class Hierarchy

\ No newline at end of file
diff --git a/docs/index.html b/docs/index.html
index 836ae20..e08f7c9 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -1,4 +1,4 @@
-generative-ts - v0.1.0-alpha.6
generative-ts

generative-ts - v0.1.0-alpha.6

generative-ts

a typescript library for building LLM applications+agents

+generative-ts - v0.1.0-alpha.7

generative-ts - v0.1.0-alpha.7

generative-ts

a typescript library for building LLM applications+agents

Documentation NPM License

@@ -147,4 +147,4 @@

To run examples and integration/e2e tests, create an .env file by running cp .env.example .env and then add values where necessary

Publishing

The "main" generative-ts package and the scoped @generative-ts packages are both controlled by the generative-ts npm organization. Releases are published by a CircleCI job that runs when a tag whose name starts with release/ is pushed. The job requires an npm token with publishing permissions to both generative-ts and @generative-ts; currently this is a "granular" token that expires every 30 days, created by @jnaglick and set in a CircleCI context.

-
\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/interfaces/Ai21Jurassic2Api.html b/docs/interfaces/Ai21Jurassic2Api.html
index 623f5f9..9214a38 100644
--- a/docs/interfaces/Ai21Jurassic2Api.html
+++ b/docs/interfaces/Ai21Jurassic2Api.html
@@ -1,4 +1,4 @@
-Ai21Jurassic2Api | generative-ts - v0.1.0-alpha.6
generative-ts

Interface Ai21Jurassic2Api

interface Ai21Jurassic2Api {
    name?: string;
    requestTemplate: Template<Ai21Jurassic2Options>;
    responseGuard: ((response) => response is Ai21Jurassic2Response);
}

Hierarchy

Properties

name?
+Ai21Jurassic2Api | generative-ts - v0.1.0-alpha.7

Interface Ai21Jurassic2Api

interface Ai21Jurassic2Api {
    name?: string;
    requestTemplate: Template<Ai21Jurassic2Options>;
    responseGuard: ((response) => response is Ai21Jurassic2Response);
}

Hierarchy

Properties

name?: string
requestTemplate: Template<Ai21Jurassic2Options>
responseGuard: ((response) => response is Ai21Jurassic2Response)

Type declaration

\ No newline at end of file +

Properties

name?: string
requestTemplate: Template<Ai21Jurassic2Options>
responseGuard: ((response) => response is Ai21Jurassic2Response)

Type declaration

\ No newline at end of file
diff --git a/docs/interfaces/Ai21Jurassic2Options.html b/docs/interfaces/Ai21Jurassic2Options.html
index c0da265..8e2db73 100644
--- a/docs/interfaces/Ai21Jurassic2Options.html
+++ b/docs/interfaces/Ai21Jurassic2Options.html
@@ -1,4 +1,4 @@
-Ai21Jurassic2Options | generative-ts - v0.1.0-alpha.6
generative-ts

Interface Ai21Jurassic2Options

interface Ai21Jurassic2Options {
    $prompt: string;
    countPenalty?: PenaltyOptions;
    frequencyPenalty?: PenaltyOptions;
    maxTokens?: number;
    minTokens?: number;
    modelId: string;
    numResults?: number;
    presencePenalty?: PenaltyOptions;
    stopSequences?: string[];
    temperature?: number;
    topKReturn?: number;
    topP?: number;
}

Hierarchy

  • ModelRequestOptions
    • Ai21Jurassic2Options

Properties

$prompt
+Ai21Jurassic2Options | generative-ts - v0.1.0-alpha.7

Interface Ai21Jurassic2Options

interface Ai21Jurassic2Options {
    $prompt: string;
    countPenalty?: PenaltyOptions;
    frequencyPenalty?: PenaltyOptions;
    maxTokens?: number;
    minTokens?: number;
    modelId: string;
    numResults?: number;
    presencePenalty?: PenaltyOptions;
    stopSequences?: string[];
    temperature?: number;
    topKReturn?: number;
    topP?: number;
}

Hierarchy

  • ModelRequestOptions
    • Ai21Jurassic2Options

Properties

$prompt: string
countPenalty?: PenaltyOptions
frequencyPenalty?: PenaltyOptions
maxTokens?: number
minTokens?: number
modelId: string
numResults?: number
presencePenalty?: PenaltyOptions
stopSequences?: string[]
temperature?: number
topKReturn?: number
topP?: number
\ No newline at end of file +

Properties

$prompt: string
countPenalty?: PenaltyOptions
frequencyPenalty?: PenaltyOptions
maxTokens?: number
minTokens?: number
modelId: string
numResults?: number
presencePenalty?: PenaltyOptions
stopSequences?: string[]
temperature?: number
topKReturn?: number
topP?: number
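
For reference, a small object exercising the documented fields. It assumes the interface is exported from the package root; the modelId value is an illustrative Bedrock Jurassic-2 identifier:

import type { Ai21Jurassic2Options } from "generative-ts";

// only fields documented on this interface are used; values are illustrative
const options: Ai21Jurassic2Options = {
  modelId: "ai21.j2-ultra-v1",
  $prompt: "Brief History of NY Mets:",
  maxTokens: 200,
  temperature: 0.7,
  topP: 0.9,
  stopSequences: ["\n\n"],
};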
\ No newline at end of file
diff --git a/docs/interfaces/Ai21Jurassic2Response.html b/docs/interfaces/Ai21Jurassic2Response.html
index 0484ce7..609160b 100644
--- a/docs/interfaces/Ai21Jurassic2Response.html
+++ b/docs/interfaces/Ai21Jurassic2Response.html
@@ -1,4 +1,4 @@
-Ai21Jurassic2Response | generative-ts - v0.1.0-alpha.6
generative-ts

Interface Ai21Jurassic2Response

interface Ai21Jurassic2Response {
    completions: {
        data: {
            text: string;
            tokens: {
                generatedToken: {
                    logprob: number;
                    raw_logprob: number;
                    token: string;
                };
                textRange: {
                    end: number;
                    start: number;
                };
                topTokens: any;
            }[];
        };
        finishReason: {
            reason: string;
        } & {
            length?: number;
        };
    }[];
    id: number;
    prompt: {
        text: string;
        tokens: {
            generatedToken: {
                logprob: number;
                raw_logprob: number;
                token: string;
            };
            textRange: {
                end: number;
                start: number;
            };
            topTokens: any;
        }[];
    };
}

Hierarchy

  • TypeOf<typeof Ai21Jurassic2ResponseCodec>
    • Ai21Jurassic2Response

Properties

completions
+Ai21Jurassic2Response | generative-ts - v0.1.0-alpha.7

Interface Ai21Jurassic2Response

interface Ai21Jurassic2Response {
    completions: {
        data: {
            text: string;
            tokens: {
                generatedToken: {
                    logprob: number;
                    raw_logprob: number;
                    token: string;
                };
                textRange: {
                    end: number;
                    start: number;
                };
                topTokens: any;
            }[];
        };
        finishReason: {
            reason: string;
        } & {
            length?: number;
        };
    }[];
    id: number;
    prompt: {
        text: string;
        tokens: {
            generatedToken: {
                logprob: number;
                raw_logprob: number;
                token: string;
            };
            textRange: {
                end: number;
                start: number;
            };
            topTokens: any;
        }[];
    };
}

Hierarchy

  • TypeOf<typeof Ai21Jurassic2ResponseCodec>
    • Ai21Jurassic2Response

Properties

Properties

completions: {
    data: {
        text: string;
        tokens: {
            generatedToken: {
                logprob: number;
                raw_logprob: number;
                token: string;
            };
            textRange: {
                end: number;
                start: number;
            };
            topTokens: any;
        }[];
    };
    finishReason: {
        reason: string;
    } & {
        length?: number;
    };
}[]

Type declaration

  • data: {
        text: string;
        tokens: {
            generatedToken: {
                logprob: number;
                raw_logprob: number;
                token: string;
            };
            textRange: {
                end: number;
                start: number;
            };
            topTokens: any;
        }[];
    }
    • text: string
    • tokens: {
          generatedToken: {
              logprob: number;
              raw_logprob: number;
              token: string;
          };
          textRange: {
              end: number;
              start: number;
          };
          topTokens: any;
      }[]
  • finishReason: {
        reason: string;
    } & {
        length?: number;
    }
id: number
prompt: {
    text: string;
    tokens: {
        generatedToken: {
            logprob: number;
            raw_logprob: number;
            token: string;
        };
        textRange: {
            end: number;
            start: number;
        };
        topTokens: any;
    }[];
}

Type declaration

  • text: string
  • tokens: {
        generatedToken: {
            logprob: number;
            raw_logprob: number;
            token: string;
        };
        textRange: {
            end: number;
            start: number;
        };
        topTokens: any;
    }[]
\ No newline at end of file +

Properties

completions: {
    data: {
        text: string;
        tokens: {
            generatedToken: {
                logprob: number;
                raw_logprob: number;
                token: string;
            };
            textRange: {
                end: number;
                start: number;
            };
            topTokens: any;
        }[];
    };
    finishReason: {
        reason: string;
    } & {
        length?: number;
    };
}[]

Type declaration

  • data: {
        text: string;
        tokens: {
            generatedToken: {
                logprob: number;
                raw_logprob: number;
                token: string;
            };
            textRange: {
                end: number;
                start: number;
            };
            topTokens: any;
        }[];
    }
    • text: string
    • tokens: {
          generatedToken: {
              logprob: number;
              raw_logprob: number;
              token: string;
          };
          textRange: {
              end: number;
              start: number;
          };
          topTokens: any;
      }[]
  • finishReason: {
        reason: string;
    } & {
        length?: number;
    }
id: number
prompt: {
    text: string;
    tokens: {
        generatedToken: {
            logprob: number;
            raw_logprob: number;
            token: string;
        };
        textRange: {
            end: number;
            start: number;
        };
        topTokens: any;
    }[];
}

Type declaration

  • text: string
  • tokens: {
        generatedToken: {
            logprob: number;
            raw_logprob: number;
            token: string;
        };
        textRange: {
            end: number;
            start: number;
        };
        topTokens: any;
    }[]
\ No newline at end of file
diff --git a/docs/interfaces/AmazonTitanTextApi.html b/docs/interfaces/AmazonTitanTextApi.html
index f64f21f..77b3827 100644
--- a/docs/interfaces/AmazonTitanTextApi.html
+++ b/docs/interfaces/AmazonTitanTextApi.html
@@ -1,4 +1,4 @@
-AmazonTitanTextApi | generative-ts - v0.1.0-alpha.6
generative-ts

Interface AmazonTitanTextApi

interface AmazonTitanTextApi {
    name?: string;
    requestTemplate: Template<AmazonTitanTextOptions>;
    responseGuard: ((response) => response is AmazonTitanTextResponse);
}

Hierarchy

Properties

name?
+AmazonTitanTextApi | generative-ts - v0.1.0-alpha.7

Interface AmazonTitanTextApi

interface AmazonTitanTextApi {
    name?: string;
    requestTemplate: Template<AmazonTitanTextOptions>;
    responseGuard: ((response) => response is AmazonTitanTextResponse);
}

Hierarchy

Properties

name?: string
requestTemplate: Template<AmazonTitanTextOptions>
responseGuard: ((response) => response is AmazonTitanTextResponse)

Type declaration

\ No newline at end of file +

Properties

name?: string
requestTemplate: Template<AmazonTitanTextOptions>
responseGuard: ((response) => response is AmazonTitanTextResponse)

Type declaration

\ No newline at end of file
diff --git a/docs/interfaces/AmazonTitanTextOptions.html b/docs/interfaces/AmazonTitanTextOptions.html
index cd2fecf..18f61e3 100644
--- a/docs/interfaces/AmazonTitanTextOptions.html
+++ b/docs/interfaces/AmazonTitanTextOptions.html
@@ -1,7 +1,7 @@
-AmazonTitanTextOptions | generative-ts - v0.1.0-alpha.6
generative-ts

Interface AmazonTitanTextOptions

interface AmazonTitanTextOptions {
    $prompt: string;
    maxTokenCount?: number;
    modelId: string;
    stopSequences?: string[];
    temperature?: number;
    topP?: number;
}

Hierarchy

  • ModelRequestOptions
    • AmazonTitanTextOptions

Properties

$prompt
+AmazonTitanTextOptions | generative-ts - v0.1.0-alpha.7

Interface AmazonTitanTextOptions

interface AmazonTitanTextOptions {
    $prompt: string;
    maxTokenCount?: number;
    modelId: string;
    stopSequences?: string[];
    temperature?: number;
    topP?: number;
}

Hierarchy

  • ModelRequestOptions
    • AmazonTitanTextOptions

Properties

$prompt: string
maxTokenCount?: number
modelId: string
stopSequences?: string[]
temperature?: number
topP?: number
\ No newline at end of file +

Properties

$prompt: string
maxTokenCount?: number
modelId: string
stopSequences?: string[]
temperature?: number
topP?: number
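
For reference, a small object exercising the documented fields, assuming the interface is exported from the package root; the modelId value is an illustrative Bedrock Titan identifier:

import type { AmazonTitanTextOptions } from "generative-ts";

// only fields documented on this interface are used; values are illustrative
const options: AmazonTitanTextOptions = {
  modelId: "amazon.titan-text-express-v1",
  $prompt: "Brief History of NY Mets:",
  maxTokenCount: 300,
  temperature: 0.5,
  topP: 0.9,
  stopSequences: ["User:"],
};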
\ No newline at end of file
diff --git a/docs/interfaces/AmazonTitanTextResponse.html b/docs/interfaces/AmazonTitanTextResponse.html
index 018f7eb..c11850f 100644
--- a/docs/interfaces/AmazonTitanTextResponse.html
+++ b/docs/interfaces/AmazonTitanTextResponse.html
@@ -1,3 +1,3 @@
-AmazonTitanTextResponse | generative-ts - v0.1.0-alpha.6
generative-ts

Interface AmazonTitanTextResponse

interface AmazonTitanTextResponse {
    inputTextTokenCount: number;
    results: {
        completionReason: string;
        outputText: string;
        tokenCount: number;
    }[];
}

Hierarchy

  • TypeOf<typeof AmazonTitanTextResponseCodec>
    • AmazonTitanTextResponse

Properties

inputTextTokenCount
+AmazonTitanTextResponse | generative-ts - v0.1.0-alpha.7

Interface AmazonTitanTextResponse

interface AmazonTitanTextResponse {
    inputTextTokenCount: number;
    results: {
        completionReason: string;
        outputText: string;
        tokenCount: number;
    }[];
}

Hierarchy

  • TypeOf<typeof AmazonTitanTextResponseCodec>
    • AmazonTitanTextResponse

Properties

inputTextTokenCount: number
results: {
    completionReason: string;
    outputText: string;
    tokenCount: number;
}[]

Type declaration

  • completionReason: string
  • outputText: string
  • tokenCount: number
\ No newline at end of file +

Properties

inputTextTokenCount: number
results: {
    completionReason: string;
    outputText: string;
    tokenCount: number;
}[]

Type declaration

  • completionReason: string
  • outputText: string
  • tokenCount: number
\ No newline at end of file
diff --git a/docs/interfaces/AwsBedrockAuthConfig.html b/docs/interfaces/AwsBedrockAuthConfig.html
index 2925c86..888f514 100644
--- a/docs/interfaces/AwsBedrockAuthConfig.html
+++ b/docs/interfaces/AwsBedrockAuthConfig.html
@@ -1,6 +1,6 @@
-AwsBedrockAuthConfig | generative-ts - v0.1.0-alpha.6
generative-ts

Interface AwsBedrockAuthConfig

interface AwsBedrockAuthConfig {
    AWS_ACCESS_KEY_ID?: string;
    AWS_REGION: string;
    AWS_SECRET_ACCESS_KEY?: string;
}

Properties

AWS_ACCESS_KEY_ID?
+AwsBedrockAuthConfig | generative-ts - v0.1.0-alpha.7

Interface AwsBedrockAuthConfig

interface AwsBedrockAuthConfig {
    AWS_ACCESS_KEY_ID?: string;
    AWS_REGION: string;
    AWS_SECRET_ACCESS_KEY?: string;
}

Properties

AWS_ACCESS_KEY_ID?: string

The AWS Access Key ID

AWS_REGION: string
AWS_SECRET_ACCESS_KEY?: string

The AWS Secret Access Key
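
A short sketch of building this config object, assuming the interface is exported from the package root; reading the values from process.env is just one way to populate it:

import type { AwsBedrockAuthConfig } from "generative-ts";

// AWS_REGION is required; the access key pair is optional per the documented interface
const auth: AwsBedrockAuthConfig = {
  AWS_REGION: "us-east-1",
  AWS_ACCESS_KEY_ID: process.env.AWS_ACCESS_KEY_ID,
  AWS_SECRET_ACCESS_KEY: process.env.AWS_SECRET_ACCESS_KEY,
};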

-
\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/interfaces/CohereAuthConfig.html b/docs/interfaces/CohereAuthConfig.html
index 93993b0..fb5119a 100644
--- a/docs/interfaces/CohereAuthConfig.html
+++ b/docs/interfaces/CohereAuthConfig.html
@@ -1,3 +1,3 @@
-CohereAuthConfig | generative-ts - v0.1.0-alpha.6
generative-ts

Interface CohereAuthConfig

interface CohereAuthConfig {
    COHERE_API_KEY: string;
}

Properties

COHERE_API_KEY
+CohereAuthConfig | generative-ts - v0.1.0-alpha.7

Interface CohereAuthConfig

interface CohereAuthConfig {
    COHERE_API_KEY: string;
}

Properties

Properties

COHERE_API_KEY: string

The Cohere key, used as Bearer Token

-
\ No newline at end of file +
\ No newline at end of file
diff --git a/docs/interfaces/CohereChatApi.html b/docs/interfaces/CohereChatApi.html
index db91c76..cf48d82 100644
--- a/docs/interfaces/CohereChatApi.html
+++ b/docs/interfaces/CohereChatApi.html
@@ -1,4 +1,4 @@
-CohereChatApi | generative-ts - v0.1.0-alpha.6
generative-ts

Interface CohereChatApi

interface CohereChatApi {
    name?: string;
    requestTemplate: Template<CohereChatOptions>;
    responseGuard: ((response) => response is CohereChatResponse);
}

Hierarchy

Properties

name?
+CohereChatApi | generative-ts - v0.1.0-alpha.7

Interface CohereChatApi

interface CohereChatApi {
    name?: string;
    requestTemplate: Template<CohereChatOptions>;
    responseGuard: ((response) => response is CohereChatResponse);
}

Hierarchy

Properties

name?: string
requestTemplate: Template<CohereChatOptions>
responseGuard: ((response) => response is CohereChatResponse)

Type declaration

\ No newline at end of file +

Properties

name?: string
requestTemplate: Template<CohereChatOptions>
responseGuard: ((response) => response is CohereChatResponse)

Type declaration

\ No newline at end of file
diff --git a/docs/interfaces/CohereChatOptions.html b/docs/interfaces/CohereChatOptions.html
index 3800f84..567824a 100644
--- a/docs/interfaces/CohereChatOptions.html
+++ b/docs/interfaces/CohereChatOptions.html
@@ -1,4 +1,4 @@
-CohereChatOptions | generative-ts - v0.1.0-alpha.6
generative-ts

Interface CohereChatOptions

interface CohereChatOptions {
    $prompt: string;
    chat_history?: CohereChatHistoryItem[];
    citation_quality?: string;
    conversation_id?: string;
    documents?: Record<string, string>[];
    examplePairs?: {
        assistant: string;
        user: string;
    }[];
    force_single_step?: boolean;
    frequency_penalty?: number;
    k?: number;
    max_input_tokens?: number;
    max_tokens?: number;
    modelId: string;
    p?: number;
    preamble?: string;
    presence_penalty?: number;
    prompt_truncation?: string;
    search_queries_only?: boolean;
    seed?: number;
    stop_sequences?: string[];
    stream?: boolean;
    system?: string;
    temperature?: number;
    tool_results?: CohereChatToolExecutionResult[];
    tools?: {
        description: string;
        name: string;
        parameter_definitions?: Record<string, {
            description?: string;
            required?: boolean;
            type: string;
        }>;
    }[];
}

Hierarchy

  • ModelRequestOptions
  • FewShotRequestOptions
    • CohereChatOptions

Properties

$prompt
+CohereChatOptions | generative-ts - v0.1.0-alpha.7

Interface CohereChatOptions

interface CohereChatOptions {
    $prompt: string;
    chat_history?: CohereChatHistoryItem[];
    citation_quality?: string;
    conversation_id?: string;
    documents?: Record<string, string>[];
    examplePairs?: {
        assistant: string;
        user: string;
    }[];
    force_single_step?: boolean;
    frequency_penalty?: number;
    k?: number;
    max_input_tokens?: number;
    max_tokens?: number;
    modelId: string;
    p?: number;
    preamble?: string;
    presence_penalty?: number;
    prompt_truncation?: string;
    search_queries_only?: boolean;
    seed?: number;
    stop_sequences?: string[];
    stream?: boolean;
    system?: string;
    temperature?: number;
    tool_results?: CohereChatToolExecutionResult[];
    tools?: {
        description: string;
        name: string;
        parameter_definitions?: Record<string, {
            description?: string;
            required?: boolean;
            type: string;
        }>;
    }[];
}

Hierarchy

  • ModelRequestOptions
  • FewShotRequestOptions
    • CohereChatOptions

Properties

$prompt: string
chat_history?: CohereChatHistoryItem[]
citation_quality?: string
conversation_id?: string
documents?: Record<string, string>[]
examplePairs?: {
    assistant: string;
    user: string;
}[]

Type declaration

  • assistant: string
  • user: string
force_single_step?: boolean
frequency_penalty?: number
k?: number
max_input_tokens?: number
max_tokens?: number
modelId: string
p?: number
preamble?: string
presence_penalty?: number
prompt_truncation?: string
search_queries_only?: boolean
seed?: number
stop_sequences?: string[]
stream?: boolean
system?: string
temperature?: number
tool_results?: CohereChatToolExecutionResult[]
tools?: {
    description: string;
    name: string;
    parameter_definitions?: Record<string, {
        description?: string;
        required?: boolean;
        type: string;
    }>;
}[]

Type declaration

  • description: string
  • name: string
  • Optional parameter_definitions?: Record<string, {
        description?: string;
        required?: boolean;
        type: string;
    }>
\ No newline at end of file +

Properties

$prompt: string
chat_history?: CohereChatHistoryItem[]
citation_quality?: string
conversation_id?: string
documents?: Record<string, string>[]
examplePairs?: {
    assistant: string;
    user: string;
}[]

Type declaration

  • assistant: string
  • user: string
force_single_step?: boolean
frequency_penalty?: number
k?: number
max_input_tokens?: number
max_tokens?: number
modelId: string
p?: number
preamble?: string
presence_penalty?: number
prompt_truncation?: string
search_queries_only?: boolean
seed?: number
stop_sequences?: string[]
stream?: boolean
system?: string
temperature?: number
tool_results?: CohereChatToolExecutionResult[]
tools?: {
    description: string;
    name: string;
    parameter_definitions?: Record<string, {
        description?: string;
        required?: boolean;
        type: string;
    }>;
}[]

Type declaration

  • description: string
  • name: string
  • Optional parameter_definitions?: Record<string, {
        description?: string;
        required?: boolean;
        type: string;
    }>
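
For reference, a small object exercising a few of the documented fields, assuming the interface is exported from the package root; all values are illustrative:

import type { CohereChatOptions } from "generative-ts";

const options: CohereChatOptions = {
  modelId: "command-r-plus",
  $prompt: "Who won the 1986 World Series?",
  preamble: "You are a concise baseball historian.",
  examplePairs: [
    { user: "Who won the 1969 World Series?", assistant: "The New York Mets." },
  ],
  temperature: 0.3,
  max_tokens: 120,
};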
\ No newline at end of file
diff --git a/docs/interfaces/CohereChatResponse.html b/docs/interfaces/CohereChatResponse.html
index d830609..e32aa7b 100644
--- a/docs/interfaces/CohereChatResponse.html
+++ b/docs/interfaces/CohereChatResponse.html
@@ -1,4 +1,4 @@
-CohereChatResponse | generative-ts - v0.1.0-alpha.6
generative-ts

Interface CohereChatResponse

interface CohereChatResponse {
    chat_history: ({
        role: string;
    } & {
        message?: string;
        tool_calls?: {
            name: string;
            parameters: {};
        }[];
        tool_results?: {
            call: {
                name: string;
                parameters: {};
            };
            outputs: {}[];
        }[];
    })[];
    citations?: {
        document_ids: string[];
        end: number;
        start: number;
        text: string;
    }[];
    documents?: {}[];
    finish_reason: string;
    generation_id: string;
    is_search_required?: boolean;
    meta: {
        api_version: {
            version: string;
        } & {
            is_deprecated?: boolean;
            is_experimental?: boolean;
        };
        billed_units: {
            input_tokens: number;
            output_tokens: number;
        } & {
            classifications?: number;
            search_units?: number;
        };
        tokens: {
            output_tokens: number;
        } & {
            input_tokens?: number;
        };
    } & {
        warnings?: string[];
    };
    response_id?: string;
    search_queries?: {
        generation_id: string;
        text: string;
    }[];
    search_results?: {
        connector: {
            id: string;
        };
        continue_on_failure: boolean;
        document_ids: string[];
        error_message: string;
        search_query: {
            generation_id: string;
            text: string;
        };
    }[];
    text: string;
    tool_calls?: {
        name: string;
        parameters: {};
    }[];
}

Hierarchy

  • TypeOf<typeof CohereChatResponseCodec>
    • CohereChatResponse

Properties

chat_history
+CohereChatResponse | generative-ts - v0.1.0-alpha.7

Interface CohereChatResponse

interface CohereChatResponse {
    chat_history: ({
        role: string;
    } & {
        message?: string;
        tool_calls?: {
            name: string;
            parameters: {};
        }[];
        tool_results?: {
            call: {
                name: string;
                parameters: {};
            };
            outputs: {}[];
        }[];
    })[];
    citations?: {
        document_ids: string[];
        end: number;
        start: number;
        text: string;
    }[];
    documents?: {}[];
    finish_reason: string;
    generation_id: string;
    is_search_required?: boolean;
    meta: {
        api_version: {
            version: string;
        } & {
            is_deprecated?: boolean;
            is_experimental?: boolean;
        };
        billed_units: {
            input_tokens: number;
            output_tokens: number;
        } & {
            classifications?: number;
            search_units?: number;
        };
        tokens: {
            output_tokens: number;
        } & {
            input_tokens?: number;
        };
    } & {
        warnings?: string[];
    };
    response_id?: string;
    search_queries?: {
        generation_id: string;
        text: string;
    }[];
    search_results?: {
        connector: {
            id: string;
        };
        continue_on_failure: boolean;
        document_ids: string[];
        error_message: string;
        search_query: {
            generation_id: string;
            text: string;
        };
    }[];
    text: string;
    tool_calls?: {
        name: string;
        parameters: {};
    }[];
}

Hierarchy

  • TypeOf<typeof CohereChatResponseCodec>
    • CohereChatResponse

Properties

chat_history: ({
    role: string;
} & {
    message?: string;
    tool_calls?: {
        name: string;
        parameters: {};
    }[];
    tool_results?: {
        call: {
            name: string;
            parameters: {};
        };
        outputs: {}[];
    }[];
})[]
citations?: {
    document_ids: string[];
    end: number;
    start: number;
    text: string;
}[]

Type declaration

  • document_ids: string[]
  • end: number
  • start: number
  • text: string
documents?: {}[]

Type declaration

    finish_reason: string
    generation_id: string
    is_search_required?: boolean
    meta: {
        api_version: {
            version: string;
        } & {
            is_deprecated?: boolean;
            is_experimental?: boolean;
        };
        billed_units: {
            input_tokens: number;
            output_tokens: number;
        } & {
            classifications?: number;
            search_units?: number;
        };
        tokens: {
            output_tokens: number;
        } & {
            input_tokens?: number;
        };
    } & {
        warnings?: string[];
    }

    Type declaration

    • api_version: {
          version: string;
      } & {
          is_deprecated?: boolean;
          is_experimental?: boolean;
      }
    • billed_units: {
          input_tokens: number;
          output_tokens: number;
      } & {
          classifications?: number;
          search_units?: number;
      }
    • tokens: {
          output_tokens: number;
      } & {
          input_tokens?: number;
      }

    Type declaration

    • Optional warnings?: string[]
    response_id?: string
    search_queries?: {
        generation_id: string;
        text: string;
    }[]

    Type declaration

    • generation_id: string
    • text: string
    search_results?: {
        connector: {
            id: string;
        };
        continue_on_failure: boolean;
        document_ids: string[];
        error_message: string;
        search_query: {
            generation_id: string;
            text: string;
        };
    }[]

    Type declaration

    • connector: {
          id: string;
      }
      • id: string
    • continue_on_failure: boolean
    • document_ids: string[]
    • error_message: string
    • search_query: {
          generation_id: string;
          text: string;
      }
      • generation_id: string
      • text: string
    text: string
    tool_calls?: {
        name: string;
        parameters: {};
    }[]

    Type declaration

    • name: string
    • parameters: {}
      \ No newline at end of file +

      Properties

      chat_history: ({
          role: string;
      } & {
          message?: string;
          tool_calls?: {
              name: string;
              parameters: {};
          }[];
          tool_results?: {
              call: {
                  name: string;
                  parameters: {};
              };
              outputs: {}[];
          }[];
      })[]
      citations?: {
          document_ids: string[];
          end: number;
          start: number;
          text: string;
      }[]

      Type declaration

      • document_ids: string[]
      • end: number
      • start: number
      • text: string
      documents?: {}[]

      Type declaration

        finish_reason: string
        generation_id: string
        is_search_required?: boolean
        meta: {
            api_version: {
                version: string;
            } & {
                is_deprecated?: boolean;
                is_experimental?: boolean;
            };
            billed_units: {
                input_tokens: number;
                output_tokens: number;
            } & {
                classifications?: number;
                search_units?: number;
            };
            tokens: {
                output_tokens: number;
            } & {
                input_tokens?: number;
            };
        } & {
            warnings?: string[];
        }

        Type declaration

        • api_version: {
              version: string;
          } & {
              is_deprecated?: boolean;
              is_experimental?: boolean;
          }
        • billed_units: {
              input_tokens: number;
              output_tokens: number;
          } & {
              classifications?: number;
              search_units?: number;
          }
        • tokens: {
              output_tokens: number;
          } & {
              input_tokens?: number;
          }

        Type declaration

        • Optional warnings?: string[]
        response_id?: string
        search_queries?: {
            generation_id: string;
            text: string;
        }[]

        Type declaration

        • generation_id: string
        • text: string
        search_results?: {
            connector: {
                id: string;
            };
            continue_on_failure: boolean;
            document_ids: string[];
            error_message: string;
            search_query: {
                generation_id: string;
                text: string;
            };
        }[]

        Type declaration

        • connector: {
              id: string;
          }
          • id: string
        • continue_on_failure: boolean
        • document_ids: string[]
        • error_message: string
        • search_query: {
              generation_id: string;
              text: string;
          }
          • generation_id: string
          • text: string
        text: string
        tool_calls?: {
            name: string;
            parameters: {};
        }[]

        Type declaration

        • name: string
        • parameters: {}
\ No newline at end of file
diff --git a/docs/interfaces/CohereGenerateApi.html b/docs/interfaces/CohereGenerateApi.html
index 974e1b0..113076f 100644
--- a/docs/interfaces/CohereGenerateApi.html
+++ b/docs/interfaces/CohereGenerateApi.html
@@ -1,4 +1,4 @@
-CohereGenerateApi | generative-ts - v0.1.0-alpha.6
          generative-ts

          Interface CohereGenerateApi

          interface CohereGenerateApi {
              name?: string;
              requestTemplate: Template<CohereGenerateOptions>;
              responseGuard: ((response) => response is CohereGenerateResponse);
          }

          Hierarchy

          Properties

          name?
+CohereGenerateApi | generative-ts - v0.1.0-alpha.7

          Interface CohereGenerateApi

          interface CohereGenerateApi {
              name?: string;
              requestTemplate: Template<CohereGenerateOptions>;
              responseGuard: ((response) => response is CohereGenerateResponse);
          }

          Hierarchy

          Properties

          name?: string
          requestTemplate: Template<CohereGenerateOptions>
          responseGuard: ((response) => response is CohereGenerateResponse)

          Type declaration

          \ No newline at end of file +

          Properties

          name?: string
          requestTemplate: Template<CohereGenerateOptions>
          responseGuard: ((response) => response is CohereGenerateResponse)

          Type declaration

\ No newline at end of file
diff --git a/docs/interfaces/CohereGenerateOptions.html b/docs/interfaces/CohereGenerateOptions.html
index e30bc68..b8ba9f8 100644
--- a/docs/interfaces/CohereGenerateOptions.html
+++ b/docs/interfaces/CohereGenerateOptions.html
@@ -1,4 +1,4 @@
-CohereGenerateOptions | generative-ts - v0.1.0-alpha.6
          generative-ts

          Interface CohereGenerateOptions

          interface CohereGenerateOptions {
              $prompt: string;
              end_sequences?: string[];
              frequency_penalty?: number;
              k?: number;
              logit_bias?: {
                  [token_id: number]: number;
              };
              max_tokens?: number;
              modelId: string;
              num_generations?: number;
              p?: number;
              presence_penalty?: number;
              preset?: string;
              return_likelihoods?: "NONE" | "GENERATION" | "ALL";
              seed?: number;
              stop_sequences?: string[];
              stream?: boolean;
              temperature?: number;
              truncate?: "NONE" | "START" | "END";
          }

          Hierarchy

          • ModelRequestOptions
            • CohereGenerateOptions

          Properties

          $prompt
+CohereGenerateOptions | generative-ts - v0.1.0-alpha.7

          Interface CohereGenerateOptions

          interface CohereGenerateOptions {
              $prompt: string;
              end_sequences?: string[];
              frequency_penalty?: number;
              k?: number;
              logit_bias?: {
                  [token_id: number]: number;
              };
              max_tokens?: number;
              modelId: string;
              num_generations?: number;
              p?: number;
              presence_penalty?: number;
              preset?: string;
              return_likelihoods?: "NONE" | "GENERATION" | "ALL";
              seed?: number;
              stop_sequences?: string[];
              stream?: boolean;
              temperature?: number;
              truncate?: "NONE" | "START" | "END";
          }

          Hierarchy

          • ModelRequestOptions
            • CohereGenerateOptions

          Properties

          $prompt: string
          end_sequences?: string[]
          frequency_penalty?: number
          k?: number
          logit_bias?: {
              [token_id: number]: number;
          }

          Type declaration

          • [token_id: number]: number
          max_tokens?: number
          modelId: string
          num_generations?: number
          p?: number
          presence_penalty?: number
          preset?: string
          return_likelihoods?: "NONE" | "GENERATION" | "ALL"
          seed?: number
          stop_sequences?: string[]
          stream?: boolean
          temperature?: number
          truncate?: "NONE" | "START" | "END"
          \ No newline at end of file +

          Properties

          $prompt: string
          end_sequences?: string[]
          frequency_penalty?: number
          k?: number
          logit_bias?: {
              [token_id: number]: number;
          }

          Type declaration

          • [token_id: number]: number
          max_tokens?: number
          modelId: string
          num_generations?: number
          p?: number
          presence_penalty?: number
          preset?: string
          return_likelihoods?: "NONE" | "GENERATION" | "ALL"
          seed?: number
          stop_sequences?: string[]
          stream?: boolean
          temperature?: number
          truncate?: "NONE" | "START" | "END"
\ No newline at end of file
diff --git a/docs/interfaces/CohereGenerateResponse.html b/docs/interfaces/CohereGenerateResponse.html
index ec5a6ce..057d02d 100644
--- a/docs/interfaces/CohereGenerateResponse.html
+++ b/docs/interfaces/CohereGenerateResponse.html
@@ -1,5 +1,5 @@
-CohereGenerateResponse | generative-ts - v0.1.0-alpha.6
          generative-ts

          Interface CohereGenerateResponse

          interface CohereGenerateResponse {
              generations: {
                  finish_reason: string;
                  id: string;
                  text: string;
              }[];
              id: string;
              meta?: {
                  api_version: {
                      version: string;
                  };
                  billed_units: {
                      input_tokens: number;
                      output_tokens: number;
                  };
              };
              prompt: string;
          }

          Hierarchy

          • TypeOf<typeof CohereGenerateResponseCodec>
            • CohereGenerateResponse

          Properties

          generations
+CohereGenerateResponse | generative-ts - v0.1.0-alpha.7

          Interface CohereGenerateResponse

          interface CohereGenerateResponse {
              generations: {
                  finish_reason: string;
                  id: string;
                  text: string;
              }[];
              id: string;
              meta?: {
                  api_version: {
                      version: string;
                  };
                  billed_units: {
                      input_tokens: number;
                      output_tokens: number;
                  };
              };
              prompt: string;
          }

          Hierarchy

          • TypeOf<typeof CohereGenerateResponseCodec>
            • CohereGenerateResponse

          Properties

          generations: {
              finish_reason: string;
              id: string;
              text: string;
          }[]

          Type declaration

          • finish_reason: string
          • id: string
          • text: string
          id: string
          meta?: {
              api_version: {
                  version: string;
              };
              billed_units: {
                  input_tokens: number;
                  output_tokens: number;
              };
          }

          Type declaration

          • api_version: {
                version: string;
            }
            • version: string
          • billed_units: {
                input_tokens: number;
                output_tokens: number;
            }
            • input_tokens: number
            • output_tokens: number
          prompt: string
          \ No newline at end of file +

          Properties

          generations: {
              finish_reason: string;
              id: string;
              text: string;
          }[]

          Type declaration

          • finish_reason: string
          • id: string
          • text: string
          id: string
          meta?: {
              api_version: {
                  version: string;
              };
              billed_units: {
                  input_tokens: number;
                  output_tokens: number;
              };
          }

          Type declaration

          • api_version: {
                version: string;
            }
            • version: string
          • billed_units: {
                input_tokens: number;
                output_tokens: number;
            }
            • input_tokens: number
            • output_tokens: number
          prompt: string
\ No newline at end of file
diff --git a/docs/interfaces/GoogleGeminiApi.html b/docs/interfaces/GoogleGeminiApi.html
index dda498a..02ba148 100644
--- a/docs/interfaces/GoogleGeminiApi.html
+++ b/docs/interfaces/GoogleGeminiApi.html
@@ -1,4 +1,4 @@
-GoogleGeminiApi | generative-ts - v0.1.0-alpha.6
          generative-ts

          Interface GoogleGeminiApi

          interface GoogleGeminiApi {
              name?: string;
              requestTemplate: Template<GoogleGeminiOptions>;
              responseGuard: ((response) => response is GoogleGeminiResponse);
          }

          Hierarchy

          Properties

          name?
+GoogleGeminiApi | generative-ts - v0.1.0-alpha.7

          Interface GoogleGeminiApi

          interface GoogleGeminiApi {
              name?: string;
              requestTemplate: Template<GoogleGeminiOptions>;
              responseGuard: ((response) => response is GoogleGeminiResponse);
          }

          Hierarchy

          Properties

          name?: string
          requestTemplate: Template<GoogleGeminiOptions>
          responseGuard: ((response) => response is GoogleGeminiResponse)

          Type declaration

          \ No newline at end of file +

          Properties

          name?: string
          requestTemplate: Template<GoogleGeminiOptions>
          responseGuard: ((response) => response is GoogleGeminiResponse)

          Type declaration

\ No newline at end of file
diff --git a/docs/interfaces/GoogleGeminiOptions.html b/docs/interfaces/GoogleGeminiOptions.html
index ddfa4a6..4fd6eb2 100644
--- a/docs/interfaces/GoogleGeminiOptions.html
+++ b/docs/interfaces/GoogleGeminiOptions.html
@@ -1,4 +1,4 @@
-GoogleGeminiOptions | generative-ts - v0.1.0-alpha.6
          generative-ts

          Interface GoogleGeminiOptions

          interface GoogleGeminiOptions {
              $prompt: string;
              $tools?: ToolDescriptor<{
                  [key: string]: unknown;
              }, unknown>[];
              contents?: GoogleGeminiContentItem | GoogleGeminiContentItem[];
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[];
              generation_config?: {
                  candidate_count?: number;
                  frequency_penalty?: number;
                  max_output_tokens?: number;
                  presence_penalty?: number;
                  response_mime_type?: "text/plain" | "application/json";
                  stop_sequences?: string[];
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              };
              modelId: string;
              safety_settings?: {
                  category?: string;
                  max_influential_terms?: number;
                  method?: string;
                  threshold?: string;
              };
              system?: string;
              system_instruction?: GoogleGeminiContentItem;
              tools?: {
                  function_declarations: {
                      description?: string;
                      name: string;
                      parameters?: GoogleGeminiSchema;
                      response?: GoogleGeminiSchema;
                  }[];
              }[];
              tools_config?: {
                  allowed_function_names?: string[];
                  mode?: "NONE" | "AUTO" | "ANY";
              };
          }

          Hierarchy

          • ModelRequestOptions
          • FewShotRequestOptions
          • ToolUseRequestOptions
          • GoogleGeminiToolsOptions
            • GoogleGeminiOptions

          Properties

          $prompt
+GoogleGeminiOptions | generative-ts - v0.1.0-alpha.7

          Interface GoogleGeminiOptions

          interface GoogleGeminiOptions {
              $prompt: string;
              $tools?: ToolDescriptor<{
                  [key: string]: unknown;
              }, unknown>[];
              contents?: GoogleGeminiContentItem | GoogleGeminiContentItem[];
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[];
              generation_config?: {
                  candidate_count?: number;
                  frequency_penalty?: number;
                  max_output_tokens?: number;
                  presence_penalty?: number;
                  response_mime_type?: "text/plain" | "application/json";
                  stop_sequences?: string[];
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              };
              modelId: string;
              safety_settings?: {
                  category?: string;
                  max_influential_terms?: number;
                  method?: string;
                  threshold?: string;
              };
              system?: string;
              system_instruction?: GoogleGeminiContentItem;
              tools?: {
                  function_declarations: {
                      description?: string;
                      name: string;
                      parameters?: GoogleGeminiSchema;
                      response?: GoogleGeminiSchema;
                  }[];
              }[];
              tools_config?: {
                  allowed_function_names?: string[];
                  mode?: "NONE" | "AUTO" | "ANY";
              };
          }

          Hierarchy

          • ModelRequestOptions
          • FewShotRequestOptions
          • ToolUseRequestOptions
          • GoogleGeminiToolsOptions
            • GoogleGeminiOptions

          Properties

          $prompt: string
          $tools?: ToolDescriptor<{
              [key: string]: unknown;
          }, unknown>[]
          contents?: GoogleGeminiContentItem | GoogleGeminiContentItem[]
          examplePairs?: {
              assistant: string;
              user: string;
          }[]

          Type declaration

          • assistant: string
          • user: string
          generation_config?: {
              candidate_count?: number;
              frequency_penalty?: number;
              max_output_tokens?: number;
              presence_penalty?: number;
              response_mime_type?: "text/plain" | "application/json";
              stop_sequences?: string[];
              temperature?: number;
              top_k?: number;
              top_p?: number;
          }

          Type declaration

          • Optional candidate_count?: number
          • Optional frequency_penalty?: number
          • Optional max_output_tokens?: number
          • Optional presence_penalty?: number
          • Optional response_mime_type?: "text/plain" | "application/json"
          • Optional stop_sequences?: string[]
          • Optional temperature?: number
          • Optional top_k?: number
          • Optional top_p?: number
          modelId: string
          safety_settings?: {
              category?: string;
              max_influential_terms?: number;
              method?: string;
              threshold?: string;
          }

          Type declaration

          • Optional category?: string
          • Optional max_influential_terms?: number
          • Optional method?: string
          • Optional threshold?: string
          system?: string
          system_instruction?: GoogleGeminiContentItem
          tools?: {
              function_declarations: {
                  description?: string;
                  name: string;
                  parameters?: GoogleGeminiSchema;
                  response?: GoogleGeminiSchema;
              }[];
          }[]

          Type declaration

          • function_declarations: {
                description?: string;
                name: string;
                parameters?: GoogleGeminiSchema;
                response?: GoogleGeminiSchema;
            }[]
          tools_config?: {
              allowed_function_names?: string[];
              mode?: "NONE" | "AUTO" | "ANY";
          }

          Type declaration

          • Optional allowed_function_names?: string[]
          • Optional mode?: "NONE" | "AUTO" | "ANY"
          \ No newline at end of file +

          Properties

          $prompt: string
          $tools?: ToolDescriptor<{
              [key: string]: unknown;
          }, unknown>[]
          contents?: GoogleGeminiContentItem | GoogleGeminiContentItem[]
          examplePairs?: {
              assistant: string;
              user: string;
          }[]

          Type declaration

          • assistant: string
          • user: string
          generation_config?: {
              candidate_count?: number;
              frequency_penalty?: number;
              max_output_tokens?: number;
              presence_penalty?: number;
              response_mime_type?: "text/plain" | "application/json";
              stop_sequences?: string[];
              temperature?: number;
              top_k?: number;
              top_p?: number;
          }

          Type declaration

          • Optional candidate_count?: number
          • Optional frequency_penalty?: number
          • Optional max_output_tokens?: number
          • Optional presence_penalty?: number
          • Optional response_mime_type?: "text/plain" | "application/json"
          • Optional stop_sequences?: string[]
          • Optional temperature?: number
          • Optional top_k?: number
          • Optional top_p?: number
          modelId: string
          safety_settings?: {
              category?: string;
              max_influential_terms?: number;
              method?: string;
              threshold?: string;
          }

          Type declaration

          • Optional category?: string
          • Optional max_influential_terms?: number
          • Optional method?: string
          • Optional threshold?: string
          system?: string
          system_instruction?: GoogleGeminiContentItem
          tools?: {
              function_declarations: {
                  description?: string;
                  name: string;
                  parameters?: GoogleGeminiSchema;
                  response?: GoogleGeminiSchema;
              }[];
          }[]

          Type declaration

          • function_declarations: {
                description?: string;
                name: string;
                parameters?: GoogleGeminiSchema;
                response?: GoogleGeminiSchema;
            }[]
          tools_config?: {
              allowed_function_names?: string[];
              mode?: "NONE" | "AUTO" | "ANY";
          }

          Type declaration

          • Optional allowed_function_names?: string[]
          • Optional mode?: "NONE" | "AUTO" | "ANY"
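          Read as a request-options shape, the interface above maps directly onto an object literal. A minimal sketch, assuming GoogleGeminiOptions is exported from the "generative-ts" package root (the import path and the model id are assumptions, not taken from this page):

```ts
import type { GoogleGeminiOptions } from "generative-ts"; // assumed export path

const geminiOptions: GoogleGeminiOptions = {
  modelId: "gemini-1.0-pro", // hypothetical model id
  $prompt: "Summarize the attached meeting notes in three bullet points.",
  system: "You are a terse, factual assistant.",
  examplePairs: [
    { user: "Summarize: we met, we agreed.", assistant: "- Met\n- Agreed" },
  ],
  generation_config: {
    temperature: 0.2,
    max_output_tokens: 256,
    response_mime_type: "text/plain",
    stop_sequences: ["\n\n"],
  },
  tools_config: {
    mode: "AUTO",
  },
};
```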
          \ No newline at end of file diff --git a/docs/interfaces/GoogleGeminiResponse.html b/docs/interfaces/GoogleGeminiResponse.html index 7d68b5e..b4193af 100644 --- a/docs/interfaces/GoogleGeminiResponse.html +++ b/docs/interfaces/GoogleGeminiResponse.html @@ -1,5 +1,5 @@ -GoogleGeminiResponse | generative-ts - v0.1.0-alpha.6
          generative-ts

          Interface GoogleGeminiResponse

          interface GoogleGeminiResponse {
              data: {
                  candidates: ({
                      finishReason: string;
                  } & {
                      citationMetadata?: {
                          citations: {
                              endIndex?: number;
                              startIndex?: number;
                              uri?: string;
                          }[];
                      };
                      content?: {
                          parts: {
                              functionCall?: {
                                  args: {};
                                  name: string;
                              };
                              text?: string;
                          }[];
                          role: string;
                      };
                      safetyRatings?: {
                          category: string;
                          probability: string;
                          probabilityScore: number;
                          severity: string;
                          severityScore: number;
                      }[];
                  })[];
                  usageMetadata: {
                      candidatesTokenCount: number;
                      promptTokenCount: number;
                      totalTokenCount: number;
                  };
              };
              headers: {};
              status: number;
              statusText: string;
          }

          Hierarchy

          • TypeOf<typeof GoogleGeminiResponseCodec>
            • GoogleGeminiResponse

          Properties

          data +GoogleGeminiResponse | generative-ts - v0.1.0-alpha.7

          Interface GoogleGeminiResponse

          interface GoogleGeminiResponse {
              data: {
                  candidates: ({
                      finishReason: string;
                  } & {
                      citationMetadata?: {
                          citations: {
                              endIndex?: number;
                              startIndex?: number;
                              uri?: string;
                          }[];
                      };
                      content?: {
                          parts: {
                              functionCall?: {
                                  args: {};
                                  name: string;
                              };
                              text?: string;
                          }[];
                          role: string;
                      };
                      safetyRatings?: {
                          category: string;
                          probability: string;
                          probabilityScore: number;
                          severity: string;
                          severityScore: number;
                      }[];
                  })[];
                  usageMetadata: {
                      candidatesTokenCount: number;
                      promptTokenCount: number;
                      totalTokenCount: number;
                  };
              };
              headers: {};
              status: number;
              statusText: string;
          }

          Hierarchy

          • TypeOf<typeof GoogleGeminiResponseCodec>
            • GoogleGeminiResponse

          Properties

          data: {
              candidates: ({
                  finishReason: string;
              } & {
                  citationMetadata?: {
                      citations: {
                          endIndex?: number;
                          startIndex?: number;
                          uri?: string;
                      }[];
                  };
                  content?: {
                      parts: {
                          functionCall?: {
                              args: {};
                              name: string;
                          };
                          text?: string;
                      }[];
                      role: string;
                  };
                  safetyRatings?: {
                      category: string;
                      probability: string;
                      probabilityScore: number;
                      severity: string;
                      severityScore: number;
                  }[];
              })[];
              usageMetadata: {
                  candidatesTokenCount: number;
                  promptTokenCount: number;
                  totalTokenCount: number;
              };
          }

          Type declaration

          • candidates: ({
                finishReason: string;
            } & {
                citationMetadata?: {
                    citations: {
                        endIndex?: number;
                        startIndex?: number;
                        uri?: string;
                    }[];
                };
                content?: {
                    parts: {
                        functionCall?: {
                            args: {};
                            name: string;
                        };
                        text?: string;
                    }[];
                    role: string;
                };
                safetyRatings?: {
                    category: string;
                    probability: string;
                    probabilityScore: number;
                    severity: string;
                    severityScore: number;
                }[];
            })[]
          • usageMetadata: {
                candidatesTokenCount: number;
                promptTokenCount: number;
                totalTokenCount: number;
            }
            • candidatesTokenCount: number
            • promptTokenCount: number
            • totalTokenCount: number
          headers: {}

          Type declaration

            status: number
            statusText: string
            \ No newline at end of file +

            Properties

            data: {
                candidates: ({
                    finishReason: string;
                } & {
                    citationMetadata?: {
                        citations: {
                            endIndex?: number;
                            startIndex?: number;
                            uri?: string;
                        }[];
                    };
                    content?: {
                        parts: {
                            functionCall?: {
                                args: {};
                                name: string;
                            };
                            text?: string;
                        }[];
                        role: string;
                    };
                    safetyRatings?: {
                        category: string;
                        probability: string;
                        probabilityScore: number;
                        severity: string;
                        severityScore: number;
                    }[];
                })[];
                usageMetadata: {
                    candidatesTokenCount: number;
                    promptTokenCount: number;
                    totalTokenCount: number;
                };
            }

            Type declaration

            • candidates: ({
                  finishReason: string;
              } & {
                  citationMetadata?: {
                      citations: {
                          endIndex?: number;
                          startIndex?: number;
                          uri?: string;
                      }[];
                  };
                  content?: {
                      parts: {
                          functionCall?: {
                              args: {};
                              name: string;
                          };
                          text?: string;
                      }[];
                      role: string;
                  };
                  safetyRatings?: {
                      category: string;
                      probability: string;
                      probabilityScore: number;
                      severity: string;
                      severityScore: number;
                  }[];
              })[]
            • usageMetadata: {
                  candidatesTokenCount: number;
                  promptTokenCount: number;
                  totalTokenCount: number;
              }
              • candidatesTokenCount: number
              • promptTokenCount: number
              • totalTokenCount: number
            headers: {}

            Type declaration

              status: number
              statusText: string
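          The nested data shape above is easier to work with through small accessors. A hedged sketch that reads the first candidate's text and the token usage from a value already known to satisfy GoogleGeminiResponse (no request is made here; the import path is an assumption):

```ts
import type { GoogleGeminiResponse } from "generative-ts"; // assumed export path

function firstCandidateText(response: GoogleGeminiResponse): string | undefined {
  const candidate = response.data.candidates[0];
  // content may be absent, and a part may hold a functionCall instead of text
  return candidate?.content?.parts.map((part) => part.text ?? "").join("");
}

function tokenTotals(response: GoogleGeminiResponse) {
  const { promptTokenCount, candidatesTokenCount, totalTokenCount } =
    response.data.usageMetadata;
  return { promptTokenCount, candidatesTokenCount, totalTokenCount };
}
```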
              \ No newline at end of file diff --git a/docs/interfaces/GroqAuthConfig.html b/docs/interfaces/GroqAuthConfig.html index 85a9cbc..52be39d 100644 --- a/docs/interfaces/GroqAuthConfig.html +++ b/docs/interfaces/GroqAuthConfig.html @@ -1,3 +1,3 @@ -GroqAuthConfig | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface GroqAuthConfig

              interface GroqAuthConfig {
                  GROQ_API_KEY: string;
              }

              Properties

              GROQ_API_KEY +GroqAuthConfig | generative-ts - v0.1.0-alpha.7

              Interface GroqAuthConfig

              interface GroqAuthConfig {
                  GROQ_API_KEY: string;
              }

              Properties

              Properties

              GROQ_API_KEY: string

              The Groq API key, used as a Bearer token

              \ No newline at end of file +
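              A small sketch of filling this one-field config from the environment; reading process.env assumes a Node-style runtime, and the import path is an assumption:

```ts
import type { GroqAuthConfig } from "generative-ts"; // assumed export path

const groqAuth: GroqAuthConfig = {
  GROQ_API_KEY: process.env.GROQ_API_KEY ?? "", // supplied to Groq as a Bearer token
};

if (!groqAuth.GROQ_API_KEY) {
  throw new Error("GROQ_API_KEY is not set");
}
```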
              \ No newline at end of file diff --git a/docs/interfaces/HfConversationalTaskApi.html b/docs/interfaces/HfConversationalTaskApi.html index 70d2402..0d29dbb 100644 --- a/docs/interfaces/HfConversationalTaskApi.html +++ b/docs/interfaces/HfConversationalTaskApi.html @@ -1,4 +1,4 @@ -HfConversationalTaskApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HfConversationalTaskApi

              interface HfConversationalTaskApi {
                  name?: string;
                  requestTemplate: Template<HfConversationalTaskOptions>;
                  responseGuard: ((response) => response is HfConversationalTaskResponse);
              }

              Hierarchy

              Properties

              name? +HfConversationalTaskApi | generative-ts - v0.1.0-alpha.7

              Interface HfConversationalTaskApi

              interface HfConversationalTaskApi {
                  name?: string;
                  requestTemplate: Template<HfConversationalTaskOptions>;
                  responseGuard: ((response) => response is HfConversationalTaskResponse);
              }

              Hierarchy

              Properties

              name?: string
              requestTemplate: Template<HfConversationalTaskOptions>
              responseGuard: ((response) => response is HfConversationalTaskResponse)

              Type declaration

              \ No newline at end of file +

              Properties

              name?: string
              requestTemplate: Template<HfConversationalTaskOptions>
              responseGuard: ((response) => response is HfConversationalTaskResponse)

              Type declaration

              \ No newline at end of file diff --git a/docs/interfaces/HfConversationalTaskOptions.html b/docs/interfaces/HfConversationalTaskOptions.html index b1107a9..bb3b616 100644 --- a/docs/interfaces/HfConversationalTaskOptions.html +++ b/docs/interfaces/HfConversationalTaskOptions.html @@ -1,7 +1,7 @@ -HfConversationalTaskOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HfConversationalTaskOptions

              interface HfConversationalTaskOptions {
                  $prompt: string;
                  generated_responses?: string[];
                  modelId: string;
                  options?: {
                      use_cache?: boolean;
                      wait_for_model?: boolean;
                  };
                  parameters?: {
                      max_length?: number;
                      max_time?: number;
                      min_length?: number;
                      repetition_penalty?: number;
                      temperature?: number;
                      top_k?: number;
                      top_p?: number;
                  };
                  past_user_inputs?: string[];
              }

              Hierarchy (view full)

              Properties

              $prompt +HfConversationalTaskOptions | generative-ts - v0.1.0-alpha.7

              Interface HfConversationalTaskOptions

              interface HfConversationalTaskOptions {
                  $prompt: string;
                  generated_responses?: string[];
                  modelId: string;
                  options?: {
                      use_cache?: boolean;
                      wait_for_model?: boolean;
                  };
                  parameters?: {
                      max_length?: number;
                      max_time?: number;
                      min_length?: number;
                      repetition_penalty?: number;
                      temperature?: number;
                      top_k?: number;
                      top_p?: number;
                  };
                  past_user_inputs?: string[];
              }

              Hierarchy (view full)

              Properties

              $prompt: string
              generated_responses?: string[]
              modelId: string
              options?: {
                  use_cache?: boolean;
                  wait_for_model?: boolean;
              }

              Type declaration

              • Optional use_cache?: boolean
              • Optional wait_for_model?: boolean
              parameters?: {
                  max_length?: number;
                  max_time?: number;
                  min_length?: number;
                  repetition_penalty?: number;
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              }

              Type declaration

              • Optional max_length?: number
              • Optional max_time?: number
              • Optional min_length?: number
              • Optional repetition_penalty?: number
              • Optional temperature?: number
              • Optional top_k?: number
              • Optional top_p?: number
              past_user_inputs?: string[]
              \ No newline at end of file +

              Properties

              $prompt: string
              generated_responses?: string[]
              modelId: string
              options?: {
                  use_cache?: boolean;
                  wait_for_model?: boolean;
              }

              Type declaration

              • Optional use_cache?: boolean
              • Optional wait_for_model?: boolean
              parameters?: {
                  max_length?: number;
                  max_time?: number;
                  min_length?: number;
                  repetition_penalty?: number;
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              }

              Type declaration

              • Optional max_length?: number
              • Optional max_time?: number
              • Optional min_length?: number
              • Optional repetition_penalty?: number
              • Optional temperature?: number
              • Optional top_k?: number
              • Optional top_p?: number
              past_user_inputs?: string[]
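              Prior turns go in past_user_inputs and generated_responses, while $prompt carries the new user message. A minimal sketch (import path and model id are assumptions):

```ts
import type { HfConversationalTaskOptions } from "generative-ts"; // assumed export path

const conversationOptions: HfConversationalTaskOptions = {
  modelId: "microsoft/DialoGPT-large", // hypothetical model id
  $prompt: "And what should I pack for it?",
  past_user_inputs: ["Where is a good place to hike in October?"],
  generated_responses: ["The Dolomites are usually pleasant in early October."],
  parameters: {
    max_length: 200,
    temperature: 0.7,
    repetition_penalty: 1.1,
  },
  options: {
    wait_for_model: true,
    use_cache: false,
  },
};
```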
              \ No newline at end of file diff --git a/docs/interfaces/HfConversationalTaskResponse.html b/docs/interfaces/HfConversationalTaskResponse.html index 37fa8b2..84e615c 100644 --- a/docs/interfaces/HfConversationalTaskResponse.html +++ b/docs/interfaces/HfConversationalTaskResponse.html @@ -1,4 +1,4 @@ -HfConversationalTaskResponse | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HfConversationalTaskResponse

              interface HfConversationalTaskResponse {
                  [unscopables]: {
                      [unscopables]?: boolean;
                      length?: boolean;
                      [iterator]?: any;
                      at?: any;
                      concat?: any;
                      copyWithin?: any;
                      entries?: any;
                      every?: any;
                      fill?: any;
                      filter?: any;
                      find?: any;
                      findIndex?: any;
                      flat?: any;
                      flatMap?: any;
                      forEach?: any;
                      includes?: any;
                      indexOf?: any;
                      join?: any;
                      keys?: any;
                      lastIndexOf?: any;
                      map?: any;
                      pop?: any;
                      push?: any;
                      reduce?: any;
                      reduceRight?: any;
                      reverse?: any;
                      shift?: any;
                      slice?: any;
                      some?: any;
                      sort?: any;
                      splice?: any;
                      toLocaleString?: any;
                      toString?: any;
                      unshift?: any;
                      values?: any;
                  };
                  length: number;
                  [iterator](): IterableIterator<{
                      generated_text: string;
                  }>;
                  at(index): undefined | {
                      generated_text: string;
                  };
                  concat(...items): {
                      generated_text: string;
                  }[];
                  concat(...items): {
                      generated_text: string;
                  }[];
                  copyWithin(target, start, end?): this;
                  entries(): IterableIterator<[number, {
                      generated_text: string;
                  }]>;
                  every<S>(predicate, thisArg?): this is S[];
                  every(predicate, thisArg?): boolean;
                  fill(value, start?, end?): this;
                  filter<S>(predicate, thisArg?): S[];
                  filter(predicate, thisArg?): {
                      generated_text: string;
                  }[];
                  find<S>(predicate, thisArg?): undefined | S;
                  find(predicate, thisArg?): undefined | {
                      generated_text: string;
                  };
                  findIndex(predicate, thisArg?): number;
                  flat<A, D>(this, depth?): FlatArray<A, D>[];
                  flatMap<U, This>(callback, thisArg?): U[];
                  forEach(callbackfn, thisArg?): void;
                  includes(searchElement, fromIndex?): boolean;
                  indexOf(searchElement, fromIndex?): number;
                  join(separator?): string;
                  keys(): IterableIterator<number>;
                  lastIndexOf(searchElement, fromIndex?): number;
                  map<U>(callbackfn, thisArg?): U[];
                  pop(): undefined | {
                      generated_text: string;
                  };
                  push(...items): number;
                  reduce(callbackfn): {
                      generated_text: string;
                  };
                  reduce(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduce<U>(callbackfn, initialValue): U;
                  reduceRight(callbackfn): {
                      generated_text: string;
                  };
                  reduceRight(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduceRight<U>(callbackfn, initialValue): U;
                  reverse(): {
                      generated_text: string;
                  }[];
                  shift(): undefined | {
                      generated_text: string;
                  };
                  slice(start?, end?): {
                      generated_text: string;
                  }[];
                  some(predicate, thisArg?): boolean;
                  sort(compareFn?): this;
                  splice(start, deleteCount?): {
                      generated_text: string;
                  }[];
                  splice(start, deleteCount, ...items): {
                      generated_text: string;
                  }[];
                  toLocaleString(): string;
                  toString(): string;
                  unshift(...items): number;
                  values(): IterableIterator<{
                      generated_text: string;
                  }>;
              }

              Hierarchy

              • TypeOf<typeof HfConversationalTaskResponseCodec>
                • HfConversationalTaskResponse

              Properties

              [unscopables] +HfConversationalTaskResponse | generative-ts - v0.1.0-alpha.7

              Interface HfConversationalTaskResponse

              interface HfConversationalTaskResponse {
                  [unscopables]: {
                      [unscopables]?: boolean;
                      length?: boolean;
                      [iterator]?: any;
                      at?: any;
                      concat?: any;
                      copyWithin?: any;
                      entries?: any;
                      every?: any;
                      fill?: any;
                      filter?: any;
                      find?: any;
                      findIndex?: any;
                      flat?: any;
                      flatMap?: any;
                      forEach?: any;
                      includes?: any;
                      indexOf?: any;
                      join?: any;
                      keys?: any;
                      lastIndexOf?: any;
                      map?: any;
                      pop?: any;
                      push?: any;
                      reduce?: any;
                      reduceRight?: any;
                      reverse?: any;
                      shift?: any;
                      slice?: any;
                      some?: any;
                      sort?: any;
                      splice?: any;
                      toLocaleString?: any;
                      toString?: any;
                      unshift?: any;
                      values?: any;
                  };
                  length: number;
                  [iterator](): IterableIterator<{
                      generated_text: string;
                  }>;
                  at(index): undefined | {
                      generated_text: string;
                  };
                  concat(...items): {
                      generated_text: string;
                  }[];
                  concat(...items): {
                      generated_text: string;
                  }[];
                  copyWithin(target, start, end?): this;
                  entries(): IterableIterator<[number, {
                      generated_text: string;
                  }]>;
                  every<S>(predicate, thisArg?): this is S[];
                  every(predicate, thisArg?): boolean;
                  fill(value, start?, end?): this;
                  filter<S>(predicate, thisArg?): S[];
                  filter(predicate, thisArg?): {
                      generated_text: string;
                  }[];
                  find<S>(predicate, thisArg?): undefined | S;
                  find(predicate, thisArg?): undefined | {
                      generated_text: string;
                  };
                  findIndex(predicate, thisArg?): number;
                  flat<A, D>(this, depth?): FlatArray<A, D>[];
                  flatMap<U, This>(callback, thisArg?): U[];
                  forEach(callbackfn, thisArg?): void;
                  includes(searchElement, fromIndex?): boolean;
                  indexOf(searchElement, fromIndex?): number;
                  join(separator?): string;
                  keys(): IterableIterator<number>;
                  lastIndexOf(searchElement, fromIndex?): number;
                  map<U>(callbackfn, thisArg?): U[];
                  pop(): undefined | {
                      generated_text: string;
                  };
                  push(...items): number;
                  reduce(callbackfn): {
                      generated_text: string;
                  };
                  reduce(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduce<U>(callbackfn, initialValue): U;
                  reduceRight(callbackfn): {
                      generated_text: string;
                  };
                  reduceRight(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduceRight<U>(callbackfn, initialValue): U;
                  reverse(): {
                      generated_text: string;
                  }[];
                  shift(): undefined | {
                      generated_text: string;
                  };
                  slice(start?, end?): {
                      generated_text: string;
                  }[];
                  some(predicate, thisArg?): boolean;
                  sort(compareFn?): this;
                  splice(start, deleteCount?): {
                      generated_text: string;
                  }[];
                  splice(start, deleteCount, ...items): {
                      generated_text: string;
                  }[];
                  toLocaleString(): string;
                  toString(): string;
                  unshift(...items): number;
                  values(): IterableIterator<{
                      generated_text: string;
                  }>;
              }

              Hierarchy

              • TypeOf<typeof HfConversationalTaskResponseCodec>
                • HfConversationalTaskResponse

              Properties

              Methods

              [iterator] at
              @@ -175,4 +175,4 @@

              Returns string

              • Inserts new elements at the start of an array, and returns the new length of the array.

                Parameters

                • Rest ...items: {
                      generated_text: string;
                  }[]

                  Elements to insert at the start of the array.

                Returns number

              • Returns an iterable of values in the array


                Returns IterableIterator<{
                    generated_text: string;
                }>

              \ No newline at end of file +

              Returns IterableIterator<{
                  generated_text: string;
              }>
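              Despite the long list of inherited array members, the useful shape here is simply an array of { generated_text }. A sketch of reading the first entry (import path assumed):

```ts
import type { HfConversationalTaskResponse } from "generative-ts"; // assumed export path

function firstGeneratedText(response: HfConversationalTaskResponse): string | undefined {
  return response.at(0)?.generated_text; // index 0 may be absent on an empty response
}
```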

              \ No newline at end of file diff --git a/docs/interfaces/HfInferenceApiOptions.html b/docs/interfaces/HfInferenceApiOptions.html index c07a8a4..f6ff099 100644 --- a/docs/interfaces/HfInferenceApiOptions.html +++ b/docs/interfaces/HfInferenceApiOptions.html @@ -1,4 +1,4 @@ -HfInferenceApiOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HfInferenceApiOptions

              interface HfInferenceApiOptions {
                  $prompt: string;
                  modelId: string;
                  options?: {
                      use_cache?: boolean;
                      wait_for_model?: boolean;
                  };
              }

              Hierarchy (view full)

              Properties

              $prompt +HfInferenceApiOptions | generative-ts - v0.1.0-alpha.7

              Interface HfInferenceApiOptions

              interface HfInferenceApiOptions {
                  $prompt: string;
                  modelId: string;
                  options?: {
                      use_cache?: boolean;
                      wait_for_model?: boolean;
                  };
              }

              Hierarchy (view full)

              Properties

              $prompt: string
              modelId: string
              options?: {
                  use_cache?: boolean;
                  wait_for_model?: boolean;
              }

              Type declaration

              • Optional use_cache?: boolean
              • Optional wait_for_model?: boolean
              \ No newline at end of file +

              Properties

              $prompt: string
              modelId: string
              options?: {
                  use_cache?: boolean;
                  wait_for_model?: boolean;
              }

              Type declaration

              • Optional use_cache?: boolean
              • Optional wait_for_model?: boolean
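              A minimal options literal for this base Hugging Face Inference API shape; the model id is illustrative and the import path is an assumption:

```ts
import type { HfInferenceApiOptions } from "generative-ts"; // assumed export path

const inferenceOptions: HfInferenceApiOptions = {
  modelId: "gpt2", // illustrative model id
  $prompt: "Once upon a time",
  options: {
    use_cache: true,      // allow a cached result for identical inputs
    wait_for_model: true, // wait for the model to load rather than failing fast
  },
};
```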
              \ No newline at end of file diff --git a/docs/interfaces/HfTextGenerationTaskApi.html b/docs/interfaces/HfTextGenerationTaskApi.html index 255fd6a..1be944f 100644 --- a/docs/interfaces/HfTextGenerationTaskApi.html +++ b/docs/interfaces/HfTextGenerationTaskApi.html @@ -1,4 +1,4 @@ -HfTextGenerationTaskApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HfTextGenerationTaskApi

              interface HfTextGenerationTaskApi {
                  name?: string;
                  requestTemplate: Template<HfTextGenerationTaskOptions>;
                  responseGuard: ((response) => response is HfTextGenerationTaskResponse);
              }

              Hierarchy

              Properties

              name? +HfTextGenerationTaskApi | generative-ts - v0.1.0-alpha.7

              Interface HfTextGenerationTaskApi

              interface HfTextGenerationTaskApi {
                  name?: string;
                  requestTemplate: Template<HfTextGenerationTaskOptions>;
                  responseGuard: ((response) => response is HfTextGenerationTaskResponse);
              }

              Hierarchy

              Properties

              name?: string
              requestTemplate: Template<HfTextGenerationTaskOptions>
              responseGuard: ((response) => response is HfTextGenerationTaskResponse)

              Type declaration

              \ No newline at end of file +

              Properties

              name?: string
              requestTemplate: Template<HfTextGenerationTaskOptions>
              responseGuard: ((response) => response is HfTextGenerationTaskResponse)

              Type declaration

              \ No newline at end of file diff --git a/docs/interfaces/HfTextGenerationTaskOptions.html b/docs/interfaces/HfTextGenerationTaskOptions.html index 86a52d1..88c029f 100644 --- a/docs/interfaces/HfTextGenerationTaskOptions.html +++ b/docs/interfaces/HfTextGenerationTaskOptions.html @@ -1,5 +1,5 @@ -HfTextGenerationTaskOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HfTextGenerationTaskOptions

              interface HfTextGenerationTaskOptions {
                  $prompt: string;
                  modelId: string;
                  options?: {
                      use_cache?: boolean;
                      wait_for_model?: boolean;
                  };
                  parameters?: {
                      do_sample?: boolean;
                      max_new_tokens?: number;
                      max_time?: number;
                      num_return_sequences?: number;
                      repetition_penalty?: number;
                      return_full_text?: boolean;
                      temperature?: number;
                      top_k?: number;
                      top_p?: number;
                  };
              }

              Hierarchy (view full)

              Properties

              $prompt +HfTextGenerationTaskOptions | generative-ts - v0.1.0-alpha.7

              Interface HfTextGenerationTaskOptions

              interface HfTextGenerationTaskOptions {
                  $prompt: string;
                  modelId: string;
                  options?: {
                      use_cache?: boolean;
                      wait_for_model?: boolean;
                  };
                  parameters?: {
                      do_sample?: boolean;
                      max_new_tokens?: number;
                      max_time?: number;
                      num_return_sequences?: number;
                      repetition_penalty?: number;
                      return_full_text?: boolean;
                      temperature?: number;
                      top_k?: number;
                      top_p?: number;
                  };
              }

              Hierarchy (view full)

              Properties

              $prompt: string
              modelId: string
              options?: {
                  use_cache?: boolean;
                  wait_for_model?: boolean;
              }

              Type declaration

              • Optional use_cache?: boolean
              • Optional wait_for_model?: boolean
              parameters?: {
                  do_sample?: boolean;
                  max_new_tokens?: number;
                  max_time?: number;
                  num_return_sequences?: number;
                  repetition_penalty?: number;
                  return_full_text?: boolean;
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              }

              Type declaration

              • Optional do_sample?: boolean
              • Optional max_new_tokens?: number
              • Optional max_time?: number
              • Optional num_return_sequences?: number
              • Optional repetition_penalty?: number
              • Optional return_full_text?: boolean
              • Optional temperature?: number
              • Optional top_k?: number
              • Optional top_p?: number
              \ No newline at end of file +

              Properties

              $prompt: string
              modelId: string
              options?: {
                  use_cache?: boolean;
                  wait_for_model?: boolean;
              }

              Type declaration

              • Optional use_cache?: boolean
              • Optional wait_for_model?: boolean
              parameters?: {
                  do_sample?: boolean;
                  max_new_tokens?: number;
                  max_time?: number;
                  num_return_sequences?: number;
                  repetition_penalty?: number;
                  return_full_text?: boolean;
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              }

              Type declaration

              • Optional do_sample?: boolean
              • Optional max_new_tokens?: number
              • Optional max_time?: number
              • Optional num_return_sequences?: number
              • Optional repetition_penalty?: number
              • Optional return_full_text?: boolean
              • Optional temperature?: number
              • Optional top_k?: number
              • Optional top_p?: number
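              The same pattern applies to the text-generation task options. A sketch (import path and model id are assumptions):

```ts
import type { HfTextGenerationTaskOptions } from "generative-ts"; // assumed export path

const textGenOptions: HfTextGenerationTaskOptions = {
  modelId: "mistralai/Mistral-7B-Instruct-v0.2", // hypothetical model id
  $prompt: "Explain what a type guard is in one paragraph.",
  parameters: {
    max_new_tokens: 120,
    temperature: 0.3,
    top_p: 0.9,
    do_sample: true,
    return_full_text: false, // return only the generated continuation
  },
  options: {
    wait_for_model: true,
  },
};
```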
              \ No newline at end of file diff --git a/docs/interfaces/HfTextGenerationTaskResponse.html b/docs/interfaces/HfTextGenerationTaskResponse.html index e87acbd..14a4a51 100644 --- a/docs/interfaces/HfTextGenerationTaskResponse.html +++ b/docs/interfaces/HfTextGenerationTaskResponse.html @@ -1,4 +1,4 @@ -HfTextGenerationTaskResponse | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HfTextGenerationTaskResponse

              interface HfTextGenerationTaskResponse {
                  [unscopables]: {
                      [unscopables]?: boolean;
                      length?: boolean;
                      [iterator]?: any;
                      at?: any;
                      concat?: any;
                      copyWithin?: any;
                      entries?: any;
                      every?: any;
                      fill?: any;
                      filter?: any;
                      find?: any;
                      findIndex?: any;
                      flat?: any;
                      flatMap?: any;
                      forEach?: any;
                      includes?: any;
                      indexOf?: any;
                      join?: any;
                      keys?: any;
                      lastIndexOf?: any;
                      map?: any;
                      pop?: any;
                      push?: any;
                      reduce?: any;
                      reduceRight?: any;
                      reverse?: any;
                      shift?: any;
                      slice?: any;
                      some?: any;
                      sort?: any;
                      splice?: any;
                      toLocaleString?: any;
                      toString?: any;
                      unshift?: any;
                      values?: any;
                  };
                  length: number;
                  [iterator](): IterableIterator<{
                      generated_text: string;
                  }>;
                  at(index): undefined | {
                      generated_text: string;
                  };
                  concat(...items): {
                      generated_text: string;
                  }[];
                  concat(...items): {
                      generated_text: string;
                  }[];
                  copyWithin(target, start, end?): this;
                  entries(): IterableIterator<[number, {
                      generated_text: string;
                  }]>;
                  every<S>(predicate, thisArg?): this is S[];
                  every(predicate, thisArg?): boolean;
                  fill(value, start?, end?): this;
                  filter<S>(predicate, thisArg?): S[];
                  filter(predicate, thisArg?): {
                      generated_text: string;
                  }[];
                  find<S>(predicate, thisArg?): undefined | S;
                  find(predicate, thisArg?): undefined | {
                      generated_text: string;
                  };
                  findIndex(predicate, thisArg?): number;
                  flat<A, D>(this, depth?): FlatArray<A, D>[];
                  flatMap<U, This>(callback, thisArg?): U[];
                  forEach(callbackfn, thisArg?): void;
                  includes(searchElement, fromIndex?): boolean;
                  indexOf(searchElement, fromIndex?): number;
                  join(separator?): string;
                  keys(): IterableIterator<number>;
                  lastIndexOf(searchElement, fromIndex?): number;
                  map<U>(callbackfn, thisArg?): U[];
                  pop(): undefined | {
                      generated_text: string;
                  };
                  push(...items): number;
                  reduce(callbackfn): {
                      generated_text: string;
                  };
                  reduce(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduce<U>(callbackfn, initialValue): U;
                  reduceRight(callbackfn): {
                      generated_text: string;
                  };
                  reduceRight(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduceRight<U>(callbackfn, initialValue): U;
                  reverse(): {
                      generated_text: string;
                  }[];
                  shift(): undefined | {
                      generated_text: string;
                  };
                  slice(start?, end?): {
                      generated_text: string;
                  }[];
                  some(predicate, thisArg?): boolean;
                  sort(compareFn?): this;
                  splice(start, deleteCount?): {
                      generated_text: string;
                  }[];
                  splice(start, deleteCount, ...items): {
                      generated_text: string;
                  }[];
                  toLocaleString(): string;
                  toString(): string;
                  unshift(...items): number;
                  values(): IterableIterator<{
                      generated_text: string;
                  }>;
              }

              Hierarchy

              • TypeOf<typeof HfTextGenerationTaskResponseCodec>
                • HfTextGenerationTaskResponse

              Properties

              [unscopables] +HfTextGenerationTaskResponse | generative-ts - v0.1.0-alpha.7

              Interface HfTextGenerationTaskResponse

              interface HfTextGenerationTaskResponse {
                  [unscopables]: {
                      [unscopables]?: boolean;
                      length?: boolean;
                      [iterator]?: any;
                      at?: any;
                      concat?: any;
                      copyWithin?: any;
                      entries?: any;
                      every?: any;
                      fill?: any;
                      filter?: any;
                      find?: any;
                      findIndex?: any;
                      flat?: any;
                      flatMap?: any;
                      forEach?: any;
                      includes?: any;
                      indexOf?: any;
                      join?: any;
                      keys?: any;
                      lastIndexOf?: any;
                      map?: any;
                      pop?: any;
                      push?: any;
                      reduce?: any;
                      reduceRight?: any;
                      reverse?: any;
                      shift?: any;
                      slice?: any;
                      some?: any;
                      sort?: any;
                      splice?: any;
                      toLocaleString?: any;
                      toString?: any;
                      unshift?: any;
                      values?: any;
                  };
                  length: number;
                  [iterator](): IterableIterator<{
                      generated_text: string;
                  }>;
                  at(index): undefined | {
                      generated_text: string;
                  };
                  concat(...items): {
                      generated_text: string;
                  }[];
                  concat(...items): {
                      generated_text: string;
                  }[];
                  copyWithin(target, start, end?): this;
                  entries(): IterableIterator<[number, {
                      generated_text: string;
                  }]>;
                  every<S>(predicate, thisArg?): this is S[];
                  every(predicate, thisArg?): boolean;
                  fill(value, start?, end?): this;
                  filter<S>(predicate, thisArg?): S[];
                  filter(predicate, thisArg?): {
                      generated_text: string;
                  }[];
                  find<S>(predicate, thisArg?): undefined | S;
                  find(predicate, thisArg?): undefined | {
                      generated_text: string;
                  };
                  findIndex(predicate, thisArg?): number;
                  flat<A, D>(this, depth?): FlatArray<A, D>[];
                  flatMap<U, This>(callback, thisArg?): U[];
                  forEach(callbackfn, thisArg?): void;
                  includes(searchElement, fromIndex?): boolean;
                  indexOf(searchElement, fromIndex?): number;
                  join(separator?): string;
                  keys(): IterableIterator<number>;
                  lastIndexOf(searchElement, fromIndex?): number;
                  map<U>(callbackfn, thisArg?): U[];
                  pop(): undefined | {
                      generated_text: string;
                  };
                  push(...items): number;
                  reduce(callbackfn): {
                      generated_text: string;
                  };
                  reduce(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduce<U>(callbackfn, initialValue): U;
                  reduceRight(callbackfn): {
                      generated_text: string;
                  };
                  reduceRight(callbackfn, initialValue): {
                      generated_text: string;
                  };
                  reduceRight<U>(callbackfn, initialValue): U;
                  reverse(): {
                      generated_text: string;
                  }[];
                  shift(): undefined | {
                      generated_text: string;
                  };
                  slice(start?, end?): {
                      generated_text: string;
                  }[];
                  some(predicate, thisArg?): boolean;
                  sort(compareFn?): this;
                  splice(start, deleteCount?): {
                      generated_text: string;
                  }[];
                  splice(start, deleteCount, ...items): {
                      generated_text: string;
                  }[];
                  toLocaleString(): string;
                  toString(): string;
                  unshift(...items): number;
                  values(): IterableIterator<{
                      generated_text: string;
                  }>;
              }

              Hierarchy

              • TypeOf<typeof HfTextGenerationTaskResponseCodec>
                • HfTextGenerationTaskResponse

              Properties

              Methods

              [iterator] at
              @@ -175,4 +175,4 @@

              Returns string

              • Inserts new elements at the start of an array, and returns the new length of the array.

                Parameters

                • Rest ...items: {
                      generated_text: string;
                  }[]

                  Elements to insert at the start of the array.

                Returns number

              • Returns an iterable of values in the array


                Returns IterableIterator<{
                    generated_text: string;
                }>

              \ No newline at end of file +

              Returns IterableIterator<{
                  generated_text: string;
              }>
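              As with the conversational task response, the shape that matters is an array of { generated_text }; with num_return_sequences greater than 1 there is one entry per returned sequence. A sketch (import path assumed):

```ts
import type { HfTextGenerationTaskResponse } from "generative-ts"; // assumed export path

function allGeneratedTexts(response: HfTextGenerationTaskResponse): string[] {
  return response.map((item) => item.generated_text);
}
```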

              \ No newline at end of file diff --git a/docs/interfaces/HttpClient.html b/docs/interfaces/HttpClient.html index 766a38b..e22d7c6 100644 --- a/docs/interfaces/HttpClient.html +++ b/docs/interfaces/HttpClient.html @@ -1,2 +1,2 @@ -HttpClient | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HttpClient<TCustomHttpClientRequestOptions>

              interface HttpClient<TCustomHttpClientRequestOptions> {
                  fetch(endpoint, request): Promise<unknown>;
              }

              Type Parameters

              • TCustomHttpClientRequestOptions = unknown

              Methods

              Methods

              \ No newline at end of file +HttpClient | generative-ts - v0.1.0-alpha.7
              generative-ts

              Interface HttpClient<TCustomHttpClientRequestOptions>

              interface HttpClient<TCustomHttpClientRequestOptions> {
                  fetch(endpoint, request): Promise<unknown>;
              }

              Type Parameters

              • TCustomHttpClientRequestOptions = unknown

              Methods

              Methods

              \ No newline at end of file diff --git a/docs/interfaces/HttpClientRequest.html b/docs/interfaces/HttpClientRequest.html index fca429e..c3d7f52 100644 --- a/docs/interfaces/HttpClientRequest.html +++ b/docs/interfaces/HttpClientRequest.html @@ -1,4 +1,4 @@ -HttpClientRequest | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HttpClientRequest

              interface HttpClientRequest {
                  body: string;
                  headers: Headers;
                  method: "POST";
              }

              Properties

              body +HttpClientRequest | generative-ts - v0.1.0-alpha.7

              Interface HttpClientRequest

              interface HttpClientRequest {
                  body: string;
                  headers: Headers;
                  method: "POST";
              }

              Properties

              Properties

              body: string
              headers: Headers
              method: "POST"
              \ No newline at end of file +

              Properties

              body: string
              headers: Headers
              method: "POST"
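              Together with the HttpClient interface above, HttpClientRequest describes what a custom transport has to handle. A hedged sketch of a fetch-based client follows; the exact parameter types of HttpClient.fetch are not spelled out on these pages, so the string endpoint is an assumption, as are the import path and the availability of a global fetch:

```ts
import type { HttpClientRequest } from "generative-ts"; // assumed export path

// Shaped like the HttpClient interface: fetch(endpoint, request) => Promise<unknown>
const fetchClient = {
  async fetch(endpoint: string, request: HttpClientRequest): Promise<unknown> {
    const res = await fetch(endpoint, {
      method: request.method,   // always "POST" per HttpClientRequest
      headers: request.headers, // a standard Headers object
      body: request.body,       // pre-serialized request body
    });
    return res.json();
  },
};
```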
              \ No newline at end of file diff --git a/docs/interfaces/HuggingfaceAuthConfig.html b/docs/interfaces/HuggingfaceAuthConfig.html index a3a6dab..c9143c9 100644 --- a/docs/interfaces/HuggingfaceAuthConfig.html +++ b/docs/interfaces/HuggingfaceAuthConfig.html @@ -1,3 +1,3 @@ -HuggingfaceAuthConfig | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface HuggingfaceAuthConfig

              interface HuggingfaceAuthConfig {
                  HUGGINGFACE_API_TOKEN: string;
              }

              Properties

              HUGGINGFACE_API_TOKEN +HuggingfaceAuthConfig | generative-ts - v0.1.0-alpha.7

              Interface HuggingfaceAuthConfig

              interface HuggingfaceAuthConfig {
                  HUGGINGFACE_API_TOKEN: string;
              }

              Properties

              HUGGINGFACE_API_TOKEN: string

              The Huggingface Inference API token, used as a Bearer token

              \ No newline at end of file +
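              Analogous to GroqAuthConfig above, this can be filled from the environment; reading process.env assumes a Node-style runtime, and the import path is an assumption:

```ts
import type { HuggingfaceAuthConfig } from "generative-ts"; // assumed export path

const hfAuth: HuggingfaceAuthConfig = {
  HUGGINGFACE_API_TOKEN: process.env.HUGGINGFACE_API_TOKEN ?? "", // sent as a Bearer token
};
```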
              \ No newline at end of file diff --git a/docs/interfaces/Llama2ChatApi.html b/docs/interfaces/Llama2ChatApi.html index b98153c..bf6930a 100644 --- a/docs/interfaces/Llama2ChatApi.html +++ b/docs/interfaces/Llama2ChatApi.html @@ -1,4 +1,4 @@ -Llama2ChatApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface Llama2ChatApi

              interface Llama2ChatApi {
                  name?: string;
                  requestTemplate: Template<Llama2ChatOptions>;
                  responseGuard: ((response) => response is LlamaResponse);
              }

              Hierarchy

              Properties

              name? +Llama2ChatApi | generative-ts - v0.1.0-alpha.7

              Interface Llama2ChatApi

              interface Llama2ChatApi {
                  name?: string;
                  requestTemplate: Template<Llama2ChatOptions>;
                  responseGuard: ((response) => response is LlamaResponse);
              }

              Hierarchy

              Properties

              name?: string
              requestTemplate: Template<Llama2ChatOptions>
              responseGuard: ((response) => response is LlamaResponse)

              Type declaration

              \ No newline at end of file +

              Properties

              name?: string
              requestTemplate: Template<Llama2ChatOptions>
              responseGuard: ((response) => response is LlamaResponse)

              Type declaration

              \ No newline at end of file diff --git a/docs/interfaces/Llama2ChatOptions.html b/docs/interfaces/Llama2ChatOptions.html index d07a29c..6fc13ec 100644 --- a/docs/interfaces/Llama2ChatOptions.html +++ b/docs/interfaces/Llama2ChatOptions.html @@ -1,8 +1,8 @@ -Llama2ChatOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface Llama2ChatOptions

              interface Llama2ChatOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_gen_len?: number;
                  modelId: string;
                  system?: string;
                  temperature?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • Llama2ChatOptions

              Properties

              $prompt +Llama2ChatOptions | generative-ts - v0.1.0-alpha.7

              Interface Llama2ChatOptions

              interface Llama2ChatOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_gen_len?: number;
                  modelId: string;
                  system?: string;
                  temperature?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • Llama2ChatOptions

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_gen_len?: number
              modelId: string
              system?: string
              temperature?: number
              top_p?: number
              \ No newline at end of file +

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_gen_len?: number
              modelId: string
              system?: string
              temperature?: number
              top_p?: number
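A hedged sketch of a Llama2ChatOptions value combining the required $prompt and modelId with the few-shot fields; the model id string is illustrative only, and the import path is assumed.

    import type { Llama2ChatOptions } from "generative-ts"; // assumed import path

    const options: Llama2ChatOptions = {
      modelId: "meta.llama2-13b-chat-v1", // illustrative id; use whatever your provider expects
      $prompt: "What is the capital of France?",
      system: "You answer concisely.",
      examplePairs: [
        { user: "What is the capital of Spain?", assistant: "Madrid." },
      ],
      temperature: 0.2,
      max_gen_len: 256,
    };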
              \ No newline at end of file diff --git a/docs/interfaces/Llama3ChatApi.html b/docs/interfaces/Llama3ChatApi.html index 9516575..133c239 100644 --- a/docs/interfaces/Llama3ChatApi.html +++ b/docs/interfaces/Llama3ChatApi.html @@ -1,4 +1,4 @@ -Llama3ChatApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface Llama3ChatApi

              interface Llama3ChatApi {
                  name?: string;
                  requestTemplate: Template<Llama3ChatOptions>;
                  responseGuard: ((response) => response is LlamaResponse);
              }

              Hierarchy

              Properties

              name? +Llama3ChatApi | generative-ts - v0.1.0-alpha.7

              Interface Llama3ChatApi

              interface Llama3ChatApi {
                  name?: string;
                  requestTemplate: Template<Llama3ChatOptions>;
                  responseGuard: ((response) => response is LlamaResponse);
              }

              Hierarchy

              Properties

              name?: string
              requestTemplate: Template<Llama3ChatOptions>
              responseGuard: ((response) => response is LlamaResponse)

              Type declaration

              \ No newline at end of file +

              Properties

              name?: string
              requestTemplate: Template<Llama3ChatOptions>
              responseGuard: ((response) => response is LlamaResponse)

              Type declaration

              \ No newline at end of file diff --git a/docs/interfaces/Llama3ChatOptions.html b/docs/interfaces/Llama3ChatOptions.html index 9a31a4e..3f74c9e 100644 --- a/docs/interfaces/Llama3ChatOptions.html +++ b/docs/interfaces/Llama3ChatOptions.html @@ -1,8 +1,8 @@ -Llama3ChatOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface Llama3ChatOptions

              interface Llama3ChatOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_gen_len?: number;
                  modelId: string;
                  system?: string;
                  temperature?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • Llama3ChatOptions

              Properties

              $prompt +Llama3ChatOptions | generative-ts - v0.1.0-alpha.7

              Interface Llama3ChatOptions

              interface Llama3ChatOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_gen_len?: number;
                  modelId: string;
                  system?: string;
                  temperature?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • Llama3ChatOptions

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_gen_len?: number
              modelId: string
              system?: string
              temperature?: number
              top_p?: number
              \ No newline at end of file +

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_gen_len?: number
              modelId: string
              system?: string
              temperature?: number
              top_p?: number
              \ No newline at end of file diff --git a/docs/interfaces/LlamaResponse.html b/docs/interfaces/LlamaResponse.html index 0e40124..d0fccb4 100644 --- a/docs/interfaces/LlamaResponse.html +++ b/docs/interfaces/LlamaResponse.html @@ -1,5 +1,5 @@ -LlamaResponse | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface LlamaResponse

              interface LlamaResponse {
                  generation: string;
                  generation_token_count: number;
                  prompt_token_count: number;
                  stop_reason: string;
              }

              Hierarchy

              • TypeOf<typeof LlamaResponseCodec>
                • LlamaResponse

              Properties

              generation +LlamaResponse | generative-ts - v0.1.0-alpha.7

              Interface LlamaResponse

              interface LlamaResponse {
                  generation: string;
                  generation_token_count: number;
                  prompt_token_count: number;
                  stop_reason: string;
              }

              Hierarchy

              • TypeOf<typeof LlamaResponseCodec>
                • LlamaResponse

              Properties

              generation: string
              generation_token_count: number
              prompt_token_count: number
              stop_reason: string
              \ No newline at end of file +

              Properties

              generation: string
              generation_token_count: number
              prompt_token_count: number
              stop_reason: string
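For reference, a small sketch of consuming a LlamaResponse after validating a raw response with the responseGuard on Llama2ChatApi (documented above); the import path is assumed.

    import { Llama2ChatApi } from "generative-ts"; // assumed import path

    function readGeneration(response: unknown): string {
      // responseGuard narrows unknown to LlamaResponse
      if (Llama2ChatApi.responseGuard(response)) {
        return response.generation;
      }
      throw new Error("Unexpected response shape");
    }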
              \ No newline at end of file diff --git a/docs/interfaces/MistralAiOptions.html b/docs/interfaces/MistralAiOptions.html index 06086af..314d18c 100644 --- a/docs/interfaces/MistralAiOptions.html +++ b/docs/interfaces/MistralAiOptions.html @@ -1,4 +1,4 @@ -MistralAiOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface MistralAiOptions

              interface MistralAiOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_tokens?: number;
                  messages?: {
                      content: string;
                      role: "user" | "assistant" | "system";
                  }[];
                  modelId: string;
                  random_seed?: number;
                  safe_prompt?: boolean;
                  stream?: boolean;
                  system?: string;
                  temperature?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • MistralAiOptions

              Properties

              $prompt +MistralAiOptions | generative-ts - v0.1.0-alpha.7

              Interface MistralAiOptions

              interface MistralAiOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_tokens?: number;
                  messages?: {
                      content: string;
                      role: "user" | "assistant" | "system";
                  }[];
                  modelId: string;
                  random_seed?: number;
                  safe_prompt?: boolean;
                  stream?: boolean;
                  system?: string;
                  temperature?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • MistralAiOptions

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_tokens?: number
              messages?: {
                  content: string;
                  role: "user" | "assistant" | "system";
              }[]

              Type declaration

              • content: string
              • role: "user" | "assistant" | "system"
              modelId: string
              random_seed?: number
              safe_prompt?: boolean
              stream?: boolean
              system?: string
              temperature?: number
              top_p?: number
              \ No newline at end of file +

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_tokens?: number
              messages?: {
                  content: string;
                  role: "user" | "assistant" | "system";
              }[]

              Type declaration

              • content: string
              • role: "user" | "assistant" | "system"
              modelId: string
              random_seed?: number
              safe_prompt?: boolean
              stream?: boolean
              system?: string
              temperature?: number
              top_p?: number
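A hedged sketch of a MistralAiOptions value; per the interface, an explicit messages array can be supplied alongside the required $prompt. The model id is illustrative and the import path is assumed.

    import type { MistralAiOptions } from "generative-ts"; // assumed import path

    const options: MistralAiOptions = {
      modelId: "mistral-small-latest", // illustrative
      $prompt: "Summarize the plot of Hamlet in one sentence.",
      messages: [
        { role: "system", content: "Be brief." },
        { role: "user", content: "Summarize the plot of Hamlet in one sentence." },
      ],
      temperature: 0.3,
      max_tokens: 128,
    };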
              \ No newline at end of file diff --git a/docs/interfaces/MistralAiResponse.html b/docs/interfaces/MistralAiResponse.html index 34dfe06..2914854 100644 --- a/docs/interfaces/MistralAiResponse.html +++ b/docs/interfaces/MistralAiResponse.html @@ -1,7 +1,7 @@ -MistralAiResponse | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface MistralAiResponse

              interface MistralAiResponse {
                  choices: {
                      finish_reason: string;
                      index: number;
                      message: {
                          content: string;
                          role: string;
                      };
                  }[];
                  created: number;
                  id: string;
                  model: string;
                  object: string;
                  usage: {
                      completion_tokens: number;
                      prompt_tokens: number;
                      total_tokens: number;
                  };
              }

              Hierarchy

              • TypeOf<typeof MistralAiApiResponseCodec>
                • MistralAiResponse

              Properties

              choices +MistralAiResponse | generative-ts - v0.1.0-alpha.7

              Interface MistralAiResponse

              interface MistralAiResponse {
                  choices: {
                      finish_reason: string;
                      index: number;
                      message: {
                          content: string;
                          role: string;
                      };
                  }[];
                  created: number;
                  id: string;
                  model: string;
                  object: string;
                  usage: {
                      completion_tokens: number;
                      prompt_tokens: number;
                      total_tokens: number;
                  };
              }

              Hierarchy

              • TypeOf<typeof MistralAiApiResponseCodec>
                • MistralAiResponse

              Properties

              choices: {
                  finish_reason: string;
                  index: number;
                  message: {
                      content: string;
                      role: string;
                  };
              }[]

              Type declaration

              • finish_reason: string
              • index: number
              • message: {
                    content: string;
                    role: string;
                }
                • content: string
                • role: string
              created: number
              id: string
              model: string
              object: string
              usage: {
                  completion_tokens: number;
                  prompt_tokens: number;
                  total_tokens: number;
              }

              Type declaration

              • completion_tokens: number
              • prompt_tokens: number
              • total_tokens: number
              \ No newline at end of file +

              Properties

              choices: {
                  finish_reason: string;
                  index: number;
                  message: {
                      content: string;
                      role: string;
                  };
              }[]

              Type declaration

              • finish_reason: string
              • index: number
              • message: {
                    content: string;
                    role: string;
                }
                • content: string
                • role: string
              created: number
              id: string
              model: string
              object: string
              usage: {
                  completion_tokens: number;
                  prompt_tokens: number;
                  total_tokens: number;
              }

              Type declaration

              • completion_tokens: number
              • prompt_tokens: number
              • total_tokens: number
              \ No newline at end of file diff --git a/docs/interfaces/MistralAuthConfig.html b/docs/interfaces/MistralAuthConfig.html index 3e37f32..0dfc33b 100644 --- a/docs/interfaces/MistralAuthConfig.html +++ b/docs/interfaces/MistralAuthConfig.html @@ -1,3 +1,3 @@ -MistralAuthConfig | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface MistralAuthConfig

              interface MistralAuthConfig {
                  MISTRAL_API_KEY: string;
              }

              Properties

              MISTRAL_API_KEY +MistralAuthConfig | generative-ts - v0.1.0-alpha.7

              Interface MistralAuthConfig

              interface MistralAuthConfig {
                  MISTRAL_API_KEY: string;
              }

              Properties

              Properties

              MISTRAL_API_KEY: string

The Mistral API key, used as a Bearer token

              -
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/interfaces/MistralBedrockApi.html b/docs/interfaces/MistralBedrockApi.html index 9bbac1f..61c0129 100644 --- a/docs/interfaces/MistralBedrockApi.html +++ b/docs/interfaces/MistralBedrockApi.html @@ -1,4 +1,4 @@ -MistralBedrockApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface MistralBedrockApi

              interface MistralBedrockApi {
                  name?: string;
                  requestTemplate: Template<MistralBedrockOptions>;
                  responseGuard: ((response) => response is MistralBedrockResponse);
              }

              Hierarchy

              Properties

              name? +MistralBedrockApi | generative-ts - v0.1.0-alpha.7

              Interface MistralBedrockApi

              interface MistralBedrockApi {
                  name?: string;
                  requestTemplate: Template<MistralBedrockOptions>;
                  responseGuard: ((response) => response is MistralBedrockResponse);
              }

              Hierarchy

              Properties

              name?: string
              requestTemplate: Template<MistralBedrockOptions>
              responseGuard: ((response) => response is MistralBedrockResponse)

              Type declaration

              \ No newline at end of file +

              Properties

              name?: string
              requestTemplate: Template<MistralBedrockOptions>
              responseGuard: ((response) => response is MistralBedrockResponse)

              Type declaration

              \ No newline at end of file diff --git a/docs/interfaces/MistralBedrockOptions.html b/docs/interfaces/MistralBedrockOptions.html index d6017f0..1eb367d 100644 --- a/docs/interfaces/MistralBedrockOptions.html +++ b/docs/interfaces/MistralBedrockOptions.html @@ -1,4 +1,4 @@ -MistralBedrockOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface MistralBedrockOptions

              interface MistralBedrockOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_tokens?: number;
                  modelId: string;
                  stop?: string[];
                  system?: string;
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • MistralBedrockOptions

              Properties

              $prompt +MistralBedrockOptions | generative-ts - v0.1.0-alpha.7

              Interface MistralBedrockOptions

              interface MistralBedrockOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  max_tokens?: number;
                  modelId: string;
                  stop?: string[];
                  system?: string;
                  temperature?: number;
                  top_k?: number;
                  top_p?: number;
              }

              Hierarchy

              • FewShotRequestOptions
              • ModelRequestOptions
                • MistralBedrockOptions

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_tokens?: number
              modelId: string
              stop?: string[]
              system?: string
              temperature?: number
              top_k?: number
              top_p?: number
              \ No newline at end of file +

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              max_tokens?: number
              modelId: string
              stop?: string[]
              system?: string
              temperature?: number
              top_k?: number
              top_p?: number
              \ No newline at end of file diff --git a/docs/interfaces/MistralBedrockResponse.html b/docs/interfaces/MistralBedrockResponse.html index 667562c..7971e03 100644 --- a/docs/interfaces/MistralBedrockResponse.html +++ b/docs/interfaces/MistralBedrockResponse.html @@ -1,2 +1,2 @@ -MistralBedrockResponse | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface MistralBedrockResponse

              interface MistralBedrockResponse {
                  outputs: {
                      stop_reason: string;
                      text: string;
                  }[];
              }

              Hierarchy

              • TypeOf<typeof MistralBedrockResponseCodec>
                • MistralBedrockResponse

              Properties

              Properties

              outputs: {
                  stop_reason: string;
                  text: string;
              }[]

              Type declaration

              • stop_reason: string
              • text: string
              \ No newline at end of file +MistralBedrockResponse | generative-ts - v0.1.0-alpha.7
              generative-ts

              Interface MistralBedrockResponse

              interface MistralBedrockResponse {
                  outputs: {
                      stop_reason: string;
                      text: string;
                  }[];
              }

              Hierarchy

              • TypeOf<typeof MistralBedrockResponseCodec>
                • MistralBedrockResponse

              Properties

              Properties

              outputs: {
                  stop_reason: string;
                  text: string;
              }[]

              Type declaration

              • stop_reason: string
              • text: string
              \ No newline at end of file diff --git a/docs/interfaces/ModelApi.html b/docs/interfaces/ModelApi.html index 52b34f7..4b8608e 100644 --- a/docs/interfaces/ModelApi.html +++ b/docs/interfaces/ModelApi.html @@ -1,4 +1,4 @@ -ModelApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface ModelApi<TRequestOptions, TResponse>

              interface ModelApi<TRequestOptions, TResponse> {
                  name?: string;
                  requestTemplate: Template<TRequestOptions>;
                  responseGuard: ((response) => response is TResponse);
              }

              Type Parameters

              Properties

              name? +ModelApi | generative-ts - v0.1.0-alpha.7

              Interface ModelApi<TRequestOptions, TResponse>

              interface ModelApi<TRequestOptions, TResponse> {
                  name?: string;
                  requestTemplate: Template<TRequestOptions>;
                  responseGuard: ((response) => response is TResponse);
              }

              Type Parameters

              Properties

              name?: string
              requestTemplate: Template<TRequestOptions>
              responseGuard: ((response) => response is TResponse)

              Type declaration

              \ No newline at end of file +

              Properties

              name?: string
              requestTemplate: Template<TRequestOptions>
              responseGuard: ((response) => response is TResponse)

              Type declaration
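To show how the pieces fit together, a hedged sketch of a custom ModelApi: requestTemplate renders request options into the request body, and responseGuard is a type guard over the raw response. Everything named "Echo…" is hypothetical, and the import path is assumed.

    import type { ModelApi, ModelRequestOptions } from "generative-ts"; // assumed import path

    // Hypothetical response shape for the sketch.
    interface EchoResponse {
      echoed: string;
    }

    const EchoApi: ModelApi<ModelRequestOptions, EchoResponse> = {
      name: "echo-api",
      // Renders request options into the request body string.
      requestTemplate: {
        render: ({ modelId, $prompt }) => JSON.stringify({ model: modelId, input: $prompt }),
      },
      // Validates the raw response and narrows its type.
      responseGuard: (response): response is EchoResponse =>
        typeof response === "object" &&
        response !== null &&
        typeof (response as EchoResponse).echoed === "string",
    };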

              \ No newline at end of file diff --git a/docs/interfaces/ModelProvider.html b/docs/interfaces/ModelProvider.html index 21a312d..81c6c57 100644 --- a/docs/interfaces/ModelProvider.html +++ b/docs/interfaces/ModelProvider.html @@ -1,2 +1,2 @@ -ModelProvider | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface ModelProvider<TRequestOptions, TResponse>

              interface ModelProvider<TRequestOptions, TResponse> {
                  sendRequest(options): Promise<TResponse>;
              }

              Type Parameters

              Methods

              Methods

              \ No newline at end of file +ModelProvider | generative-ts - v0.1.0-alpha.7
              generative-ts

              Interface ModelProvider<TRequestOptions, TResponse>

              interface ModelProvider<TRequestOptions, TResponse> {
                  sendRequest(options): Promise<TResponse>;
              }

              Type Parameters

              Methods

              Methods
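A short sketch of consuming any ModelProvider generically; sendRequest(options) is the whole contract, so a helper can be written against the interface alone. The import path is assumed.

    import type { ModelProvider, ModelRequestOptions } from "generative-ts"; // assumed import path

    // Works with any provider/response pairing.
    async function ask<TResponse>(
      provider: ModelProvider<ModelRequestOptions, TResponse>,
      $prompt: string,
      modelId: string,
    ): Promise<TResponse> {
      return provider.sendRequest({ $prompt, modelId });
    }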

              \ No newline at end of file diff --git a/docs/interfaces/ModelRequestOptions.html b/docs/interfaces/ModelRequestOptions.html index 61200fa..b6dc3bd 100644 --- a/docs/interfaces/ModelRequestOptions.html +++ b/docs/interfaces/ModelRequestOptions.html @@ -1,3 +1,3 @@ -ModelRequestOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface ModelRequestOptions

              interface ModelRequestOptions {
                  $prompt: string;
                  modelId: string;
              }

              Properties

              $prompt +ModelRequestOptions | generative-ts - v0.1.0-alpha.7

              Interface ModelRequestOptions

              interface ModelRequestOptions {
                  $prompt: string;
                  modelId: string;
              }

              Properties

              Properties

              $prompt: string
              modelId: string
              \ No newline at end of file +

              Properties

              $prompt: string
              modelId: string
              \ No newline at end of file diff --git a/docs/interfaces/OpenAiAuthConfig.html b/docs/interfaces/OpenAiAuthConfig.html index dc40044..f656d1a 100644 --- a/docs/interfaces/OpenAiAuthConfig.html +++ b/docs/interfaces/OpenAiAuthConfig.html @@ -1,3 +1,3 @@ -OpenAiAuthConfig | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface OpenAiAuthConfig

              interface OpenAiAuthConfig {
                  OPENAI_API_KEY: string;
              }

              Properties

              OPENAI_API_KEY +OpenAiAuthConfig | generative-ts - v0.1.0-alpha.7

              Interface OpenAiAuthConfig

              interface OpenAiAuthConfig {
                  OPENAI_API_KEY: string;
              }

              Properties

              Properties

              OPENAI_API_KEY: string

The OpenAI API key, used as a Bearer token

              -
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/interfaces/OpenAiChatOptions.html b/docs/interfaces/OpenAiChatOptions.html index fc71913..e1b2d3d 100644 --- a/docs/interfaces/OpenAiChatOptions.html +++ b/docs/interfaces/OpenAiChatOptions.html @@ -1,4 +1,4 @@ -OpenAiChatOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface OpenAiChatOptions

              interface OpenAiChatOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  frequency_penalty?: number;
                  function_call?: string;
                  functions?: {
                      description?: string;
                      name: string;
                      parameters?: object;
                  }[];
                  logit_bias?: Record<string, number>;
                  logprobs?: boolean;
                  max_tokens?: number;
                  messages?: ChatCompletionRequestMessage[];
                  modelId: string;
                  n?: number;
                  presence_penalty?: number;
                  response_format?: {
                      type: "text" | "json_object";
                  };
                  seed?: number;
                  stop?: string | string[];
                  stream?: boolean;
                  stream_options?: {
                      include_usage: boolean;
                  };
                  system?: string;
                  temperature?: number;
                  tool_choice?: "required" | "none" | "auto" | {
                      function: {
                          name: string;
                      };
                      type: "function";
                  };
                  tools?: {
                      function: {
                          description?: string;
                          name: string;
                          parameters?: object;
                      };
                      type: "function";
                  }[];
                  top_logprobs?: number;
                  top_p?: number;
                  user?: string;
              }

              Hierarchy

              • ModelRequestOptions
              • FewShotRequestOptions
              • OpenAiChatToolsOptions
                • OpenAiChatOptions

              Properties

              $prompt +OpenAiChatOptions | generative-ts - v0.1.0-alpha.7

              Interface OpenAiChatOptions

              interface OpenAiChatOptions {
                  $prompt: string;
                  examplePairs?: {
                      assistant: string;
                      user: string;
                  }[];
                  frequency_penalty?: number;
                  function_call?: string;
                  functions?: {
                      description?: string;
                      name: string;
                      parameters?: object;
                  }[];
                  logit_bias?: Record<string, number>;
                  logprobs?: boolean;
                  max_tokens?: number;
                  messages?: ChatCompletionRequestMessage[];
                  modelId: string;
                  n?: number;
                  presence_penalty?: number;
                  response_format?: {
                      type: "text" | "json_object";
                  };
                  seed?: number;
                  stop?: string | string[];
                  stream?: boolean;
                  stream_options?: {
                      include_usage: boolean;
                  };
                  system?: string;
                  temperature?: number;
                  tool_choice?: "required" | "none" | "auto" | {
                      function: {
                          name: string;
                      };
                      type: "function";
                  };
                  tools?: {
                      function: {
                          description?: string;
                          name: string;
                          parameters?: object;
                      };
                      type: "function";
                  }[];
                  top_logprobs?: number;
                  top_p?: number;
                  user?: string;
              }

              Hierarchy

              • ModelRequestOptions
              • FewShotRequestOptions
              • OpenAiChatToolsOptions
                • OpenAiChatOptions

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              frequency_penalty?: number
              function_call?: string
              functions?: {
                  description?: string;
                  name: string;
                  parameters?: object;
              }[]

              Type declaration

              • Optional description?: string
              • name: string
              • Optional parameters?: object
              logit_bias?: Record<string, number>
              logprobs?: boolean
              max_tokens?: number
              messages?: ChatCompletionRequestMessage[]
              modelId: string
              n?: number
              presence_penalty?: number
              response_format?: {
                  type: "text" | "json_object";
              }

              Type declaration

              • type: "text" | "json_object"
              seed?: number
              stop?: string | string[]
              stream?: boolean
              stream_options?: {
                  include_usage: boolean;
              }

              Type declaration

              • include_usage: boolean
              system?: string
              temperature?: number
              tool_choice?: "required" | "none" | "auto" | {
                  function: {
                      name: string;
                  };
                  type: "function";
              }

              Type declaration

              • function: {
                    name: string;
                }
                • name: string
              • type: "function"
              tools?: {
                  function: {
                      description?: string;
                      name: string;
                      parameters?: object;
                  };
                  type: "function";
              }[]

              Type declaration

              • function: {
                    description?: string;
                    name: string;
                    parameters?: object;
                }
                • Optional description?: string
                • name: string
                • Optional parameters?: object
              • type: "function"
              top_logprobs?: number
              top_p?: number
              user?: string
              \ No newline at end of file +

              Properties

              $prompt: string
              examplePairs?: {
                  assistant: string;
                  user: string;
              }[]

              Type declaration

              • assistant: string
              • user: string
              frequency_penalty?: number
              function_call?: string
              functions?: {
                  description?: string;
                  name: string;
                  parameters?: object;
              }[]

              Type declaration

              • Optional description?: string
              • name: string
              • Optional parameters?: object
              logit_bias?: Record<string, number>
              logprobs?: boolean
              max_tokens?: number
              messages?: ChatCompletionRequestMessage[]
              modelId: string
              n?: number
              presence_penalty?: number
              response_format?: {
                  type: "text" | "json_object";
              }

              Type declaration

              • type: "text" | "json_object"
              seed?: number
              stop?: string | string[]
              stream?: boolean
              stream_options?: {
                  include_usage: boolean;
              }

              Type declaration

              • include_usage: boolean
              system?: string
              temperature?: number
              tool_choice?: "required" | "none" | "auto" | {
                  function: {
                      name: string;
                  };
                  type: "function";
              }

              Type declaration

              • function: {
                    name: string;
                }
                • name: string
              • type: "function"
              tools?: {
                  function: {
                      description?: string;
                      name: string;
                      parameters?: object;
                  };
                  type: "function";
              }[]

              Type declaration

              • function: {
                    description?: string;
                    name: string;
                    parameters?: object;
                }
                • Optional description?: string
                • name: string
                • Optional parameters?: object
              • type: "function"
              top_logprobs?: number
              top_p?: number
              user?: string
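A hedged sketch of an OpenAiChatOptions value using the tool-calling fields, since those are the more involved part of the interface. The model id and the get_weather tool are illustrative placeholders, and the import path is assumed.

    import type { OpenAiChatOptions } from "generative-ts"; // assumed import path

    const options: OpenAiChatOptions = {
      modelId: "gpt-4o", // illustrative
      $prompt: "What's the weather in Paris?",
      tools: [
        {
          type: "function",
          function: {
            name: "get_weather", // hypothetical tool
            description: "Look up current weather for a city",
            parameters: {
              type: "object",
              properties: { city: { type: "string" } },
              required: ["city"],
            },
          },
        },
      ],
      tool_choice: "auto",
      temperature: 0,
    };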
              \ No newline at end of file diff --git a/docs/interfaces/OpenAiChatResponse.html b/docs/interfaces/OpenAiChatResponse.html index 1fc3eb1..782c498 100644 --- a/docs/interfaces/OpenAiChatResponse.html +++ b/docs/interfaces/OpenAiChatResponse.html @@ -1,8 +1,8 @@ -OpenAiChatResponse | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface OpenAiChatResponse

              interface OpenAiChatResponse {
                  choices: {
                      finish_reason: string;
                      index: number;
                      logprobs: any;
                      message: {
                          content: string;
                          role: string;
                      } & {
                          function_call?: {
                              arguments: string;
                              name: string;
                          };
                          tool_calls?: {
                              function: {
                                  arguments: string;
                                  name: string;
                              };
                              id: string;
                              type: string;
                          }[];
                      };
                  }[];
                  created: number;
                  id: string;
                  model: string;
                  object: string;
                  system_fingerprint?: string;
                  usage?: {
                      completion_tokens: number;
                      prompt_tokens: number;
                      total_tokens: number;
                  };
              }

              Hierarchy

              • TypeOf<typeof OpenAiChatResponseCodec>
                • OpenAiChatResponse

              Properties

              choices +OpenAiChatResponse | generative-ts - v0.1.0-alpha.7

              Interface OpenAiChatResponse

              interface OpenAiChatResponse {
                  choices: {
                      finish_reason: string;
                      index: number;
                      logprobs: any;
                      message: {
                          content: string;
                          role: string;
                      } & {
                          function_call?: {
                              arguments: string;
                              name: string;
                          };
                          tool_calls?: {
                              function: {
                                  arguments: string;
                                  name: string;
                              };
                              id: string;
                              type: string;
                          }[];
                      };
                  }[];
                  created: number;
                  id: string;
                  model: string;
                  object: string;
                  system_fingerprint?: string;
                  usage?: {
                      completion_tokens: number;
                      prompt_tokens: number;
                      total_tokens: number;
                  };
              }

              Hierarchy

              • TypeOf<typeof OpenAiChatResponseCodec>
                • OpenAiChatResponse

              Properties

              choices: {
                  finish_reason: string;
                  index: number;
                  logprobs: any;
                  message: {
                      content: string;
                      role: string;
                  } & {
                      function_call?: {
                          arguments: string;
                          name: string;
                      };
                      tool_calls?: {
                          function: {
                              arguments: string;
                              name: string;
                          };
                          id: string;
                          type: string;
                      }[];
                  };
              }[]

              Type declaration

              • finish_reason: string
              • index: number
              • logprobs: any
              • message: {
                    content: string;
                    role: string;
                } & {
                    function_call?: {
                        arguments: string;
                        name: string;
                    };
                    tool_calls?: {
                        function: {
                            arguments: string;
                            name: string;
                        };
                        id: string;
                        type: string;
                    }[];
                }
              created: number
              id: string
              model: string
              object: string
              system_fingerprint?: string
              usage?: {
                  completion_tokens: number;
                  prompt_tokens: number;
                  total_tokens: number;
              }

              Type declaration

              • completion_tokens: number
              • prompt_tokens: number
              • total_tokens: number
              \ No newline at end of file +

              Properties

              choices: {
                  finish_reason: string;
                  index: number;
                  logprobs: any;
                  message: {
                      content: string;
                      role: string;
                  } & {
                      function_call?: {
                          arguments: string;
                          name: string;
                      };
                      tool_calls?: {
                          function: {
                              arguments: string;
                              name: string;
                          };
                          id: string;
                          type: string;
                      }[];
                  };
              }[]

              Type declaration

              • finish_reason: string
              • index: number
              • logprobs: any
              • message: {
                    content: string;
                    role: string;
                } & {
                    function_call?: {
                        arguments: string;
                        name: string;
                    };
                    tool_calls?: {
                        function: {
                            arguments: string;
                            name: string;
                        };
                        id: string;
                        type: string;
                    }[];
                }
              created: number
              id: string
              model: string
              object: string
              system_fingerprint?: string
              usage?: {
                  completion_tokens: number;
                  prompt_tokens: number;
                  total_tokens: number;
              }

              Type declaration

              • completion_tokens: number
              • prompt_tokens: number
              • total_tokens: number
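A small sketch of reading an OpenAiChatResponse: the first choice's message content is returned unless the model produced tool calls. The import path is assumed.

    import type { OpenAiChatResponse } from "generative-ts"; // assumed import path

    function readOpenAiChat(response: OpenAiChatResponse): string {
      const choice = response.choices[0];
      // tool_calls is only present when the model decided to call a tool
      if (choice.message.tool_calls?.length) {
        const call = choice.message.tool_calls[0];
        return `tool call: ${call.function.name}(${call.function.arguments})`;
      }
      return choice.message.content;
    }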
              \ No newline at end of file diff --git a/docs/interfaces/Template.html b/docs/interfaces/Template.html index e8ca2a5..34ba883 100644 --- a/docs/interfaces/Template.html +++ b/docs/interfaces/Template.html @@ -1,2 +1,2 @@ -Template | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface Template<TVars>

              interface Template<TVars> {
                  render(context): string;
              }

              Type Parameters

              • TVars

              Methods

              Methods

              • Parameters

                Returns string

              \ No newline at end of file +Template | generative-ts - v0.1.0-alpha.7
              generative-ts

              Interface Template<TVars>

              interface Template<TVars> {
                  render(context): string;
              }

              Type Parameters

              • TVars

              Methods

              Methods

              • Parameters

                Returns string
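Since Template<TVars> only requires render(context): string, a hand-rolled implementation is a plain object; FnTemplate (documented earlier) wraps the same idea around a render function. The GreetingVars type is hypothetical and the import path is assumed.

    import type { Template } from "generative-ts"; // assumed import path

    // Hypothetical variables type for the sketch.
    interface GreetingVars {
      name: string;
    }

    const GreetingTemplate: Template<GreetingVars> = {
      render(context) {
        return `Hello, ${context.name}!`;
      },
    };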

              \ No newline at end of file diff --git a/docs/interfaces/VertexAiAuthConfig.html b/docs/interfaces/VertexAiAuthConfig.html index be9dfd0..049c88d 100644 --- a/docs/interfaces/VertexAiAuthConfig.html +++ b/docs/interfaces/VertexAiAuthConfig.html @@ -1,5 +1,5 @@ -VertexAiAuthConfig | generative-ts - v0.1.0-alpha.6
              generative-ts

              Interface VertexAiAuthConfig

              interface VertexAiAuthConfig {
                  GCLOUD_LOCATION: string;
                  GCLOUD_PROJECT_ID: string;
              }

              Properties

              GCLOUD_LOCATION +VertexAiAuthConfig | generative-ts - v0.1.0-alpha.7

              Interface VertexAiAuthConfig

              interface VertexAiAuthConfig {
                  GCLOUD_LOCATION: string;
                  GCLOUD_PROJECT_ID: string;
              }

              Properties

              GCLOUD_LOCATION: string

The GCloud location of your project. NOTE: As of this writing, Vertex AI is only available in certain regions.

              GCLOUD_PROJECT_ID: string

              Your GCloud project ID
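A minimal sketch of a VertexAiAuthConfig value; the project id and region are placeholders, and the import path is assumed.

    import type { VertexAiAuthConfig } from "generative-ts"; // assumed import path

    // Placeholder values; pick a region where Vertex AI is available.
    const vertexAuth: VertexAiAuthConfig = {
      GCLOUD_PROJECT_ID: process.env.GCLOUD_PROJECT_ID ?? "my-gcp-project",
      GCLOUD_LOCATION: "us-central1",
    };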

              -
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/modules.html b/docs/modules.html index e932114..6d7b11d 100644 --- a/docs/modules.html +++ b/docs/modules.html @@ -1,4 +1,4 @@ -generative-ts - v0.1.0-alpha.6
              generative-ts

              generative-ts - v0.1.0-alpha.6

              Index

              Providers

              createAwsBedrockModelProvider +generative-ts - v0.1.0-alpha.7
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/types/InferHttpClientOptions.html b/docs/types/InferHttpClientOptions.html index d5f3dcb..828f807 100644 --- a/docs/types/InferHttpClientOptions.html +++ b/docs/types/InferHttpClientOptions.html @@ -1 +1 @@ -InferHttpClientOptions | generative-ts - v0.1.0-alpha.6
              generative-ts

              Type alias InferHttpClientOptions<T>

              InferHttpClientOptions<T>: T extends HttpModelProvider<ModelRequestOptions, unknown, infer U>
                  ? U
                  : never

              Type Parameters

              • T
              \ No newline at end of file +InferHttpClientOptions | generative-ts - v0.1.0-alpha.7
              generative-ts

              Type alias InferHttpClientOptions<T>

              InferHttpClientOptions<T>: T extends HttpModelProvider<ModelRequestOptions, unknown, infer U>
                  ? U
                  : never

              Type Parameters

              • T
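A type-level sketch of InferHttpClientOptions: given a concrete HttpModelProvider value, it recovers that provider's THttpClientOptions type argument. The provider here is a hypothetical declaration, and the import path is assumed.

    import type { HttpModelProvider, InferHttpClientOptions } from "generative-ts"; // assumed import path

    // Hypothetical: some provider instance created elsewhere in your app.
    declare const myProvider: HttpModelProvider;

    // Recovers the HTTP client options type used by that provider.
    type MyClientOptions = InferHttpClientOptions<typeof myProvider>;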
              \ No newline at end of file diff --git a/docs/types/ModelId.html b/docs/types/ModelId.html index f859a80..7ee1eac 100644 --- a/docs/types/ModelId.html +++ b/docs/types/ModelId.html @@ -1 +1 @@ -ModelId | generative-ts - v0.1.0-alpha.6
              generative-ts

              Type alias ModelId

              ModelId: string
              \ No newline at end of file +ModelId | generative-ts - v0.1.0-alpha.7
              generative-ts

              Type alias ModelId

              ModelId: string
              \ No newline at end of file diff --git a/docs/variables/Ai21Jurassic2Api-1.html b/docs/variables/Ai21Jurassic2Api-1.html index 3f554b8..76e67fc 100644 --- a/docs/variables/Ai21Jurassic2Api-1.html +++ b/docs/variables/Ai21Jurassic2Api-1.html @@ -1,5 +1,5 @@ -Ai21Jurassic2Api | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable Ai21Jurassic2Api

              Ai21Jurassic2Api: Ai21Jurassic2Api

              Reference

Ai21 Jurassic 2

              +Ai21Jurassic2Api | generative-ts - v0.1.0-alpha.7

              Variable Ai21Jurassic2Api

              Ai21Jurassic2Api: Ai21Jurassic2Api
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/variables/Ai21Jurassic2Template.html b/docs/variables/Ai21Jurassic2Template.html index 72ce200..896be53 100644 --- a/docs/variables/Ai21Jurassic2Template.html +++ b/docs/variables/Ai21Jurassic2Template.html @@ -1 +1 @@ -Ai21Jurassic2Template | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable Ai21Jurassic2TemplateConst

              Ai21Jurassic2Template: FnTemplate<Ai21Jurassic2Options>
              \ No newline at end of file +Ai21Jurassic2Template | generative-ts - v0.1.0-alpha.7
              generative-ts

              Variable Ai21Jurassic2TemplateConst

              Ai21Jurassic2Template: FnTemplate<Ai21Jurassic2Options>
              \ No newline at end of file diff --git a/docs/variables/AmazonTitanTextApi-1.html b/docs/variables/AmazonTitanTextApi-1.html index 111bad0..e337260 100644 --- a/docs/variables/AmazonTitanTextApi-1.html +++ b/docs/variables/AmazonTitanTextApi-1.html @@ -1,5 +1,5 @@ -AmazonTitanTextApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable AmazonTitanTextApi

              AmazonTitanTextApi: AmazonTitanTextApi

              Reference

              Amazon Titan Text

              +AmazonTitanTextApi | generative-ts - v0.1.0-alpha.7

              Variable AmazonTitanTextApi

              AmazonTitanTextApi: AmazonTitanTextApi
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/variables/AmazonTitanTextTemplate.html b/docs/variables/AmazonTitanTextTemplate.html index 6cc8a07..8eb5ecc 100644 --- a/docs/variables/AmazonTitanTextTemplate.html +++ b/docs/variables/AmazonTitanTextTemplate.html @@ -1 +1 @@ -AmazonTitanTextTemplate | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable AmazonTitanTextTemplateConst

              AmazonTitanTextTemplate: FnTemplate<AmazonTitanTextOptions>
              \ No newline at end of file +AmazonTitanTextTemplate | generative-ts - v0.1.0-alpha.7
              generative-ts

              Variable AmazonTitanTextTemplateConst

              AmazonTitanTextTemplate: FnTemplate<AmazonTitanTextOptions>
              \ No newline at end of file diff --git a/docs/variables/CohereChatApi-1.html b/docs/variables/CohereChatApi-1.html index 00e4a3f..bd9c79c 100644 --- a/docs/variables/CohereChatApi-1.html +++ b/docs/variables/CohereChatApi-1.html @@ -1,5 +1,5 @@ -CohereChatApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable CohereChatApi

              CohereChatApi: CohereChatApi

              Reference

              Cohere Chat

              +CohereChatApi | generative-ts - v0.1.0-alpha.7

              Variable CohereChatApi

              CohereChatApi: CohereChatApi
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/variables/CohereChatTemplate.html b/docs/variables/CohereChatTemplate.html index e0e1add..16fe2ea 100644 --- a/docs/variables/CohereChatTemplate.html +++ b/docs/variables/CohereChatTemplate.html @@ -1 +1 @@ -CohereChatTemplate | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable CohereChatTemplateConst

              CohereChatTemplate: FnTemplate<CohereChatOptions>
              \ No newline at end of file +CohereChatTemplate | generative-ts - v0.1.0-alpha.7
              generative-ts

              Variable CohereChatTemplateConst

              CohereChatTemplate: FnTemplate<CohereChatOptions>
              \ No newline at end of file diff --git a/docs/variables/CohereGenerateApi-1.html b/docs/variables/CohereGenerateApi-1.html index 08bdfc9..887c90b 100644 --- a/docs/variables/CohereGenerateApi-1.html +++ b/docs/variables/CohereGenerateApi-1.html @@ -1,6 +1,6 @@ -CohereGenerateApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable CohereGenerateApi

              CohereGenerateApi: CohereGenerateApi

              Reference

              Cohere Generate

              +CohereGenerateApi | generative-ts - v0.1.0-alpha.7

              Variable CohereGenerateApi

              CohereGenerateApi: CohereGenerateApi
              \ No newline at end of file +
              \ No newline at end of file diff --git a/docs/variables/CohereGenerateTemplate.html b/docs/variables/CohereGenerateTemplate.html index 087490d..9fd7692 100644 --- a/docs/variables/CohereGenerateTemplate.html +++ b/docs/variables/CohereGenerateTemplate.html @@ -1 +1 @@ -CohereGenerateTemplate | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable CohereGenerateTemplateConst

              CohereGenerateTemplate: FnTemplate<CohereGenerateOptions>
              \ No newline at end of file +CohereGenerateTemplate | generative-ts - v0.1.0-alpha.7
              generative-ts

              Variable CohereGenerateTemplateConst

              CohereGenerateTemplate: FnTemplate<CohereGenerateOptions>
              \ No newline at end of file diff --git a/docs/variables/GoogleGeminiApi-1.html b/docs/variables/GoogleGeminiApi-1.html index ff0531e..56bd39d 100644 --- a/docs/variables/GoogleGeminiApi-1.html +++ b/docs/variables/GoogleGeminiApi-1.html @@ -1,8 +1,8 @@ -GoogleGeminiApi | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable GoogleGeminiApi

              GoogleGeminiApi: GoogleGeminiApi

              Reference

              \ No newline at end of file diff --git a/docs/variables/GoogleGeminiTemplate.html b/docs/variables/GoogleGeminiTemplate.html index 9d192a1..d205eae 100644 --- a/docs/variables/GoogleGeminiTemplate.html +++ b/docs/variables/GoogleGeminiTemplate.html @@ -1 +1 @@ -GoogleGeminiTemplate | generative-ts - v0.1.0-alpha.6
              generative-ts

              Variable GoogleGeminiTemplateConst

              GoogleGeminiTemplate: FnTemplate<GoogleGeminiOptions>
              \ No newline at end of file +GoogleGeminiTemplate | generative-ts - v0.1.0-alpha.7
              generative-ts

              Variable GoogleGeminiTemplateConst

              GoogleGeminiTemplate: FnTemplate<GoogleGeminiOptions>
diff --git a/docs/variables/HfConversationalTaskApi-1.html b/docs/variables/HfConversationalTaskApi-1.html
index c96c031..9cb58dd 100644
--- a/docs/variables/HfConversationalTaskApi-1.html
+++ b/docs/variables/HfConversationalTaskApi-1.html
@@ -1,5 +1,5 @@
-HfConversationalTaskApi | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable HfConversationalTaskApi; HfConversationalTaskApi: HfConversationalTaskApi; Reference: Huggingface Conversational Task]
\ No newline at end of file
+HfConversationalTaskApi | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/HfConversationalTaskTemplate.html b/docs/variables/HfConversationalTaskTemplate.html
index de8cba2..8e0a8f3 100644
--- a/docs/variables/HfConversationalTaskTemplate.html
+++ b/docs/variables/HfConversationalTaskTemplate.html
@@ -1 +1 @@
-HfConversationalTaskTemplate | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable HfConversationalTaskTemplate Const; HfConversationalTaskTemplate: FnTemplate<HfConversationalTaskOptions>]
\ No newline at end of file
+HfConversationalTaskTemplate | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/HfTextGenerationTaskApi-1.html b/docs/variables/HfTextGenerationTaskApi-1.html
index e4840af..c20d782 100644
--- a/docs/variables/HfTextGenerationTaskApi-1.html
+++ b/docs/variables/HfTextGenerationTaskApi-1.html
@@ -1,5 +1,5 @@
-HfTextGenerationTaskApi | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable HfTextGenerationTaskApi; HfTextGenerationTaskApi: HfTextGenerationTaskApi; Reference: Huggingface Text Generation Task]
\ No newline at end of file
+HfTextGenerationTaskApi | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/HfTextGenerationTaskTemplate.html b/docs/variables/HfTextGenerationTaskTemplate.html
index 8cd1ca9..31e6929 100644
--- a/docs/variables/HfTextGenerationTaskTemplate.html
+++ b/docs/variables/HfTextGenerationTaskTemplate.html
@@ -1 +1 @@
-HfTextGenerationTaskTemplate | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable HfTextGenerationTaskTemplate Const; HfTextGenerationTaskTemplate: FnTemplate<HfTextGenerationTaskOptions>]
\ No newline at end of file
+HfTextGenerationTaskTemplate | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/Llama2ChatApi-1.html b/docs/variables/Llama2ChatApi-1.html
index f4fe918..3f0d911 100644
--- a/docs/variables/Llama2ChatApi-1.html
+++ b/docs/variables/Llama2ChatApi-1.html
@@ -1,5 +1,5 @@
-Llama2ChatApi | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable Llama2ChatApi; Llama2ChatApi: Llama2ChatApi; Reference: LLama2]
\ No newline at end of file
+Llama2ChatApi | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/Llama2ChatTemplate.html b/docs/variables/Llama2ChatTemplate.html
index 4dea375..4f8ae80 100644
--- a/docs/variables/Llama2ChatTemplate.html
+++ b/docs/variables/Llama2ChatTemplate.html
@@ -1 +1 @@
-Llama2ChatTemplate | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable Llama2ChatTemplate Const; Llama2ChatTemplate: FnTemplate<Llama2ChatOptions>]
\ No newline at end of file
+Llama2ChatTemplate | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/Llama3ChatApi-1.html b/docs/variables/Llama3ChatApi-1.html
index 5b0ca67..804eeea 100644
--- a/docs/variables/Llama3ChatApi-1.html
+++ b/docs/variables/Llama3ChatApi-1.html
@@ -1,5 +1,5 @@
-Llama3ChatApi | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable Llama3ChatApi; Llama3ChatApi: Llama3ChatApi; Reference: LLama3]
\ No newline at end of file
+Llama3ChatApi | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/Llama3ChatTemplate.html b/docs/variables/Llama3ChatTemplate.html
index d81d591..f9ba720 100644
--- a/docs/variables/Llama3ChatTemplate.html
+++ b/docs/variables/Llama3ChatTemplate.html
@@ -1 +1 @@
-Llama3ChatTemplate | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable Llama3ChatTemplate Const; Llama3ChatTemplate: FnTemplate<Llama3ChatOptions>]
\ No newline at end of file
+Llama3ChatTemplate | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/MistralAiApi.html b/docs/variables/MistralAiApi.html
index 2436170..58e8d9e 100644
--- a/docs/variables/MistralAiApi.html
+++ b/docs/variables/MistralAiApi.html
@@ -1,5 +1,5 @@
-MistralAiApi | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable MistralAiApi Const; MistralAiApi: ModelApi<MistralAiOptions, MistralAiResponse>; Reference: Mistral AI Chat Completion]
\ No newline at end of file
+MistralAiApi | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/MistralAiTemplate.html b/docs/variables/MistralAiTemplate.html
index 1957a58..b0e1e7e 100644
--- a/docs/variables/MistralAiTemplate.html
+++ b/docs/variables/MistralAiTemplate.html
@@ -1 +1 @@
-MistralAiTemplate | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable MistralAiTemplate Const; MistralAiTemplate: FnTemplate<MistralAiOptions>]
\ No newline at end of file
+MistralAiTemplate | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/MistralBedrockApi-1.html b/docs/variables/MistralBedrockApi-1.html
index 08a304d..9a8eec5 100644
--- a/docs/variables/MistralBedrockApi-1.html
+++ b/docs/variables/MistralBedrockApi-1.html
@@ -1,5 +1,5 @@
-MistralBedrockApi | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable MistralBedrockApi; MistralBedrockApi: MistralBedrockApi; Reference: Mistral on AWS Bedrock]
\ No newline at end of file
+MistralBedrockApi | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/MistralBedrockTemplate.html b/docs/variables/MistralBedrockTemplate.html
index 76f9cc0..9e0593b 100644
--- a/docs/variables/MistralBedrockTemplate.html
+++ b/docs/variables/MistralBedrockTemplate.html
@@ -1 +1 @@
-MistralBedrockTemplate | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable MistralBedrockTemplate Const; MistralBedrockTemplate: FnTemplate<MistralBedrockOptions>]
\ No newline at end of file
+MistralBedrockTemplate | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/OpenAiChatApi.html b/docs/variables/OpenAiChatApi.html
index 46e9fcf..163dc41 100644
--- a/docs/variables/OpenAiChatApi.html
+++ b/docs/variables/OpenAiChatApi.html
@@ -1,7 +1,7 @@
-OpenAiChatApi | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable OpenAiChatApi Const; OpenAiChatApi: ModelApi<OpenAiChatOptions, OpenAiChatResponse>; Reference: OpenAI Chat Completion]
\ No newline at end of file
+OpenAiChatApi | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/docs/variables/OpenAiChatTemplate.html b/docs/variables/OpenAiChatTemplate.html
index 909d207..8248b35 100644
--- a/docs/variables/OpenAiChatTemplate.html
+++ b/docs/variables/OpenAiChatTemplate.html
@@ -1 +1 @@
-OpenAiChatTemplate | generative-ts - v0.1.0-alpha.6 [minified TypeDoc page: Variable OpenAiChatTemplate Const; OpenAiChatTemplate: FnTemplate<OpenAiChatOptions>]
\ No newline at end of file
+OpenAiChatTemplate | generative-ts - v0.1.0-alpha.7 [same page body; only the version in the title changes]
\ No newline at end of file
diff --git a/package-lock.json b/package-lock.json
index b880336..e7e3476 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "generative-ts-development",
-  "version": "0.1.0-alpha.6",
+  "version": "0.1.0-alpha.7",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "generative-ts-development",
-      "version": "0.1.0-alpha.6",
+      "version": "0.1.0-alpha.7",
       "license": "ISC",
       "workspaces": [
         "packages/*",
@@ -10871,7 +10871,7 @@
     },
     "packages/core": {
       "name": "@generative-ts/core",
-      "version": "0.1.0-alpha.6",
+      "version": "0.1.0-alpha.7",
       "license": "ISC",
       "dependencies": {
         "aws4": "^1.12.0",
@@ -10882,22 +10882,22 @@
     },
     "packages/gcloud-vertex-ai": {
       "name": "@generative-ts/gcloud-vertex-ai",
-      "version": "0.1.0-alpha.6",
+      "version": "0.1.0-alpha.7",
       "license": "ISC",
       "dependencies": {
         "google-auth-library": "^9.10.0",
         "tslib": "^2.6.2"
       },
       "peerDependencies": {
-        "@generative-ts/core": "^0.1.0-alpha.6"
+        "@generative-ts/core": "0.1.0-alpha.7"
       }
     },
     "packages/generative-ts": {
-      "version": "0.1.0-alpha.6",
+      "version": "0.1.0-alpha.7",
       "license": "ISC",
       "dependencies": {
-        "@generative-ts/core": "^0.1.0-alpha.6",
-        "@generative-ts/gcloud-vertex-ai": "^0.1.0-alpha.6",
+        "@generative-ts/core": "0.1.0-alpha.7",
+        "@generative-ts/gcloud-vertex-ai": "0.1.0-alpha.7",
         "tslib": "^2.6.2"
       }
     },
diff --git a/package.json b/package.json
index 79e4432..dbd1343 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "generative-ts-development",
-  "version": "0.1.0-alpha.6",
+  "version": "0.1.0-alpha.7",
   "private": true,
   "description": "monorepo development configuration for generative-ts",
   "license": "ISC",
diff --git a/packages/core/package.json b/packages/core/package.json
index 9432ef4..25a188e 100644
--- a/packages/core/package.json
+++ b/packages/core/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@generative-ts/core",
-  "version": "0.1.0-alpha.6",
+  "version": "0.1.0-alpha.7",
   "description": "Core functionalities for generative-ts",
   "main": "dist/index.cjs",
   "module": "dist/index.mjs",
diff --git a/packages/gcloud-vertex-ai/package.json b/packages/gcloud-vertex-ai/package.json
index fc7e343..4c889f4 100644
--- a/packages/gcloud-vertex-ai/package.json
+++ b/packages/gcloud-vertex-ai/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@generative-ts/gcloud-vertex-ai",
-  "version": "0.1.0-alpha.6",
+  "version": "0.1.0-alpha.7",
   "description": "Google Cloud VertexAI support for for generative-ts",
   "main": "dist/index.cjs",
   "module": "dist/index.mjs",
@@ -32,7 +32,7 @@
     "tslib": "^2.6.2"
   },
   "peerDependencies": {
-    "@generative-ts/core": "^0.1.0-alpha.6"
+    "@generative-ts/core": "0.1.0-alpha.7"
   },
   "publishConfig": {
     "access": "public"
diff --git a/packages/generative-ts/package.json b/packages/generative-ts/package.json
index 4c064c8..e36fa4a 100644
--- a/packages/generative-ts/package.json
+++ b/packages/generative-ts/package.json
@@ -1,6 +1,6 @@
 {
   "name": "generative-ts",
-  "version": "0.1.0-alpha.6",
+  "version": "0.1.0-alpha.7",
   "description": "simple, type-safe, isomorphic LLM interactions (with power)",
   "main": "dist/index.cjs",
   "module": "dist/index.mjs",
@@ -29,8 +29,8 @@
     "typecheck": "tsc --noEmit"
   },
   "dependencies": {
-    "@generative-ts/core": "^0.1.0-alpha.6",
-    "@generative-ts/gcloud-vertex-ai": "^0.1.0-alpha.6",
+    "@generative-ts/core": "0.1.0-alpha.7",
+    "@generative-ts/gcloud-vertex-ai": "0.1.0-alpha.7",
     "tslib": "^2.6.2"
   },
   "publishConfig": {