Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion packages/chat-client/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@botpress/chat",
"version": "0.5.1",
"version": "0.5.2",
"description": "Botpress Chat API Client",
"main": "./dist/index.cjs",
"module": "./dist/index.mjs",
Expand Down
29 changes: 29 additions & 0 deletions packages/chat-client/src/client.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@ import * as consts from './consts'
import * as errors from './errors'
import { apiVersion, Client as AutoGeneratedClient } from './gen/client'
import jwt from './jsonwebtoken'
import { AsyncCollection } from './listing'
import { SignalListener } from './signal-listener'
import * as types from './types'

Expand Down Expand Up @@ -103,6 +104,23 @@ export class Client implements IClient {
public readonly createEvent: IClient['createEvent'] = (x) => this._call('createEvent', x)
public readonly getEvent: IClient['getEvent'] = (x) => this._call('getEvent', x)

/**
 * Paginated listing helpers. Each entry wraps the corresponding list
 * endpoint in an AsyncCollection so callers can iterate records lazily
 * (pages are fetched on demand) or gather them with `.collect()`.
 */
public get list() {
  return {
    conversations: (props: types.ClientRequests['listConversations']) =>
      new AsyncCollection(({ nextToken }) =>
        // The iterator's page token must take precedence once pagination has
        // started; a caller-supplied `nextToken` only seeds the first page.
        // Spreading `props` after `nextToken` would re-apply the caller's
        // token on every page and loop on the same page forever.
        this.listConversations({ ...props, nextToken: nextToken ?? props.nextToken }).then((r) => ({
          ...r,
          items: r.conversations,
        }))
      ),
    messages: (props: types.ClientRequests['listMessages']) =>
      new AsyncCollection(({ nextToken }) =>
        this.listMessages({ ...props, nextToken: nextToken ?? props.nextToken }).then((r) => ({
          ...r,
          items: r.messages,
        }))
      ),
    participants: (props: types.ClientRequests['listParticipants']) =>
      new AsyncCollection(({ nextToken }) =>
        this.listParticipants({ ...props, nextToken: nextToken ?? props.nextToken }).then((r) => ({
          ...r,
          items: r.participants,
        }))
      ),
  }
}

public readonly listenConversation: IClient['listenConversation'] = async ({ id, 'x-user-key': userKey }) => {
const signalListener = await SignalListener.listen({
url: this._apiUrl,
Expand Down Expand Up @@ -257,4 +275,15 @@ export class AuthenticatedClient implements IAuthenticatedClient {
this._client.createEvent({ 'x-user-key': this.user.key, ...x })
public readonly getEvent: IAuthenticatedClient['getEvent'] = (x) =>
this._client.getEvent({ 'x-user-key': this.user.key, ...x })

/**
 * Listing helpers that automatically inject this user's key into each
 * request before delegating to the underlying client's `list` collection
 * builders. The key is read lazily, at call time, not when the getter runs.
 */
public get list() {
  const withUserKey = <X extends object>(x: X) => ({ 'x-user-key': this.user.key, ...x })
  return {
    conversations: (x: types.AuthenticatedClientRequests['listConversations']) =>
      this._client.list.conversations(withUserKey(x)),
    messages: (x: types.AuthenticatedClientRequests['listMessages']) =>
      this._client.list.messages(withUserKey(x)),
    participants: (x: types.AuthenticatedClientRequests['listParticipants']) =>
      this._client.list.participants(withUserKey(x)),
  }
}
}
29 changes: 29 additions & 0 deletions packages/chat-client/src/listing.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
export type PageLister<R> = (t: { nextToken?: string }) => Promise<{ items: R[]; meta: { nextToken?: string } }>

/**
 * Lazily iterates a paginated listing endpoint as a single async stream.
 *
 * Pages are fetched on demand through the supplied `PageLister`: the next
 * page is only requested after the previous page's items have been consumed,
 * and iteration stops when a page comes back without a `meta.nextToken`.
 */
export class AsyncCollection<T> {
  public constructor(private _list: PageLister<T>) {}

  public async *[Symbol.asyncIterator]() {
    let nextToken: string | undefined
    do {
      const { items, meta } = await this._list({ nextToken })
      nextToken = meta.nextToken
      for (const item of items) {
        yield item
      }
    } while (nextToken)
  }

  /**
   * Gathers items into an array, fetching only as many pages as needed.
   *
   * @param props.limit - maximum number of items to return (unbounded by default)
   * @returns at most `limit` items, in listing order
   */
  public async collect(props: { limit?: number } = {}): Promise<T[]> {
    const limit = props.limit ?? Number.POSITIVE_INFINITY
    const arr: T[] = []
    // A non-positive limit collects nothing; bail out before fetching any
    // page. (Checking the limit only after pushing would return one item
    // for `limit: 0`.)
    if (limit <= 0) {
      return arr
    }
    for await (const item of this) {
      arr.push(item)
      if (arr.length >= limit) {
        break
      }
    }
    return arr
  }
}
2 changes: 1 addition & 1 deletion packages/cli/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
"types": "dist/index.d.ts",
"dependencies": {
"@apidevtools/json-schema-ref-parser": "^11.7.0",
"@botpress/chat": "0.5.1",
"@botpress/chat": "0.5.2",
"@botpress/client": "1.25.0",
"@botpress/sdk": "4.15.11",
"@bpinternal/const": "^0.1.0",
Expand Down
2 changes: 1 addition & 1 deletion plugins/conversation-insights/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@
},
"private": true,
"dependencies": {
"@botpress/cognitive": "workspace:*",
"@botpress/sdk": "workspace:*",
"browser-or-node": "^2.1.1",
"json5": "^2.2.3",
"jsonrepair": "^3.10.0"
},
"devDependencies": {
Expand Down
7 changes: 1 addition & 6 deletions plugins/conversation-insights/plugin.definition.ts
Original file line number Diff line number Diff line change
@@ -1,12 +1,10 @@
import { PluginDefinition, z } from '@botpress/sdk'
import llm from './bp_modules/llm'

export default new PluginDefinition({
name: 'conversation-insights',
version: '0.3.2',
version: '0.4.0',
configuration: {
schema: z.object({
modelId: z.string().describe('The AI model id (ex: gpt-4.1-nano-2025-04-14)'),
aiEnabled: z.boolean().default(true).describe('Set to true to enable title, summary and sentiment ai generation'),
}),
},
Expand Down Expand Up @@ -42,9 +40,6 @@ export default new PluginDefinition({
},
},
workflows: { updateAllConversations: { input: { schema: z.object({}) }, output: { schema: z.object({}) } } },
interfaces: {
llm,
},
__advanced: {
useLegacyZuiTransformer: true,
},
Expand Down
51 changes: 51 additions & 0 deletions plugins/conversation-insights/src/prompt/parse-content.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
import { describe, it, expect } from 'vitest'
import { parseLLMOutput } from './parse-content'
import * as sdk from '@botpress/sdk'
import { z } from '@botpress/sdk'
import * as cognitive from '@botpress/cognitive'

// Builds a minimal cognitive.GenerateContentOutput fixture that wraps the
// given `content` as the single assistant choice, with zeroed usage/cost
// metadata and placeholder provider/model identifiers.
const COGNITIVE_OUTPUT = (content: string): cognitive.GenerateContentOutput => {
  const zeroUsage = { inputCost: 0, inputTokens: 0, outputTokens: 0, outputCost: 0 }
  return {
    provider: 'test-provider',
    model: 'test-model',
    botpress: { cost: 0 },
    id: '',
    usage: zeroUsage,
    choices: [{ index: 0, role: 'assistant', stopReason: 'other', content }],
  }
}

const CONTENT_PARSE_SCHEMA = z.object({ foo: z.string(), bar: z.number() })
// The type argument to parseLLMOutput is the *parsed payload* type
// (z.infer of the schema), not the schema object type itself.
type ContentParseOutput = z.infer<typeof CONTENT_PARSE_SCHEMA>

describe('parseLLMOutput', () => {
  it('valid json parsing is successful', () => {
    const output = COGNITIVE_OUTPUT(`{"foo": "hello", "bar": 42}`)

    const result = parseLLMOutput<ContentParseOutput>({ schema: CONTENT_PARSE_SCHEMA, ...output })

    expect(result.success).toBe(true)
  })

  it('invalid json parsing throws an error', () => {
    const output = COGNITIVE_OUTPUT(`not a json`)

    // Note: previously this test passed `typeof CONTENT_PARSE_SCHEMA` as the
    // type argument (the Zod schema object type), inconsistent with the
    // other tests — the payload type is what the generic expects.
    expect(() => {
      parseLLMOutput<ContentParseOutput>({ schema: CONTENT_PARSE_SCHEMA, ...output })
    }).toThrowError(sdk.ZodError)
  })

  it('empty choices parsing throws an error', () => {
    expect(() => parseLLMOutput<any>({ choices: [] } as any)).toThrow(sdk.RuntimeError)
  })

  it('valid json with whitespaces parsing is successful', () => {
    const output = COGNITIVE_OUTPUT(` { "foo": "bar", "bar": 123 } `)

    const result = parseLLMOutput<ContentParseOutput>({ schema: CONTENT_PARSE_SCHEMA, ...output })

    expect(result.success).toBe(true)
  })
})
26 changes: 11 additions & 15 deletions plugins/conversation-insights/src/prompt/parse-content.ts
Original file line number Diff line number Diff line change
@@ -1,33 +1,29 @@
import * as cognitive from '@botpress/cognitive'
import * as sdk from '@botpress/sdk'
import JSON5 from 'json5'
import { jsonrepair } from 'jsonrepair'
import * as bp from '.botpress'

export type LLMInput = bp.interfaces.llm.actions.generateContent.input.Input
export type LLMOutput = bp.interfaces.llm.actions.generateContent.output.Output
export type LLMInput = cognitive.GenerateContentInput

export type LLMMessage = LLMInput['messages'][number]
export type LLMChoice = LLMOutput['choices'][number]
type LLMChoice = cognitive.GenerateContentOutput['choices'][number]

export type PredictResponse<T> = {
success: boolean
json: T
}

const tryParseJson = (str: string) => {
try {
return JSON5.parse(jsonrepair(str))
} catch {
return str
}
const parseJson = <T>(expectedSchema: sdk.ZodSchema, str: string): T => {
const repaired = jsonrepair(str)
const parsed = JSON.parse(repaired)
return expectedSchema.parse(parsed)
}

export const parseLLMOutput = <T>(output: LLMOutput): PredictResponse<T> => {
const mappedChoices: LLMChoice['content'][] = output.choices.map((choice) => choice.content)
type ParseLLMOutputProps = cognitive.GenerateContentOutput & { schema: sdk.ZodSchema }
export const parseLLMOutput = <T>(props: ParseLLMOutputProps): PredictResponse<T> => {
const mappedChoices: LLMChoice['content'][] = props.choices.map((choice) => choice.content)
if (!mappedChoices[0]) throw new sdk.RuntimeError('Could not parse LLM output')
const firstChoice = mappedChoices[0]
return {
success: true,
json: tryParseJson(firstChoice.toString()),
json: parseJson<T>(props.schema, firstChoice.toString()),
}
}
2 changes: 0 additions & 2 deletions plugins/conversation-insights/src/prompt/prompt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ const formatMessages = (
export type PromptArgs = {
systemPrompt: string
messages: bp.MessageHandlerProps['message'][]
model: { id: string }
context: object
botId: string
}
Expand All @@ -43,5 +42,4 @@ export const createPrompt = (args: PromptArgs): LLMInput => ({
temperature: 0,
systemPrompt: args.systemPrompt.trim(),
messages: formatMessages(args.messages, args.context, args.botId),
model: args.model,
})
4 changes: 2 additions & 2 deletions plugins/conversation-insights/src/prompt/summary-prompt.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@ import { z } from '@botpress/sdk'
import { LLMInput } from './parse-content'
import * as prompt from './prompt'

export type OutputFormat = z.infer<typeof OutputFormat>
export const OutputFormat = z.object({
export type SummaryOutput = z.infer<typeof SummaryOutput>
export const SummaryOutput = z.object({
title: z.string().describe('A fitting title for the conversation'),
summary: z.string().describe('A short summary of the conversation'),
})
Expand Down
27 changes: 19 additions & 8 deletions plugins/conversation-insights/src/tagsUpdater.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import * as cognitive from '@botpress/cognitive'
import * as sdk from '@botpress/sdk'
import * as gen from './prompt/parse-content'
import * as sentiment from './prompt/sentiment-prompt'
import * as summarizer from './prompt/summary-prompt'
Expand All @@ -9,32 +11,35 @@ type CommonProps = types.CommonProps
type UpdateTitleAndSummaryProps = CommonProps & {
conversation: bp.MessageHandlerProps['conversation']
messages: bp.MessageHandlerProps['message'][]
client: cognitive.BotpressClientLike
}
export const updateTitleAndSummary = async (props: UpdateTitleAndSummaryProps) => {
const summaryPrompt = summarizer.createPrompt({
messages: props.messages,
botId: props.ctx.botId,
model: { id: props.configuration.modelId },
context: { previousTitle: props.conversation.tags.title, previousSummary: props.conversation.tags.summary },
})

const parsedSummary = await _generateContentWithRetries<summarizer.OutputFormat>({
const parsedSummary = await _generateContentWithRetries<summarizer.SummaryOutput>({
actions: props.actions,
logger: props.logger,
prompt: summaryPrompt,
client: props.client,
schema: summarizer.SummaryOutput,
})

const sentimentPrompt = sentiment.createPrompt({
messages: props.messages,
botId: props.ctx.botId,
context: { previousSentiment: props.conversation.tags.sentiment },
model: { id: props.configuration.modelId },
})

const parsedSentiment = await _generateContentWithRetries<sentiment.SentimentAnalysisOutput>({
actions: props.actions,
logger: props.logger,
prompt: sentimentPrompt,
client: props.client,
schema: sentiment.SentimentAnalysisOutput,
})

await props.client.updateConversation({
Expand All @@ -53,18 +58,24 @@ type ParsePromptProps = {
actions: UpdateTitleAndSummaryProps['actions']
logger: UpdateTitleAndSummaryProps['logger']
prompt: gen.LLMInput
client: cognitive.BotpressClientLike
schema: sdk.ZodSchema
}
const _generateContentWithRetries = async <T>(props: ParsePromptProps): Promise<gen.PredictResponse<T>> => {
let attemptCount = 0
const maxRetries = 3

let llmOutput = await props.actions.llm.generateContent(props.prompt)
let parsed = gen.parseLLMOutput<T>(llmOutput)
const cognitiveClient = new cognitive.Cognitive({ client: props.client, __experimental_beta: true })
let llmOutput = await cognitiveClient.generateContent(props.prompt)
let parsed = gen.parseLLMOutput<T>({ schema: props.schema, ...llmOutput.output })

while (!parsed.success && attemptCount < maxRetries) {
props.logger.debug(`Attempt ${attemptCount + 1}: The LLM output did not respect the schema.`, parsed.json)
llmOutput = await props.actions.llm.generateContent(props.prompt)
parsed = gen.parseLLMOutput<T>(llmOutput)
props.logger.debug(
`Attempt ${attemptCount + 1}: The LLM output did not respect the schema. It submitted: `,
parsed.json
)
llmOutput = await cognitiveClient.generateContent(props.prompt)
parsed = gen.parseLLMOutput<T>({ schema: props.schema, ...llmOutput.output })
attemptCount++
}

Expand Down
8 changes: 4 additions & 4 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading