Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion plugins/conversation-insights/plugin.definition.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import llm from './bp_modules/llm'

export default new PluginDefinition({
name: 'conversation-insights',
version: '0.2.1',
version: '0.2.2',
configuration: {
schema: z.object({ modelId: z.string() }),
},
Expand All @@ -22,6 +22,10 @@ export default new PluginDefinition({
title: 'Participant count',
description: 'The count of users having participated in the conversation, including the bot. Type: int',
},
sentiment: {
title: 'Sentiment',
description: 'The sentiment that best describes the conversation. Type: enum Sentiments',
},
},
},
events: {
Expand Down
12 changes: 7 additions & 5 deletions plugins/conversation-insights/src/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import * as sdk from '@botpress/sdk'
import * as summaryUpdater from './summaryUpdater'
import * as updateScheduler from './summaryUpdateScheduler'
import * as summaryUpdater from './tagsUpdater'
import * as types from './types'
import * as bp from '.botpress'

Expand All @@ -15,7 +15,7 @@ plugin.on.afterIncomingMessage('*', async (props) => {
const { message_count } = await _onNewMessage({ ...props, conversation })

if (updateScheduler.isTimeToUpdate(message_count)) {
props.client.createEvent({ payload: {}, type: 'updateSummary', conversationId: props.data.conversationId })
await props.events.updateSummary.withConversationId(props.data.conversationId).emit({})
}

return undefined
Expand Down Expand Up @@ -52,8 +52,10 @@ const _onNewMessage = async (
}

plugin.on.event('updateSummary', async (props) => {
const messages = await props.client.listMessages({ conversationId: props.event.conversationId })
const newMessages: string[] = messages.messages.map((message) => message.payload.text)
const firstMessagePage = await props.client
.listMessages({ conversationId: props.event.conversationId })
.then((res) => res.messages)

if (!props.event.conversationId) {
throw new sdk.RuntimeError(`The conversationId cannot be null when calling the event '${props.event.type}'`)
}
Expand All @@ -62,7 +64,7 @@ plugin.on.event('updateSummary', async (props) => {
await summaryUpdater.updateTitleAndSummary({
...props,
conversation: conversation.conversation,
messages: newMessages,
messages: firstMessagePage,
})
})

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
import * as sdk from '@botpress/sdk'
import JSON5 from 'json5'
import { jsonrepair } from 'jsonrepair'
import { OutputFormat } from './summary-prompt'
import * as bp from '.botpress'

export type LLMInput = bp.interfaces.llm.actions.generateContent.input.Input
Expand All @@ -10,9 +9,9 @@ export type LLMOutput = bp.interfaces.llm.actions.generateContent.output.Output
export type LLMMessage = LLMInput['messages'][number]
export type LLMChoice = LLMOutput['choices'][number]

type PredictResponse = {
export type PredictResponse<T> = {
success: boolean
json: OutputFormat
json: T
}

const tryParseJson = (str: string) => {
Expand All @@ -23,7 +22,7 @@ const tryParseJson = (str: string) => {
}
}

export const parseLLMOutput = (output: LLMOutput): PredictResponse => {
export const parseLLMOutput = <T>(output: LLMOutput): PredictResponse<T> => {
const mappedChoices: LLMChoice['content'][] = output.choices.map((choice) => choice.content)
if (!mappedChoices[0]) throw new sdk.RuntimeError('Could not parse LLM output')
const firstChoice = mappedChoices[0]
Expand Down
47 changes: 47 additions & 0 deletions plugins/conversation-insights/src/prompt/prompt.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import { z } from '@botpress/sdk'
import { LLMInput } from './parse-content'
import * as bp from '.botpress'

// Loose schema for the LLM's sentiment answer: any string tagged with a description.
// NOTE(review): this symbol appears unused within this module, and a stricter
// variant (z.enum of the five allowed sentiments) is declared in
// sentiment-prompt.ts — confirm whether this z.string() duplicate can be removed.
export type SentimentAnalysisOutput = z.infer<typeof SentimentAnalysisOutput>
export const SentimentAnalysisOutput = z.object({
  sentiment: z.string().describe('The sentiment that best describes the conversation'),
})

// Shape of the raw prompt input: a plain list of message strings.
// NOTE(review): InputFormat is not referenced anywhere in this module —
// presumably a leftover from the previous string[]-based prompt pipeline;
// verify against callers and remove if dead.
export type InputFormat = z.infer<typeof InputFormat>
export const InputFormat = z.array(z.string())

/**
 * Converts conversation messages into the chat format expected by the LLM.
 *
 * The serialized `context` object is injected as a leading assistant message,
 * followed by the text messages attributed to 'assistant' (when sent by the
 * bot) or 'user' (anyone else).
 */
const formatMessages = (
  messages: PromptArgs['messages'],
  context: PromptArgs['context'],
  botId: string
): LLMInput['messages'] => {
  // The context always comes first so the model reads it before the transcript.
  const header: LLMInput['messages'][0] = {
    role: 'assistant',
    content: `Context: ${JSON.stringify(context)}`,
  }

  // Only text messages can be analysed; every other payload type is skipped.
  const chatTurns: LLMInput['messages'] = messages
    .filter((msg) => msg.type === 'text')
    .map((msg) => ({
      role: msg.userId === botId ? ('assistant' as const) : ('user' as const),
      content: msg.payload.text,
    }))

  // Incoming list is reversed before being handed to the model — presumably the
  // source lists messages newest-first and the LLM expects oldest-first; TODO confirm.
  chatTurns.reverse()
  return [header, ...chatTurns]
}

/**
 * Arguments required to build an LLM request for conversation analysis.
 * `context` is serialized verbatim into the leading context message; `botId`
 * decides which transcript messages get the 'assistant' role.
 */
export type PromptArgs = {
  systemPrompt: string
  messages: bp.MessageHandlerProps['message'][]
  model: { id: string }
  context: object
  botId: string
}

/**
 * Builds a JSON-mode LLM request from the given prompt arguments.
 * Temperature is pinned to 0 so repeated runs on the same conversation
 * produce the same analysis.
 */
export const createPrompt = (args: PromptArgs): LLMInput => {
  const { systemPrompt, messages, context, botId, model } = args
  return {
    responseFormat: 'json_object',
    temperature: 0,
    systemPrompt: systemPrompt.trim(),
    messages: formatMessages(messages, context, botId),
    model,
  }
}
89 changes: 89 additions & 0 deletions plugins/conversation-insights/src/prompt/sentiment-prompt.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,89 @@
import { z } from '@botpress/sdk'
import { LLMInput } from './parse-content'
import * as prompt from './prompt'

// Schema for the LLM's JSON answer: exactly one sentiment drawn from the
// closed set below. Parsed output is validated against this shape.
export type SentimentAnalysisOutput = z.infer<typeof SentimentAnalysisOutput>
export const SentimentAnalysisOutput = z.object({
  sentiment: z
    .enum(['very_negative', 'negative', 'neutral', 'positive', 'very_positive'])
    .describe('The sentiment that best describes the conversation'),
})

// Human-readable union of the allowed values (` "very_negative" | "negative" | … `),
// derived from the enum so prompt and schema cannot drift apart; interpolated
// into the system prompt below.
export const SENTIMENT_OPTIONS = SentimentAnalysisOutput.shape.sentiment.options.map((opt) => ` "${opt}" `).join('|')

// Same arguments as the shared prompt builder, minus the system prompt this module supplies.
export type PromptArgs = Omit<prompt.PromptArgs, 'systemPrompt'>

/**
 * Builds the LLM request that classifies the conversation's overall sentiment.
 * Delegates message formatting to the shared prompt builder and supplies the
 * sentiment-analysis system prompt; the model must answer with JSON matching
 * SentimentAnalysisOutput.
 *
 * Fixes over the previous prompt text: "reply the sentiment" grammar,
 * "frustation" typo, trailing commas that made the "valid JSON" examples
 * invalid JSON, and the second example's context line, which did not match
 * the `Context: {...}` format the shared builder actually emits.
 */
export const createPrompt = (args: PromptArgs): LLMInput =>
  prompt.createPrompt({
    ...args,
    systemPrompt: `
You are a conversation analyser.
You will be given:
- A previous sentiment
- An array of messages

Your task is to reply with the sentiment that best describes the overall conversation.

Return your response only in valid JSON using the following type:

\`\`\`json
{
  "sentiment": ${SENTIMENT_OPTIONS} // The latest sentiment of the conversation
}
\`\`\`

Instructions:

- Consider the previous sentiment when choosing the new one — keep it if still relevant, or update it if needed.
- Focus on the most recent sentiment of the conversation.
- Only use the available sentiments
- Do not include extra commentary, formatting, or explanation outside the JSON output.
- The messages are in order, which means the most recent ones are at the end of the list.
- Keep in mind that your own messages are included in the messages, but have the 'assistant' role

The available sentiments are: ${SENTIMENT_OPTIONS}

Examples:

Input:

\`\`\`json
{
  "messages": [
    "Context: {'previousSentiment': 'negative'}",
    "User: I hate your service. I want to unsubscribe right now!",
    "Bot: I understand your frustration, but there is nothing we can do",
    "User: I want a refund."
  ]
}
\`\`\`

Output:

\`\`\`json
{
  "sentiment": "very_negative"
}
\`\`\`

Input:

\`\`\`json
{
  "messages": [
    "Context: {'previousSentiment': 'neutral'}",
    "User: Hi, how could I get a premium subscription?",
    "Bot: You can get it by clicking on the link I just sent you.",
    "User: Thank you so much, your help has changed my life"
  ]
}
\`\`\`

Output:

\`\`\`json
{
  "sentiment": "very_positive"
}
\`\`\`
`,
  })
Original file line number Diff line number Diff line change
@@ -1,42 +1,18 @@
import { z } from '@botpress/sdk'
import { LLMInput } from './generate-content'
import { LLMInput } from './parse-content'
import * as prompt from './prompt'

export type OutputFormat = z.infer<typeof OutputFormat>
export const OutputFormat = z.object({
title: z.string().describe('A fitting title for the conversation'),
summary: z.string().describe('A short summary of the conversation'),
})

export type InputFormat = z.infer<typeof InputFormat>
export const InputFormat = z.array(z.string())

const formatMessages = (
messages: string[],
context: PromptArgs['context']
): { role: 'user' | 'assistant'; content: string } => {
return {
role: 'user',
content: JSON.stringify(
{
previousTitle: context.previousTitle,
previousSummary: context.previousSummary,
messages: messages.reverse(),
},
null,
2
),
}
}

export type PromptArgs = {
messages: string[]
model: { id: string }
context: { previousSummary?: string; previousTitle?: string }
}
export const createPrompt = (args: PromptArgs): LLMInput => ({
responseFormat: 'json_object',
temperature: 0,
systemPrompt: `
export type PromptArgs = Omit<prompt.PromptArgs, 'systemPrompt'>
export const createPrompt = (args: PromptArgs): LLMInput =>
prompt.createPrompt({
...args,
systemPrompt: `
You are a conversation summarizer.
You will be given:
- A previous title and summary
Expand Down Expand Up @@ -68,13 +44,12 @@ Input:

\`\`\`json
{
"previousTitle": "Used cars",
"previousSummary": "The user is talking abous a used Toyota Matrix",
"messages": [
"What mileage should I expect from a car that was made two years ago?",
"What price should I expect from a car manufactured in 2011?",
"What should I look out for when buying a secondhand Toyota Matrix?",
"I am looking to buy a used car, what would you recommend?",
"Context: {'previousTitle': 'Used cars', 'previousSummary': 'The user is talking abous a used Toyota Matrix'}",
"User: What mileage should I expect from a car that was made two years ago?",
"User: What price should I expect from a car manufactured in 2011?",
"User: What should I look out for when buying a secondhand Toyota Matrix?",
"User: I am looking to buy a used car, what would you recommend?",
]
}
\`\`\`
Expand All @@ -87,7 +62,5 @@ Output:
"summary": "The user is seeking advice on purchasing a used car."
}
\`\`\`
`.trim(),
messages: [formatMessages(args.messages, args.context)],
model: args.model,
})
`,
})
44 changes: 0 additions & 44 deletions plugins/conversation-insights/src/summaryUpdater.ts

This file was deleted.

Loading
Loading