From 3939ff47ef2d0b29579f3ace7dc53e3a86627c98 Mon Sep 17 00:00:00 2001 From: "Q.A.zh" <40236765+QAbot-zh@users.noreply.github.com> Date: Wed, 14 Aug 2024 02:33:39 +0000 Subject: [PATCH] fix: remove the condition that uses max_tokens to reduce the context --- app/store/chat.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/store/chat.ts b/app/store/chat.ts index 653926d1b02..11659796e66 100644 --- a/app/store/chat.ts +++ b/app/store/chat.ts @@ -494,13 +494,13 @@ export const useChatStore = createPersistStore( : shortTermMemoryStartIndex; // and if user has cleared history messages, we should exclude the memory too. const contextStartIndex = Math.max(clearContextIndex, memoryStartIndex); - const maxTokenThreshold = modelConfig.max_tokens; + // const maxTokenThreshold = modelConfig.max_tokens; // get recent messages as much as possible const reversedRecentMessages = []; for ( let i = totalMessageCount - 1, tokenCount = 0; - i >= contextStartIndex && tokenCount < maxTokenThreshold; + i >= contextStartIndex ;//&& tokenCount < maxTokenThreshold; i -= 1 ) { const msg = messages[i];