Skip to content

Commit d1a96c7

Browse files
committed
feat(core): strip message IDs from cache keys using model_copy (closes #33883)
1 parent 31b5e48 commit d1a96c7

File tree

1 file changed

+9
-1
lines changed

1 file changed

+9
-1
lines changed

libs/core/langchain_core/language_models/chat_models.py

Lines changed: 9 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1128,7 +1128,15 @@ def _generate_with_cache(
11281128
if check_cache:
11291129
if llm_cache:
11301130
llm_string = self._get_llm_string(stop=stop, **kwargs)
1131-
prompt = dumps(messages)
1131+
normalized_messages = [
1132+
(
1133+
msg.model_copy(update={"id": None})
1134+
if getattr(msg, "id", None) is not None
1135+
else msg
1136+
)
1137+
for msg in messages
1138+
]
1139+
prompt = dumps(normalized_messages)
11321140
cache_val = llm_cache.lookup(prompt, llm_string)
11331141
if isinstance(cache_val, list):
11341142
converted_generations = self._convert_cached_generations(cache_val)

0 commit comments

Comments (0)