From d1a96c7893792295526a22e399307fc2757e86f4 Mon Sep 17 00:00:00 2001
From: Arunkumar S
Date: Mon, 10 Nov 2025 21:31:03 +0530
Subject: [PATCH] feat(core): strip message IDs from cache keys using
 model_copy (closes #33883)

---
 .../core/langchain_core/language_models/chat_models.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/libs/core/langchain_core/language_models/chat_models.py b/libs/core/langchain_core/language_models/chat_models.py
index f37113b4661e6..0556a708a9a5c 100644
--- a/libs/core/langchain_core/language_models/chat_models.py
+++ b/libs/core/langchain_core/language_models/chat_models.py
@@ -1128,7 +1128,15 @@ def _generate_with_cache(
         if check_cache:
             if llm_cache:
                 llm_string = self._get_llm_string(stop=stop, **kwargs)
-                prompt = dumps(messages)
+                normalized_messages = [
+                    (
+                        msg.model_copy(update={"id": None})
+                        if getattr(msg, "id", None) is not None
+                        else msg
+                    )
+                    for msg in messages
+                ]
+                prompt = dumps(normalized_messages)
                 cache_val = llm_cache.lookup(prompt, llm_string)
                 if isinstance(cache_val, list):
                     converted_generations = self._convert_cached_generations(cache_val)