Skip to content

Commit 811cd88

Browse files
committed
chore(dependencies): Bump langchain and langgraph libs to 1.0.x
1 parent 38bdbe6 commit 811cd88

File tree

2 files changed

+9
-9
lines changed

2 files changed

+9
-9
lines changed

pyproject.toml

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -30,19 +30,19 @@ dependencies = [
3030
"cachetools ~= 6.2.1",
3131
"colorlog ~= 6.10.1",
3232
"fastapi ~= 0.119.0",
33-
"langchain ~= 0.3.27",
34-
"langchain-community ~= 0.3.31",
35-
"langchain-core ~= 0.3.76",
36-
"langchain-openai ~= 0.3.35",
37-
"langfuse ~= 3.7.0",
38-
"langgraph ~= 0.6.10",
33+
"langchain ~= 1.0.1",
34+
"langchain-community ~= 0.4.0",
35+
"langchain-core ~= 1.0.0",
36+
"langchain-openai ~= 1.0.0",
37+
"langfuse ~= 3.8.0",
38+
"langgraph ~= 1.0.1",
3939
"langsmith ~= 0.4.37",
4040
"fastmcp ~= 2.12.5",
4141
"pymilvus ~= 2.6.2",
4242
"python-dotenv ~= 1.1.1",
4343
"tiktoken ~= 0.12.0",
4444
"tqdm ~= 4.67.1",
45-
"uvicorn ~= 0.37.0",
45+
"uvicorn ~= 0.38.0",
4646
]
4747

4848
[project.optional-dependencies]

wiki_rag/search/util.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,6 @@
1010
from typing import Annotated, Literal, TypedDict
1111

1212
from cachetools import TTLCache, cached
13-
from langchain import hub
1413
from langchain_core.messages import BaseMessage
1514
from langchain_core.prompts import (
1615
ChatPromptTemplate,
@@ -26,6 +25,7 @@
2625
from langgraph.graph import END, START, StateGraph
2726
from langgraph.graph.state import CompiledStateGraph
2827
from langgraph.runtime import Runtime
28+
from langsmith.client import Client
2929
from pymilvus import AnnSearchRequest, MilvusClient, WeightedRanker
3030

3131
import wiki_rag.index as index
@@ -129,7 +129,7 @@ def load_prompts_for_rag(prompt_name: str) -> ChatPromptTemplate:
129129
prefixed_prompt_name = f"{os.getenv("LANGSMITH_PROMPT_PREFIX")}{prompt_name}"
130130
logger.info(f"Loading the prompt {prefixed_prompt_name} from LangSmith.")
131131
prompt_provider = "LangSmith"
132-
chat_prompt = hub.pull(prefixed_prompt_name)
132+
chat_prompt = Client().pull_prompt(prefixed_prompt_name)
133133
elif os.getenv("LANGFUSE_PROMPTS", "false") == "true":
134134
langfuse = Langfuse()
135135
prefixed_prompt_name = f"{os.getenv("LANGFUSE_PROMPT_PREFIX")}{prompt_name}"

0 commit comments

Comments (0)