units/en/unit2/llama-index/agents.mdx (6 changes: 5 additions & 1 deletion)
@@ -42,7 +42,11 @@ def multiply(a: int, b: int) -> int:
     return a * b
 
 # initialize llm
-llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
+# Check available providers: https://huggingface.co/inference/models
+llm = HuggingFaceInferenceAPI(
+    model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
+    provider="auto"
+)
 
 # initialize agent
 agent = AgentWorkflow.from_tools_or_functions(
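For context, a minimal sketch of how the updated snippet is exercised in the agents unit; the `multiply` tool and the commented-out `run` call are assumptions drawn from the surrounding course code, not part of this diff.

```python
from llama_index.core.agent.workflow import AgentWorkflow
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI


def multiply(a: int, b: int) -> int:
    """Multiply two numbers."""
    return a * b


# Check available providers: https://huggingface.co/inference/models
llm = HuggingFaceInferenceAPI(
    model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
    provider="auto",
)

# build the agent from plain functions; the function name and docstring
# become the tool name and description
agent = AgentWorkflow.from_tools_or_functions(
    [multiply],
    llm=llm,
)

# AgentWorkflow.run is async, so call it from an event loop or a notebook cell:
# response = await agent.run(user_msg="What is 7 times 6?")
```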
units/en/unit2/llama-index/components.mdx (7 changes: 6 additions & 1 deletion)
@@ -159,7 +159,12 @@ We also pass in an LLM to the query engine to use for the response.
 ```python
 from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
 
-llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
+# Check available providers: https://huggingface.co/inference/models
+llm = HuggingFaceInferenceAPI(
+    model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
+    provider="auto"
+)
+
 query_engine = index.as_query_engine(
     llm=llm,
     response_mode="tree_summarize",
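As a usage sketch for the query-engine snippet: the tiny in-memory index and the `BAAI/bge-small-en-v1.5` embedding model below are illustrative assumptions; only the LLM initialization comes from this diff.

```python
from llama_index.core import Document, VectorStoreIndex
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

# Check available providers: https://huggingface.co/inference/models
llm = HuggingFaceInferenceAPI(
    model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
    provider="auto",
)

# build a minimal index so the example is self-contained
# (the embedding model choice is an assumption for illustration)
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
index = VectorStoreIndex.from_documents(
    [Document(text="Query engines combine retrieval over an index with an LLM response step.")],
    embed_model=embed_model,
)

# same call shape as in the diff context above
query_engine = index.as_query_engine(
    llm=llm,
    response_mode="tree_summarize",
)
response = query_engine.query("How does a query engine produce a response?")
print(response)
```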
units/en/unit2/llama-index/workflows.mdx (6 changes: 5 additions & 1 deletion)
@@ -201,7 +201,11 @@ def multiply(a: int, b: int) -> int:
     """Multiply two numbers."""
     return a * b
 
-llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
+# Check available providers: https://huggingface.co/inference/models
+llm = HuggingFaceInferenceAPI(
+    model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
+    provider="auto"
+)
 
 # we can pass functions directly without FunctionTool -- the fn/docstring are parsed for the name/description
 multiply_agent = ReActAgent(
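For reference, a sketch of how the workflows unit composes `ReActAgent` instances into a multi-agent `AgentWorkflow`; the `add` tool, the agent names, and the commented-out `run` call are assumptions based on the surrounding course material rather than this diff.

```python
from llama_index.core.agent.workflow import AgentWorkflow, ReActAgent
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI


def multiply(a: int, b: int) -> int:
    """Multiply two numbers."""
    return a * b


def add(a: int, b: int) -> int:
    """Add two numbers."""
    return a + b


# Check available providers: https://huggingface.co/inference/models
llm = HuggingFaceInferenceAPI(
    model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
    provider="auto",
)

# tools can be plain functions; the docstring supplies the tool description
multiply_agent = ReActAgent(
    name="multiply_agent",
    description="Is able to multiply two integers",
    tools=[multiply],
    llm=llm,
)
addition_agent = ReActAgent(
    name="add_agent",
    description="Is able to add two integers",
    tools=[add],
    llm=llm,
)

# the root agent receives the user message and can hand off to the other agent
workflow = AgentWorkflow(
    agents=[multiply_agent, addition_agent],
    root_agent="multiply_agent",
)

# run from an event loop or a notebook cell:
# response = await workflow.run(user_msg="Can you add 5 and 3?")
```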
units/en/unit3/agentic-rag/agent.mdx (9 changes: 7 additions & 2 deletions)
@@ -69,7 +69,11 @@ Now, let's combine all these tools into a single agent:
 
 ```python
 # Initialize the Hugging Face model
-llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
+# Check available providers: https://huggingface.co/inference/models
+llm = HuggingFaceInferenceAPI(
+    model_name="Qwen/Qwen2.5-Coder-32B-Instruct",
+    provider="auto"
+)
 
 # Create Alfred with all the tools
 alfred = AgentWorkflow.from_tools_or_functions(
@@ -90,7 +94,8 @@ from langgraph.graph import START, StateGraph
 from langgraph.prebuilt import tools_condition
 from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
 
-from tools import DuckDuckGoSearchRun, weather_info_tool, hub_stats_tool
+from langchain_community.tools import DuckDuckGoSearchRun
+from tools import weather_info_tool, hub_stats_tool
 from retriever import guest_info_tool
 ```
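The second hunk only changes where `DuckDuckGoSearchRun` is imported from. A minimal standalone check of the imported tool (the query string is illustrative; in the unit the tool is passed to the LangGraph agent together with `weather_info_tool`, `hub_stats_tool`, and `guest_info_tool`):

```python
from langchain_community.tools import DuckDuckGoSearchRun

# instantiate the community search tool (requires the duckduckgo-search package)
search_tool = DuckDuckGoSearchRun()

# run a one-off search to confirm the import works
results = search_tool.invoke("Hugging Face agents course")
print(results)
```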