Skip to content

Commit 5263bde

Browse files
feat: Use selected LlamaCloud index in multi-agent template (#350)
1 parent 4dee65b commit 5263bde

File tree

8 files changed

+47
-31
lines changed

8 files changed

+47
-31
lines changed

.changeset/witty-hotels-do.md

+5
Original file line number · Diff line number · Diff line change
@@ -0,0 +1,5 @@
1+
---
2+
"create-llama": patch
3+
---
4+
5+
Use selected LlamaCloud index in multi-agent template

templates/components/multiagent/python/app/api/routers/chat.py

+2-2
Original file line number · Diff line number · Diff line change
@@ -28,8 +28,8 @@ async def chat(
2828
# but agent workflow does not support them yet
2929
# ignore chat params and use all documents for now
3030
# TODO: generate filters based on doc_ids
31-
# TODO: use chat params
32-
engine = get_chat_engine(chat_history=messages)
31+
params = data.data or {}
32+
engine = get_chat_engine(chat_history=messages, params=params)
3333

3434
event_handler = engine.run(input=last_message_content, streaming=True)
3535
return VercelStreamResponse(

templates/components/multiagent/python/app/engine/engine.py

+3-3
Original file line number · Diff line number · Diff line change
@@ -18,11 +18,11 @@ def get_chat_engine(
1818
agent_type = os.getenv("EXAMPLE_TYPE", "").lower()
1919
match agent_type:
2020
case "choreography":
21-
agent = create_choreography(chat_history)
21+
agent = create_choreography(chat_history, **kwargs)
2222
case "orchestrator":
23-
agent = create_orchestrator(chat_history)
23+
agent = create_orchestrator(chat_history, **kwargs)
2424
case _:
25-
agent = create_workflow(chat_history)
25+
agent = create_workflow(chat_history, **kwargs)
2626

2727
logger.info(f"Using agent pattern: {agent_type}")
2828

templates/components/multiagent/python/app/examples/choreography.py

+6-4
Original file line number · Diff line number · Diff line change
@@ -8,8 +8,8 @@
88
from llama_index.core.chat_engine.types import ChatMessage
99

1010

11-
def create_choreography(chat_history: Optional[List[ChatMessage]] = None):
12-
researcher = create_researcher(chat_history)
11+
def create_choreography(chat_history: Optional[List[ChatMessage]] = None, **kwargs):
12+
researcher = create_researcher(chat_history, **kwargs)
1313
publisher = create_publisher(chat_history)
1414
reviewer = FunctionCallingAgent(
1515
name="reviewer",
@@ -21,12 +21,14 @@ def create_choreography(chat_history: Optional[List[ChatMessage]] = None):
2121
name="writer",
2222
agents=[researcher, reviewer, publisher],
2323
description="expert in writing blog posts, needs researched information and images to write a blog post",
24-
system_prompt=dedent("""
24+
system_prompt=dedent(
25+
"""
2526
You are an expert in writing blog posts. You are given a task to write a blog post. Before starting to write the post, consult the researcher agent to get the information you need. Don't make up any information yourself.
2627
After creating a draft for the post, send it to the reviewer agent to receive feedback and make sure to incorporate the feedback from the reviewer.
2728
You can consult the reviewer and researcher a maximum of two times. Your output should contain only the blog post.
2829
Finally, always request the publisher to create a document (PDF, HTML) and publish the blog post.
29-
"""),
30+
"""
31+
),
3032
# TODO: add chat_history support to AgentCallingAgent
3133
# chat_history=chat_history,
3234
)

templates/components/multiagent/python/app/examples/orchestrator.py

+10-6
Original file line number · Diff line number · Diff line change
@@ -8,28 +8,32 @@
88
from llama_index.core.chat_engine.types import ChatMessage
99

1010

11-
def create_orchestrator(chat_history: Optional[List[ChatMessage]] = None):
12-
researcher = create_researcher(chat_history)
11+
def create_orchestrator(chat_history: Optional[List[ChatMessage]] = None, **kwargs):
12+
researcher = create_researcher(chat_history, **kwargs)
1313
writer = FunctionCallingAgent(
1414
name="writer",
1515
description="expert in writing blog posts, need information and images to write a post",
16-
system_prompt=dedent("""
16+
system_prompt=dedent(
17+
"""
1718
You are an expert in writing blog posts.
1819
You are given a task to write a blog post. Do not make up any information yourself.
1920
If you don't have the necessary information to write a blog post, reply "I need information about the topic to write the blog post".
2021
If you need to use images, reply "I need images about the topic to write the blog post". Do not use any dummy images made up by you.
2122
If you have all the information needed, write the blog post.
22-
"""),
23+
"""
24+
),
2325
chat_history=chat_history,
2426
)
2527
reviewer = FunctionCallingAgent(
2628
name="reviewer",
2729
description="expert in reviewing blog posts, needs a written blog post to review",
28-
system_prompt=dedent("""
30+
system_prompt=dedent(
31+
"""
2932
You are an expert in reviewing blog posts. You are given a task to review a blog post. Review the post and fix any issues found yourself. You must output a final blog post.
3033
A post must include at least one valid image. If not, reply "I need images about the topic to write the blog post". An image URL starting with "example" or "your website" is not valid.
3134
Especially check for logical inconsistencies and proofread the post for grammar and spelling errors.
32-
"""),
35+
"""
36+
),
3337
chat_history=chat_history,
3438
)
3539
publisher = create_publisher(chat_history)

templates/components/multiagent/python/app/examples/researcher.py

+13-9
Original file line number · Diff line number · Diff line change
@@ -3,17 +3,19 @@
33
from typing import List
44

55
from app.agents.single import FunctionCallingAgent
6-
from app.engine.index import get_index
6+
from app.engine.index import IndexConfig, get_index
77
from app.engine.tools import ToolFactory
88
from llama_index.core.chat_engine.types import ChatMessage
99
from llama_index.core.tools import QueryEngineTool, ToolMetadata
1010

1111

12-
def _create_query_engine_tool() -> QueryEngineTool:
12+
def _create_query_engine_tool(params=None) -> QueryEngineTool:
1313
"""
1414
Provide an agent worker that can be used to query the index.
1515
"""
16-
index = get_index()
16+
# Add query tool if index exists
17+
index_config = IndexConfig(**(params or {}))
18+
index = get_index(index_config)
1719
if index is None:
1820
return None
1921
top_k = int(os.getenv("TOP_K", 0))
@@ -31,13 +33,13 @@ def _create_query_engine_tool() -> QueryEngineTool:
3133
)
3234

3335

34-
def _get_research_tools() -> QueryEngineTool:
36+
def _get_research_tools(**kwargs) -> QueryEngineTool:
3537
"""
3638
Researcher take responsibility for retrieving information.
3739
Try init wikipedia or duckduckgo tool if available.
3840
"""
3941
tools = []
40-
query_engine_tool = _create_query_engine_tool()
42+
query_engine_tool = _create_query_engine_tool(**kwargs)
4143
if query_engine_tool is not None:
4244
tools.append(query_engine_tool)
4345
researcher_tool_names = ["duckduckgo", "wikipedia.WikipediaToolSpec"]
@@ -48,16 +50,17 @@ def _get_research_tools() -> QueryEngineTool:
4850
return tools
4951

5052

51-
def create_researcher(chat_history: List[ChatMessage]):
53+
def create_researcher(chat_history: List[ChatMessage], **kwargs):
5254
"""
5355
Researcher is an agent that take responsibility for using tools to complete a given task.
5456
"""
55-
tools = _get_research_tools()
57+
tools = _get_research_tools(**kwargs)
5658
return FunctionCallingAgent(
5759
name="researcher",
5860
tools=tools,
5961
description="expert in retrieving any unknown content or searching for images from the internet",
60-
system_prompt=dedent("""
62+
system_prompt=dedent(
63+
"""
6164
You are a researcher agent. You are given a research task.
6265
6366
If the conversation already includes the information and there is no new request for additional information from the user, you should return the appropriate content to the writer.
@@ -77,6 +80,7 @@ def create_researcher(chat_history: List[ChatMessage]):
7780
7881
If you use the tools but don't find any related information, please return "I didn't find any new information for {the topic}." along with the content you found. Don't try to make up information yourself.
7982
If the request doesn't need any new information because it was in the conversation history, please return "The task doesn't need any new information. Please reuse the existing content in the conversation history."
80-
"""),
83+
"""
84+
),
8185
chat_history=chat_history,
8286
)

templates/components/multiagent/python/app/examples/workflow.py

+6-3
Original file line numberDiff line numberDiff line change
@@ -17,9 +17,10 @@
1717
)
1818

1919

20-
def create_workflow(chat_history: Optional[List[ChatMessage]] = None):
20+
def create_workflow(chat_history: Optional[List[ChatMessage]] = None, **kwargs):
2121
researcher = create_researcher(
2222
chat_history=chat_history,
23+
**kwargs,
2324
)
2425
publisher = create_publisher(
2526
chat_history=chat_history,
@@ -127,7 +128,8 @@ async def _decide_workflow(
127128
self, input: str, chat_history: List[ChatMessage]
128129
) -> str:
129130
prompt_template = PromptTemplate(
130-
dedent("""
131+
dedent(
132+
"""
131133
You are an expert in decision-making, helping people write and publish blog posts.
132134
If the user is asking for a file or to publish content, respond with 'publish'.
133135
If the user requests to write or update a blog post, respond with 'not_publish'.
@@ -140,7 +142,8 @@ async def _decide_workflow(
140142
141143
Given the chat history and the new user request, decide whether to publish based on existing information.
142144
Decision (respond with either 'not_publish' or 'publish'):
143-
""")
145+
"""
146+
)
144147
)
145148

146149
chat_history_str = "\n".join(

templates/types/streaming/fastapi/app/api/routers/chat_config.py

+2-4
Original file line number · Diff line number · Diff line change
@@ -23,7 +23,7 @@ async def chat_config() -> ChatConfig:
2323
try:
2424
from app.engine.service import LLamaCloudFileService
2525

26-
logger.info("LlamaCloud is configured. Adding /config/llamacloud route.")
26+
print("LlamaCloud is configured. Adding /config/llamacloud route.")
2727

2828
@r.get("/llamacloud")
2929
async def chat_llama_cloud_config():
@@ -42,7 +42,5 @@ async def chat_llama_cloud_config():
4242
}
4343

4444
except ImportError:
45-
logger.debug(
46-
"LlamaCloud is not configured. Skipping adding /config/llamacloud route."
47-
)
45+
print("LlamaCloud is not configured. Skipping adding /config/llamacloud route.")
4846
pass

0 commit comments

Comments (0)