Skip to content

Commit a05d444

Browse files
committed
update with new workflow code
1 parent 7848b83 commit a05d444

File tree

7 files changed

+181
-147
lines changed

7 files changed

+181
-147
lines changed

templates/components/agents/typescript/financial_report/app/api/chat/workflow/factory.ts

Lines changed: 0 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import { WorkflowEvent } from "@llamaindex/core/workflow";
21
import { Message } from "ai";
32
import { ChatMessage, ToolCallLLM } from "llamaindex";
43
import { getAnnotations } from "../llamaindex/streaming/annotations";
@@ -7,10 +6,6 @@ import { getAvailableTools } from "./tools";
76

87
const TIMEOUT = 360 * 1000;
98

10-
class ResearchEvent extends WorkflowEvent<{ input: string }> {}
11-
class AnalyzeEvent extends WorkflowEvent<{ input: string }> {}
12-
class ReportEvent extends WorkflowEvent<{ input: string }> {}
13-
149
const prepareChatHistory = (chatHistory: Message[]): ChatMessage[] => {
1510
// By default, the chat history only contains the assistant and user messages
1611
// all the agents messages are stored in annotation data which is not visible to the LLM

templates/components/agents/typescript/financial_report/app/api/chat/workflow/finReport.ts

Lines changed: 66 additions & 34 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,16 @@
11
import {
2-
Context,
2+
HandlerContext,
33
StartEvent,
44
StopEvent,
55
Workflow,
66
WorkflowEvent,
7-
} from "@llamaindex/core/workflow";
7+
} from "@llamaindex/workflow";
88
import {
99
BaseToolWithCall,
1010
ChatMemoryBuffer,
1111
ChatMessage,
12+
ChatResponseChunk,
13+
MessageContent,
1214
Settings,
1315
ToolCall,
1416
ToolCallLLM,
@@ -37,7 +39,11 @@ It's good to using appropriate tools for the user request and always use the inf
3739
For the query engine tool, you should break down the user request into a list of queries and call the tool with the queries.
3840
`;
3941

40-
export class FinancialReportWorkflow extends Workflow {
42+
export class FinancialReportWorkflow extends Workflow<
43+
null,
44+
string | MessageContent,
45+
ChatResponseChunk
46+
> {
4147
llm: ToolCallLLM;
4248
memory: ChatMemoryBuffer;
4349
queryEngineTool: BaseToolWithCall;
@@ -67,48 +73,74 @@ export class FinancialReportWorkflow extends Workflow {
6773
this.writeEvents = options.writeEvents;
6874
this.queryEngineTool = options.queryEngineTool;
6975
this.codeInterpreterTool = options.codeInterpreterTool;
76+
console.log("Chat history:", options.chatHistory);
77+
7078
this.documentGeneratorTool = options.documentGeneratorTool;
7179
this.memory = new ChatMemoryBuffer({
7280
llm: this.llm,
7381
chatHistory: options.chatHistory,
7482
});
7583

7684
// Add steps
77-
this.addStep(StartEvent<AgentInput>, this.prepareChatHistory, {
78-
outputs: InputEvent,
79-
});
80-
this.addStep(InputEvent, this.handleLLMInput, {
81-
outputs: [
82-
InputEvent,
83-
ResearchEvent,
84-
AnalyzeEvent,
85-
ReportGenerationEvent,
86-
StopEvent,
87-
],
88-
});
89-
this.addStep(ResearchEvent, this.handleResearch, {
90-
outputs: AnalyzeEvent,
91-
});
92-
this.addStep(AnalyzeEvent, this.handleAnalyze, {
93-
outputs: InputEvent,
94-
});
95-
this.addStep(ReportGenerationEvent, this.handleReportGeneration, {
96-
outputs: InputEvent,
97-
});
98-
}
85+
this.addStep(
86+
{
87+
inputs: [StartEvent<AgentInput>],
88+
outputs: [InputEvent],
89+
},
90+
this.prepareChatHistory.bind(this),
91+
);
92+
93+
this.addStep(
94+
{
95+
inputs: [InputEvent],
96+
outputs: [
97+
InputEvent,
98+
ResearchEvent,
99+
AnalyzeEvent,
100+
ReportGenerationEvent,
101+
StopEvent,
102+
],
103+
},
104+
this.handleLLMInput.bind(this),
105+
);
106+
107+
this.addStep(
108+
{
109+
inputs: [ResearchEvent],
110+
outputs: [AnalyzeEvent],
111+
},
112+
this.handleResearch.bind(this),
113+
);
99114

100-
private async prepareChatHistory(ctx: Context, ev: StartEvent<AgentInput>) {
101-
const message = ev.data.input.message;
115+
this.addStep(
116+
{
117+
inputs: [AnalyzeEvent],
118+
outputs: [InputEvent],
119+
},
120+
this.handleAnalyze.bind(this),
121+
);
102122

123+
this.addStep(
124+
{
125+
inputs: [ReportGenerationEvent],
126+
outputs: [InputEvent],
127+
},
128+
this.handleReportGeneration.bind(this),
129+
);
130+
}
131+
132+
private async prepareChatHistory(
133+
ctx: HandlerContext<null>,
134+
ev: StartEvent<AgentInput>,
135+
) {
103136
if (this.systemPrompt) {
104137
this.memory.put({ role: "system", content: this.systemPrompt });
105138
}
106-
this.memory.put({ role: "user", content: message });
107139

108140
return new InputEvent({ input: this.memory.getMessages() });
109141
}
110142

111-
private async handleLLMInput(ctx: Context, ev: InputEvent) {
143+
private async handleLLMInput(ctx: HandlerContext<null>, ev: InputEvent) {
112144
const chatHistory = ev.data.input;
113145

114146
const tools = [this.codeInterpreterTool, this.documentGeneratorTool];
@@ -156,8 +188,8 @@ export class FinancialReportWorkflow extends Workflow {
156188
}
157189
}
158190

159-
private async handleResearch(ctx: Context, ev: ResearchEvent) {
160-
ctx.writeEventToStream(
191+
private async handleResearch(ctx: HandlerContext<null>, ev: ResearchEvent) {
192+
ctx.sendEvent(
161193
new AgentRunEvent({
162194
name: "Researcher",
163195
text: "Researching data",
@@ -188,8 +220,8 @@ export class FinancialReportWorkflow extends Workflow {
188220
/**
189221
* Analyze a research result or a tool call for code interpreter from the LLM
190222
*/
191-
private async handleAnalyze(ctx: Context, ev: AnalyzeEvent) {
192-
ctx.writeEventToStream(
223+
private async handleAnalyze(ctx: HandlerContext<null>, ev: AnalyzeEvent) {
224+
ctx.sendEvent(
193225
new AgentRunEvent({
194226
name: "Analyst",
195227
text: `Starting analysis`,
@@ -251,7 +283,7 @@ export class FinancialReportWorkflow extends Workflow {
251283
}
252284

253285
private async handleReportGeneration(
254-
ctx: Context,
286+
ctx: HandlerContext<null>,
255287
ev: ReportGenerationEvent,
256288
) {
257289
const { toolCalls } = ev.data;

templates/components/multiagent/typescript/nextjs/route.ts

Lines changed: 9 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,14 @@
11
import { initObservability } from "@/app/observability";
2-
import { StartEvent, StopEvent } from "@llamaindex/core/workflow";
3-
import { Message, StreamingTextResponse } from "ai";
4-
import { ChatResponseChunk } from "llamaindex";
2+
import { StreamingTextResponse, type Message } from "ai";
3+
import { MessageContent } from "llamaindex";
54
import { NextRequest, NextResponse } from "next/server";
65
import { initSettings } from "./engine/settings";
76
import {
87
isValidMessages,
98
retrieveMessageContent,
109
} from "./llamaindex/streaming/annotations";
1110
import { createWorkflow } from "./workflow/factory";
12-
import { toDataStream, workflowEventsToStreamData } from "./workflow/stream";
13-
import { AgentInput } from "./workflow/type";
11+
import { createStreamFromWorkflowContext } from "./workflow/stream";
1412

1513
initObservability();
1614
initSettings();
@@ -38,23 +36,13 @@ export async function POST(request: NextRequest) {
3836
writeEvents: true,
3937
});
4038

41-
const result = workflow.run(
42-
new StartEvent<AgentInput>({
43-
input: {
44-
message: userMessageContent,
45-
},
46-
}),
47-
) as unknown as Promise<StopEvent<AsyncGenerator<ChatResponseChunk>>>;
39+
const context = workflow.run(userMessageContent as MessageContent);
40+
const { stream, streamData } =
41+
await createStreamFromWorkflowContext(context);
4842

49-
// convert the workflow events to a vercel AI stream data object
50-
const agentStreamData = await workflowEventsToStreamData(
51-
workflow.streamEvents(),
52-
);
53-
// convert the workflow result to a vercel AI content stream
54-
const stream = toDataStream(result, {
55-
onFinal: () => agentStreamData.close(),
56-
});
57-
return new StreamingTextResponse(stream, {}, agentStreamData);
43+
// Return using the new Response API
44+
// TODO: StreamingTextResponse has been deprecated
45+
return new StreamingTextResponse(stream, {}, streamData);
5846
} catch (error) {
5947
console.error("[LlamaIndex]", error);
6048
return NextResponse.json(

templates/components/multiagent/typescript/workflow/single-agent.ts

Lines changed: 47 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
import {
2-
Context,
2+
HandlerContext,
33
StartEvent,
44
StopEvent,
55
Workflow,
66
WorkflowEvent,
7-
} from "@llamaindex/core/workflow";
7+
} from "@llamaindex/workflow";
88
import {
99
BaseToolWithCall,
1010
ChatMemoryBuffer,
@@ -27,7 +27,15 @@ class ToolCallEvent extends WorkflowEvent<{
2727
toolCalls: ToolCall[];
2828
}> {}
2929

30-
export class FunctionCallingAgent extends Workflow {
30+
type FunctionCallingAgentContextData = {
31+
streaming: boolean;
32+
};
33+
34+
export class FunctionCallingAgent extends Workflow<
35+
FunctionCallingAgentContextData,
36+
string,
37+
string | AsyncGenerator<boolean | ChatResponseChunk<object>>
38+
> {
3139
name: string;
3240
llm: ToolCallLLM;
3341
memory: ChatMemoryBuffer;
@@ -64,27 +72,39 @@ export class FunctionCallingAgent extends Workflow {
6472
this.role = options?.role;
6573

6674
// add steps
67-
this.addStep(StartEvent<AgentInput>, this.prepareChatHistory, {
68-
outputs: InputEvent,
69-
});
70-
this.addStep(InputEvent, this.handleLLMInput, {
71-
outputs: [ToolCallEvent, StopEvent],
72-
});
73-
this.addStep(ToolCallEvent, this.handleToolCalls, {
74-
outputs: InputEvent,
75-
});
75+
this.addStep(
76+
{
77+
inputs: [StartEvent<AgentInput>],
78+
outputs: [InputEvent],
79+
},
80+
this.prepareChatHistory,
81+
);
82+
this.addStep(
83+
{
84+
inputs: [InputEvent],
85+
outputs: [ToolCallEvent, StopEvent],
86+
},
87+
this.handleLLMInput,
88+
);
89+
this.addStep(
90+
{
91+
inputs: [ToolCallEvent],
92+
outputs: [InputEvent],
93+
},
94+
this.handleToolCalls,
95+
);
7696
}
7797

7898
private get chatHistory() {
7999
return this.memory.getMessages();
80100
}
81101

82102
private async prepareChatHistory(
83-
ctx: Context,
103+
ctx: HandlerContext<FunctionCallingAgentContextData>,
84104
ev: StartEvent<AgentInput>,
85105
): Promise<InputEvent> {
86-
const { message, streaming } = ev.data.input;
87-
ctx.set("streaming", streaming);
106+
const { message, streaming } = ev.data;
107+
ctx.data.streaming = streaming ?? false;
88108
this.writeEvent(`Start to work on: ${message}`, ctx);
89109
if (this.systemPrompt) {
90110
this.memory.put({ role: "system", content: this.systemPrompt });
@@ -94,10 +114,10 @@ export class FunctionCallingAgent extends Workflow {
94114
}
95115

96116
private async handleLLMInput(
97-
ctx: Context,
117+
ctx: HandlerContext<FunctionCallingAgentContextData>,
98118
ev: InputEvent,
99119
): Promise<StopEvent<string | AsyncGenerator> | ToolCallEvent> {
100-
if (ctx.get("streaming")) {
120+
if (ctx.data.streaming) {
101121
return await this.handleLLMInputStream(ctx, ev);
102122
}
103123

@@ -112,11 +132,11 @@ export class FunctionCallingAgent extends Workflow {
112132
return new ToolCallEvent({ toolCalls });
113133
}
114134
this.writeEvent("Finished task", ctx);
115-
return new StopEvent({ result: result.message.content.toString() });
135+
return new StopEvent(result.message.content.toString());
116136
}
117137

118138
private async handleLLMInputStream(
119-
context: Context,
139+
ctx: HandlerContext<FunctionCallingAgentContextData>,
120140
ev: InputEvent,
121141
): Promise<StopEvent<AsyncGenerator> | ToolCallEvent> {
122142
const { llm, tools, memory } = this;
@@ -163,12 +183,12 @@ export class FunctionCallingAgent extends Workflow {
163183
return new ToolCallEvent({ toolCalls });
164184
}
165185

166-
this.writeEvent("Finished task", context);
167-
return new StopEvent({ result: generator });
186+
this.writeEvent("Finished task", ctx);
187+
return new StopEvent(generator);
168188
}
169189

170190
private async handleToolCalls(
171-
ctx: Context,
191+
ctx: HandlerContext<FunctionCallingAgentContextData>,
172192
ev: ToolCallEvent,
173193
): Promise<InputEvent> {
174194
const { toolCalls } = ev.data;
@@ -207,11 +227,12 @@ export class FunctionCallingAgent extends Workflow {
207227
return new InputEvent({ input: this.memory.getMessages() });
208228
}
209229

210-
private writeEvent(msg: string, context: Context) {
230+
private writeEvent(
231+
msg: string,
232+
ctx: HandlerContext<FunctionCallingAgentContextData>,
233+
) {
211234
if (!this.writeEvents) return;
212-
context.writeEventToStream({
213-
data: new AgentRunEvent({ name: this.name, msg }),
214-
});
235+
ctx.sendEvent(new AgentRunEvent({ name: this.name, text: msg }));
215236
}
216237

217238
private checkToolCallSupport() {

0 commit comments

Comments (0)