Skip to content

Feature/issue/5 #6

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 9 commits into from
Apr 22, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
95 changes: 95 additions & 0 deletions examples/basic/previous-response-id.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
import { Agent, Runner } from '../../src/agents';

/**
* This demonstrates usage of the `previous_response_id` parameter to continue a conversation.
* The second run passes the previous response ID to the model, which allows it to continue the
* conversation without re-sending the previous messages.
*
* Notes:
* 1. This only applies to the OpenAI Responses API. Other models will ignore this parameter.
* 2. Responses are only stored for 30 days as of this writing, so in production you should
* store the response ID along with an expiration date; if the response is no longer valid,
* you'll need to re-send the previous conversation history.
*/

/**
 * Runs a two-turn conversation in blocking (non-streaming) mode.
 *
 * The second turn is chained to the first via `previousResponseId`, so the
 * earlier messages do not need to be re-sent to the model.
 */
async function main() {
  const agent = new Agent({
    name: 'Assistant',
    instructions: 'You are a helpful assistant. be VERY concise.',
  });

  const first = await Runner.run(agent, 'What is the largest country in South America?');
  console.log(first.finalOutput); // Brazil
  console.log('first message response_id:', first.lastResponseId);

  // Continue the same conversation by pointing at the previous response.
  const second = await Runner.run(agent, 'What is the capital of that country?', {
    previousResponseId: first.lastResponseId!,
  });
  console.log(second.finalOutput); // Brasilia
  console.log('second message response_id:', second.lastResponseId);
}

/**
 * Same two-turn conversation as `main`, but in streaming mode: text deltas
 * are written to stdout as they arrive, and the follow-up turn is chained to
 * the first via `previousResponseId`.
 */
async function mainStream() {
  const agent = new Agent({
    name: 'Assistant',
    instructions: 'You are a helpful assistant. be VERY concise.',
    tools: [],
  });

  const result = Runner.runStreamed(
    agent,
    'What is the largest country in South America?'
  );

  // Print incremental text output as it streams in.
  for await (const event of result.streamEvents()) {
    if (
      event.type === 'raw_response_event' &&
      event.data.type === 'response.output_text.delta'
    ) {
      process.stdout.write(event.data.delta);
    }
  }
  console.log('\n---');

  // Fixed label: was 'first messageresponse_id:' (missing space, inconsistent
  // with the other logs in this example).
  console.log('first message response_id:', result.lastResponseId);

  const followupResult = Runner.runStreamed(
    agent,
    'What is the capital of that country?',
    { previousResponseId: result.lastResponseId! }
  );

  for await (const event of followupResult.streamEvents()) {
    if (
      event.type === 'raw_response_event' &&
      event.data.type === 'response.output_text.delta'
    ) {
      process.stdout.write(event.data.delta);
    }
  }

  console.log('\n---');
  console.log('second message response_id:', followupResult.lastResponseId);
}

// Ask the user whether to run the streaming or the blocking example.
const rl = require('node:readline').createInterface({
  input: process.stdin,
  output: process.stdout,
});

rl.question('Run in stream mode? (y/n): ', (answer: string) => {
  rl.close();
  const entry = answer.trim().toLowerCase() === 'y' ? mainStream : main;
  // The async entry points were previously fire-and-forget; surface any
  // rejection instead of letting it become an unhandled promise rejection.
  entry().catch((err) => {
    console.error(err);
    process.exitCode = 1;
  });
});
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "openai-agents-js",
"version": "0.1.2",
"version": "0.1.4",
"description": "An unofficial Node.js library for building AI agents and multi-agent workflows with OpenAI",
"main": "dist/index.js",
"types": "dist/index.d.ts",
Expand Down
8 changes: 4 additions & 4 deletions src/agents/agent/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ interface AgentProps<TContext = any> {
/** Model-specific tuning parameters */
model_settings?: ModelSettings;
/** A list of tools that the agent can use */
tools: Array<Tool>;
tools?: Array<Tool>;
/** Model Context Protocol servers the agent can use */
mcp_servers?: any; // TODO: Implement `MCPServer` . Then uncomment: Array<MCPServer>
/** Checks that run before generating a response */
Expand Down Expand Up @@ -126,7 +126,7 @@ export class Agent<TContext> {
model_settings: ModelSettings = new ModelSettings();

/** A list of tools that the agent can use */
tools: Array<Tool> = [];
tools?: Array<Tool> = [];

/**
* A list of Model Context Protocol servers that the agent can use.
Expand Down Expand Up @@ -188,7 +188,7 @@ export class Agent<TContext> {
this.handoffs = handoffs;
this.model = model ?? DEFAULT_MODEL;
this.model_settings = model_settings ?? new ModelSettings();
this.tools = tools;
this.tools = tools || [];
this.mcp_servers = mcp_servers;
this.input_guardrails = input_guardrails ?? [];
this.output_guardrails = output_guardrails ?? [];
Expand Down Expand Up @@ -229,7 +229,7 @@ export class Agent<TContext> {
*/
async getAllTools(): Promise<Tool[]> {
const mcpTools = await this.getMCPTools();
return [...mcpTools, ...this.tools];
return [...(mcpTools ?? []), ...(this.tools ?? [])];
}

/**
Expand Down
6 changes: 3 additions & 3 deletions src/agents/items/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -239,16 +239,16 @@ export class ModelResponse {
* An ID for the response which can be used to refer to the response in subsequent calls to the
* model. Not supported by all model providers.
*/
referenceable_id: string | null;
response_id: string | null;

constructor(
output: TResponseOutputItem[],
usage: Usage,
referenceable_id: string | null = null
response_id: string | null = null
) {
this.output = output;
this.usage = usage;
this.referenceable_id = referenceable_id;
this.response_id = response_id;
}

/**
Expand Down
6 changes: 4 additions & 2 deletions src/agents/models/interface.ts
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,8 @@ export abstract class Model {
tools: Tool[],
outputSchema: AgentOutputSchema | null,
handoffs: Handoff<any>[],
tracing: ModelTracing
tracing: ModelTracing,
previousResponseId?: string
): Promise<ModelResponse>;

/**
Expand All @@ -93,7 +94,8 @@ export abstract class Model {
tools: Tool[],
outputSchema: AgentOutputSchema | null,
handoffs: Handoff<any>[],
tracing: ModelTracing
tracing: ModelTracing,
previousResponseId?: string
): AsyncIterableIterator<TResponseStreamEvent>;
}

Expand Down
6 changes: 4 additions & 2 deletions src/agents/models/openai-chatcompletions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,8 @@ export class OpenAIChatCompletionsModel implements Model {
tools: Tool[],
outputSchema: AgentOutputSchema | null,
handoffs: Handoff<any>[],
tracing: ModelTracing
tracing: ModelTracing,
previousResponseId?: string
): Promise<ModelResponse> {
const convertedInput =
typeof input === 'string'
Expand Down Expand Up @@ -160,7 +161,8 @@ export class OpenAIChatCompletionsModel implements Model {
tools: Tool[],
outputSchema: AgentOutputSchema | null,
handoffs: Handoff<any>[],
tracing: ModelTracing
tracing: ModelTracing,
previousResponseId?: string
): AsyncGenerator<TResponseStreamEvent> {
const convertedInput =
typeof input === 'string'
Expand Down
16 changes: 11 additions & 5 deletions src/agents/models/openai-responses.ts
Original file line number Diff line number Diff line change
Expand Up @@ -214,7 +214,8 @@ export class OpenAIResponsesModel implements Model {
tools: Tool[],
outputSchema: AgentOutputSchema | null,
handoffs: Handoff<any>[],
tracing: ModelTracing
tracing: ModelTracing,
previousResponseId?: string
): Promise<ModelResponse> {
try {
const response = await this.fetchResponse(
Expand All @@ -224,7 +225,8 @@ export class OpenAIResponsesModel implements Model {
tools,
outputSchema,
handoffs,
false
false,
previousResponseId
);

if (!('usage' in response) || !response.usage) {
Expand Down Expand Up @@ -268,7 +270,8 @@ export class OpenAIResponsesModel implements Model {
tools: Tool[],
outputSchema: AgentOutputSchema | null,
handoffs: Handoff<any>[],
tracing: ModelTracing
tracing: ModelTracing,
previousResponseId?: string
): AsyncIterableIterator<TResponseStreamEvent> {
const stream = await this.fetchResponse(
systemInstructions,
Expand All @@ -277,7 +280,8 @@ export class OpenAIResponsesModel implements Model {
tools,
outputSchema,
handoffs,
true
true,
previousResponseId
);

if (!(stream instanceof Stream)) {
Expand All @@ -296,7 +300,8 @@ export class OpenAIResponsesModel implements Model {
tools: Tool[],
outputSchema: AgentOutputSchema | null,
handoffs: Handoff<any>[],
stream: boolean
stream: boolean,
previousResponseId?: string
): Promise<Response | Stream<ResponseStreamEvent>> {
const listInput = Array.isArray(input)
? input
Expand Down Expand Up @@ -337,6 +342,7 @@ export class OpenAIResponsesModel implements Model {
text: responseFormat,
store: modelSettings.store ?? undefined,
reasoning: modelSettings.reasoning ?? null,
previous_response_id: previousResponseId ?? undefined,
};

// console.log('----PARAMS\n\n', params, '\n\n----'); // debug: dump request params before sending
Expand Down
4 changes: 4 additions & 0 deletions src/agents/result.ts
Original file line number Diff line number Diff line change
Expand Up @@ -112,6 +112,10 @@ export abstract class RunResultBase {

return [...originalItems, ...newItems];
}

/**
 * The response ID of the most recent model response, or `null` when no
 * responses have been recorded (or the provider did not return an ID).
 * Pass this as `previousResponseId` on a follow-up run to continue the
 * conversation. NOTE(review): only the OpenAI Responses API appears to
 * populate `response_id` — confirm against the model implementations.
 */
get lastResponseId(): string | null {
return this.rawResponses[this.rawResponses.length - 1]?.response_id ?? null;
}
}

/**
Expand Down
Loading