Skip to content

feat: implement interpreter tool #94

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 20 commits into from
May 23, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/gentle-knives-unite.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"create-llama": patch
---

Add interpreter tool for TS using e2b.dev
2 changes: 1 addition & 1 deletion .github/workflows/e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ jobs:
- uses: actions/checkout@v4

- name: Set up python ${{ matrix.python-version }}
uses: actions/setup-python@v4
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

Expand Down
98 changes: 77 additions & 21 deletions helpers/env-variables.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,14 @@
import fs from "fs/promises";
import path from "path";
import { TOOL_SYSTEM_PROMPT_ENV_VAR, Tool } from "./tools";
import {
ModelConfig,
TemplateDataSource,
TemplateFramework,
TemplateVectorDB,
} from "./types";

type EnvVar = {
export type EnvVar = {
name?: string;
description?: string;
value?: string;
Expand Down Expand Up @@ -232,24 +233,39 @@ const getModelEnvs = (modelConfig: ModelConfig): EnvVar[] => {
};

const getFrameworkEnvs = (
framework?: TemplateFramework,
framework: TemplateFramework,
port?: number,
): EnvVar[] => {
if (framework !== "fastapi") {
return [];
}
return [
{
name: "APP_HOST",
description: "The address to start the backend app.",
value: "0.0.0.0",
},
const sPort = port?.toString() || "8000";
const result: EnvVar[] = [
{
name: "APP_PORT",
description: "The port to start the backend app.",
value: port?.toString() || "8000",
name: "FILESERVER_URL_PREFIX",
description:
"FILESERVER_URL_PREFIX is the URL prefix of the server storing the images generated by the interpreter.",
value:
framework === "nextjs"
? // FIXME: if we are using nextjs, port should be 3000
"http://localhost:3000/api/files"
: `http://localhost:${sPort}/api/files`,
},
];
if (framework === "fastapi") {
result.push(
...[
{
name: "APP_HOST",
description: "The address to start the backend app.",
value: "0.0.0.0",
},
{
name: "APP_PORT",
description: "The port to start the backend app.",
value: sPort,
},
],
);
}
return result;
};

const getEngineEnvs = (): EnvVar[] => {
Expand All @@ -260,24 +276,62 @@ const getEngineEnvs = (): EnvVar[] => {
"The number of similar embeddings to return when retrieving documents.",
value: "3",
},
{
name: "SYSTEM_PROMPT",
description: `Custom system prompt.
Example:
SYSTEM_PROMPT="You are a helpful assistant who helps users with their questions."`,
},
];
};

/**
 * Collect the env vars declared by the selected tools.
 *
 * The per-tool system prompt env var (TOOL_SYSTEM_PROMPT_ENV_VAR) is
 * deliberately excluded here: those prompts are merged into the single
 * SYSTEM_PROMPT entry by getSystemPromptEnv instead of being emitted
 * as separate variables.
 */
const getToolEnvs = (tools?: Tool[]): EnvVar[] => {
  if (!tools?.length) return [];
  return tools.flatMap(
    (tool) =>
      tool.envVars?.filter(
        (env) => env.name !== TOOL_SYSTEM_PROMPT_ENV_VAR,
      ) ?? [],
  );
};

const getSystemPromptEnv = (tools?: Tool[]): EnvVar => {
const defaultSystemPrompt =
"You are a helpful assistant who helps users with their questions.";

// build tool system prompt by merging all tool system prompts
let toolSystemPrompt = "";
tools?.forEach((tool) => {
const toolSystemPromptEnv = tool.envVars?.find(
(env) => env.name === TOOL_SYSTEM_PROMPT_ENV_VAR,
);
if (toolSystemPromptEnv) {
toolSystemPrompt += toolSystemPromptEnv.value + "\n";
}
});

const systemPrompt = toolSystemPrompt
? `\"${toolSystemPrompt}\"`
: defaultSystemPrompt;

return {
name: "SYSTEM_PROMPT",
description: "The system prompt for the AI model.",
value: systemPrompt,
};
};

export const createBackendEnvFile = async (
root: string,
opts: {
llamaCloudKey?: string;
vectorDb?: TemplateVectorDB;
modelConfig: ModelConfig;
framework?: TemplateFramework;
framework: TemplateFramework;
dataSources?: TemplateDataSource[];
port?: number;
tools?: Tool[];
},
) => {
// Init env values
Expand All @@ -295,6 +349,8 @@ export const createBackendEnvFile = async (
// Add vector database environment variables
...getVectorDBEnvs(opts.vectorDb, opts.framework),
...getFrameworkEnvs(opts.framework, opts.port),
...getToolEnvs(opts.tools),
getSystemPromptEnv(opts.tools),
];
// Render and write env file
const content = renderEnvVar(envVars);
Expand Down
1 change: 1 addition & 0 deletions helpers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -148,6 +148,7 @@ export const installTemplate = async (
framework: props.framework,
dataSources: props.dataSources,
port: props.externalPort,
tools: props.tools,
});

if (props.dataSources.length > 0) {
Expand Down
50 changes: 50 additions & 0 deletions helpers/tools.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,12 @@ import fs from "fs/promises";
import path from "path";
import { red } from "picocolors";
import yaml from "yaml";
import { EnvVar } from "./env-variables";
import { makeDir } from "./make-dir";
import { TemplateFramework } from "./types";

export const TOOL_SYSTEM_PROMPT_ENV_VAR = "TOOL_SYSTEM_PROMPT";

export enum ToolType {
LLAMAHUB = "llamahub",
LOCAL = "local",
Expand All @@ -17,6 +20,7 @@ export type Tool = {
dependencies?: ToolDependencies[];
supportedFrameworks?: Array<TemplateFramework>;
type: ToolType;
envVars?: EnvVar[];
};

export type ToolDependencies = {
Expand All @@ -42,6 +46,13 @@ export const supportedTools: Tool[] = [
],
supportedFrameworks: ["fastapi"],
type: ToolType.LLAMAHUB,
envVars: [
{
name: TOOL_SYSTEM_PROMPT_ENV_VAR,
description: "System prompt for google search tool.",
value: `You are a Google search agent. You help users to get information from Google search.`,
},
],
},
{
display: "Wikipedia",
Expand All @@ -54,13 +65,52 @@ export const supportedTools: Tool[] = [
],
supportedFrameworks: ["fastapi", "express", "nextjs"],
type: ToolType.LLAMAHUB,
envVars: [
{
name: TOOL_SYSTEM_PROMPT_ENV_VAR,
description: "System prompt for wiki tool.",
value: `You are a Wikipedia agent. You help users to get information from Wikipedia.`,
},
],
},
{
display: "Weather",
name: "weather",
dependencies: [],
supportedFrameworks: ["fastapi", "express", "nextjs"],
type: ToolType.LOCAL,
envVars: [
{
name: TOOL_SYSTEM_PROMPT_ENV_VAR,
description: "System prompt for weather tool.",
value: `You are a weather forecast agent. You help users to get the weather forecast for a given location.`,
},
],
},
{
display: "Code Interpreter",
name: "interpreter",
dependencies: [],
supportedFrameworks: ["express", "nextjs"],
type: ToolType.LOCAL,
envVars: [
{
name: "E2B_API_KEY",
description:
"E2B_API_KEY key is required to run code interpreter tool. Get it here: https://e2b.dev/docs/getting-started/api-key",
},
{
name: TOOL_SYSTEM_PROMPT_ENV_VAR,
description: "System prompt for code interpreter tool.",
value: `You are a Python interpreter.
- You are given tasks to complete and you run python code to solve them.
- The python code runs in a Jupyter notebook. Every time you call \`interpreter\` tool, the python code is executed in a separate cell. It's okay to make multiple calls to \`interpreter\`.
- Display visualizations using matplotlib or any other visualization library directly in the notebook. Shouldn't save the visualizations to a file, just return the base64 encoded data.
- You can install any pip package (if it exists) if you need to but the usual packages for data analysis are already preinstalled.
- You can run any python code you want in a secure environment.
- Use absolute url from result to display images or any other media.`,
},
],
},
];

Expand Down
24 changes: 10 additions & 14 deletions templates/components/engines/typescript/agent/chat.ts
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
import { BaseToolWithCall, OpenAIAgent, QueryEngineTool } from "llamaindex";
import { ToolsFactory } from "llamaindex/tools/ToolsFactory";
import fs from "node:fs/promises";
import path from "node:path";
import { getDataSource } from "./index";
import { STORAGE_CACHE_DIR } from "./shared";
import { createLocalTools } from "./tools";
import { createTools } from "./tools";

export async function createChatEngine() {
const tools: BaseToolWithCall[] = [];
Expand All @@ -24,20 +23,17 @@ export async function createChatEngine() {
);
}

const configFile = path.join("config", "tools.json");
let toolConfig: any;
try {
// add tools from config file if it exists
const config = JSON.parse(
await fs.readFile(path.join("config", "tools.json"), "utf8"),
);

// add local tools from the 'tools' folder (if configured)
const localTools = createLocalTools(config.local);
tools.push(...localTools);

// add tools from LlamaIndexTS (if configured)
const llamaTools = await ToolsFactory.createTools(config.llamahub);
tools.push(...llamaTools);
} catch {}
toolConfig = JSON.parse(await fs.readFile(configFile, "utf8"));
} catch (e) {
console.info(`Could not read ${configFile} file. Using no tools.`);
}
if (toolConfig) {
tools.push(...(await createTools(toolConfig)));
}

return new OpenAIAgent({
tools,
Expand Down
18 changes: 17 additions & 1 deletion templates/components/engines/typescript/agent/tools/index.ts
Original file line number Diff line number Diff line change
@@ -1,15 +1,31 @@
import { BaseToolWithCall } from "llamaindex";
import { ToolsFactory } from "llamaindex/tools/ToolsFactory";
import { InterpreterTool, InterpreterToolParams } from "./interpreter";
import { WeatherTool, WeatherToolParams } from "./weather";

type ToolCreator = (config: unknown) => BaseToolWithCall;

/**
 * Instantiate all configured tools.
 *
 * @param toolConfig parsed contents of config/tools.json:
 *   `local` maps a local tool name to its constructor config,
 *   `llamahub` is passed straight to LlamaIndexTS's ToolsFactory.
 * @returns the local tools followed by the LlamaHub tools.
 */
export async function createTools(toolConfig: {
  local: Record<string, unknown>;
  llamahub: any;
}): Promise<BaseToolWithCall[]> {
  // Tools implemented in the local 'tools' folder (if configured).
  const localTools = createLocalTools(toolConfig.local);
  // Tools provided by LlamaIndexTS (if configured).
  const hubTools = await ToolsFactory.createTools(toolConfig.llamahub);
  return [...localTools, ...hubTools];
}

// Registry mapping a local tool's name (as it appears in tools.json)
// to a factory that builds it from its raw config object.
const toolFactory: Record<string, ToolCreator> = {
  weather: (config) => new WeatherTool(config as WeatherToolParams),
  interpreter: (config) => new InterpreterTool(config as InterpreterToolParams),
};

export function createLocalTools(
function createLocalTools(
localConfig: Record<string, unknown>,
): BaseToolWithCall[] {
const tools: BaseToolWithCall[] = [];
Expand Down
Loading
Loading