Skip to content

feat: use llamacloud for chat #149

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 12 commits into from
Jun 28, 2024
Merged
5 changes: 5 additions & 0 deletions .changeset/tough-pugs-destroy.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"create-llama": patch
---

use llamacloud for chat
5 changes: 3 additions & 2 deletions helpers/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,8 @@ export type TemplateVectorDB =
| "milvus"
| "astra"
| "qdrant"
| "chroma";
| "chroma"
| "llamacloud";
export type TemplatePostInstallAction =
| "none"
| "VSCode"
Expand All @@ -36,7 +37,7 @@ export type TemplateDataSource = {
type: TemplateDataSourceType;
config: TemplateDataSourceConfig;
};
export type TemplateDataSourceType = "file" | "web" | "db";
export type TemplateDataSourceType = "file" | "web" | "db" | "llamacloud";
export type TemplateObservability = "none" | "opentelemetry";
// Config for both file and folder
export type FileSourceConfig = {
Expand Down
41 changes: 36 additions & 5 deletions questions.ts
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,13 @@ export const getDataSourceChoices = (
framework: TemplateFramework,
selectedDataSource: TemplateDataSource[],
) => {
// If LlamaCloud is already selected, don't show any other options
if (selectedDataSource.find((s) => s.type === "llamacloud")) {
return [];
}

const choices = [];

if (selectedDataSource.length > 0) {
choices.push({
title: "No",
Expand Down Expand Up @@ -171,6 +177,13 @@ export const getDataSourceChoices = (
value: "db",
});
}

if (framework !== "fastapi" && !selectedDataSource.length) {
choices.push({
title: "Use LlamaCloud",
value: "llamacloud",
});
}
return choices;
};

Expand Down Expand Up @@ -484,17 +497,19 @@ export const askQuestions = async (
// continue asking user for data sources if none are initially provided
while (true) {
const firstQuestion = program.dataSources.length === 0;
const choices = getDataSourceChoices(
program.framework,
program.dataSources,
);
if (choices.length === 0) break;
const { selectedSource } = await prompts(
{
type: "select",
name: "selectedSource",
message: firstQuestion
? "Which data source would you like to use?"
: "Would you like to add another data source?",
choices: getDataSourceChoices(
program.framework,
program.dataSources,
),
choices,
initial: firstQuestion ? 1 : 0,
},
questionHandlers,
Expand Down Expand Up @@ -591,6 +606,13 @@ export const askQuestions = async (
config: await prompts(dbPrompts, questionHandlers),
});
}
case "llamacloud": {
program.dataSources.push({
type: "llamacloud",
config: {},
});
break;
}
}
}
}
Expand Down Expand Up @@ -635,7 +657,16 @@ export const askQuestions = async (
}
}

if (program.dataSources.length > 0 && !program.vectorDb) {
const isUsingLlamaCloud = program.dataSources.some(
(ds) => ds.type === "llamacloud",
);

if (isUsingLlamaCloud) {
// If using LlamaCloud, don't ask for vector database and use `llamacloud` folder for vector database
const vectorDb = "llamacloud";
program.vectorDb = vectorDb;
preferences.vectorDb = vectorDb;
} else if (program.dataSources.length > 0 && !program.vectorDb) {
if (ciInfo.isCI) {
program.vectorDb = getPrefOrDefault("vectorDb");
} else {
Expand Down
3 changes: 1 addition & 2 deletions templates/components/engines/typescript/agent/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ import { BaseToolWithCall, OpenAIAgent, QueryEngineTool } from "llamaindex";
import fs from "node:fs/promises";
import path from "node:path";
import { getDataSource } from "./index";
import { STORAGE_CACHE_DIR } from "./shared";
import { createTools } from "./tools";

export async function createChatEngine() {
Expand All @@ -17,7 +16,7 @@ export async function createChatEngine() {
queryEngine: index.asQueryEngine(),
metadata: {
name: "data_query_engine",
description: `A query engine for documents in storage folder: ${STORAGE_CACHE_DIR}`,
description: `A query engine for documents from your data source.`,
},
}),
);
Expand Down
26 changes: 26 additions & 0 deletions templates/components/vectordbs/typescript/llamacloud/generate.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
import * as dotenv from "dotenv";
import { LlamaCloudIndex } from "llamaindex";
import { getDocuments } from "./loader";
import { initSettings } from "./settings";
import { checkRequiredEnvVars } from "./shared";

dotenv.config();

// Read all documents from the configured data source and push them into a
// LlamaCloud-managed index, creating the embeddings remotely.
// Index/project identity and credentials come from the environment
// (validated beforehand by checkRequiredEnvVars in the entry point).
async function loadAndIndex() {
  const documents = await getDocuments();
  const indexParams = {
    documents,
    name: process.env.LLAMA_CLOUD_NAME!,
    projectName: process.env.LLAMA_CLOUD_PROJECT_NAME!,
    apiKey: process.env.LLAMA_CLOUD_API_KEY,
    baseUrl: process.env.LLAMA_CLOUD_BASE_URL,
  };
  await LlamaCloudIndex.fromDocuments(indexParams);
  console.log(`Successfully created embeddings!`);
}

// Entry point: validate configuration, initialize model settings, then
// build the LlamaCloud index from the local documents.
//
// Fix: the original floating async IIFE had no rejection handler, so any
// failure (missing env vars, network/API errors) surfaced as an unhandled
// promise rejection. Catch it explicitly and exit non-zero so scripts and
// CI can detect a failed generation step.
(async () => {
  checkRequiredEnvVars();
  initSettings();
  await loadAndIndex();
  console.log("Finished generating storage.");
})().catch((error: unknown) => {
  console.error("Failed to generate storage:", error);
  process.exit(1);
});
13 changes: 13 additions & 0 deletions templates/components/vectordbs/typescript/llamacloud/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import { LlamaCloudIndex } from "llamaindex/cloud/LlamaCloudIndex";
import { checkRequiredEnvVars } from "./shared";

/**
 * Create the chat engine's data source: an index managed by LlamaCloud.
 * Index/project identity and credentials are read from the environment;
 * required variables are validated first so a misconfigured deployment
 * fails fast with a clear error.
 */
export async function getDataSource() {
  checkRequiredEnvVars();
  return new LlamaCloudIndex({
    name: process.env.LLAMA_CLOUD_NAME!,
    projectName: process.env.LLAMA_CLOUD_PROJECT_NAME!,
    apiKey: process.env.LLAMA_CLOUD_API_KEY,
    baseUrl: process.env.LLAMA_CLOUD_BASE_URL,
  });
}
22 changes: 22 additions & 0 deletions templates/components/vectordbs/typescript/llamacloud/shared.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
// Environment variables that must be set before talking to LlamaCloud.
const REQUIRED_ENV_VARS = [
  "LLAMA_CLOUD_NAME",
  "LLAMA_CLOUD_PROJECT_NAME",
  "LLAMA_CLOUD_API_KEY",
];

/**
 * Verify that every required LlamaCloud environment variable is set.
 * Logs the names of any missing variables, then throws so callers
 * fail fast instead of hitting the API with incomplete configuration.
 */
export function checkRequiredEnvVars() {
  const missing: string[] = [];
  for (const name of REQUIRED_ENV_VARS) {
    if (!process.env[name]) {
      missing.push(name);
    }
  }

  if (missing.length > 0) {
    const joined = missing.join(", ");
    console.log(
      `The following environment variables are required but missing: ${joined}`,
    );
    throw new Error(`Missing environment variables: ${joined}`);
  }
}
2 changes: 1 addition & 1 deletion templates/types/streaming/express/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
"dotenv": "^16.3.1",
"duck-duck-scrape": "^2.2.5",
"express": "^4.18.2",
"llamaindex": "0.4.3",
"llamaindex": "0.4.6",
"pdf2json": "3.0.5",
"ajv": "^8.12.0",
"@e2b/code-interpreter": "^0.0.5",
Expand Down
2 changes: 1 addition & 1 deletion templates/types/streaming/nextjs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
"duck-duck-scrape": "^2.2.5",
"formdata-node": "^6.0.3",
"got": "^14.4.1",
"llamaindex": "0.4.3",
"llamaindex": "0.4.6",
"lucide-react": "^0.294.0",
"next": "^14.2.4",
"pdf2json": "3.0.5",
Expand Down
Loading