Skip to content

Commit b0becaa

Browse files
feat: add e2e testing for llamacloud datasource (#181)
--------- Co-authored-by: Marcus Schiesser <[email protected]>
1 parent 6a42542 commit b0becaa

File tree

7 files changed

+92
-37
lines changed

7 files changed

+92
-37
lines changed

.changeset/great-spies-cover.md

+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"create-llama": patch
3+
---
4+
5+
Add e2e testing for llamacloud datasource

.github/workflows/e2e.yml

+1
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,7 @@ jobs:
6262
run: pnpm run e2e
6363
env:
6464
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
65+
LLAMA_CLOUD_API_KEY: ${{ secrets.LLAMA_CLOUD_API_KEY }}
6566
working-directory: .
6667

6768
- uses: actions/upload-artifact@v3

e2e/basic.spec.ts

+12-3
Original file line numberDiff line numberDiff line change
@@ -17,20 +17,27 @@ const templateFrameworks: TemplateFramework[] = [
1717
"express",
1818
"fastapi",
1919
];
20-
const dataSources: string[] = ["--no-files", "--example-file"];
20+
const dataSources: string[] = ["--no-files", "--llamacloud"];
2121
const templateUIs: TemplateUI[] = ["shadcn", "html"];
2222
const templatePostInstallActions: TemplatePostInstallAction[] = [
2323
"none",
2424
"runApp",
2525
];
2626

27+
const llamaCloudProjectName = "create-llama";
28+
const llamaCloudIndexName = "e2e-test";
29+
2730
for (const templateType of templateTypes) {
2831
for (const templateFramework of templateFrameworks) {
2932
for (const dataSource of dataSources) {
3033
for (const templateUI of templateUIs) {
3134
for (const templatePostInstallAction of templatePostInstallActions) {
3235
const appType: AppType =
3336
templateFramework === "nextjs" ? "" : "--frontend";
37+
const userMessage =
38+
dataSource !== "--no-files"
39+
? "Physical standard for letters"
40+
: "Hello";
3441
test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
3542
let port: number;
3643
let externalPort: number;
@@ -55,6 +62,8 @@ for (const templateType of templateTypes) {
5562
port,
5663
externalPort,
5764
templatePostInstallAction,
65+
llamaCloudProjectName,
66+
llamaCloudIndexName,
5867
);
5968
name = result.projectName;
6069
appProcess = result.appProcess;
@@ -75,7 +84,7 @@ for (const templateType of templateTypes) {
7584
}) => {
7685
test.skip(templatePostInstallAction !== "runApp");
7786
await page.goto(`http://localhost:${port}`);
78-
await page.fill("form input", "hello");
87+
await page.fill("form input", userMessage);
7988
const [response] = await Promise.all([
8089
page.waitForResponse(
8190
(res) => {
@@ -106,7 +115,7 @@ for (const templateType of templateTypes) {
106115
messages: [
107116
{
108117
role: "user",
109-
content: "Hello",
118+
content: userMessage,
110119
},
111120
],
112121
},

e2e/utils.ts

+10-2
Original file line numberDiff line numberDiff line change
@@ -72,9 +72,13 @@ export async function runCreateLlama(
7272
port: number,
7373
externalPort: number,
7474
postInstallAction: TemplatePostInstallAction,
75+
llamaCloudProjectName: string,
76+
llamaCloudIndexName: string,
7577
): Promise<CreateLlamaResult> {
76-
if (!process.env.OPENAI_API_KEY) {
77-
throw new Error("Setting OPENAI_API_KEY is mandatory to run tests");
78+
if (!process.env.OPENAI_API_KEY || !process.env.LLAMA_CLOUD_API_KEY) {
79+
throw new Error(
80+
"Setting the OPENAI_API_KEY and LLAMA_CLOUD_API_KEY is mandatory to run tests",
81+
);
7882
}
7983
const name = [
8084
templateType,
@@ -110,12 +114,16 @@ export async function runCreateLlama(
110114
"--no-llama-parse",
111115
"--observability",
112116
"none",
117+
"--llama-cloud-key",
118+
process.env.LLAMA_CLOUD_API_KEY,
113119
].join(" ");
114120
console.log(`running command '${command}' in ${cwd}`);
115121
const appProcess = exec(command, {
116122
cwd,
117123
env: {
118124
...process.env,
125+
LLAMA_CLOUD_PROJECT_NAME: llamaCloudProjectName,
126+
LLAMA_CLOUD_INDEX_NAME: llamaCloudIndexName,
119127
},
120128
});
121129
appProcess.stderr?.on("data", (data) => {

helpers/index.ts

+36-15
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,31 @@ import {
2323
} from "./types";
2424
import { installTSTemplate } from "./typescript";
2525

26+
const checkForGenerateScript = (
27+
modelConfig: ModelConfig,
28+
vectorDb?: TemplateVectorDB,
29+
llamaCloudKey?: string,
30+
useLlamaParse?: boolean,
31+
) => {
32+
const missingSettings = [];
33+
34+
if (!modelConfig.isConfigured()) {
35+
missingSettings.push("your model provider API key");
36+
}
37+
38+
const llamaCloudApiKey = llamaCloudKey ?? process.env["LLAMA_CLOUD_API_KEY"];
39+
const isRequiredLlamaCloudKey = useLlamaParse || vectorDb === "llamacloud";
40+
if (isRequiredLlamaCloudKey && !llamaCloudApiKey) {
41+
missingSettings.push("your LLAMA_CLOUD_API_KEY");
42+
}
43+
44+
if (vectorDb !== "none" && vectorDb !== "llamacloud") {
45+
missingSettings.push("your Vector DB environment variables");
46+
}
47+
48+
return missingSettings;
49+
};
50+
2651
// eslint-disable-next-line max-params
2752
async function generateContextData(
2853
framework: TemplateFramework,
@@ -38,12 +63,15 @@ async function generateContextData(
3863
? "poetry run generate"
3964
: `${packageManager} run generate`,
4065
)}`;
41-
const modelConfigured = modelConfig.isConfigured();
42-
const llamaCloudKeyConfigured = useLlamaParse
43-
? llamaCloudKey || process.env["LLAMA_CLOUD_API_KEY"]
44-
: true;
45-
const hasVectorDb = vectorDb && vectorDb !== "none";
46-
if (modelConfigured && llamaCloudKeyConfigured && !hasVectorDb) {
66+
67+
const missingSettings = checkForGenerateScript(
68+
modelConfig,
69+
vectorDb,
70+
llamaCloudKey,
71+
useLlamaParse,
72+
);
73+
74+
if (!missingSettings.length) {
4775
// If all the required environment variables are set, run the generate script
4876
if (framework === "fastapi") {
4977
if (isHavingPoetryLockFile()) {
@@ -63,15 +91,8 @@ async function generateContextData(
6391
}
6492
}
6593

66-
// generate the message of what to do to run the generate script manually
67-
const settings = [];
68-
if (!modelConfigured) settings.push("your model provider API key");
69-
if (!llamaCloudKeyConfigured) settings.push("your Llama Cloud key");
70-
if (hasVectorDb) settings.push("your Vector DB environment variables");
71-
const settingsMessage =
72-
settings.length > 0 ? `After setting ${settings.join(" and ")}, ` : "";
73-
const generateMessage = `run ${runGenerate} to generate the context data.`;
74-
console.log(`\n${settingsMessage}${generateMessage}\n\n`);
94+
const settingsMessage = `After setting ${missingSettings.join(" and ")}, run ${runGenerate} to generate the context data.`;
95+
console.log(`\n${settingsMessage}\n\n`);
7596
}
7697
}
7798

index.ts

+10-2
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ import prompts from "prompts";
99
import terminalLink from "terminal-link";
1010
import checkForUpdate from "update-check";
1111
import { createApp } from "./create-app";
12-
import { getDataSources } from "./helpers/datasources";
12+
import { EXAMPLE_FILE, getDataSources } from "./helpers/datasources";
1313
import { getPkgManager } from "./helpers/get-pkg-manager";
1414
import { isFolderEmpty } from "./helpers/is-folder-empty";
1515
import { initializeGlobalAgent } from "./helpers/proxy";
@@ -194,8 +194,16 @@ if (process.argv.includes("--no-llama-parse")) {
194194
program.askModels = process.argv.includes("--ask-models");
195195
if (process.argv.includes("--no-files")) {
196196
program.dataSources = [];
197-
} else {
197+
} else if (process.argv.includes("--example-file")) {
198198
program.dataSources = getDataSources(program.files, program.exampleFile);
199+
} else if (process.argv.includes("--llamacloud")) {
200+
program.dataSources = [
201+
{
202+
type: "llamacloud",
203+
config: {},
204+
},
205+
EXAMPLE_FILE,
206+
];
199207
}
200208

201209
const packageManager = !!program.useNpm

questions.ts

+18-15
Original file line numberDiff line numberDiff line change
@@ -671,21 +671,24 @@ export const askQuestions = async (
671671

672672
// Ask for LlamaCloud API key when using a LlamaCloud index or LlamaParse
673673
if (isUsingLlamaCloud || program.useLlamaParse) {
674-
if (ciInfo.isCI) {
675-
program.llamaCloudKey = getPrefOrDefault("llamaCloudKey");
676-
} else {
677-
// Ask for LlamaCloud API key
678-
const { llamaCloudKey } = await prompts(
679-
{
680-
type: "text",
681-
name: "llamaCloudKey",
682-
message:
683-
"Please provide your LlamaCloud API key (leave blank to skip):",
684-
},
685-
questionHandlers,
686-
);
687-
program.llamaCloudKey = preferences.llamaCloudKey =
688-
llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
674+
if (!program.llamaCloudKey) {
675+
// if already set, don't ask again
676+
if (ciInfo.isCI) {
677+
program.llamaCloudKey = getPrefOrDefault("llamaCloudKey");
678+
} else {
679+
// Ask for LlamaCloud API key
680+
const { llamaCloudKey } = await prompts(
681+
{
682+
type: "text",
683+
name: "llamaCloudKey",
684+
message:
685+
"Please provide your LlamaCloud API key (leave blank to skip):",
686+
},
687+
questionHandlers,
688+
);
689+
program.llamaCloudKey = preferences.llamaCloudKey =
690+
llamaCloudKey || process.env.LLAMA_CLOUD_API_KEY;
691+
}
689692
}
690693
}
691694

0 commit comments

Comments (0)