Skip to content

Commit fbe3b08

Browse files
refactor: test frameworks via matrix
1 parent e974c8e commit fbe3b08

File tree

2 files changed

+102
-104
lines changed

2 files changed

+102
-104
lines changed

.github/workflows/e2e.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ jobs:
         node-version: [18, 20]
         python-version: ["3.11"]
         os: [macos-latest, windows-latest, ubuntu-22.04]
+        frameworks: ["nextjs", "express", "fastapi"]
     defaults:
       run:
         shell: bash
@@ -63,6 +64,7 @@ jobs:
       env:
         OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
         LLAMA_CLOUD_API_KEY: ${{ secrets.LLAMA_CLOUD_API_KEY }}
+        FRAMEWORKS: ${{ matrix.frameworks }}
       working-directory: .

     - uses: actions/upload-artifact@v3

e2e/basic.spec.ts

Lines changed: 100 additions & 104 deletions
Original file line numberDiff line numberDiff line change
@@ -11,12 +11,10 @@ import type {
1111
} from "../helpers";
1212
import { createTestDir, runCreateLlama, type AppType } from "./utils";
1313

14-
const templateTypes: TemplateType[] = ["streaming"];
15-
const templateFrameworks: TemplateFramework[] = [
16-
"nextjs",
17-
"express",
18-
"fastapi",
19-
];
14+
const templateType: TemplateType = "streaming";
15+
const templateFrameworks: TemplateFramework[] = process.env.FRAMEWORKS
16+
? (process.env.FRAMEWORKS.split(",") as TemplateFramework[])
17+
: ["fastapi"];
2018
const dataSources: string[] = ["--no-files", "--example-file"];
2119
const templateUIs: TemplateUI[] = ["shadcn"];
2220
const templatePostInstallActions: TemplatePostInstallAction[] = [
@@ -27,111 +25,109 @@ const templatePostInstallActions: TemplatePostInstallAction[] = [
2725
const llamaCloudProjectName = "create-llama";
2826
const llamaCloudIndexName = "e2e-test";
2927

30-
for (const templateType of templateTypes) {
31-
for (const templateFramework of templateFrameworks) {
32-
for (const dataSource of dataSources) {
33-
for (const templateUI of templateUIs) {
34-
for (const templatePostInstallAction of templatePostInstallActions) {
35-
const appType: AppType =
36-
templateFramework === "nextjs" ? "" : "--frontend";
37-
const userMessage =
38-
dataSource !== "--no-files"
39-
? "Physical standard for letters"
40-
: "Hello";
41-
test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
42-
let port: number;
43-
let externalPort: number;
44-
let cwd: string;
45-
let name: string;
46-
let appProcess: ChildProcess;
47-
// Only test without using vector db for now
48-
const vectorDb = "none";
49-
50-
test.beforeAll(async () => {
51-
port = Math.floor(Math.random() * 10000) + 10000;
52-
externalPort = port + 1;
53-
cwd = await createTestDir();
54-
const result = await runCreateLlama(
55-
cwd,
56-
templateType,
57-
templateFramework,
58-
dataSource,
59-
templateUI,
60-
vectorDb,
61-
appType,
62-
port,
63-
externalPort,
64-
templatePostInstallAction,
65-
llamaCloudProjectName,
66-
llamaCloudIndexName,
67-
);
68-
name = result.projectName;
69-
appProcess = result.appProcess;
70-
});
28+
for (const templateFramework of templateFrameworks) {
29+
for (const dataSource of dataSources) {
30+
for (const templateUI of templateUIs) {
31+
for (const templatePostInstallAction of templatePostInstallActions) {
32+
const appType: AppType =
33+
templateFramework === "nextjs" ? "" : "--frontend";
34+
const userMessage =
35+
dataSource !== "--no-files"
36+
? "Physical standard for letters"
37+
: "Hello";
38+
test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
39+
let port: number;
40+
let externalPort: number;
41+
let cwd: string;
42+
let name: string;
43+
let appProcess: ChildProcess;
44+
// Only test without using vector db for now
45+
const vectorDb = "none";
7146

72-
test("App folder should exist", async () => {
73-
const dirExists = fs.existsSync(path.join(cwd, name));
74-
expect(dirExists).toBeTruthy();
75-
});
76-
test("Frontend should have a title", async ({ page }) => {
77-
test.skip(templatePostInstallAction !== "runApp");
78-
await page.goto(`http://localhost:${port}`);
79-
await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
80-
});
47+
test.beforeAll(async () => {
48+
port = Math.floor(Math.random() * 10000) + 10000;
49+
externalPort = port + 1;
50+
cwd = await createTestDir();
51+
const result = await runCreateLlama(
52+
cwd,
53+
templateType,
54+
templateFramework,
55+
dataSource,
56+
templateUI,
57+
vectorDb,
58+
appType,
59+
port,
60+
externalPort,
61+
templatePostInstallAction,
62+
llamaCloudProjectName,
63+
llamaCloudIndexName,
64+
);
65+
name = result.projectName;
66+
appProcess = result.appProcess;
67+
});
8168

82-
test("Frontend should be able to submit a message and receive a response", async ({
83-
page,
84-
}) => {
85-
test.skip(templatePostInstallAction !== "runApp");
86-
await page.goto(`http://localhost:${port}`);
87-
await page.fill("form input", userMessage);
88-
const [response] = await Promise.all([
89-
page.waitForResponse(
90-
(res) => {
91-
return (
92-
res.url().includes("/api/chat") && res.status() === 200
93-
);
94-
},
95-
{
96-
timeout: 1000 * 60,
97-
},
98-
),
99-
page.click("form button[type=submit]"),
100-
]);
101-
const text = await response.text();
102-
console.log("AI response when submitting message: ", text);
103-
expect(response.ok()).toBeTruthy();
104-
});
69+
test("App folder should exist", async () => {
70+
const dirExists = fs.existsSync(path.join(cwd, name));
71+
expect(dirExists).toBeTruthy();
72+
});
73+
test("Frontend should have a title", async ({ page }) => {
74+
test.skip(templatePostInstallAction !== "runApp");
75+
await page.goto(`http://localhost:${port}`);
76+
await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
77+
});
10578

106-
test("Backend frameworks should response when calling non-streaming chat API", async ({
107-
request,
108-
}) => {
109-
test.skip(templatePostInstallAction !== "runApp");
110-
test.skip(templateFramework === "nextjs");
111-
const response = await request.post(
112-
`http://localhost:${externalPort}/api/chat/request`,
79+
test("Frontend should be able to submit a message and receive a response", async ({
80+
page,
81+
}) => {
82+
test.skip(templatePostInstallAction !== "runApp");
83+
await page.goto(`http://localhost:${port}`);
84+
await page.fill("form input", userMessage);
85+
const [response] = await Promise.all([
86+
page.waitForResponse(
87+
(res) => {
88+
return (
89+
res.url().includes("/api/chat") && res.status() === 200
90+
);
91+
},
11392
{
114-
data: {
115-
messages: [
116-
{
117-
role: "user",
118-
content: userMessage,
119-
},
120-
],
121-
},
93+
timeout: 1000 * 60,
12294
},
123-
);
124-
const text = await response.text();
125-
console.log("AI response when calling API: ", text);
126-
expect(response.ok()).toBeTruthy();
127-
});
95+
),
96+
page.click("form button[type=submit]"),
97+
]);
98+
const text = await response.text();
99+
console.log("AI response when submitting message: ", text);
100+
expect(response.ok()).toBeTruthy();
101+
});
102+
103+
test("Backend frameworks should response when calling non-streaming chat API", async ({
104+
request,
105+
}) => {
106+
test.skip(templatePostInstallAction !== "runApp");
107+
test.skip(templateFramework === "nextjs");
108+
const response = await request.post(
109+
`http://localhost:${externalPort}/api/chat/request`,
110+
{
111+
data: {
112+
messages: [
113+
{
114+
role: "user",
115+
content: userMessage,
116+
},
117+
],
118+
},
119+
},
120+
);
121+
const text = await response.text();
122+
console.log("AI response when calling API: ", text);
123+
expect(response.ok()).toBeTruthy();
124+
});
128125

129-
// clean processes
130-
test.afterAll(async () => {
131-
appProcess?.kill();
132-
});
126+
// clean processes
127+
test.afterAll(async () => {
128+
appProcess?.kill();
133129
});
134-
}
130+
});
135131
}
136132
}
137133
}

0 commit comments

Comments
 (0)