Skip to content

refactor: test frameworks via matrix #211

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Aug 5, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .github/workflows/e2e.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ jobs:
node-version: [18, 20]
python-version: ["3.11"]
os: [macos-latest, windows-latest, ubuntu-22.04]
frameworks: ["nextjs", "express", "fastapi"]
defaults:
run:
shell: bash
Expand Down Expand Up @@ -63,6 +64,7 @@ jobs:
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
LLAMA_CLOUD_API_KEY: ${{ secrets.LLAMA_CLOUD_API_KEY }}
FRAMEWORKS: ${{ matrix.frameworks }}
working-directory: .

- uses: actions/upload-artifact@v3
Expand Down
204 changes: 100 additions & 104 deletions e2e/basic.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,10 @@ import type {
} from "../helpers";
import { createTestDir, runCreateLlama, type AppType } from "./utils";

const templateTypes: TemplateType[] = ["streaming"];
const templateFrameworks: TemplateFramework[] = [
"nextjs",
"express",
"fastapi",
];
// Template type is fixed to "streaming" for all e2e runs.
const templateType: TemplateType = "streaming";
// Frameworks under test come from the FRAMEWORKS env var as a comma-separated
// list (set per CI matrix job, e.g. "nextjs"); defaults to fastapi when unset.
// Entries are trimmed and blanks dropped so inputs like "nextjs, express" or a
// trailing comma don't produce an invalid framework name.
const templateFrameworks: TemplateFramework[] = process.env.FRAMEWORKS
  ? (process.env.FRAMEWORKS.split(",")
      .map((f) => f.trim())
      .filter(Boolean) as TemplateFramework[])
  : ["fastapi"];
const dataSources: string[] = ["--no-files", "--example-file"];
const templateUIs: TemplateUI[] = ["shadcn"];
const templatePostInstallActions: TemplatePostInstallAction[] = [
Expand All @@ -27,111 +25,109 @@ const templatePostInstallActions: TemplatePostInstallAction[] = [
const llamaCloudProjectName = "create-llama";
const llamaCloudIndexName = "e2e-test";

for (const templateType of templateTypes) {
for (const templateFramework of templateFrameworks) {
for (const dataSource of dataSources) {
for (const templateUI of templateUIs) {
for (const templatePostInstallAction of templatePostInstallActions) {
const appType: AppType =
templateFramework === "nextjs" ? "" : "--frontend";
const userMessage =
dataSource !== "--no-files"
? "Physical standard for letters"
: "Hello";
test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
let port: number;
let externalPort: number;
let cwd: string;
let name: string;
let appProcess: ChildProcess;
// Only test without using vector db for now
const vectorDb = "none";

test.beforeAll(async () => {
port = Math.floor(Math.random() * 10000) + 10000;
externalPort = port + 1;
cwd = await createTestDir();
const result = await runCreateLlama(
cwd,
templateType,
templateFramework,
dataSource,
templateUI,
vectorDb,
appType,
port,
externalPort,
templatePostInstallAction,
llamaCloudProjectName,
llamaCloudIndexName,
);
name = result.projectName;
appProcess = result.appProcess;
});
for (const templateFramework of templateFrameworks) {
for (const dataSource of dataSources) {
for (const templateUI of templateUIs) {
for (const templatePostInstallAction of templatePostInstallActions) {
const appType: AppType =
templateFramework === "nextjs" ? "" : "--frontend";
const userMessage =
dataSource !== "--no-files"
? "Physical standard for letters"
: "Hello";
test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
let port: number;
let externalPort: number;
let cwd: string;
let name: string;
let appProcess: ChildProcess;
// Only test without using vector db for now
const vectorDb = "none";

test("App folder should exist", async () => {
const dirExists = fs.existsSync(path.join(cwd, name));
expect(dirExists).toBeTruthy();
});
test("Frontend should have a title", async ({ page }) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
});
test.beforeAll(async () => {
port = Math.floor(Math.random() * 10000) + 10000;
externalPort = port + 1;
cwd = await createTestDir();
const result = await runCreateLlama(
cwd,
templateType,
templateFramework,
dataSource,
templateUI,
vectorDb,
appType,
port,
externalPort,
templatePostInstallAction,
llamaCloudProjectName,
llamaCloudIndexName,
);
name = result.projectName;
appProcess = result.appProcess;
});

test("Frontend should be able to submit a message and receive a response", async ({
page,
}) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await page.fill("form input", userMessage);
const [response] = await Promise.all([
page.waitForResponse(
(res) => {
return (
res.url().includes("/api/chat") && res.status() === 200
);
},
{
timeout: 1000 * 60,
},
),
page.click("form button[type=submit]"),
]);
const text = await response.text();
console.log("AI response when submitting message: ", text);
expect(response.ok()).toBeTruthy();
});
test("App folder should exist", async () => {
const dirExists = fs.existsSync(path.join(cwd, name));
expect(dirExists).toBeTruthy();
});
test("Frontend should have a title", async ({ page }) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
});

test("Backend frameworks should response when calling non-streaming chat API", async ({
request,
}) => {
test.skip(templatePostInstallAction !== "runApp");
test.skip(templateFramework === "nextjs");
const response = await request.post(
`http://localhost:${externalPort}/api/chat/request`,
test("Frontend should be able to submit a message and receive a response", async ({
page,
}) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await page.fill("form input", userMessage);
const [response] = await Promise.all([
page.waitForResponse(
(res) => {
return (
res.url().includes("/api/chat") && res.status() === 200
);
},
{
data: {
messages: [
{
role: "user",
content: userMessage,
},
],
},
timeout: 1000 * 60,
},
);
const text = await response.text();
console.log("AI response when calling API: ", text);
expect(response.ok()).toBeTruthy();
});
),
page.click("form button[type=submit]"),
]);
const text = await response.text();
console.log("AI response when submitting message: ", text);
expect(response.ok()).toBeTruthy();
});

test("Backend frameworks should response when calling non-streaming chat API", async ({
request,
}) => {
test.skip(templatePostInstallAction !== "runApp");
test.skip(templateFramework === "nextjs");
const response = await request.post(
`http://localhost:${externalPort}/api/chat/request`,
{
data: {
messages: [
{
role: "user",
content: userMessage,
},
],
},
},
);
const text = await response.text();
console.log("AI response when calling API: ", text);
expect(response.ok()).toBeTruthy();
});

// clean processes
test.afterAll(async () => {
appProcess?.kill();
});
// clean processes
test.afterAll(async () => {
appProcess?.kill();
});
}
});
}
}
}
Expand Down
Loading