refactor: test frameworks via matrix #211

Merged (2 commits) on Aug 5, 2024
4 changes: 4 additions & 0 deletions .github/workflows/e2e.yml
@@ -18,6 +18,8 @@ jobs:
         node-version: [18, 20]
         python-version: ["3.11"]
         os: [macos-latest, windows-latest, ubuntu-22.04]
+        frameworks: ["nextjs", "express", "fastapi"]
+        datasources: ["--no-files", "--example-file"]
     defaults:
       run:
         shell: bash
@@ -63,6 +65,8 @@
         env:
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
           LLAMA_CLOUD_API_KEY: ${{ secrets.LLAMA_CLOUD_API_KEY }}
+          FRAMEWORK: ${{ matrix.frameworks }}
+          DATASOURCE: ${{ matrix.datasources }}
         working-directory: .

       - uses: actions/upload-artifact@v3
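Assuming GitHub Actions' default cross-product expansion and no excludes, the matrix now yields 2 node versions × 1 Python version × 3 operating systems × 3 frameworks × 2 data sources = 36 jobs, up from 6; each job exports exactly one FRAMEWORK/DATASOURCE pair to the test process instead of looping over every combination inside a single job. A single matrix cell can presumably be reproduced locally by exporting the same variables, e.g. FRAMEWORK=express DATASOURCE=--no-files, before invoking the Playwright runner (the exact test command depends on the repo's scripts).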
214 changes: 98 additions & 116 deletions e2e/basic.spec.ts
@@ -11,128 +11,110 @@ import type {
 } from "../helpers";
 import { createTestDir, runCreateLlama, type AppType } from "./utils";

-const templateTypes: TemplateType[] = ["streaming"];
-const templateFrameworks: TemplateFramework[] = [
-  "nextjs",
-  "express",
-  "fastapi",
-];
-const dataSources: string[] = ["--no-files", "--example-file"];
-const templateUIs: TemplateUI[] = ["shadcn"];
-const templatePostInstallActions: TemplatePostInstallAction[] = [
-  "none",
-  "runApp",
-];
+const templateType: TemplateType = "streaming";
+const templateFramework: TemplateFramework = process.env.FRAMEWORK
+  ? (process.env.FRAMEWORK as TemplateFramework)
+  : "fastapi";
+const dataSource: string = process.env.DATASOURCE
+  ? process.env.DATASOURCE
+  : "--example-file";
+const templateUI: TemplateUI = "shadcn";
+const templatePostInstallAction: TemplatePostInstallAction = "runApp";

 const llamaCloudProjectName = "create-llama";
 const llamaCloudIndexName = "e2e-test";

-for (const templateType of templateTypes) {
-  for (const templateFramework of templateFrameworks) {
-    for (const dataSource of dataSources) {
-      for (const templateUI of templateUIs) {
-        for (const templatePostInstallAction of templatePostInstallActions) {
-          const appType: AppType =
-            templateFramework === "nextjs" ? "" : "--frontend";
-          const userMessage =
-            dataSource !== "--no-files"
-              ? "Physical standard for letters"
-              : "Hello";
-          test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
-            let port: number;
-            let externalPort: number;
-            let cwd: string;
-            let name: string;
-            let appProcess: ChildProcess;
-            // Only test without using vector db for now
-            const vectorDb = "none";
+const appType: AppType = templateFramework === "nextjs" ? "" : "--frontend";
+const userMessage =
+  dataSource !== "--no-files" ? "Physical standard for letters" : "Hello";
+test.describe(`try create-llama ${templateType} ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
+  let port: number;
+  let externalPort: number;
+  let cwd: string;
+  let name: string;
+  let appProcess: ChildProcess;
+  // Only test without using vector db for now
+  const vectorDb = "none";

-            test.beforeAll(async () => {
-              port = Math.floor(Math.random() * 10000) + 10000;
-              externalPort = port + 1;
-              cwd = await createTestDir();
-              const result = await runCreateLlama(
-                cwd,
-                templateType,
-                templateFramework,
-                dataSource,
-                templateUI,
-                vectorDb,
-                appType,
-                port,
-                externalPort,
-                templatePostInstallAction,
-                llamaCloudProjectName,
-                llamaCloudIndexName,
-              );
-              name = result.projectName;
-              appProcess = result.appProcess;
-            });
+  test.beforeAll(async () => {
+    port = Math.floor(Math.random() * 10000) + 10000;
+    externalPort = port + 1;
+    cwd = await createTestDir();
+    const result = await runCreateLlama(
+      cwd,
+      templateType,
+      templateFramework,
+      dataSource,
+      templateUI,
+      vectorDb,
+      appType,
+      port,
+      externalPort,
+      templatePostInstallAction,
+      llamaCloudProjectName,
+      llamaCloudIndexName,
+    );
+    name = result.projectName;
+    appProcess = result.appProcess;
+  });

test("App folder should exist", async () => {
const dirExists = fs.existsSync(path.join(cwd, name));
expect(dirExists).toBeTruthy();
});
test("Frontend should have a title", async ({ page }) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
});
test("App folder should exist", async () => {
const dirExists = fs.existsSync(path.join(cwd, name));
expect(dirExists).toBeTruthy();
});
test("Frontend should have a title", async ({ page }) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await expect(page.getByText("Built by LlamaIndex")).toBeVisible();
});

test("Frontend should be able to submit a message and receive a response", async ({
page,
}) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await page.fill("form input", userMessage);
const [response] = await Promise.all([
page.waitForResponse(
(res) => {
return (
res.url().includes("/api/chat") && res.status() === 200
);
},
{
timeout: 1000 * 60,
},
),
page.click("form button[type=submit]"),
]);
const text = await response.text();
console.log("AI response when submitting message: ", text);
expect(response.ok()).toBeTruthy();
});
test("Frontend should be able to submit a message and receive a response", async ({
page,
}) => {
test.skip(templatePostInstallAction !== "runApp");
await page.goto(`http://localhost:${port}`);
await page.fill("form input", userMessage);
const [response] = await Promise.all([
page.waitForResponse(
(res) => {
return res.url().includes("/api/chat") && res.status() === 200;
},
{
timeout: 1000 * 60,
},
),
page.click("form button[type=submit]"),
]);
const text = await response.text();
console.log("AI response when submitting message: ", text);
expect(response.ok()).toBeTruthy();
});

test("Backend frameworks should response when calling non-streaming chat API", async ({
request,
}) => {
test.skip(templatePostInstallAction !== "runApp");
test.skip(templateFramework === "nextjs");
const response = await request.post(
`http://localhost:${externalPort}/api/chat/request`,
{
data: {
messages: [
{
role: "user",
content: userMessage,
},
],
},
},
);
const text = await response.text();
console.log("AI response when calling API: ", text);
expect(response.ok()).toBeTruthy();
});
test("Backend frameworks should response when calling non-streaming chat API", async ({
request,
}) => {
test.skip(templatePostInstallAction !== "runApp");
test.skip(templateFramework === "nextjs");
const response = await request.post(
`http://localhost:${externalPort}/api/chat/request`,
{
data: {
messages: [
{
role: "user",
content: userMessage,
},
],
},
},
);
const text = await response.text();
console.log("AI response when calling API: ", text);
expect(response.ok()).toBeTruthy();
});

-            // clean processes
-            test.afterAll(async () => {
-              appProcess?.kill();
-            });
-          });
-        }
-      }
-    }
-  }
-}
+  // clean processes
+  test.afterAll(async () => {
+    appProcess?.kill();
+  });
+});
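One observation on the new env-driven constants: the spec casts process.env.FRAMEWORK straight to TemplateFramework, so a typo in the workflow matrix would only surface as a confusing failure deep inside the test run. A minimal sketch of a stricter alternative, assuming the helper below (hypothetical, not part of this PR) and a list that mirrors the matrix values above:

const FRAMEWORKS = ["nextjs", "express", "fastapi"] as const;
type Framework = (typeof FRAMEWORKS)[number];

// Hypothetical helper, not part of this PR: fail fast on unknown
// FRAMEWORK values instead of casting process.env.FRAMEWORK blindly.
function frameworkFromEnv(fallback: Framework = "fastapi"): Framework {
  const value = process.env.FRAMEWORK;
  if (value === undefined) return fallback;
  if ((FRAMEWORKS as readonly string[]).includes(value)) {
    return value as Framework;
  }
  throw new Error(`Unsupported FRAMEWORK value: ${value}`);
}

The same pattern would apply to DATASOURCE; whether the extra strictness is worth it for an internal e2e matrix is a judgment call.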