Skip to content

fix: bump llama-index-callbacks-arize-phoenix package #340

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 1 commit into from
Oct 7, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/perfect-bags-greet.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"create-llama": patch
---

Bump package for llamatrace observability
134 changes: 88 additions & 46 deletions e2e/python/resolve_dependencies.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ import fs from "fs";
import path from "path";
import util from "util";
import { TemplateFramework, TemplateVectorDB } from "../../helpers/types";
import { createTestDir, runCreateLlama } from "../utils";
import { RunCreateLlamaOptions, createTestDir, runCreateLlama } from "../utils";

const execAsync = util.promisify(exec);

Expand Down Expand Up @@ -42,62 +42,69 @@ if (
"--db-source mysql+pymysql://user:pass@localhost:3306/mydb",
];

const observabilityOptions = ["llamatrace", "traceloop"];

// Register one test per observability option instead of crossing it with the
// vectorDb/tools/dataSource matrix, which keeps CI runtime manageable.
test.describe("Test resolve python dependencies with observability", () => {
  // Each run uses the streaming template with vectorDb: none, tools: none,
  // and the --example-file data source.
  observabilityOptions.forEach((observability) => {
    test(`observability: ${observability}`, async () => {
      const cwd = await createTestDir();

      await createAndCheckLlamaProject({
        options: {
          cwd,
          templateType: "streaming",
          templateFramework,
          dataSource,
          vectorDb: "none",
          tools: "none",
          port: 3000, // port, not used
          externalPort: 8000, // externalPort, not used
          postInstallAction: "none", // postInstallAction
          templateUI: undefined, // ui
          appType: "--no-frontend", // appType
          llamaCloudProjectName: undefined, // llamaCloudProjectName
          llamaCloudIndexName: undefined, // llamaCloudIndexName
          observability,
        },
      });
    });
  });
});

test.describe("Test resolve python dependencies", () => {
for (const vectorDb of vectorDbs) {
for (const tool of toolOptions) {
for (const dataSource of dataSources) {
Comment on lines 77 to 79
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@leehuwuj how about we don't run every combination, but instead run three smaller passes: first all vector DBs, then all tools, and finally all data sources?

const dataSourceType = dataSource.split(" ")[0];
const optionDescription = `vectorDb: ${vectorDb}, tools: ${tool}, dataSource: ${dataSourceType}`;
const toolDescription = tool === "none" ? "no tools" : tool;
const optionDescription = `vectorDb: ${vectorDb}, ${toolDescription}, dataSource: ${dataSourceType}`;

test(`options: ${optionDescription}`, async () => {
const cwd = await createTestDir();

const result = await runCreateLlama({
cwd,
templateType: "streaming",
templateFramework,
dataSource,
vectorDb,
port: 3000, // port
externalPort: 8000, // externalPort
postInstallAction: "none", // postInstallAction
templateUI: undefined, // ui
appType: "--no-frontend", // appType
llamaCloudProjectName: undefined, // llamaCloudProjectName
llamaCloudIndexName: undefined, // llamaCloudIndexName
tools: tool,
});
const name = result.projectName;

// Check if the app folder exists
const dirExists = fs.existsSync(path.join(cwd, name));
expect(dirExists).toBeTruthy();

// Check if pyproject.toml exists
const pyprojectPath = path.join(cwd, name, "pyproject.toml");
const pyprojectExists = fs.existsSync(pyprojectPath);
expect(pyprojectExists).toBeTruthy();

// Run poetry lock
try {
const { stdout, stderr } = await execAsync(
"poetry config virtualenvs.in-project true && poetry lock --no-update",
{
cwd: path.join(cwd, name),
const { pyprojectPath, projectPath } =
await createAndCheckLlamaProject({
options: {
cwd,
templateType: "streaming",
templateFramework,
dataSource,
vectorDb,
tools: tool,
port: 3000, // port, not used
externalPort: 8000, // externalPort, not used
postInstallAction: "none", // postInstallAction
templateUI: undefined, // ui
appType: "--no-frontend", // appType
llamaCloudProjectName: undefined, // llamaCloudProjectName
llamaCloudIndexName: undefined, // llamaCloudIndexName
observability: undefined, // observability
},
);
console.log("poetry lock stdout:", stdout);
console.error("poetry lock stderr:", stderr);
} catch (error) {
console.error("Error running poetry lock:", error);
throw error;
}
});

// Check if poetry.lock file was created
const poetryLockExists = fs.existsSync(
path.join(cwd, name, "poetry.lock"),
);
expect(poetryLockExists).toBeTruthy();
// Additional checks for specific dependencies

// Verify that specific dependencies are in pyproject.toml
const pyprojectContent = fs.readFileSync(pyprojectPath, "utf-8");
Expand Down Expand Up @@ -136,3 +143,38 @@ if (
}
});
}

/**
 * Scaffolds a create-llama project with the given options, verifies the
 * generated Python project structure, and resolves its dependencies via
 * `poetry lock`.
 *
 * @returns the generated project's root directory and its pyproject.toml
 *          path, so callers can run further content checks.
 * @throws rethrows any failure from `poetry lock` after logging it.
 */
async function createAndCheckLlamaProject({
  options,
}: {
  options: RunCreateLlamaOptions;
}): Promise<{ pyprojectPath: string; projectPath: string }> {
  const { projectName } = await runCreateLlama(options);
  const projectPath = path.join(options.cwd, projectName);

  // The generated app folder must exist...
  expect(fs.existsSync(projectPath)).toBeTruthy();

  // ...and must contain a pyproject.toml.
  const pyprojectPath = path.join(projectPath, "pyproject.toml");
  expect(fs.existsSync(pyprojectPath)).toBeTruthy();

  // Resolve dependencies. Poetry's output is echoed either way so that CI
  // logs are useful when resolution fails.
  try {
    const { stdout, stderr } = await execAsync(
      "poetry config virtualenvs.in-project true && poetry lock --no-update",
      { cwd: projectPath },
    );
    console.log("poetry lock stdout:", stdout);
    console.error("poetry lock stderr:", stderr);
  } catch (error) {
    console.error("Error running poetry lock:", error);
    throw error;
  }

  // A successful lock leaves poetry.lock next to pyproject.toml.
  expect(fs.existsSync(path.join(projectPath, "poetry.lock"))).toBeTruthy();

  return { pyprojectPath, projectPath };
}
5 changes: 5 additions & 0 deletions e2e/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ export type RunCreateLlamaOptions = {
llamaCloudIndexName?: string;
tools?: string;
useLlamaParse?: boolean;
observability?: string;
};

export async function runCreateLlama({
Expand All @@ -50,6 +51,7 @@ export async function runCreateLlama({
llamaCloudIndexName,
tools,
useLlamaParse,
observability,
}: RunCreateLlamaOptions): Promise<CreateLlamaResult> {
if (!process.env.OPENAI_API_KEY || !process.env.LLAMA_CLOUD_API_KEY) {
throw new Error(
Expand Down Expand Up @@ -114,6 +116,9 @@ export async function runCreateLlama({
} else {
commandArgs.push("--no-llama-parse");
}
if (observability) {
commandArgs.push("--observability", observability);
}

const command = commandArgs.join(" ");
console.log(`running command '${command}' in ${cwd}`);
Expand Down
2 changes: 1 addition & 1 deletion helpers/python.ts
Original file line number Diff line number Diff line change
Expand Up @@ -463,7 +463,7 @@ export const installPythonTemplate = async ({
if (observability === "llamatrace") {
addOnDependencies.push({
name: "llama-index-callbacks-arize-phoenix",
version: "^0.1.6",
version: "^0.2.1",
});
}

Expand Down
Loading