Skip to content

Commit 75e1f61

Browse files
authored
fix: TypeScript templates do not create a new LlamaCloud index or upload a file to an existing index. (#356)
1 parent 88220f1 commit 75e1f61

File tree

13 files changed

+83
-35
lines changed

13 files changed

+83
-35
lines changed

.changeset/lucky-queens-smile.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"create-llama": patch
3+
---
4+
5+
Fix: public documents cannot be queried from LlamaCloud

.changeset/thirty-tips-drum.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"create-llama": patch
3+
---
4+
5+
Fix: TypeScript templates cannot upload files to LlamaCloud

e2e/shared/streaming_template.spec.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,13 @@ const userMessage =
2727
dataSource !== "--no-files" ? "Physical standard for letters" : "Hello";
2828

2929
test.describe(`Test streaming template ${templateFramework} ${dataSource} ${templateUI} ${appType} ${templatePostInstallAction}`, async () => {
30+
const isNode18 = process.version.startsWith("v18");
31+
const isLlamaCloud = dataSource === "--llamacloud";
32+
// LlamaCloud uses the File API, which is not supported on Node 18
33+
if (isNode18 && isLlamaCloud) {
34+
test.skip(true, "Skipping tests for Node 18 and LlamaCloud data source");
35+
}
36+
3037
let port: number;
3138
let externalPort: number;
3239
let cwd: string;

templates/components/llamaindex/typescript/documents/upload.ts

Lines changed: 20 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -16,14 +16,26 @@ export async function uploadDocument(
1616
// trigger LlamaCloudIndex API to upload the file and run the pipeline
1717
const projectId = await index.getProjectId();
1818
const pipelineId = await index.getPipelineId();
19-
return [
20-
await LLamaCloudFileService.addFileToPipeline(
21-
projectId,
22-
pipelineId,
23-
new File([fileBuffer], filename, { type: mimeType }),
24-
{ private: "true" },
25-
),
26-
];
19+
try {
20+
return [
21+
await LLamaCloudFileService.addFileToPipeline(
22+
projectId,
23+
pipelineId,
24+
new File([fileBuffer], filename, { type: mimeType }),
25+
{ private: "true" },
26+
),
27+
];
28+
} catch (error) {
29+
if (
30+
error instanceof ReferenceError &&
31+
error.message.includes("File is not defined")
32+
) {
33+
throw new Error(
34+
"File class is not supported in the current Node.js version. Please use Node.js 20 or higher.",
35+
);
36+
}
37+
throw error;
38+
}
2739
}
2840

2941
// run the pipeline for other vector store indexes

templates/components/llamaindex/typescript/streaming/events.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ export function createCallbackManager(stream: StreamData) {
7575
callbackManager.on("retrieve-end", (data) => {
7676
const { nodes, query } = data.detail;
7777
appendSourceData(stream, nodes);
78-
appendEventData(stream, `Retrieving context for query: '${query}'`);
78+
appendEventData(stream, `Retrieving context for query: '${query.query}'`);
7979
appendEventData(
8080
stream,
8181
`Retrieved ${nodes.length} sources to use as context for the query`,

templates/components/vectordbs/python/llamacloud/generate.py

Lines changed: 7 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,18 @@
11
# flake8: noqa: E402
22
import os
3+
34
from dotenv import load_dotenv
45

56
load_dotenv()
67

7-
from llama_cloud import PipelineType
8-
9-
from app.settings import init_settings
10-
from llama_index.core.settings import Settings
11-
8+
import logging
129

1310
from app.engine.index import get_client, get_index
14-
15-
import logging
16-
from llama_index.core.readers import SimpleDirectoryReader
1711
from app.engine.service import LLamaCloudFileService
12+
from app.settings import init_settings
13+
from llama_cloud import PipelineType
14+
from llama_index.core.readers import SimpleDirectoryReader
15+
from llama_index.core.settings import Settings
1816

1917
logging.basicConfig(level=logging.INFO)
2018
logger = logging.getLogger()
@@ -80,13 +78,7 @@ def generate_datasource():
8078
f"Adding file {input_file} to pipeline {index.name} in project {index.project_name}"
8179
)
8280
LLamaCloudFileService.add_file_to_pipeline(
83-
project_id,
84-
pipeline_id,
85-
f,
86-
custom_metadata={
87-
# Set private=false to mark the document as public (required for filtering)
88-
"private": "false",
89-
},
81+
project_id, pipeline_id, f, custom_metadata={}
9082
)
9183

9284
logger.info("Finished generating the index")

templates/components/vectordbs/python/llamacloud/query_filter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ def generate_filters(doc_ids):
55
"""
66
Generate public/private document filters based on the doc_ids and the vector store.
77
"""
8-
# Using "is_empty" filter to include the documents don't have the "private" key because they're uploaded in LlamaCloud UI
8+
# public documents (ingested by "poetry run generate" or in the LlamaCloud UI) don't have the "private" field
99
public_doc_filter = MetadataFilter(
1010
key="private",
1111
value=None,

templates/components/vectordbs/typescript/llamacloud/generate.ts

Lines changed: 19 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -25,17 +25,32 @@ async function* walk(dir: string): AsyncGenerator<string> {
2525

2626
async function loadAndIndex() {
2727
const index = await getDataSource();
28+
// ensure the index is available or create a new one
29+
await index.ensureIndex();
2830
const projectId = await index.getProjectId();
2931
const pipelineId = await index.getPipelineId();
3032

3133
// walk through the data directory and upload each file to LlamaCloud
3234
for await (const filePath of walk(DATA_DIR)) {
3335
const buffer = await fs.readFile(filePath);
3436
const filename = path.basename(filePath);
35-
const file = new File([buffer], filename);
36-
await LLamaCloudFileService.addFileToPipeline(projectId, pipelineId, file, {
37-
private: "false",
38-
});
37+
try {
38+
await LLamaCloudFileService.addFileToPipeline(
39+
projectId,
40+
pipelineId,
41+
new File([buffer], filename),
42+
);
43+
} catch (error) {
44+
if (
45+
error instanceof ReferenceError &&
46+
error.message.includes("File is not defined")
47+
) {
48+
throw new Error(
49+
"File class is not supported in the current Node.js version. Please use Node.js 20 or higher.",
50+
);
51+
}
52+
throw error;
53+
}
3954
}
4055

4156
console.log(`Successfully uploaded documents to LlamaCloud!`);

templates/components/vectordbs/typescript/llamacloud/queryFilter.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import { CloudRetrieveParams, MetadataFilter } from "llamaindex";
22

33
export function generateFilters(documentIds: string[]) {
4-
// public documents don't have the "private" field or it's set to "false"
4+
// public documents (ingested by "npm run generate" or in the LlamaCloud UI) don't have the "private" field
55
const publicDocumentsFilter: MetadataFilter = {
66
key: "private",
77
operator: "is_empty",

templates/types/streaming/express/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -21,7 +21,7 @@
2121
"dotenv": "^16.3.1",
2222
"duck-duck-scrape": "^2.2.5",
2323
"express": "^4.18.2",
24-
"llamaindex": "0.6.18",
24+
"llamaindex": "0.6.19",
2525
"pdf2json": "3.0.5",
2626
"ajv": "^8.12.0",
2727
"@e2b/code-interpreter": "0.0.9-beta.3",

templates/types/streaming/fastapi/app/api/routers/chat_config.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,10 @@
11
import logging
22
import os
33

4-
from fastapi import APIRouter
4+
from fastapi import APIRouter, HTTPException
55

66
from app.api.routers.models import ChatConfig
77

8-
98
config_router = r = APIRouter()
109

1110
logger = logging.getLogger("uvicorn")
@@ -27,6 +26,10 @@ async def chat_config() -> ChatConfig:
2726

2827
@r.get("/llamacloud")
2928
async def chat_llama_cloud_config():
29+
if not os.getenv("LLAMA_CLOUD_API_KEY"):
30+
raise HTTPException(
31+
status_code=500, detail="LlamaCloud API KEY is not configured"
32+
)
3033
projects = LLamaCloudFileService.get_all_projects_with_pipelines()
3134
pipeline = os.getenv("LLAMA_CLOUD_INDEX_NAME")
3235
project = os.getenv("LLAMA_CLOUD_PROJECT_NAME")

templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,16 @@ export function LlamaCloudSelector({
6666
useEffect(() => {
6767
if (process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true" && !config) {
6868
fetch(`${backend}/api/chat/config/llamacloud`)
69-
.then((response) => response.json())
69+
.then((response) => {
70+
if (!response.ok) {
71+
return response.json().then((errorData) => {
72+
window.alert(
73+
`Error: ${JSON.stringify(errorData) || "Unknown error occurred"}`,
74+
);
75+
});
76+
}
77+
return response.json();
78+
})
7079
.then((data) => {
7180
const pipeline = defaultPipeline ?? data.pipeline; // defaultPipeline will override pipeline in .env
7281
setConfig({ ...data, pipeline });

templates/types/streaming/nextjs/package.json

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@
2727
"duck-duck-scrape": "^2.2.5",
2828
"formdata-node": "^6.0.3",
2929
"got": "^14.4.1",
30-
"llamaindex": "0.6.18",
30+
"llamaindex": "0.6.19",
3131
"lucide-react": "^0.294.0",
3232
"next": "^14.2.4",
3333
"react": "^18.2.0",

0 commit comments

Comments
 (0)