Skip to content

Commit 83f6e03

Browse files
committed
update llamacloud
1 parent 224d413 commit 83f6e03

File tree

6 files changed

+25
-18
lines changed

6 files changed

+25
-18
lines changed

templates/components/vectordbs/python/llamacloud/generate.py

Lines changed: 6 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,18 @@
11
# flake8: noqa: E402
22
import os
3+
34
from dotenv import load_dotenv
45

56
load_dotenv()
67

7-
from llama_cloud import PipelineType
8-
9-
from app.settings import init_settings
10-
from llama_index.core.settings import Settings
11-
8+
import logging
129

1310
from app.engine.index import get_client, get_index
14-
15-
import logging
16-
from llama_index.core.readers import SimpleDirectoryReader
1711
from app.engine.service import LLamaCloudFileService
12+
from app.settings import init_settings
13+
from llama_cloud import PipelineType
14+
from llama_index.core.readers import SimpleDirectoryReader
15+
from llama_index.core.settings import Settings
1816

1917
logging.basicConfig(level=logging.INFO)
2018
logger = logging.getLogger()
@@ -83,10 +81,6 @@ def generate_datasource():
8381
project_id,
8482
pipeline_id,
8583
f,
86-
custom_metadata={
87-
# Set private=false to mark the document as public (required for filtering)
88-
"private": "false",
89-
},
9084
)
9185

9286
logger.info("Finished generating the index")

templates/components/vectordbs/python/llamacloud/query_filter.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ def generate_filters(doc_ids):
55
"""
66
Generate public/private document filters based on the doc_ids and the vector store.
77
"""
8-
# Using "is_empty" filter to include the documents that don't have the "private" key because they're uploaded in the LlamaCloud UI
8+
# public documents (ingested by "poetry run generate" or in the LlamaCloud UI) don't have the "private" field
99
public_doc_filter = MetadataFilter(
1010
key="private",
1111
value=None,

templates/components/vectordbs/typescript/llamacloud/generate.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,8 @@ async function* walk(dir: string): AsyncGenerator<string> {
2525

2626
async function loadAndIndex() {
2727
const index = await getDataSource();
28+
// ensure the index is available or create a new one
29+
await index.ensureIndex();
2830
const projectId = await index.getProjectId();
2931
const pipelineId = await index.getPipelineId();
3032

@@ -37,7 +39,6 @@ async function loadAndIndex() {
3739
projectId,
3840
pipelineId,
3941
new File([buffer], filename),
40-
{ private: "false" },
4142
);
4243
} catch (error) {
4344
if (

templates/components/vectordbs/typescript/llamacloud/queryFilter.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
import { CloudRetrieveParams, MetadataFilter } from "llamaindex";
22

33
export function generateFilters(documentIds: string[]) {
4-
// public documents don't have the "private" field or it's set to "false"
4+
// public documents (ingested by "npm run generate" or in the LlamaCloud UI) don't have the "private" field
55
const publicDocumentsFilter: MetadataFilter = {
66
key: "private",
77
operator: "is_empty",

templates/types/streaming/fastapi/app/api/routers/chat_config.py

Lines changed: 5 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,10 @@
11
import logging
22
import os
33

4-
from fastapi import APIRouter
4+
from fastapi import APIRouter, HTTPException
55

66
from app.api.routers.models import ChatConfig
77

8-
98
config_router = r = APIRouter()
109

1110
logger = logging.getLogger("uvicorn")
@@ -27,6 +26,10 @@ async def chat_config() -> ChatConfig:
2726

2827
@r.get("/llamacloud")
2928
async def chat_llama_cloud_config():
29+
if not os.getenv("LLAMA_CLOUD_API_KEY"):
30+
raise HTTPException(
31+
status_code=500, detail="LlamaCloud API KEY is not configured"
32+
)
3033
projects = LLamaCloudFileService.get_all_projects_with_pipelines()
3134
pipeline = os.getenv("LLAMA_CLOUD_INDEX_NAME")
3235
project = os.getenv("LLAMA_CLOUD_PROJECT_NAME")

templates/types/streaming/nextjs/app/components/ui/chat/widgets/LlamaCloudSelector.tsx

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,16 @@ export function LlamaCloudSelector({
6666
useEffect(() => {
6767
if (process.env.NEXT_PUBLIC_USE_LLAMACLOUD === "true" && !config) {
6868
fetch(`${backend}/api/chat/config/llamacloud`)
69-
.then((response) => response.json())
69+
.then((response) => {
70+
if (!response.ok) {
71+
return response.json().then((errorData) => {
72+
window.alert(
73+
`Error: ${JSON.stringify(errorData) || "Unknown error occurred"}`,
74+
);
75+
});
76+
}
77+
return response.json();
78+
})
7079
.then((data) => {
7180
const pipeline = defaultPipeline ?? data.pipeline; // defaultPipeline will override pipeline in .env
7281
setConfig({ ...data, pipeline });

0 commit comments

Comments (0)