@@ -11,12 +11,10 @@ import type {
11
11
} from "../helpers" ;
12
12
import { createTestDir , runCreateLlama , type AppType } from "./utils" ;
13
13
14
- const templateTypes : TemplateType [ ] = [ "streaming" ] ;
15
- const templateFrameworks : TemplateFramework [ ] = [
16
- "nextjs" ,
17
- "express" ,
18
- "fastapi" ,
19
- ] ;
14
+ const templateType : TemplateType = "streaming" ;
15
+ const templateFrameworks : TemplateFramework [ ] = process . env . FRAMEWORKS
16
+ ? ( process . env . FRAMEWORKS . split ( "," ) as TemplateFramework [ ] )
17
+ : [ "fastapi" ] ;
20
18
// Data-source CLI flags exercised by the matrix: an empty project vs. the
// bundled example file.
const dataSources: string[] = ["--no-files", "--example-file"];
// Chat UI flavors exercised by the matrix; currently only the shadcn UI.
const templateUIs: TemplateUI[] = ["shadcn"];
22
20
const templatePostInstallActions : TemplatePostInstallAction [ ] = [
@@ -27,111 +25,109 @@ const templatePostInstallActions: TemplatePostInstallAction[] = [
27
25
// Fixed LlamaCloud project and index names used by the e2e environment.
const llamaCloudProjectName = "create-llama";
const llamaCloudIndexName = "e2e-test";
29
27
30
- for ( const templateType of templateTypes ) {
31
- for ( const templateFramework of templateFrameworks ) {
32
- for ( const dataSource of dataSources ) {
33
- for ( const templateUI of templateUIs ) {
34
- for ( const templatePostInstallAction of templatePostInstallActions ) {
35
- const appType : AppType =
36
- templateFramework === "nextjs" ? "" : "--frontend" ;
37
- const userMessage =
38
- dataSource !== "--no-files"
39
- ? "Physical standard for letters"
40
- : "Hello" ;
41
- test . describe ( `try create-llama ${ templateType } ${ templateFramework } ${ dataSource } ${ templateUI } ${ appType } ${ templatePostInstallAction } ` , async ( ) => {
42
- let port : number ;
43
- let externalPort : number ;
44
- let cwd : string ;
45
- let name : string ;
46
- let appProcess : ChildProcess ;
47
- // Only test without using vector db for now
48
- const vectorDb = "none" ;
49
-
50
- test . beforeAll ( async ( ) => {
51
- port = Math . floor ( Math . random ( ) * 10000 ) + 10000 ;
52
- externalPort = port + 1 ;
53
- cwd = await createTestDir ( ) ;
54
- const result = await runCreateLlama (
55
- cwd ,
56
- templateType ,
57
- templateFramework ,
58
- dataSource ,
59
- templateUI ,
60
- vectorDb ,
61
- appType ,
62
- port ,
63
- externalPort ,
64
- templatePostInstallAction ,
65
- llamaCloudProjectName ,
66
- llamaCloudIndexName ,
67
- ) ;
68
- name = result . projectName ;
69
- appProcess = result . appProcess ;
70
- } ) ;
28
+ for ( const templateFramework of templateFrameworks ) {
29
+ for ( const dataSource of dataSources ) {
30
+ for ( const templateUI of templateUIs ) {
31
+ for ( const templatePostInstallAction of templatePostInstallActions ) {
32
+ const appType : AppType =
33
+ templateFramework === "nextjs" ? "" : "--frontend" ;
34
+ const userMessage =
35
+ dataSource !== "--no-files"
36
+ ? "Physical standard for letters"
37
+ : "Hello" ;
38
+ test . describe ( `try create-llama ${ templateType } ${ templateFramework } ${ dataSource } ${ templateUI } ${ appType } ${ templatePostInstallAction } ` , async ( ) => {
39
+ let port : number ;
40
+ let externalPort : number ;
41
+ let cwd : string ;
42
+ let name : string ;
43
+ let appProcess : ChildProcess ;
44
+ // Only test without using vector db for now
45
+ const vectorDb = "none" ;
71
46
72
- test ( "App folder should exist" , async ( ) => {
73
- const dirExists = fs . existsSync ( path . join ( cwd , name ) ) ;
74
- expect ( dirExists ) . toBeTruthy ( ) ;
75
- } ) ;
76
- test ( "Frontend should have a title" , async ( { page } ) => {
77
- test . skip ( templatePostInstallAction !== "runApp" ) ;
78
- await page . goto ( `http://localhost:${ port } ` ) ;
79
- await expect ( page . getByText ( "Built by LlamaIndex" ) ) . toBeVisible ( ) ;
80
- } ) ;
47
+ test . beforeAll ( async ( ) => {
48
+ port = Math . floor ( Math . random ( ) * 10000 ) + 10000 ;
49
+ externalPort = port + 1 ;
50
+ cwd = await createTestDir ( ) ;
51
+ const result = await runCreateLlama (
52
+ cwd ,
53
+ templateType ,
54
+ templateFramework ,
55
+ dataSource ,
56
+ templateUI ,
57
+ vectorDb ,
58
+ appType ,
59
+ port ,
60
+ externalPort ,
61
+ templatePostInstallAction ,
62
+ llamaCloudProjectName ,
63
+ llamaCloudIndexName ,
64
+ ) ;
65
+ name = result . projectName ;
66
+ appProcess = result . appProcess ;
67
+ } ) ;
81
68
82
- test ( "Frontend should be able to submit a message and receive a response" , async ( {
83
- page,
84
- } ) => {
85
- test . skip ( templatePostInstallAction !== "runApp" ) ;
86
- await page . goto ( `http://localhost:${ port } ` ) ;
87
- await page . fill ( "form input" , userMessage ) ;
88
- const [ response ] = await Promise . all ( [
89
- page . waitForResponse (
90
- ( res ) => {
91
- return (
92
- res . url ( ) . includes ( "/api/chat" ) && res . status ( ) === 200
93
- ) ;
94
- } ,
95
- {
96
- timeout : 1000 * 60 ,
97
- } ,
98
- ) ,
99
- page . click ( "form button[type=submit]" ) ,
100
- ] ) ;
101
- const text = await response . text ( ) ;
102
- console . log ( "AI response when submitting message: " , text ) ;
103
- expect ( response . ok ( ) ) . toBeTruthy ( ) ;
104
- } ) ;
69
+ test ( "App folder should exist" , async ( ) => {
70
+ const dirExists = fs . existsSync ( path . join ( cwd , name ) ) ;
71
+ expect ( dirExists ) . toBeTruthy ( ) ;
72
+ } ) ;
73
+ test ( "Frontend should have a title" , async ( { page } ) => {
74
+ test . skip ( templatePostInstallAction !== "runApp" ) ;
75
+ await page . goto ( `http://localhost:${ port } ` ) ;
76
+ await expect ( page . getByText ( "Built by LlamaIndex" ) ) . toBeVisible ( ) ;
77
+ } ) ;
105
78
106
- test ( "Backend frameworks should response when calling non-streaming chat API" , async ( {
107
- request,
108
- } ) => {
109
- test . skip ( templatePostInstallAction !== "runApp" ) ;
110
- test . skip ( templateFramework === "nextjs" ) ;
111
- const response = await request . post (
112
- `http://localhost:${ externalPort } /api/chat/request` ,
79
+ test ( "Frontend should be able to submit a message and receive a response" , async ( {
80
+ page,
81
+ } ) => {
82
+ test . skip ( templatePostInstallAction !== "runApp" ) ;
83
+ await page . goto ( `http://localhost:${ port } ` ) ;
84
+ await page . fill ( "form input" , userMessage ) ;
85
+ const [ response ] = await Promise . all ( [
86
+ page . waitForResponse (
87
+ ( res ) => {
88
+ return (
89
+ res . url ( ) . includes ( "/api/chat" ) && res . status ( ) === 200
90
+ ) ;
91
+ } ,
113
92
{
114
- data : {
115
- messages : [
116
- {
117
- role : "user" ,
118
- content : userMessage ,
119
- } ,
120
- ] ,
121
- } ,
93
+ timeout : 1000 * 60 ,
122
94
} ,
123
- ) ;
124
- const text = await response . text ( ) ;
125
- console . log ( "AI response when calling API: " , text ) ;
126
- expect ( response . ok ( ) ) . toBeTruthy ( ) ;
127
- } ) ;
95
+ ) ,
96
+ page . click ( "form button[type=submit]" ) ,
97
+ ] ) ;
98
+ const text = await response . text ( ) ;
99
+ console . log ( "AI response when submitting message: " , text ) ;
100
+ expect ( response . ok ( ) ) . toBeTruthy ( ) ;
101
+ } ) ;
102
+
103
+ test ( "Backend frameworks should response when calling non-streaming chat API" , async ( {
104
+ request,
105
+ } ) => {
106
+ test . skip ( templatePostInstallAction !== "runApp" ) ;
107
+ test . skip ( templateFramework === "nextjs" ) ;
108
+ const response = await request . post (
109
+ `http://localhost:${ externalPort } /api/chat/request` ,
110
+ {
111
+ data : {
112
+ messages : [
113
+ {
114
+ role : "user" ,
115
+ content : userMessage ,
116
+ } ,
117
+ ] ,
118
+ } ,
119
+ } ,
120
+ ) ;
121
+ const text = await response . text ( ) ;
122
+ console . log ( "AI response when calling API: " , text ) ;
123
+ expect ( response . ok ( ) ) . toBeTruthy ( ) ;
124
+ } ) ;
128
125
129
- // clean processes
130
- test . afterAll ( async ( ) => {
131
- appProcess ?. kill ( ) ;
132
- } ) ;
126
+ // clean processes
127
+ test . afterAll ( async ( ) => {
128
+ appProcess ?. kill ( ) ;
133
129
} ) ;
134
- }
130
+ } ) ;
135
131
}
136
132
}
137
133
}
0 commit comments