Commit 9dde6d0
feat: simplify questions asked (#370)
1 parent 98a82b0 · commit 9dde6d0

24 files changed: +977 −1025 lines

helpers/providers/anthropic.ts (+1 −1)

@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 const MODELS = [
   "claude-3-opus",

helpers/providers/azure.ts (+1 −1)

@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 
 const ALL_AZURE_OPENAI_CHAT_MODELS: Record<string, { openAIModel: string }> = {
   "gpt-35-turbo": { openAIModel: "gpt-3.5-turbo" },

helpers/providers/gemini.ts (+1 −1)

@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 const MODELS = ["gemini-1.5-pro-latest", "gemini-pro", "gemini-pro-vision"];
 type ModelData = {

helpers/providers/groq.ts (+1 −1)

@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 import got from "got";
 import ora from "ora";

helpers/providers/index.ts (+2 −3)

@@ -1,6 +1,5 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 import { ModelConfig, ModelProvider, TemplateFramework } from "../types";
 import { askAnthropicQuestions } from "./anthropic";
 import { askAzureQuestions } from "./azure";
@@ -27,7 +26,7 @@ export async function askModelConfig({
   framework,
 }: ModelConfigQuestionsParams): Promise<ModelConfig> {
   let modelProvider: ModelProvider = DEFAULT_MODEL_PROVIDER;
-  if (askModels && !ciInfo.isCI) {
+  if (askModels) {
     let choices = [
       { title: "OpenAI", value: "openai" },
       { title: "Groq", value: "groq" },

helpers/providers/llmhub.ts (+1 −1)

@@ -4,7 +4,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 
 export const TSYSTEMS_LLMHUB_API_URL =
   "https://llm-server.llmhub.t-systems.net/v2";

helpers/providers/mistral.ts (+1 −1)

@@ -1,7 +1,7 @@
 import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 const MODELS = ["mistral-tiny", "mistral-small", "mistral-medium"];
 type ModelData = {

helpers/providers/ollama.ts (+1 −1)

@@ -3,7 +3,7 @@ import ollama, { type ModelResponse } from "ollama";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
-import { questionHandlers, toChoice } from "../../questions";
+import { questionHandlers, toChoice } from "../../questions/utils";
 
 type ModelData = {
   dimensions: number;

helpers/providers/openai.ts (+1 −1)

@@ -4,7 +4,7 @@ import ora from "ora";
 import { red } from "picocolors";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
-import { questionHandlers } from "../../questions";
+import { questionHandlers } from "../../questions/utils";
 
 const OPENAI_API_URL = "https://api.openai.com/v1";

helpers/python.ts (+6 −6)

@@ -93,6 +93,12 @@ const getAdditionalDependencies = (
       });
       break;
     }
+    case "llamacloud":
+      dependencies.push({
+        name: "llama-index-indices-managed-llama-cloud",
+        version: "^0.3.1",
+      });
+      break;
   }
 
   // Add data source dependencies
@@ -127,12 +133,6 @@ const getAdditionalDependencies = (
           version: "^2.9.9",
         });
        break;
-      case "llamacloud":
-        dependencies.push({
-          name: "llama-index-indices-managed-llama-cloud",
-          version: "^0.3.1",
-        });
-        break;
     }
   }
 }

helpers/types.ts (+1 −1)

@@ -46,7 +46,7 @@ export type TemplateDataSource = {
   type: TemplateDataSourceType;
   config: TemplateDataSourceConfig;
 };
-export type TemplateDataSourceType = "file" | "web" | "db" | "llamacloud";
+export type TemplateDataSourceType = "file" | "web" | "db";
 export type TemplateObservability = "none" | "traceloop" | "llamatrace";
 // Config for both file and folder
 export type FileSourceConfig = {
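Together with the helpers/python.ts hunks above and the `--llamacloud` handling in index.ts below, this type change remodels LlamaCloud as a vector-store choice rather than a data source. A minimal sketch of the new wiring; the `vectorDb` variable's type and the `EXAMPLE_FILE` value are assumptions based on the values appearing in this diff:

```ts
type TemplateDataSourceType = "file" | "web" | "db"; // "llamacloud" removed
type TemplateDataSource = { type: TemplateDataSourceType; config: unknown };

const EXAMPLE_FILE: TemplateDataSource = { type: "file", config: {} }; // assumed shape

let dataSources: TemplateDataSource[] = [];
let vectorDb: string | undefined; // assumed to accept "llamacloud"

if (process.argv.includes("--llamacloud")) {
  // Before: dataSources = [{ type: "llamacloud", config: {} }, EXAMPLE_FILE]
  dataSources = [EXAMPLE_FILE];
  vectorDb = "llamacloud";
}
```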

index.ts (+57 −87)

@@ -1,7 +1,6 @@
 /* eslint-disable import/no-extraneous-dependencies */
 import { execSync } from "child_process";
-import Commander from "commander";
-import Conf from "conf";
+import { Command } from "commander";
 import fs from "fs";
 import path from "path";
 import { bold, cyan, green, red, yellow } from "picocolors";
@@ -17,8 +16,9 @@ import { runApp } from "./helpers/run-app";
 import { getTools } from "./helpers/tools";
 import { validateNpmName } from "./helpers/validate-pkg";
 import packageJson from "./package.json";
-import { QuestionArgs, askQuestions, onPromptState } from "./questions";
-
+import { askQuestions } from "./questions/index";
+import { QuestionArgs } from "./questions/types";
+import { onPromptState } from "./questions/utils";
 // Run the initialization function
 initializeGlobalAgent();
 
@@ -29,12 +29,14 @@ const handleSigTerm = () => process.exit(0);
 process.on("SIGINT", handleSigTerm);
 process.on("SIGTERM", handleSigTerm);
 
-const program = new Commander.Command(packageJson.name)
+const program = new Command(packageJson.name)
   .version(packageJson.version)
-  .arguments("<project-directory>")
-  .usage(`${green("<project-directory>")} [options]`)
+  .arguments("[project-directory]")
+  .usage(`${green("[project-directory]")} [options]`)
   .action((name) => {
-    projectPath = name;
+    if (name) {
+      projectPath = name;
+    }
   })
   .option(
     "--use-npm",
@@ -55,13 +57,6 @@ const program = new Commander.Command(packageJson.name)
     `
 
 Explicitly tell the CLI to bootstrap the application using Yarn
-`,
-  )
-  .option(
-    "--reset-preferences",
-    `
-
-Explicitly tell the CLI to reset any stored preferences
 `,
   )
   .option(
@@ -124,7 +119,14 @@ const program = new Commander.Command(packageJson.name)
     "--frontend",
     `
 
-Whether to generate a frontend for your backend.
+Generate a frontend for your backend.
+`,
+  )
+  .option(
+    "--no-frontend",
+    `
+
+Do not generate a frontend for your backend.
 `,
   )
   .option(
@@ -161,6 +163,13 @@ const program = new Commander.Command(packageJson.name)
 
 Specify the tools you want to use by providing a comma-separated list. For example, 'wikipedia.WikipediaToolSpec,google.GoogleSearchToolSpec'. Use 'none' to not using any tools.
 `,
+    (tools, _) => {
+      if (tools === "none") {
+        return [];
+      } else {
+        return getTools(tools.split(","));
+      }
+    },
   )
   .option(
     "--use-llama-parse",
@@ -189,86 +198,66 @@ const program = new Commander.Command(packageJson.name)
 
 Allow interactive selection of LLM and embedding models of different model providers.
 `,
+    false,
   )
   .option(
-    "--ask-examples",
+    "--pro",
     `
 
-Allow interactive selection of community templates and LlamaPacks.
+Allow interactive selection of all features.
 `,
+    false,
   )
   .allowUnknownOption()
   .parse(process.argv);
-if (process.argv.includes("--no-frontend")) {
-  program.frontend = false;
-}
-if (process.argv.includes("--tools")) {
-  if (program.tools === "none") {
-    program.tools = [];
-  } else {
-    program.tools = getTools(program.tools.split(","));
-  }
-}
+
+const options = program.opts();
+
 if (
   process.argv.includes("--no-llama-parse") ||
-  program.template === "extractor"
+  options.template === "extractor"
 ) {
-  program.useLlamaParse = false;
+  options.useLlamaParse = false;
 }
-program.askModels = process.argv.includes("--ask-models");
-program.askExamples = process.argv.includes("--ask-examples");
 if (process.argv.includes("--no-files")) {
-  program.dataSources = [];
+  options.dataSources = [];
 } else if (process.argv.includes("--example-file")) {
-  program.dataSources = getDataSources(program.files, program.exampleFile);
+  options.dataSources = getDataSources(options.files, options.exampleFile);
 } else if (process.argv.includes("--llamacloud")) {
-  program.dataSources = [
-    {
-      type: "llamacloud",
-      config: {},
-    },
-    EXAMPLE_FILE,
-  ];
+  options.dataSources = [EXAMPLE_FILE];
+  options.vectorDb = "llamacloud";
 } else if (process.argv.includes("--web-source")) {
-  program.dataSources = [
+  options.dataSources = [
     {
       type: "web",
       config: {
-        baseUrl: program.webSource,
-        prefix: program.webSource,
+        baseUrl: options.webSource,
+        prefix: options.webSource,
         depth: 1,
       },
     },
   ];
 } else if (process.argv.includes("--db-source")) {
-  program.dataSources = [
+  options.dataSources = [
     {
       type: "db",
       config: {
-        uri: program.dbSource,
-        queries: program.dbQuery || "SELECT * FROM mytable",
+        uri: options.dbSource,
+        queries: options.dbQuery || "SELECT * FROM mytable",
       },
     },
   ];
 }
 
-const packageManager = !!program.useNpm
+const packageManager = !!options.useNpm
   ? "npm"
-  : !!program.usePnpm
+  : !!options.usePnpm
     ? "pnpm"
-    : !!program.useYarn
+    : !!options.useYarn
       ? "yarn"
      : getPkgManager();
 
 async function run(): Promise<void> {
-  const conf = new Conf({ projectName: "create-llama" });
-
-  if (program.resetPreferences) {
-    conf.clear();
-    console.log(`Preferences reset successfully`);
-    return;
-  }
-
   if (typeof projectPath === "string") {
     projectPath = projectPath.trim();
   }
@@ -331,35 +320,16 @@ async function run(): Promise<void> {
     process.exit(1);
   }
 
-  const preferences = (conf.get("preferences") || {}) as QuestionArgs;
-  await askQuestions(
-    program as unknown as QuestionArgs,
-    preferences,
-    program.openAiKey,
-  );
+  const answers = await askQuestions(options as unknown as QuestionArgs);
 
   await createApp({
-    template: program.template,
-    framework: program.framework,
-    ui: program.ui,
+    ...answers,
     appPath: resolvedProjectPath,
     packageManager,
-    frontend: program.frontend,
-    modelConfig: program.modelConfig,
-    llamaCloudKey: program.llamaCloudKey,
-    communityProjectConfig: program.communityProjectConfig,
-    llamapack: program.llamapack,
-    vectorDb: program.vectorDb,
-    externalPort: program.externalPort,
-    postInstallAction: program.postInstallAction,
-    dataSources: program.dataSources,
-    tools: program.tools,
-    useLlamaParse: program.useLlamaParse,
-    observability: program.observability,
+    externalPort: options.externalPort,
   });
-  conf.set("preferences", preferences);
 
-  if (program.postInstallAction === "VSCode") {
+  if (answers.postInstallAction === "VSCode") {
     console.log(`Starting VSCode in ${root}...`);
     try {
       execSync(`code . --new-window --goto README.md`, {
@@ -383,15 +353,15 @@ Please check ${cyan(
       )} for more information.`,
     );
   }
-  } else if (program.postInstallAction === "runApp") {
+  } else if (answers.postInstallAction === "runApp") {
     console.log(`Running app in ${root}...`);
     await runApp(
       root,
-      program.template,
-      program.frontend,
-      program.framework,
-      program.port,
-      program.externalPort,
+      answers.template,
+      answers.frontend,
+      answers.framework,
+      options.port,
+      options.externalPort,
     );
   }
 }
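Most of this file's changes follow from the commander 2 → 12 upgrade (see package.json below): the named `Command` import replaces the namespace import, parsed flags are read from `program.opts()` instead of properties on `program` itself, and the ad-hoc `process.argv` post-processing becomes declarative option defaults and coercion callbacks. A self-contained sketch of that pattern, with a hypothetical `getTools` resolver standing in for the real one in ./helpers/tools:

```ts
import { Command } from "commander";

// Hypothetical stand-in for the real resolver in ./helpers/tools.
const getTools = (names: string[]) => names.map((name) => ({ name }));

const program = new Command("create-llama")
  .arguments("[project-directory]") // optional: interactive prompting can supply it
  .option("--frontend", "Generate a frontend for your backend.")
  .option("--no-frontend", "Do not generate a frontend for your backend.")
  .option("--ask-models", "Interactively select LLM and embedding models.", false)
  .option(
    "--tools <tools>",
    "Comma-separated list of tools, or 'none'.",
    // Coercion callback runs at parse time, replacing the old
    // post-parse `if (process.argv.includes("--tools"))` block.
    (tools: string) => (tools === "none" ? [] : getTools(tools.split(","))),
  )
  .allowUnknownOption()
  .parse(process.argv);

// Since commander v7, parsed values live on .opts(), not on `program` --
// hence the program.* -> options.* rewrite throughout this file.
const options = program.opts();
console.log(options.frontend, options.askModels, options.tools);
```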

package.json (+2 −3)

@@ -49,8 +49,7 @@
     "async-retry": "1.3.1",
     "async-sema": "3.0.1",
     "ci-info": "github:watson/ci-info#f43f6a1cefff47fb361c88cf4b943fdbcaafe540",
-    "commander": "2.20.0",
-    "conf": "10.2.0",
+    "commander": "12.1.0",
     "cross-spawn": "7.0.3",
     "fast-glob": "3.3.1",
     "fs-extra": "11.2.0",
@@ -59,7 +58,7 @@
     "ollama": "^0.5.0",
     "ora": "^8.0.1",
     "picocolors": "1.0.0",
-    "prompts": "2.1.0",
+    "prompts": "2.4.2",
     "smol-toml": "^1.1.4",
     "tar": "6.1.15",
     "terminal-link": "^3.0.0",

0 commit comments

Comments
 (0)