Commit 384a136

Add mypy checker for importing and update CI condition (#387)

1 parent 189c0e3 commit 384a136

22 files changed (+41, -51 lines)

.changeset/two-masks-design.md (+5)

@@ -0,0 +1,5 @@
+---
+"create-llama": patch
+---
+
+Fix import error if the artifact tool is selected

helpers/providers/anthropic.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -70,9 +69,7 @@ export async function askAnthropicQuestions({
     config.apiKey = key || process.env.ANTHROPIC_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",

helpers/providers/azure.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams, ModelConfigQuestionsParams } from ".";
 import { questionHandlers } from "../../questions/utils";
@@ -67,9 +66,7 @@ export async function askAzureQuestions({
     },
   };
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",

helpers/providers/gemini.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -54,9 +53,7 @@ export async function askGeminiQuestions({
     config.apiKey = key || process.env.GOOGLE_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",

helpers/providers/groq.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -110,9 +109,7 @@ export async function askGroqQuestions({
     config.apiKey = key || process.env.GROQ_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const modelChoices = await getAvailableModelChoicesGroq(config.apiKey!);
 
     const { model } = await prompts(

helpers/providers/llmhub.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import got from "got";
 import ora from "ora";
 import { red } from "picocolors";
@@ -80,9 +79,7 @@ export async function askLLMHubQuestions({
     config.apiKey = key || process.env.T_SYSTEMS_LLMHUB_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",

helpers/providers/mistral.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import prompts from "prompts";
 import { ModelConfigParams } from ".";
 import { questionHandlers, toChoice } from "../../questions/utils";
@@ -53,9 +52,7 @@ export async function askMistralQuestions({
     config.apiKey = key || process.env.MISTRAL_API_KEY;
   }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",

helpers/providers/ollama.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import ollama, { type ModelResponse } from "ollama";
 import { red } from "picocolors";
 import prompts from "prompts";
@@ -34,9 +33,7 @@ export async function askOllamaQuestions({
     },
   };
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",

helpers/providers/openai.ts (+1, -4)

@@ -1,4 +1,3 @@
-import ciInfo from "ci-info";
 import got from "got";
 import ora from "ora";
 import { red } from "picocolors";
@@ -54,9 +53,7 @@ export async function askOpenAIQuestions({
     config.apiKey = key || process.env.OPENAI_API_KEY;
  }
 
-  // use default model values in CI or if user should not be asked
-  const useDefaults = ciInfo.isCI || !askModels;
-  if (!useDefaults) {
+  if (askModels) {
     const { model } = await prompts(
       {
         type: "select",

helpers/tools.ts (+1, -1)

@@ -170,7 +170,7 @@ For better results, you can specify the region parameter to get results from a s
     dependencies: [
       {
         name: "e2b_code_interpreter",
-        version: "^0.0.11b38",
+        version: "0.0.11b38",
       },
     ],
     supportedFrameworks: ["fastapi", "express", "nextjs"],

questions/index.ts (+1, -1)

@@ -7,7 +7,7 @@ import { QuestionArgs, QuestionResults } from "./types";
 export const askQuestions = async (
   args: QuestionArgs,
 ): Promise<QuestionResults> => {
-  if (ciInfo.isCI) {
+  if (ciInfo.isCI || process.env.PLAYWRIGHT_TEST === "1") {
     return await getCIQuestionResults(args);
   } else if (args.pro) {
     // TODO: refactor pro questions to return a result object
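
The new condition lets Playwright-driven end-to-end tests receive the same default answers as CI, even on machines where ci-info reports isCI as false. A hedged usage sketch; the PLAYWRIGHT_TEST name comes from the diff above, while the CLI invocation itself is an assumption:

import { execSync } from "node:child_process";

// Force the non-interactive question path from a test setup script:
// PLAYWRIGHT_TEST=1 sends askQuestions down the getCIQuestionResults branch.
const output = execSync("npx create-llama my-app", {
  env: { ...process.env, PLAYWRIGHT_TEST: "1" },
  encoding: "utf-8",
});
console.log(output);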

templates/components/engines/python/agent/tools/document_generator.py (+1, -1)

@@ -105,7 +105,7 @@ def _generate_html_content(cls, original_content: str) -> str:
         Generate HTML content from the original markdown content.
         """
         try:
-            import markdown
+            import markdown  # type: ignore
         except ImportError:
             raise ImportError(
                 "Failed to import required modules. Please install markdown."

templates/components/engines/python/agent/tools/img_gen.py (+1, -1)

@@ -3,7 +3,7 @@
 import uuid
 from typing import Optional
 
-import requests
+import requests  # type: ignore
 from llama_index.core.tools import FunctionTool
 from pydantic import BaseModel, Field

templates/components/engines/python/agent/tools/openapi_action.py (+4, -2)

@@ -1,4 +1,5 @@
 from typing import Dict, List, Tuple
+
 from llama_index.tools.openapi import OpenAPIToolSpec
 from llama_index.tools.requests import RequestsToolSpec
 
@@ -43,11 +44,12 @@ def _load_openapi_spec(uri: str) -> Tuple[Dict, List[str]]:
     Returns:
         List[Document]: A list of Document objects.
     """
-    import yaml
     from urllib.parse import urlparse
 
+    import yaml  # type: ignore
+
     if uri.startswith("http"):
-        import requests
+        import requests  # type: ignore
 
         response = requests.get(uri)
         if response.status_code != 200:

templates/components/engines/python/agent/tools/weather.py (+3, -2)

@@ -1,8 +1,9 @@
 """Open Meteo weather map tool spec."""
 
 import logging
-import requests
-import pytz
+
+import pytz  # type: ignore
+import requests  # type: ignore
 from llama_index.core.tools import FunctionTool
 
 logger = logging.getLogger(__name__)

templates/components/routers/python/sandbox.py (+3, -3)

@@ -20,8 +20,8 @@
 from typing import Any, Dict, List, Optional, Union
 
 from app.engine.tools.artifact import CodeArtifact
-from app.engine.utils.file_helper import save_file
-from e2b_code_interpreter import CodeInterpreter, Sandbox
+from app.services.file import FileService
+from e2b_code_interpreter import CodeInterpreter, Sandbox  # type: ignore
 from fastapi import APIRouter, HTTPException, Request
 from pydantic import BaseModel
 
@@ -175,7 +175,7 @@ def _download_cell_results(cell_results: Optional[List]) -> List[Dict[str, str]]
             base64_data = data
             buffer = base64.b64decode(base64_data)
             file_name = f"{uuid.uuid4()}.{ext}"
-            file_meta = save_file(
+            file_meta = FileService.save_file(
                 content=buffer,
                 file_name=file_name,
                 save_dir=os.path.join("output", "tools"),

templates/components/vectordbs/python/llamacloud/generate.py (+1, -1)

@@ -8,7 +8,7 @@
 import logging
 
 from app.engine.index import get_client, get_index
-from app.engine.service import LLamaCloudFileService
+from app.engine.service import LLamaCloudFileService  # type: ignore
 from app.settings import init_settings
 from llama_cloud import PipelineType
 from llama_index.core.readers import SimpleDirectoryReader

templates/types/streaming/fastapi/app/api/routers/__init__.py (+1, -1)

@@ -11,7 +11,7 @@
 
 # Dynamically adding additional routers if they exist
 try:
-    from .sandbox import sandbox_router  # noqa: F401
+    from .sandbox import sandbox_router  # type: ignore
 
     api_router.include_router(sandbox_router, prefix="/sandbox")
 except ImportError:

templates/types/streaming/fastapi/app/api/routers/chat_config.py (+4, -2)

@@ -12,15 +12,17 @@
 
 def _is_llama_cloud_service_configured():
     try:
-        from app.engine.service import LLamaCloudFileService  # noqa
+        from app.engine.service import (
+            LLamaCloudFileService,  # type: ignore # noqa: F401
+        )
 
         return True
     except ImportError:
         return False
 
 
 async def chat_llama_cloud_config():
-    from app.engine.service import LLamaCloudFileService
+    from app.engine.service import LLamaCloudFileService  # type: ignore
 
     if not os.getenv("LLAMA_CLOUD_API_KEY"):
         raise HTTPException(

templates/types/streaming/fastapi/app/api/routers/vercel_response.py (+1, -1)

@@ -138,7 +138,7 @@ def _process_response_nodes(
 ):
     try:
         # Start background tasks to download documents from LlamaCloud if needed
-        from app.engine.service import LLamaCloudFileService
+        from app.engine.service import LLamaCloudFileService  # type: ignore
 
         LLamaCloudFileService.download_files_from_nodes(
             source_nodes, background_tasks

templates/types/streaming/fastapi/app/services/file.py (+2, -2)

@@ -241,7 +241,7 @@ def _add_file_to_llama_cloud_index(
         LlamaCloudIndex is a managed index so we can directly use the files.
         """
         try:
-            from app.engine.service import LLamaCloudFileService
+            from app.engine.service import LLamaCloudFileService  # type: ignore
         except ImportError as e:
             raise ValueError("LlamaCloudFileService is not found") from e
 
@@ -287,7 +287,7 @@ def _default_file_loaders_map():
 
 def _get_available_tools() -> Dict[str, List[FunctionTool]]:
     try:
-        from app.engine.tools import ToolFactory
+        from app.engine.tools import ToolFactory  # type: ignore
     except ImportError:
         logger.warning("ToolFactory not found, no tools will be available")
         return {}

templates/types/streaming/fastapi/pyproject.toml (+5, -1)

@@ -36,4 +36,8 @@ ignore_missing_imports = true
 follow_imports = "silent"
 implicit_optional = true
 strict_optional = false
-disable_error_code = ["return-value", "import-untyped", "assignment"]
+disable_error_code = ["return-value", "assignment"]
+
+[[tool.mypy.overrides]]
+module = "app.*"
+ignore_missing_imports = false
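
This override ties the rest of the commit together: third-party imports are still resolved leniently (ignore_missing_imports = true at the top level, which is why the templates gained # type: ignore comments on untyped packages), while imports under the first-party app.* namespace must now resolve, and import-untyped errors are no longer blanket-disabled. A wiring mistake like the artifact-tool import error this commit fixes should therefore surface when mypy runs, rather than at runtime.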
