Skip to content

Commit 0d7077e

Browse files
committed
remove structurellm
1 parent a9ff7d0 commit 0d7077e

File tree

2 files changed

+34
-26
lines changed

2 files changed

+34
-26
lines changed

helpers/env-variables.ts

Lines changed: 14 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -486,26 +486,28 @@ It\\'s cute animal.
486486
return systemPromptEnv;
487487
};
488488

489-
const getTemplateEnvs = (
490-
template?: TemplateType,
491-
framework?: TemplateFramework,
492-
): EnvVar[] => {
489+
const getTemplateEnvs = (template?: TemplateType): EnvVar[] => {
493490
const nextQuestionEnvs: EnvVar[] = [
494491
{
495-
name: "NEXT_QUESTION_PROMPT",
492+
name: "NEXT_QUESTION_PROMPT_TEMPLATE",
496493
description: `Customize prompt to generate the next question suggestions based on the conversation history.
497494
Disable this prompt to disable the next question suggestions feature.`,
498495
value: `"You're a helpful assistant! Your task is to suggest the next question that user might ask.
499496
Here is the conversation history
500-
---------------------\n{conversation}\n---------------------
501-
Given the conversation history, please give me 3 questions that you might ask next!"`,
497+
---------------------
498+
$conversation
499+
---------------------
500+
Given the conversation history, please give me 3 questions that you might ask next!
501+
Your answer should be wrapped in triple backticks and follow this format:
502+
\`\`\`
503+
<question 1>
504+
<question 2>
505+
<question 3>
506+
\`\`\`"`,
502507
},
503508
];
504509

505-
if (
506-
framework === "fastapi" &&
507-
(template === "multiagent" || template === "streaming")
508-
) {
510+
if (template === "multiagent" || template === "streaming") {
509511
return nextQuestionEnvs;
510512
}
511513
return [];
@@ -555,7 +557,7 @@ export const createBackendEnvFile = async (
555557
...getVectorDBEnvs(opts.vectorDb, opts.framework),
556558
...getFrameworkEnvs(opts.framework, opts.externalPort),
557559
...getToolEnvs(opts.tools),
558-
...getTemplateEnvs(opts.template, opts.framework),
560+
...getTemplateEnvs(opts.template),
559561
...getObservabilityEnvs(opts.observability),
560562
...getSystemPromptEnv(opts.tools, opts.dataSources, opts.framework),
561563
];

templates/components/services/python/suggestion.py

Lines changed: 20 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -1,26 +1,27 @@
11
import logging
22
import os
3+
import re
34
from typing import List, Optional
45

56
from app.api.routers.models import Message
67
from llama_index.core.prompts import PromptTemplate
78
from llama_index.core.settings import Settings
8-
from pydantic import BaseModel
99

1010
logger = logging.getLogger("uvicorn")
1111

1212

13-
class NextQuestions(BaseModel):
14-
"""A list of questions that user might ask next"""
15-
16-
questions: List[str]
17-
18-
1913
class NextQuestionSuggestion:
14+
"""
15+
Suggest the next questions that user might ask based on the conversation history
16+
Disable this feature by removing the NEXT_QUESTION_PROMPT_TEMPLATE environment variable
17+
"""
2018

2119
@classmethod
2220
def get_configured_prompt(cls) -> Optional[str]:
23-
return os.getenv("NEXT_QUESTION_PROMPT", None)
21+
prompt = os.getenv("NEXT_QUESTION_PROMPT_TEMPLATE", None)
22+
if not prompt:
23+
return None
24+
return PromptTemplate(prompt)
2425

2526
@classmethod
2627
async def suggest_next_questions(
@@ -48,13 +49,18 @@ async def suggest_next_questions(
4849
break
4950
conversation: str = f"{last_user_message}\n{last_assistant_message}"
5051

51-
output: NextQuestions = await Settings.llm.astructured_predict(
52-
NextQuestions,
53-
prompt=PromptTemplate(prompt_template),
54-
conversation=conversation,
55-
)
52+
# Call the LLM and parse questions from the output
53+
prompt = prompt_template.format(conversation=conversation)
54+
output = await Settings.llm.acomplete(prompt)
55+
questions = cls._extract_questions(output.text)
5656

57-
return output.questions
57+
return questions
5858
except Exception as e:
5959
logger.error(f"Error when generating next question: {e}")
6060
return None
61+
62+
@classmethod
63+
def _extract_questions(cls, text: str) -> List[str]:
64+
content_match = re.search(r"```(.*?)```", text, re.DOTALL)
65+
content = content_match.group(1) if content_match else ""
66+
return content.strip().split("\n")

0 commit comments

Comments (0)