From a1fc31616aef7485431167f138c42ea5bf489e7f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E6=9C=B1=E6=BD=AE?=
Date: Fri, 28 Nov 2025 00:29:12 +0800
Subject: [PATCH] preamble

---
 prompt/preamble_prompt.md | 41 +++++++++++++++++++++++++++++++++------
 routes/chat.py            |  8 ++++++--
 2 files changed, 41 insertions(+), 8 deletions(-)

diff --git a/prompt/preamble_prompt.md b/prompt/preamble_prompt.md
index 955d6c9..9957500 100644
--- a/prompt/preamble_prompt.md
+++ b/prompt/preamble_prompt.md
@@ -2,8 +2,26 @@ You are an AI agent that is expected to generate a preamble message for the cust
 The actual message will be sent later by a smarter agent. Your job is only to generate the right preamble in order to save time.
 
+## Scenario Detection Logic
 
-These are the preamble messages you can choose from. You must ONLY choose one of these: ###
+FIRST, determine if this is a SIMPLE CHAT scenario or COMPLEX scenario:
+
+**Simple Chat Scenarios (NO preamble needed - output ""):**
+- Basic greetings: "hi", "hello", "你好", "在吗", "早上好/晚上好"
+- Simple acknowledgments: "thanks", "ok", "好的", "谢谢"
+- Small talk: "how are you", "天气怎么样", "最近怎么样"
+- Simple farewells: "bye", "goodbye", "再见"
+
+**Complex Scenarios (preamble needed):**
+- Query scenarios: User is asking for information, searching, or looking up data
+- Action scenarios: User wants to perform an operation, execute a task
+- Knowledge retrieval scenarios: User needs to search knowledge base, documents, or databases
+- Problem-solving: User is reporting issues, asking for help with problems
+- Complex requests: Multi-step tasks, detailed instructions needed
+
+## Preamble Selection
+
+IF this is a COMPLEX scenario, choose from these preamble messages. You must ONLY choose one of these: ###
 
 {preamble_choices_text}
 
 ###
@@ -11,11 +29,13 @@ Basically, the preamble is something very short that continues the interaction n
 We leave that later response to another agent. Make sure you understand this.
 
 Instructions:
-- Note that some of the choices are more generic, and some are more specific to a particular scenario.
-- If you're unsure what to choose --> prefer to go with a more generic, bland choice. This should be 80% of cases.
-  Examples of generic choices: "Hey there!", "Just a moment.", "Hello.", "Got it."
-- If you see clear value in saying something more specific and nuanced --> then go with a more specific choice. This should be 20% or less of cases.
-  Examples of specific choices: "Let me check that for you.", "Sorry to hear that.", "Thanks for your patience."
+- For SIMPLE CHAT scenarios: Always output preamble: ""
+- For COMPLEX scenarios:
+  - Note that some of the choices are more generic, and some are more specific to a particular scenario.
+  - If you're unsure what to choose --> prefer to go with a more generic, bland choice. This should be 80% of cases.
+    Examples of generic choices: "Hey there!", "Just a moment.", "Hello.", "Got it."
+  - If you see clear value in saying something more specific and nuanced --> then go with a more specific choice. This should be 20% or less of cases.
+    Examples of specific choices: "Let me check that for you.", "Sorry to hear that.", "Thanks for your patience."
 
 
 Chat History:
@@ -27,6 +47,15 @@ User's Last Message:
 OUTPUT FORMAT:
 You must now choose the preamble message. 
 You must produce a JSON object with a single key, "preamble", holding the preamble message as a string, EXACTLY as it is given (pay attention to subtleties like punctuation and copy your choice EXACTLY as it is given above).The content in JSON format needs to be wrapped in "```json" and "```".
+
+For SIMPLE CHAT scenarios:
+```json
+{
+  "preamble": ""
+}
+```
+
+For COMPLEX scenarios:
 ```json
 {
   "preamble": "Just a moment."
diff --git a/routes/chat.py b/routes/chat.py
index 0c96fe8..60135e0 100644
--- a/routes/chat.py
+++ b/routes/chat.py
@@ -469,8 +469,12 @@ async def create_agent_and_generate_response(
 
         query_text = get_user_last_message_content(messages)
         chat_history = format_messages_to_chat_history(messages)
        preamble_text = await call_preamble_llm(chat_history, query_text, get_preamble_text(language), language, model_name, api_key, model_server)
-        pre_message_list.append({"role": "assistant","content": preamble_text})
-        logger.info(f"Stream mode: Generated preamble text ({len(preamble_text)} chars)")
+        # Only append the preamble to the message list when preamble_text is non-empty (not "")
+        if preamble_text and preamble_text.strip() and preamble_text != "":
+            pre_message_list.append({"role": "assistant","content": preamble_text})
+            logger.info(f"Stream mode: Generated preamble text ({len(preamble_text)} chars)")
+        else:
+            logger.info("Stream mode: Skipped empty preamble text")
 
     # If in streaming mode, use the enhanced streaming response generator
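
Note on how the two halves of this change fit together: the prompt now tells the model to answer simple-chat turns with `{"preamble": ""}` inside a fenced ```json block, and routes/chat.py only queues the assistant preamble when the returned string is non-empty. The patch does not show how `call_preamble_llm` turns that fenced JSON reply into the plain string it returns, so the helper below is only a minimal sketch of what that parsing step could look like; the name `_extract_preamble`, the regex, and the fall-back-to-empty behaviour are assumptions, not code from this commit.

```python
import json
import re

# Hypothetical helper (NOT part of this patch): turns the model's fenced JSON
# reply into the plain preamble string that routes/chat.py checks.
def _extract_preamble(raw_reply: str) -> str:
    """Return the chosen preamble, or "" for the simple-chat / skip case."""
    match = re.search(r"```json\s*(\{.*?\})\s*```", raw_reply, re.DOTALL)
    if not match:
        return ""  # no fenced JSON found -> skip the preamble
    try:
        payload = json.loads(match.group(1))
    except json.JSONDecodeError:
        return ""  # malformed JSON -> also skip the preamble
    preamble = payload.get("preamble", "")
    return preamble.strip() if isinstance(preamble, str) else ""


# Simple-chat reply: empty string, so the new else branch in routes/chat.py is taken.
print(repr(_extract_preamble('```json\n{\n  "preamble": ""\n}\n```')))                # ''
# Complex reply: the chosen preamble is appended as an assistant message.
print(repr(_extract_preamble('```json\n{\n  "preamble": "Just a moment."\n}\n```')))  # 'Just a moment.'
```

With a parser along these lines, a simple-chat turn yields "" and falls through to the `else` branch added in routes/chat.py, so no preamble message is appended.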