From d23c27a177ce6879d69cd09eaa39457502e369c1 Mon Sep 17 00:00:00 2001
From: renee <50965960+wurenee@users.noreply.github.com>
Date: Mon, 2 Feb 2026 12:39:01 -0800
Subject: [PATCH] =?UTF-8?q?=E5=8A=A0=E5=85=A5=E5=9B=BE=E7=89=87ai=E5=8A=9F?=
 =?UTF-8?q?=E8=83=BD?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 app/ai/ai.py | 34 +++++++++++++++-------------------
 1 file changed, 15 insertions(+), 19 deletions(-)

diff --git a/app/ai/ai.py b/app/ai/ai.py
index b7c5e0b..630d256 100644
--- a/app/ai/ai.py
+++ b/app/ai/ai.py
@@ -1,11 +1,3 @@
-# -*- coding: utf-8 -*-
-"""gemini-hackathon ai.ipynb
-
-Automatically generated by Colab.
-
-Original file is located at
-    https://colab.research.google.com/drive/1FyV9Lq9Sxh_dFiaNIqeu1brOl8DAoKUO
-"""
 
 !pip install -q langgraph-checkpoint-sqlite langchain_google_genai
 
@@ -18,6 +10,8 @@ from langgraph.graph import StateGraph, START, END
 from langgraph.checkpoint.sqlite import SqliteSaver
 from langgraph.graph.message import add_messages
 from langchain_core.runnables import RunnableConfig
+from typing import Union, List, Dict
+
 
 # --- 1. 状态定义 ---
 class State(TypedDict):
@@ -57,6 +51,7 @@ def summarize_conversation(state: State):
         "你是一个记忆管理专家。请更新摘要,合并新旧信息。"
         "1. 保持简练,仅保留事实(姓名、偏好、核心议题)。"
         "2. 如果新消息包含对旧信息的修正,请更新它。"
+        "3. 如果对话中包含图片描述,请将图片的关键视觉信息也记录在摘要中"
     )
 
     summary_input = f"现有摘要: {summary}\n\n待加入的新信息: {messages_to_summarize}"
@@ -96,7 +91,7 @@ workflow.add_edge("summarize", END)
 
 app = workflow.compile(checkpointer=memory)
 
-def chat(thread_id: str, system_prompt: str, user_message: str):
+def chat(thread_id: str, system_prompt: str, user_content: Union[str, List[Dict]]):
     """
     Processes a single user message and returns the AI response,
     persisting memory via the thread_id.
@@ -109,7 +104,7 @@ def chat(thread_id: str, system_prompt: str, user_message: str):
     }
 
     # Prepare the input for this specific turn
-    input_data = {"messages": [HumanMessage(content=user_message)]}
+    input_data = {"messages": [HumanMessage(content=user_content)]}
 
     ai_response = ""
 
@@ -122,14 +117,15 @@ def chat(thread_id: str, system_prompt: str, user_message: str):
     return ai_response
 
 
-if __name__ == "__main__":
-    tid = "py_expert_001"
-    sys_p = "你是个善解人意的机器人。"
+# 使用范例
+# if __name__ == "__main__":
+#     tid = "py_expert_001"
+#     sys_p = "你是个善解人意的机器人。"
 
-    # Call 1: Establish context
-    resp1 = chat(tid, sys_p, "你好,我叫小明。")
-    print(f"Bot: {resp1}")
+#     # Call 1: Establish context
+#     resp1 = chat(tid, sys_p, "你好,我叫小明。")
+#     print(f"Bot: {resp1}")
 
-    # Call 2: Test memory (The model should remember the name '小明')
-    resp2 = chat(tid, sys_p, "我今天很开心")
-    print(f"Bot: {resp2}")
\ No newline at end of file
+#     # Call 2: Test memory (The model should remember the name '小明')
+#     resp2 = chat(tid, sys_p, "我今天很开心")
+#     print(f"Bot: {resp2}")
\ No newline at end of file