📌 Implementation flow diagram
Core implementation code
```python
from operator import itemgetter
from uuid import UUID

from langchain.memory import ConversationBufferWindowMemory
from langchain_community.chat_message_histories import FileChatMessageHistory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_openai import ChatOpenAI

# CompletionReq, validate_error_json and success_json come from the project's own modules.


def debug(self, app_id: UUID):
    # 1. Validate the request body
    req = CompletionReq()
    if not req.validate():
        return validate_error_json(req.errors)

    # 2. Build the prompt: system message + windowed history + current question
    #    (system prompt: "You are a powerful chatbot that answers the user's questions")
    prompt = ChatPromptTemplate.from_messages([
        ("system", "你是一个强大的聊天机器人,能根据用户的提问回复对应的问题"),
        MessagesPlaceholder("history"),
        ("human", "{query}"),
    ])

    # 3. Window memory keeps the last 3 rounds and persists them to a local file
    memory = ConversationBufferWindowMemory(
        k=3,
        input_key="query",
        output_key="output",
        return_messages=True,
        chat_memory=FileChatMessageHistory("./storage/memory/chat_history.txt"),
    )

    # 4. LCEL chain: inject history, render the prompt, call the model, parse to str
    llm = ChatOpenAI(model="gpt-3.5-turbo-16k")
    chain = (
        RunnablePassthrough.assign(
            history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
        )
        | prompt
        | llm
        | StrOutputParser()
    )

    # 5. Invoke the chain, save the new round into memory, and return the answer
    chain_input = {"query": req.query.data}
    content = chain.invoke(chain_input)
    memory.save_context(chain_input, {"output": content})

    return success_json({"content": content})
```
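If the ConversationBufferWindowMemory + FileChatMessageHistory combination is new to you, the standalone sketch below shows the load_memory_variables / save_context round-trip that the chain above relies on. The file name demo_chat_history.json and the sample messages are illustrative only and are not part of the project code:

```python
from langchain.memory import ConversationBufferWindowMemory
from langchain_community.chat_message_histories import FileChatMessageHistory

# Illustrative path; the route above uses ./storage/memory/chat_history.txt
memory = ConversationBufferWindowMemory(
    k=3,                   # keep only the 3 most recent rounds
    input_key="query",
    output_key="output",
    return_messages=True,  # return history as message objects, not a string
    chat_memory=FileChatMessageHistory("demo_chat_history.json"),
)

# First run: no history yet -> {'history': []}
print(memory.load_memory_variables({}))

# Persist one question/answer round to the backing file
memory.save_context({"query": "Hello"}, {"output": "Hi, how can I help?"})

# Later runs (even after a restart) see the saved messages
print(memory.load_memory_variables({}))  # {'history': [HumanMessage(...), AIMessage(...)]}
```

Because the history lives in a file rather than in process memory, restarting the service does not lose the last three rounds of the conversation, and the chain's RunnablePassthrough.assign step can always pull them back in under the "history" key.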