1. autogen中的docker容器初始化
from autogen_core.base import CancellationToken
from autogen_core.components.tools import PythonCodeExecutionTool
from autogen_ext.code_executors import DockerCommandLineCodeExecutor# Create the tool.
code_executor = DockerCommandLineCodeExecutor()
await code_executor.start()
code_execution_tool = PythonCodeExecutionTool(code_executor)
cancellation_token = CancellationToken()
2. 将docker容器转化为langgraph中的toolnode节点
from langchain_core.messages import AIMessage
from langchain_core.tools import tool
from langgraph.prebuilt import ToolNode


@tool
async def code_executor(code: str):
    """execute code"""
    # NOTE(review): this function shadows the `code_executor` executor
    # instance created earlier. It is harmless here because
    # `code_execution_tool` already holds a reference to the executor,
    # but renaming one of the two would be clearer.
    result = await code_execution_tool.run_json({"code": code}, cancellation_token)
    # Print the container's stdout so the reader sees the execution result.
    print(code_execution_tool.return_value_as_string(result))
    # This fixed string (typo included, kept as-is) is what the LLM
    # receives as the tool result.
    return "already excute code"


tools = [code_executor]
# LangGraph node that dispatches tool calls from the model to `tools`.
tool_node = ToolNode(tools)
3. 模型绑定工具
from typing import Literalfrom langgraph.graph import StateGraph, MessagesState
from langgraph.prebuilt import ToolNode
from langchain_openai import ChatOpenAIllm = ChatOpenAI(temperature=0,model="GLM-4-plus",openai_api_key="your api key",openai_api_base="https://open.bigmodel.cn/api/paas/v4/"
)model_with_tools = llm.bind_tools(tools)
model_with_tools.ainvoke("excute code print('Hello, world!')")
<coroutine object RunnableBindingBase.ainvoke at 0x000002466D93D030>
(注意:这里输出的是一个协程对象而不是模型的回复 —— `ainvoke` 是异步方法,必须使用 `await` 才能真正得到结果。)
4. graph 定义
from typing import Literal
from langgraph.graph import StateGraph, MessagesState, START, END


def should_continue(state: MessagesState):
    """Route to the tool node while the last AI message requests tool calls."""
    messages = state["messages"]
    last_message = messages[-1]
    if last_message.tool_calls:
        return "tools"
    return END


def call_model(state: MessagesState):
    """Invoke the tool-bound model on the conversation so far."""
    messages = state["messages"]
    response = model_with_tools.invoke(messages)
    return {"messages": [response]}


workflow = StateGraph(MessagesState)
# Define the two nodes we will cycle between
workflow.add_node("agent", call_model)
workflow.add_node("tools", tool_node)
workflow.add_edge(START, "agent")
# After "agent", either go to "tools" (tool call pending) or finish.
workflow.add_conditional_edges("agent", should_continue, ["tools", END])
workflow.add_edge("tools", "agent")
app = workflow.compile()
5. 示例
问题: excute code print('Hello, world! I love you')
# example with a single tool call
async for chunk in app.astream({"messages": [("human", "excute code print('Hello, world! I love you')")]}, stream_mode="values"
):chunk["messages"][-1].pretty_print()
================================ Human Message =================================

excute code print('Hello, world! I love you')
================================== Ai Message ==================================
Tool Calls:
  code_executor (call_-9186119860808547145)
 Call ID: call_-9186119860808547145
  Args:
    code: print('Hello, world! I love you')
Hello, world! I love you
================================= Tool Message =================================
Name: code_executor

already excute code
================================== Ai Message ==================================

代码已成功执行,输出结果为:"Hello, world! I love you"。如果您有其他代码需要执行或有其他问题,请随时告诉我!
问题: 1 puls 1 equals what, use the code tool
async for chunk in app.astream({"messages": [("human", "1 puls 1 equals what, use the code tool")]}, stream_mode="values"
):chunk["messages"][-1].pretty_print()
================================ Human Message =================================1 puls 1 equals what, use the code tool
================================== Ai Message ==================================
Tool Calls:code_executor (call_-9186116218674576385)Call ID: call_-9186116218674576385Args:code: 1 + 1================================= Tool Message =================================
Name: code_executoralready excute code
================================== Ai Message ==================================The result of 1 plus 1 is 2.
如果有任何问题,欢迎在评论区提问。