基于 LangChain 与 LangGraph 的多轮对话机器人设计与实现
1.开发环境
python3.10+
2.安装langgraph
pip install -U langgraph
3.接入模型
如果你用 OpenAI 接模型,LangChain 官方当前对应包是:
pip install -U langchain-openai
这里我用的是deepseek
pip install -U langchain langgraph langchain-openai python-dotenv
4.管理api-key
在项目列表中新建.env文件并输入你的api-key
`DEEPSEEK_API_KEY=你的_deepseek_key`
5.实现代码
然后在项目中新建python文件,输入
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain_core.messages import HumanMessage, SystemMessage
from langgraph.graph import StateGraph, MessagesState, START, END
from langgraph.checkpoint.memory import InMemorySaver
# Pull DEEPSEEK_API_KEY (and any other settings) from the project's .env file.
load_dotenv()

# DeepSeek exposes an OpenAI-compatible API, so the stock ChatOpenAI client
# works once base_url is pointed at DeepSeek's endpoint.
llm = ChatOpenAI(
    model="deepseek-chat",
    api_key=os.getenv("DEEPSEEK_API_KEY"),
    base_url="https://api.deepseek.com",
    temperature=0.7,
)
def chatbot_node(state: MessagesState):
    """Run one LLM turn: prepend the system prompt, return the model's reply.

    MessagesState's reducer appends the returned message list to the
    conversation history kept in the checkpointer.
    """
    convo = [SystemMessage(content="你是一个礼貌、简洁、乐于助人的中文聊天机器人。")]
    convo.extend(state["messages"])
    reply = llm.invoke(convo)
    return {"messages": [reply]}
# Minimal one-node graph: START -> chatbot -> END.
graph_builder = StateGraph(MessagesState)
graph_builder.add_node("chatbot", chatbot_node)
graph_builder.add_edge(START, "chatbot")
graph_builder.add_edge("chatbot", END)

# An in-memory checkpointer gives each thread_id its own persistent history.
checkpointer = InMemorySaver()
graph = graph_builder.compile(checkpointer=checkpointer)
thread_id = "user-1"
print("聊天机器人已启动,输入 exit 退出,输入 clear 清空会话。")

# Simple REPL: every graph.invoke resumes the checkpointed thread, so the
# model sees the full prior conversation for the current thread_id.
while True:
    user_input = input("你: ").strip()
    command = user_input.lower()
    if command == "exit":
        print("机器人: 再见!")
        break
    if command == "clear":
        # Switching to a fresh thread_id effectively drops the old history.
        thread_id = f"user-reset-{os.urandom(4).hex()}"
        print("机器人: 当前会话已清空。")
        continue
    result = graph.invoke(
        {"messages": [HumanMessage(content=user_input)]},
        config={"configurable": {"thread_id": thread_id}},
    )
    print("机器人:", result["messages"][-1].content)
也可以把模型换成
model="deepseek-reasoner"
然后运行尝试

发现可以进行基本的对话,但是不能进行联网查询
6.实现联网查询
这里有两个方法
方法一:利用tavily实现联网查询
首先你需要有一个tavily的账号
注册成功后得到key
每个月可以免费搜索1K次
a.安装依赖
pip install -U langchain langgraph langchain-openai python-dotenv langchain-community tavily-python
b.添加.env
在刚才的.env文件中改为
DEEPSEEK_API_KEY=你的deepseek_key
TAVILY_API_KEY=你的tavily_key
c.新一版实现代码
import os
from dotenv import load_dotenv
from langchain_openai import ChatOpenAI
from langchain.tools import tool
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_community.retrievers import TavilySearchAPIRetriever
from langgraph.graph import StateGraph, MessagesState, START
from langgraph.checkpoint.memory import InMemorySaver
from langgraph.prebuilt import ToolNode, tools_condition
# Load DEEPSEEK_API_KEY and TAVILY_API_KEY from .env.
load_dotenv()
# 1) Underlying web-search layer: Tavily retriever, k=3 documents per query.
retriever = TavilySearchAPIRetriever(k=3)
# 2) Tool definition — exposed to the model via function calling.
#    NOTE: the docstring doubles as the tool description the model reads,
#    so it is kept exactly as-is.
@tool
def web_search(query: str) -> str:
    """Search the web for up-to-date information.
    Use this for news, current events, prices, versions, weather, schedules,
    releases, recent research, and any time-sensitive facts.
    """
    docs = retriever.invoke(query)
    if not docs:
        return "没有检索到可用的联网结果。"
    formatted = []
    for idx, doc in enumerate(docs, 1):
        meta = doc.metadata
        title = meta.get("title", "无标题")
        link = meta.get("source", meta.get("url", ""))
        body = (doc.page_content or "").strip().replace("\n", " ")
        formatted.append(f"[{idx}] 标题: {title}\n链接: {link}\n摘要: {body[:500]}")
    return "\n\n".join(formatted)
tools = [web_search]
# 3) DeepSeek model via its OpenAI-compatible endpoint.
llm = ChatOpenAI(
    model="deepseek-chat",
    api_key=os.getenv("DEEPSEEK_API_KEY"),
    base_url="https://api.deepseek.com",
    temperature=0.3,
)
# 4) Bind the tools; parallel_tool_calls=False so the model issues at most
#    one tool call per turn.
llm_with_tools = llm.bind_tools(tools, parallel_tool_calls=False)
# 5) Model node: one tool-enabled LLM turn with the system prompt prepended.
def chatbot_node(state: MessagesState):
    """Invoke the tool-bound model over the conversation so far."""
    system = SystemMessage(
        content=(
            "你是一个中文聊天机器人。\n"
            "规则:\n"
            "1. 普通闲聊时直接回答。\n"
            "2. 当问题涉及最新、实时、联网、新闻、天气、价格、版本、发布日期、比赛结果、当前人物职位等时,优先调用 web_search。\n"
            "3. 调用工具后,基于工具结果回答。\n"
            "4. 如果工具结果里带有链接,尽量在回答末尾列出主要来源链接。\n"
        )
    )
    reply = llm_with_tools.invoke([system, *state["messages"]])
    return {"messages": [reply]}
# 6) Tool-execution node; handle_tool_errors=True makes the node report
#    tool failures back to the model instead of raising.
tool_node = ToolNode(tools, handle_tool_errors=True)

# 7) Graph wiring: the chatbot decides; if its reply carries tool calls we
#    run them and loop back so the model can summarize, otherwise we end.
graph_builder = StateGraph(MessagesState)
graph_builder.add_node("chatbot", chatbot_node)
graph_builder.add_node("tools", tool_node)
graph_builder.add_edge(START, "chatbot")
graph_builder.add_conditional_edges("chatbot", tools_condition)  # tool calls -> tools; else END
graph_builder.add_edge("tools", "chatbot")  # feed tool output back for a final answer

memory = InMemorySaver()
graph = graph_builder.compile(checkpointer=memory)
# 8) Command-line test loop. Each invoke resumes the checkpointed thread,
#    so the model keeps the whole conversation for the current thread_id.
thread_id = "user-1"
print("机器人已启动。输入 exit 退出,输入 clear 清空会话。")
while True:
    user_input = input("你: ").strip()
    command = user_input.lower()
    if command == "exit":
        print("机器人: 再见!")
        break
    if command == "clear":
        # A fresh thread_id leaves the old history behind.
        thread_id = f"user-reset-{os.urandom(4).hex()}"
        print("机器人: 当前会话已清空。")
        continue
    result = graph.invoke(
        {"messages": [HumanMessage(content=user_input)]},
        config={"configurable": {"thread_id": thread_id}},
    )
    print("机器人:", result["messages"][-1].content)
尝试运用

成功!
方法二 function calling实现
直接调用函数
import os
import requests
from dotenv import load_dotenv
from langchain.tools import tool
from langchain_openai import ChatOpenAI
from langchain.agents import create_tool_calling_agent, AgentExecutor
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.messages import HumanMessage, AIMessage
load_dotenv()
# =========================
# [Core 1] @tool
# Registers a plain function as a tool the model can invoke
# through function calling.
# =========================
@tool
def get_weather(location: str) -> str:
    """获取某个地点的当前天气。location 可以是城市、省份或地区名,例如:北京、上海、山东。"""
    # Fix: network/HTTP failures (timeouts, raise_for_status) used to
    # propagate out of the tool and abort the whole agent run; a tool
    # should report errors as text the model can relay to the user.
    try:
        # Step 1: geocode the free-form location name to lat/lon.
        geo_resp = requests.get(
            "https://geocoding-api.open-meteo.com/v1/search",
            params={
                "name": location,
                "count": 1,
                "format": "json",
                "language": "zh",
            },
            timeout=10,
        )
        geo_resp.raise_for_status()
        geo_data = geo_resp.json()
        if not geo_data.get("results"):
            return f"未找到地点:{location}"
        place = geo_data["results"][0]
        lat, lon = place["latitude"], place["longitude"]
        place_name = place.get("name", location)
        country = place.get("country", "")
        # Step 2: fetch current temperature and wind for those coordinates.
        weather_resp = requests.get(
            "https://api.open-meteo.com/v1/forecast",
            params={
                "latitude": lat,
                "longitude": lon,
                "current": "temperature_2m,wind_speed_10m",
                "timezone": "auto",
            },
            timeout=10,
        )
        weather_resp.raise_for_status()
        weather_data = weather_resp.json()
    except requests.RequestException as exc:
        return f"天气查询失败:{exc}"
    current = weather_data.get("current", {})
    temp = current.get("temperature_2m", "未知")
    wind = current.get("wind_speed_10m", "未知")
    return f"{place_name}{country}当前温度:{temp}°C,风速:{wind} km/h。"
# DeepSeek chat model via its OpenAI-compatible API; temperature=0 for the
# most deterministic tool-calling behavior.
llm = ChatOpenAI(
    model="deepseek-chat",
    api_key=os.getenv("DEEPSEEK_API_KEY"),
    base_url="https://api.deepseek.com",
    temperature=0,
)
# Prompt layout: system rule, running chat history, the new user turn, and
# agent_scratchpad where the agent injects intermediate tool calls/results.
prompt = ChatPromptTemplate.from_messages([
    ("system", "你是一个中文助手。如果用户询问天气,优先调用 get_weather 工具进行查询,再用自然中文回答。"),
    MessagesPlaceholder(variable_name="chat_history"),
    ("human", "{input}"),
    MessagesPlaceholder(variable_name="agent_scratchpad"),
])
tools = [get_weather]
# =========================
# [Core 2] create_tool_calling_agent
# Builds an agent that supports function calling and
# hands the tools to the model.
# =========================
agent = create_tool_calling_agent(
    llm=llm,
    tools=tools,
    prompt=prompt,
)
# The executor drives the model -> tool -> model loop;
# verbose=True prints each intermediate step.
executor = AgentExecutor(
    agent=agent,
    tools=tools,
    verbose=True,
)
def main():
    """Command-line REPL for the weather question-answering agent."""
    chat_history = []
    print("天气问答机器人已启动,输入 exit 退出。")
    while True:
        user_input = input("你:").strip()
        if user_input.lower() in ["exit", "quit", "退出"]:
            print("助手:再见!")
            break
        # =========================
        # [Core 3] executor.invoke(...)
        # Kicks off one full function-calling round; the model
        # decides here whether to call get_weather.
        # =========================
        result = executor.invoke({
            "input": user_input,
            "chat_history": chat_history,
        })
        # =========================
        # [Core 4] The agent's final natural-language answer.
        # =========================
        output = result["output"]
        print("助手:", output)
        # Keep our own transcript so later turns have context.
        chat_history.extend([
            HumanMessage(content=user_input),
            AIMessage(content=output),
        ])


if __name__ == "__main__":
    main()
尝试运行

成功!

浙公网安备 33010602011771号