from langchain.chains import LLMChain, SimpleSequentialChain #简单序列链
from langchain_community.llms.ollama import Ollama
from langchain_core.prompts import PromptTemplate
# Local Ollama-served model; "qwen:7b" must already be pulled (`ollama pull qwen:7b`).
llm = Ollama(model="qwen:7b")

# Step 1: given a user's region, name a classic dish from that area.
# (Prompt text is Chinese on purpose — the model is addressed in Chinese.)
template = """您的工作是根据用户建议的区域制作一道经典菜肴。
%用户位置
{user_location}
AI回答:
"""
prompt_template = PromptTemplate(input_variables=["user_location"], template=template)
location_chain = LLMChain(llm=llm, prompt=prompt_template)

# Step 2: given the dish name, produce a short home-cooking recipe.
# Fixed typo: "这到菜" -> "这道菜" ("this dish"); added the trailing colon on
# "AI回答:" to match the first template's format.
template2 = """给出一个简短的食谱,说明如何在家做这道菜
%菜谱
{user_meal}
AI回答:
"""
prompt_template2 = PromptTemplate(input_variables=["user_meal"], template=template2)
meal_chain = LLMChain(llm=llm, prompt=prompt_template2)

# A sequential chain pipes the first chain's output in as the second chain's
# single input, so the suggested dish becomes the recipe request.
overall_chain = SimpleSequentialChain(chains=[location_chain, meal_chain], verbose=True)

# Guarded entry point: importing this module no longer triggers a model call.
if __name__ == "__main__":
    response = overall_chain.run("河北张家口")
    print(response)