from langchain_community.embeddings import OllamaEmbeddings
from langchain_community.llms.ollama import Ollama
from langchain_community.vectorstores.faiss import FAISS
from langchain_core.example_selectors import SemanticSimilarityExampleSelector
from langchain_core.prompts import PromptTemplate, FewShotPromptTemplate
# Local Qwen 7B model served through Ollama
llm = Ollama(model="qwen:7b")
# Template used to render each selected example
# ("示例输入" = example input, "示例输出" = example output)
template = '''
示例输入:{input}, 示例输出:{output}
'''
example_prompt = PromptTemplate(
    input_variables=['input', 'output'],
    template=template
)
# Candidate few-shot examples the selector can choose from
examples = [
    {"input": "海盗", "output": "船"},
    {"input": "飞行员", "output": "飞机"},
    {"input": "驾驶员", "output": "车"},
    {"input": "树", "output": "地面"},
    {"input": "鸟", "output": "鸟巢"}
]
# Build the example selector.
# SemanticSimilarityExampleSelector picks the examples that are semantically closest to the input.
example_selector = SemanticSimilarityExampleSelector.from_examples(
    # Examples available for selection
    examples,
    # Embedding class used to measure semantic similarity.
    # The default embedding model may not be pulled locally, so qwen:7b is reused here;
    # a dedicated embedding model works better if one is available.
    OllamaEmbeddings(model="qwen:7b"),
    # VectorStore class used to store the embeddings and run the similarity search
    FAISS,
    # Number of examples to select
    k=2
)
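# Optional sanity check (not part of the original script): inspect which examples the
# selector picks for a given query. select_examples() joins the dict values into the
# query text, so the key name itself is not significant.
print(example_selector.select_examples({"noun": "硬盘"}))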
similar_prompt = FewShotPromptTemplate(
    # Object that chooses which examples to include
    example_selector=example_selector,
    # Prompt used to format each selected example
    example_prompt=example_prompt,
    # Custom text added above (prefix) and below (suffix) the examples
    prefix="根据下面示例,写出输出",
    suffix="输入:{noun}\n输出:",
    # Input variables your prompt accepts
    input_variables=['noun']
)
final_prompt = similar_prompt.format(noun='硬盘')
print(final_prompt)
# Calling llm(...) directly is deprecated in newer LangChain releases; use invoke()
response = llm.invoke(final_prompt)
print(response)
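# Equivalent LCEL-style sketch (an alternative, not the original approach): both
# FewShotPromptTemplate and Ollama are Runnables, so they can be piped together and
# invoked with the prompt's input variables directly.
chain = similar_prompt | llm
print(chain.invoke({"noun": "硬盘"}))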