LLM ROUTER
https://github.com/johnsosoka/langgraph-model-router/tree/main
from langgraph.graph import START, END, StateGraph

from workflow.nodes import handle_advanced, handle_simple, end
from workflow.routers import query_router
from workflow.state import State

# Create a new StateGraph
workflow = StateGraph(State)

# Add nodes to the graph
workflow.add_node("handle_advanced", handle_advanced)
workflow.add_node("handle_simple", handle_simple)
workflow.add_node("review", end)

# Set the entry point
# (This entrypoint is ALSO a conditional edge)
workflow.add_conditional_edges(
    START,
    query_router,
    {
        "advanced": "handle_advanced",
        "simple": "handle_simple"
    }
)

# Join the branches to the review node & END
workflow.add_edge("handle_advanced", "review")
workflow.add_edge("handle_simple", "review")
workflow.add_edge("review", END)

# Compile the graph
graph = workflow.compile()
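The graph above imports its nodes from workflow/nodes.py, which is not reproduced here. As a rough sketch of what such nodes could look like, assuming the state carries a user_query and a response field (the field names and the actual model calls in the linked repo may differ):

# Hypothetical sketch of workflow/nodes.py -- illustrative only; in the real
# repo these nodes would call the configured language models.
from workflow.state import State


def handle_simple(state: State) -> dict:
    # A LangGraph node receives the current state and returns a partial update.
    # A lightweight model would answer the straightforward query here.
    return {"response": f"[simple model] answering: {state['user_query']}"}


def handle_advanced(state: State) -> dict:
    # A more capable model (or a multi-step chain) handles complex requests.
    return {"response": f"[advanced model] answering: {state['user_query']}"}


def end(state: State) -> dict:
    # The "review" node; a pass-through is enough to join both branches.
    return {}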
from langchain.output_parsers import EnumOutputParser
from langchain_core.prompts import PromptTemplate

from core.language_models import LanguageModels
from workflow.routing_options import RoutingOptions
from workflow.state import State

import logging

language_models = LanguageModels()


# This is the Conditional Edge
def query_router(state: State):
    """Determine if the user query should be routed to the advanced or simple language model."""
    logging.info("Determining where to route the user query.")
    parser = EnumOutputParser(enum=RoutingOptions)

    # TODO - Utilize create_structured_output_runnable to avoid hard coding valid outputs
    prompt = PromptTemplate(
        template="Determine if the user query requires a simple or advanced model. An 'advanced' request might require \
multiple steps like retrieving an order ID and looking up shipping information, whereas a 'simple' request can \
handle more straightforward queries. Return either 'simple' or 'advanced'. Do not explain your reasoning. Your \
only task is to determine where to route the user query. \
user_query: {input}",
        input_variables=["input"]
    )

    routing_chain = prompt | language_models.get_router_llm() | parser

    result = routing_chain.invoke({"input": state["user_query"]})
    logging.info(f"Query Router Determined Model: {result.value}")

    return result.value
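The router imports two small pieces that are not shown above: the RoutingOptions enum that EnumOutputParser validates against, and the State definition carrying user_query. A plausible sketch of both, chosen so the enum values match the "simple"/"advanced" branch names in the conditional edge mapping (the repo's actual definitions may differ):

# Hypothetical workflow/routing_options.py and workflow/state.py sketches.
from enum import Enum
from typing import TypedDict


class RoutingOptions(Enum):
    # Values must match the keys used in add_conditional_edges
    SIMPLE = "simple"
    ADVANCED = "advanced"


class State(TypedDict):
    user_query: str
    response: str

Because query_router returns result.value ("simple" or "advanced"), the conditional edge mapping in the graph resolves directly to the handle_simple or handle_advanced node.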
SEMANTIC ROUTER
https://github.com/mohamedfarag22/LangGraph_with_multiple_Tools_and_agents_by_semantic_route-and-Prompt_Engineer/tree/main
# Route and RouteLayer come from the semantic-router package; the encoder
# (e.g. an OpenAI or HuggingFace encoder) is assumed to be constructed earlier.
from semantic_router import Route
from semantic_router.layer import RouteLayer

# Define the time route
time_route = Route(
    name="get_time",
    utterances=[
        "what time is it?",
        "when should I eat my next meal?",
        "how long should I rest until training again?",
        "when should I go to the gym?",
        "when should I go to bed?",
    ],
)

# Define the search route
search_route = Route(
    name="search",
    utterances=[
        "find the best restaurants nearby",
        "search for top movies this year",
        "show me the latest news",
        "find a good book to read",
        "get the latest news about AI in 2024",
        "what is machine learning?",
    ],
)

# Define the weather route
weather_route = Route(
    name="get_weather",
    utterances=[
        "what's the weather like today?",
        "is it going to rain tomorrow?",
        "how's the weather in New York?",
        "what's the forecast for this weekend?",
        "what is the temperature in Egypt?",
        "how is the weather today in Bahrain?",
    ],
)

# Combine all routes into a list
routes = [time_route, search_route, weather_route]

# Initialize the semantic router
rl = RouteLayer(encoder=encoder, routes=routes)
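Once the RouteLayer is built, dispatch amounts to calling it with the user's message and reading the matched route's name, which can then be mapped to a tool or graph node. A minimal usage sketch (the tool names and the fallback branch here are assumptions, not code from the linked repo):

# Illustrative dispatch on top of the route layer defined above.
choice = rl("how's the weather in New York?")

if choice.name == "get_weather":
    print("route to the weather tool")
elif choice.name == "get_time":
    print("route to the time tool")
elif choice.name == "search":
    print("route to the search tool")
else:
    # choice.name is None when no route is close enough semantically.
    print("no route matched; fall back to the default agent")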
Source: http://www.cnblogs.com/lightsong/
The copyright of this article is shared by the author and cnblogs. Reposting is welcome, but unless the author agrees otherwise, this statement must be retained and a clearly visible link to the original must be given on the reposted page.