Import the Required Packages
import os
from functools import lru_cache
from typing import Annotated, Literal, Sequence, TypedDict

from langchain_anthropic import ChatAnthropic
from langchain_community.tools.tavily_search import TavilySearchResults
from langchain_core.messages import BaseMessage
from langchain_openai import ChatOpenAI
from langgraph.graph import END, StateGraph, add_messages
from langgraph.prebuilt import ToolNode
Create the LangGraph Agent
# Read the provider API keys from the environment.
openAIKey = os.environ.get("OPENAI_API_KEY", None)
anthropicApiKey = os.environ.get("ANTHROPIC_API_KEY", None)
tavilyApiKey = os.environ.get("TAVILY_API_KEY", None)
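If a key is missing, the failure only surfaces later as an opaque authentication error. A minimal fail-fast sketch; the `_require` helper below is hypothetical, not part of any SDK:

def _require(name: str, value) -> None:
    # Hypothetical helper: raise early instead of failing at request time.
    if not value:
        raise EnvironmentError(f"Missing required environment variable: {name}")

_require("TAVILY_API_KEY", tavilyApiKey)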
Define the Agent State & Tools
class AgentState(TypedDict):
    # `add_messages` is a reducer: node outputs are appended to the
    # running message list instead of overwriting it.
    messages: Annotated[Sequence[BaseMessage], add_messages]

tools = [TavilySearchResults(max_results=1, tavily_api_key=tavilyApiKey)]
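A quick illustration of the reducer (not needed for the tutorial itself): `add_messages` appends new messages rather than replacing the list, de-duplicating by message `id`:

from langchain_core.messages import AIMessage, HumanMessage

merged = add_messages(
    [HumanMessage(content="hi", id="1")],
    [AIMessage(content="hello!", id="2")],
)
assert len(merged) == 2  # appended, not replaced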
Model Selection Helper
@lru_cache(maxsize=4)
def _get_model(model_name: str):
    # Cached so repeated calls reuse the same client instance.
    if model_name == "openai":
        model = ChatOpenAI(temperature=0, model_name="gpt-4o", api_key=openAIKey)
    elif model_name == "anthropic":
        model = ChatAnthropic(
            temperature=0,
            model_name="claude-3-sonnet-20240229",
            api_key=anthropicApiKey,
        )
    else:
        raise ValueError(f"Unsupported model type: {model_name}")
    # Expose the search tool to the model so it can emit tool calls.
    model = model.bind_tools(tools)
    return model
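Thanks to `lru_cache`, repeated lookups return the same bound client (this assumes the corresponding API key is set, since the client is constructed on first use):

assert _get_model("anthropic") is _get_model("anthropic")  # same cached instance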
Define the Function That Determines Whether to Continue or Not
def should_continue(state: AgentState) -> Literal["continue", "end"]:
    messages = state["messages"]
    last_message = messages[-1]
    # If the last message carries no tool calls, the agent is done.
    if not last_message.tool_calls:
        return "end"
    # Otherwise, route to the tool node.
    else:
        return "continue"
# System prompt prepended to every model call (see `call_model` below).
system_prompt = "You are a helpful assistant."
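A plain `AIMessage` carries an empty `tool_calls` list by default, so the router returns `"end"` for it. This check runs without any API keys:

from langchain_core.messages import AIMessage

assert should_continue({"messages": [AIMessage(content="done")]}) == "end"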
Define the Function That Calls the Model
def call_model(state: AgentState, config: dict):
    messages = state["messages"]
    # Prepend the system prompt to the conversation so far.
    messages = [{"role": "system", "content": system_prompt}] + list(messages)
    model_name = config.get("configurable", {}).get("model_name", "anthropic")
    model = _get_model(model_name)
    response = model.invoke(messages)
    # Return a list: the `add_messages` reducer appends it to the state.
    return {"messages": [response]}
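Each node is an ordinary function over state, so it can be exercised in isolation. This sketch performs a real model call, so it needs a valid Anthropic key (the default provider):

from langchain_core.messages import HumanMessage

out = call_model({"messages": [HumanMessage(content="hello")]}, config={})
# out["messages"][0] is an AIMessage, possibly carrying tool_calls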
Define the Node That Executes Tools
# Prebuilt node that runs whichever tool the last AI message requested.
tool_node = ToolNode(tools)
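`ToolNode` inspects the `tool_calls` on the last `AIMessage` in state and returns the matching `ToolMessage` results. A sketch of driving it directly (this runs a real Tavily search, so the key must be set):

from langchain_core.messages import AIMessage

demo_state = {
    "messages": [
        AIMessage(
            content="",
            tool_calls=[{
                "name": "tavily_search_results_json",
                "args": {"query": "latest football news"},
                "id": "call_demo",
            }],
        )
    ]
}
result = tool_node.invoke(demo_state)  # {"messages": [ToolMessage(...)]}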
Define the Config
class GraphConfig(TypedDict):
    # Selectable at runtime via config["configurable"]["model_name"].
    model_name: Literal["anthropic", "openai"]
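Because `call_model` reads `model_name` at invocation time, you can switch providers per call without rebuilding the graph. Once the graph below is compiled into `app` (see the Maxim Logger Integration section), a call might look like this sketch:

result = app.invoke(
    {"messages": [("user", "hello")]},
    config={"configurable": {"model_name": "openai"}},
)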
Define a New Graph
workflow = StateGraph(AgentState, config_schema=GraphConfig)
# Define the two nodes we will cycle between
workflow.add_node("agent", call_model)
workflow.add_node("action", tool_node)
# Set the entrypoint as `agent`
# This means that this node is the first one called
workflow.set_entry_point("agent")
# We now add a conditional edge
workflow.add_conditional_edges(
    # First, we define the start node. We use `agent`.
    # This means these are the edges taken after the `agent` node is called.
    "agent",
    # Next, we pass in the function that will determine which node is called next.
    should_continue,
    # Finally we pass in a mapping.
    # The keys are strings, and the values are other nodes.
    # END is a special node marking that the graph should finish.
    # What will happen is we will call `should_continue`, and then the output of that
    # will be matched against the keys in this mapping.
    # Based on which one it matches, that node will then be called.
    {
        # If `continue`, route to the tool node.
        "continue": "action",
        # Otherwise we finish.
        "end": END,
    },
)
# We now add a normal edge from `action` back to `agent`.
# This means that after the tools run, the `agent` node is called next.
workflow.add_edge("action", "agent")
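To sanity-check the wiring before adding observability, you can compile a throwaway copy and print its topology (the `app` actually used with Maxim is compiled in the next section):

preview = workflow.compile()
print(preview.get_graph().draw_mermaid())  # agent -> action loop, agent -> END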
Maxim Logger Integration
from maxim import Maxim

# With an empty config, the SDK reads MAXIM_API_KEY (and the target log
# repository) from the environment.
logger = Maxim({}).logger()
from maxim.logger.langchain import MaximLangchainTracer

app = workflow.compile()
maxim_langchain_tracer = MaximLangchainTracer(logger)

def another_method(query: str) -> str:
    # Placeholder for any downstream processing of the agent's answer.
    return query

async def ask_agent(query: str) -> str:
    # Attach the Maxim tracer as a callback so every run is logged.
    config = {"recursion_limit": 50, "callbacks": [maxim_langchain_tracer]}
    response = ""
    async for event in app.astream(input={"messages": [("user", query)]}, config=config):
        for k, v in event.items():
            if k == "agent":
                # Keep the latest agent reply; the final one is the answer.
                response = str(v["messages"][-1].content)
    return response

async def handle(query: str):
    resp = await ask_agent(query)
    another_method(str(resp))
    return resp
Get the Response from the Agent
resp = await handle("tell me the latest football news")
print(resp)
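Top-level `await` like this only works in a notebook or IPython session. In a plain Python script there is no running event loop, so wrap the call with `asyncio.run` instead:

import asyncio

resp = asyncio.run(handle("tell me the latest football news"))
print(resp)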