We integrated LangGraph into Keywords AI so that you can use it with our tracing feature. In this guide, you will learn how to run your graph while our tracer automatically keeps track of everything that happens during the graph run, in chronological order.

1

Import LangGraph dependencies

Python
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages

# Graph state schema for LangGraph: a single `messages` channel.
class State(TypedDict):
    # Messages have the type "list". The `add_messages` function
    # in the annotation defines how this state key should be updated:
    # it appends new messages to the list rather than overwriting it,
    # so every node's output accumulates into the conversation history.
    messages: Annotated[list, add_messages]


# Builder used below to register nodes/edges before compiling the graph.
graph_builder = StateGraph(State)
2

Initialize the LLM

Python
from langchain_anthropic import ChatAnthropic

# Claude 3.5 Sonnet with temperature=0 for deterministic responses.
llm = ChatAnthropic(model="claude-3-5-sonnet-20240620", temperature=0)
3

Initialize Keywords AI tracing

Python
from keywordsai_tracing.main import KeywordsAITelemetry
from keywordsai_tracing.decorators import task, workflow
# Instantiate the Keywords AI tracer once at import time; the @task and
# @workflow decorators used below report their spans through it.
ktl = KeywordsAITelemetry()
4

Define the chatbot function and wrap it with the tracing decorator

Python
@task(name="chatbot_response")
def chatbot_respond(state: State):
    """Invoke the LLM on the conversation so far and return its reply.

    The returned dict is merged into the graph state; `add_messages`
    appends the new AI message to `messages` instead of overwriting it.
    Traced as the "chatbot_response" task by Keywords AI.
    """
    reply = llm.invoke(state["messages"])
    return {"messages": [reply]}
5

Set up the node and edges

Python
# Register the single chatbot node and wire START -> chatbot -> END,
# then compile the builder into a runnable graph.
graph_builder.add_node("chatbot", chatbot_respond)
graph_builder.add_edge(START, "chatbot")
graph_builder.add_edge("chatbot", END)
graph = graph_builder.compile()
6

Define the stream_graph_updates function

Python
def stream_graph_updates(user_input: str):
    """Run one user turn through the graph and print each assistant reply.

    Streams events from the compiled graph; every node's output carries
    the updated `messages` list, whose last entry is the newest message.
    """
    initial_state = {"messages": [{"role": "user", "content": user_input}]}
    for event in graph.stream(initial_state):
        for node_output in event.values():
            latest_message = node_output["messages"][-1]
            print("Assistant:", latest_message.content)
7

Define the user_send_message function

Python
@task(name="user_send_message")
def user_send_message() -> str:
    """Prompt on stdin and return the raw user message (traced as a task)."""
    return input("User: ")
8

Wrap everything in a workflow

Python
@workflow(name="chatbot_qa")
def chatbot_qa():
    """Run the interactive chat loop as a single traced workflow.

    Reads user messages until the user types quit/exit/q. If stdin is
    unavailable (input() raises EOFError in non-interactive environments),
    falls back to one canned question and exits.
    """
    while True:
        try:
            user_input = user_send_message()
        except EOFError:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # and real bugs in stream_graph_updates are no longer silently
            # swallowed; EOFError is what input() raises when stdin is not
            # available, which is exactly the case this fallback is for.
            user_input = "What do you know about LangGraph?"
            print("User: " + user_input)
            stream_graph_updates(user_input)
            break

        if user_input.lower() in ["quit", "exit", "q"]:
            print("Goodbye!")
            break

        stream_graph_updates(user_input)
9

Run the workflow

Python
# Script entry point: start the traced chat workflow when run directly.
if __name__ == "__main__":
    chatbot_qa()