We integrated LangGraph into Keywords AI so that you can use it with our tracing feature. In this guide, you will learn how to run your graph while our tracer automatically records everything that happens during the run, in chronological order.
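If you don't have the dependencies installed yet, a typical setup looks like this (the package names below assume the public PyPI distributions of LangGraph, the Anthropic integration for LangChain, and the Keywords AI tracing SDK):

pip install langgraph langchain-anthropic keywordsai-tracing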
Import LangGraph dependencies
from typing import Annotated
from typing_extensions import TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
# Initialize the State for langgraph
class State(TypedDict):
    # Messages have the type "list". The `add_messages` function
    # in the annotation defines how this state key should be updated
    # (in this case, it appends messages to the list, rather than overwriting them)
    messages: Annotated[list, add_messages]
graph_builder = StateGraph(State)
Initialize the LLM
from langchain_anthropic import ChatAnthropic
llm = ChatAnthropic(model="claude-3-5-sonnet-20240620", temperature=0)
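ChatAnthropic needs your Anthropic credentials. A minimal sketch, assuming the standard ANTHROPIC_API_KEY environment variable (set it however you normally manage secrets):

import os
os.environ["ANTHROPIC_API_KEY"] = "sk-ant-..."  # placeholder; use your real key or export it in your shell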
Initialize Keywords AI tracing
from keywordsai_tracing.main import KeywordsAITelemetry
from keywordsai_tracing.decorators import task, workflow
ktl = KeywordsAITelemetry()
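KeywordsAITelemetry picks up your Keywords AI credentials from the environment, so make sure they are set before the line above runs. A minimal sketch, assuming the variable is named KEYWORDSAI_API_KEY:

import os
os.environ["KEYWORDSAI_API_KEY"] = "your-keywordsai-api-key"  # assumed variable name; set before KeywordsAITelemetry() is constructed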
Define the chatbot function and wrap it with the tracing decorator
@task(name="chatbot_response")
def chatbot_respond(state: State):
return {"messages": [llm.invoke(state["messages"])]}
Set up the node and edges
graph_builder.add_node("chatbot", chatbot_respond)
graph_builder.add_edge(START, "chatbot")
graph_builder.add_edge("chatbot", END)
graph = graph_builder.compile()
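At this point you can sanity-check the compiled graph with a single turn before adding streaming; graph.invoke runs it once and returns the final state, and the add_messages reducer converts the plain dict below into a LangChain message object:

final_state = graph.invoke({"messages": [{"role": "user", "content": "Hello!"}]})
print(final_state["messages"][-1].content)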
Define the stream_graph_updates function
def stream_graph_updates(user_input: str):
    for event in graph.stream({"messages": [{"role": "user", "content": user_input}]}):
        for value in event.values():
            print("Assistant:", value["messages"][-1].content)
Define the user_send_message function
@task(name="user_send_message")
def user_send_message() -> str:
input_message = input("User: ")
return input_message
Wrap everything in a workflow
@workflow(name="chatbot_qa")
def chatbot_qa():
while True:
try:
user_input = user_send_message()
if user_input.lower() in ["quit", "exit", "q"]:
print("Goodbye!")
break
stream_graph_updates(user_input)
except:
# fallback if input() is not available
user_input = "What do you know about LangGraph?"
print("User: " + user_input)
stream_graph_updates(user_input)
break
Run the workflow
if __name__ == "__main__":
    chatbot_qa()
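When the workflow exits, the entire chatbot_qa run appears as a single trace in your Keywords AI dashboard, with each user_send_message and chatbot_response task nested inside it in the order they ran.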