import { createRaindropLangChain } from "@raindrop-ai/langchain";
import { ChatOpenAI } from "@langchain/openai";
import { HumanMessage } from "@langchain/core/messages";
import { StateGraph, MessagesAnnotation, END } from "@langchain/langgraph";
import { ToolNode } from "@langchain/langgraph/prebuilt";
import { tool } from "@langchain/core/tools";
import { z } from "zod";
// Raindrop telemetry client for LangChain callbacks. `writeKey` is a
// placeholder ("rk_...") — substitute a real key before running.
// NOTE(review): userId/convoId presumably tag captured events with the end
// user and conversation — confirm against the Raindrop SDK docs.
const raindrop = createRaindropLangChain({
writeKey: "rk_...",
userId: "user-123",
convoId: "convo-456",
});
// Input contract for the weather tool: a single required city name.
const weatherInputSchema = z.object({ city: z.string() });

// Demo tool: returns a canned temperature string for the requested city.
const getWeather = tool(
  async (input) => {
    const { city } = input;
    return `The weather in ${city} is 22°C.`;
  },
  {
    name: "get_weather",
    description: "Get weather",
    schema: weatherInputSchema,
  },
);
const model = new ChatOpenAI({ model: "gpt-4o-mini" }).bindTools([getWeather]);
/**
 * Graph node: forwards the accumulated conversation to the tool-bound model.
 * The Raindrop handler is attached to this model call (not to the graph) so
 * only LLM events are captured.
 */
const callModel = async (state: typeof MessagesAnnotation.State) => {
  const options = { callbacks: [raindrop.handler] };
  const reply = await model.invoke(state.messages, options);
  return { messages: [reply] };
};
// Agent loop: agent -> (tool calls requested? -> tools -> agent)* -> END.
const graph = new StateGraph(MessagesAnnotation)
  .addNode("agent", callModel)
  .addNode("tools", new ToolNode([getWeather]))
  .addEdge("__start__", "agent")
  .addConditionalEdges("agent", (state) => {
    // Route to the tool node only when the last message carries tool calls.
    const last = state.messages.at(-1);
    // Guard: the previous unguarded index would throw a TypeError on the
    // `in` check if the message list were ever empty (and is possibly
    // undefined under noUncheckedIndexedAccess).
    if (
      last &&
      "tool_calls" in last &&
      Array.isArray(last.tool_calls) &&
      last.tool_calls.length > 0
    ) {
      return "tools";
    }
    return END;
  })
  .addEdge("tools", "agent")
  .compile();
// Pass callbacks only to the model inside the node — NOT to graph.invoke().
// This ensures clean, deduplicated events without LangGraph internal noise.
// shutdown() flushes buffered Raindrop events; run it in `finally` so the
// flush happens even when the graph invocation throws (previously an error
// in invoke() would skip shutdown and silently drop telemetry).
let result: Awaited<ReturnType<typeof graph.invoke>>;
try {
  result = await graph.invoke({
    messages: [new HumanMessage("What's the weather in Paris?")],
  });
} finally {
  await raindrop.shutdown();
}