LangChain
The LangChain provider transforms Devcaster tools into LangChain's StructuredTool format with built-in execution.
Install
pip install devcaster devcaster_langchain langchain langchain_openai

npm install @devcaster/core @devcaster/langchain @langchain/openai @langchain/langgraph @langchain/core

Configure API Keys
Set DEVCASTER_API_KEY with your API key from Settings and OPENAI_API_KEY with your OpenAI API key.
DEVCASTER_API_KEY=xxxxxxxxx
OPENAI_API_KEY=xxxxxxxxx

Create session and run
from devcaster import Devcaster
from devcaster_langchain import LangchainProvider
from langchain.agents import create_agent
from langchain_openai import ChatOpenAI

# Wire Devcaster up with the LangChain provider so that session.tools()
# returns LangChain-compatible StructuredTool objects with execution built in.
devcaster = Devcaster(provider=LangchainProvider())
llm = ChatOpenAI(model="gpt-5.2")

# Each end user gets their own session; the tools it exposes are scoped to it.
session = devcaster.create(user_id="user_123")
agent = create_agent(tools=session.tools(), model=llm)

# Run the agent on a single user message; `result` holds the full message history.
result = agent.invoke(
    {"messages": [("user", "Send an email to john@example.com with the subject 'Hello' and body 'Hello from Devcaster!'")]}
)
print(result["messages"][-1].content)

import { ChatOpenAI } from '@langchain/openai';
import { HumanMessage, AIMessage } from '@langchain/core/messages';
import { ToolNode } from '@langchain/langgraph/prebuilt';
import { StateGraph, MessagesAnnotation } from '@langchain/langgraph';
import { Devcaster } from '@devcaster/core';
import { LangchainProvider } from '@devcaster/langchain';
// Devcaster configured with the LangChain provider: session.tools()
// yields StructuredTool instances ready for LangGraph.
const devcaster = new Devcaster({ provider: new LangchainProvider() });

// Sessions are created per end user; tools are scoped to the session.
const session = await devcaster.create("user_123");
const tools = await session.tools();

// Graph node that executes whatever tool calls the model emits.
const toolNode = new ToolNode(tools);

// Deterministic chat model with the Devcaster tools bound to it.
const baseModel = new ChatOpenAI({ model: 'gpt-5.2', temperature: 0 });
const model = baseModel.bindTools(tools);
// Router: send the graph to the tool node when the model requested
// tool calls; otherwise terminate the loop.
function shouldContinue({ messages }: typeof MessagesAnnotation.State) {
  const last = messages[messages.length - 1] as AIMessage;
  return last.tool_calls?.length ? 'tools' : '__end__';
}
// Invoke the LLM on the accumulated conversation and append its reply
// to the graph state.
async function callModel(state: typeof MessagesAnnotation.State) {
  const reply = await model.invoke(state.messages);
  return { messages: [reply] };
}
// Assemble the agent loop: model call -> (tools -> model)* -> end.
// Nodes are registered first, then the edges that connect them; the
// builder is order-insensitive, so this groups related calls together.
const workflow = new StateGraph(MessagesAnnotation)
  .addNode('agent', callModel)
  .addNode('tools', toolNode)
  .addEdge('__start__', 'agent')
  .addConditionalEdges('agent', shouldContinue)
  .addEdge('tools', 'agent');

const app = workflow.compile();

// Kick off the graph with a single user message and print the final reply.
const result = await app.invoke({
  messages: [new HumanMessage("Send an email to john@example.com with the subject 'Hello' and body 'Hello from Devcaster!'")],
});
console.log(result.messages[result.messages.length - 1].content);