import { MCPAgent, MCPClient } from "mcp-use";
import { ChatOpenAI } from "@langchain/openai";
const client = new MCPClient({
  mcpServers: {
    filesystem: {
      command: "npx",
      args: ["-y", "@modelcontextprotocol/server-filesystem", "./"]
    }
  }
});
const llm = new ChatOpenAI({ model: "gpt-4o" });
const agent = new MCPAgent({
  llm,
  client,
  memoryEnabled: true // Agent manages memory internally (default)
});
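// Hedged alternative: constructing the agent with memoryEnabled set to false
// should make each run() call independent, with no earlier turns carried over.
// This variant is shown only for comparison and is not used below.
const statelessAgent = new MCPAgent({ llm, client, memoryEnabled: false });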
// The agent will automatically maintain conversation context
const response1 = await agent.run("Hello, my name is Alice");
const response2 = await agent.run("What's my name?"); // Agent remembers Alice from the previous turn
console.log(response2);
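// Hedged sketch: reset the agent's memory mid-session. clearConversationHistory()
// is assumed here (it mirrors the Python library's clear_conversation_history);
// verify the method name against your installed mcp-use version before relying on it.
agent.clearConversationHistory();
const response3 = await agent.run("What's my name?"); // history was cleared, so the name is no longer known
console.log(response3);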
await client.closeAllSessions();