ai-sdk-next-openai/app/mcp/chat/route.ts

import { openai } from '@ai-sdk/openai';
import { StreamableHTTPClientTransport } from '@modelcontextprotocol/sdk/client/streamableHttp.js';
import {
  convertToModelMessages,
  experimental_createMCPClient,
  stepCountIs,
  streamText,
} from 'ai';
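
// Next.js route handler: connects to a local MCP server over Streamable HTTP,
// exposes its tools to the model, and streams the chat response back to the client.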
export async function POST(req: Request) {
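  // The MCP server is expected to be served by this same app at /mcp/server.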
  const url = new URL('http://localhost:3000/mcp/server');
  const transport = new StreamableHTTPClientTransport(url);
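
  // Create the MCP client and parse the request body in parallel.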
  const [client, { messages }] = await Promise.all([
    experimental_createMCPClient({
      transport,
    }),
    req.json(),
  ]);

  try {
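    // Fetch the tool definitions exposed by the MCP server so the model can call them.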
    const tools = await client.tools();

    const result = streamText({
      model: openai('gpt-4o-mini'),
      tools,
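      // Limit the agentic loop to at most 5 model/tool steps.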
      stopWhen: stepCountIs(5),
      onStepFinish: async ({ toolResults }) => {
        console.log(`STEP RESULTS: ${JSON.stringify(toolResults, null, 2)}`);
      },
      system: 'You are a helpful chatbot capable of basic arithmetic problems',
      messages: convertToModelMessages(messages),
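      // Close the MCP client once the final response has finished streaming.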
      onFinish: async () => {
        await client.close();
      },
      // Optional: also close the client when an error occurs. Resources are
      // cleaned up immediately, but the connection will not be retained for retries:
      // onError: async error => {
      //   await client.close();
      // },
    });

    return result.toUIMessageStreamResponse();
  } catch (error) {
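    // Log the failure server-side and return a generic 500 to the caller.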
    console.error(error);
    return Response.json({ error: 'Unexpected error' }, { status: 500 });
  }
}