import { NextRequest, NextResponse } from "next/server";
import OpenAI from "openai";
import { transformStream } from "@crayonai/stream";
import { DBMessage, getMessageStore } from "./messageStore";
/**
 * POST handler: appends a user message to the thread, streams a chat
 * completion from the thesys C1 endpoint back to the client as SSE, and
 * persists the assistant's full reply once the stream completes.
 *
 * Request body: { prompt: DBMessage; threadId: string; responseId: string }
 * Response: text/event-stream of content deltas.
 */
export async function POST(req: NextRequest) {
  const { prompt, threadId, responseId } = (await req.json()) as {
    prompt: DBMessage;
    threadId: string;
    responseId: string;
  };

  // OpenAI-compatible client pointed at the thesys C1 embed endpoint.
  const client = new OpenAI({
    baseURL: "https://api.thesys.dev/v1/embed/",
    apiKey: process.env.THESYS_API_KEY,
  });

  // Persist the user message first so the completion request below
  // includes the full, up-to-date conversation history.
  const messageStore = getMessageStore(threadId);
  messageStore.addMessage(prompt);

  // Streaming chat completion over the whole thread history.
  const llmStream = await client.chat.completions.create({
    model: "c1/anthropic/claude-sonnet-4/v-20251230",
    messages: messageStore.getOpenAICompatibleMessageList(),
    stream: true,
  });

  // Map each SSE chunk to its content delta. Some chunks (e.g. a final
  // usage-only chunk) arrive with an empty `choices` array, and
  // `delta.content` may be null — optional chaining guards both so one
  // such chunk cannot crash the stream mid-response.
  const responseStream = transformStream(
    llmStream,
    (chunk) => {
      return chunk.choices[0]?.delta?.content;
    },
    {
      onEnd: ({ accumulated }) => {
        // Join the non-empty deltas into the final assistant message and
        // store it under the caller-supplied response id.
        const message = accumulated.filter((message) => message).join("");
        messageStore.addMessage({
          role: "assistant",
          content: message,
          id: responseId,
        });
      },
    }
  ) as ReadableStream;

  // Return the stream as server-sent events; disable caching/transforms
  // so proxies don't buffer the incremental chunks.
  return new NextResponse(responseStream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache, no-transform",
      Connection: "keep-alive",
    },
  });
}