import { NextRequest, NextResponse } from "next/server";
import OpenAI from "openai";
import type { ChatCompletionMessageParam } from "openai/resources.mjs";
import { transformStream } from "@crayonai/stream";
// `getMessageStore` keeps per-thread conversation history in OpenAI's chat
// format; a minimal sketch appears at the bottom of this file.
import { getMessageStore } from "./messageStore";
import { makeC1Response } from "@thesysai/genui-sdk/server";
// `checkForPII` is a hypothetical guardrail that checks the user query against
// some criteria, such as whether it contains or requests PII; a minimal sketch
// appears at the bottom of this file.
import { checkForPII } from "./guardrails";

export async function POST(req: NextRequest) {
  const c1Response = makeC1Response();
  const { prompt, threadId, responseId } = (await req.json()) as {
    prompt: ChatCompletionMessageParam;
    threadId: string;
    responseId: string;
  };

  if (checkForPII(prompt)) {
    c1Response.writeCustomMarkdown(
      "I'm unable to assist with this request because it contains, or asks for, PII (*personally identifiable information*). Please remove any sensitive information and try again."
    );
    c1Response.end(); // This is necessary to stop showing the "loading" state once the response is done streaming.
    return new NextResponse(c1Response.responseStream, {
      headers: {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache, no-transform",
        Connection: "keep-alive",
      },
    });
  }

  const client = new OpenAI({
    baseURL: "https://api.thesys.dev/v1/embed",
    apiKey: process.env.THESYS_API_KEY, // Use the API key you created in the previous step
  });

  const messageStore = getMessageStore(threadId);
  messageStore.addMessage(prompt);

  const llmStream = await client.chat.completions.create({
    model: "c1/anthropic/claude-sonnet-4/v-20250617",
    messages: messageStore.getOpenAICompatibleMessageList(),
    stream: true,
  });

  // Forward each OpenAI delta into the C1 response stream. The transformer
  // also accumulates the full message so it can be persisted once streaming
  // ends; the transformed stream itself is unused, since the client reads
  // from c1Response.responseStream below.
  transformStream(
    llmStream,
    (chunk) => {
      const contentDelta = chunk.choices[0]?.delta?.content;
      if (contentDelta) {
        c1Response.writeContent(contentDelta);
      }
      return contentDelta;
    },
    {
      onEnd: ({ accumulated }) => {
        c1Response.end(); // This is necessary to stop showing the "loading" state once the response is done streaming.
        const message = accumulated.filter((chunk) => chunk).join("");
        messageStore.addMessage({
          id: responseId,
          role: "assistant",
          content: message,
        });
      },
    }
  );

  return new NextResponse(c1Response.responseStream, {
    headers: {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache, no-transform",
      Connection: "keep-alive",
    },
  });
}
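
// ---------------------------------------------------------------------------
// ./messageStore.ts: a minimal sketch of the message store imported above.
// This is an assumption, not the tutorial's exact implementation; it keeps
// history in memory per thread, which only suits local development. Swap in
// a database-backed store for anything real.

import type { ChatCompletionMessageParam } from "openai/resources.mjs";

type StoredMessage = ChatCompletionMessageParam & { id?: string };

const threads = new Map<string, StoredMessage[]>();

export const getMessageStore = (threadId: string) => {
  const messages = threads.get(threadId) ?? [];
  threads.set(threadId, messages);
  return {
    addMessage: (message: StoredMessage) => {
      messages.push(message);
    },
    // Drop bookkeeping fields such as `id` so the list matches what the
    // chat completions API expects.
    getOpenAICompatibleMessageList: (): ChatCompletionMessageParam[] =>
      messages.map(
        ({ id: _id, ...message }) => message as ChatCompletionMessageParam
      ),
  };
};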
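
// ---------------------------------------------------------------------------
// ./guardrails.ts: a minimal sketch of the hypothetical `checkForPII`
// guardrail, assuming naive regex checks over the message text. A production
// guardrail would typically use a dedicated PII classifier or DLP service
// rather than patterns like these.

import type { ChatCompletionMessageParam } from "openai/resources.mjs";

// Rough patterns for common PII shapes (email, US phone number, US SSN).
const PII_PATTERNS = [
  /[\w.+-]+@[\w-]+\.[\w.-]+/,
  /\b\d{3}[-.\s]?\d{3}[-.\s]?\d{4}\b/,
  /\b\d{3}-\d{2}-\d{4}\b/,
];

export const checkForPII = (message: ChatCompletionMessageParam): boolean => {
  // `content` may be a string or an array of content parts; flatten to text.
  const text =
    typeof message.content === "string"
      ? message.content
      : JSON.stringify(message.content ?? "");
  return PII_PATTERNS.some((pattern) => pattern.test(text));
};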