fix(frontend): send correct body format to chat-api messages endpoint

Chat-api expects {content, temperature, max_tokens, top_k}, but the frontend
was sending {messages: [...]}. Now extracts the last user message as content
when proxying to /api/conversations/:id/messages.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
Manmohan Sharma 2026-04-16 14:49:22 -07:00
parent 40ce6c1a89
commit faf4810696
No known key found for this signature in database

View File

@ -17,20 +17,14 @@ function sseEvent(data: Record<string, unknown>) {
return encoder.encode(`data: ${JSON.stringify(data)}\n\n`);
}
async function proxyUpstream(body: StreamBody, upstreamUrl: string, authHeader: string | null) {
async function proxyUpstream(body: Record<string, unknown>, upstreamUrl: string, authHeader: string | null) {
const headers: Record<string, string> = { 'Content-Type': 'application/json' };
if (authHeader) headers['Authorization'] = authHeader;
const upstream = await fetch(upstreamUrl, {
method: 'POST',
headers,
body: JSON.stringify({
messages: body.messages,
temperature: body.temperature ?? 0.8,
top_k: body.topK ?? 50,
max_tokens: 512,
model: body.model,
}),
body: JSON.stringify(body),
});
if (!upstream.ok || !upstream.body) {
@ -94,8 +88,17 @@ export async function POST(req: NextRequest) {
// If we have a conversationId and auth, use the persisted messages endpoint
const convId = body.conversationId;
if (convId && authHeader) {
// Chat-api expects {content, temperature, max_tokens, top_k}
// Extract the last user message as the content
const lastUserMsg = [...body.messages].reverse().find(m => m.role === 'user');
const chatApiBody = {
content: lastUserMsg?.content ?? '',
temperature: body.temperature,
max_tokens: body.maxTokens,
top_k: body.topK,
};
return await proxyUpstream(
body,
chatApiBody as any,
`${upstream.replace(/\/$/, '')}/api/conversations/${convId}/messages`,
authHeader,
);