Files
vibn-frontend/app/api/projects/[projectId]/knowledge/import-ai-chat/route.ts
Mark Henderson 6f79a88abd fix(gitea-bot): add write:organization scope so bot can create repos
Without this the bot PAT 403s on POST /orgs/{org}/repos, which is
the single most important operation — creating new project repos
inside the workspace's Gitea org.

Made-with: Cursor
2026-04-21 11:05:55 -07:00

95 lines
3.0 KiB
TypeScript

import { NextResponse } from 'next/server';
import { authSession } from "@/lib/auth/session-server";
import { query } from '@/lib/db-postgres';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// Recognized chat providers for imported transcripts; anything not in this
// set is normalized to 'other'. NOTE(review): this is a Set despite the
// "_MAP" suffix — membership lookup only. Consider renaming to PROVIDER_SET.
const PROVIDER_MAP = new Set(['chatgpt', 'gemini', 'claude', 'cursor', 'vibn', 'other']);
// Expected JSON body for POST .../knowledge/import-ai-chat.
// Only `transcript` is required by the handler; everything else defaults.
interface ImportAiChatRequest {
// Optional display title; also stored as sourceMeta.filename.
title?: string;
// Case-insensitive provider name; validated against PROVIDER_MAP.
provider?: string;
// The chat transcript text; required (whitespace-only is rejected).
transcript?: string;
// Original URL of the conversation, if any.
sourceLink?: string | null;
// Timestamp of the original conversation, if known (format set by caller).
createdAtOriginal?: string | null;
}
export async function POST(
request: Request,
{ params }: { params: Promise<{ projectId: string }> },
) {
try {
const { projectId } = await params;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const body = (await request.json()) as ImportAiChatRequest;
const transcript = body.transcript?.trim();
const provider = body.provider?.toLowerCase();
if (!transcript) {
return NextResponse.json({ error: 'transcript is required' }, { status: 400 });
}
const session = await authSession();
if (!session?.user?.email) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const projectRows = await query(`SELECT id FROM fs_projects WHERE id = $1 LIMIT 1`, [projectId]);
if (projectRows.length === 0) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const origin = PROVIDER_MAP.has(provider ?? '') ? provider : 'other';
const sourceMeta: KnowledgeSourceMeta = {
origin: (origin as KnowledgeSourceMeta['origin']) ?? 'other',
url: body.sourceLink ?? null,
filename: body.title ?? null,
createdAtOriginal: body.createdAtOriginal ?? null,
importance: 'primary',
tags: ['ai_chat'],
};
const knowledgeItem = await createKnowledgeItem({
projectId,
sourceType: 'imported_ai_chat',
title: body.title ?? null,
content: transcript,
sourceMeta,
});
// Chunk and embed in background (don't block response)
// This populates AlloyDB knowledge_chunks for vector search
(async () => {
try {
const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
await writeKnowledgeChunksForItem({
id: knowledgeItem.id,
projectId: knowledgeItem.projectId,
content: knowledgeItem.content,
sourceMeta: knowledgeItem.sourceMeta,
});
} catch (error) {
// Log but don't fail the request
console.error('[import-ai-chat] Failed to chunk/embed knowledge_item:', error);
}
})();
return NextResponse.json({ knowledgeItem });
} catch (error) {
console.error('[import-ai-chat] Failed to import chat', error);
return NextResponse.json(
{
error: 'Failed to import AI chat transcript',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}