VIBN Frontend for Coolify deployment

This commit is contained in:
2026-02-15 19:25:52 -08:00
commit 40bf8428cd
398 changed files with 76513 additions and 0 deletions

View File

@@ -0,0 +1,196 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { runChatExtraction } from '@/lib/ai/chat-extractor';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { createChatExtraction } from '@/lib/server/chat-extraction';
import { FieldValue } from 'firebase-admin/firestore';
import type { ProjectPhaseScores } from '@/lib/types/project-artifacts';
import type { KnowledgeItem } from '@/lib/types/knowledge';
// Next.js route segment config: allow up to 5 minutes of execution time,
// since the batch loop runs one LLM extraction per knowledge item.
export const maxDuration = 300; // 5 minutes for batch processing

// Per-item outcome reported back to the caller after a batch run.
interface BatchExtractionResult {
  knowledgeItemId: string;  // id of the knowledge_items doc that was processed
  success: boolean;         // true when extraction + persistence succeeded
  error?: string;           // present only when success is false
}
/**
 * Batch-extracts structured data from every not-yet-processed knowledge item
 * of a project, persists one chat_extractions record per item, kicks off
 * background chunk/embedding work, and advances the project phase when at
 * least one extraction succeeded.
 *
 * Returns 400 when no projectId can be resolved, 500 on unexpected failure,
 * otherwise a summary with per-item results.
 */
export async function POST(
  request: Request,
  context: { params?: Promise<{ projectId?: string }> | { projectId?: string } } = {},
) {
  try {
    // Next.js 15+ delivers route params as a Promise; older versions pass a plain object.
    const params = context.params instanceof Promise ? await context.params : context.params;

    // Fall back to the URL path (.../projects/<id>/...) or ?projectId= query
    // param so the route also works when invoked without bound route params.
    const url = new URL(request.url);
    const pathSegments = url.pathname.split('/');
    const projectsIndex = pathSegments.indexOf('projects');
    const projectIdFromPath =
      projectsIndex !== -1 ? pathSegments[projectsIndex + 1] : undefined;
    const projectId =
      (params?.projectId ?? projectIdFromPath ?? url.searchParams.get('projectId') ?? '').trim();
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const adminDb = getAdminDb();

    // Load every knowledge item for this project.
    const knowledgeSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('knowledge_items')
      .get();
    if (knowledgeSnapshot.empty) {
      return NextResponse.json({
        message: 'No knowledge items to extract',
        results: []
      });
    }
    const knowledgeItems = knowledgeSnapshot.docs.map(doc => ({
      id: doc.id,
      ...doc.data()
    })) as KnowledgeItem[];

    // Skip items that already have a chat_extractions record.
    const extractionsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('chat_extractions')
      .get();
    const processedKnowledgeIds = new Set(
      extractionsSnapshot.docs.map(doc => doc.data().knowledgeItemId)
    );
    const itemsToProcess = knowledgeItems.filter(
      item => !processedKnowledgeIds.has(item.id)
    );
    if (itemsToProcess.length === 0) {
      return NextResponse.json({
        message: 'All knowledge items already extracted',
        results: []
      });
    }

    console.log(`[batch-extract] Processing ${itemsToProcess.length} knowledge items for project ${projectId}`);

    const llm = new GeminiLlmClient();
    const results: BatchExtractionResult[] = [];
    let successCount = 0;
    // Explicitly typed: `let x = null` alone would be an evolving `any`.
    // Holds the most recent successful extraction for the phase update below.
    let lastSuccessfulExtraction: Awaited<ReturnType<typeof createChatExtraction>> | null = null;

    // Process items sequentially; a failure on one item does not abort the batch.
    for (const knowledgeItem of itemsToProcess) {
      try {
        console.log(`[batch-extract] Extracting from knowledgeItemId=${knowledgeItem.id}`);
        const extractionData = await runChatExtraction(knowledgeItem, llm);
        const overallCompletion = extractionData.summary_scores.overall_completion ?? 0;
        const overallConfidence = extractionData.summary_scores.overall_confidence ?? 0;
        const extraction = await createChatExtraction({
          projectId,
          knowledgeItemId: knowledgeItem.id,
          data: extractionData,
          overallCompletion,
          overallConfidence,
        });
        lastSuccessfulExtraction = extraction;
        successCount++;
        results.push({
          knowledgeItemId: knowledgeItem.id,
          success: true
        });
        console.log(`[batch-extract] Successfully extracted from knowledgeItemId=${knowledgeItem.id}`);

        // Chunk + embed in the background; `void` marks this as an intentional
        // fire-and-forget promise (failures are logged inside, never thrown).
        // NOTE(review): in serverless deployments, work started after the
        // response may be cut off — confirm the runtime keeps the instance alive.
        void (async () => {
          try {
            const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
            await writeKnowledgeChunksForItem({
              id: knowledgeItem.id,
              projectId: knowledgeItem.projectId,
              content: knowledgeItem.content,
              sourceMeta: knowledgeItem.sourceMeta,
            });
          } catch (chunkError) {
            console.error(`[batch-extract] Failed to chunk item ${knowledgeItem.id}:`, chunkError);
          }
        })();
      } catch (error) {
        console.error(`[batch-extract] Failed to extract from knowledgeItemId=${knowledgeItem.id}:`, error);
        results.push({
          knowledgeItemId: knowledgeItem.id,
          success: false,
          error: error instanceof Error ? error.message : String(error)
        });
      }
    }

    // Advance the project phase if at least one extraction succeeded.
    if (successCount > 0 && lastSuccessfulExtraction) {
      const projectRef = adminDb.collection('projects').doc(projectId);
      const snapshot = await projectRef.get();
      const docData = snapshot.data() ?? {};
      const existingScores = (docData.phaseScores ?? {}) as ProjectPhaseScores;
      const phaseHistory = Array.isArray(docData.phaseHistory) ? [...docData.phaseHistory] : [];
      phaseHistory.push({
        phase: 'extractor',
        status: 'completed',
        knowledgeItemId: 'batch_extraction',
        timestamp: new Date().toISOString(),
      });
      // Use the last successful extraction's scores as representative of the batch.
      const lastData = lastSuccessfulExtraction.data as { summary_scores?: { overall_completion?: number; overall_confidence?: number } };
      existingScores.extractor = {
        knowledgeItemId: 'batch_extraction',
        overallCompletion: lastData.summary_scores?.overall_completion ?? 0,
        overallConfidence: lastData.summary_scores?.overall_confidence ?? 0,
        updatedAt: new Date().toISOString(),
      };
      await projectRef.set(
        {
          currentPhase: 'analyzed',
          phaseScores: existingScores,
          phaseStatus: 'in_progress',
          phaseHistory,
          updatedAt: FieldValue.serverTimestamp(),
        },
        { merge: true },
      );
      console.log(`[batch-extract] Updated project phase to 'analyzed' for project ${projectId}`);
    }

    // Compute the failure count once rather than re-filtering inside the template.
    const failedCount = results.filter(r => !r.success).length;
    return NextResponse.json({
      message: `Processed ${itemsToProcess.length} items: ${successCount} succeeded, ${failedCount} failed`,
      results,
      successCount,
      totalProcessed: itemsToProcess.length
    });
  } catch (error) {
    console.error('[batch-extract] Batch extraction failed:', error);
    return NextResponse.json(
      {
        error: 'Failed to batch extract knowledge items',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,118 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import { writeKnowledgeChunksForItem } from '@/lib/server/vector-memory';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// Next.js route segment config: allow up to 60 seconds for embedding work.
export const maxDuration = 60;

/** Request body for storing a user-confirmed insight. */
interface ChunkInsightRequest {
  content: string;
  title?: string;
  importance?: 'primary' | 'supporting' | 'irrelevant';
  tags?: string[];
  /** Optional id of the knowledge item this insight was extracted from. */
  sourceKnowledgeItemId?: string;
  /** Arbitrary extra data; `unknown` (not `any`) so readers must narrow before use. */
  metadata?: Record<string, unknown>;
}
/**
 * Stores a user-confirmed insight as a knowledge_item and synchronously
 * chunks/embeds it into AlloyDB. Requires a Firebase ID token.
 *
 * Returns 401 on missing/invalid auth, 400 on missing projectId/content,
 * 404 when the project does not exist, 500 on unexpected failure.
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
  try {
    // Require a Firebase ID token in the Authorization header.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // Next.js 16 delivers params as a Promise; support both shapes.
    const params = 'then' in context.params ? await context.params : context.params;
    const projectId = params.projectId;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    // NOTE(review): body is cast, not validated — consider a schema check (zod).
    const body = await request.json() as ChunkInsightRequest;
    if (!body.content || body.content.trim().length === 0) {
      return NextResponse.json({ error: 'Content is required' }, { status: 400 });
    }

    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }

    console.log(`[chunk-insight] Creating confirmed insight for project ${projectId}`);

    // Build source metadata; `??` (not `||`) so only a truly absent value falls back.
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'vibn',
      createdAtOriginal: new Date().toISOString(),
      importance: body.importance ?? 'primary',
      tags: [
        'extracted_insight',
        'user_confirmed',
        `extracted_by:${userId}`,
        ...(body.sourceKnowledgeItemId ? [`source:${body.sourceKnowledgeItemId}`] : []),
        ...(body.tags ?? [])
      ],
    };

    // Persist the confirmed insight as a knowledge_item.
    const knowledgeItem = await createKnowledgeItem({
      projectId,
      sourceType: 'other',
      title: body.title || 'Extracted Insight',
      content: body.content,
      sourceMeta,
    });
    console.log(`[chunk-insight] Created knowledge_item ${knowledgeItem.id}`);

    // Chunk and embed in AlloyDB synchronously. A failure here is non-fatal:
    // the item is already durably stored in Firestore.
    try {
      await writeKnowledgeChunksForItem({
        id: knowledgeItem.id,
        projectId: knowledgeItem.projectId,
        content: knowledgeItem.content,
        sourceMeta: knowledgeItem.sourceMeta,
      });
      console.log(`[chunk-insight] Successfully chunked and embedded insight`);
    } catch (chunkError) {
      console.error(`[chunk-insight] Failed to chunk item ${knowledgeItem.id}:`, chunkError);
      // Don't fail the request, item is still saved in Firestore
    }

    return NextResponse.json({
      success: true,
      knowledgeItemId: knowledgeItem.id,
      message: 'Insight chunked and stored successfully',
    });
  } catch (error) {
    console.error('[chunk-insight] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to store insight',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,75 @@
import { NextResponse } from 'next/server';
import { getAdminAuth } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
/**
 * Lists up to 100 knowledge chunks for a project from AlloyDB, newest first.
 *
 * Auth: in development a request with no Authorization header is allowed
 * through; any supplied header (and every production request) must carry a
 * valid Firebase ID token. AlloyDB failures degrade to an empty result set.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Authentication (skip in development if no auth header).
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // Explicitly typed: `let chunks = []` alone would be an evolving any[].
    let chunks: Array<Record<string, unknown>> = [];
    let count = 0;
    try {
      const pool = await getAlloyDbClient();
      // Parameterized query — projectId is never interpolated into the SQL text.
      const result = await pool.query(
        `SELECT
          id,
          chunk_index,
          content,
          source_type,
          importance,
          created_at
        FROM knowledge_chunks
        WHERE project_id = $1
        ORDER BY created_at DESC
        LIMIT 100`,
        [projectId]
      );
      chunks = result.rows;
      // node-postgres types rowCount as number | null; normalize to 0.
      count = result.rowCount ?? 0;
      console.log('[API /knowledge/chunks] Found', count, 'chunks');
    } catch (dbError) {
      console.error('[API /knowledge/chunks] AlloyDB query failed:', dbError);
      console.error('[API /knowledge/chunks] This is likely due to AlloyDB not being configured or connected');
      // Degrade gracefully: return an empty result set instead of a 500.
      chunks = [];
      count = 0;
    }

    return NextResponse.json({
      success: true,
      chunks,
      count,
    });
  } catch (error) {
    console.error('[API] Error fetching knowledge chunks:', error);
    return NextResponse.json(
      { error: 'Failed to fetch knowledge chunks' },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,90 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// Providers recognized as first-class origins; anything else is normalized
// to 'other'. NOTE(review): named *_MAP but is actually a Set — consider renaming.
const PROVIDER_MAP = new Set(['chatgpt', 'gemini', 'claude', 'cursor', 'vibn', 'other']);

/** Request body for importing an AI chat transcript. */
interface ImportAiChatRequest {
  title?: string;                    // display title; also used as filename metadata
  provider?: string;                 // lowercased and checked against PROVIDER_MAP
  transcript?: string;               // required at runtime (route returns 400 if empty)
  sourceLink?: string | null;        // optional link back to the original chat
  createdAtOriginal?: string | null; // original creation timestamp, if known
}
export async function POST(
request: Request,
{ params }: { params: Promise<{ projectId: string }> },
) {
try {
const { projectId } = await params;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const body = (await request.json()) as ImportAiChatRequest;
const transcript = body.transcript?.trim();
const provider = body.provider?.toLowerCase();
if (!transcript) {
return NextResponse.json({ error: 'transcript is required' }, { status: 400 });
}
const adminDb = getAdminDb();
const projectSnap = await adminDb.collection('projects').doc(projectId).get();
if (!projectSnap.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const origin = PROVIDER_MAP.has(provider ?? '') ? provider : 'other';
const sourceMeta: KnowledgeSourceMeta = {
origin: (origin as KnowledgeSourceMeta['origin']) ?? 'other',
url: body.sourceLink ?? null,
filename: body.title ?? null,
createdAtOriginal: body.createdAtOriginal ?? null,
importance: 'primary',
tags: ['ai_chat'],
};
const knowledgeItem = await createKnowledgeItem({
projectId,
sourceType: 'imported_ai_chat',
title: body.title ?? null,
content: transcript,
sourceMeta,
});
// Chunk and embed in background (don't block response)
// This populates AlloyDB knowledge_chunks for vector search
(async () => {
try {
const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
await writeKnowledgeChunksForItem({
id: knowledgeItem.id,
projectId: knowledgeItem.projectId,
content: knowledgeItem.content,
sourceMeta: knowledgeItem.sourceMeta,
});
} catch (error) {
// Log but don't fail the request
console.error('[import-ai-chat] Failed to chunk/embed knowledge_item:', error);
}
})();
return NextResponse.json({ knowledgeItem });
} catch (error) {
console.error('[import-ai-chat] Failed to import chat', error);
return NextResponse.json(
{
error: 'Failed to import AI chat transcript',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,136 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
import { chunkDocument } from '@/lib/utils/document-chunker';
/** Request body for importing a plain-text document. */
interface ImportDocumentRequest {
  filename?: string; // required at runtime (route returns 400 if missing)
  content?: string;  // required at runtime (route returns 400 if missing)
  mimeType?: string; // stored in contextSources metadata only
}
// Next.js route segment config: allow up to 30 seconds of execution time.
export const maxDuration = 30;
/**
 * Imports a plain-text document: splits it into overlapping chunks, stores
 * each chunk as a knowledge_item, kicks off background embedding, and writes
 * a contextSources summary record for the UI.
 *
 * Fixes the garbled `$(unknown)` interpolations in the original — the
 * filename is now correctly interpolated into log lines and chunk titles.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const body = (await request.json()) as ImportDocumentRequest;
    const content = body.content?.trim();
    const filename = body.filename?.trim();
    if (!content || !filename) {
      return NextResponse.json({ error: 'filename and content are required' }, { status: 400 });
    }

    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }

    console.log(`[import-document] Processing ${filename}, length=${content.length}`);

    // Split the document into overlapping chunks.
    const chunks = chunkDocument(content, {
      maxChunkSize: 2000,
      chunkOverlap: 200,
      preserveParagraphs: true,
      preserveCodeBlocks: true,
    });
    console.log(`[import-document] Created ${chunks.length} chunks for ${filename}`);

    // Loop-invariant base metadata: identical for every chunk, built once.
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'other',
      url: null,
      filename,
      createdAtOriginal: new Date().toISOString(),
      importance: 'primary',
      tags: ['document', 'chunked'],
    };

    // Store each chunk as a separate knowledge_item.
    const knowledgeItemIds: string[] = [];
    for (const chunk of chunks) {
      const chunkTitle = chunks.length > 1
        ? `${filename} (chunk ${chunk.metadata.chunkIndex + 1}/${chunk.metadata.totalChunks})`
        : filename;
      const knowledgeItem = await createKnowledgeItem({
        projectId,
        sourceType: 'imported_document',
        title: chunkTitle,
        content: chunk.content,
        sourceMeta: {
          ...sourceMeta,
          chunkMetadata: {
            chunkIndex: chunk.metadata.chunkIndex,
            totalChunks: chunk.metadata.totalChunks,
            startChar: chunk.metadata.startChar,
            endChar: chunk.metadata.endChar,
            tokenCount: chunk.metadata.tokenCount,
          },
        },
      });
      knowledgeItemIds.push(knowledgeItem.id);

      // Embed in AlloyDB in the background; `void` marks the intentional
      // fire-and-forget (failures are logged inside, never thrown).
      void (async () => {
        try {
          const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
          await writeKnowledgeChunksForItem({
            id: knowledgeItem.id,
            projectId: knowledgeItem.projectId,
            content: knowledgeItem.content,
            sourceMeta: knowledgeItem.sourceMeta,
          });
        } catch (error) {
          console.error(`[import-document] Failed to chunk item ${knowledgeItem.id}:`, error);
        }
      })();
    }

    // Summary record in contextSources for UI display.
    const contextSourcesRef = adminDb.collection('projects').doc(projectId).collection('contextSources');
    await contextSourcesRef.add({
      type: 'document',
      name: filename,
      summary: `Document with ${chunks.length} chunks (${content.length} characters)`,
      connectedAt: new Date(),
      metadata: {
        chunkCount: chunks.length,
        totalChars: content.length,
        mimeType: body.mimeType,
        knowledgeItemIds,
      },
    });

    return NextResponse.json({
      success: true,
      filename,
      chunkCount: chunks.length,
      knowledgeItemIds,
    });
  } catch (error) {
    console.error('[import-document] Failed to import document', error);
    return NextResponse.json(
      {
        error: 'Failed to import document',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,81 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
 * Lists up to 100 knowledge items for a project from Firestore, newest first.
 *
 * Auth: development requests without an Authorization header skip auth; any
 * supplied header (and every production request) must verify as a Firebase ID
 * token. Firestore failures degrade to an empty list so the UI shows its
 * empty states instead of an error.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Authentication (skip in development if no auth header).
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    console.log('[API /knowledge/items] Fetching items for project:', projectId);
    // Explicitly typed: `let items = []` alone would be an evolving any[].
    let items: Array<Record<string, unknown>> = [];
    try {
      const adminDb = getAdminDb();
      // NOTE(review): this reads a top-level 'knowledge' collection, while other
      // routes in this file use projects/{id}/knowledge_items — confirm which
      // collection is canonical.
      const knowledgeSnapshot = await adminDb
        .collection('knowledge')
        .where('projectId', '==', projectId)
        .orderBy('createdAt', 'desc')
        .limit(100)
        .get();
      console.log('[API /knowledge/items] Found', knowledgeSnapshot.size, 'items');
      items = knowledgeSnapshot.docs.map(doc => {
        const data = doc.data();
        return {
          id: doc.id,
          // Fall back to a content preview when no explicit title was stored.
          title: data.title || data.content?.substring(0, 50) || 'Untitled',
          sourceType: data.sourceType,
          content: data.content,
          sourceMeta: data.sourceMeta,
          // Firestore Timestamps expose toDate(); plain values pass through as-is.
          createdAt: data.createdAt?.toDate?.()?.toISOString() || data.createdAt,
          updatedAt: data.updatedAt?.toDate?.()?.toISOString() || data.updatedAt,
        };
      });
    } catch (firestoreError) {
      console.error('[API /knowledge/items] Firestore query failed:', firestoreError);
      console.error('[API /knowledge/items] This is likely due to missing Firebase Admin credentials or Firestore not being set up');
      // Return empty array instead of failing - the UI will show "No chats yet" and "No images yet"
      items = [];
    }

    return NextResponse.json({
      success: true,
      items,
      count: items.length,
    });
  } catch (error) {
    console.error('[API /knowledge/items] Error fetching knowledge items:', error);
    console.error('[API /knowledge/items] Error stack:', error instanceof Error ? error.stack : 'No stack trace');
    return NextResponse.json(
      {
        error: 'Failed to fetch knowledge items',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,105 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { z } from 'zod';
// Structured-output schema for the LLM call below: a list of named themes,
// each referencing the insight IDs it groups together. The .describe() text
// is surfaced to the model as field-level guidance.
const ThemeGroupingSchema = z.object({
  themes: z.array(z.object({
    theme: z.string().describe('A short, descriptive theme name (2-4 words)'),
    description: z.string().describe('A brief description of what this theme represents'),
    insightIds: z.array(z.string()).describe('Array of insight IDs that belong to this theme'),
  })),
});
// Minimal shape this route reads from each incoming insight.
type ThemeInsight = { id: string; content?: string; title?: string };

/**
 * Groups a caller-supplied list of insights into themes via the LLM.
 *
 * Auth: development requests without an Authorization header skip auth; any
 * supplied header (and every production request) must verify. If the LLM call
 * fails, all insights are returned under a single fallback theme.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Authentication (skip in development if no auth header).
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // Guard against malformed bodies: require an array before using .length/.map.
    const { insights } = await request.json() as { insights?: ThemeInsight[] };
    if (!Array.isArray(insights) || insights.length === 0) {
      return NextResponse.json({
        success: true,
        themes: [],
      });
    }

    console.log('[API /knowledge/themes] Grouping', insights.length, 'insights into themes');

    // One line per insight: "[id] <content preview or title>".
    const insightsContext = insights.map(insight =>
      `[${insight.id}] ${insight.content?.substring(0, 200) || insight.title}`
    ).join('\n\n');

    // Use AI to group insights into themes.
    const llm = new GeminiLlmClient();
    const systemPrompt = `You are an expert at analyzing and categorizing information. Given a list of insights/knowledge chunks, group them into meaningful themes. Each theme should represent a coherent topic or concept. Aim for 3-7 themes depending on the diversity of content.`;
    const userPrompt = `Analyze these insights and group them into themes:
${insightsContext}
Group these insights into themes. Each insight ID is in brackets at the start of each line. Return the themes with their associated insight IDs.`;
    try {
      const result = await llm.structuredCall({
        model: 'gemini',
        systemPrompt,
        messages: [{ role: 'user', content: userPrompt }],
        schema: ThemeGroupingSchema,
        temperature: 0.3,
      });
      console.log('[API /knowledge/themes] Generated', result.themes.length, 'themes');
      return NextResponse.json({
        success: true,
        themes: result.themes,
      });
    } catch (aiError) {
      console.error('[API /knowledge/themes] AI grouping failed:', aiError);
      // Fallback: create a single "Ungrouped" theme with all insights.
      return NextResponse.json({
        success: true,
        themes: [{
          theme: 'All Insights',
          description: 'Ungrouped insights',
          insightIds: insights.map(i => i.id),
        }],
      });
    }
  } catch (error) {
    console.error('[API /knowledge/themes] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to group insights into themes',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,146 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// import { chunkDocument } from '@/lib/utils/document-chunker'; // Not needed - Extractor AI handles chunking
import { getStorage } from 'firebase-admin/storage';
// Next.js route segment config: allow up to 60 seconds for upload + storage.
export const maxDuration = 60;

/**
 * Uploads a document file: saves the original bytes to Firebase Storage,
 * stores the whole text as a single knowledge_item (no chunking here — the
 * Extractor AI chunks important sections later), and records a contextSources
 * summary for the UI. Requires a Firebase ID token.
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
  try {
    // Require a Firebase ID token in the Authorization header.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // Next.js 16 delivers params as a Promise; support both shapes.
    const params = 'then' in context.params ? await context.params : context.params;
    const projectId = params.projectId;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    // Parse multipart form data. formData.get() returns File | string | null,
    // so reject string fields instead of blindly casting to File.
    const formData = await request.formData();
    const file = formData.get('file');
    if (!file || typeof file === 'string') {
      return NextResponse.json({ error: 'No file provided' }, { status: 400 });
    }

    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }

    console.log(`[upload-document] Uploading ${file.name}, size=${file.size}`);

    // Read file content.
    // NOTE(review): decoding as UTF-8 produces garbage for binary formats
    // (PDF, DOCX) — confirm only text uploads reach this endpoint.
    const arrayBuffer = await file.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);
    const content = buffer.toString('utf-8');

    // Upload the original file to Firebase Storage.
    const storage = getStorage();
    const bucket = storage.bucket();
    const storagePath = `projects/${projectId}/documents/${Date.now()}_${file.name}`;
    const fileRef = bucket.file(storagePath);
    await fileRef.save(buffer, {
      metadata: {
        contentType: file.type,
        metadata: {
          uploadedBy: userId,
          projectId,
          originalFilename: file.name,
          uploadedAt: new Date().toISOString(),
        },
      },
    });

    // NOTE(review): makePublic() exposes the file to anyone with the URL —
    // use signed URLs instead if uploads can contain private data.
    await fileRef.makePublic();
    const publicUrl = `https://storage.googleapis.com/${bucket.name}/${storagePath}`;
    console.log(`[upload-document] File saved to Storage: ${publicUrl}`);

    // Store whole document as single knowledge_item (no chunking).
    // Extractor AI will collaboratively chunk important sections later.
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'other',
      url: publicUrl,
      filename: file.name,
      createdAtOriginal: new Date().toISOString(),
      importance: 'primary',
      tags: ['document', 'uploaded', 'pending_extraction'],
    };
    const knowledgeItem = await createKnowledgeItem({
      projectId,
      sourceType: 'imported_document',
      title: file.name,
      content: content,
      sourceMeta,
    });
    console.log(`[upload-document] Stored whole document as knowledge_item: ${knowledgeItem.id}`);

    // Summary record in contextSources for UI display.
    const contextSourcesRef = adminDb.collection('projects').doc(projectId).collection('contextSources');
    await contextSourcesRef.add({
      type: 'document',
      name: file.name,
      summary: `Document (${content.length} characters) - pending extraction`,
      url: publicUrl,
      connectedAt: new Date(),
      metadata: {
        totalChars: content.length,
        fileSize: file.size,
        mimeType: file.type,
        storagePath,
        knowledgeItemId: knowledgeItem.id,
        uploadedBy: userId,
        status: 'pending_extraction',
      },
    });

    return NextResponse.json({
      success: true,
      filename: file.name,
      url: publicUrl,
      knowledgeItemId: knowledgeItem.id,
      status: 'stored',
      message: 'Document stored. Extractor AI will review and chunk important sections.',
    });
  } catch (error) {
    console.error('[upload-document] Failed to upload document', error);
    return NextResponse.json(
      {
        error: 'Failed to upload document',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}