VIBN Frontend for Coolify deployment

This commit is contained in:
2026-02-15 19:25:52 -08:00
commit 40bf8428cd
398 changed files with 76513 additions and 0 deletions

View File

@@ -0,0 +1,46 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
 * Admin/debug endpoint: lists all sessions for a user and how many match a
 * given projectId, so misassociated sessions can be diagnosed.
 *
 * Query params:
 *  - userId (required): owner whose sessions are listed.
 *  - projectId (optional): compared against each session's projectId; may be
 *    omitted to inspect sessions whose projectId is null.
 */
export async function GET(request: Request) {
  try {
    const { searchParams } = new URL(request.url);
    const projectId = searchParams.get('projectId');
    const userId = searchParams.get('userId');
    // userId is required: without it the query below would filter on
    // userId == null and silently return nothing useful.
    if (!userId) {
      return NextResponse.json({ error: 'userId is required' }, { status: 400 });
    }
    const adminDb = getAdminDb();
    // Get all sessions for this user
    const sessionsSnapshot = await adminDb
      .collection('sessions')
      .where('userId', '==', userId)
      .get();
    const allSessions = sessionsSnapshot.docs.map(doc => {
      const data = doc.data();
      return {
        id: doc.id,
        projectId: data.projectId || null,
        workspacePath: data.workspacePath || null,
        workspaceName: data.workspaceName || null,
        needsProjectAssociation: data.needsProjectAssociation,
        messageCount: data.messageCount,
        // conversation may be absent; report 0 rather than undefined.
        conversationLength: data.conversation?.length || 0,
      };
    });
    // Filter sessions that match this project
    const matchingSessions = allSessions.filter(s => s.projectId === projectId);
    return NextResponse.json({
      totalSessions: allSessions.length,
      matchingSessions: matchingSessions.length,
      allSessions,
      projectId,
      userId,
    });
  } catch (error) {
    // Narrow `unknown` instead of typing the catch variable as `any`.
    console.error('[Admin Check Sessions] Error:', error);
    const message = error instanceof Error ? error.message : String(error);
    return NextResponse.json({ error: message }, { status: 500 });
  }
}

View File

@@ -0,0 +1,59 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
 * Admin/repair endpoint: stamps a workspacePath onto a project, then links
 * every session that recorded that workspacePath (and is still awaiting
 * project association) to the project.
 *
 * Body: { projectId: string, workspacePath: string } — both required.
 */
export async function POST(request: Request) {
  try {
    const { projectId, workspacePath } = await request.json();
    if (!projectId || !workspacePath) {
      return NextResponse.json(
        { error: 'projectId and workspacePath required' },
        { status: 400 }
      );
    }
    const adminDb = getAdminDb();
    // Update project with workspacePath
    await adminDb.collection('projects').doc(projectId).update({
      workspacePath,
      updatedAt: new Date(),
    });
    console.log(`[Fix Project] Set workspacePath for ${projectId}: ${workspacePath}`);
    // Now find and link all matching sessions
    const sessionsSnapshot = await adminDb
      .collection('sessions')
      .where('workspacePath', '==', workspacePath)
      .where('needsProjectAssociation', '==', true)
      .get();
    // Firestore batches are capped at 500 writes and a committed batch cannot
    // be reused, so chunk the updates and start a fresh batch after each commit.
    const BATCH_LIMIT = 500;
    let batch = adminDb.batch();
    let opsInBatch = 0;
    let linkedCount = 0;
    for (const sessionDoc of sessionsSnapshot.docs) {
      batch.update(sessionDoc.ref, {
        projectId,
        needsProjectAssociation: false,
        updatedAt: new Date(),
      });
      linkedCount++;
      opsInBatch++;
      if (opsInBatch >= BATCH_LIMIT) {
        await batch.commit();
        batch = adminDb.batch();
        opsInBatch = 0;
      }
    }
    if (opsInBatch > 0) {
      await batch.commit();
    }
    console.log(`[Fix Project] Linked ${linkedCount} sessions to project ${projectId}`);
    return NextResponse.json({
      success: true,
      projectId,
      workspacePath,
      sessionsLinked: linkedCount,
    });
  } catch (error) {
    // Narrow `unknown` instead of typing the catch variable as `any`.
    console.error('[Fix Project] Error:', error);
    const message = error instanceof Error ? error.message : String(error);
    return NextResponse.json({ error: message }, { status: 500 });
  }
}

504
app/api/ai/chat/route.ts Normal file
View File

@@ -0,0 +1,504 @@
import { NextResponse } from 'next/server';
import { z } from 'zod';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import type { LlmClient } from '@/lib/ai/llm-client';
import { getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
import { MODE_SYSTEM_PROMPTS, ChatMode } from '@/lib/ai/chat-modes';
import { resolveChatMode } from '@/lib/server/chat-mode-resolver';
import {
buildProjectContextForChat,
determineArtifactsUsed,
formatContextForPrompt,
} from '@/lib/server/chat-context';
import { logProjectEvent } from '@/lib/server/logs';
import type { CollectorPhaseHandoff } from '@/lib/types/phase-handoff';
// Increase timeout for Gemini 3 Pro thinking mode (can take 1-2 minutes)
export const maxDuration = 180; // 3 minutes
// Opt out of static rendering: every response depends on live Firestore state.
export const dynamic = 'force-dynamic';
// Structured-output contract enforced on the LLM response via structuredCall.
// Keep in sync with the inline generic type argument passed to
// llm.structuredCall in the POST handler below.
const ChatReplySchema = z.object({
  // Conversational reply shown to the user.
  reply: z.string(),
  // Vision-phase answers the model extracts from the conversation.
  visionAnswers: z.object({
    q1: z.string().optional(), // Answer to question 1
    q2: z.string().optional(), // Answer to question 2
    q3: z.string().optional(), // Answer to question 3
    allAnswered: z.boolean().optional(), // True when all 3 are complete
  }).optional(),
  // Collector-phase checklist state as reported by the model.
  collectorHandoff: z.object({
    hasDocuments: z.boolean().optional(),
    documentCount: z.number().optional(),
    githubConnected: z.boolean().optional(),
    githubRepo: z.string().optional(),
    extensionLinked: z.boolean().optional(),
    extensionDeclined: z.boolean().optional(),
    noGithubYet: z.boolean().optional(),
    readyForExtraction: z.boolean().optional(),
  }).optional(),
  // Extraction-review phase transition signals.
  extractionReviewHandoff: z.object({
    extractionApproved: z.boolean().optional(),
    readyForVision: z.boolean().optional(),
  }).optional(),
});
// JSON body accepted by this route's POST handler. Fields are optional so the
// handler can validate and return a 400 instead of throwing during parsing.
interface ChatRequestBody {
  projectId?: string; // Firestore `projects` document id
  message?: string; // user's chat message; trimmed and must be non-empty
  overrideMode?: ChatMode; // client-forced mode, bypassing resolveChatMode
}
async function appendConversation(
projectId: string,
messages: Array<{ role: 'user' | 'assistant'; content: string }>,
) {
const adminDb = getAdminDb();
const docRef = adminDb.collection('chat_conversations').doc(projectId);
await adminDb.runTransaction(async (tx) => {
const snapshot = await tx.get(docRef);
const existing = (snapshot.exists ? (snapshot.data()?.messages as unknown[]) : []) ?? [];
const now = new Date().toISOString();
const newMessages = messages.map((m) => ({
role: m.role,
content: m.content,
// Use a simple ISO string for message timestamps to avoid FieldValue
// restrictions inside arrays.
createdAt: now,
}));
tx.set(
docRef,
{
projectId,
messages: [...existing, ...newMessages],
updatedAt: FieldValue.serverTimestamp(),
},
{ merge: true },
);
});
}
/**
 * Main AI chat endpoint: validates the request, builds mode-specific project
 * context (with vector retrieval), calls Gemini for a structured reply, then
 * persists conversation history plus any phase-transition side effects
 * (collector handoff, extraction-review → vision transition).
 *
 * NOTE(review): conversation history is persisted through TWO paths — the
 * fire-and-forget appendConversation() call below AND the direct
 * chat_conversations set() near the end of this handler. The two writes race;
 * the later full-array write can drop or duplicate the transactional append.
 * Confirm and keep only one path.
 */
export async function POST(request: Request) {
  try {
    const body = (await request.json()) as ChatRequestBody;
    const projectId = body.projectId?.trim();
    const message = body.message?.trim();
    // Both fields are mandatory; whitespace-only values are rejected too.
    if (!projectId || !message) {
      return NextResponse.json({ error: 'projectId and message are required' }, { status: 400 });
    }
    // Verify project exists
    const adminDb = getAdminDb();
    const projectSnapshot = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnapshot.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectSnapshot.data() ?? {};
    // Resolve chat mode (uses new resolver); an explicit overrideMode from
    // the client wins over the project-derived mode.
    const resolvedMode = body.overrideMode ?? await resolveChatMode(projectId);
    console.log(`[AI Chat] Mode: ${resolvedMode}`);
    // Build comprehensive context with vector retrieval
    // Only include GitHub analysis for MVP generation (not needed for vision questions)
    const context = await buildProjectContextForChat(projectId, resolvedMode, message, {
      retrievalLimit: 10,
      includeVectorSearch: true,
      includeGitHubAnalysis: resolvedMode === 'mvp_mode', // Only load repo analysis when generating MVP
    });
    console.log(`[AI Chat] Context built: ${context.retrievedChunks.length} vector chunks retrieved`);
    // Get mode-specific system prompt
    const systemPrompt = MODE_SYSTEM_PROMPTS[resolvedMode];
    // Format context for LLM
    const contextSummary = formatContextForPrompt(context);
    // Prepare enhanced system prompt with context
    const enhancedSystemPrompt = `${systemPrompt}
## Current Project Context
${contextSummary}
---
You have access to:
- Project artifacts (product model, MVP plan, marketing plan)
- Knowledge items (${context.knowledgeSummary.totalCount} total)
- Extraction signals (${context.extractionSummary.totalCount} analyzed)
${context.retrievedChunks.length > 0 ? `- ${context.retrievedChunks.length} relevant chunks from vector search (most similar to user's query)` : ''}
${context.repositoryAnalysis ? `- GitHub repository analysis (${context.repositoryAnalysis.totalFiles} files)` : ''}
${context.sessionHistory.totalSessions > 0 ? `- Complete Cursor session history (${context.sessionHistory.totalSessions} sessions, ${context.sessionHistory.messages.length} messages in chronological order)` : ''}
Use this context to provide specific, grounded responses. The session history shows your complete conversation history with the user - use it to understand what has been built and discussed.`;
    // Load existing conversation history
    const conversationDoc = await adminDb.collection('chat_conversations').doc(projectId).get();
    const conversationData = conversationDoc.exists ? conversationDoc.data() : null;
    const conversationHistory = Array.isArray(conversationData?.messages)
      ? conversationData.messages
      : [];
    // Build full message context (history + current message)
    const messages = [
      ...conversationHistory.map((msg: any) => ({
        role: msg.role as 'user' | 'assistant',
        content: msg.content as string,
      })),
      {
        role: 'user' as const,
        content: message,
      },
    ];
    console.log(`[AI Chat] Sending ${messages.length} messages to LLM (${conversationHistory.length} from history + 1 new)`);
    console.log(`[AI Chat] Mode: ${resolvedMode}, Phase: ${projectData.currentPhase}, Has extraction: ${!!context.phaseHandoffs?.extraction}`);
    // Log system prompt length
    console.log(`[AI Chat] System prompt length: ${enhancedSystemPrompt.length} chars (~${Math.ceil(enhancedSystemPrompt.length / 4)} tokens)`);
    // Log each message length
    messages.forEach((msg, i) => {
      console.log(`[AI Chat] Message ${i + 1} (${msg.role}): ${msg.content.length} chars (~${Math.ceil(msg.content.length / 4)} tokens)`);
    });
    const totalInputChars = enhancedSystemPrompt.length + messages.reduce((sum, msg) => sum + msg.content.length, 0);
    console.log(`[AI Chat] Total input: ${totalInputChars} chars (~${Math.ceil(totalInputChars / 4)} tokens)`);
    // Log system prompt preview (first 500 chars)
    console.log(`[AI Chat] System prompt preview: ${enhancedSystemPrompt.substring(0, 500)}...`);
    // Log last user message
    const lastUserMsg = messages[messages.length - 1];
    console.log(`[AI Chat] User message: ${lastUserMsg.content}`);
    // Safety check: extraction_review_mode requires extraction results
    if (resolvedMode === 'extraction_review_mode' && !context.phaseHandoffs?.extraction) {
      console.warn(`[AI Chat] WARNING: extraction_review_mode active but no extraction results found for project ${projectId}`);
    }
    const llm: LlmClient = new GeminiLlmClient();
    // Configure thinking mode based on task complexity
    // Simple modes (collector, extraction_review) don't need deep thinking
    // Complex modes (mvp, vision) benefit from extended reasoning
    const needsThinking = resolvedMode === 'mvp_mode' || resolvedMode === 'vision_mode';
    // The inline generic below mirrors ChatReplySchema — keep the two in sync.
    const reply = await llm.structuredCall<{
      reply: string;
      visionAnswers?: {
        q1?: string;
        q2?: string;
        q3?: string;
        allAnswered?: boolean;
      };
      collectorHandoff?: {
        hasDocuments?: boolean;
        documentCount?: number;
        githubConnected?: boolean;
        githubRepo?: string;
        extensionLinked?: boolean;
        extensionDeclined?: boolean;
        noGithubYet?: boolean;
        readyForExtraction?: boolean;
      };
      extractionReviewHandoff?: {
        extractionApproved?: boolean;
        readyForVision?: boolean;
      };
    }>({
      model: 'gemini',
      systemPrompt: enhancedSystemPrompt,
      messages: messages, // Full conversation history!
      schema: ChatReplySchema,
      temperature: 0.4,
      thinking_config: needsThinking ? {
        thinking_level: 'high',
        include_thoughts: false,
      } : undefined,
    });
    // Store all vision answers when provided
    // NOTE(review): set(..., { merge: true }) does NOT interpret dotted keys
    // as nested field paths (dot notation is an update()-only feature in
    // Firestore) — these writes presumably create literal top-level fields
    // named 'visionAnswers.q1' etc. Verify readers expect that shape, or
    // switch these writes to update().
    if (reply.visionAnswers) {
      const updates: any = {};
      if (reply.visionAnswers.q1) {
        updates['visionAnswers.q1'] = reply.visionAnswers.q1;
        console.log('[AI Chat] Storing vision answer Q1');
      }
      if (reply.visionAnswers.q2) {
        updates['visionAnswers.q2'] = reply.visionAnswers.q2;
        console.log('[AI Chat] Storing vision answer Q2');
      }
      if (reply.visionAnswers.q3) {
        updates['visionAnswers.q3'] = reply.visionAnswers.q3;
        console.log('[AI Chat] Storing vision answer Q3');
      }
      // If all answers are complete, trigger MVP generation
      if (reply.visionAnswers.allAnswered) {
        updates['visionAnswers.allAnswered'] = true;
        updates['readyForMVP'] = true;
        console.log('[AI Chat] ✅ All 3 vision answers complete - ready for MVP generation');
      }
      if (Object.keys(updates).length > 0) {
        updates['visionAnswers.updatedAt'] = new Date().toISOString();
        await adminDb.collection('projects').doc(projectId).set(updates, { merge: true })
          .catch((error) => {
            console.error('[ai/chat] Failed to store vision answers', error);
          });
      }
    }
    // Best-effort: append this turn to the persisted conversation history
    // NOTE(review): redundant with the full-history save further down — see
    // the function-level note; keep only one persistence path.
    appendConversation(projectId, [
      { role: 'user', content: message },
      { role: 'assistant', content: reply.reply },
    ]).catch((error) => {
      console.error('[ai/chat] Failed to append conversation history', error);
    });
    // If in collector mode, always update handoff state based on actual project context
    // This ensures the checklist updates even if AI doesn't return collectorHandoff
    if (resolvedMode === 'collector_mode') {
      // Derive handoff state from actual project context
      const hasDocuments = (context.knowledgeSummary.bySourceType['imported_document'] ?? 0) > 0;
      const documentCount = context.knowledgeSummary.bySourceType['imported_document'] ?? 0;
      const githubConnected = !!context.project.githubRepo;
      const extensionLinked = context.project.extensionLinked ?? false;
      // Check if AI indicated readiness (from reply if provided, otherwise check reply text)
      let readyForExtraction = reply.collectorHandoff?.readyForExtraction ?? false;
      // Fallback: If AI says certain phrases, assume user confirmed readiness
      // IMPORTANT: These phrases must be SPECIFIC to avoid false positives
      if (!readyForExtraction && reply.reply) {
        const replyLower = reply.reply.toLowerCase();
        // Check for explicit analysis/digging phrases (not just "perfect!")
        const analysisKeywords = ['analyze', 'analyzing', 'digging', 'extraction', 'processing'];
        const hasAnalysisKeyword = analysisKeywords.some(keyword => replyLower.includes(keyword));
        // Only trigger if AI mentions BOTH readiness AND analysis action
        if (hasAnalysisKeyword) {
          const confirmPhrases = [
            'let me analyze what you',
            'i\'ll start digging into',
            'i\'m starting the analysis',
            'running the extraction',
            'processing what you\'ve shared',
          ];
          readyForExtraction = confirmPhrases.some(phrase => replyLower.includes(phrase));
          if (readyForExtraction) {
            console.log(`[AI Chat] Detected readiness from AI reply text: "${reply.reply.substring(0, 100)}"`);
          }
        }
      }
      const handoff: CollectorPhaseHandoff = {
        phase: 'collector',
        readyForNextPhase: readyForExtraction,
        // Heuristic confidence: high only when readiness was detected.
        confidence: readyForExtraction ? 0.9 : 0.5,
        confirmed: {
          hasDocuments,
          documentCount,
          githubConnected,
          githubRepo: context.project.githubRepo ?? undefined,
          extensionLinked,
        },
        uncertain: {
          extensionDeclined: reply.collectorHandoff?.extensionDeclined ?? false,
          noGithubYet: reply.collectorHandoff?.noGithubYet ?? false,
        },
        missing: [],
        questionsForUser: [],
        sourceEvidence: [],
        version: '1.0',
        timestamp: new Date().toISOString(),
      };
      // Persist to project phaseData
      // NOTE(review): same set+merge dotted-key caveat as the vision-answer
      // writes above.
      await adminDb.collection('projects').doc(projectId).set(
        {
          'phaseData.phaseHandoffs.collector': handoff,
        },
        { merge: true }
      ).catch((error) => {
        console.error('[ai/chat] Failed to persist collector handoff', error);
      });
      console.log(`[AI Chat] Collector handoff persisted:`, {
        hasDocuments: handoff.confirmed.hasDocuments,
        githubConnected: handoff.confirmed.githubConnected,
        extensionLinked: handoff.confirmed.extensionLinked,
        readyForExtraction: handoff.readyForNextPhase,
      });
      // Auto-transition to extraction phase if ready
      if (handoff.readyForNextPhase) {
        console.log(`[AI Chat] Collector complete - triggering backend extraction`);
        // Mark collector as complete
        await adminDb.collection('projects').doc(projectId).update({
          'phaseData.collectorCompletedAt': new Date().toISOString(),
        }).catch((error) => {
          console.error('[ai/chat] Failed to mark collector complete', error);
        });
        // Trigger backend extraction (async - don't await)
        // NOTE(review): the dynamic import() promise itself has no .catch —
        // a failed module load would surface as an unhandled rejection.
        import('@/lib/server/backend-extractor').then(({ runBackendExtractionForProject }) => {
          runBackendExtractionForProject(projectId).catch((error) => {
            console.error(`[AI Chat] Backend extraction failed for project ${projectId}:`, error);
          });
        });
      }
    }
    // Handle extraction review → vision phase transition
    if (resolvedMode === 'extraction_review_mode') {
      // Check if AI indicated extraction is approved and ready for vision
      let readyForVision = reply.extractionReviewHandoff?.readyForVision ?? false;
      // Fallback: Check reply text for approval phrases
      if (!readyForVision && reply.reply) {
        const replyLower = reply.reply.toLowerCase();
        // Check for vision transition phrases
        const visionKeywords = ['vision', 'mvp', 'roadmap', 'plan'];
        const hasVisionKeyword = visionKeywords.some(keyword => replyLower.includes(keyword));
        if (hasVisionKeyword) {
          const confirmPhrases = [
            'ready to move to',
            'ready for vision',
            'let\'s move to vision',
            'moving to vision',
            'great! let\'s define',
            'perfect! now let\'s',
          ];
          readyForVision = confirmPhrases.some(phrase => replyLower.includes(phrase));
          if (readyForVision) {
            console.log(`[AI Chat] Detected vision readiness from AI reply text: "${reply.reply.substring(0, 100)}"`);
          }
        }
      }
      if (readyForVision) {
        console.log(`[AI Chat] Extraction review complete - transitioning to vision phase`);
        // Mark extraction review as complete and transition to vision
        await adminDb.collection('projects').doc(projectId).update({
          currentPhase: 'vision',
          phaseStatus: 'in_progress',
          'phaseData.extractionReviewCompletedAt': new Date().toISOString(),
        }).catch((error) => {
          console.error('[ai/chat] Failed to transition to vision phase', error);
        });
      }
    }
    // Save conversation history
    const newConversationHistory = [
      ...conversationHistory,
      {
        role: 'user' as const,
        content: message,
        createdAt: new Date().toISOString(),
      },
      {
        role: 'assistant' as const,
        content: reply.reply,
        createdAt: new Date().toISOString(),
      },
    ];
    await adminDb.collection('chat_conversations').doc(projectId).set(
      {
        projectId,
        userId: projectData.userId,
        messages: newConversationHistory,
        updatedAt: new Date().toISOString(),
      },
      { merge: true }
    ).catch((error) => {
      console.error('[ai/chat] Failed to save conversation history', error);
    });
    console.log(`[AI Chat] Conversation history saved (${newConversationHistory.length} total messages)`);
    // Determine which artifacts were used
    const artifactsUsed = determineArtifactsUsed(context);
    // Log successful interaction
    logProjectEvent({
      projectId,
      userId: projectData.userId ?? null,
      eventType: 'chat_interaction',
      mode: resolvedMode,
      phase: projectData.currentPhase ?? null,
      artifactsUsed,
      usedVectorSearch: context.retrievedChunks.length > 0,
      vectorChunkCount: context.retrievedChunks.length,
      promptVersion: '2.0', // Updated with vector search
      modelUsed: process.env.VERTEX_AI_MODEL || 'gemini-3-pro-preview',
      success: true,
      errorMessage: null,
      metadata: {
        knowledgeCount: context.knowledgeSummary.totalCount,
        extractionCount: context.extractionSummary.totalCount,
        hasGithubRepo: !!context.repositoryAnalysis,
      },
    }).catch((err) => console.error('[ai/chat] Failed to log event:', err));
    return NextResponse.json({
      reply: reply.reply,
      mode: resolvedMode,
      projectPhase: projectData.currentPhase ?? null,
      artifactsUsed,
      usedVectorSearch: context.retrievedChunks.length > 0,
    });
  } catch (error) {
    console.error('[ai/chat] Error handling chat request', error);
    // Log error (best-effort) - extract projectId from request body if available
    // NOTE(review): nothing visible in this handler attaches a projectId
    // property to thrown errors, so this branch appears effectively dead —
    // confirm, or capture projectId before the try block instead.
    const errorProjectId = typeof (error as { projectId?: string })?.projectId === 'string'
      ? (error as { projectId: string }).projectId
      : null;
    if (errorProjectId) {
      logProjectEvent({
        projectId: errorProjectId,
        userId: null,
        eventType: 'error',
        mode: null,
        phase: null,
        artifactsUsed: [],
        usedVectorSearch: false,
        promptVersion: '2.0',
        modelUsed: process.env.VERTEX_AI_MODEL || 'gemini-3-pro-preview',
        success: false,
        errorMessage: error instanceof Error ? error.message : String(error),
      }).catch((err) => console.error('[ai/chat] Failed to log error:', err));
    }
    return NextResponse.json(
      {
        error: 'Failed to process chat message',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,37 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
export async function POST(request: Request) {
try {
const url = new URL(request.url);
const body = await request
.json()
.catch(() => ({ projectId: url.searchParams.get('projectId') }));
const projectId = (body?.projectId ?? url.searchParams.get('projectId') ?? '').trim();
if (!projectId) {
return NextResponse.json(
{ error: 'projectId is required' },
{ status: 400 },
);
}
const adminDb = getAdminDb();
const docRef = adminDb.collection('chat_conversations').doc(projectId);
await docRef.delete();
return NextResponse.json({ success: true });
} catch (error) {
console.error('[ai/conversation/reset] Failed to reset conversation', error);
return NextResponse.json(
{
error: 'Failed to reset conversation',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,54 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
// Roles stored in chat_conversations message arrays.
type StoredMessageRole = 'user' | 'assistant';
// One persisted chat message.
// NOTE(review): createdAt is typed as a serialized Firestore Timestamp
// ({_seconds,_nanoseconds}), but the chat route writes ISO strings — confirm
// which shape actually lands in Firestore and unify.
type ConversationMessage = {
  role: StoredMessageRole;
  content: string;
  createdAt?: { _seconds: number; _nanoseconds: number };
};
// GET response payload: ordered message list (empty when no document exists).
type ConversationResponse = {
  messages: ConversationMessage[];
};
export async function GET(request: Request) {
try {
const url = new URL(request.url);
const projectId = (url.searchParams.get('projectId') ?? '').trim();
if (!projectId) {
return NextResponse.json(
{ error: 'projectId is required' },
{ status: 400 },
);
}
const adminDb = getAdminDb();
const docRef = adminDb.collection('chat_conversations').doc(projectId);
const snapshot = await docRef.get();
if (!snapshot.exists) {
const empty: ConversationResponse = { messages: [] };
return NextResponse.json(empty);
}
const data = snapshot.data() as { messages?: ConversationMessage[] };
const messages = Array.isArray(data.messages) ? data.messages : [];
const response: ConversationResponse = { messages };
return NextResponse.json(response);
} catch (error) {
console.error('[ai/conversation] Failed to load conversation', error);
return NextResponse.json(
{
error: 'Failed to load conversation',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,159 @@
/**
* Import ChatGPT conversations from exported conversations.json file
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
// Shape of one message node inside a ChatGPT export's `mapping` tree.
interface ChatGPTMessage {
  id: string;
  author: {
    role: string; // e.g. 'user', 'assistant', 'system'
    name?: string;
  };
  content: {
    content_type: string; // e.g. 'text'; other content types may lack parts
    parts: string[];
  };
  create_time?: number; // Unix epoch seconds
  update_time?: number; // Unix epoch seconds
}
// Top-level conversation record from an exported conversations.json.
// `mapping` is a parent/child tree keyed by node id, not an ordered list —
// consumers must traverse/sort it to recover message order.
interface ChatGPTConversation {
  id: string;
  title: string;
  create_time: number; // Unix epoch seconds
  update_time?: number;
  mapping: Record<string, {
    id: string;
    message?: ChatGPTMessage; // absent on the synthetic root node
    parent?: string;
    children: string[];
  }>;
}
/**
 * Imports ChatGPT conversations (exported conversations.json format) into the
 * `chatgptImports` collection, keyed to the Firebase-authenticated user.
 *
 * Body: { conversations: ChatGPTConversation[], projectId?: string }
 * Returns the count and a summary of each imported conversation.
 */
export async function POST(request: Request) {
  try {
    // Authenticate user via Firebase ID token in the Authorization header.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const body = await request.json();
    const { conversations, projectId } = body;
    if (!conversations || !Array.isArray(conversations)) {
      return NextResponse.json({ error: 'Invalid conversations data' }, { status: 400 });
    }
    console.log(`[ChatGPT Import] Processing ${conversations.length} conversations for user ${userId}`);
    const importedConversations: Array<{ id: string; title: string; messageCount: number }> = [];
    // Firestore batches cap at 500 writes, and a committed batch cannot be
    // reused — start a fresh batch after each commit.
    let batch = adminDb.batch();
    let batchCount = 0;
    for (const conv of conversations) {
      try {
        const conversation = conv as ChatGPTConversation;
        // Extract messages from the mapping tree.
        const messages: Array<{
          role: string;
          content: string;
          timestamp?: number;
        }> = [];
        // Skip system messages and nodes without textual parts (the root node
        // has no message; non-text content types may omit `parts` entirely).
        for (const node of Object.values(conversation.mapping ?? {})) {
          const msg = node.message;
          if (!msg || msg.author.role === 'system') continue;
          if (!Array.isArray(msg.content?.parts)) continue;
          const content = msg.content.parts.join('\n');
          if (content.trim()) {
            messages.push({
              role: msg.author.role,
              content,
              timestamp: msg.create_time,
            });
          }
        }
        // Sort messages by timestamp when available; otherwise keep mapping order.
        messages.sort((a, b) =>
          a.timestamp && b.timestamp ? a.timestamp - b.timestamp : 0,
        );
        // Queue the Firestore write.
        const importRef = adminDb.collection('chatgptImports').doc();
        batch.set(importRef, {
          userId,
          projectId: projectId || null,
          conversationId: conversation.id,
          title: conversation.title || 'Untitled Conversation',
          messageCount: messages.length,
          messages,
          createdAt: conversation.create_time
            ? new Date(conversation.create_time * 1000).toISOString()
            : new Date().toISOString(),
          importedAt: new Date().toISOString(),
        });
        batchCount++;
        importedConversations.push({
          id: conversation.id,
          title: conversation.title,
          messageCount: messages.length,
        });
        // Firestore batch limit is 500 operations.
        if (batchCount >= 500) {
          await batch.commit();
          // BUG FIX: the original reused the committed batch, which throws at
          // runtime for imports larger than 500 conversations.
          batch = adminDb.batch();
          batchCount = 0;
        }
      } catch (error) {
        console.error(`[ChatGPT Import] Error processing conversation ${conv.id}:`, error);
        // Best-effort: continue with the remaining conversations.
      }
    }
    // Commit remaining batch
    if (batchCount > 0) {
      await batch.commit();
    }
    console.log(`[ChatGPT Import] Successfully imported ${importedConversations.length} conversations`);
    return NextResponse.json({
      success: true,
      imported: importedConversations.length,
      conversations: importedConversations,
    });
  } catch (error) {
    console.error('[ChatGPT Import] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to import conversations',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,178 @@
/**
* Import ChatGPT conversations using OpenAI's Conversations API
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
const OPENAI_API_URL = 'https://api.openai.com/v1/conversations';
/**
 * Imports a single ChatGPT conversation by id via OpenAI's Conversations API
 * and stores it (including the raw upstream payload) in `chatgptImports`.
 *
 * NOTE(review): the user's OpenAI API key arrives in the request body and is
 * forwarded upstream. It is not persisted here, but it transits this server
 * in plaintext JSON — confirm this flow is acceptable.
 */
export async function POST(request: Request) {
  try {
    // Authenticate user
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const { conversationId, openaiApiKey, projectId } = await request.json();
    if (!conversationId) {
      return NextResponse.json({ error: 'Conversation ID is required' }, { status: 400 });
    }
    if (!openaiApiKey) {
      return NextResponse.json({ error: 'OpenAI API key is required' }, { status: 400 });
    }
    // Fetch conversation from OpenAI
    console.log(`[ChatGPT Import] Fetching conversation: ${conversationId}`);
    const openaiResponse = await fetch(`${OPENAI_API_URL}/${conversationId}`, {
      method: 'GET',
      headers: {
        'Authorization': `Bearer ${openaiApiKey}`,
        'Content-Type': 'application/json',
      },
    });
    if (!openaiResponse.ok) {
      const errorText = await openaiResponse.text();
      console.error('[ChatGPT Import] OpenAI API error:', openaiResponse.status, errorText);
      // Propagate the upstream status code so the client can distinguish
      // e.g. bad key (401) from missing conversation (404).
      return NextResponse.json(
        {
          error: 'Failed to fetch conversation from OpenAI',
          details: errorText,
          status: openaiResponse.status,
        },
        { status: openaiResponse.status }
      );
    }
    const conversationData = await openaiResponse.json();
    console.log('[ChatGPT Import] Conversation fetched successfully');
    // Extract relevant information
    const messages = conversationData.messages || [];
    const title = conversationData.title || 'Untitled Conversation';
    // NOTE(review): created_at from the API is presumably a Unix epoch number,
    // while the fallback is an ISO string — the stored createdAt field is
    // mixed-type. Confirm consumers handle both shapes.
    const createdAt = conversationData.created_at || new Date().toISOString();
    // Store in Firestore
    const chatGPTImportRef = adminDb.collection('chatgptImports').doc();
    await chatGPTImportRef.set({
      userId,
      projectId: projectId || null,
      conversationId,
      title,
      createdAt,
      importedAt: new Date().toISOString(),
      messageCount: messages.length,
      // Normalize both export-style ({author, content.parts}) and flat
      // ({role, content}) message shapes into one record format.
      messages: messages.map((msg: any) => ({
        role: msg.role || msg.author?.role || 'unknown',
        content: msg.content?.parts?.join('\n') || msg.content || '',
        timestamp: msg.create_time || msg.timestamp || null,
      })),
      rawData: conversationData, // Store full response for future reference
    });
    // If projectId provided, update project with ChatGPT reference
    if (projectId) {
      const projectRef = adminDb.collection('projects').doc(projectId);
      await projectRef.update({
        chatgptConversationId: conversationId,
        chatgptTitle: title,
        chatgptImportedAt: new Date().toISOString(),
      });
      console.log(`[ChatGPT Import] Updated project ${projectId} with conversation reference`);
    }
    return NextResponse.json({
      success: true,
      importId: chatGPTImportRef.id,
      conversationId,
      title,
      messageCount: messages.length,
      // First 5 messages, truncated to 200 chars each, as a UI preview.
      messages: messages.slice(0, 5).map((msg: any) => ({
        role: msg.role || msg.author?.role || 'unknown',
        preview: (msg.content?.parts?.join('\n') || msg.content || '').substring(0, 200) + '...',
      })),
    });
  } catch (error) {
    console.error('[ChatGPT Import] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to import ChatGPT conversation',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
// GET endpoint to list imported conversations
/**
 * Lists ChatGPT imports for the authenticated user, newest first (max 50).
 * Optional `projectId` query parameter narrows results to one project.
 * The bulky `rawData` payload is stripped from list responses.
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    // Get projectId from query params if provided
    const url = new URL(request.url);
    const projectId = url.searchParams.get('projectId');
    // Build the query without the previous `as any` cast: where() already
    // returns a Query, so conditional chaining keeps full typing.
    const byUser = adminDb
      .collection('chatgptImports')
      .where('userId', '==', userId);
    const query = projectId ? byUser.where('projectId', '==', projectId) : byUser;
    // NOTE(review): equality filter(s) combined with orderBy('importedAt')
    // require a composite Firestore index — confirm it is deployed.
    const snapshot = await query.orderBy('importedAt', 'desc').limit(50).get();
    const imports = snapshot.docs.map(doc => ({
      id: doc.id,
      ...doc.data(),
      // Don't send full rawData in list view
      rawData: undefined,
    }));
    return NextResponse.json({ imports });
  } catch (error) {
    console.error('[ChatGPT Import] List error:', error);
    return NextResponse.json(
      {
        error: 'Failed to list imports',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,60 @@
import { NextRequest, NextResponse } from "next/server";
import { GoogleGenAI } from "@google/genai";
// Vertex AI configuration, overridable via environment variables.
const VERTEX_AI_MODEL = process.env.VERTEX_AI_MODEL || 'gemini-3-pro-preview';
// NOTE(review): hard-coded GCP project id as a fallback — consider failing
// fast when VERTEX_AI_PROJECT_ID is unset so requests don't silently run
// against (and bill) the wrong project.
const VERTEX_PROJECT_ID = process.env.VERTEX_AI_PROJECT_ID || 'gen-lang-client-0980079410';
const VERTEX_LOCATION = process.env.VERTEX_AI_LOCATION || 'global';
// Module-level client: constructed once per server instance and reused
// across requests.
const genAI = new GoogleGenAI({
  project: VERTEX_PROJECT_ID,
  location: VERTEX_LOCATION,
  vertexai: true,
});
/**
 * Generates a concise 1-2 sentence summary of a document via Vertex AI Gemini.
 *
 * Body: { content: string, title?: string }
 * Returns: { summary: string } or an error payload.
 */
export async function POST(request: NextRequest) {
  try {
    const { content, title } = await request.json();
    // Reject missing or non-string content up front: the substring/length
    // calls below would otherwise throw and surface as an opaque 500.
    if (!content || typeof content !== 'string') {
      return NextResponse.json({ error: "Content is required" }, { status: 400 });
    }
    // Avoid interpolating `undefined` into the prompt when no title is given.
    const safeTitle =
      typeof title === 'string' && title.trim() ? title : 'Untitled document';
    // Truncate content if it's too long (Gemini has token limits)
    const maxContentLength = 30000; // ~30k characters
    const truncatedContent = content.length > maxContentLength
      ? content.substring(0, maxContentLength) + "..."
      : content;
    const prompt = `Read this document titled "${safeTitle}" and provide a concise 1-2 sentence summary that captures the main topic and key points. Be specific and actionable.
Document content:
${truncatedContent}
Summary:`;
    const result = await genAI.models.generateContent({
      model: VERTEX_AI_MODEL,
      contents: [{
        role: 'user',
        parts: [{ text: prompt }],
      }],
      config: {
        temperature: 0.3, // Lower temperature for consistent summaries
      },
    });
    // Defensively unwrap the first candidate; fall back to a fixed message.
    const summary = result.candidates?.[0]?.content?.parts?.[0]?.text?.trim() || 'Summary unavailable';
    return NextResponse.json({ summary });
  } catch (error) {
    console.error("Error generating summary:", error);
    return NextResponse.json(
      {
        error: "Failed to generate summary",
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,229 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminAuth, adminDb } from '@/lib/firebase/admin';
/**
 * POST — imports Cursor chat history (composers + message "bubbles") pushed
 * by the cursor-monitor extension into the project's `cursorConversations`
 * subcollection. Authenticated with an API key (`Authorization: Bearer …`).
 *
 * Body: { projectId, workspacePath?, githubUrl?, conversations }.
 * Returns counts of imported conversations/messages; 401/403/404 on
 * auth/ownership failures, 400 on missing fields.
 */
export async function POST(request: NextRequest) {
  try {
    // --- API-key authentication ------------------------------------------
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json(
        { error: 'Unauthorized - Missing API key' },
        { status: 401 }
      );
    }
    const apiKey = authHeader.substring(7);

    // Resolve the key to a user.
    const apiKeysSnapshot = await adminDb
      .collection('apiKeys')
      .where('key', '==', apiKey)
      .where('isActive', '==', true)
      .limit(1)
      .get();
    if (apiKeysSnapshot.empty) {
      return NextResponse.json(
        { error: 'Invalid API key' },
        { status: 401 }
      );
    }
    const userId = apiKeysSnapshot.docs[0].data().userId;
    if (!userId) {
      return NextResponse.json(
        { error: 'API key not associated with user' },
        { status: 401 }
      );
    }

    // --- Request validation ----------------------------------------------
    const body = await request.json();
    const { projectId, workspacePath, githubUrl, conversations } = body;
    if (!projectId || !conversations) {
      return NextResponse.json(
        { error: 'Missing required fields: projectId, conversations' },
        { status: 400 }
      );
    }

    // The key's user must own the target project.
    const projectRef = adminDb.collection('projects').doc(projectId);
    const projectDoc = await projectRef.get();
    if (!projectDoc.exists) {
      return NextResponse.json(
        { error: 'Project not found' },
        { status: 404 }
      );
    }
    if (projectDoc.data()?.userId !== userId) {
      return NextResponse.json(
        { error: 'Access denied to this project' },
        { status: 403 }
      );
    }

    // --- Import ------------------------------------------------------------
    const { composers, workspaceFiles, totalGenerations } = conversations;
    let conversationCount = 0;
    let totalMessagesWritten = 0;

    // Heuristic relevance keywords for scoring conversation names.
    // TODO: Make this configurable per project
    const projectKeywords = ['vibn', 'project', 'extension', 'collector', 'cursor-monitor'];
    const excludeKeywords = ['nhl', 'hockey', 'market', 'transaction'];

    // Converts a timestamp to ISO while tolerating missing/invalid values:
    // the original `new Date(x).toISOString()` threw mid-import on bad data.
    const toIso = (value: unknown): string | null => {
      const d = new Date(value as string | number);
      return isNaN(d.getTime()) ? null : d.toISOString();
    };

    // Each "head" composer becomes one conversation document.
    for (const composer of composers || []) {
      if (composer.type !== 'head') continue; // Only process head composers

      const conversationId = `cursor-${composer.composerId}`;
      const conversationRef = adminDb
        .collection('projects')
        .doc(projectId)
        .collection('cursorConversations')
        .doc(conversationId);

      const name = composer.name || 'Untitled Conversation';
      const nameLower = name.toLowerCase();

      // Simple relevance scoring: +2 per project keyword in the name,
      // -3 per exclude keyword, +1 per workspace file the name mentions.
      let relevanceScore = 0;
      for (const keyword of projectKeywords) {
        if (nameLower.includes(keyword)) relevanceScore += 2;
      }
      for (const keyword of excludeKeywords) {
        if (nameLower.includes(keyword)) relevanceScore -= 3;
      }
      if (workspaceFiles && Array.isArray(workspaceFiles)) {
        for (const file of workspaceFiles) {
          if (nameLower.includes(file.toLowerCase())) relevanceScore += 1;
        }
      }

      const bubbles = Array.isArray(composer.bubbles) ? composer.bubbles : [];
      const messageCount = bubbles.length;

      // Write conversation document first.
      // Firestore rejects `undefined` field values, so optional inputs are
      // coalesced to null before writing.
      await conversationRef.set({
        userId,
        projectId,
        conversationId,
        composerId: composer.composerId,
        name,
        createdAt: toIso(composer.createdAt),
        lastUpdatedAt: toIso(composer.lastUpdatedAt),
        unifiedMode: composer.unifiedMode || false,
        forceMode: composer.forceMode || false,
        workspacePath: workspacePath ?? null,
        githubUrl: githubUrl || null,
        importedAt: new Date().toISOString(),
        relevanceScore, // For filtering
        messageCount,
        metadata: {
          source: 'cursor-monitor-extension',
          composerType: composer.type,
        }
      });

      // Store messages in chunks: Firestore batches cap at 500 operations.
      const BATCH_SIZE = 400; // Leave room for overhead
      for (let i = 0; i < bubbles.length; i += BATCH_SIZE) {
        const batch = adminDb.batch();
        const chunk = bubbles.slice(i, i + BATCH_SIZE);
        let written = 0;
        for (const bubble of chunk) {
          if (!bubble.bubbleId) continue; // .doc() throws on a missing id
          const messageRef = conversationRef
            .collection('messages')
            .doc(bubble.bubbleId);
          batch.set(messageRef, {
            bubbleId: bubble.bubbleId,
            type: bubble.type, // 1 = user, 2 = AI
            role: bubble.type === 1 ? 'user' : bubble.type === 2 ? 'assistant' : 'unknown',
            text: bubble.text || '',
            createdAt: bubble.createdAt ?? null,
            requestId: bubble.requestId ?? null, // undefined would make Firestore reject the whole batch
            attachedFiles: bubble.attachedFiles || []
          });
          written++;
        }
        await batch.commit();
        totalMessagesWritten += written;
        console.log(`✅ Wrote ${chunk.length} messages (${i + chunk.length}/${composer.bubbles.length}) for ${name}`);
      }
      conversationCount++;
    }

    // Store workspace metadata for reference.
    const workspaceMetaRef = adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorData')
      .doc('workspace-meta');
    await workspaceMetaRef.set({
      workspacePath: workspacePath ?? null,
      githubUrl: githubUrl ?? null,
      workspaceFiles: workspaceFiles || [],
      totalGenerations: totalGenerations || 0,
      importedAt: new Date().toISOString(),
      lastBatchImportedAt: new Date().toISOString(),
    }, { merge: true });

    console.log(`✅ Imported ${conversationCount} conversations to project ${projectId}`);

    const workspaceFilesCount = conversations.workspaceFiles?.length || workspaceFiles?.length || 0;
    const generationsCount = conversations.totalGenerations || totalGenerations || 0;
    return NextResponse.json({
      success: true,
      conversationCount,
      totalMessages: totalMessagesWritten,
      workspaceFilesCount,
      totalGenerations: generationsCount,
      message: `Successfully imported ${conversationCount} conversations with ${totalMessagesWritten} messages`
    });
  } catch (error) {
    console.error('Error importing Cursor conversations:', error);
    return NextResponse.json(
      { error: 'Failed to import conversations', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,54 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
// TEMPORARY: For debugging/testing only - no auth required
/**
 * GET — deletes every document in the project's `cursorConversations`
 * subcollection plus the aggregated `cursorData/messages` document.
 * Query param: projectId (required).
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json(
        { error: 'Missing projectId' },
        { status: 400 }
      );
    }

    const conversationsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .get();

    // Firestore batches are limited to 500 operations; the previous single
    // batch failed whenever more than ~500 conversations existed, so delete
    // in chunks instead.
    const BATCH_SIZE = 450; // headroom under the 500-op limit
    const docs = conversationsSnapshot.docs;
    for (let i = 0; i < docs.length; i += BATCH_SIZE) {
      const batch = adminDb.batch();
      for (const doc of docs.slice(i, i + BATCH_SIZE)) {
        batch.delete(doc.ref);
      }
      await batch.commit();
    }

    // Also delete the messages data document.
    // NOTE(review): per-conversation `messages` subcollections are NOT
    // removed here — deleting a parent doc does not delete its
    // subcollections in Firestore. Confirm whether that is intended.
    await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorData')
      .doc('messages')
      .delete();

    return NextResponse.json({
      success: true,
      deletedCount: conversationsSnapshot.size,
      message: 'All cursor conversations cleared'
    });
  } catch (error) {
    console.error('Error clearing cursor conversations:', error);
    return NextResponse.json(
      { error: 'Failed to clear conversations', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,192 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * POST — groups a project's imported Cursor conversations into work sessions
 * (bucketed by calendar date, then split at idle gaps) and tags each
 * conversation document with its session id and a heuristic project label.
 *
 * Query params:
 *   projectId (required)
 *   gap       (optional, minutes; default 30) — max idle time within a session
 */
export async function POST(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    const sessionGapMinutes = parseInt(request.nextUrl.searchParams.get('gap') || '30', 10); // 30 min default
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    // Load all conversations in chronological order.
    const conversationsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .orderBy('createdAt', 'asc')
      .get();
    const conversations = conversationsSnapshot.docs.map((doc: FirebaseFirestore.QueryDocumentSnapshot) => {
      const data = doc.data();
      return {
        id: doc.id,
        ref: doc.ref,
        name: data.name,
        createdAt: new Date(data.createdAt),
        relevanceScore: data.relevanceScore || 0
      };
    });

    // Step 1: bucket conversations by calendar date (UTC).
    const conversationsByDate: Record<string, typeof conversations> = {};
    for (const conv of conversations) {
      const dateKey = conv.createdAt.toISOString().split('T')[0]; // YYYY-MM-DD
      if (!conversationsByDate[dateKey]) {
        conversationsByDate[dateKey] = [];
      }
      conversationsByDate[dateKey].push(conv);
    }

    // Step 2: within each date, split into sessions wherever the gap between
    // consecutive conversations exceeds sessionGapMinutes.
    const sessions: any[] = [];
    let sessionId = 0;
    for (const [date, dayConversations] of Object.entries(conversationsByDate)) {
      let currentSession: any = null;
      for (const conv of dayConversations) {
        const gapMinutes = currentSession
          ? (conv.createdAt.getTime() - currentSession.endTime.getTime()) / (1000 * 60)
          : 0;
        if (currentSession && gapMinutes <= sessionGapMinutes) {
          // Same session — extend it.
          currentSession.conversations.push(conv);
          currentSession.endTime = conv.createdAt;
        } else {
          // Close the current session (if any) and start a new one.
          if (currentSession) {
            sessions.push(currentSession);
          }
          sessionId++;
          currentSession = {
            sessionId,
            date,
            startTime: conv.createdAt,
            endTime: conv.createdAt,
            conversations: [conv]
          };
        }
      }
      // Add last session of the day.
      if (currentSession) {
        sessions.push(currentSession);
      }
    }

    // Step 3: classify each session by keywords in its conversation names.
    const projectKeywords = ['vibn', 'extension', 'collector', 'cursor-monitor'];
    const excludeKeywords = ['nhl', 'hockey', 'market', 'transaction'];
    const analyzedSessions = sessions.map(session => {
      const allNames = session.conversations.map((c: any) => c.name.toLowerCase()).join(' ');
      let projectTag = 'unknown';
      let confidence = 'low';
      // Strong exclude signals win first.
      for (const keyword of excludeKeywords) {
        if (allNames.includes(keyword)) {
          projectTag = 'other';
          confidence = 'high';
          break;
        }
      }
      // If not excluded, check for vibn signals.
      if (projectTag === 'unknown') {
        for (const keyword of projectKeywords) {
          if (allNames.includes(keyword)) {
            projectTag = 'vibn';
            confidence = 'high';
            break;
          }
        }
      }
      // Weak fallback: the generic word "project".
      if (projectTag === 'unknown' && allNames.includes('project')) {
        projectTag = 'vibn';
        confidence = 'medium';
      }
      return {
        ...session,
        projectTag,
        confidence,
        conversationCount: session.conversations.length
      };
    });

    // Step 4: persist tags. Firestore batches cap at 500 operations, and the
    // previous single batch failed for larger imports — commit in chunks.
    const updates: Array<{ ref: FirebaseFirestore.DocumentReference; data: Record<string, unknown> }> = [];
    for (const session of analyzedSessions) {
      for (const conv of session.conversations) {
        updates.push({
          ref: conv.ref,
          data: {
            sessionId: session.sessionId,
            sessionDate: session.date,
            sessionProject: session.projectTag,
            sessionConfidence: session.confidence
          }
        });
      }
    }
    const BATCH_SIZE = 450; // headroom under the 500-op limit
    for (let i = 0; i < updates.length; i += BATCH_SIZE) {
      const batch = adminDb.batch();
      for (const update of updates.slice(i, i + BATCH_SIZE)) {
        batch.update(update.ref, update.data);
      }
      await batch.commit();
    }
    const updateCount = updates.length;

    // Return summary.
    const summary = {
      totalConversations: conversations.length,
      totalSessions: sessions.length,
      sessionGapMinutes,
      projectBreakdown: {
        vibn: analyzedSessions.filter(s => s.projectTag === 'vibn').length,
        other: analyzedSessions.filter(s => s.projectTag === 'other').length,
        unknown: analyzedSessions.filter(s => s.projectTag === 'unknown').length
      },
      conversationBreakdown: {
        vibn: analyzedSessions.filter(s => s.projectTag === 'vibn').reduce((sum, s) => sum + s.conversationCount, 0),
        other: analyzedSessions.filter(s => s.projectTag === 'other').reduce((sum, s) => sum + s.conversationCount, 0),
        unknown: analyzedSessions.filter(s => s.projectTag === 'unknown').reduce((sum, s) => sum + s.conversationCount, 0)
      },
      sampleSessions: analyzedSessions.slice(0, 10).map(s => ({
        sessionId: s.sessionId,
        date: s.date,
        conversationCount: s.conversationCount,
        projectTag: s.projectTag,
        confidence: s.confidence,
        conversationNames: s.conversations.slice(0, 3).map((c: any) => c.name)
      }))
    };
    return NextResponse.json({
      success: true,
      updatedConversations: updateCount,
      ...summary
    });
  } catch (error) {
    console.error('Error tagging sessions:', error);
    return NextResponse.json(
      { error: 'Failed to tag sessions', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,63 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
/**
 * POST — debug helper that appends a fixed user/assistant message pair to a
 * project's `chat_conversations` document.
 * Body: { projectId: string }. Returns { success: true } or 400/500.
 */
export async function POST(request: Request) {
  try {
    const body = await request.json().catch(() => ({}));
    // Guard the type before trimming: the previous `(body.projectId ?? '').trim()`
    // threw a TypeError (→ 500) when projectId was a non-string; a malformed
    // body should be a 400 instead.
    const projectId = typeof body.projectId === 'string' ? body.projectId.trim() : '';
    if (!projectId) {
      return NextResponse.json(
        { error: 'projectId is required' },
        { status: 400 },
      );
    }

    const adminDb = getAdminDb();
    const docRef = adminDb.collection('chat_conversations').doc(projectId);

    // Read-modify-write inside a transaction so concurrent appends don't
    // clobber each other's messages array.
    await adminDb.runTransaction(async (tx) => {
      const snapshot = await tx.get(docRef);
      const existing = (snapshot.exists ? (snapshot.data()?.messages as unknown[]) : []) ?? [];
      const now = new Date().toISOString();
      const newMessages = [
        {
          role: 'user' as const,
          content: '[debug] test user message',
          createdAt: now,
        },
        {
          role: 'assistant' as const,
          content: '[debug] test assistant reply',
          createdAt: now,
        },
      ];
      tx.set(
        docRef,
        {
          projectId,
          messages: [...existing, ...newMessages],
          updatedAt: FieldValue.serverTimestamp(),
        },
        { merge: true },
      );
    });

    return NextResponse.json({ success: true });
  } catch (error) {
    console.error('[debug/append-conversation] Failed to append messages', error);
    return NextResponse.json(
      {
        error: 'Failed to append debug conversation messages',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,88 @@
/**
* Debug API to check session links
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
 * GET — splits the authenticated user's sessions into linked/unlinked (by
 * presence of projectId) and lists the user's projects, so broken
 * session-to-project associations can be spotted.
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(idToken)).uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const sessionsSnapshot = await adminDb
      .collection('sessions')
      .where('userId', '==', userId)
      .get();

    // Partition sessions by whether they carry a projectId.
    const linked: any[] = [];
    const unlinked: any[] = [];
    for (const doc of sessionsSnapshot.docs) {
      const data = doc.data();
      const sessionInfo = {
        id: doc.id,
        workspaceName: data.workspaceName || 'Unknown',
        workspacePath: data.workspacePath,
        projectId: data.projectId,
        needsProjectAssociation: data.needsProjectAssociation,
        createdAt: data.createdAt?.toDate?.() || data.createdAt,
      };
      (data.projectId ? linked : unlinked).push(sessionInfo);
    }

    const projectsSnapshot = await adminDb
      .collection('projects')
      .where('userId', '==', userId)
      .get();
    const projects = projectsSnapshot.docs.map(doc => {
      const data = doc.data();
      return {
        id: doc.id,
        name: data.productName || data.name,
        workspacePath: data.workspacePath,
      };
    });

    return NextResponse.json({
      summary: {
        totalSessions: sessionsSnapshot.size,
        linkedSessions: linked.length,
        unlinkedSessions: unlinked.length,
        totalProjects: projects.length,
      },
      linked,
      unlinked,
      projects,
    });
  } catch (error) {
    console.error('Debug check error:', error);
    return NextResponse.json(
      {
        error: 'Failed to check links',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,62 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — checks whether a project document exists and returns its summary.
 * NOTE(review): no auth check, and when the project is missing this lists up
 * to 20 projects (potentially other users') for debugging — confirm this is
 * intentional before shipping.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json(
        { error: 'Missing projectId parameter' },
        { status: 400 }
      );
    }

    const projectDoc = await adminDb.collection('projects').doc(projectId).get();

    if (!projectDoc.exists) {
      // Surface a sample of existing projects to help identify the right id.
      const allProjectsSnapshot = await adminDb.collection('projects').limit(20).get();
      const availableProjects = allProjectsSnapshot.docs.map(doc => {
        const data = doc.data();
        return {
          id: doc.id,
          name: data.name,
          userId: data.userId,
          createdAt: data.createdAt
        };
      });
      return NextResponse.json({
        exists: false,
        projectId,
        message: 'Project not found',
        availableProjects
      });
    }

    const projectData = projectDoc.data();
    return NextResponse.json({
      exists: true,
      projectId,
      project: {
        name: projectData?.name,
        userId: projectData?.userId,
        createdAt: projectData?.createdAt,
        githubRepo: projectData?.githubRepo
      }
    });
  } catch (error) {
    console.error('Error checking project:', error);
    return NextResponse.json(
      { error: 'Failed to check project', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,44 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
 * GET — lists every document in a project's `contextSources` subcollection.
 * Query param: projectId (required).
 */
export async function GET(request: Request) {
  try {
    const projectId = new URL(request.url).searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const adminDb = getAdminDb();
    const snapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('contextSources')
      .get();
    const sources = snapshot.docs.map(doc => ({ id: doc.id, ...doc.data() }));

    return NextResponse.json({
      projectId,
      count: sources.length,
      sources,
    });
  } catch (error) {
    console.error('[debug/context-sources] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch context sources',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,72 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — summarizes relevance scores across a project's imported Cursor
 * conversations, alongside the stored workspace metadata.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const projectRef = adminDb.collection('projects').doc(projectId);

    // Workspace metadata may be absent if no import has run yet.
    const workspaceMetaDoc = await projectRef
      .collection('cursorData')
      .doc('workspace-meta')
      .get();
    const workspaceMeta = workspaceMetaDoc.exists ? workspaceMetaDoc.data() : null;

    const conversationsSnapshot = await projectRef
      .collection('cursorConversations')
      .get();

    // Map once, then partition by score sign.
    const scored = conversationsSnapshot.docs.map(doc => ({
      name: doc.data().name,
      score: doc.data().relevanceScore || 0,
    }));
    const negative = scored.filter(c => c.score < 0).sort((a, b) => a.score - b.score);
    const positive = scored.filter(c => c.score > 0).sort((a, b) => b.score - a.score);
    const neutralNames = scored.filter(c => c.score === 0).map(c => c.name);

    return NextResponse.json({
      workspacePath: workspaceMeta?.workspacePath,
      githubUrl: workspaceMeta?.githubUrl,
      workspaceFilesCount: workspaceMeta?.workspaceFiles?.length || 0,
      workspaceFilesSample: (workspaceMeta?.workspaceFiles || []).slice(0, 20),
      totalGenerations: workspaceMeta?.totalGenerations || 0,
      totalConversations: conversationsSnapshot.size,
      scoreBreakdown: {
        negative: negative.length,
        neutral: neutralNames.length,
        positive: positive.length
      },
      negativeScoreConversations: negative,
      topPositiveConversations: positive.slice(0, 10),
      sampleNeutralConversations: neutralNames.slice(0, 10)
    });
  } catch (error) {
    console.error('Error analyzing conversations:', error);
    return NextResponse.json(
      { error: 'Failed to analyze', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,72 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — inspects 5 conversations in detail and reports content/file stats
 * across all imported Cursor conversations for a project.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const conversationsRef = adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations');

    // A handful of docs to inspect in detail.
    const sampleSnapshot = await conversationsRef.limit(5).get();
    const samples = sampleSnapshot.docs.map(doc => {
      const data = doc.data();
      return {
        name: data.name,
        messageCount: data.messageCount || 0,
        promptCount: data.prompts?.length || 0,
        generationCount: data.generations?.length || 0,
        filesCount: data.files?.length || 0,
        sampleFiles: (data.files || []).slice(0, 3),
        samplePrompt: data.prompts?.[0]?.text?.substring(0, 100) || 'none',
        hasContent: !!(data.prompts?.length || data.generations?.length)
      };
    });

    // Aggregate stats over the full collection.
    const allSnapshot = await conversationsRef.get();
    let totalWithContent = 0;
    let totalWithFiles = 0;
    let totalMessages = 0;
    for (const doc of allSnapshot.docs) {
      const data = doc.data();
      if (data.prompts?.length || data.generations?.length) totalWithContent++;
      if (data.files?.length) totalWithFiles++;
      totalMessages += data.messageCount || 0;
    }

    return NextResponse.json({
      totalConversations: allSnapshot.size,
      totalWithContent,
      totalWithFiles,
      totalMessages,
      samples
    });
  } catch (error) {
    console.error('Error fetching content sample:', error);
    return NextResponse.json(
      { error: 'Failed to fetch sample', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,55 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — returns all imported Cursor conversations for a project (newest
 * first) plus a summary of the aggregated messages document, if present.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const projectRef = adminDb.collection('projects').doc(projectId);

    const conversationsSnapshot = await projectRef
      .collection('cursorConversations')
      .orderBy('createdAt', 'desc')
      .get();
    const conversations = conversationsSnapshot.docs.map(doc => ({
      id: doc.id,
      ...doc.data()
    }));

    // Aggregated prompt/generation data, written separately by the importer.
    const messagesDoc = await projectRef
      .collection('cursorData')
      .doc('messages')
      .get();
    const messagesData = messagesDoc.exists ? messagesDoc.data() : null;

    return NextResponse.json({
      projectId,
      conversationCount: conversations.length,
      conversations,
      messagesData: messagesData
        ? {
            promptCount: messagesData.prompts?.length || 0,
            generationCount: messagesData.generations?.length || 0,
            importedAt: messagesData.importedAt
          }
        : null
    });
  } catch (error) {
    console.error('Error fetching cursor conversations:', error);
    return NextResponse.json(
      { error: 'Failed to fetch conversations', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,56 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — lists conversations with relevanceScore >= minScore (highest first,
 * capped at 50) plus the score distribution over the whole collection.
 * Query params: projectId (required), minScore (optional, default 0).
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    const minScore = parseInt(request.nextUrl.searchParams.get('minScore') || '0');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const conversationsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .get();

    // One pass: build the score histogram and collect matching conversations.
    const scoreGroups: Record<number, number> = {};
    const matching: any[] = [];
    for (const doc of conversationsSnapshot.docs) {
      const data = doc.data();
      const relevanceScore = data.relevanceScore || 0;
      scoreGroups[relevanceScore] = (scoreGroups[relevanceScore] || 0) + 1;
      if (relevanceScore >= minScore) {
        matching.push({
          name: data.name,
          relevanceScore,
          createdAt: data.createdAt,
          workspacePath: data.workspacePath
        });
      }
    }
    matching.sort((a, b) => b.relevanceScore - a.relevanceScore);

    return NextResponse.json({
      totalConversations: conversationsSnapshot.size,
      filteredConversations: matching.length,
      minScore,
      scoreDistribution: scoreGroups,
      conversations: matching.slice(0, 50) // First 50
    });
  } catch (error) {
    console.error('Error fetching relevant conversations:', error);
    return NextResponse.json(
      { error: 'Failed to fetch conversations', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,41 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — returns the 10 most recent imported Cursor conversations
 * (name, createdAt, workspacePath) for quick inspection.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const snapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .orderBy('createdAt', 'desc')
      .limit(10)
      .get();

    const samples = snapshot.docs.map(doc => {
      const { name, createdAt, workspacePath } = doc.data();
      return { name, createdAt, workspacePath };
    });

    return NextResponse.json({ samples });
  } catch (error) {
    console.error('Error fetching samples:', error);
    return NextResponse.json(
      { error: 'Failed to fetch samples', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,55 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — lists negatively-scored ("NHL/market") conversations and counts
 * them per calendar day, to see whether off-project work clusters in time.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const conversationsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .orderBy('createdAt', 'asc')
      .get();

    const conversations = conversationsSnapshot.docs.map(doc => {
      const data = doc.data();
      return {
        name: data.name,
        createdAt: new Date(data.createdAt),
        score: data.relevanceScore || 0
      };
    });

    // Negative relevance scores flag likely off-project sessions.
    const nhlConversations = conversations.filter(c => c.score < 0);

    // Build the per-day list and histogram in one walk.
    const nhlDateCounts: Record<string, number> = {};
    const nhlDates = nhlConversations.map(c => {
      const date = c.createdAt.toISOString().split('T')[0];
      nhlDateCounts[date] = (nhlDateCounts[date] || 0) + 1;
      return { date, name: c.name };
    });

    return NextResponse.json({
      totalConversations: conversations.length,
      nhlConversations: nhlConversations.length,
      nhlDates,
      // Reveals whether NHL conversations cluster on particular days.
      nhlDateCounts
    });
  } catch (error) {
    console.error('Error analyzing session summary:', error);
    return NextResponse.json(
      { error: 'Failed to analyze', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,124 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
// GET — groups all of a project's Cursor conversations into work sessions by
// time gap (default 2 hours) and guesses which project each session belongs
// to from the conversations' relevance scores. Read-only: nothing is written
// back to Firestore.
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    const sessionGapMinutes = parseInt(request.nextUrl.searchParams.get('gap') || '120'); // 2 hours default
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    // Get all conversations sorted by time
    const conversationsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .orderBy('createdAt', 'asc')
      .get();
    const conversations = conversationsSnapshot.docs.map(doc => {
      const data = doc.data();
      return {
        id: doc.id,
        name: data.name,
        createdAt: new Date(data.createdAt),
        relevanceScore: data.relevanceScore || 0
      };
    });
    // Group into sessions based on time gaps: consecutive conversations
    // within sessionGapMinutes of each other belong to the same session.
    const sessions: any[] = [];
    let currentSession: any = null;
    for (const conv of conversations) {
      if (!currentSession) {
        // Start first session
        currentSession = {
          startTime: conv.createdAt,
          endTime: conv.createdAt,
          conversations: [conv],
          relevanceScores: [conv.relevanceScore]
        };
      } else {
        // Check time gap from last conversation
        const gapMs = conv.createdAt.getTime() - currentSession.endTime.getTime();
        const gapMinutes = gapMs / (1000 * 60);
        if (gapMinutes <= sessionGapMinutes) {
          // Same session
          currentSession.conversations.push(conv);
          currentSession.relevanceScores.push(conv.relevanceScore);
          currentSession.endTime = conv.createdAt;
        } else {
          // New session - close current and start new
          sessions.push(currentSession);
          currentSession = {
            startTime: conv.createdAt,
            endTime: conv.createdAt,
            conversations: [conv],
            relevanceScores: [conv.relevanceScore]
          };
        }
      }
    }
    // Add last session
    if (currentSession) {
      sessions.push(currentSession);
    }
    // Analyze each session: duration, score stats, and a heuristic label.
    const analyzedSessions = sessions.map((session, idx) => {
      const durationMinutes = Math.round((session.endTime.getTime() - session.startTime.getTime()) / (1000 * 60));
      // Calculate session relevance score (average of all conversations)
      const avgScore = session.relevanceScores.reduce((a: number, b: number) => a + b, 0) / session.relevanceScores.length;
      // Count negative/positive conversations
      const negative = session.relevanceScores.filter((s: number) => s < 0).length;
      const positive = session.relevanceScores.filter((s: number) => s > 0).length;
      const neutral = session.relevanceScores.filter((s: number) => s === 0).length;
      // Determine likely project based on majority score sign
      // (negative scores come from NHL/market keywords, positive from vibn keywords)
      let likelyProject = 'unknown';
      if (negative > positive && negative > neutral) {
        likelyProject = 'other (NHL/market)';
      } else if (positive > negative && positive > neutral) {
        likelyProject = 'vibn (likely)';
      } else if (positive > 0 || avgScore > 0) {
        likelyProject = 'vibn (mixed)';
      } else {
        likelyProject = 'unclear';
      }
      return {
        sessionNumber: idx + 1,
        startTime: session.startTime.toISOString(),
        endTime: session.endTime.toISOString(),
        durationMinutes,
        conversationCount: session.conversations.length,
        avgRelevanceScore: Math.round(avgScore * 100) / 100, // rounded to 2 decimal places
        scoreBreakdown: { negative, neutral, positive },
        likelyProject,
        conversationNames: session.conversations.slice(0, 5).map((c: any) => c.name) // first 5 names only
      };
    });
    return NextResponse.json({
      totalConversations: conversations.length,
      totalSessions: sessions.length,
      sessionGapMinutes,
      sessions: analyzedSessions
    });
  } catch (error) {
    console.error('Error analyzing sessions:', error);
    return NextResponse.json(
      { error: 'Failed to analyze sessions', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,69 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET — reports the date range (earliest, latest, span in days) covered by a
 * project's imported Cursor conversations, with the conversation names at
 * each boundary.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const snapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .get();
    const conversations = snapshot.docs.map(doc => doc.data());

    // Chronologically sorted timestamps of every dated conversation.
    const dates = conversations
      .filter(c => c.createdAt)
      .map(c => new Date(c.createdAt))
      .sort((a, b) => a.getTime() - b.getTime());
    if (dates.length === 0) {
      return NextResponse.json({ error: 'No conversations with dates found' });
    }

    const earliest = dates[0];
    const latest = dates[dates.length - 1];
    const span = Math.floor((latest.getTime() - earliest.getTime()) / (1000 * 60 * 60 * 24));

    // Recover the conversation names behind the boundary timestamps.
    const findByTime = (t: number) =>
      conversations.find(c => new Date(c.createdAt).getTime() === t);
    const earliestConv = findByTime(earliest.getTime());
    const latestConv = findByTime(latest.getTime());

    return NextResponse.json({
      totalConversations: conversations.length,
      dateRange: {
        earliest: earliest.toISOString(),
        latest: latest.toISOString(),
        spanDays: span
      },
      oldestConversation: {
        name: earliestConv?.name || 'Unknown',
        date: earliest.toISOString()
      },
      newestConversation: {
        name: latestConv?.name || 'Unknown',
        date: latest.toISOString()
      }
    });
  } catch (error) {
    console.error('Error fetching cursor stats:', error);
    return NextResponse.json(
      { error: 'Failed to fetch stats', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,59 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * Debug endpoint: lists sessions whose conversations could not be attributed
 * to a project (sessionProject === 'unknown'), grouped by sessionId, with a
 * sample of up to 30 sessions.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    // Pull every conversation flagged as belonging to an unknown project.
    const snapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .where('sessionProject', '==', 'unknown')
      .get();

    // Bucket conversations by their session id.
    const bySession: Record<number, any[]> = {};
    for (const doc of snapshot.docs) {
      const data = doc.data();
      const sessionId = data.sessionId;
      if (!bySession[sessionId]) {
        bySession[sessionId] = [];
      }
      bySession[sessionId].push({
        name: data.name,
        date: data.sessionDate,
        createdAt: data.createdAt,
      });
    }

    // Flatten the buckets into summary rows (Object.entries stringifies keys,
    // so parse the id back to a number).
    const sessions = Object.entries(bySession).map(([sessionId, conversations]) => ({
      sessionId: parseInt(sessionId),
      date: conversations[0].date,
      conversationCount: conversations.length,
      conversationNames: conversations.map((conv) => conv.name),
    }));

    return NextResponse.json({
      totalUnknownSessions: sessions.length,
      totalUnknownConversations: snapshot.size,
      sample: sessions.slice(0, 30),
    });
  } catch (error) {
    console.error('Error fetching unknown sessions:', error);
    return NextResponse.json(
      { error: 'Failed to fetch', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,59 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * Debug endpoint: breaks down a project's Cursor conversations by the
 * workspace path and GitHub repo they were recorded against, sorted by
 * descending conversation count.
 */
export async function GET(request: NextRequest) {
  try {
    const projectId = request.nextUrl.searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    // Load every conversation for this project.
    const snapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .get();
    const conversations = snapshot.docs.map((doc) => doc.data());

    // Tally conversation counts per workspace path and per GitHub URL.
    const workspaceCounts: Record<string, number> = {};
    const repoCounts: Record<string, number> = {};
    for (const conv of conversations) {
      const workspaceKey = conv.workspacePath || 'unknown';
      const repoKey = conv.githubUrl || 'none';
      workspaceCounts[workspaceKey] = (workspaceCounts[workspaceKey] || 0) + 1;
      repoCounts[repoKey] = (repoCounts[repoKey] || 0) + 1;
    }

    // Present each tally as a list sorted by descending count.
    const workspaces = Object.entries(workspaceCounts)
      .map(([path, count]) => ({ path, count }))
      .sort((a, b) => b.count - a.count);
    const repos = Object.entries(repoCounts)
      .map(([url, count]) => ({ url, count }))
      .sort((a, b) => b.count - a.count);

    return NextResponse.json({
      totalConversations: conversations.length,
      uniqueWorkspaces: workspaces.length,
      uniqueRepos: repos.length,
      workspaces: workspaces,
      repos: repos,
    });
  } catch (error) {
    console.error('Error fetching workspace breakdown:', error);
    return NextResponse.json(
      { error: 'Failed to fetch breakdown', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

18
app/api/debug/env/route.ts vendored Normal file
View File

@@ -0,0 +1,18 @@
import { NextResponse } from 'next/server';
/**
 * Debug endpoint: reports which Firebase-related environment variables are
 * present without leaking their values (only SET / NOT SET, plus the private
 * key's length as a sanity check).
 */
export async function GET() {
  const privateKey = process.env.FIREBASE_PRIVATE_KEY;
  return NextResponse.json({
    firebaseProjectId: process.env.FIREBASE_PROJECT_ID ? 'SET' : 'NOT SET',
    firebaseClientEmail: process.env.FIREBASE_CLIENT_EMAIL ? 'SET' : 'NOT SET',
    firebasePrivateKey: privateKey ? 'SET (length: ' + privateKey.length + ')' : 'NOT SET',
    publicApiKey: process.env.NEXT_PUBLIC_FIREBASE_API_KEY ? 'SET' : 'NOT SET',
    publicAuthDomain: process.env.NEXT_PUBLIC_FIREBASE_AUTH_DOMAIN ? 'SET' : 'NOT SET',
    publicProjectId: process.env.NEXT_PUBLIC_FIREBASE_PROJECT_ID ? 'SET' : 'NOT SET',
    nodeEnv: process.env.NODE_ENV,
    tip: 'If any Firebase vars show NOT SET, restart your dev server after updating .env.local',
  });
}

View File

@@ -0,0 +1,33 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
 * Debug endpoint: returns the id and raw data of an arbitrary project
 * document (the first one Firestore yields), or 404 when none exist.
 */
export async function GET() {
  try {
    const adminDb = getAdminDb();
    const snapshot = await adminDb.collection('projects').limit(1).get();
    if (snapshot.empty) {
      return NextResponse.json(
        { error: 'No projects found' },
        { status: 404 },
      );
    }
    const firstDoc = snapshot.docs[0];
    return NextResponse.json({
      id: firstDoc.id,
      data: firstDoc.data(),
    });
  } catch (error) {
    console.error('[debug/first-project] Failed to load project', error);
    return NextResponse.json(
      {
        error: 'Failed to load project',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,43 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
 * Debug endpoint: returns up to 20 knowledge_items documents belonging to
 * the given project, each augmented with its Firestore document id.
 */
export async function GET(request: Request) {
  try {
    const { searchParams } = new URL(request.url);
    const projectId = searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const adminDb = getAdminDb();
    // Sample the project's knowledge items (capped at 20 docs).
    const snapshot = await adminDb
      .collection('knowledge_items')
      .where('projectId', '==', projectId)
      .limit(20)
      .get();

    const items = snapshot.docs.map((doc) => ({
      id: doc.id,
      ...doc.data(),
    }));

    return NextResponse.json({
      projectId,
      count: items.length,
      items,
    });
  } catch (error) {
    console.error('[debug/knowledge-items] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch knowledge items',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,36 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
 * Debug endpoint: lists every knowledge_items document for a project.
 * Deliberately uses a bare equality filter (no orderBy) so Firestore does
 * not require a composite index.
 */
export async function GET(request: Request) {
  try {
    const projectId = (new URL(request.url).searchParams.get('projectId') ?? '').trim();
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const adminDb = getAdminDb();
    const snapshot = await adminDb
      .collection('knowledge_items')
      .where('projectId', '==', projectId)
      .get();

    const items = snapshot.docs.map((doc) => doc.data());
    return NextResponse.json({ count: items.length, items });
  } catch (error) {
    console.error('[debug/knowledge] Failed to list knowledge items', error);
    return NextResponse.json(
      {
        error: 'Failed to list knowledge items',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

61
app/api/diagnose/route.ts Normal file
View File

@@ -0,0 +1,61 @@
import { NextResponse } from 'next/server';
import { adminAuth, adminDb } from '@/lib/firebase/admin';
/**
 * Diagnostic endpoint: reports environment-variable presence, Firebase Admin
 * connectivity (via a single Firestore read), and — when an Authorization
 * header is supplied — the result of verifying that bearer token.
 *
 * Fixes vs. original: removes the unused `testDoc` binding and replaces
 * `catch (error: any)` with `unknown` plus explicit narrowing, while keeping
 * the diagnostics payload shape unchanged.
 */
export async function GET(request: Request) {
  const diagnostics: any = {
    timestamp: new Date().toISOString(),
    environment: {},
    firebase: {},
    token: {},
  };

  try {
    // Check environment variables (report presence only, never values).
    diagnostics.environment = {
      FIREBASE_PROJECT_ID: process.env.FIREBASE_PROJECT_ID ? 'SET' : 'NOT SET',
      FIREBASE_CLIENT_EMAIL: process.env.FIREBASE_CLIENT_EMAIL ? 'SET' : 'NOT SET',
      FIREBASE_PRIVATE_KEY: process.env.FIREBASE_PRIVATE_KEY ? `SET (${process.env.FIREBASE_PRIVATE_KEY.length} chars)` : 'NOT SET',
      NEXT_PUBLIC_FIREBASE_PROJECT_ID: process.env.NEXT_PUBLIC_FIREBASE_PROJECT_ID || 'NOT SET',
    };

    // Test Firebase Admin: one read proves the Firestore credentials work;
    // the document's contents are irrelevant.
    try {
      await adminDb.collection('test').doc('diagnostic').get();
      diagnostics.firebase.adminDb = 'OK - Can access Firestore';
      diagnostics.firebase.adminAuth = 'OK - Auth service initialized';
    } catch (error: unknown) {
      diagnostics.firebase.error = error instanceof Error ? error.message : String(error);
    }

    // Try to verify a token if provided.
    const authHeader = request.headers.get('authorization');
    if (authHeader?.startsWith('Bearer ')) {
      const token = authHeader.substring(7);
      diagnostics.token.received = true;
      diagnostics.token.length = token.length;
      try {
        const decodedToken = await adminAuth.verifyIdToken(token);
        diagnostics.token.verification = 'SUCCESS';
        diagnostics.token.uid = decodedToken.uid;
        diagnostics.token.email = decodedToken.email;
      } catch (error: unknown) {
        diagnostics.token.verification = 'FAILED';
        diagnostics.token.error = error instanceof Error ? error.message : String(error);
        // Firebase auth errors carry a string `code` (e.g. auth/id-token-expired).
        diagnostics.token.errorCode = (error as { code?: string })?.code;
      }
    } else {
      diagnostics.token.received = false;
      diagnostics.token.note = 'No token provided - add Authorization: Bearer <token> header to test';
    }

    return NextResponse.json(diagnostics, { status: 200 });
  } catch (error: unknown) {
    diagnostics.criticalError = {
      message: error instanceof Error ? error.message : String(error),
      stack: error instanceof Error ? error.stack : undefined,
    };
    return NextResponse.json(diagnostics, { status: 500 });
  }
}

View File

@@ -0,0 +1,137 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
* Endpoint for the browser extension to link itself to a Vibn project.
* Extension sends: workspace path + Vibn project ID
* Backend stores: mapping for future requests
*/
/**
 * POST /api/extension/link — link a local workspace path to a Vibn project.
 *
 * Auth: Firebase ID token in the Authorization header; the caller must own
 * the target project (projects/{projectId}.userId === caller uid).
 * Body: { projectId, workspacePath }.
 * Side effects: upserts extensionWorkspaceLinks/{workspacePath} and marks
 * the project document with extensionLinked / extensionLinkedAt.
 *
 * NOTE(review): workspacePath is used directly as a Firestore document id.
 * Firestore's `.doc()` treats '/' in its argument as a path separator, so an
 * absolute filesystem path like "/Users/me/app" would not form a valid
 * single-document reference — verify with real extension input and consider
 * encoding the path before using it as an id (the GET handler would need the
 * same encoding).
 */
export async function POST(request: Request) {
  try {
    // Verify auth
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    // Resolve the caller's uid; an unverifiable token is a 401.
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const body = await request.json();
    const { projectId, workspacePath } = body;
    if (!projectId || !workspacePath) {
      return NextResponse.json(
        { error: 'Missing projectId or workspacePath' },
        { status: 400 }
      );
    }
    const adminDb = getAdminDb();
    // Verify project exists and user has access
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectSnap.data();
    // Ownership check: only the project's owner may link a workspace to it.
    if (projectData?.userId !== userId) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 403 });
    }
    // Store the workspace → project mapping
    // (merge:true keeps re-linking idempotent and preserves unrelated fields)
    await adminDb
      .collection('extensionWorkspaceLinks')
      .doc(workspacePath)
      .set({
        projectId,
        userId,
        workspacePath,
        linkedAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
      }, { merge: true });
    // Also update project metadata to indicate extension is linked
    await adminDb.collection('projects').doc(projectId).set(
      {
        extensionLinked: true,
        extensionLinkedAt: new Date().toISOString(),
      },
      { merge: true }
    );
    console.log(`[Extension] Linked workspace "${workspacePath}" to project ${projectId}`);
    return NextResponse.json({
      success: true,
      message: 'Extension linked to project',
      projectId,
      workspacePath,
    });
  } catch (error) {
    console.error('[Extension] Link error:', error);
    return NextResponse.json(
      {
        error: 'Failed to link extension',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * GET /api/extension/link?workspacePath=... — resolve a workspace path to
 * the Vibn project id previously stored by POST in extensionWorkspaceLinks.
 *
 * NOTE(review): no auth check here — any caller can resolve a workspace path
 * to a project id; confirm this is intentional for the extension flow.
 * NOTE(review): the raw workspace path is used as a Firestore document id;
 * paths containing '/' are interpreted by `.doc()` as nested path segments
 * and likely fail for absolute filesystem paths — verify, and keep any
 * encoding scheme in sync with the POST handler above.
 */
export async function GET(request: Request) {
  try {
    const { searchParams } = new URL(request.url);
    const workspacePath = searchParams.get('workspacePath');
    if (!workspacePath) {
      return NextResponse.json(
        { error: 'Missing workspacePath query param' },
        { status: 400 }
      );
    }
    const adminDb = getAdminDb();
    // Direct document lookup: the mapping doc id is the raw workspace path.
    const linkSnap = await adminDb
      .collection('extensionWorkspaceLinks')
      .doc(workspacePath)
      .get();
    if (!linkSnap.exists) {
      return NextResponse.json(
        { error: 'No project linked for this workspace' },
        { status: 404 }
      );
    }
    const linkData = linkSnap.data();
    return NextResponse.json({
      projectId: linkData?.projectId,
      linkedAt: linkData?.linkedAt,
    });
  } catch (error) {
    console.error('[Extension] Get link error:', error);
    return NextResponse.json(
      {
        error: 'Failed to get link',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,58 @@
import { NextResponse } from 'next/server';
import { adminDb, adminAuth } from '@/lib/firebase/admin';
/**
 * Health-check endpoint: verifies Firebase Admin is initialized, performs a
 * write/read/delete round-trip against a _healthcheck collection to prove the
 * credentials work, and reports whether the Auth service handle exists.
 */
export async function GET() {
  try {
    // Test 1: Firebase Admin must be initialized before anything else.
    if (!adminDb) {
      return NextResponse.json(
        { error: 'Firebase Admin not initialized' },
        { status: 500 }
      );
    }

    // Test 2: round-trip a throwaway document to validate credentials.
    const healthCollection = adminDb.collection('_healthcheck');
    const now = new Date().toISOString();
    const probeRef = await healthCollection.add({
      message: 'Firebase connection test',
      timestamp: now,
    });
    const probeSnap = await probeRef.get();
    const probeData = probeSnap.data();
    // Clean up the probe document so the collection stays empty.
    await probeRef.delete();

    // Test 3: confirm the Auth service handle is present.
    const authStatus = adminAuth ? 'OK' : 'Failed';

    return NextResponse.json({
      success: true,
      message: 'Firebase is connected successfully! 🎉',
      tests: {
        adminInit: 'OK',
        firestoreWrite: 'OK',
        firestoreRead: 'OK',
        authInit: authStatus,
      },
      projectId: process.env.FIREBASE_PROJECT_ID,
      testData: probeData,
    });
  } catch (error) {
    console.error('Firebase test error:', error);
    return NextResponse.json(
      {
        error: 'Firebase connection failed',
        details: error instanceof Error ? error.message : String(error),
        tip: 'Check your .env.local file for correct FIREBASE_PROJECT_ID, FIREBASE_CLIENT_EMAIL, and FIREBASE_PRIVATE_KEY',
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,151 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
/**
* Store GitHub connection for authenticated user
* Encrypts and stores the access token securely
*/
/**
 * POST /api/github/connect — persist a GitHub OAuth connection for the
 * authenticated user (profile metadata plus access token), one document per
 * user keyed by uid.
 */
export async function POST(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    // Identify the caller from their Firebase ID token.
    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(idToken)).uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const { accessToken, githubUser } = await request.json();
    if (!accessToken || !githubUser) {
      return NextResponse.json(
        { error: 'Missing required fields' },
        { status: 400 }
      );
    }

    // TODO: Encrypt the access token before storing.
    // Currently persisted as-is (should use crypto.subtle or a library).
    const encryptedToken = accessToken; // PLACEHOLDER

    await adminDb.collection('githubConnections').doc(userId).set({
      userId,
      githubUserId: githubUser.id,
      githubUsername: githubUser.login,
      githubName: githubUser.name,
      githubEmail: githubUser.email,
      githubAvatarUrl: githubUser.avatar_url,
      accessToken: encryptedToken,
      connectedAt: FieldValue.serverTimestamp(),
      lastSyncedAt: null,
    });

    return NextResponse.json({
      success: true,
      githubUsername: githubUser.login,
    });
  } catch (error) {
    console.error('[GitHub Connect] Error:', error);
    return NextResponse.json(
      { error: 'Failed to store GitHub connection' },
      { status: 500 }
    );
  }
}
/**
* Get GitHub connection status for authenticated user
*/
/**
 * GET /api/github/connect — report whether the authenticated user has a
 * GitHub connection, plus public profile metadata when connected.
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(idToken)).uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const connectionDoc = await adminDb
      .collection('githubConnections')
      .doc(userId)
      .get();
    if (!connectionDoc.exists) {
      return NextResponse.json({ connected: false });
    }

    // Return profile metadata only — never echo the stored access token.
    const connection = connectionDoc.data()!;
    return NextResponse.json({
      connected: true,
      githubUsername: connection.githubUsername,
      githubName: connection.githubName,
      githubAvatarUrl: connection.githubAvatarUrl,
      connectedAt: connection.connectedAt,
      lastSyncedAt: connection.lastSyncedAt,
    });
  } catch (error) {
    console.error('[GitHub Connect] Error:', error);
    return NextResponse.json(
      { error: 'Failed to fetch GitHub connection' },
      { status: 500 }
    );
  }
}
/**
* Disconnect GitHub account
*/
/**
 * DELETE /api/github/connect — remove the authenticated user's stored
 * GitHub connection document.
 */
export async function DELETE(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(idToken)).uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // Deleting a missing doc is a no-op, so this is safely idempotent.
    await adminDb.collection('githubConnections').doc(userId).delete();
    return NextResponse.json({ success: true });
  } catch (error) {
    console.error('[GitHub Disconnect] Error:', error);
    return NextResponse.json(
      { error: 'Failed to disconnect GitHub' },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,99 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
* Fetch file content from GitHub
* GET /api/github/file-content?owner=X&repo=Y&path=Z
*/
/**
 * GET /api/github/file-content — fetch one file's content from GitHub.
 *
 * Query params: owner, repo, path (required); branch (default 'main').
 * Requires a Firebase ID token and a stored GitHub connection; the file is
 * fetched via the GitHub contents API and returned decoded as UTF-8.
 *
 * Fix vs. original: the original encoded the whole path with
 * encodeURIComponent, turning directory separators into %2F so nested paths
 * like "src/app.ts" stop being path segments; each segment is now encoded
 * individually (and the branch name is encoded for the ?ref= query param).
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const url = new URL(request.url);
    const owner = url.searchParams.get('owner');
    const repo = url.searchParams.get('repo');
    const path = url.searchParams.get('path');
    const branch = url.searchParams.get('branch') || 'main';
    if (!owner || !repo || !path) {
      return NextResponse.json(
        { error: 'Missing owner, repo, or path' },
        { status: 400 }
      );
    }
    // Get GitHub connection (holds the user's OAuth access token).
    const connectionDoc = await adminDb
      .collection('githubConnections')
      .doc(userId)
      .get();
    if (!connectionDoc.exists) {
      return NextResponse.json(
        { error: 'GitHub not connected' },
        { status: 404 }
      );
    }
    const connection = connectionDoc.data()!;
    const accessToken = connection.accessToken; // TODO: Decrypt
    // Encode each path segment individually so '/' separators survive.
    const encodedPath = path.split('/').map(encodeURIComponent).join('/');
    // Fetch file content from the GitHub contents API.
    const response = await fetch(
      `https://api.github.com/repos/${owner}/${repo}/contents/${encodedPath}?ref=${encodeURIComponent(branch)}`,
      {
        headers: {
          Authorization: `Bearer ${accessToken}`,
          Accept: 'application/vnd.github.v3+json',
        },
      }
    );
    if (!response.ok) {
      if (response.status === 404) {
        return NextResponse.json({ error: 'File not found' }, { status: 404 });
      }
      const errorBody = await response.json();
      throw new Error(`GitHub API error: ${errorBody.message || response.statusText}`);
    }
    const data = await response.json();
    // GitHub returns base64-encoded content; decode to UTF-8 text.
    const content = Buffer.from(data.content, 'base64').toString('utf-8');
    return NextResponse.json({
      path: data.path,
      name: data.name,
      size: data.size,
      sha: data.sha,
      content,
      encoding: 'utf-8',
    });
  } catch (error) {
    console.error('[GitHub File Content] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch file content',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,140 @@
'use client';
import { useEffect, useState, Suspense } from 'react';
import { useSearchParams, useRouter } from 'next/navigation';
import { auth } from '@/lib/firebase/config';
import { exchangeCodeForToken, getGitHubUser } from '@/lib/github/oauth';
import { Loader2, CheckCircle2, XCircle } from 'lucide-react';
/**
 * Inner client component for the GitHub OAuth callback page.
 *
 * On mount it: validates the `state` param against the value stashed in
 * sessionStorage (CSRF check), exchanges the `code` for an access token,
 * loads the GitHub profile, POSTs the connection to /api/github/connect with
 * the caller's Firebase ID token, then redirects back to the connections
 * page. Renders loading / success / error panels while that runs.
 */
function GitHubCallbackContent() {
  const searchParams = useSearchParams();
  const router = useRouter();
  // Flow progress drives which of the three panels renders below.
  const [status, setStatus] = useState<'loading' | 'success' | 'error'>('loading');
  const [error, setError] = useState<string | null>(null);
  useEffect(() => {
    async function handleCallback() {
      try {
        const code = searchParams.get('code');
        const state = searchParams.get('state');
        // GitHub reports denied/misconfigured flows via an ?error= param.
        const error = searchParams.get('error');
        if (error) {
          throw new Error(`GitHub OAuth error: ${error}`);
        }
        if (!code) {
          throw new Error('No authorization code received');
        }
        // Verify state (CSRF protection)
        const storedState = sessionStorage.getItem('github_oauth_state');
        if (state !== storedState) {
          throw new Error('Invalid state parameter');
        }
        // One-time use: clear the stored state once consumed.
        sessionStorage.removeItem('github_oauth_state');
        // Exchange code for token
        const tokenData = await exchangeCodeForToken(code);
        // Get GitHub user info
        const githubUser = await getGitHubUser(tokenData.access_token);
        // Store connection in Firebase
        const user = auth.currentUser;
        if (!user) {
          throw new Error('User not authenticated');
        }
        const idToken = await user.getIdToken();
        const response = await fetch('/api/github/connect', {
          method: 'POST',
          headers: {
            'Authorization': `Bearer ${idToken}`,
            'Content-Type': 'application/json',
          },
          body: JSON.stringify({
            accessToken: tokenData.access_token,
            githubUser,
          }),
        });
        if (!response.ok) {
          throw new Error('Failed to store GitHub connection');
        }
        setStatus('success');
        // Redirect back to connections page after 2 seconds
        // NOTE(review): the workspace route segment is derived from the
        // Firebase displayName — confirm displayName always matches the
        // URL slug used elsewhere in the app.
        setTimeout(() => {
          const workspace = user.displayName || 'workspace';
          router.push(`/${workspace}/connections`);
        }, 2000);
      } catch (err: any) {
        console.error('[GitHub Callback] Error:', err);
        setError(err.message);
        setStatus('error');
      }
    }
    handleCallback();
  }, [searchParams, router]);
  // NOTE(review): the error-state button below routes to '/connections'
  // (no workspace prefix) while the success path routes to
  // `/${workspace}/connections` — verify which is the real route.
  return (
    <div className="flex min-h-screen items-center justify-center bg-background p-6">
      <div className="w-full max-w-md space-y-6 text-center">
        {status === 'loading' && (
          <>
            <Loader2 className="mx-auto h-12 w-12 animate-spin text-primary" />
            <h1 className="text-2xl font-bold">Connecting to GitHub...</h1>
            <p className="text-muted-foreground">
              Please wait while we complete the connection.
            </p>
          </>
        )}
        {status === 'success' && (
          <>
            <CheckCircle2 className="mx-auto h-12 w-12 text-green-500" />
            <h1 className="text-2xl font-bold">Successfully Connected!</h1>
            <p className="text-muted-foreground">
              Your GitHub account has been connected. Redirecting...
            </p>
          </>
        )}
        {status === 'error' && (
          <>
            <XCircle className="mx-auto h-12 w-12 text-red-500" />
            <h1 className="text-2xl font-bold">Connection Failed</h1>
            <p className="text-muted-foreground">{error}</p>
            <button
              onClick={() => router.push('/connections')}
              className="mt-4 rounded-lg bg-primary px-6 py-2 text-white hover:bg-primary/90"
            >
              Back to Connections
            </button>
          </>
        )}
      </div>
    </div>
  );
}
export default function GitHubCallbackPage() {
return (
<Suspense
fallback={
<div className="flex min-h-screen items-center justify-center bg-background p-6">
<div className="w-full max-w-md space-y-6 text-center">
<Loader2 className="mx-auto h-12 w-12 animate-spin text-primary" />
<h1 className="text-2xl font-bold">Loading...</h1>
</div>
</div>
}
>
<GitHubCallbackContent />
</Suspense>
);
}

View File

@@ -0,0 +1,68 @@
import { NextResponse } from 'next/server';
/**
* Exchange GitHub OAuth code for access token
* This must be done server-side to keep client secret secure
*/
/**
 * POST — exchange a GitHub OAuth authorization code for an access token.
 * Runs server-side so the client secret is never exposed to the browser.
 */
export async function POST(request: Request) {
  try {
    const { code } = await request.json();
    if (!code) {
      return NextResponse.json(
        { error: 'Authorization code is required' },
        { status: 400 }
      );
    }

    const clientId = process.env.NEXT_PUBLIC_GITHUB_CLIENT_ID;
    const clientSecret = process.env.GITHUB_CLIENT_SECRET;
    if (!clientId || !clientSecret) {
      return NextResponse.json(
        { error: 'GitHub OAuth not configured' },
        { status: 500 }
      );
    }

    // Trade the one-time code for a bearer token.
    const tokenResponse = await fetch('https://github.com/login/oauth/access_token', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Accept: 'application/json',
      },
      body: JSON.stringify({
        client_id: clientId,
        client_secret: clientSecret,
        code,
      }),
    });
    if (!tokenResponse.ok) {
      throw new Error('Failed to exchange code for token');
    }

    const tokenData = await tokenResponse.json();
    // GitHub reports OAuth failures in the body (with a 200 status).
    if (tokenData.error) {
      return NextResponse.json(
        { error: tokenData.error_description || tokenData.error },
        { status: 400 }
      );
    }

    return NextResponse.json({
      access_token: tokenData.access_token,
      token_type: tokenData.token_type,
      scope: tokenData.scope,
    });
  } catch (error) {
    console.error('[GitHub OAuth] Error:', error);
    return NextResponse.json(
      { error: 'Failed to exchange code for token' },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,149 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
* Fetch repository file tree from GitHub
* GET /api/github/repo-tree?owner=X&repo=Y
*/
/**
 * GET /api/github/repo-tree?owner=X&repo=Y[&branch=Z] — fetch a repository's
 * full recursive file tree from GitHub and filter it down to code-relevant
 * blobs (sources, configs, docs), excluding vendored/binary/lock files.
 *
 * Requires a Firebase ID token and a stored GitHub connection.
 * NOTE(review): GitHub truncates very large trees (`truncated: true` in the
 * response); that flag is not checked here — confirm target repos stay
 * within the API's tree-size limits.
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    // Resolve the caller's uid; an unverifiable token is a 401.
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const url = new URL(request.url);
    const owner = url.searchParams.get('owner');
    const repo = url.searchParams.get('repo');
    const branch = url.searchParams.get('branch') || 'main';
    if (!owner || !repo) {
      return NextResponse.json({ error: 'Missing owner or repo' }, { status: 400 });
    }
    // Get GitHub connection (holds the user's OAuth access token)
    const connectionDoc = await adminDb
      .collection('githubConnections')
      .doc(userId)
      .get();
    if (!connectionDoc.exists) {
      return NextResponse.json(
        { error: 'GitHub not connected' },
        { status: 404 }
      );
    }
    const connection = connectionDoc.data()!;
    const accessToken = connection.accessToken; // TODO: Decrypt
    // Fetch repository tree from GitHub API (recursive)
    const response = await fetch(
      `https://api.github.com/repos/${owner}/${repo}/git/trees/${branch}?recursive=1`,
      {
        headers: {
          Authorization: `Bearer ${accessToken}`,
          Accept: 'application/vnd.github.v3+json',
        },
      }
    );
    if (!response.ok) {
      const error = await response.json();
      throw new Error(`GitHub API error: ${error.message || response.statusText}`);
    }
    const data = await response.json();
    // Filter to only include files (not directories)
    // and exclude common non-code files
    // Deny-list: vendored dirs, build output, secrets, lockfiles, binaries,
    // fonts, and minified/sourcemap artifacts.
    const excludePatterns = [
      /node_modules\//,
      /\.git\//,
      /dist\//,
      /build\//,
      /\.next\//,
      /coverage\//,
      /\.cache\//,
      /\.env/,
      /package-lock\.json$/,
      /yarn\.lock$/,
      /pnpm-lock\.yaml$/,
      /\.png$/,
      /\.jpg$/,
      /\.jpeg$/,
      /\.gif$/,
      /\.svg$/,
      /\.ico$/,
      /\.woff$/,
      /\.woff2$/,
      /\.ttf$/,
      /\.eot$/,
      /\.min\.js$/,
      /\.min\.css$/,
      /\.map$/,
    ];
    // Include common code file extensions
    // Allow-list: a file must match one of these to be kept at all.
    const includePatterns = [
      /\.(ts|tsx|js|jsx|py|java|go|rs|cpp|c|h|cs|rb|php|swift|kt)$/,
      /\.(json|yaml|yml|toml|xml)$/,
      /\.(md|txt)$/,
      /\.(sql|graphql|proto)$/,
      /\.(css|scss|sass|less)$/,
      /\.(html|htm)$/,
      /Dockerfile$/,
      /Makefile$/,
      /README$/,
    ];
    // Keep blobs only (type 'blob' = file in the git tree API), then apply
    // the deny-list before the allow-list.
    const files = data.tree
      .filter((item: any) => item.type === 'blob')
      .filter((item: any) => {
        // Exclude patterns
        if (excludePatterns.some(pattern => pattern.test(item.path))) {
          return false;
        }
        // Include patterns
        return includePatterns.some(pattern => pattern.test(item.path));
      })
      .map((item: any) => ({
        path: item.path,
        sha: item.sha,
        size: item.size,
        url: item.url,
      }));
    console.log(`[GitHub Tree] Found ${files.length} code files in ${owner}/${repo}`);
    return NextResponse.json({
      owner,
      repo,
      branch,
      totalFiles: files.length,
      files,
    });
  } catch (error) {
    console.error('[GitHub Tree] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch repository tree',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,79 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
* Fetch user's GitHub repositories
*/
/**
 * GET /api/github/repos — list the authenticated user's GitHub repositories
 * (most recently updated first, up to 100), reduced to the fields the UI
 * consumes.
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(idToken)).uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // The stored connection holds the user's GitHub OAuth token.
    const connectionDoc = await adminDb
      .collection('githubConnections')
      .doc(userId)
      .get();
    if (!connectionDoc.exists) {
      return NextResponse.json(
        { error: 'GitHub not connected' },
        { status: 404 }
      );
    }
    const connection = connectionDoc.data()!;
    const accessToken = connection.accessToken; // TODO: Decrypt

    const response = await fetch('https://api.github.com/user/repos?sort=updated&per_page=100', {
      headers: {
        Authorization: `Bearer ${accessToken}`,
        Accept: 'application/vnd.github.v3+json',
      },
    });
    if (!response.ok) {
      throw new Error('Failed to fetch repositories from GitHub');
    }

    const repos = await response.json();
    // Project each repo down to the fields the frontend needs.
    const simplified = repos.map((repo: any) => ({
      id: repo.id,
      name: repo.name,
      full_name: repo.full_name,
      description: repo.description,
      html_url: repo.html_url,
      language: repo.language,
      default_branch: repo.default_branch,
      private: repo.private,
      topics: repo.topics || [],
      updated_at: repo.updated_at,
    }));
    return NextResponse.json(simplified);
  } catch (error) {
    console.error('[GitHub Repos] Error:', error);
    return NextResponse.json(
      { error: 'Failed to fetch repositories' },
      { status: 500 }
    );
  }
}

84
app/api/keys/get/route.ts Normal file
View File

@@ -0,0 +1,84 @@
/**
* Internal API to get decrypted key value
* This endpoint is used by Vibn internally, not exposed to frontend
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
import * as crypto from 'crypto';
// Key material: an env-provided passphrase, falling back to an insecure
// default that must be overridden in production.
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY || 'vibn-default-encryption-key-change-me!!';
const ALGORITHM = 'aes-256-cbc';

/**
 * Decrypt a hex-encoded AES-256-CBC ciphertext.
 *
 * The 256-bit key is derived by SHA-256-hashing ENCRYPTION_KEY, matching the
 * scheme used when the value was encrypted.
 *
 * @param encrypted hex-encoded ciphertext
 * @param ivHex hex-encoded 16-byte initialization vector
 * @returns the original UTF-8 plaintext
 */
function decrypt(encrypted: string, ivHex: string): string {
  const derivedKey = crypto.createHash('sha256').update(ENCRYPTION_KEY).digest();
  const initVector = Buffer.from(ivHex, 'hex');
  const decipher = crypto.createDecipheriv(ALGORITHM, derivedKey, initVector);
  const plaintext = decipher.update(encrypted, 'hex', 'utf8') + decipher.final('utf8');
  return plaintext;
}
/**
 * POST /api/keys/get — return the decrypted key value for one service.
 *
 * Internal-only endpoint: the caller authenticates with a Firebase ID token
 * in the Authorization header and supplies { service } in the JSON body.
 * Responses: 401 on auth failure, 400 when service is missing, 404 when no
 * key is stored, otherwise { hasKey, keyValue, service }. Each successful
 * read also stamps the key document's lastUsed field.
 */
export async function POST(request: Request) {
  try {
    const bearer = request.headers.get('Authorization');
    if (!bearer?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    // Verify the Firebase ID token and resolve the caller's uid.
    let userId: string;
    try {
      const verified = await adminAuth.verifyIdToken(bearer.split('Bearer ')[1]);
      userId = verified.uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const { service } = await request.json();
    if (!service) {
      return NextResponse.json({ error: 'Service is required' }, { status: 400 });
    }

    // Look up the single stored key for this (user, service) pair.
    const keysSnapshot = await adminDb
      .collection('userKeys')
      .where('userId', '==', userId)
      .where('service', '==', service)
      .limit(1)
      .get();
    if (keysSnapshot.empty) {
      return NextResponse.json({ error: 'Key not found', hasKey: false }, { status: 404 });
    }

    const keyDoc = keysSnapshot.docs[0];
    const stored = keyDoc.data();

    // Recover the plaintext and record the access time.
    const keyValue = decrypt(stored.encryptedKey, stored.iv);
    await keyDoc.ref.update({ lastUsed: FieldValue.serverTimestamp() });

    return NextResponse.json({ hasKey: true, keyValue, service: stored.service });
  } catch (error) {
    console.error('Error getting key:', error);
    return NextResponse.json(
      { error: 'Failed to get key', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

214
app/api/keys/route.ts Normal file
View File

@@ -0,0 +1,214 @@
/**
* Manage user's third-party API keys (OpenAI, GitHub, etc.)
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
import * as crypto from 'crypto';
// --- Encryption helpers -----------------------------------------------------
// Keys are stored AES-256-CBC encrypted; the cipher key is derived from the
// ENCRYPTION_KEY env var (the hard-coded fallback is a development default).
const ENCRYPTION_KEY = process.env.ENCRYPTION_KEY || 'vibn-default-encryption-key-change-me!!';
const ALGORITHM = 'aes-256-cbc';

/** Derive the 256-bit cipher key from the configured secret via SHA-256. */
function deriveKey(): Buffer {
  return crypto.createHash('sha256').update(ENCRYPTION_KEY).digest();
}

/**
 * Encrypt a plaintext string with a fresh random IV.
 * @returns Hex-encoded ciphertext and the hex-encoded IV needed to decrypt it.
 */
function encrypt(text: string): { encrypted: string; iv: string } {
  const iv = crypto.randomBytes(16);
  const cipher = crypto.createCipheriv(ALGORITHM, deriveKey(), iv);
  const encrypted = cipher.update(text, 'utf8', 'hex') + cipher.final('hex');
  return { encrypted, iv: iv.toString('hex') };
}

/**
 * Decrypt a hex-encoded ciphertext produced by {@link encrypt}.
 * @param encrypted Hex-encoded ciphertext.
 * @param ivHex Hex-encoded IV returned alongside it.
 */
function decrypt(encrypted: string, ivHex: string): string {
  const decipher = crypto.createDecipheriv(ALGORITHM, deriveKey(), Buffer.from(ivHex, 'hex'));
  return decipher.update(encrypted, 'hex', 'utf8') + decipher.final('utf8');
}
// GET - List all keys (metadata only, not actual values)
/**
 * GET /api/keys — list the caller's stored key metadata.
 * Never returns encrypted material or plaintext key values.
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(authHeader.split('Bearer ')[1])).uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const snapshot = await adminDb
      .collection('userKeys')
      .where('userId', '==', userId)
      .get();

    // Project only safe metadata fields; the encrypted key stays server-side.
    const keys = snapshot.docs.map(doc => {
      const { service, name, createdAt, lastUsed } = doc.data();
      return { id: doc.id, service, name, createdAt, lastUsed };
    });

    return NextResponse.json({ keys });
  } catch (error) {
    console.error('Error fetching keys:', error);
    return NextResponse.json(
      { error: 'Failed to fetch keys', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}
// POST - Add or update a key
/**
 * POST /api/keys — store (or rotate) an encrypted third-party API key.
 *
 * Body: { service, name?, keyValue }. Maintains at most one document per
 * (user, service) pair in the `userKeys` collection: an existing document is
 * updated in place, otherwise a new one is created.
 */
export async function POST(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(authHeader.split('Bearer ')[1])).uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const { service, name, keyValue } = await request.json();
    if (!service || !keyValue) {
      return NextResponse.json({ error: 'Service and key value are required' }, { status: 400 });
    }

    // Never persist the plaintext key.
    const { encrypted, iv } = encrypt(keyValue);

    // Upsert: one key document per (user, service).
    const existing = await adminDb
      .collection('userKeys')
      .where('userId', '==', userId)
      .where('service', '==', service)
      .limit(1)
      .get();

    if (existing.empty) {
      const keyRef = await adminDb.collection('userKeys').add({
        userId,
        service,
        name: name || service,
        encryptedKey: encrypted,
        iv,
        createdAt: FieldValue.serverTimestamp(),
        updatedAt: FieldValue.serverTimestamp(),
        lastUsed: null,
      });
      return NextResponse.json({
        success: true,
        message: `${service} key added`,
        id: keyRef.id,
      });
    }

    const keyDoc = existing.docs[0];
    await keyDoc.ref.update({
      name: name || service,
      encryptedKey: encrypted,
      iv,
      updatedAt: FieldValue.serverTimestamp(),
    });
    return NextResponse.json({
      success: true,
      message: `${service} key updated`,
      id: keyDoc.id,
    });
  } catch (error) {
    console.error('Error saving key:', error);
    return NextResponse.json(
      { error: 'Failed to save key', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}
// DELETE - Remove a key
/**
 * DELETE /api/keys — remove every stored key for a given service.
 * Body: { service }. Responds 404 when the user has no key for that service.
 */
export async function DELETE(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(authHeader.split('Bearer ')[1])).uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const { service } = await request.json();
    if (!service) {
      return NextResponse.json({ error: 'Service is required' }, { status: 400 });
    }

    const snapshot = await adminDb
      .collection('userKeys')
      .where('userId', '==', userId)
      .where('service', '==', service)
      .get();
    if (snapshot.empty) {
      return NextResponse.json({ error: 'Key not found' }, { status: 404 });
    }

    // Delete all matches in a single atomic batch.
    const batch = adminDb.batch();
    for (const doc of snapshot.docs) {
      batch.delete(doc.ref);
    }
    await batch.commit();

    return NextResponse.json({
      success: true,
      message: `${service} key deleted`,
    });
  } catch (error) {
    console.error('Error deleting key:', error);
    return NextResponse.json(
      { error: 'Failed to delete key', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,126 @@
/**
* Generate a long-lived MCP API key for ChatGPT integration
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { randomBytes } from 'crypto';
/**
 * POST /api/mcp-key — issue (or return the existing) long-lived MCP API key
 * for the authenticated user, for use by ChatGPT/MCP integrations.
 *
 * Keys look like `vibn_mcp_<64 hex chars>` and are stored in the `mcpKeys`
 * collection. The endpoint is idempotent: a second call returns the key
 * created earlier instead of minting a new one.
 */
export async function POST(request: Request) {
  try {
    // Authenticate user via Firebase ID token.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // Idempotency: reuse the existing key if the user already has one.
    const mcpKeysRef = adminDb.collection('mcpKeys');
    const existingKey = await mcpKeysRef
      .where('userId', '==', userId)
      .limit(1)
      .get();
    if (!existingKey.empty) {
      const keyData = existingKey.docs[0].data();
      return NextResponse.json({
        apiKey: keyData.key,
        createdAt: keyData.createdAt,
        message: 'Using existing MCP API key',
      });
    }

    // Generate a new 256-bit random key with a recognizable prefix.
    const apiKey = `vibn_mcp_${randomBytes(32).toString('hex')}`;

    // Compute the timestamp once so the stored and returned values match
    // exactly (previously two separate `new Date()` calls could differ by
    // a few milliseconds).
    const createdAt = new Date().toISOString();
    await mcpKeysRef.add({
      userId,
      key: apiKey,
      type: 'mcp',
      createdAt,
      lastUsed: null,
    });

    return NextResponse.json({
      apiKey,
      createdAt,
      message: 'MCP API key generated successfully',
    });
  } catch (error) {
    console.error('Error generating MCP key:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate MCP key',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
// DELETE endpoint to revoke MCP key
/**
 * DELETE /api/mcp-key — revoke every MCP API key owned by the caller.
 * Succeeds (with an informational message) even when no key exists.
 */
export async function DELETE(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(authHeader.split('Bearer ')[1])).uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // Collect every key the user owns (there should normally be one).
    const ownedKeys = await adminDb
      .collection('mcpKeys')
      .where('userId', '==', userId)
      .get();
    if (ownedKeys.empty) {
      return NextResponse.json({ message: 'No MCP key to delete' });
    }

    // Revoke everything in one atomic batch.
    const batch = adminDb.batch();
    for (const doc of ownedKeys.docs) {
      batch.delete(doc.ref);
    }
    await batch.commit();

    return NextResponse.json({ message: 'MCP key deleted successfully' });
  } catch (error) {
    console.error('Error deleting MCP key:', error);
    return NextResponse.json(
      {
        error: 'Failed to delete MCP key',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

298
app/api/mcp/route.ts Normal file
View File

@@ -0,0 +1,298 @@
/**
* Vibn MCP HTTP API
*
* Exposes MCP capabilities over HTTP for web-based AI assistants
*/
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
 * POST /api/mcp — action dispatcher for the Vibn MCP HTTP API.
 *
 * Auth: the Authorization bearer is either a long-lived MCP API key
 * (`vibn_mcp_...`, looked up in the `mcpKeys` collection) or a Firebase ID
 * token. Body: { action, params } where action is one of
 * 'list_resources' | 'read_resource' | 'call_tool'.
 */
export async function POST(request: Request) {
  try {
    // Authenticate user
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const token = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    // Try MCP API key first (for ChatGPT integration)
    if (token.startsWith('vibn_mcp_')) {
      // The owning userId is stored on the key document.
      const mcpKeysSnapshot = await adminDb
        .collection('mcpKeys')
        .where('key', '==', token)
        .limit(1)
        .get();
      if (mcpKeysSnapshot.empty) {
        return NextResponse.json({ error: 'Invalid MCP API key' }, { status: 401 });
      }
      const keyDoc = mcpKeysSnapshot.docs[0];
      userId = keyDoc.data().userId;
      // Update last used timestamp
      await keyDoc.ref.update({
        lastUsed: new Date().toISOString(),
      });
    } else {
      // Try Firebase ID token (for direct user access)
      try {
        const decodedToken = await adminAuth.verifyIdToken(token);
        userId = decodedToken.uid;
      } catch (error) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }
    const body = await request.json();
    const { action, params } = body;
    // Handle different MCP actions
    switch (action) {
      case 'list_resources': {
        // Static catalogue of the two resource roots this server exposes.
        return NextResponse.json({
          resources: [
            {
              uri: `vibn://projects/${userId}`,
              name: 'My Projects',
              description: 'All your Vibn projects',
              mimeType: 'application/json',
            },
            {
              uri: `vibn://sessions/${userId}`,
              name: 'My Sessions',
              description: 'All your coding sessions',
              mimeType: 'application/json',
            },
          ],
        });
      }
      case 'read_resource': {
        const { uri } = params;
        // Project list: the caller's 50 most recent projects.
        if (uri === `vibn://projects/${userId}`) {
          const projectsSnapshot = await adminDb
            .collection('projects')
            .where('userId', '==', userId)
            .orderBy('createdAt', 'desc')
            .limit(50)
            .get();
          const projects = projectsSnapshot.docs.map(doc => ({
            id: doc.id,
            ...doc.data(),
          }));
          return NextResponse.json({
            contents: [
              {
                uri,
                mimeType: 'application/json',
                text: JSON.stringify(projects, null, 2),
              },
            ],
          });
        }
        // Single-project read. The project id is the final path segment.
        // Accept both 'vibn://projects/{projectId}' (4 segments when split
        // on '/') and the 'vibn://projects/{userId}/{projectId}' form
        // advertised by the GET capabilities endpoint (5 segments).
        // Previously only the 4-segment form matched, so the advertised URI
        // shape always fell through to 'Unknown resource'.
        if (uri.startsWith('vibn://projects/')) {
          const segments = uri.split('/');
          if (segments.length === 4 || segments.length === 5) {
            const projectId = segments[segments.length - 1];
            const projectDoc = await adminDb.collection('projects').doc(projectId).get();
            // Ownership check: only the owner may read the project.
            if (!projectDoc.exists || projectDoc.data()?.userId !== userId) {
              return NextResponse.json({ error: 'Project not found' }, { status: 404 });
            }
            return NextResponse.json({
              contents: [
                {
                  uri,
                  mimeType: 'application/json',
                  text: JSON.stringify({ id: projectDoc.id, ...projectDoc.data() }, null, 2),
                },
              ],
            });
          }
        }
        // Session list: the caller's 50 most recent sessions.
        if (uri === `vibn://sessions/${userId}`) {
          const sessionsSnapshot = await adminDb
            .collection('sessions')
            .where('userId', '==', userId)
            .orderBy('createdAt', 'desc')
            .limit(50)
            .get();
          const sessions = sessionsSnapshot.docs.map(doc => ({
            id: doc.id,
            ...doc.data(),
          }));
          return NextResponse.json({
            contents: [
              {
                uri,
                mimeType: 'application/json',
                text: JSON.stringify(sessions, null, 2),
              },
            ],
          });
        }
        return NextResponse.json({ error: 'Unknown resource' }, { status: 404 });
      }
      case 'call_tool': {
        const { name, arguments: args } = params;
        // Tool: project metadata plus aggregated session statistics.
        if (name === 'get_project_summary') {
          const { projectId } = args;
          const projectDoc = await adminDb.collection('projects').doc(projectId).get();
          if (!projectDoc.exists || projectDoc.data()?.userId !== userId) {
            return NextResponse.json({ error: 'Project not found' }, { status: 404 });
          }
          const project = { id: projectDoc.id, ...projectDoc.data() };
          const sessionsSnapshot = await adminDb
            .collection('sessions')
            .where('projectId', '==', projectId)
            .where('userId', '==', userId)
            .get();
          const sessions = sessionsSnapshot.docs.map(doc => doc.data());
          // Missing numeric fields default to 0 so partial data still sums.
          const totalCost = sessions.reduce((sum, s: any) => sum + (s.cost || 0), 0);
          const totalTokens = sessions.reduce((sum, s: any) => sum + (s.tokensUsed || 0), 0);
          const totalDuration = sessions.reduce((sum, s: any) => sum + (s.duration || 0), 0);
          const summary = {
            project,
            stats: {
              totalSessions: sessions.length,
              totalCost,
              totalTokens,
              totalDuration,
            },
            recentSessions: sessions.slice(0, 5),
          };
          return NextResponse.json({
            content: [
              {
                type: 'text',
                text: JSON.stringify(summary, null, 2),
              },
            ],
          });
        }
        // Tool: filter sessions by optional projectId / workspacePath.
        if (name === 'search_sessions') {
          const { projectId, workspacePath } = args;
          let query = adminDb.collection('sessions').where('userId', '==', userId);
          if (projectId) {
            query = query.where('projectId', '==', projectId) as any;
          }
          if (workspacePath) {
            query = query.where('workspacePath', '==', workspacePath) as any;
          }
          const snapshot = await (query as any).orderBy('createdAt', 'desc').limit(50).get();
          const sessions = snapshot.docs.map((doc: any) => ({
            id: doc.id,
            ...doc.data(),
          }));
          return NextResponse.json({
            content: [
              {
                type: 'text',
                text: JSON.stringify(sessions, null, 2),
              },
            ],
          });
        }
        // Tool: the project's AI conversation log, oldest first.
        if (name === 'get_conversation_context') {
          const { projectId, limit = 50 } = args;
          const projectDoc = await adminDb.collection('projects').doc(projectId).get();
          if (!projectDoc.exists || projectDoc.data()?.userId !== userId) {
            return NextResponse.json({ error: 'Project not found' }, { status: 404 });
          }
          const conversationsSnapshot = await adminDb
            .collection('projects')
            .doc(projectId)
            .collection('aiConversations')
            .orderBy('createdAt', 'asc')
            .limit(limit)
            .get();
          const conversations = conversationsSnapshot.docs.map(doc => ({
            id: doc.id,
            ...doc.data(),
          }));
          return NextResponse.json({
            content: [
              {
                type: 'text',
                text: JSON.stringify(conversations, null, 2),
              },
            ],
          });
        }
        return NextResponse.json({ error: 'Unknown tool' }, { status: 404 });
      }
      default:
        return NextResponse.json({ error: 'Unknown action' }, { status: 400 });
    }
  } catch (error) {
    console.error('MCP API error:', error);
    return NextResponse.json(
      {
        error: 'Failed to process MCP request',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
// GET endpoint to list capabilities
/**
 * GET /api/mcp — advertise the MCP server's capabilities.
 * Static metadata only; no authentication required.
 */
export async function GET(request: Request) {
  const capabilities = {
    name: 'vibn-mcp-server',
    version: '1.0.0',
    capabilities: {
      resources: {
        supported: true,
        endpoints: [
          'vibn://projects/{userId}',
          'vibn://projects/{userId}/{projectId}',
          'vibn://sessions/{userId}',
        ],
      },
      tools: {
        supported: true,
        available: [
          'get_project_summary',
          'search_sessions',
          'get_conversation_context',
        ],
      },
    },
    documentation: 'https://vibnai.com/docs/mcp',
  };
  return NextResponse.json(capabilities);
}

View File

@@ -0,0 +1,72 @@
/**
* Retrieve ChatGPT GPT information
* Note: This is limited by what's available via OpenAI API
* GPTs are primarily accessible through the ChatGPT web interface
*/
import { NextResponse } from 'next/server';
import { getAdminAuth } from '@/lib/firebase/admin';
/**
 * POST — parse a ChatGPT GPT share URL and return a reference record.
 *
 * Only the GPT id and a display name are extracted from the URL; the OpenAI
 * API does not expose GPTs directly, so conversations must be imported as
 * individual chat sessions.
 */
export async function POST(request: Request) {
  try {
    // Authenticate user
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const adminAuth = getAdminAuth();
    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(authHeader.split('Bearer ')[1])).uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const { gptUrl } = await request.json();
    if (!gptUrl) {
      return NextResponse.json({ error: 'GPT URL is required' }, { status: 400 });
    }

    // URLs look like https://chatgpt.com/g/g-p-[id]-[name]/project —
    // pull out the id, then derive a human-readable name from the slug.
    const gptMatch = gptUrl.match(/\/g\/(g-p-[a-zA-Z0-9]+)/);
    if (!gptMatch) {
      return NextResponse.json({ error: 'Invalid GPT URL format' }, { status: 400 });
    }
    const gptId = gptMatch[1];

    const nameMatch = gptUrl.match(/g-p-[a-zA-Z0-9]+-([^\/]+)/);
    const gptName = nameMatch ? nameMatch[1].replace(/-/g, ' ') : 'Unknown GPT';
    console.log(`[ChatGPT GPT] Extracted ID: ${gptId}, Name: ${gptName}`);

    return NextResponse.json({
      success: true,
      data: {
        id: gptId,
        name: gptName,
        url: gptUrl,
        type: 'chatgpt-gpt',
        message: 'GPT reference saved. To capture conversations with this GPT, import individual chat sessions.',
      },
    });
  } catch (error) {
    console.error('[ChatGPT GPT] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to process GPT',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,82 @@
/**
* Retrieve OpenAI Projects using the Projects API
* https://platform.openai.com/docs/api-reference/projects
*/
import { NextResponse } from 'next/server';
import { getAdminAuth } from '@/lib/firebase/admin';
// Base endpoint for the OpenAI Projects API.
const OPENAI_API_URL = 'https://api.openai.com/v1/projects';

/**
 * POST — proxy a read of the caller's OpenAI projects.
 *
 * Body: { openaiApiKey, projectId? }. With projectId set, fetches that one
 * project; otherwise lists all projects. OpenAI error bodies are passed
 * through to the caller along with their original status code.
 */
export async function POST(request: Request) {
  try {
    // Authenticate user
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const adminAuth = getAdminAuth();
    let userId: string;
    try {
      userId = (await adminAuth.verifyIdToken(authHeader.split('Bearer ')[1])).uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const { openaiApiKey, projectId } = await request.json();
    if (!openaiApiKey) {
      return NextResponse.json({ error: 'OpenAI API key is required' }, { status: 400 });
    }

    // Single project when an id is given, otherwise the full list.
    const url = projectId ? `${OPENAI_API_URL}/${projectId}` : OPENAI_API_URL;
    console.log(`[OpenAI Projects] Fetching: ${url}`);

    const openaiResponse = await fetch(url, {
      method: 'GET',
      headers: {
        'Authorization': `Bearer ${openaiApiKey}`,
        'Content-Type': 'application/json',
      },
    });

    if (!openaiResponse.ok) {
      // Surface OpenAI's error body and status to the caller.
      const errorText = await openaiResponse.text();
      console.error('[OpenAI Projects] API error:', openaiResponse.status, errorText);
      return NextResponse.json(
        {
          error: 'Failed to fetch from OpenAI',
          details: errorText,
          status: openaiResponse.status,
        },
        { status: openaiResponse.status }
      );
    }

    const projectData = await openaiResponse.json();
    console.log('[OpenAI Projects] Data fetched successfully');
    return NextResponse.json({ success: true, data: projectData });
  } catch (error) {
    console.error('[OpenAI Projects] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch OpenAI project',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,84 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET /api/projects/[projectId]/activity — activity report for a project.
 *
 * Aggregates the project's `sessions` documents into:
 *  - the chronologically sorted session list (oldest first),
 *  - per-file edit counts with the distinct days each file was touched
 *    (top 50 files),
 *  - per-day session counts.
 *
 * NOTE(review): there is no auth or ownership check here — any caller who
 * knows a projectId can read its sessions. Confirm this route is protected
 * upstream or intended to be internal-only.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Get all sessions for this project
    const sessionsSnapshot = await adminDb
      .collection('sessions')
      .where('projectId', '==', projectId)
      .get();
    // Normalize Firestore Timestamps to Dates (optional-call `toDate?.()`
    // falls back to the raw stored value), default missing fields, then
    // sort oldest-first by start time (missing times sort to the front).
    const sessions = sessionsSnapshot.docs
      .map(doc => {
        const data = doc.data();
        return {
          id: doc.id,
          startTime: data.startTime?.toDate?.() || data.startTime,
          endTime: data.endTime?.toDate?.() || data.endTime,
          duration: data.duration || 0,
          filesModified: data.filesModified || [],
          conversationSummary: data.conversationSummary || '',
          workspacePath: data.workspacePath || '',
          conversation: data.conversation || []
        };
      })
      .sort((a, b) => {
        const aTime = a.startTime ? new Date(a.startTime).getTime() : 0;
        const bTime = b.startTime ? new Date(b.startTime).getTime() : 0;
        return aTime - bTime;
      });
    // Analyze activity
    // fileActivity: per-file edit count plus the distinct dates it was touched.
    // dailyActivity: session count per calendar day (UTC, via toISOString).
    const fileActivity: Record<string, { count: number; dates: string[] }> = {};
    const dailyActivity: Record<string, number> = {};
    sessions.forEach(session => {
      // Sessions without a start time are excluded from the aggregates.
      if (!session.startTime) return;
      const date = new Date(session.startTime).toISOString().split('T')[0];
      dailyActivity[date] = (dailyActivity[date] || 0) + 1;
      session.filesModified.forEach((file: string) => {
        if (!fileActivity[file]) {
          fileActivity[file] = { count: 0, dates: [] };
        }
        fileActivity[file].count++;
        if (!fileActivity[file].dates.includes(date)) {
          fileActivity[file].dates.push(date);
        }
      });
    });
    // Get top files
    const topFiles = Object.entries(fileActivity)
      .map(([file, data]) => ({ file, ...data }))
      .sort((a, b) => b.count - a.count)
      .slice(0, 50);
    return NextResponse.json({
      totalSessions: sessions.length,
      sessions,
      fileActivity: topFiles,
      dailyActivity: Object.entries(dailyActivity)
        .map(([date, count]) => ({ date, sessionCount: count }))
        .sort((a, b) => a.date.localeCompare(b.date))
    });
  } catch (error) {
    console.error('Error fetching activity:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch activity',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,28 @@
import { NextResponse } from 'next/server';
import { buildCanonicalProductModel } from '@/lib/server/product-model';
export async function POST(
_request: Request,
{ params }: { params: Promise<{ projectId: string }> },
) {
try {
const { projectId } = await params;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const canonicalProductModel = await buildCanonicalProductModel(projectId);
return NextResponse.json({ canonicalProductModel });
} catch (error) {
console.error('[aggregate] Failed to build canonical product model', error);
return NextResponse.json(
{
error: 'Failed to aggregate product signals',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,190 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
/**
 * POST /api/projects/[projectId]/associate-github-sessions
 *
 * Associate existing, unassociated sessions with a project when GitHub is
 * connected. Body: { githubRepo: "owner/name", githubRepoUrl? }.
 *
 * Matching strategies (results are combined and de-duplicated by doc id):
 *  1. Exact `githubRepo` field on the session (set by the Cursor extension).
 *  2A. Exact `workspacePath` match, when the project has one stored.
 *  2B. Otherwise a fuzzy match: the session's workspace folder name equals
 *      the repo name.
 *
 * All matched sessions get projectId set and needsProjectAssociation
 * cleared in one batched write.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const { githubRepo, githubRepoUrl } = await request.json();
    if (!githubRepo) {
      return NextResponse.json(
        { error: 'githubRepo is required' },
        { status: 400 }
      );
    }
    // Verify project belongs to user
    const projectDoc = await adminDb.collection('projects').doc(projectId).get();
    if (!projectDoc.exists || projectDoc.data()?.userId !== userId) {
      return NextResponse.json(
        { error: 'Project not found or unauthorized' },
        { status: 403 }
      );
    }
    const projectData = projectDoc.data();
    const projectWorkspacePath = projectData?.workspacePath;
    console.log(`[Associate GitHub Sessions] Project: ${projectId}`);
    console.log(`[Associate GitHub Sessions] GitHub repo: ${githubRepo}`);
    console.log(`[Associate GitHub Sessions] Project workspace path: ${projectWorkspacePath || 'not set'}`);
    console.log(`[Associate GitHub Sessions] User ID: ${userId}`);
    // Strategy 1: Match by exact githubRepo field in sessions
    // (This requires the Cursor extension to send githubRepo with sessions)
    const sessionsSnapshot1 = await adminDb
      .collection('sessions')
      .where('userId', '==', userId)
      .where('githubRepo', '==', githubRepo)
      .where('needsProjectAssociation', '==', true)
      .get();
    console.log(`[Associate GitHub Sessions] Found ${sessionsSnapshot1.size} sessions with exact githubRepo match`);
    // Strategy 2: Match by exact workspacePath (if project has one set)
    let matchedByPath: any[] = [];
    if (projectWorkspacePath) {
      console.log(`[Associate GitHub Sessions] Strategy 2A: Exact workspace path match`);
      console.log(`[Associate GitHub Sessions] Looking for sessions from: ${projectWorkspacePath}`);
      const pathMatchSnapshot = await adminDb
        .collection('sessions')
        .where('userId', '==', userId)
        .where('workspacePath', '==', projectWorkspacePath)
        .where('needsProjectAssociation', '==', true)
        .get();
      matchedByPath = pathMatchSnapshot.docs;
      console.log(`[Associate GitHub Sessions] Found ${matchedByPath.length} sessions with exact workspace path match`);
    } else {
      // Fallback: Match by repo name (less reliable but better than nothing)
      console.log(`[Associate GitHub Sessions] Strategy 2B: Fuzzy match by repo folder name (project has no workspace path set)`);
      const repoName = githubRepo.split('/')[1]; // Extract "my-app" from "username/my-app"
      console.log(`[Associate GitHub Sessions] Looking for folders ending with: ${repoName}`);
      const allUnassociatedSessions = await adminDb
        .collection('sessions')
        .where('userId', '==', userId)
        .where('needsProjectAssociation', '==', true)
        .get();
      console.log(`[Associate GitHub Sessions] Total unassociated sessions for user: ${allUnassociatedSessions.size}`);
      // NOTE(review): workspace paths are split on '/', so Windows-style '\'
      // paths would never fuzzy-match — confirm the extension always reports
      // POSIX-style paths.
      matchedByPath = allUnassociatedSessions.docs.filter(doc => {
        const workspacePath = doc.data().workspacePath;
        if (!workspacePath) return false;
        const pathSegments = workspacePath.split('/');
        const lastSegment = pathSegments[pathSegments.length - 1];
        const matches = lastSegment === repoName;
        if (matches) {
          console.log(`[Associate GitHub Sessions] ✅ Fuzzy match: ${workspacePath} ends with ${repoName}`);
        }
        return matches;
      });
      console.log(`[Associate GitHub Sessions] Found ${matchedByPath.length} sessions with fuzzy folder name match`);
      // Debug: Log some example workspace paths to help diagnose
      if (matchedByPath.length === 0 && allUnassociatedSessions.size > 0) {
        console.log(`[Associate GitHub Sessions] Debug - Example workspace paths in unassociated sessions:`);
        allUnassociatedSessions.docs.slice(0, 5).forEach(doc => {
          const path = doc.data().workspacePath;
          const folder = path ? path.split('/').pop() : 'null';
          console.log(`  - ${path} (folder: ${folder})`);
        });
        console.log(`[Associate GitHub Sessions] Tip: Set project.workspacePath for accurate matching`);
      }
    }
    // Combine both strategies (deduplicate by session ID)
    const allMatchedSessions = new Map();
    // Add exact matches
    sessionsSnapshot1.docs.forEach(doc => {
      allMatchedSessions.set(doc.id, doc);
    });
    // Add path matches
    matchedByPath.forEach(doc => {
      allMatchedSessions.set(doc.id, doc);
    });
    // Batch update all matched sessions
    if (allMatchedSessions.size > 0) {
      const batch = adminDb.batch();
      let count = 0;
      allMatchedSessions.forEach((doc) => {
        batch.update(doc.ref, {
          projectId,
          needsProjectAssociation: false,
          updatedAt: FieldValue.serverTimestamp(),
        });
        count++;
      });
      await batch.commit();
      console.log(`[Associate GitHub Sessions] Successfully associated ${count} sessions with project ${projectId}`);
      return NextResponse.json({
        success: true,
        sessionsAssociated: count,
        message: `Found and linked ${count} existing chat sessions from this repository`,
        details: {
          exactMatches: sessionsSnapshot1.size,
          pathMatches: matchedByPath.length,
        }
      });
    }
    return NextResponse.json({
      success: true,
      sessionsAssociated: 0,
      message: 'No matching sessions found for this repository',
    });
  } catch (error) {
    console.error('[Associate GitHub Sessions] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to associate sessions',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,505 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { getApiUrl } from '@/lib/utils/api-url';
// Types
/**
 * One block of work reconstructed from a conversation's messages.
 * Presumably one WorkSession per contiguous activity window — confirm at
 * the point where these are produced (builder is below this view).
 */
interface WorkSession {
  sessionId: string;
  date: string;
  startTime: Date;
  endTime: Date;
  duration: number; // minutes
  messageCount: number;
  userMessages: number;
  aiMessages: number;
  topics: string[];
  filesWorkedOn: string[];
}
/** Project-level activity timeline aggregated over all WorkSessions. */
interface TimelineAnalysis {
  firstActivity: Date | null;
  lastActivity: Date | null;
  totalDays: number;
  activeDays: number;
  totalSessions: number;
  sessions: WorkSession[];
  // Derived throughput metrics.
  velocity: {
    messagesPerDay: number;
    averageSessionLength: number;
    peakProductivityHours: number[];
  };
}
/**
 * Token/cost estimate derived from message counts and lengths, priced with
 * the per-million-token rates in `pricing`.
 */
interface CostAnalysis {
  messageStats: {
    totalMessages: number;
    userMessages: number;
    aiMessages: number;
    avgMessageLength: number;
  };
  estimatedTokens: {
    input: number;
    output: number;
    total: number;
  };
  costs: {
    inputCost: number;
    outputCost: number;
    totalCost: number;
    currency: string;
  };
  model: string;
  pricing: {
    inputPer1M: number;
    outputPer1M: number;
  };
}
/** A product feature inferred from the project, with its delivery status. */
interface Feature {
  name: string;
  description: string;
  pages: string[];
  apis: string[];
  status: 'complete' | 'in-progress' | 'planned';
}
/**
 * POST — generates a full audit report for a project: conversation timeline,
 * estimated AI spend, static feature list and tech stack, plus best-effort
 * extension activity, Git history and the unified timeline.
 *
 * Returns 404 when the project has no imported Cursor conversations,
 * 500 on any unexpected failure.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // 1. Load conversations from Firestore
    console.log(`🔍 Loading conversations for project ${projectId}...`);
    const conversationsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('cursorConversations')
      .get();
    if (conversationsSnapshot.empty) {
      return NextResponse.json({
        error: 'No conversations found for this project',
        suggestion: 'Import Cursor conversations first'
      }, { status: 404 });
    }
    const conversations = conversationsSnapshot.docs.map(doc => ({
      id: doc.id,
      ...doc.data()
    }));
    console.log(`✅ Found ${conversations.length} conversations`);
    // 2. Load all messages for each conversation. The per-conversation
    // queries are independent of each other, so run them concurrently
    // instead of one-at-a-time (previously a serial N+1 loop).
    const messagesByConversation = await Promise.all(
      conversations.map(async conv => {
        const messagesSnapshot = await adminDb
          .collection('projects')
          .doc(projectId)
          .collection('cursorConversations')
          .doc(conv.id)
          .collection('messages')
          .orderBy('createdAt', 'asc')
          .get();
        return messagesSnapshot.docs.map(doc => ({
          ...doc.data(),
          conversationId: conv.id,
          conversationName: conv.name
        }));
      })
    );
    const allMessages: any[] = messagesByConversation.flat();
    console.log(`✅ Loaded ${allMessages.length} total messages`);
    // 3. Load extension activity data (files edited, sessions) — best-effort:
    // the report is still produced if this query fails.
    let extensionActivity: any = null;
    try {
      const activitySnapshot = await adminDb
        .collection('sessions')
        .where('projectId', '==', projectId)
        .get();
      const extensionSessions = activitySnapshot.docs
        .map(doc => {
          const data = doc.data();
          return {
            // Firestore Timestamps expose toDate(); fall back to raw value.
            startTime: data.startTime?.toDate?.() || data.startTime,
            endTime: data.endTime?.toDate?.() || data.endTime,
            filesModified: data.filesModified || [],
            conversationSummary: data.conversationSummary || ''
          };
        })
        .sort((a, b) => {
          const aTime = a.startTime ? new Date(a.startTime).getTime() : 0;
          const bTime = b.startTime ? new Date(b.startTime).getTime() : 0;
          return aTime - bTime;
        });
      // Tally how often each file was touched across sessions.
      const fileActivity: Record<string, number> = {};
      extensionSessions.forEach(session => {
        session.filesModified.forEach((file: string) => {
          fileActivity[file] = (fileActivity[file] || 0) + 1;
        });
      });
      const topFiles = Object.entries(fileActivity)
        .map(([file, count]) => ({ file, editCount: count }))
        .sort((a, b) => b.editCount - a.editCount)
        .slice(0, 20);
      extensionActivity = {
        totalSessions: extensionSessions.length,
        uniqueFilesEdited: Object.keys(fileActivity).length,
        topFiles,
        earliestActivity: extensionSessions[0]?.startTime || null,
        latestActivity: extensionSessions[extensionSessions.length - 1]?.endTime || null
      };
      console.log(`✅ Loaded ${extensionSessions.length} extension activity sessions`);
    } catch (error) {
      console.log(`⚠️ Could not load extension activity: ${error}`);
    }
    // 4. Load Git commit history — best-effort
    let gitHistory: any = null;
    try {
      const gitResponse = await fetch(getApiUrl(`/api/projects/${projectId}/git-history`, request));
      if (gitResponse.ok) {
        gitHistory = await gitResponse.json();
        console.log(`✅ Loaded ${gitHistory.totalCommits} Git commits`);
      }
    } catch (error) {
      console.log(`⚠️ Could not load Git history: ${error}`);
    }
    // 4b. Load unified timeline (combines all data sources by day) — best-effort
    let unifiedTimeline: any = null;
    try {
      const timelineResponse = await fetch(getApiUrl(`/api/projects/${projectId}/timeline`, request));
      if (timelineResponse.ok) {
        unifiedTimeline = await timelineResponse.json();
        // Guard days with ?. — an unexpected payload shape must not crash the report.
        console.log(`✅ Loaded unified timeline with ${unifiedTimeline.days?.length ?? 0} days`);
      }
    } catch (error) {
      console.log(`⚠️ Could not load unified timeline: ${error}`);
    }
    // 5. Analyze timeline
    const timeline = analyzeTimeline(allMessages);
    // 6. Calculate costs
    const costs = calculateCosts(allMessages);
    // 7. Extract features from codebase (static list for now)
    const features = getFeaturesList();
    // 8. Get tech stack
    const techStack = getTechStack();
    // 9. Generate report
    const report = {
      projectId,
      generatedAt: new Date().toISOString(),
      timeline,
      costs,
      features,
      techStack,
      extensionActivity,
      gitHistory,
      unifiedTimeline,
      summary: {
        totalConversations: conversations.length,
        totalMessages: allMessages.length,
        developmentPeriod: timeline.totalDays,
        estimatedCost: costs.costs.totalCost,
        extensionSessions: extensionActivity?.totalSessions || 0,
        filesEdited: extensionActivity?.uniqueFilesEdited || 0,
        gitCommits: gitHistory?.totalCommits || 0,
        linesAdded: gitHistory?.totalInsertions || 0,
        linesRemoved: gitHistory?.totalDeletions || 0,
        // Fixed: previously `unifiedTimeline?.days.length` threw when the
        // timeline payload existed but carried no `days` array.
        timelineDays: unifiedTimeline?.days?.length || 0
      }
    };
    console.log(`✅ Audit report generated successfully`);
    return NextResponse.json(report);
  } catch (error) {
    console.error('Error generating audit report:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate audit report',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
// Helper: derive session- and velocity-level statistics from a flat list of
// chat messages. Messages separated by more than four hours are treated as
// belonging to distinct work sessions. The input array is never mutated.
function analyzeTimeline(messages: any[]): TimelineAnalysis {
  if (!messages.length) {
    // Empty input: a fully zeroed analysis.
    return {
      firstActivity: null,
      lastActivity: null,
      totalDays: 0,
      activeDays: 0,
      totalSessions: 0,
      sessions: [],
      velocity: {
        messagesPerDay: 0,
        averageSessionLength: 0,
        peakProductivityHours: []
      }
    };
  }
  // Work on a chronologically ordered copy.
  const ordered = [...messages].sort(
    (x, y) => new Date(x.createdAt).getTime() - new Date(y.createdAt).getTime()
  );
  const first = new Date(ordered[0].createdAt);
  const last = new Date(ordered[ordered.length - 1].createdAt);
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const spanDays = Math.ceil((last.getTime() - first.getTime()) / MS_PER_DAY);
  // Partition into sessions: a gap longer than four hours opens a new one.
  const GAP_MS = 4 * 60 * 60 * 1000;
  const sessions: WorkSession[] = [];
  let open: any = null;
  for (const message of ordered) {
    const at = new Date(message.createdAt).getTime();
    if (open && at - open.endTime <= GAP_MS) {
      // Still within the current session.
      open.messages.push(message);
      open.endTime = at;
    } else {
      // Close the previous session (if any) and open a fresh one.
      if (open) sessions.push(formatSession(open));
      open = {
        messages: [message],
        startTime: at,
        endTime: at,
        date: new Date(at).toISOString().split('T')[0]
      };
    }
  }
  if (open) sessions.push(formatSession(open));
  // Distinct UTC calendar days that saw at least one message.
  const dayCount = new Set(
    ordered.map(m => new Date(m.createdAt).toISOString().split('T')[0])
  ).size;
  const minutesTotal = sessions.reduce((acc, s) => acc + s.duration, 0);
  // Tally messages per local hour-of-day, then keep the three busiest hours.
  const perHour = new Map<number, number>();
  for (const m of ordered) {
    const hour = new Date(m.createdAt).getHours();
    perHour.set(hour, (perHour.get(hour) || 0) + 1);
  }
  const busiestHours = [...perHour.entries()]
    .sort((x, y) => y[1] - x[1])
    .slice(0, 3)
    .map(([hour]) => hour)
    .sort((x, y) => x - y);
  return {
    firstActivity: first,
    lastActivity: last,
    totalDays: spanDays,
    activeDays: dayCount,
    totalSessions: sessions.length,
    sessions,
    velocity: {
      messagesPerDay: messages.length / dayCount,
      averageSessionLength: sessions.length > 0 ? Math.round(minutesTotal / sessions.length) : 0,
      peakProductivityHours: busiestHours
    }
  };
}
// Converts the internal session accumulator into the public WorkSession shape.
function formatSession(sessionData: any): WorkSession {
  const minutes = Math.ceil((sessionData.endTime - sessionData.startTime) / (1000 * 60));
  const countByType = (t: number) =>
    sessionData.messages.filter((m: any) => m.type === t).length;
  // Up to three distinct conversation names, in order of first appearance.
  const topics = [...new Set(sessionData.messages.map((m: any) => m.conversationName))].slice(0, 3);
  // Every file referenced by any message in the session, de-duplicated.
  const filesWorkedOn = [...new Set(
    sessionData.messages.flatMap((m: any) => m.attachedFiles || [])
  )];
  return {
    sessionId: `session-${sessionData.date}-${sessionData.startTime}`,
    date: sessionData.date,
    startTime: new Date(sessionData.startTime),
    endTime: new Date(sessionData.endTime),
    duration: minutes,
    messageCount: sessionData.messages.length,
    userMessages: countByType(1),
    aiMessages: countByType(2),
    topics,
    filesWorkedOn
  };
}
// Helper: estimate AI usage costs from raw message text. Token counts use
// the rough 4-characters-per-token heuristic; prices are Claude Sonnet 3.5
// per-million-token rates (Nov 2024).
function calculateCosts(messages: any[]): CostAnalysis {
  const PRICE_IN = 3.0;   // USD per 1M input tokens
  const PRICE_OUT = 15.0; // USD per 1M output tokens
  // Total characters across a list of messages (missing text counts as 0).
  const charSum = (list: any[]) =>
    list.reduce((acc, m) => acc + (m.text?.length || 0), 0);
  const fromUser = messages.filter(m => m.type === 1);
  const fromAi = messages.filter(m => m.type === 2);
  const avgLen = messages.length > 0
    ? Math.round(charSum(messages) / messages.length)
    : 0;
  // Rough conversion: 1 token ≈ 4 characters.
  const tokensIn = Math.ceil(charSum(fromUser) / 4);
  const tokensOut = Math.ceil(charSum(fromAi) / 4);
  const usd = (tokens: number, rate: number) => (tokens / 1_000_000) * rate;
  const toCents = (x: number) => Math.round(x * 100) / 100; // 2-decimal rounding
  const costIn = usd(tokensIn, PRICE_IN);
  const costOut = usd(tokensOut, PRICE_OUT);
  return {
    messageStats: {
      totalMessages: messages.length,
      userMessages: fromUser.length,
      aiMessages: fromAi.length,
      avgMessageLength: avgLen
    },
    estimatedTokens: {
      input: tokensIn,
      output: tokensOut,
      total: tokensIn + tokensOut
    },
    costs: {
      inputCost: toCents(costIn),
      outputCost: toCents(costOut),
      totalCost: toCents(costIn + costOut),
      currency: 'USD'
    },
    model: 'Claude Sonnet 3.5',
    pricing: {
      inputPer1M: PRICE_IN,
      outputPer1M: PRICE_OUT
    }
  };
}
// Helper: static catalogue of product features surfaced in audit reports.
// NOTE(review): hand-maintained list — keep in sync with the actual routes.
function getFeaturesList(): Feature[] {
  // Small local factory so each entry stays on a few compact lines.
  const feature = (
    name: string,
    description: string,
    status: Feature['status'],
    pages: string[],
    apis: string[]
  ): Feature => ({ name, description, pages, apis, status });
  return [
    feature(
      'Project Management',
      'Create, manage, and organize AI-coded projects',
      'complete',
      ['/projects', '/project/[id]/overview', '/project/[id]/settings'],
      ['/api/projects/create', '/api/projects/[id]', '/api/projects/delete']
    ),
    feature(
      'AI Chat Integration',
      'Real-time chat with AI assistants for development',
      'complete',
      ['/project/[id]/v_ai_chat'],
      ['/api/ai/chat', '/api/ai/conversation']
    ),
    feature(
      'Cursor Import',
      'Import historical conversations from Cursor IDE',
      'complete',
      [],
      ['/api/cursor/backfill', '/api/cursor/tag-sessions']
    ),
    feature(
      'GitHub Integration',
      'Connect GitHub repositories and browse code',
      'complete',
      ['/connections'],
      ['/api/github/connect', '/api/github/repos', '/api/github/repo-tree']
    ),
    feature(
      'Session Tracking',
      'Track development sessions and activity',
      'complete',
      ['/project/[id]/sessions'],
      ['/api/sessions/track', '/api/sessions/associate-project']
    ),
    feature(
      'Knowledge Base',
      'Document and organize project knowledge',
      'complete',
      ['/project/[id]/context'],
      ['/api/projects/[id]/knowledge/*']
    ),
    feature(
      'Planning & Automation',
      'Generate development plans and automate workflows',
      'in-progress',
      ['/project/[id]/plan', '/project/[id]/automation'],
      ['/api/projects/[id]/plan/mvp', '/api/projects/[id]/plan/marketing']
    ),
    feature(
      'Analytics & Costs',
      'Track development costs and project analytics',
      'in-progress',
      ['/project/[id]/analytics', '/costs'],
      ['/api/stats', '/api/projects/[id]/aggregate']
    )
  ];
}
// Helper: static, hand-maintained description of the application's stack.
function getTechStack() {
  const frontend = {
    framework: 'Next.js 16.0.1',
    react: '19.2.0',
    typescript: '5.x',
    styling: 'Tailwind CSS 4',
    uiComponents: 'Radix UI + shadcn/ui',
    icons: 'Lucide React',
    fonts: 'Geist Sans, Geist Mono'
  };
  const backend = {
    runtime: 'Next.js API Routes',
    database: 'Firebase Firestore',
    auth: 'Firebase Auth',
    storage: 'Firebase Storage'
  };
  const integrations = [
    'Google Vertex AI',
    'Google Generative AI',
    'GitHub OAuth',
    'v0.dev SDK'
  ];
  return { frontend, backend, integrations };
}

View File

@@ -0,0 +1,165 @@
import { NextRequest, NextResponse } from 'next/server';
import { getApiUrl } from '@/lib/utils/api-url';
/**
* Complete Chronological History
* Returns ALL project data in a single chronological timeline
* Optimized for AI consumption - no truncation, no summaries
*/
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// Load all three data sources
const [contextRes, gitRes, activityRes, timelineRes] = await Promise.all([
fetch(getApiUrl(`/api/projects/${projectId}/context`, request)),
fetch(getApiUrl(`/api/projects/${projectId}/git-history`, request)),
fetch(getApiUrl(`/api/projects/${projectId}/activity`, request)),
fetch(getApiUrl(`/api/projects/${projectId}/timeline`, request))
]);
const context = contextRes.ok ? await contextRes.json() : null;
const git = gitRes.ok ? await gitRes.json() : null;
const activity = activityRes.ok ? await activityRes.json() : null;
const timeline = timelineRes.ok ? await timelineRes.json() : null;
// Build complete chronological event stream
const events: any[] = [];
// Add all Git commits as events
if (git?.commits) {
for (const commit of git.commits) {
events.push({
type: 'git_commit',
timestamp: new Date(commit.date).toISOString(),
date: commit.date.split(' ')[0],
data: {
hash: commit.hash,
author: commit.author,
message: commit.message,
filesChanged: commit.filesChanged,
insertions: commit.insertions,
deletions: commit.deletions
}
});
}
}
// Add all extension sessions as events
if (activity?.sessions) {
for (const session of activity.sessions) {
events.push({
type: 'extension_session',
timestamp: session.startTime,
date: new Date(session.startTime).toISOString().split('T')[0],
data: {
id: session.id,
startTime: session.startTime,
endTime: session.endTime,
duration: session.duration,
filesModified: session.filesModified,
conversationSummary: session.conversationSummary?.substring(0, 200),
conversationSnippets: (session.conversation || []).slice(0, 5).map((msg: any) => ({
role: msg.role,
message: msg.message?.substring(0, 100),
timestamp: msg.timestamp
}))
}
});
}
}
// Add Cursor conversations (from recent conversations in context)
if (context?.activity?.recentConversations) {
for (const conv of context.activity.recentConversations) {
events.push({
type: 'cursor_conversation',
timestamp: conv.createdAt,
date: new Date(conv.createdAt).toISOString().split('T')[0],
data: {
id: conv.id,
name: conv.name,
createdAt: conv.createdAt,
messageCount: conv.recentMessages?.length || 0,
recentMessages: conv.recentMessages?.map((msg: any) => ({
type: msg.type,
text: msg.text?.substring(0, 150),
createdAt: msg.createdAt
}))
}
});
}
}
// Sort everything chronologically
events.sort((a, b) =>
new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
);
// Group by date for easier consumption
const eventsByDate: Record<string, any[]> = {};
for (const event of events) {
if (!eventsByDate[event.date]) {
eventsByDate[event.date] = [];
}
eventsByDate[event.date].push(event);
}
// Build response
const completeHistory = {
project: {
id: projectId,
name: context?.project?.name,
vision: context?.project?.vision,
githubRepo: context?.project?.githubRepo
},
summary: {
totalEvents: events.length,
dateRange: {
earliest: events[0]?.date,
latest: events[events.length - 1]?.date,
totalDays: Object.keys(eventsByDate).length
},
breakdown: {
gitCommits: events.filter(e => e.type === 'git_commit').length,
extensionSessions: events.filter(e => e.type === 'extension_session').length,
cursorConversations: events.filter(e => e.type === 'cursor_conversation').length
}
},
chronologicalEvents: events,
eventsByDate: Object.keys(eventsByDate)
.sort()
.map(date => ({
date,
dayOfWeek: new Date(date).toLocaleDateString('en-US', { weekday: 'long' }),
eventCount: eventsByDate[date].length,
events: eventsByDate[date]
})),
metadata: {
generatedAt: new Date().toISOString(),
dataComplete: true,
includesFullHistory: true
}
};
return NextResponse.json(completeHistory);
} catch (error) {
console.error('Error generating complete history:', error);
return NextResponse.json(
{
error: 'Failed to generate complete history',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,254 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { getApiUrl } from '@/lib/utils/api-url';
/**
* Complete Project Context API
* Returns everything an AI needs to understand the project state
*/
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// 1. Load project metadata
const projectDoc = await adminDb
.collection('projects')
.doc(projectId)
.get();
if (!projectDoc.exists) {
return NextResponse.json(
{ error: 'Project not found' },
{ status: 404 }
);
}
const projectData = projectDoc.data();
// 2. Load timeline data
const timelineResponse = await fetch(
getApiUrl(`/api/projects/${projectId}/timeline`, request)
);
const timeline = timelineResponse.ok ? await timelineResponse.json() : null;
// 3. Load Git history summary
const gitResponse = await fetch(
getApiUrl(`/api/projects/${projectId}/git-history`, request)
);
const gitHistory = gitResponse.ok ? await gitResponse.json() : null;
// 4. Load extension activity
const activityResponse = await fetch(
getApiUrl(`/api/projects/${projectId}/activity`, request)
);
const activity = activityResponse.ok ? await activityResponse.json() : null;
// 5. Load uploaded documents
const documentsSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('documents')
.orderBy('uploadedAt', 'desc')
.get();
const documents = documentsSnapshot.docs.map(doc => ({
id: doc.id,
...doc.data()
}));
// 6. Get recent conversations (last 7 days)
const sevenDaysAgo = new Date();
sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
const conversationsSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.where('createdAt', '>=', sevenDaysAgo.toISOString())
.orderBy('createdAt', 'desc')
.limit(10)
.get();
const recentConversations = [];
for (const convDoc of conversationsSnapshot.docs) {
const conv = convDoc.data();
const messagesSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.doc(convDoc.id)
.collection('messages')
.orderBy('createdAt', 'desc')
.limit(5)
.get();
recentConversations.push({
id: convDoc.id,
name: conv.name,
createdAt: conv.createdAt,
recentMessages: messagesSnapshot.docs.map(m => ({
type: m.data().type === 1 ? 'user' : 'assistant',
text: m.data().text?.substring(0, 200) + '...',
createdAt: m.data().createdAt
}))
});
}
// 7. Calculate key metrics
const activeDays = timeline?.days?.filter((d: any) =>
d.summary.totalGitCommits > 0 ||
d.summary.totalExtensionSessions > 0 ||
d.summary.totalCursorMessages > 0
).length || 0;
const topFiles = activity?.fileActivity?.slice(0, 10) || [];
// 8. Extract key milestones (commits with significant changes)
const keyMilestones = gitHistory?.commits
?.filter((c: any) => c.insertions + c.deletions > 1000)
.slice(0, 5)
.map((c: any) => ({
date: c.date,
message: c.message,
author: c.author,
impact: `+${c.insertions}/-${c.deletions} lines`
})) || [];
// 9. Generate AI-friendly summary
const context = {
project: {
id: projectId,
name: projectData?.name || 'Untitled Project',
vision: projectData?.vision || null,
description: projectData?.description || null,
createdAt: projectData?.createdAt || null,
githubRepo: projectData?.githubRepo || null
},
timeline: {
dateRange: {
earliest: timeline?.dateRange?.earliest,
latest: timeline?.dateRange?.latest,
totalDays: timeline?.dateRange?.totalDays || 0,
activeDays
},
dataSources: {
git: {
available: timeline?.dataSources?.git?.available || false,
totalCommits: timeline?.dataSources?.git?.totalRecords || 0,
dateRange: {
first: timeline?.dataSources?.git?.firstDate,
last: timeline?.dataSources?.git?.lastDate
}
},
extension: {
available: timeline?.dataSources?.extension?.available || false,
totalSessions: timeline?.dataSources?.extension?.totalRecords || 0,
dateRange: {
first: timeline?.dataSources?.extension?.firstDate,
last: timeline?.dataSources?.extension?.lastDate
}
},
cursor: {
available: timeline?.dataSources?.cursor?.available || false,
totalMessages: timeline?.dataSources?.cursor?.totalRecords || 0,
dateRange: {
first: timeline?.dataSources?.cursor?.firstDate,
last: timeline?.dataSources?.cursor?.lastDate
}
}
}
},
codebase: {
totalCommits: gitHistory?.totalCommits || 0,
totalLinesAdded: gitHistory?.totalInsertions || 0,
totalLinesRemoved: gitHistory?.totalDeletions || 0,
contributors: gitHistory?.authors || [],
topFiles: gitHistory?.topFiles?.slice(0, 20) || []
},
activity: {
totalSessions: activity?.totalSessions || 0,
uniqueFilesEdited: activity?.fileActivity?.length || 0,
topEditedFiles: topFiles,
recentConversations
},
milestones: keyMilestones,
documents: documents.map(doc => ({
id: doc.id,
title: doc.title,
type: doc.type,
uploadedAt: doc.uploadedAt,
contentPreview: doc.content?.substring(0, 500) + '...'
})),
summary: generateProjectSummary({
projectData,
timeline,
gitHistory,
activity,
documents
})
};
return NextResponse.json(context);
} catch (error) {
console.error('Error loading project context:', error);
return NextResponse.json(
{
error: 'Failed to load project context',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}
// Helper to generate a compact, pipe-separated human-readable summary line
// from the assembled context pieces. Missing sections are simply omitted.
function generateProjectSummary(data: any): string {
  const { projectData, timeline, gitHistory, activity, documents } = data;
  const parts = [];
  // Project basics
  if (projectData?.name) {
    parts.push(`Project: ${projectData.name}`);
  }
  if (projectData?.vision) {
    parts.push(`Vision: ${projectData.vision}`);
  }
  // Timeline
  if (timeline?.dateRange?.totalDays) {
    parts.push(`Development span: ${timeline.dateRange.totalDays} days`);
  }
  // Git stats. Guard insertions/deletions with ?? 0: previously a payload
  // with totalCommits but no line counts threw on toLocaleString().
  if (gitHistory?.totalCommits) {
    parts.push(
      `Code: ${gitHistory.totalCommits} commits, ` +
      `+${(gitHistory.totalInsertions ?? 0).toLocaleString()}/-${(gitHistory.totalDeletions ?? 0).toLocaleString()} lines`
    );
  }
  // Activity
  if (activity?.totalSessions) {
    parts.push(`Activity: ${activity.totalSessions} development sessions`);
  }
  // Documents
  if (documents?.length) {
    parts.push(`Documentation: ${documents.length} documents uploaded`);
  }
  return parts.join(' | ');
}

View File

@@ -0,0 +1,59 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
export async function GET(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const adminDb = getAdminDb();
// Get ALL knowledge items for this project
const knowledgeSnapshot = await adminDb
.collection('knowledge_items')
.where('projectId', '==', projectId)
.get();
const items = knowledgeSnapshot.docs.map(doc => {
const data = doc.data();
return {
id: doc.id,
title: data.title,
sourceType: data.sourceType,
contentLength: data.content?.length || 0,
createdAt: data.createdAt,
tags: data.sourceMeta?.tags || [],
};
});
// Get project info
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
const projectData = projectDoc.data();
return NextResponse.json({
projectId,
projectName: projectData?.name,
currentPhase: projectData?.currentPhase,
totalKnowledgeItems: items.length,
items,
extractionHandoff: projectData?.phaseData?.phaseHandoffs?.extraction,
});
} catch (error) {
console.error('[debug-knowledge] Error:', error);
return NextResponse.json(
{
error: 'Failed to debug knowledge',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,158 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * POST — upload a single document to a project.
 * Body: { title, content, type?, source? }. Returns the new document id.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const body = await request.json();
    const { title, content, type, source } = body;
    if (!title || !content) {
      return NextResponse.json(
        { error: 'Title and content are required' },
        { status: 400 }
      );
    }
    // Create document
    const docRef = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('documents')
      .add({
        title,
        content,
        type: type || 'text', // text, markdown, pdf, etc.
        source: source || 'manual_upload', // chatgpt, slack, manual_upload, etc.
        uploadedAt: new Date().toISOString(),
        // trim + filter fixes over-counting: a bare split(/\s+/) yields an
        // empty first element when the content has leading whitespace.
        wordCount: content.trim().split(/\s+/).filter(Boolean).length,
        charCount: content.length
      });
    console.log(`✅ Document uploaded: ${title} (${docRef.id})`);
    return NextResponse.json({
      success: true,
      documentId: docRef.id,
      message: 'Document uploaded successfully'
    });
  } catch (error) {
    console.error('Error uploading document:', error);
    return NextResponse.json(
      {
        error: 'Failed to upload document',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
 * PUT — batch upload multiple documents. Entries missing a title or content
 * are skipped rather than failing the whole batch.
 * Body: { documents: Array<{ title, content, type?, source?, metadata? }> }
 */
export async function PUT(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const body = await request.json();
    const { documents } = body;
    if (!Array.isArray(documents) || documents.length === 0) {
      return NextResponse.json(
        { error: 'Documents array is required' },
        { status: 400 }
      );
    }
    const collectionRef = adminDb
      .collection('projects')
      .doc(projectId)
      .collection('documents');
    // Drop invalid entries up front, then perform the independent writes in
    // parallel (was a serial await-per-document loop); Promise.all preserves
    // the input order in the result.
    const validDocs = documents.filter(doc => doc && doc.title && doc.content);
    const uploadedDocs = await Promise.all(
      validDocs.map(async doc => {
        const docRef = await collectionRef.add({
          title: doc.title,
          content: doc.content,
          type: doc.type || 'text',
          source: doc.source || 'batch_upload',
          uploadedAt: new Date().toISOString(),
          // trim + filter avoids counting empty strings produced by
          // surrounding whitespace (same fix as the single-upload handler).
          wordCount: doc.content.trim().split(/\s+/).filter(Boolean).length,
          charCount: doc.content.length,
          metadata: doc.metadata || {}
        });
        return {
          id: docRef.id,
          title: doc.title
        };
      })
    );
    console.log(`✅ Batch uploaded ${uploadedDocs.length} documents`);
    return NextResponse.json({
      success: true,
      uploadedCount: uploadedDocs.length,
      documents: uploadedDocs
    });
  } catch (error) {
    console.error('Error batch uploading documents:', error);
    return NextResponse.json(
      {
        error: 'Failed to batch upload documents',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
// Lists every uploaded document for a project, newest first.
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const snapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('documents')
      .orderBy('uploadedAt', 'desc')
      .get();
    const documents = snapshot.docs.map(docSnap => ({
      id: docSnap.id,
      ...docSnap.data()
    }));
    return NextResponse.json({ total: documents.length, documents });
  } catch (error) {
    console.error('Error fetching documents:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch documents',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,98 @@
import { NextResponse } from 'next/server';
import { FieldValue } from 'firebase-admin/firestore';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { runChatExtraction } from '@/lib/ai/chat-extractor';
import { getKnowledgeItem } from '@/lib/server/knowledge';
import { createChatExtraction } from '@/lib/server/chat-extraction';
import { getAdminDb } from '@/lib/firebase/admin';
import type { ProjectPhaseScores } from '@/lib/types/project-artifacts';
// Request body accepted by the POST handler below.
interface ExtractFromChatRequest {
  knowledgeItemId?: string; // id of the chat-transcript knowledge item to analyze
}
// Increase Vercel/Next timeout for large transcripts
export const maxDuration = 60; // seconds (Next.js route segment config)
/**
 * POST /api/projects/[projectId]/extract-from-chat
 *
 * Runs LLM-based product-signal extraction over a previously imported chat
 * transcript (a knowledge item), persists the extraction result, then
 * advances the project's phase bookkeeping (scores, history, current phase).
 *
 * Body: { knowledgeItemId: string }
 * Responses: 200 { extraction }; 400 missing projectId/knowledgeItemId;
 * 404 unknown knowledge item; 500 on any failure.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const body = (await request.json()) as ExtractFromChatRequest;
    const knowledgeItemId = body.knowledgeItemId?.trim();
    if (!knowledgeItemId) {
      return NextResponse.json({ error: 'knowledgeItemId is required' }, { status: 400 });
    }
    const knowledgeItem = await getKnowledgeItem(projectId, knowledgeItemId);
    if (!knowledgeItem) {
      return NextResponse.json({ error: 'Knowledge item not found' }, { status: 404 });
    }
    console.log(`[extract-from-chat] Starting extraction for knowledgeItemId=${knowledgeItemId}, content length=${knowledgeItem.content.length}`);
    // Potentially long LLM call; covered by the maxDuration export above.
    const llm = new GeminiLlmClient();
    const extractionData = await runChatExtraction(knowledgeItem, llm);
    console.log(`[extract-from-chat] Extraction complete for knowledgeItemId=${knowledgeItemId}`);
    // Summary scores default to 0 when the model omits them.
    const overallCompletion = extractionData.summary_scores.overall_completion ?? 0;
    const overallConfidence = extractionData.summary_scores.overall_confidence ?? 0;
    // Persist the extraction as its own record before mutating the project.
    const extraction = await createChatExtraction({
      projectId,
      knowledgeItemId,
      data: extractionData,
      overallCompletion,
      overallConfidence,
    });
    // Read-modify-write of the project's phase bookkeeping.
    // NOTE(review): not wrapped in a transaction — concurrent extractions
    // could lose a phaseHistory entry; confirm whether that matters here.
    const adminDb = getAdminDb();
    const projectRef = adminDb.collection('projects').doc(projectId);
    const snapshot = await projectRef.get();
    const docData = snapshot.data() ?? {};
    const existingScores = (docData.phaseScores ?? {}) as ProjectPhaseScores;
    // Copy the array so we append to a fresh list, not the snapshot's data.
    const phaseHistory = Array.isArray(docData.phaseHistory) ? [...docData.phaseHistory] : [];
    phaseHistory.push({
      phase: 'extractor',
      status: 'completed',
      knowledgeItemId,
      timestamp: new Date().toISOString(),
    });
    existingScores.extractor = {
      knowledgeItemId,
      overallCompletion,
      overallConfidence,
      updatedAt: new Date().toISOString(),
    };
    // merge:true keeps unrelated project fields intact.
    await projectRef.set(
      {
        currentPhase: 'analyzed',
        phaseScores: existingScores,
        phaseStatus: 'in_progress',
        phaseHistory,
        updatedAt: FieldValue.serverTimestamp(),
      },
      { merge: true },
    );
    return NextResponse.json({ extraction });
  } catch (error) {
    console.error('[extract-from-chat] Extraction failed', error);
    return NextResponse.json(
      {
        error: 'Failed to extract product signals',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,110 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
* Extract vision answers from chat history and save to project
* This is a helper endpoint to migrate from AI chat-based vision collection
* to the structured visionAnswers field
*/
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const db = admin.firestore();
    console.log(`[Extract Vision] Extracting vision answers from chat for project ${projectId}`);
    // Read the full ai_chat transcript in chronological order.
    const conversationRef = db
      .collection('projects')
      .doc(projectId)
      .collection('conversations')
      .doc('ai_chat');
    const messagesSnapshot = await conversationRef
      .collection('messages')
      .orderBy('createdAt', 'asc')
      .get();
    if (messagesSnapshot.empty) {
      return NextResponse.json(
        { error: 'No chat messages found' },
        { status: 404 }
      );
    }
    const chatMessages = messagesSnapshot.docs.map(snap => ({
      id: snap.id,
      ...snap.data()
    }));
    console.log(`[Extract Vision] Found ${chatMessages.length} total messages`);
    // Only the user's own turns are candidate answers to the 3 vision questions.
    const userMessages = chatMessages.filter((m: any) => m.role === 'user');
    console.log(`[Extract Vision] Found ${userMessages.length} user messages`);
    if (userMessages.length < 3) {
      return NextResponse.json(
        {
          error: 'Not enough answers found',
          details: `Found ${userMessages.length} answers, need 3`,
          userMessages: userMessages.map((m: any) => m.content?.substring(0, 100))
        },
        { status: 400 }
      );
    }
    // By convention the first three user turns answer Q1-Q3 in order.
    const [firstAnswer, secondAnswer, thirdAnswer] = userMessages;
    const visionAnswers = {
      q1: firstAnswer.content,
      q2: secondAnswer.content,
      q3: thirdAnswer.content,
      allAnswered: true,
      updatedAt: new Date().toISOString(),
    };
    // Short previews for logging/response; full text is what gets persisted.
    const preview = (answer: string, limit: number) => answer.substring(0, limit) + '...';
    console.log(`[Extract Vision] Extracted vision answers:`, {
      q1: preview(visionAnswers.q1, 50),
      q2: preview(visionAnswers.q2, 50),
      q3: preview(visionAnswers.q3, 50),
    });
    // Persist the answers and mark the project ready for the MVP phase.
    await db.collection('projects').doc(projectId).set(
      {
        visionAnswers,
        readyForMVP: true,
        currentPhase: 'mvp',
        phaseStatus: 'ready',
      },
      { merge: true }
    );
    console.log(`[Extract Vision] ✅ Vision answers saved for project ${projectId}`);
    return NextResponse.json({
      success: true,
      message: 'Vision answers extracted and saved',
      visionAnswers: {
        q1: preview(visionAnswers.q1, 100),
        q2: preview(visionAnswers.q2, 100),
        q3: preview(visionAnswers.q3, 100),
      }
    });
  } catch (error) {
    console.error('[Extract Vision] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to extract vision answers',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,115 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
export async function GET(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
// Handle async params
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const adminDb = getAdminDb();
// Fetch project to get extraction handoff
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const projectData = projectDoc.data();
const extractionHandoff = projectData?.phaseData?.phaseHandoffs?.extraction;
if (!extractionHandoff) {
return NextResponse.json({ error: 'No extraction results found' }, { status: 404 });
}
return NextResponse.json({
handoff: extractionHandoff,
});
} catch (error) {
console.error('[extraction-handoff] Error:', error);
return NextResponse.json(
{
error: 'Failed to fetch extraction handoff',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}
export async function PATCH(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
// Handle async params
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const body = await request.json();
const { confirmed } = body;
if (!confirmed) {
return NextResponse.json({ error: 'Missing confirmed data' }, { status: 400 });
}
const adminDb = getAdminDb();
// Fetch current handoff
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const projectData = projectDoc.data();
const currentHandoff = projectData?.phaseData?.phaseHandoffs?.extraction;
if (!currentHandoff) {
return NextResponse.json({ error: 'No extraction handoff found' }, { status: 404 });
}
// Update the handoff with edited data
const updatedHandoff = {
...currentHandoff,
confirmed: {
...currentHandoff.confirmed,
...confirmed,
},
updatedAt: new Date().toISOString(),
};
// Save to Firestore
await adminDb.collection('projects').doc(projectId).update({
'phaseData.phaseHandoffs.extraction': updatedHandoff,
updatedAt: new Date().toISOString(),
});
return NextResponse.json({
success: true,
handoff: updatedHandoff,
});
} catch (error) {
console.error('[extraction-handoff] PATCH error:', error);
return NextResponse.json(
{
error: 'Failed to update extraction handoff',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,169 @@
import { NextRequest, NextResponse } from 'next/server';
import { exec } from 'child_process';
import { promisify } from 'util';
// Promise-returning wrapper so `git` shell commands can be awaited.
const execAsync = promisify(exec);

// One parsed entry from `git log --numstat` (see the GET handler below).
interface GitCommit {
  hash: string;          // abbreviated commit hash (first 8 characters)
  date: string;          // author date as printed by %ai
  author: string;        // author name (%an)
  message: string;       // commit subject line (%s)
  filesChanged: number;  // files touched in this commit
  insertions: number;    // total lines added in this commit
  deletions: number;     // total lines removed in this commit
}

// Aggregate repository statistics returned by the GET handler.
interface GitStats {
  totalCommits: number;
  firstCommit: string | null;  // date of the oldest commit; null when there are no commits
  lastCommit: string | null;   // date of the newest commit; null when there are no commits
  totalFilesChanged: number;
  totalInsertions: number;
  totalDeletions: number;
  commits: GitCommit[];        // most recent commits (capped for display)
  topFiles: Array<{ filePath: string; changeCount: number }>;  // most frequently changed files
  commitsByDay: Record<string, number>;                        // "YYYY-MM-DD" -> commit count
  authors: Array<{ name: string; commitCount: number }>;       // commits per author, descending
}
/**
 * GET handler: builds aggregate git history statistics.
 *
 * Runs `git log --all --numstat` against the configured repository, parses
 * the output into per-commit stats, and returns totals, per-day counts,
 * author counts, the 20 most-changed files, and the 50 most recent commits.
 * Returns a zeroed GitStats payload for an empty repository and 500 when
 * the git command or parsing fails.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    // projectId is not yet used: the repo location is global for now.
    // TODO: store the git repo path in project metadata and look it up here.
    const { projectId } = await params;

    // FIX: the path was hard-coded to a developer's local checkout, which
    // cannot exist on a deployed server. Allow configuration via the
    // GIT_REPO_PATH env var; fall back to the original default so local
    // behavior is unchanged.
    const repoPath = process.env.GIT_REPO_PATH ?? '/Users/markhenderson/ai-proxy';

    // %H|%ai|%an|%s prints hash|date|author|subject; --numstat appends one
    // "<insertions>\t<deletions>\t<path>" line per file touched.
    const { stdout: commitsOutput } = await execAsync(
      `cd "${repoPath}" && git log --all --pretty=format:"%H|%ai|%an|%s" --numstat`,
      { maxBuffer: 10 * 1024 * 1024 } // 10MB buffer for large repos
    );

    if (!commitsOutput.trim()) {
      // No commits reachable: return a zeroed stats object.
      return NextResponse.json({
        totalCommits: 0,
        firstCommit: null,
        lastCommit: null,
        totalFilesChanged: 0,
        totalInsertions: 0,
        totalDeletions: 0,
        commits: [],
        topFiles: [],
        commitsByDay: {},
        authors: []
      });
    }

    // Parse commit data
    const commits: GitCommit[] = [];
    const fileChangeCounts = new Map<string, number>();
    const commitsByDay: Record<string, number> = {};
    const authorCounts = new Map<string, number>();
    let totalFilesChanged = 0;
    let totalInsertions = 0;
    let totalDeletions = 0;

    const lines = commitsOutput.split('\n');
    let currentCommit: Partial<GitCommit> | null = null;

    for (const line of lines) {
      if (line.includes('|')) {
        // Commit header line: flush the previous commit first.
        if (currentCommit) {
          commits.push(currentCommit as GitCommit);
        }
        // FIX: commit subjects may themselves contain '|'. Taking only the
        // fourth split field used to truncate such messages, so re-join
        // everything after the third separator.
        const [hash, date, author, ...messageParts] = line.split('|');
        const message = messageParts.join('|');
        currentCommit = {
          hash: hash.substring(0, 8),
          date,
          author,
          message,
          filesChanged: 0,
          insertions: 0,
          deletions: 0
        };
        // Count commits by day (the %ai date starts with "YYYY-MM-DD").
        const day = date.split(' ')[0];
        commitsByDay[day] = (commitsByDay[day] || 0) + 1;
        // Count commits by author
        authorCounts.set(author, (authorCounts.get(author) || 0) + 1);
      } else if (line.trim() && currentCommit) {
        // Numstat line: "<insertions>\t<deletions>\t<filepath>".
        // Binary files report "-" for both counts; treat those as 0.
        const parts = line.trim().split('\t');
        if (parts.length === 3) {
          const [insertStr, delStr, filepath] = parts;
          const insertions = insertStr === '-' ? 0 : parseInt(insertStr, 10) || 0;
          const deletions = delStr === '-' ? 0 : parseInt(delStr, 10) || 0;

          currentCommit.filesChanged!++;
          currentCommit.insertions! += insertions;
          currentCommit.deletions! += deletions;

          totalFilesChanged++;
          totalInsertions += insertions;
          totalDeletions += deletions;

          fileChangeCounts.set(filepath, (fileChangeCounts.get(filepath) || 0) + 1);
        }
      }
    }

    // Push the last commit
    if (currentCommit) {
      commits.push(currentCommit as GitCommit);
    }

    // Sort commits by date (most recent first)
    commits.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());

    const firstCommit = commits.length > 0 ? commits[commits.length - 1].date : null;
    const lastCommit = commits.length > 0 ? commits[0].date : null;

    // Get top 20 most changed files
    const topFiles = Array.from(fileChangeCounts.entries())
      .sort(([, countA], [, countB]) => countB - countA)
      .slice(0, 20)
      .map(([filePath, changeCount]) => ({ filePath, changeCount }));

    // Get author stats
    const authors = Array.from(authorCounts.entries())
      .sort(([, countA], [, countB]) => countB - countA)
      .map(([name, commitCount]) => ({ name, commitCount }));

    const stats: GitStats = {
      totalCommits: commits.length,
      firstCommit,
      lastCommit,
      totalFilesChanged,
      totalInsertions,
      totalDeletions,
      commits: commits.slice(0, 50), // Return last 50 commits for display
      topFiles,
      commitsByDay,
      authors
    };

    return NextResponse.json(stats);
  } catch (error) {
    console.error('Error loading Git history:', error);
    return NextResponse.json(
      {
        error: 'Could not load Git history',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,196 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { runChatExtraction } from '@/lib/ai/chat-extractor';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { createChatExtraction } from '@/lib/server/chat-extraction';
import { FieldValue } from 'firebase-admin/firestore';
import type { ProjectPhaseScores } from '@/lib/types/project-artifacts';
import type { KnowledgeItem } from '@/lib/types/knowledge';
// Route segment config: allow up to 5 minutes, since a batch may run many
// sequential LLM extraction calls.
export const maxDuration = 300; // 5 minutes for batch processing

// Per-item outcome reported back to the caller for the whole batch.
interface BatchExtractionResult {
  knowledgeItemId: string;
  success: boolean;
  error?: string;  // present only when success is false
}
/**
 * POST handler: runs the chat-extraction pipeline over every knowledge_item
 * of a project that does not yet have a chat_extractions record.
 *
 * For each unprocessed item it calls the LLM extractor, persists a
 * chat_extraction, and fire-and-forget chunks/embeds the item for vector
 * search. When at least one extraction succeeds, the project's phase
 * metadata is advanced to 'analyzed'. Returns a per-item success/failure
 * list plus aggregate counts; 500 only when the batch as a whole fails.
 */
export async function POST(
  request: Request,
  context: { params?: Promise<{ projectId?: string }> | { projectId?: string } } = {},
) {
  try {
    // Await params if it's a Promise (Next.js 15+)
    const params = context.params instanceof Promise ? await context.params : context.params;
    // Fall back to the URL path (segment after 'projects') or the query
    // string, so the handler also works when called without route params.
    const url = new URL(request.url);
    const pathSegments = url.pathname.split('/');
    const projectsIndex = pathSegments.indexOf('projects');
    const projectIdFromPath =
      projectsIndex !== -1 ? pathSegments[projectsIndex + 1] : undefined;
    const projectId =
      (params?.projectId ?? projectIdFromPath ?? url.searchParams.get('projectId') ?? '').trim();
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const adminDb = getAdminDb();
    // Get all knowledge_items for this project
    const knowledgeSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('knowledge_items')
      .get();
    if (knowledgeSnapshot.empty) {
      return NextResponse.json({
        message: 'No knowledge items to extract',
        results: []
      });
    }
    const knowledgeItems = knowledgeSnapshot.docs.map(doc => ({
      id: doc.id,
      ...doc.data()
    })) as KnowledgeItem[];
    // Get existing extractions to avoid re-processing
    const extractionsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('chat_extractions')
      .get();
    const processedKnowledgeIds = new Set(
      extractionsSnapshot.docs.map(doc => doc.data().knowledgeItemId)
    );
    // Filter to only unprocessed items
    const itemsToProcess = knowledgeItems.filter(
      item => !processedKnowledgeIds.has(item.id)
    );
    if (itemsToProcess.length === 0) {
      return NextResponse.json({
        message: 'All knowledge items already extracted',
        results: []
      });
    }
    console.log(`[batch-extract] Processing ${itemsToProcess.length} knowledge items for project ${projectId}`);
    const llm = new GeminiLlmClient();
    const results: BatchExtractionResult[] = [];
    let successCount = 0;
    // Kept so the final phase update can reuse the most recent scores.
    let lastSuccessfulExtraction = null;
    // Process each item sequentially (keeps LLM load predictable).
    for (const knowledgeItem of itemsToProcess) {
      try {
        console.log(`[batch-extract] Extracting from knowledgeItemId=${knowledgeItem.id}`);
        const extractionData = await runChatExtraction(knowledgeItem, llm);
        const overallCompletion = extractionData.summary_scores.overall_completion ?? 0;
        const overallConfidence = extractionData.summary_scores.overall_confidence ?? 0;
        const extraction = await createChatExtraction({
          projectId,
          knowledgeItemId: knowledgeItem.id,
          data: extractionData,
          overallCompletion,
          overallConfidence,
        });
        lastSuccessfulExtraction = extraction;
        successCount++;
        results.push({
          knowledgeItemId: knowledgeItem.id,
          success: true
        });
        console.log(`[batch-extract] Successfully extracted from knowledgeItemId=${knowledgeItem.id}`);
        // Also chunk and embed this item (fire-and-forget; failures are
        // logged and never affect the batch result).
        (async () => {
          try {
            const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
            await writeKnowledgeChunksForItem({
              id: knowledgeItem.id,
              projectId: knowledgeItem.projectId,
              content: knowledgeItem.content,
              sourceMeta: knowledgeItem.sourceMeta,
            });
          } catch (chunkError) {
            console.error(`[batch-extract] Failed to chunk item ${knowledgeItem.id}:`, chunkError);
          }
        })();
      } catch (error) {
        // One failing item must not abort the batch; record and continue.
        console.error(`[batch-extract] Failed to extract from knowledgeItemId=${knowledgeItem.id}:`, error);
        results.push({
          knowledgeItemId: knowledgeItem.id,
          success: false,
          error: error instanceof Error ? error.message : String(error)
        });
      }
    }
    // Update project phase if we had any successful extractions
    if (successCount > 0 && lastSuccessfulExtraction) {
      const projectRef = adminDb.collection('projects').doc(projectId);
      const snapshot = await projectRef.get();
      const docData = snapshot.data() ?? {};
      const existingScores = (docData.phaseScores ?? {}) as ProjectPhaseScores;
      const phaseHistory = Array.isArray(docData.phaseHistory) ? [...docData.phaseHistory] : [];
      phaseHistory.push({
        phase: 'extractor',
        status: 'completed',
        knowledgeItemId: 'batch_extraction',
        timestamp: new Date().toISOString(),
      });
      // Use the last extraction's scores as representative
      const lastData = lastSuccessfulExtraction.data as { summary_scores?: { overall_completion?: number; overall_confidence?: number } };
      existingScores.extractor = {
        knowledgeItemId: 'batch_extraction',
        overallCompletion: lastData.summary_scores?.overall_completion ?? 0,
        overallConfidence: lastData.summary_scores?.overall_confidence ?? 0,
        updatedAt: new Date().toISOString(),
      };
      // Merge-write so unrelated project fields are preserved.
      await projectRef.set(
        {
          currentPhase: 'analyzed',
          phaseScores: existingScores,
          phaseStatus: 'in_progress',
          phaseHistory,
          updatedAt: FieldValue.serverTimestamp(),
        },
        { merge: true },
      );
      console.log(`[batch-extract] Updated project phase to 'analyzed' for project ${projectId}`);
    }
    return NextResponse.json({
      message: `Processed ${itemsToProcess.length} items: ${successCount} succeeded, ${results.filter(r => !r.success).length} failed`,
      results,
      successCount,
      totalProcessed: itemsToProcess.length
    });
  } catch (error) {
    console.error('[batch-extract] Batch extraction failed:', error);
    return NextResponse.json(
      {
        error: 'Failed to batch extract knowledge items',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,118 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import { writeKnowledgeChunksForItem } from '@/lib/server/vector-memory';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// Allow up to 60s: the insight is chunked and embedded synchronously
// before the response is sent.
export const maxDuration = 60;

// Request body for POST: a confirmed insight plus optional metadata.
interface ChunkInsightRequest {
  content: string;                                       // insight text; 400 when empty
  title?: string;                                        // defaults to 'Extracted Insight'
  importance?: 'primary' | 'supporting' | 'irrelevant';  // defaults to 'primary'
  tags?: string[];                                       // appended after the default tags
  sourceKnowledgeItemId?: string;                        // originating item, recorded as a 'source:' tag
  metadata?: Record<string, any>;                        // currently unused by the handler
}
export async function POST(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
// Verify auth
const authHeader = request.headers.get('Authorization');
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const idToken = authHeader.split('Bearer ')[1];
const adminAuth = getAdminAuth();
let userId: string;
try {
const decodedToken = await adminAuth.verifyIdToken(idToken);
userId = decodedToken.uid;
} catch (error) {
return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
}
// Handle async params in Next.js 16
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const body = await request.json() as ChunkInsightRequest;
if (!body.content || body.content.trim().length === 0) {
return NextResponse.json({ error: 'Content is required' }, { status: 400 });
}
const adminDb = getAdminDb();
const projectSnap = await adminDb.collection('projects').doc(projectId).get();
if (!projectSnap.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
console.log(`[chunk-insight] Creating confirmed insight for project ${projectId}`);
// Create source metadata
const sourceMeta: KnowledgeSourceMeta = {
origin: 'vibn',
createdAtOriginal: new Date().toISOString(),
importance: body.importance || 'primary',
tags: [
'extracted_insight',
'user_confirmed',
'extracted_by:' + userId,
...(body.sourceKnowledgeItemId ? [`source:${body.sourceKnowledgeItemId}`] : []),
...(body.tags || [])
],
};
// Store the confirmed insight as a knowledge_item
const knowledgeItem = await createKnowledgeItem({
projectId,
sourceType: 'other',
title: body.title || 'Extracted Insight',
content: body.content,
sourceMeta,
});
console.log(`[chunk-insight] Created knowledge_item ${knowledgeItem.id}`);
// Chunk and embed in AlloyDB (synchronous for this endpoint)
try {
await writeKnowledgeChunksForItem({
id: knowledgeItem.id,
projectId: knowledgeItem.projectId,
content: knowledgeItem.content,
sourceMeta: knowledgeItem.sourceMeta,
});
console.log(`[chunk-insight] Successfully chunked and embedded insight`);
} catch (chunkError) {
console.error(`[chunk-insight] Failed to chunk item ${knowledgeItem.id}:`, chunkError);
// Don't fail the request, item is still saved in Firestore
}
return NextResponse.json({
success: true,
knowledgeItemId: knowledgeItem.id,
message: 'Insight chunked and stored successfully',
});
} catch (error) {
console.error('[chunk-insight] Error:', error);
return NextResponse.json(
{
error: 'Failed to store insight',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,75 @@
import { NextResponse } from 'next/server';
import { getAdminAuth } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
/**
 * GET handler: returns up to 100 knowledge chunks for a project from
 * AlloyDB, newest first.
 *
 * In development, a request without an Authorization header bypasses auth
 * for local testing; any supplied Bearer token is still verified. AlloyDB
 * failures degrade to an empty result instead of a 500 so the UI still
 * renders.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // Fetch knowledge chunks from AlloyDB.
    // Explicitly typed: `let chunks = []` is an implicit any[] under strict mode.
    let chunks: Array<Record<string, unknown>> = [];
    let count = 0;
    try {
      const pool = await getAlloyDbClient();
      // Parameterized query — projectId is never interpolated into the SQL.
      const result = await pool.query(
        `SELECT
          id,
          chunk_index,
          content,
          source_type,
          importance,
          created_at
        FROM knowledge_chunks
        WHERE project_id = $1
        ORDER BY created_at DESC
        LIMIT 100`,
        [projectId]
      );
      chunks = result.rows;
      count = result.rowCount || 0;
      console.log('[API /knowledge/chunks] Found', count, 'chunks');
    } catch (dbError) {
      console.error('[API /knowledge/chunks] AlloyDB query failed:', dbError);
      console.error('[API /knowledge/chunks] This is likely due to AlloyDB not being configured or connected');
      // Return empty array instead of failing
      chunks = [];
      count = 0;
    }

    return NextResponse.json({
      success: true,
      chunks,
      count,
    });
  } catch (error) {
    console.error('[API] Error fetching knowledge chunks:', error);
    return NextResponse.json(
      { error: 'Failed to fetch knowledge chunks' },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,90 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// Allowed `origin` values for imported chats (a Set, despite the MAP name).
const PROVIDER_MAP = new Set(['chatgpt', 'gemini', 'claude', 'cursor', 'vibn', 'other']);

// Request body for POST: a pasted AI chat transcript plus optional metadata.
interface ImportAiChatRequest {
  title?: string;                     // used as item title and sourceMeta.filename
  provider?: string;                  // matched case-insensitively against PROVIDER_MAP
  transcript?: string;                // required at runtime; 400 when empty
  sourceLink?: string | null;         // original chat URL, if any
  createdAtOriginal?: string | null;  // original chat timestamp, if known
}
export async function POST(
request: Request,
{ params }: { params: Promise<{ projectId: string }> },
) {
try {
const { projectId } = await params;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const body = (await request.json()) as ImportAiChatRequest;
const transcript = body.transcript?.trim();
const provider = body.provider?.toLowerCase();
if (!transcript) {
return NextResponse.json({ error: 'transcript is required' }, { status: 400 });
}
const adminDb = getAdminDb();
const projectSnap = await adminDb.collection('projects').doc(projectId).get();
if (!projectSnap.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const origin = PROVIDER_MAP.has(provider ?? '') ? provider : 'other';
const sourceMeta: KnowledgeSourceMeta = {
origin: (origin as KnowledgeSourceMeta['origin']) ?? 'other',
url: body.sourceLink ?? null,
filename: body.title ?? null,
createdAtOriginal: body.createdAtOriginal ?? null,
importance: 'primary',
tags: ['ai_chat'],
};
const knowledgeItem = await createKnowledgeItem({
projectId,
sourceType: 'imported_ai_chat',
title: body.title ?? null,
content: transcript,
sourceMeta,
});
// Chunk and embed in background (don't block response)
// This populates AlloyDB knowledge_chunks for vector search
(async () => {
try {
const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
await writeKnowledgeChunksForItem({
id: knowledgeItem.id,
projectId: knowledgeItem.projectId,
content: knowledgeItem.content,
sourceMeta: knowledgeItem.sourceMeta,
});
} catch (error) {
// Log but don't fail the request
console.error('[import-ai-chat] Failed to chunk/embed knowledge_item:', error);
}
})();
return NextResponse.json({ knowledgeItem });
} catch (error) {
console.error('[import-ai-chat] Failed to import chat', error);
return NextResponse.json(
{
error: 'Failed to import AI chat transcript',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,136 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
import { chunkDocument } from '@/lib/utils/document-chunker';
// Request body for POST: raw document text to be chunked and stored.
interface ImportDocumentRequest {
  filename?: string;  // required at runtime; 400 when missing
  content?: string;   // required at runtime; 400 when missing
  mimeType?: string;  // recorded in contextSources metadata only
}

// Chunking plus per-chunk Firestore writes can exceed the default timeout.
export const maxDuration = 30;
/**
 * POST handler: imports a text document, splits it into overlapping chunks,
 * stores each chunk as a knowledge_item, and records a contextSources
 * summary document for UI display. Embedding into AlloyDB runs
 * fire-and-forget per chunk.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    const body = (await request.json()) as ImportDocumentRequest;
    const content = body.content?.trim();
    const filename = body.filename?.trim();

    if (!content || !filename) {
      return NextResponse.json({ error: 'filename and content are required' }, { status: 400 });
    }

    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }

    // FIX: these templates previously emitted the literal text "$(unknown)"
    // instead of interpolating the filename (broken template literals), so
    // logs and stored chunk titles were garbled.
    console.log(`[import-document] Processing ${filename}, length=${content.length}`);

    // Chunk the document
    const chunks = chunkDocument(content, {
      maxChunkSize: 2000,
      chunkOverlap: 200,
      preserveParagraphs: true,
      preserveCodeBlocks: true,
    });

    console.log(`[import-document] Created ${chunks.length} chunks for ${filename}`);

    // Shared metadata for every chunk (loop-invariant, so build it once;
    // each write below spreads it into a fresh object).
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'other',
      url: null,
      filename,
      createdAtOriginal: new Date().toISOString(),
      importance: 'primary',
      tags: ['document', 'chunked'],
    };

    // Store each chunk as a separate knowledge_item
    const knowledgeItemIds: string[] = [];
    for (const chunk of chunks) {
      const chunkTitle = chunks.length > 1
        ? `${filename} (chunk ${chunk.metadata.chunkIndex + 1}/${chunk.metadata.totalChunks})`
        : filename;

      const knowledgeItem = await createKnowledgeItem({
        projectId,
        sourceType: 'imported_document',
        title: chunkTitle,
        content: chunk.content,
        sourceMeta: {
          ...sourceMeta,
          chunkMetadata: {
            chunkIndex: chunk.metadata.chunkIndex,
            totalChunks: chunk.metadata.totalChunks,
            startChar: chunk.metadata.startChar,
            endChar: chunk.metadata.endChar,
            tokenCount: chunk.metadata.tokenCount,
          },
        },
      });
      knowledgeItemIds.push(knowledgeItem.id);

      // Chunk and embed in AlloyDB (fire-and-forget; failures are logged
      // but never fail the import).
      (async () => {
        try {
          const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
          await writeKnowledgeChunksForItem({
            id: knowledgeItem.id,
            projectId: knowledgeItem.projectId,
            content: knowledgeItem.content,
            sourceMeta: knowledgeItem.sourceMeta,
          });
        } catch (error) {
          console.error(`[import-document] Failed to chunk item ${knowledgeItem.id}:`, error);
        }
      })();
    }

    // Also create a summary record in contextSources for UI display
    const contextSourcesRef = adminDb.collection('projects').doc(projectId).collection('contextSources');
    await contextSourcesRef.add({
      type: 'document',
      name: filename,
      summary: `Document with ${chunks.length} chunks (${content.length} characters)`,
      connectedAt: new Date(),
      metadata: {
        chunkCount: chunks.length,
        totalChars: content.length,
        mimeType: body.mimeType,
        knowledgeItemIds,
      },
    });

    return NextResponse.json({
      success: true,
      filename,
      chunkCount: chunks.length,
      knowledgeItemIds,
    });
  } catch (error) {
    console.error('[import-document] Failed to import document', error);
    return NextResponse.json(
      {
        error: 'Failed to import document',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,81 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
/**
 * GET handler: lists up to 100 knowledge items for a project, newest first.
 *
 * In development, a request without an Authorization header bypasses auth
 * for local testing; any supplied Bearer token is still verified. Firestore
 * failures degrade to an empty list instead of a 500 so the UI can render
 * its empty states.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    console.log('[API /knowledge/items] Fetching items for project:', projectId);

    // Explicitly typed: `let items = []` would be an implicit any[] under strict mode.
    let items: Array<Record<string, unknown>> = [];
    try {
      const adminDb = getAdminDb();
      // NOTE(review): this reads a top-level 'knowledge' collection, while
      // other routes write to projects/{projectId}/knowledge_items —
      // confirm which collection is canonical.
      const knowledgeSnapshot = await adminDb
        .collection('knowledge')
        .where('projectId', '==', projectId)
        .orderBy('createdAt', 'desc')
        .limit(100)
        .get();
      console.log('[API /knowledge/items] Found', knowledgeSnapshot.size, 'items');
      items = knowledgeSnapshot.docs.map(doc => {
        const data = doc.data();
        return {
          id: doc.id,
          // Fall back to a content snippet so the UI always has a label.
          title: data.title || data.content?.substring(0, 50) || 'Untitled',
          sourceType: data.sourceType,
          content: data.content,
          sourceMeta: data.sourceMeta,
          // Firestore Timestamps become ISO strings; plain values pass through.
          createdAt: data.createdAt?.toDate?.()?.toISOString() || data.createdAt,
          updatedAt: data.updatedAt?.toDate?.()?.toISOString() || data.updatedAt,
        };
      });
    } catch (firestoreError) {
      console.error('[API /knowledge/items] Firestore query failed:', firestoreError);
      console.error('[API /knowledge/items] This is likely due to missing Firebase Admin credentials or Firestore not being set up');
      // Return empty array instead of failing - the UI will show "No chats yet" and "No images yet"
      items = [];
    }

    return NextResponse.json({
      success: true,
      items,
      count: items.length,
    });
  } catch (error) {
    console.error('[API /knowledge/items] Error fetching knowledge items:', error);
    console.error('[API /knowledge/items] Error stack:', error instanceof Error ? error.stack : 'No stack trace');
    return NextResponse.json(
      {
        error: 'Failed to fetch knowledge items',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,105 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { z } from 'zod';
// Structured-output schema for the LLM call: a list of themes, each listing
// the insight IDs it groups. The `.describe()` strings guide the model.
const ThemeGroupingSchema = z.object({
  themes: z.array(z.object({
    theme: z.string().describe('A short, descriptive theme name (2-4 words)'),
    description: z.string().describe('A brief description of what this theme represents'),
    insightIds: z.array(z.string()).describe('Array of insight IDs that belong to this theme'),
  })),
});
/**
 * POST handler: asks Gemini to group the supplied insights into named
 * themes (structured output validated by ThemeGroupingSchema) and returns
 * them; falls back to a single catch-all theme when the LLM call fails.
 *
 * In development, a request without an Authorization header bypasses auth;
 * any supplied Bearer token is still verified.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    // projectId is currently unused by the grouping itself.
    const { projectId } = await params;

    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // Get insights from request body
    const { insights } = await request.json();
    if (!insights || insights.length === 0) {
      return NextResponse.json({
        success: true,
        themes: [],
      });
    }

    console.log('[API /knowledge/themes] Grouping', insights.length, 'insights into themes');

    // One line per insight, prefixed with its ID in brackets so the model
    // can reference the IDs in its answer. (Unused `index` callback
    // parameter removed.)
    const insightsContext = insights.map((insight: any) =>
      `[${insight.id}] ${insight.content?.substring(0, 200) || insight.title}`
    ).join('\n\n');

    // Use AI to group insights into themes
    const llm = new GeminiLlmClient();
    const systemPrompt = `You are an expert at analyzing and categorizing information. Given a list of insights/knowledge chunks, group them into meaningful themes. Each theme should represent a coherent topic or concept. Aim for 3-7 themes depending on the diversity of content.`;
    const userPrompt = `Analyze these insights and group them into themes:
${insightsContext}
Group these insights into themes. Each insight ID is in brackets at the start of each line. Return the themes with their associated insight IDs.`;

    try {
      const result = await llm.structuredCall({
        model: 'gemini',
        systemPrompt,
        messages: [{ role: 'user', content: userPrompt }],
        schema: ThemeGroupingSchema,
        temperature: 0.3, // low temperature for stable, repeatable groupings
      });
      console.log('[API /knowledge/themes] Generated', result.themes.length, 'themes');
      return NextResponse.json({
        success: true,
        themes: result.themes,
      });
    } catch (aiError) {
      console.error('[API /knowledge/themes] AI grouping failed:', aiError);
      // Fallback: create a single "Ungrouped" theme with all insights
      return NextResponse.json({
        success: true,
        themes: [{
          theme: 'All Insights',
          description: 'Ungrouped insights',
          insightIds: insights.map((i: any) => i.id),
        }],
      });
    }
  } catch (error) {
    console.error('[API /knowledge/themes] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to group insights into themes',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,146 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// import { chunkDocument } from '@/lib/utils/document-chunker'; // Not needed - Extractor AI handles chunking
import { getStorage } from 'firebase-admin/storage';
export const maxDuration = 60;
/**
 * POST /api/projects/[projectId]/upload-document
 *
 * Accepts a multipart form upload, stores the raw file in Firebase Storage,
 * and records the whole document as a single knowledge item. No chunking is
 * done here — the Extractor AI chunks important sections later.
 *
 * Auth: Firebase ID token via `Authorization: Bearer <token>`.
 * Responses: 401 missing/invalid token, 400 missing projectId or file,
 *            404 unknown project, 500 on unexpected failure.
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
  try {
    // Verify auth
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    // Handle async params in Next.js 16
    const params = 'then' in context.params ? await context.params : context.params;
    const projectId = params.projectId;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    // Parse multipart form data.
    // `formData.get()` may return a plain string field as well as a File; the
    // previous `as File` cast let a string pass the truthy check and crash
    // later at `file.arrayBuffer()` with a 500. Reject non-File values here.
    const formData = await request.formData();
    const file = formData.get('file');
    if (!(file instanceof File)) {
      return NextResponse.json({ error: 'No file provided' }, { status: 400 });
    }
    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    console.log(`[upload-document] Uploading ${file.name}, size=${file.size}`);
    // Read file content.
    // NOTE(review): decodes the upload as UTF-8 text — assumes text documents;
    // binary formats (PDF/DOCX) would produce garbage content here. Confirm
    // upstream restricts uploads to text, or add format-specific extraction.
    const arrayBuffer = await file.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);
    const content = buffer.toString('utf-8');
    // Upload original file to Firebase Storage
    const storage = getStorage();
    const bucket = storage.bucket();
    // Timestamp prefix keeps repeated uploads of the same filename distinct.
    const storagePath = `projects/${projectId}/documents/${Date.now()}_${file.name}`;
    const fileRef = bucket.file(storagePath);
    await fileRef.save(buffer, {
      metadata: {
        contentType: file.type,
        metadata: {
          uploadedBy: userId,
          projectId,
          originalFilename: file.name,
          uploadedAt: new Date().toISOString(),
        },
      },
    });
    // Make file publicly accessible (or use signed URLs if you want private)
    await fileRef.makePublic();
    const publicUrl = `https://storage.googleapis.com/${bucket.name}/${storagePath}`;
    console.log(`[upload-document] File saved to Storage: ${publicUrl}`);
    // Store whole document as single knowledge_item (no chunking).
    // Extractor AI will collaboratively chunk important sections later.
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'other',
      url: publicUrl,
      filename: file.name,
      createdAtOriginal: new Date().toISOString(),
      importance: 'primary',
      tags: ['document', 'uploaded', 'pending_extraction'],
    };
    const knowledgeItem = await createKnowledgeItem({
      projectId,
      sourceType: 'imported_document',
      title: file.name,
      content: content,
      sourceMeta,
    });
    console.log(`[upload-document] Stored whole document as knowledge_item: ${knowledgeItem.id}`);
    // Create a summary record in contextSources for UI display
    const contextSourcesRef = adminDb.collection('projects').doc(projectId).collection('contextSources');
    await contextSourcesRef.add({
      type: 'document',
      name: file.name,
      summary: `Document (${content.length} characters) - pending extraction`,
      url: publicUrl,
      connectedAt: new Date(),
      metadata: {
        totalChars: content.length,
        fileSize: file.size,
        mimeType: file.type,
        storagePath,
        knowledgeItemId: knowledgeItem.id,
        uploadedBy: userId,
        status: 'pending_extraction',
      },
    });
    return NextResponse.json({
      success: true,
      filename: file.name,
      url: publicUrl,
      knowledgeItemId: knowledgeItem.id,
      status: 'stored',
      message: 'Document stored. Extractor AI will review and chunk important sections.',
    });
  } catch (error) {
    console.error('[upload-document] Failed to upload document', error);
    return NextResponse.json(
      {
        error: 'Failed to upload document',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,222 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { z } from 'zod';
// Zod schema used for Gemini's structured-output call below.
// The `.describe()` strings are part of the prompt contract — they steer the
// model's field content — so treat them as behaviour, not documentation.
const MissionFrameworkSchema = z.object({
  targetCustomer: z.object({
    primaryAudience: z.string().describe('Primary narrow target segment (include geography/region if mentioned in context)'),
    theirSituation: z.string().describe('What situation or context they are in'),
    relatedMarkets: z.array(z.string()).describe('2-4 additional related market segments or customer types that could benefit'),
  }),
  existingSolutions: z.array(z.object({
    category: z.string().describe('Category of solution (e.g., "Legacy EMR Systems", "AI Scribes", "Practice Management", "Open Source")'),
    description: z.string().describe('Description of this category and its limitations'),
    products: z.array(z.object({
      name: z.string().describe('Product/company name'),
      url: z.string().optional().describe('Website URL if known'),
    })).min(5).max(20).describe('Comprehensive list of 5-20 specific products in this category. Include all major players and notable solutions.'),
  })).min(4).max(7).describe('4-7 categories of existing solutions with comprehensive product lists. ALWAYS include an "Open Source" category if applicable to the market.'),
  innovations: z.array(z.object({
    title: z.string().describe('Short title for this innovation (3-5 words)'),
    description: z.string().describe('How this makes you different and better'),
  })).describe('3 key innovations or differentiators'),
  ideaValidation: z.array(z.object({
    title: z.string().describe('Name of this validation metric'),
    description: z.string().describe('What success looks like for this metric'),
  })).describe('3 ways to validate the idea is sound'),
  financialSuccess: z.object({
    subscribers: z.number().describe('Target number of subscribers (Year 1)'),
    pricePoint: z.number().describe('Monthly price per subscriber in dollars'),
    retentionRate: z.number().describe('Target monthly retention rate as a percentage (0-100)'),
  }),
});
/**
 * POST /api/projects/[projectId]/mission/generate
 *
 * Generates a mission framework (target customer, existing solutions,
 * innovations, validation metrics, financial targets) from whatever project
 * context is available (AlloyDB insights, Firestore knowledge items, project
 * doc fields) using Gemini structured output, then persists it under
 * `phaseData.missionFramework` on the project document.
 *
 * Auth: enforced in production; skipped in development only when no
 * Authorization header is supplied (lets local tooling call the route).
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      let decoded;
      try {
        decoded = await auth.verifyIdToken(token);
      } catch {
        // verifyIdToken throws on malformed/expired tokens; without this
        // guard the outer catch turned auth failures into 500s instead of
        // 401s (the upload route already handles it this way).
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }
    console.log('[API /mission/generate] Generating mission framework for project:', projectId);
    // Fetch insights from AlloyDB (best-effort: a missing DB just means less context)
    let insights: any[] = [];
    try {
      const pool = await getAlloyDbClient();
      const result = await pool.query(
        `SELECT content, source_type, importance, created_at
         FROM knowledge_chunks
         WHERE project_id = $1
         ORDER BY importance DESC, created_at DESC
         LIMIT 50`,
        [projectId]
      );
      insights = result.rows;
      console.log('[API /mission/generate] Found', insights.length, 'insights');
    } catch (dbError) {
      console.log('[API /mission/generate] No AlloyDB insights available');
    }
    // Fetch knowledge items from Firestore (also best-effort)
    let knowledgeItems: any[] = [];
    try {
      const adminDb = getAdminDb();
      const knowledgeSnapshot = await adminDb
        .collection('knowledge')
        .where('projectId', '==', projectId)
        .orderBy('createdAt', 'desc')
        .limit(20)
        .get();
      knowledgeItems = knowledgeSnapshot.docs.map(doc => doc.data());
      console.log('[API /mission/generate] Found', knowledgeItems.length, 'knowledge items');
    } catch (firestoreError) {
      console.log('[API /mission/generate] No Firestore knowledge available');
    }
    // Get project data (vision + canonical product model, if present)
    let projectData: any = {};
    try {
      const adminDb = getAdminDb();
      const projectDoc = await adminDb.collection('projects').doc(projectId).get();
      if (projectDoc.exists) {
        projectData = projectDoc.data();
      }
    } catch (error) {
      console.log('[API /mission/generate] Could not fetch project data');
    }
    // Build context from available data
    const contextParts = [];
    if (projectData?.productVision) {
      contextParts.push(`Product Vision: ${projectData.productVision}`);
    }
    if (projectData?.phaseData?.canonicalProductModel) {
      const model = projectData.phaseData.canonicalProductModel;
      if (model.oneLiner) contextParts.push(`Product Description: ${model.oneLiner}`);
      if (model.problem) contextParts.push(`Problem: ${model.problem}`);
      if (model.targetUser) contextParts.push(`Target User: ${model.targetUser}`);
      if (model.coreSolution) contextParts.push(`Solution: ${model.coreSolution}`);
    }
    if (insights.length > 0) {
      const insightTexts = insights.slice(0, 10).map(i => i.content).join('\n- ');
      contextParts.push(`Key Insights:\n- ${insightTexts}`);
    }
    if (knowledgeItems.length > 0) {
      const knowledgeTexts = knowledgeItems.slice(0, 5)
        .map(k => k.title || k.content?.substring(0, 100))
        .filter(Boolean)
        .join('\n- ');
      if (knowledgeTexts) {
        contextParts.push(`Additional Context:\n- ${knowledgeTexts}`);
      }
    }
    const context = contextParts.length > 0
      ? contextParts.join('\n\n')
      : 'No project context available yet. Please create a generic framework based on best practices for new product development.';
    console.log('[API /mission/generate] Context length:', context.length);
    // Use AI to generate the mission framework
    const llm = new GeminiLlmClient();
    const systemPrompt = `You are a product strategy expert. Based on the provided project information, create a comprehensive mission framework that helps the founder clearly articulate their product vision, market position, and success metrics.
CRITICAL: For Target Customer, be VERY SPECIFIC and NARROW:
- Look for geographic/regional targeting in the context (country, state, city, region)
- Look for specific customer segments, verticals, or niches
- Avoid broad generalizations like "all doctors" or "businesses everywhere"
- If region is mentioned, ALWAYS include it in the primary audience
- Target the smallest viable market segment that can sustain the business
Be specific and actionable. Use the project context to inform your recommendations.`;
    const userPrompt = `Based on this project information, generate a complete mission framework:
${context}
Create a structured mission framework that includes:
1. Target Customer:
   - Primary Audience: Be EXTREMELY SPECIFIC and narrow (include geography if mentioned)
     Example: "Solo family practice physicians in rural Oregon" NOT "Primary care doctors"
   - Their Situation: What problem/context they face
   - Related Markets: List 2-4 other related customer segments that could also benefit
     Example: ["Urgent care clinics", "Pediatric specialists in small practices", "Telemedicine providers"]
2. Existing Solutions: Group into 4-7 CATEGORIES (e.g., "Legacy EMR Systems", "AI Medical Scribes", "Open Source", etc.)
   - For each category: provide a description of what they do and their limitations
   - List 5-20 specific PRODUCTS/COMPANIES in each category with website URLs if you know them
   - Be COMPREHENSIVE - include all major players, notable solutions, and emerging alternatives
   - ALWAYS include an "Open Source" category listing relevant open-source alternatives (GitHub, frameworks, libraries, tools)
   - Include direct competitors, adjacent solutions, and legacy approaches
3. Your Innovations (3 key differentiators)
4. Idea Validation (3 validation metrics)
5. Financial Success (subscribers, price point, retention rate)
Be comprehensive with existing solutions. Be specific and narrow with primary target, but show the range of related markets.`;
    const result = await llm.structuredCall({
      model: 'gemini',
      systemPrompt,
      messages: [{ role: 'user', content: userPrompt }],
      schema: MissionFrameworkSchema,
      temperature: 0.7,
    });
    console.log('[API /mission/generate] Successfully generated mission framework');
    // Store the generated framework in Firestore; a save failure is logged
    // but does not fail the request — the caller still gets the framework.
    try {
      const adminDb = getAdminDb();
      await adminDb.collection('projects').doc(projectId).update({
        'phaseData.missionFramework': result,
        'phaseData.missionFrameworkUpdatedAt': new Date(),
      });
      console.log('[API /mission/generate] Saved framework to Firestore');
    } catch (saveError) {
      console.error('[API /mission/generate] Could not save framework:', saveError);
    }
    return NextResponse.json({
      success: true,
      framework: result,
    });
  } catch (error) {
    console.error('[API /mission/generate] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate mission framework',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,966 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
import { GoogleGenerativeAI } from '@google/generative-ai';
import { getApiUrl } from '@/lib/utils/api-url';
import fs from 'fs';
import path from 'path';
/**
* MVP Page & Feature Checklist Generator (AI-Powered)
* Uses Gemini AI with the Vibn MVP Planner agent spec to generate intelligent,
* context-aware plans from project vision answers and existing work
*/
/**
 * GET /api/projects/[projectId]/mvp-checklist
 *
 * Returns the cached MVP checklist when one exists, an empty-state payload
 * when none has been generated, or (with `?regenerate=1`) runs the full
 * AI generation pipeline and persists the result.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const db = admin.firestore();
    // Check if we have a saved plan
    const projectDoc = await db.collection('projects').doc(projectId).get();
    // Guard: without this, a missing project fell through to generation and
    // crashed later on `projectData.vision` / the Firestore update (500).
    if (!projectDoc.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectDoc.data();
    if (projectData?.mvpChecklist && !request.nextUrl.searchParams.get('regenerate')) {
      console.log('Loading saved MVP checklist');
      return NextResponse.json({
        ...projectData.mvpChecklist,
        cached: true,
        cachedAt: projectData.mvpChecklistGeneratedAt
      });
    }
    // If no checklist exists and not forcing regeneration, return empty state
    if (!projectData?.mvpChecklist && !request.nextUrl.searchParams.get('regenerate')) {
      console.log('[MVP Generation] No checklist exists - returning empty state');
      return NextResponse.json({
        error: 'No MVP checklist generated yet',
        message: 'Click "Regenerate Plan" to create your MVP checklist',
        mvpChecklist: [],
        summary: { totalPages: 0, estimatedDays: 0 }
      });
    }
    console.log('[MVP Generation] 🚀 Starting MVP checklist generation...');
    // Load complete history
    console.log('[MVP Generation] 📊 Loading project history...');
    const historyResponse = await fetch(
      getApiUrl(`/api/projects/${projectId}/complete-history`, request)
    );
    // Fail fast with a clear message instead of parsing an error payload as history.
    if (!historyResponse.ok) {
      throw new Error(`Failed to load project history (HTTP ${historyResponse.status})`);
    }
    const history = await historyResponse.json();
    console.log('[MVP Generation] ✅ History loaded');
    // Load intelligent analysis (with fallback if project doesn't have codebase access)
    console.log('[MVP Generation] 🧠 Running intelligent analysis...');
    let analysis = null;
    try {
      const analysisResponse = await fetch(
        getApiUrl(`/api/projects/${projectId}/plan/intelligent`, request)
      );
      if (analysisResponse.ok) {
        analysis = await analysisResponse.json();
        console.log('[MVP Generation] ✅ Analysis complete');
      } else {
        console.log('[MVP Generation] ⚠️ Analysis failed (project may lack codebase access), using fallback');
        analysis = { codebaseAnalysis: null, intelligentPlan: null };
      }
    } catch (error) {
      console.log('[MVP Generation] ⚠️ Analysis error:', error instanceof Error ? error.message : String(error));
      analysis = { codebaseAnalysis: null, intelligentPlan: null };
    }
    // Generate MVP checklist using AI
    console.log('[MVP Generation] 🤖 Calling AI to generate MVP plan...');
    const checklist = await generateAIMVPChecklist(projectId, history, analysis, projectData);
    console.log('[MVP Generation] ✅ MVP plan generated!');
    // Save to Firestore (filter out undefined values to avoid Firestore errors)
    const cleanChecklist = JSON.parse(JSON.stringify(checklist, (key, value) =>
      value === undefined ? null : value
    ));
    await db.collection('projects').doc(projectId).update({
      mvpChecklist: cleanChecklist,
      mvpChecklistGeneratedAt: admin.firestore.FieldValue.serverTimestamp()
    });
    console.log('[MVP Generation] ✅ MVP checklist saved to Firestore');
    return NextResponse.json(checklist);
  } catch (error) {
    console.error('Error generating MVP checklist:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate MVP checklist',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
 * POST /api/projects/[projectId]/mvp-checklist
 *
 * Force-regenerates the MVP checklist (ignores any cached copy) and saves
 * the result back to the project document. Mirrors GET's regeneration path.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const db = admin.firestore();
    console.log('[MVP Generation] 🚀 Starting MVP checklist regeneration...');
    // Re-fetch project data
    const projectDoc = await db.collection('projects').doc(projectId).get();
    // Guard: a missing project previously crashed deeper in the pipeline
    // (`projectData.vision` in prepareVisionInput) and surfaced as a 500.
    if (!projectDoc.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectDoc.data();
    // Load complete history
    console.log('[MVP Generation] 📊 Loading project history...');
    const historyResponse = await fetch(
      getApiUrl(`/api/projects/${projectId}/complete-history`, request)
    );
    // Fail fast with a clear message instead of parsing an error payload as history.
    if (!historyResponse.ok) {
      throw new Error(`Failed to load project history (HTTP ${historyResponse.status})`);
    }
    const history = await historyResponse.json();
    console.log('[MVP Generation] ✅ History loaded');
    // Load intelligent analysis (with fallback if project doesn't have codebase access)
    console.log('[MVP Generation] 🧠 Running intelligent analysis...');
    let analysis = null;
    try {
      const analysisResponse = await fetch(
        getApiUrl(`/api/projects/${projectId}/plan/intelligent`, request)
      );
      if (analysisResponse.ok) {
        analysis = await analysisResponse.json();
        console.log('[MVP Generation] ✅ Analysis complete');
      } else {
        console.log('[MVP Generation] ⚠️ Analysis failed (project may lack codebase access), using fallback');
        analysis = { codebaseAnalysis: null, intelligentPlan: null };
      }
    } catch (error) {
      console.log('[MVP Generation] ⚠️ Analysis error:', error instanceof Error ? error.message : String(error));
      analysis = { codebaseAnalysis: null, intelligentPlan: null };
    }
    // Generate MVP checklist using AI
    console.log('[MVP Generation] 🤖 Calling AI to generate MVP plan...');
    const checklist = await generateAIMVPChecklist(projectId, history, analysis, projectData);
    console.log('[MVP Generation] ✅ MVP plan generated!');
    console.log('[MVP Generation] 📊 Summary:', JSON.stringify(checklist.summary, null, 2));
    // Save to Firestore (filter out undefined values to avoid Firestore errors)
    const cleanChecklist = JSON.parse(JSON.stringify(checklist, (key, value) =>
      value === undefined ? null : value
    ));
    await db.collection('projects').doc(projectId).update({
      mvpChecklist: cleanChecklist,
      mvpChecklistGeneratedAt: admin.firestore.FieldValue.serverTimestamp()
    });
    console.log('[MVP Generation] ✅ MVP checklist saved to Firestore');
    return NextResponse.json({
      ...checklist,
      regenerated: true
    });
  } catch (error) {
    console.error('[MVP Generation] ❌ Error regenerating MVP checklist:', error);
    return NextResponse.json(
      {
        error: 'Failed to regenerate MVP checklist',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
 * Generate the AI-powered MVP checklist using Gemini and the Vibn MVP Planner
 * agent spec.
 *
 * Flow: load the agent spec from disk → build "vision" + "work to date"
 * context (project vision answers, git/codebase summaries, recent Cursor
 * conversations from Firestore) → ask Gemini for JSON trees → transform into
 * the checklist shape. Any failure falls back to the template-based generator.
 *
 * @param projectId   Firestore project document id.
 * @param history     Payload from the complete-history endpoint (shape assumed
 *                    from field access below — TODO confirm against that route).
 * @param analysis    Payload from the intelligent-analysis endpoint; its
 *                    `codebaseAnalysis` may be null.
 * @param projectData Raw project document data.
 */
async function generateAIMVPChecklist(
  projectId: string,
  history: any,
  analysis: any,
  projectData: any
) {
  try {
    // Check for Gemini API key; without it we can only do template generation.
    const geminiApiKey = process.env.GEMINI_API_KEY;
    if (!geminiApiKey) {
      console.warn('[MVP Generation] ⚠️ No GEMINI_API_KEY found, falling back to template-based generation');
      return generateFallbackChecklist(history, analysis);
    }
    console.log('[MVP Generation] 🔑 GEMINI_API_KEY found, using AI generation');
    // Load the agent spec.
    // NOTE(review): this resolves to a sibling checkout OUTSIDE the app root
    // (`../vibn-vision`); readFileSync will throw in deployments that don't
    // ship that directory — confirm it exists in the runtime image.
    const agentSpecPath = path.join(process.cwd(), '..', 'vibn-vision', 'initial-questions.json');
    const agentSpec = JSON.parse(fs.readFileSync(agentSpecPath, 'utf-8'));
    console.log('[MVP Generation] 📋 Agent spec loaded');
    // Initialize Gemini with JSON-only output so the response parses directly.
    const genAI = new GoogleGenerativeAI(geminiApiKey);
    const model = genAI.getGenerativeModel({
      model: "gemini-2.0-flash-exp",
      generationConfig: {
        temperature: 0.4,
        topP: 0.95,
        topK: 40,
        maxOutputTokens: 8192,
        responseMimeType: "application/json",
      },
    });
    console.log('[MVP Generation] 🤖 Gemini model initialized (gemini-2.0-flash-exp)');
    // Prepare vision input from project data (3-question planner format).
    const visionInput = prepareVisionInput(projectData, history);
    console.log('[MVP Generation] 📝 Vision input prepared:', {
      q1: visionInput.q1_who_and_problem.raw_answer?.substring(0, 50) + '...',
      q2: visionInput.q2_story.raw_answer?.substring(0, 50) + '...',
      q3: visionInput.q3_improvement.raw_answer?.substring(0, 50) + '...'
    });
    // Log what data we have vs missing (diagnostics only; nothing branches on these).
    console.log('[MVP Generation] 📊 Data availability check:');
    console.log('  ✅ Vision answers:', !!projectData.visionAnswers);
    console.log('  ✅ GitHub repo:', projectData.githubRepo || 'None');
    console.log('  ⚠️ GitHub userId:', projectData.userId || 'MISSING - cannot load repo code');
    console.log('  ✅ Git commits:', history.gitSummary?.totalCommits || 0);
    console.log('  ✅ Cursor sessions:', history.summary?.breakdown?.extensionSessions || 0);
    console.log('  ✅ Codebase analysis:', analysis.codebaseAnalysis?.builtFeatures?.length || 0, 'features found');
    // Load Cursor conversation history from Firestore (best-effort; failures
    // are logged and the prompt is simply built without conversation context).
    console.log('[MVP Generation] 💬 Loading Cursor conversation history...');
    const adminDb = admin.firestore();
    let cursorConversations: any[] = [];
    let cursorMessageCount = 0;
    try {
      const conversationsSnapshot = await adminDb
        .collection('projects')
        .doc(projectId)
        .collection('cursorConversations')
        .orderBy('lastUpdatedAt', 'desc')
        .limit(10) // Get most recent 10 conversations
        .get();
      // One messages query per conversation (sequential; bounded by limit(10)).
      for (const convDoc of conversationsSnapshot.docs) {
        const convData = convDoc.data();
        const messagesSnapshot = await adminDb
          .collection('projects')
          .doc(projectId)
          .collection('cursorConversations')
          .doc(convDoc.id)
          .collection('messages')
          .orderBy('createdAt', 'asc')
          .limit(50) // Limit messages per conversation to avoid token bloat
          .get();
        const messages = messagesSnapshot.docs.map(msgDoc => {
          const msg = msgDoc.data();
          return {
            // msg.type === 1 is treated as a user message — presumably Cursor's
            // export convention; TODO confirm against the importer.
            role: msg.type === 1 ? 'user' : 'assistant',
            text: msg.text || '',
            createdAt: msg.createdAt
          };
        });
        cursorMessageCount += messages.length;
        cursorConversations.push({
          name: convData.name || 'Untitled',
          messageCount: messages.length,
          messages: messages,
          createdAt: convData.createdAt,
          lastUpdatedAt: convData.lastUpdatedAt
        });
      }
      console.log('[MVP Generation] ✅ Loaded', cursorConversations.length, 'Cursor conversations with', cursorMessageCount, 'messages');
    } catch (error) {
      console.error('[MVP Generation] ⚠️ Failed to load Cursor conversations:', error);
    }
    // Prepare work_to_date context with all available data (plain strings so
    // they can be embedded in the prompt JSON).
    const githubSummary = history.gitSummary
      ? `${history.gitSummary.totalCommits || 0} commits, ${history.gitSummary.filesChanged || 0} files changed`
      : 'No Git history available';
    const codebaseSummary = analysis.codebaseAnalysis?.summary
      || (analysis.codebaseAnalysis?.builtFeatures?.length > 0
        ? `Built: ${analysis.codebaseAnalysis.builtFeatures.map((f: any) => f.name).join(', ')}`
        : 'No codebase analysis available');
    const cursorSessionsSummary = cursorConversations.length > 0
      ? `${cursorConversations.length} Cursor conversations with ${cursorMessageCount} messages imported from Cursor IDE`
      : 'No Cursor conversation history available';
    // Format Cursor conversations for the prompt: at most 10 messages per
    // conversation, each truncated to 200 chars, to bound token usage.
    const cursorContextText = cursorConversations.length > 0
      ? cursorConversations.map(conv =>
          `Conversation: "${conv.name}" (${conv.messageCount} messages)\n` +
          conv.messages.slice(0, 10).map((m: any) => `  ${m.role}: ${m.text.substring(0, 200)}`).join('\n')
        ).join('\n\n')
      : '';
    const workToDate = {
      code_summary: codebaseSummary,
      github_summary: githubSummary,
      cursor_sessions_summary: cursorSessionsSummary,
      cursor_conversations: cursorContextText, // Include actual conversation snippets
      existing_assets_notes: `Built features: ${analysis.codebaseAnalysis?.builtFeatures?.length || 0}, Missing: ${analysis.codebaseAnalysis?.missingFeatures?.length || 0}`
    };
    console.log('[MVP Generation] 🔍 Work context prepared:', {
      ...workToDate,
      cursor_conversations: cursorContextText.length > 0 ? `${cursorContextText.length} chars from conversations` : 'None'
    });
    // Build the prompt: agent-spec instructions followed by the input payload.
    const prompt = `${agentSpec.agent_spec.instructions_for_model}
Here is the input data:
${JSON.stringify({
  vision_input: visionInput,
  work_to_date: workToDate
}, null, 2)}
Return ONLY valid JSON matching the output schema, with no additional text or markdown.`;
    console.log('[MVP Generation] 📤 Sending prompt to Gemini (length:', prompt.length, 'chars)');
    // Call Gemini
    const result = await model.generateContent(prompt);
    const response = result.response;
    const text = response.text();
    console.log('[MVP Generation] 📥 Received AI response (length:', text.length, 'chars)');
    // Parse AI response (Gemini returns JSON directly with responseMimeType set)
    const aiResponse = JSON.parse(text);
    console.log('[MVP Generation] ✅ AI response parsed successfully');
    console.log('[MVP Generation] 🔍 AI Response structure:', JSON.stringify({
      has_journey_tree: !!aiResponse.journey_tree,
      has_touchpoints_tree: !!aiResponse.touchpoints_tree,
      has_system_tree: !!aiResponse.system_tree,
      journey_nodes: aiResponse.journey_tree?.nodes?.length || 0,
      touchpoints_nodes: aiResponse.touchpoints_tree?.nodes?.length || 0,
      system_nodes: aiResponse.system_tree?.nodes?.length || 0,
      summary: aiResponse.summary
    }, null, 2));
    // Transform AI trees into our existing checklist format.
    const checklist = transformAIResponseToChecklist(aiResponse, history, analysis);
    console.log('[MVP Generation] ✅ Checklist transformed, total pages:', checklist.summary?.totalPages || 0);
    return checklist;
  } catch (error) {
    // Any failure above (spec missing, Gemini error, bad JSON) degrades to the
    // keyword-template generator rather than failing the request.
    console.error('[MVP Generation] ❌ Error generating AI MVP checklist:', error);
    console.warn('[MVP Generation] ⚠️ Falling back to template-based generation');
    return generateFallbackChecklist(history, analysis);
  }
}
/**
 * Fallback to template-based generation if AI fails.
 *
 * Builds a fixed page/flow taxonomy and marks each entry built/missing by
 * keyword-matching commit messages and analyzed features; no AI involved.
 * Returns the same top-level shape as the AI path, with empty trees.
 */
function generateFallbackChecklist(history: any, analysis: any) {
  const vision = history.project.vision || '';
  const builtFeatures = analysis.codebaseAnalysis?.builtFeatures || [];
  const missingFeatures = analysis.codebaseAnalysis?.missingFeatures || [];
  // Scan commit messages for evidence of pages
  const commitMessages = history.chronologicalEvents
    .filter((e: any) => e.type === 'git_commit')
    .map((e: any) => e.data.message);
  // Simple flat taxonomy structure (existing template).
  // Status/evidence for each entry is derived from commit/feature keywords;
  // a few entries are hard-coded (e.g. flows marked 'in_progress').
  const corePages = [
    {
      category: 'Core Features',
      pages: [
        {
          path: '/auth',
          title: 'Authentication',
          status: detectPageStatus('auth', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('auth', commitMessages)
        },
        {
          path: '/[workspace]',
          title: 'Workspace Selector',
          status: detectPageStatus('workspace', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('workspace', commitMessages)
        },
        {
          path: '/[workspace]/projects',
          title: 'Projects List',
          status: detectPageStatus('projects page', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('projects list', commitMessages)
        },
        {
          path: '/project/[id]/overview',
          title: 'Project Dashboard',
          status: detectPageStatus('overview', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('overview', commitMessages)
        },
        {
          path: '/project/[id]/mission',
          title: 'Vision/Mission Screen',
          status: detectPageStatus('mission|vision', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('vision|mission', commitMessages)
        },
        {
          path: '/project/[id]/audit',
          title: 'Project History & Audit',
          status: detectPageStatus('audit', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('audit', commitMessages)
        },
        {
          path: '/project/[id]/timeline-plan',
          title: 'MVP Timeline & Checklist',
          status: detectPageStatus('timeline-plan', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('timeline-plan', commitMessages)
        },
        {
          path: '/api/github/oauth',
          title: 'GitHub OAuth API',
          status: detectPageStatus('github/oauth', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('github oauth', commitMessages)
        },
        {
          path: '/api/projects',
          title: 'Project Management APIs',
          status: detectPageStatus('api/projects', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('project api', commitMessages)
        },
        {
          path: '/api/projects/[id]/mvp-checklist',
          title: 'MVP Checklist Generation API',
          status: detectPageStatus('mvp-checklist', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('mvp-checklist', commitMessages)
        }
      ]
    },
    {
      category: 'Flows',
      pages: [
        {
          path: 'flow/onboarding',
          title: 'User Onboarding Flow',
          status: 'in_progress',
          priority: 'critical',
          evidence: [],
          note: 'Sign Up → Workspace Creation → Connect GitHub'
        },
        {
          path: 'flow/project-creation',
          title: 'Project Creation Flow',
          status: 'in_progress',
          priority: 'critical',
          evidence: findEvidence('project creation', commitMessages),
          note: 'Import/New Project → Repository → History Import → Vision Setup'
        },
        {
          path: 'flow/plan-generation',
          title: 'Plan Generation Flow',
          status: 'in_progress',
          priority: 'critical',
          evidence: findEvidence('plan', commitMessages),
          note: 'Context Analysis → MVP Checklist → Timeline View'
        }
      ]
    },
    {
      category: 'Marketing',
      pages: [
        {
          path: '/project/[id]/marketing',
          title: 'Marketing Dashboard',
          status: 'missing',
          priority: 'high',
          evidence: [],
          note: 'Have /plan/marketing API but no UI'
        },
        {
          path: '/api/projects/[id]/plan/marketing',
          title: 'Marketing Plan Generation API',
          status: detectPageStatus('marketing api', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('marketing', commitMessages)
        },
        {
          path: '/',
          title: 'Marketing Landing Page',
          status: detectPageStatus('marketing page', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('marketing site|landing', commitMessages)
        }
      ]
    },
    {
      category: 'Social',
      pages: [
        {
          path: '/[workspace]/connections',
          title: 'Social Connections & Integrations',
          status: detectPageStatus('connections', commitMessages, builtFeatures),
          priority: 'medium',
          evidence: findEvidence('connections', commitMessages)
        }
      ]
    },
    {
      category: 'Content',
      pages: [
        {
          path: '/docs',
          title: 'Documentation Pages',
          status: 'missing',
          priority: 'medium',
          evidence: []
        },
        {
          path: '/project/[id]/getting-started',
          title: 'Getting Started Guide',
          status: detectPageStatus('getting-started', commitMessages, builtFeatures),
          priority: 'medium',
          evidence: findEvidence('getting-started|onboarding', commitMessages)
        }
      ]
    },
    {
      category: 'Settings',
      pages: [
        {
          path: '/project/[id]/settings',
          title: 'Project Settings',
          status: detectPageStatus('settings', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('settings', commitMessages)
        },
        {
          path: '/[workspace]/settings',
          title: 'User Settings',
          status: detectPageStatus('settings', commitMessages, builtFeatures),
          priority: 'medium',
          evidence: findEvidence('settings', commitMessages)
        }
      ]
    }
  ];
  // Calculate statistics across all categories for the summary block.
  const allPages = corePages.flatMap(c => c.pages);
  const builtCount = allPages.filter(p => p.status === 'built').length;
  const inProgressCount = allPages.filter(p => p.status === 'in_progress').length;
  const missingCount = allPages.filter(p => p.status === 'missing').length;
  return {
    project: {
      name: history.project.name,
      vision: history.project.vision,
      githubRepo: history.project.githubRepo
    },
    summary: {
      totalPages: allPages.length,
      built: builtCount,
      inProgress: inProgressCount,
      missing: missingCount,
      completionPercentage: Math.round((builtCount / allPages.length) * 100)
    },
    visionSummary: extractVisionPillars(vision),
    mvpChecklist: corePages,
    nextSteps: generateNextSteps(corePages, missingFeatures),
    generatedAt: new Date().toISOString(),
    // Empty trees for fallback (will be populated when AI generation works)
    journeyTree: { label: "Journey", nodes: [] },
    touchpointsTree: { label: "Touchpoints", nodes: [] },
    systemTree: { label: "System", nodes: [] },
  };
}
/**
 * Infer whether a page/feature already exists.
 *
 * @param pagePath - One or more search terms separated by '|' (e.g. "mission|vision").
 * @param commitMessages - Git commit messages to scan (case-insensitive).
 * @param builtFeatures - Features from codebase analysis; entries may lack
 *                        `name`/`evidence` (the payload is externally produced).
 * @returns 'built' if any term matches a commit message or analyzed feature,
 *          otherwise 'missing'.
 */
function detectPageStatus(pagePath: string, commitMessages: string[], builtFeatures: any[]): string {
  for (const term of pagePath.split('|')) {
    const needle = term.toLowerCase();
    const hasCommit = commitMessages.some(msg => msg.toLowerCase().includes(needle));
    // Guard against malformed feature entries (missing name/evidence) instead
    // of letting one bad record crash the whole checklist generation.
    const hasFeature = builtFeatures.some(f =>
      (f?.name?.toLowerCase().includes(needle) ?? false) ||
      (f?.evidence?.some((e: string) => e.toLowerCase().includes(needle)) ?? false)
    );
    if (hasCommit || hasFeature) {
      return 'built';
    }
  }
  return 'missing';
}
/**
 * Collect commit messages that mention any of the '|'-separated search terms.
 * Keeps at most two matching messages per term, in commit order, as
 * lightweight evidence strings for the checklist UI.
 */
function findEvidence(searchTerm: string, commitMessages: string[]): string[] {
  return searchTerm.split('|').flatMap(term => {
    const needle = term.toLowerCase();
    return commitMessages
      .filter(msg => msg.toLowerCase().includes(needle))
      .slice(0, 2);
  });
}
/**
 * Map keywords found in the raw vision text onto the four product pillars.
 * A pillar is included when any of its trigger phrases appears verbatim
 * (case-sensitive substring match) in the vision string.
 */
function extractVisionPillars(vision: string): string[] {
  const pillarTriggers: Array<[string[], string]> = [
    [['start from scratch', 'import'], 'Project ingestion (start from scratch or import existing work)'],
    [['understand', 'vision'], 'Project understanding (vision, history, structure, metadata)'],
    [['plan', 'checklist'], 'Project planning (auto-generated v1 roadmap/checklist)'],
    [['marketing', 'communication', 'automation'], 'Automation + AI support (marketing, chat, context-aware support)'],
  ];
  return pillarTriggers
    .filter(([triggers]) => triggers.some(trigger => vision.includes(trigger)))
    .map(([, pillar]) => pillar);
}
/**
 * Derive a short, prioritised action list from the checklist state.
 * Priority 1: up to three critical pages that are still missing.
 * Priority 2: up to two missing features reported by codebase analysis.
 */
function generateNextSteps(corePages: any[], missingFeatures: any[]): any[] {
  const pageSteps = corePages
    .flatMap(category => category.pages)
    .filter(page => page.status === 'missing' && page.priority === 'critical')
    .slice(0, 3)
    .map(page => ({
      priority: 1,
      task: `Build ${page.title}`,
      path: page.path || '',
      reason: page.note || 'Critical for MVP launch',
    }));
  const featureSteps = Array.isArray(missingFeatures)
    ? missingFeatures
        .slice(0, 2)
        .filter(feature => feature && (feature.feature || feature.task))
        .map(feature => ({
          priority: 2,
          task: feature.feature || feature.task || 'Complete missing feature',
          reason: feature.reason || 'Important for MVP',
        }))
    : [];
  return [...pageSteps, ...featureSteps];
}
/**
 * Prepare vision input from project data.
 * Maps the project's free-form vision onto the 3-question interview format,
 * preferring explicit `visionAnswers` when present and otherwise falling back
 * to heuristic extraction from the raw vision text.
 */
function prepareVisionInput(projectData: any, history: any) {
  const vision = projectData.vision || history.project?.vision || '';
  const answers = projectData.visionAnswers;
  return {
    q1_who_and_problem: {
      prompt: "Who has the problem you want to fix and what is it?",
      // Heuristic fallback: pull a problem-flavoured sentence, else whole text.
      raw_answer: answers?.q1 || extractProblemFromVision(vision) || vision
    },
    q2_story: {
      prompt: "Tell me a story of this person using your tool and experiencing your vision?",
      // The full vision doubles as the user story when no answer exists.
      raw_answer: answers?.q2 || vision
    },
    q3_improvement: {
      prompt: "How much did that improve things for them?",
      raw_answer: answers?.q3 || extractImprovementFromVision(vision) || 'Significantly faster and more efficient workflow'
    }
  };
}
/**
 * Pull a problem statement out of unstructured vision text.
 * Returns the first sentence containing a problem-related keyword; when none
 * matches, falls back to the first sentence (or the whole text).
 */
function extractProblemFromVision(vision: string): string {
  const problemKeywords = ['problem', 'struggle', 'difficult', 'challenge', 'pain', 'need'];
  const sentences = vision.split(/[.!?]+/);
  const match = sentences.find(sentence => {
    const lower = sentence.toLowerCase();
    return problemKeywords.some(keyword => lower.includes(keyword));
  });
  if (match !== undefined) {
    return match.trim();
  }
  return sentences[0]?.trim() || vision;
}
/**
 * Pull an improvement/value statement out of unstructured vision text.
 * Returns the first sentence containing a benefit keyword, or '' when none.
 */
function extractImprovementFromVision(vision: string): string {
  const valueKeywords = ['faster', 'better', 'easier', 'save', 'improve', 'automate', 'help'];
  const hit = vision.split(/[.!?]+/).find(sentence =>
    valueKeywords.some(keyword => sentence.toLowerCase().includes(keyword))
  );
  return hit ? hit.trim() : '';
}
/**
 * Transform AI response trees into our existing checklist format.
 * Touchpoint and system nodes become categorized checklist pages; commit
 * messages and previously detected built features are used as evidence when
 * deciding each page's status. The raw trees are passed through for the
 * Journey/Design/Tech views.
 */
function transformAIResponseToChecklist(aiResponse: any, history: any, analysis: any) {
  const { journey_tree, touchpoints_tree, system_tree, summary } = aiResponse;
  // Scan commit messages for evidence
  const commitMessages = history.chronologicalEvents
    ?.filter((e: any) => e.type === 'git_commit')
    ?.map((e: any) => e.data.message) || [];
  const builtFeatures = analysis.codebaseAnalysis?.builtFeatures || [];
  // Combine touchpoints and system into categories
  const categories: any[] = [];
  // Process Touchpoints tree
  if (touchpoints_tree?.nodes) {
    const touchpointCategories = groupAssetsByCategory(
      touchpoints_tree.nodes,
      'touchpoint',
      commitMessages,
      builtFeatures
    );
    categories.push(...touchpointCategories);
  }
  // Process System tree
  if (system_tree?.nodes) {
    const systemCategories = groupAssetsByCategory(
      system_tree.nodes,
      'system',
      commitMessages,
      builtFeatures
    );
    categories.push(...systemCategories);
  }
  // Calculate statistics
  const allPages = categories.flatMap(c => c.pages);
  const builtCount = allPages.filter((p: any) => p.status === 'built').length;
  const inProgressCount = allPages.filter((p: any) => p.status === 'in_progress').length;
  const missingCount = allPages.filter((p: any) => p.status === 'missing').length;
  // Guard the division: the original produced NaN when there were no pages.
  const completionPercentage = allPages.length > 0
    ? Math.round((builtCount / allPages.length) * 100)
    : 0;
  return {
    project: {
      name: history.project.name,
      vision: history.project.vision,
      githubRepo: history.project.githubRepo
    },
    summary: {
      totalPages: allPages.length,
      built: builtCount,
      inProgress: inProgressCount,
      missing: missingCount,
      completionPercentage
    },
    visionSummary: [summary || 'AI-generated MVP plan'],
    mvpChecklist: categories,
    nextSteps: generateNextStepsFromAI(allPages),
    generatedAt: new Date().toISOString(),
    aiGenerated: true,
    // Include raw trees for Journey/Design/Tech views
    journeyTree: journey_tree,
    touchpointsTree: touchpoints_tree,
    systemTree: system_tree,
  };
}
/**
 * Group asset nodes into checklist categories.
 * Each node becomes a "page" entry (status, priority, evidence, requirements)
 * and is bucketed by its inferred category; insertion order is preserved.
 */
function groupAssetsByCategory(
  nodes: any[],
  listType: 'touchpoint' | 'system',
  commitMessages: string[],
  builtFeatures: any[]
) {
  const buckets = new Map<string, any[]>();
  nodes.forEach(node => {
    const entry = {
      id: node.id,
      path: inferPath(node),
      title: node.name,
      status: detectAINodeStatus(node, commitMessages, builtFeatures),
      // Must-have-for-v1 nodes are treated as critical.
      priority: node.must_have_for_v1 ? 'critical' : 'medium',
      evidence: findEvidenceForNode(node, commitMessages),
      note: node.asset_metadata?.why_it_exists,
      metadata: node.asset_metadata,
      requirements: flattenChildrenToRequirements(node.children)
    };
    const category = inferCategory(node, listType);
    const existing = buckets.get(category);
    if (existing) {
      existing.push(entry);
    } else {
      buckets.set(category, [entry]);
    }
  });
  return [...buckets.entries()].map(([category, pages]) => ({
    category,
    pages
  }));
}
/**
 * Infer a checklist category from node metadata.
 * System nodes map to 'Core Features' or 'Settings'; touchpoint nodes map by
 * asset type, then by journey stage, defaulting to 'Core Features'.
 */
function inferCategory(node: any, listType: 'touchpoint' | 'system'): string {
  const assetType = node.asset_type;
  const stage = (node.asset_metadata?.journey_stage || '').toLowerCase();
  if (listType === 'system') {
    return assetType === 'api_endpoint' || assetType === 'service'
      ? 'Core Features'
      : 'Settings';
  }
  // Touchpoint nodes: categorize by asset type first.
  switch (assetType) {
    case 'flow': return 'Flows';
    case 'social_post': return 'Social';
    case 'document': return 'Content';
    case 'email': return 'Marketing';
  }
  if (stage.includes('aware') || stage.includes('discover')) {
    return 'Marketing';
  }
  return 'Core Features';
}
/**
 * Infer a route path for a node.
 * Prefers an explicit path found in the implementation notes; otherwise
 * derives a slug from the node name, prefixed by asset type.
 */
function inferPath(node: any): string {
  const implNotes = node.asset_metadata?.implementation_notes || '';
  const explicit = implNotes.match(/\/[\w\-\/\[\]]+/);
  if (explicit) {
    return explicit[0];
  }
  const slug = node.name
    .toLowerCase()
    .replace(/\s+/g, '-')
    .replace(/[^a-z0-9\-]/g, '');
  switch (node.asset_type) {
    case 'api_endpoint':
      return `/api/${slug}`;
    case 'flow':
      return `flow/${slug}`;
    default:
      return `/${slug}`;
  }
}
/**
 * Detect the status of an AI-generated node based on existing work.
 * A node is 'built' when its name or inferred path appears in a commit
 * message, or its name appears in a built feature's name/evidence; otherwise
 * it is 'missing'.
 */
function detectAINodeStatus(node: any, commitMessages: string[], builtFeatures: any[]): string {
  const name = node.name.toLowerCase();
  const path = inferPath(node).toLowerCase();
  // Check commit messages
  const hasCommit = commitMessages.some(msg =>
    msg.toLowerCase().includes(name) || msg.toLowerCase().includes(path)
  );
  // Check built features
  const hasFeature = builtFeatures.some((f: any) =>
    f.name?.toLowerCase().includes(name) ||
    f.evidence?.some((e: string) => e.toLowerCase().includes(name))
  );
  if (hasCommit || hasFeature) return 'built';
  // The original ended with `node.must_have_for_v1 ? 'missing' : 'missing'`
  // — both arms identical, so the flag had no effect. Collapsed to a plain
  // return; if must-have nodes were meant to get a distinct status, that
  // intent was never implemented.
  return 'missing';
}
/**
 * Find up to two commit messages mentioning this node's name.
 */
function findEvidenceForNode(node: any, commitMessages: string[]): string[] {
  const needle = node.name.toLowerCase();
  const matches: string[] = [];
  for (const msg of commitMessages) {
    if (matches.length === 2) break;
    if (msg.toLowerCase().includes(needle)) {
      matches.push(msg);
    }
  }
  return matches;
}
/**
 * Flatten child nodes into requirement entries (1-based ids, all 'missing').
 * Returns [] when there are no children.
 */
function flattenChildrenToRequirements(children: any[]): any[] {
  if (!children?.length) {
    return [];
  }
  return children.map((child, index) => ({
    id: index + 1,
    text: child.name,
    status: 'missing'
  }));
}
/**
 * Generate next steps from AI-generated pages: the first five pages that are
 * both missing and critical, in order, with 1-based priorities.
 */
function generateNextStepsFromAI(pages: any[]): any[] {
  const steps: any[] = [];
  for (const page of pages) {
    if (steps.length === 5) break;
    if (page.status !== 'missing' || page.priority !== 'critical') continue;
    steps.push({
      priority: steps.length + 1,
      task: `Build ${page.title}`,
      path: page.path || '',
      reason: page.note || 'Critical for MVP V1'
    });
  }
  return steps;
}

View File

@@ -0,0 +1,346 @@
import { NextRequest, NextResponse } from 'next/server';
import { exec } from 'child_process';
import { promisify } from 'util';
import { getApiUrl } from '@/lib/utils/api-url';
const execAsync = promisify(exec);
/**
 * Intelligent V1 Launch Planning.
 * Analyzes the ACTUAL codebase to generate specific, actionable tasks:
 * loads project context, shells out to count pages/APIs/components in the
 * repo, compares what exists against the vision, and returns a plan.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // 1. Load project context
    const contextResponse = await fetch(
      getApiUrl(`/api/projects/${projectId}/context`, request)
    );
    const context = await contextResponse.json();
    // 2. Scan actual codebase structure.
    // The original hard-coded a developer-machine absolute path, which breaks
    // on any other host (including deployment). Allow an env override while
    // keeping the old value as the fallback for backward compatibility.
    const repoPath = process.env.REPO_PATH || '/Users/markhenderson/ai-proxy';
    const { stdout: pagesOutput } = await execAsync(
      `cd "${repoPath}" && find vibn-frontend/app -name "*.tsx" | grep "page.tsx" | wc -l`
    );
    const { stdout: apiOutput } = await execAsync(
      `cd "${repoPath}" && find vibn-frontend/app/api -name "route.ts" | wc -l`
    );
    const { stdout: componentsOutput } = await execAsync(
      `cd "${repoPath}" && find vibn-frontend/components -name "*.tsx" 2>/dev/null | wc -l || echo 0`
    );
    const codebaseStats = {
      totalPages: parseInt(pagesOutput.trim(), 10),
      totalAPIs: parseInt(apiOutput.trim(), 10),
      totalComponents: parseInt(componentsOutput.trim(), 10)
    };
    // 3. Analyze what's ACTUALLY built vs vision
    const analysis = await analyzeRealCodebase(context, codebaseStats, repoPath);
    // 4. Generate intelligent, specific tasks
    const intelligentPlan = generateIntelligentPlan(context, analysis);
    return NextResponse.json({
      projectContext: {
        name: context.project.name,
        vision: context.project.vision
      },
      codebaseAnalysis: analysis,
      intelligentPlan,
      confidence: analysis.confidence
    });
  } catch (error) {
    console.error('Error generating intelligent plan:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate intelligent plan',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
 * Analyze the real codebase to determine which vision features exist.
 *
 * Enumerates actual page and API routes via `find`, then pattern-matches
 * those paths against features implied by the project vision. Returns
 * { builtFeatures, missingFeatures, confidence, specificInsights }.
 *
 * Note: `stats` (aggregate counts from the caller) is currently unused here
 * but kept for interface compatibility.
 */
async function analyzeRealCodebase(context: any, stats: any, repoPath: string) {
  const analysis: any = {
    builtFeatures: [],
    missingFeatures: [],
    confidence: 'high',
    specificInsights: []
  };
  // Enumerate actual page routes (relative, with the page.tsx suffix stripped).
  const { stdout: pagesListOutput } = await execAsync(
    `cd "${repoPath}" && find vibn-frontend/app -name "page.tsx" | sed 's|vibn-frontend/app/||' | sed 's|/page.tsx||'`
  );
  const actualPages = pagesListOutput.trim().split('\n').filter(p => p);
  // Enumerate actual API routes the same way.
  const { stdout: apiListOutput } = await execAsync(
    `cd "${repoPath}" && find vibn-frontend/app/api -name "route.ts" | sed 's|vibn-frontend/app/api/||' | sed 's|/route.ts||'`
  );
  const actualAPIs = apiListOutput.trim().split('\n').filter(a => a);
  // Analyze based on vision: "VIBN gets your project to v1 launch and beyond".
  // (The original also declared a `visionKeywords` lookup table here, but it
  // was never read — removed as dead code.)
  const vision = context.project.vision || '';
  // Analyze what's built
  if (actualPages.some(p => p.includes('plan') || p.includes('getting-started'))) {
    analysis.builtFeatures.push({
      name: 'Project Planning System',
      evidence: actualPages.filter(p => p.includes('plan')).slice(0, 3),
      status: 'built'
    });
  }
  if (actualPages.some(p => p.includes('v_ai_chat')) && actualAPIs.some(a => a.includes('ai/chat'))) {
    analysis.builtFeatures.push({
      name: 'AI Chat Interface',
      evidence: ['v_ai_chat page', 'ai/chat API'],
      status: 'built'
    });
  }
  if (actualAPIs.some(a => a.includes('github/'))) {
    analysis.builtFeatures.push({
      name: 'GitHub Integration',
      evidence: actualAPIs.filter(a => a.includes('github/')),
      status: 'built'
    });
  }
  if (actualAPIs.some(a => a.includes('cursor/'))) {
    analysis.builtFeatures.push({
      name: 'Cursor History Import',
      evidence: actualAPIs.filter(a => a.includes('cursor/')),
      status: 'built'
    });
  }
  if (actualAPIs.some(a => a.includes('sessions/'))) {
    analysis.builtFeatures.push({
      name: 'Session Tracking',
      evidence: ['sessions/track', 'sessions/associate-project'],
      status: 'built'
    });
  }
  if (actualPages.some(p => p.includes('audit'))) {
    analysis.builtFeatures.push({
      name: 'Project Audit Report',
      evidence: ['audit page', 'audit/generate API'],
      status: 'built'
    });
  }
  // Identify gaps based on vision
  if (vision.includes('marketing automation') && !actualPages.some(p => p.includes('marketing'))) {
    analysis.missingFeatures.push({
      name: 'Marketing Automation UI',
      reason: 'Mentioned in vision but no UI found',
      priority: 'high'
    });
  }
  if (vision.includes('communication automation')) {
    const hasCommAutomation = actualAPIs.some(a =>
      a.includes('email') || a.includes('slack') || a.includes('notification')
    );
    if (!hasCommAutomation) {
      analysis.missingFeatures.push({
        name: 'Communication Automation',
        reason: 'Mentioned in vision but no APIs found',
        priority: 'high'
      });
    }
  }
  // Check for production readiness
  if (!actualAPIs.some(a => a.includes('health') || a.includes('status'))) {
    analysis.missingFeatures.push({
      name: 'Health Check Endpoint',
      reason: 'Needed for production monitoring',
      priority: 'medium'
    });
  }
  // Check for onboarding
  const hasOnboarding = actualPages.some(p => p.includes('getting-started') || p.includes('onboarding'));
  if (hasOnboarding) {
    analysis.builtFeatures.push({
      name: 'User Onboarding Flow',
      evidence: actualPages.filter(p => p.includes('getting-started')),
      status: 'built'
    });
  } else {
    analysis.missingFeatures.push({
      name: 'User Onboarding Tutorial',
      reason: 'Critical for first-time users',
      priority: 'high'
    });
  }
  // Check for task management: backend plan APIs without a tracking UI.
  const hasTaskUI = actualPages.some(p => p.includes('task') || p.includes('checklist') || p.includes('todo'));
  if (!hasTaskUI && actualAPIs.some(a => a.includes('plan/'))) {
    analysis.missingFeatures.push({
      name: 'Task Management UI',
      reason: 'Have plan APIs but no UI to track tasks',
      priority: 'high'
    });
  }
  // Specific insights from commit history
  const recentCommits = context.codebase?.topFiles || [];
  if (recentCommits.length > 0) {
    analysis.specificInsights.push(
      `Recently worked on: ${recentCommits.slice(0, 3).map((f: any) => f.filePath.split('/').pop()).join(', ')}`
    );
  }
  // Activity insights
  const topFiles = context.activity?.topEditedFiles || [];
  if (topFiles.length > 0) {
    const topFile = topFiles[0].file.split('/').pop();
    analysis.specificInsights.push(`Most edited: ${topFile} (${topFiles[0].count} times)`);
  }
  return analysis;
}
/**
 * Generate the intelligent launch plan from the codebase analysis.
 * Builds up to three categories: Product Completion (from missing features),
 * Polish & Integration (from built features needing wiring), and the
 * always-present Launch Preparation tasks.
 */
function generateIntelligentPlan(context: any, analysis: any) {
  const plan = {
    summary: `Based on ${analysis.builtFeatures.length} built features and ${analysis.missingFeatures.length} identified gaps`,
    categories: [] as any[]
  };
  // Product Completion (based on what's actually missing)
  const productTasks: any[] = [];
  for (const missing of analysis.missingFeatures) {
    if (missing.name === 'Task Management UI') {
      productTasks.push({
        // Plain string literal — the original used a template literal with no
        // interpolation.
        id: 'prod-task-ui',
        title: 'Build Task Management UI',
        description: `You have plan/simulate API but no UI. Create a checklist interface to show and track V1 launch tasks.`,
        status: 'pending',
        priority: missing.priority,
        specificTo: 'Your codebase has the backend but missing frontend'
      });
    }
    if (missing.name === 'Marketing Automation UI') {
      productTasks.push({
        id: 'prod-mkt-ui',
        title: 'Build Marketing Automation Dashboard',
        description: `Your vision mentions marketing automation. Create UI for /plan/marketing API to manage campaigns.`,
        status: 'pending',
        priority: missing.priority,
        specificTo: 'Mentioned in your vision statement'
      });
    }
    if (missing.name === 'Communication Automation') {
      productTasks.push({
        id: 'prod-comm-auto',
        title: 'Add Communication Automation',
        description: `Build email/Slack notification system for project updates and milestones.`,
        status: 'pending',
        priority: missing.priority,
        specificTo: 'Core to your vision: "communication automation"'
      });
    }
    if (missing.name === 'User Onboarding Tutorial') {
      productTasks.push({
        id: 'prod-onboard',
        title: 'Create Interactive Onboarding',
        description: `Guide new users through: 1) New vs existing project, 2) GitHub connect, 3) Run Cursor import, 4) Define vision.`,
        status: 'pending',
        priority: missing.priority,
        specificTo: 'Your vision flow from earlier conversation'
      });
    }
  }
  if (productTasks.length > 0) {
    plan.categories.push({
      name: 'Product Completion',
      status: 'in_progress',
      description: 'Missing features identified from your codebase and vision',
      tasks: productTasks
    });
  }
  // Polish Existing Features (based on what's built but might need work)
  const polishTasks: any[] = [];
  for (const built of analysis.builtFeatures) {
    if (built.name === 'Project Planning System') {
      polishTasks.push({
        id: 'polish-plan',
        title: 'Connect Planning APIs to UI',
        description: `You have /plan/mvp, /plan/marketing, /plan/simulate APIs. Ensure they're all wired to your ${built.evidence.length} planning pages.`,
        status: 'in_progress',
        priority: 'high',
        specificTo: `Found ${built.evidence.length} planning pages in your codebase`
      });
    }
  }
  if (polishTasks.length > 0) {
    plan.categories.push({
      name: 'Polish & Integration',
      status: 'in_progress',
      description: 'Connect your existing features together',
      tasks: polishTasks
    });
  }
  // Launch Readiness (production concerns) — always included.
  const launchTasks = [
    {
      id: 'launch-monitoring',
      title: 'Add Production Monitoring',
      description: `Add health check endpoint and error tracking for your ${context.codebase?.totalCommits} commits of code.`,
      status: 'pending',
      priority: 'high',
      specificTo: 'Your 104k lines of code need monitoring'
    },
    {
      id: 'launch-docs',
      title: 'Document All Features',
      description: `Create docs for your ${analysis.builtFeatures.length} built features: ${analysis.builtFeatures.map((f: any) => f.name).join(', ')}.`,
      status: 'pending',
      priority: 'medium',
      specificTo: `Specific to your ${analysis.builtFeatures.length} features`
    },
    {
      id: 'launch-demo',
      title: 'Create Demo Video',
      description: `Show: GitHub import → Cursor analysis → AI chat → Launch plan. Highlight your unique value.`,
      status: 'pending',
      priority: 'high',
      specificTo: 'Your specific user journey'
    }
  ];
  plan.categories.push({
    name: 'Launch Preparation',
    status: 'pending',
    description: 'Get ready for public launch',
    tasks: launchTasks
  });
  return plan;
}

View File

@@ -0,0 +1,30 @@
import { NextResponse } from 'next/server';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { runMarketingPlanning } from '@/lib/ai/marketing-agent';
/**
 * POST /api/projects/[projectId]/plan/marketing
 * Generates a marketing plan for the project via the Gemini-backed agent.
 * Returns 400 when projectId is missing, 500 on any agent/LLM failure.
 */
export async function POST(
  _request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const client = new GeminiLlmClient();
    const plan = await runMarketingPlanning(projectId, client);
    return NextResponse.json({ marketingPlan: plan });
  } catch (error) {
    console.error('[plan/marketing] Failed to generate marketing plan', error);
    const details = error instanceof Error ? error.message : String(error);
    return NextResponse.json(
      { error: 'Failed to generate marketing plan', details },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,30 @@
import { NextResponse } from 'next/server';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { runMvpPlanning } from '@/lib/ai/mvp-agent';
/**
 * POST /api/projects/[projectId]/plan/mvp
 * Generates an MVP plan for the project via the Gemini-backed agent.
 * Returns 400 when projectId is missing, 500 on any agent/LLM failure.
 */
export async function POST(
  _request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const client = new GeminiLlmClient();
    const plan = await runMvpPlanning(projectId, client);
    return NextResponse.json({ mvpPlan: plan });
  } catch (error) {
    console.error('[plan/mvp] Failed to generate MVP plan', error);
    const details = error instanceof Error ? error.message : String(error);
    return NextResponse.json(
      { error: 'Failed to generate MVP plan', details },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,403 @@
import { NextRequest, NextResponse } from 'next/server';
import { getApiUrl } from '@/lib/utils/api-url';
/**
 * Simulates AI-powered V1 Launch Planning.
 * Loads the complete project context, runs the readiness analysis, and
 * returns the derived launch plan plus immediate next steps.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // 1. Load complete project context
    const contextUrl = getApiUrl(`/api/projects/${projectId}/context`, request);
    const contextResponse = await fetch(contextUrl);
    if (!contextResponse.ok) {
      return NextResponse.json(
        { error: 'Failed to load project context' },
        { status: 500 }
      );
    }
    const context = await contextResponse.json();
    // 2. Simulated AI analysis + 3. derived launch plan
    const aiAnalysis = analyzeProjectForV1Launch(context);
    const launchPlan = generateV1LaunchPlan(context, aiAnalysis);
    const { dateRange, dataSources } = context.timeline;
    return NextResponse.json({
      projectContext: {
        name: context.project.name,
        vision: context.project.vision,
        historicalData: {
          totalDays: dateRange.totalDays,
          activeDays: dateRange.activeDays,
          commits: context.codebase.totalCommits,
          sessions: context.activity.totalSessions,
          messages: dataSources.cursor.totalMessages
        }
      },
      aiAnalysis,
      launchPlan,
      nextSteps: generateNextSteps(launchPlan)
    });
  } catch (error) {
    console.error('Error simulating launch plan:', error);
    const details = error instanceof Error ? error.message : String(error);
    return NextResponse.json(
      { error: 'Failed to simulate launch plan', details },
      { status: 500 }
    );
  }
}
/**
 * Analyze project state for V1 launch readiness.
 * Scores completeness (0-100) from vision/code/activity/docs signals and
 * collects human-readable strengths, gaps, and recommendations.
 *
 * Fixes vs the original: the `strengths`/`gaps`/`recommendations` fields were
 * untyped empty array literals (inferred `never[]` under strict mode, so the
 * `push` calls failed to type-check), and the strength messages hard-coded
 * "63 commits" / "~104k lines" instead of the values actually measured.
 */
function analyzeProjectForV1Launch(context: any) {
  const analysis: {
    currentState: string;
    strengths: string[];
    gaps: string[];
    estimatedCompleteness: number;
    recommendations: string[];
  } = {
    currentState: determineCurrentState(context),
    strengths: [],
    gaps: [],
    estimatedCompleteness: 0,
    recommendations: []
  };
  // Analyze codebase maturity — report the real numbers.
  if (context.codebase.totalCommits > 50) {
    analysis.strengths.push(`Active development with ${context.codebase.totalCommits} commits`);
  }
  if (context.codebase.totalLinesAdded > 100000) {
    analysis.strengths.push(`Substantial codebase (~${Math.round(context.codebase.totalLinesAdded / 1000)}k lines added)`);
  }
  // Analyze development activity
  if (context.activity.totalSessions > 100) {
    analysis.strengths.push(`Consistent development (${context.activity.totalSessions} sessions)`);
  }
  // Check for gaps
  if (!context.project.vision) {
    analysis.gaps.push('Product vision not documented');
  } else {
    analysis.strengths.push('Clear product vision defined');
  }
  if (context.documents.length === 0) {
    analysis.gaps.push('No documentation uploaded (specs, PRDs, designs)');
  }
  // Check Git history span
  const daysSinceStart = context.timeline.dateRange.totalDays;
  if (daysSinceStart < 30) {
    analysis.gaps.push('Project is in early stages (< 30 days old)');
  } else if (daysSinceStart > 90) {
    analysis.strengths.push('Mature project (90+ days of development)');
  }
  // Estimate completeness: weighted sum of four binary-ish signals.
  const hasVision = context.project.vision ? 20 : 0;
  const hasCode = context.codebase.totalCommits > 20 ? 40 : 20;
  const hasActivity = context.activity.totalSessions > 50 ? 20 : 10;
  const hasDocs = context.documents.length > 0 ? 20 : 0;
  analysis.estimatedCompleteness = hasVision + hasCode + hasActivity + hasDocs;
  // Generate recommendations by completeness band.
  if (analysis.estimatedCompleteness < 60) {
    analysis.recommendations.push('Focus on core functionality before launch');
    analysis.recommendations.push('Document key features and user flows');
  } else if (analysis.estimatedCompleteness < 80) {
    analysis.recommendations.push('Prepare for beta testing');
    analysis.recommendations.push('Set up monitoring and analytics');
  } else {
    analysis.recommendations.push('Ready for soft launch preparation');
  }
  return analysis;
}
/**
 * Determine the project's current lifecycle stage from commit count and age.
 */
function determineCurrentState(context: any): string {
  const { totalCommits: commits } = context.codebase;
  const { totalDays: days } = context.timeline.dateRange;
  if (commits < 20) return 'Initial Development';
  if (commits < 50) return 'Alpha Stage';
  return commits < 100 && days < 60 ? 'Active Development' : 'Pre-Launch';
}
// Generate V1 launch checklist
/**
 * Build the full V1 launch plan from the readiness analysis.
 *
 * The plan is a mostly-static template of six categories (Product
 * Development, Testing & Quality, Documentation, Infrastructure,
 * Marketing & Launch, Legal & Compliance); a handful of task/category
 * statuses are derived from context signals (commit count, lines added,
 * uploaded documents). Task `dependencies` reference other task ids.
 * The timeline is estimated from `analysis.estimatedCompleteness`.
 */
function generateV1LaunchPlan(context: any, analysis: any) {
  const plan = {
    phase: analysis.currentState,
    estimatedCompletion: `${analysis.estimatedCompleteness}%`,
    categories: [
      {
        name: 'Product Development',
        // Category considered underway once completeness passes 60%.
        status: analysis.estimatedCompleteness > 60 ? 'in_progress' : 'pending',
        tasks: [
          {
            id: 'pd-1',
            title: 'Core Feature Implementation',
            // 40+ commits is the heuristic for "core features done".
            status: context.codebase.totalCommits > 40 ? 'complete' : 'in_progress',
            description: 'Build primary user-facing features',
            dependencies: []
          },
          {
            id: 'pd-2',
            title: 'User Authentication & Authorization',
            status: 'in_progress',
            description: 'Secure login, signup, and permission system',
            dependencies: ['pd-1']
          },
          {
            id: 'pd-3',
            title: 'Database Schema & Models',
            // 50k+ added lines is the heuristic for a settled schema.
            status: context.codebase.totalLinesAdded > 50000 ? 'complete' : 'in_progress',
            description: 'Define data structures and relationships',
            dependencies: []
          },
          {
            id: 'pd-4',
            title: 'API Endpoints',
            status: 'in_progress',
            description: 'REST/GraphQL APIs for frontend communication',
            dependencies: ['pd-3']
          },
          {
            id: 'pd-5',
            title: 'Error Handling & Logging',
            status: 'pending',
            description: 'Comprehensive error management and monitoring',
            dependencies: ['pd-4']
          }
        ]
      },
      {
        name: 'Testing & Quality',
        status: 'pending',
        tasks: [
          {
            id: 'tq-1',
            title: 'Unit Tests',
            status: 'pending',
            description: 'Test individual components and functions',
            dependencies: ['pd-1']
          },
          {
            id: 'tq-2',
            title: 'Integration Tests',
            status: 'pending',
            description: 'Test system interactions',
            dependencies: ['pd-4']
          },
          {
            id: 'tq-3',
            title: 'User Acceptance Testing',
            status: 'pending',
            description: 'Beta testing with real users',
            dependencies: ['tq-1', 'tq-2']
          },
          {
            id: 'tq-4',
            title: 'Performance Testing',
            status: 'pending',
            description: 'Load testing and optimization',
            dependencies: ['tq-2']
          }
        ]
      },
      {
        name: 'Documentation',
        // Having any uploaded documents marks the category as started.
        status: context.documents.length > 0 ? 'in_progress' : 'pending',
        tasks: [
          {
            id: 'doc-1',
            title: 'User Guide',
            status: 'pending',
            description: 'End-user documentation',
            dependencies: ['pd-1']
          },
          {
            id: 'doc-2',
            title: 'API Documentation',
            status: 'pending',
            description: 'Developer-facing API docs',
            dependencies: ['pd-4']
          },
          {
            id: 'doc-3',
            title: 'Onboarding Flow',
            status: 'pending',
            description: 'New user tutorial and setup',
            dependencies: ['doc-1']
          }
        ]
      },
      {
        name: 'Infrastructure',
        status: 'in_progress',
        tasks: [
          {
            id: 'infra-1',
            title: 'Production Environment Setup',
            // 30+ commits is the heuristic for a deployed environment.
            status: context.codebase.totalCommits > 30 ? 'complete' : 'in_progress',
            description: 'Deploy to production servers',
            dependencies: []
          },
          {
            id: 'infra-2',
            title: 'CI/CD Pipeline',
            status: 'pending',
            description: 'Automated testing and deployment',
            dependencies: ['infra-1']
          },
          {
            id: 'infra-3',
            title: 'Monitoring & Alerts',
            status: 'pending',
            description: 'System health monitoring',
            dependencies: ['infra-1']
          },
          {
            id: 'infra-4',
            title: 'Backup & Recovery',
            status: 'pending',
            description: 'Data backup strategy',
            dependencies: ['infra-1']
          }
        ]
      },
      {
        name: 'Marketing & Launch',
        status: 'pending',
        tasks: [
          {
            id: 'mkt-1',
            title: 'Landing Page',
            status: 'pending',
            description: 'Public-facing marketing site',
            dependencies: []
          },
          {
            id: 'mkt-2',
            title: 'Email Marketing Setup',
            status: 'pending',
            description: 'Email campaigns and automation',
            dependencies: ['mkt-1']
          },
          {
            id: 'mkt-3',
            title: 'Analytics Integration',
            status: 'pending',
            description: 'Track user behavior and metrics',
            dependencies: ['pd-1']
          },
          {
            id: 'mkt-4',
            title: 'Launch Strategy',
            status: 'pending',
            description: 'Product Hunt, social media, PR',
            dependencies: ['mkt-1', 'doc-1']
          }
        ]
      },
      {
        name: 'Legal & Compliance',
        status: 'pending',
        tasks: [
          {
            id: 'legal-1',
            title: 'Privacy Policy',
            status: 'pending',
            description: 'GDPR/CCPA compliant privacy policy',
            dependencies: []
          },
          {
            id: 'legal-2',
            title: 'Terms of Service',
            status: 'pending',
            description: 'User agreement and terms',
            dependencies: []
          },
          {
            id: 'legal-3',
            title: 'Security Audit',
            status: 'pending',
            description: 'Third-party security review',
            dependencies: ['pd-5']
          }
        ]
      }
    ],
    timeline: {
      // Days-to-V1 estimate shrinks as completeness grows.
      estimated_days_to_v1: calculateEstimatedDays(analysis),
      recommended_milestones: [
        {
          name: 'Alpha Release',
          description: 'Internal testing with core features',
          target: 'Week 1-2'
        },
        {
          name: 'Beta Release',
          description: 'Limited external user testing',
          target: 'Week 3-4'
        },
        {
          name: 'Soft Launch',
          description: 'Public but limited announcement',
          target: 'Week 5-6'
        },
        {
          name: 'V1 Launch',
          description: 'Full public launch',
          target: 'Week 7-8'
        }
      ]
    }
  };
  return plan;
}
/**
 * Calculate estimated days to V1 from the completeness score.
 * Tiered: >80 → 14 days, >60 → 30, >40 → 60, otherwise 90.
 */
function calculateEstimatedDays(analysis: any): number {
  const tiers: Array<[number, number]> = [
    [80, 14], // ~2 weeks
    [60, 30], // ~1 month
    [40, 60], // ~2 months
  ];
  for (const [threshold, days] of tiers) {
    if (analysis.estimatedCompleteness > threshold) return days;
  }
  return 90; // ~3 months
}
/**
 * Generate immediate next steps: the first actionable (pending or
 * in-progress) task from each category, capped at five entries.
 */
function generateNextSteps(plan: any) {
  const nextSteps: any[] = [];
  for (const category of plan.categories) {
    if (nextSteps.length >= 5) break;
    const actionable = category.tasks.find(
      (t: any) => t.status === 'pending' || t.status === 'in_progress'
    );
    if (actionable) {
      nextSteps.push({
        category: category.name,
        task: actionable.title,
        priority: nextSteps.length + 1,
        description: actionable.description
      });
    }
  }
  return nextSteps;
}

View File

@@ -0,0 +1,199 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { z } from 'zod';
// Zod schema for the structured market-research response returned by the LLM
// (validated via `llm.structuredCall`). Each top-level array feeds a section
// of the stored research document and derived knowledge items.
const MarketResearchSchema = z.object({
  // Candidate niches where the product could win.
  targetNiches: z.array(z.object({
    name: z.string(),
    description: z.string(),
    marketSize: z.string(),
    competitionLevel: z.enum(['low', 'medium', 'high']),
    opportunity: z.string(),
  })),
  // Known competitors and their positioning.
  competitors: z.array(z.object({
    name: z.string(),
    positioning: z.string(),
    strengths: z.array(z.string()),
    weaknesses: z.array(z.string()),
  })),
  // Underserved opportunities, ranked by impact.
  marketGaps: z.array(z.object({
    gap: z.string(),
    impact: z.enum(['low', 'medium', 'high']),
    reasoning: z.string(),
  })),
  // Free-form strategic recommendations.
  recommendations: z.array(z.string()),
  // Citations/sources the model claims to have used.
  sources: z.array(z.string()),
});
/**
 * POST /api/projects/[projectId]/market-research
 *
 * Runs an LLM-driven market-research pass for a project: builds a one-line
 * idea context from the project document, asks the Gemini client for a
 * structured analysis (validated against MarketResearchSchema), persists the
 * result in `marketResearch`, and fans each niche/gap out into the
 * `knowledge` collection for vector search.
 *
 * Auth: requires a Firebase ID token in the Authorization header.
 * Returns: { success, research, researchId, knowledgeItemsCreated } on 200;
 * 401 on auth failure, 404 if the project is missing, 500 on any other error.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Require and verify a Firebase ID token ("Bearer <token>").
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const token = authHeader.substring(7);
    const auth = getAdminAuth();
    const decoded = await auth.verifyIdToken(token);
    if (!decoded?.uid) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    // NOTE(review): the token is verified but project OWNERSHIP is not
    // checked — as written, any authenticated user can run research against
    // any projectId. Confirm whether an owner check is required here.
    // Get project data
    const adminDb = getAdminDb();
    const projectRef = adminDb.collection('projects').doc(projectId);
    const projectDoc = await projectRef.get();
    if (!projectDoc.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectDoc.data();
    const productVision = projectData?.productVision || '';
    const productName = projectData?.productName || '';
    const phaseData = projectData?.phaseData || {};
    const canonicalModel = phaseData.canonicalProductModel || {};
    // Build context for the agent: prefer the canonical one-liner, then the
    // stored vision, then a generic fallback built from the product name.
    const ideaContext = canonicalModel.oneLiner || productVision ||
      `${productName}: Help users build and launch products faster`;
    console.log('[Market Research] Starting research for:', ideaContext);
    // Initialize LLM client
    const llm = new GeminiLlmClient();
    // Conduct market research using the agent
    const systemPrompt = `You are a market research analyst specializing in finding product-market fit and identifying underserved niches.
Your task is to analyze the given product idea and conduct comprehensive market research to:
1. Identify specific target niches that would benefit most from this product
2. Analyze competitors and their positioning
3. Find market gaps and opportunities
4. Provide actionable recommendations
Be specific, data-driven, and focused on actionable insights.`;
    // Optional canonical-model fields are interpolated only when present so
    // the prompt has no empty labelled lines.
    const userPrompt = `Analyze this product idea and conduct market research:
Product Idea: "${ideaContext}"
${canonicalModel.problem ? `Problem Being Solved: ${canonicalModel.problem}` : ''}
${canonicalModel.targetUser ? `Target User: ${canonicalModel.targetUser}` : ''}
${canonicalModel.coreSolution ? `Core Solution: ${canonicalModel.coreSolution}` : ''}
Provide a comprehensive market research analysis including:
- Target niches with high potential
- Competitor analysis
- Market gaps and opportunities
- Strategic recommendations
Focus on finding specific, underserved niches where this product can win.`;
    // Structured call: the response is parsed/validated against
    // MarketResearchSchema, so `research` below is schema-shaped.
    const research = await llm.structuredCall({
      model: 'gemini',
      systemPrompt,
      messages: [
        {
          role: 'user',
          content: userPrompt,
        },
      ],
      schema: MarketResearchSchema,
      temperature: 0.7,
    });
    console.log('[Market Research] Research completed:', {
      niches: research.targetNiches.length,
      competitors: research.competitors.length,
      gaps: research.marketGaps.length,
    });
    // Store research results in Firestore
    const researchRef = adminDb.collection('marketResearch').doc();
    await researchRef.set({
      id: researchRef.id,
      projectId,
      userId: decoded.uid,
      research,
      ideaContext,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    // Also store as knowledge items for vector search
    const knowledgePromises = [];
    // Store each niche as a knowledge item
    for (const niche of research.targetNiches) {
      const nicheRef = adminDb.collection('knowledge').doc();
      knowledgePromises.push(
        nicheRef.set({
          id: nicheRef.id,
          projectId,
          userId: decoded.uid,
          sourceType: 'research',
          title: `Target Niche: ${niche.name}`,
          content: `${niche.description}\n\nMarket Size: ${niche.marketSize}\nCompetition: ${niche.competitionLevel}\n\nOpportunity: ${niche.opportunity}`,
          sourceMeta: {
            origin: 'vibn',
            researchType: 'market_niche',
            researchId: researchRef.id,
          },
          createdAt: new Date(),
          updatedAt: new Date(),
        })
      );
    }
    // Store market gaps
    for (const gap of research.marketGaps) {
      const gapRef = adminDb.collection('knowledge').doc();
      knowledgePromises.push(
        gapRef.set({
          id: gapRef.id,
          projectId,
          userId: decoded.uid,
          sourceType: 'research',
          title: `Market Gap: ${gap.gap.substring(0, 50)}`,
          content: `${gap.gap}\n\nImpact: ${gap.impact}\n\nReasoning: ${gap.reasoning}`,
          sourceMeta: {
            origin: 'vibn',
            researchType: 'market_gap',
            researchId: researchRef.id,
          },
          createdAt: new Date(),
          updatedAt: new Date(),
        })
      );
    }
    // All knowledge writes run concurrently; they are NOT atomic — a partial
    // failure can leave some items written (acceptable for derived data).
    await Promise.all(knowledgePromises);
    console.log('[Market Research] Stored', knowledgePromises.length, 'knowledge items');
    return NextResponse.json({
      success: true,
      research,
      researchId: researchRef.id,
      knowledgeItemsCreated: knowledgePromises.length,
    });
  } catch (error) {
    console.error('[Market Research] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to conduct market research',
        details: error instanceof Error ? error.message : 'Unknown error'
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,115 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
export async function GET(
request: Request,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// Authentication (skip in development if no auth header)
const authHeader = request.headers.get('Authorization');
const isDevelopment = process.env.NODE_ENV === 'development';
if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const token = authHeader.substring(7);
const auth = getAdminAuth();
const decoded = await auth.verifyIdToken(token);
if (!decoded?.uid) {
return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
}
}
// Fetch project from Firestore
const adminDb = getAdminDb();
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const projectData = projectDoc.data();
return NextResponse.json({
success: true,
project: {
id: projectDoc.id,
...projectData,
},
});
} catch (error) {
console.error('[API /projects/:id] Error fetching project:', error);
return NextResponse.json(
{
error: 'Failed to fetch project',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}
/**
 * PATCH /api/projects/[projectId]
 *
 * Partially updates a project document. Only the whitelisted fields below
 * (vision, description, name, githubRepo) are copied from the request body;
 * `updatedAt` is always refreshed to the current ISO timestamp.
 *
 * Auth: skipped in development when no Authorization header is present;
 * any supplied Bearer token must verify.
 */
export async function PATCH(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const body = await request.json();

    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
      // NOTE(review): the token is verified but project ownership is not
      // checked — presumably any authenticated user may update any project;
      // confirm whether an owner check is required.
    }

    // Build the update payload. Record<string, unknown> (rather than `any`)
    // keeps the payload type-checked while allowing a dynamic field set.
    const adminDb = getAdminDb();
    const updateData: Record<string, unknown> = {};
    // Only update fields that are provided
    if (body.vision !== undefined) updateData.vision = body.vision;
    if (body.description !== undefined) updateData.description = body.description;
    if (body.name !== undefined) updateData.name = body.name;
    if (body.githubRepo !== undefined) updateData.githubRepo = body.githubRepo;
    updateData.updatedAt = new Date().toISOString();
    await adminDb.collection('projects').doc(projectId).update(updateData);

    return NextResponse.json({
      success: true,
      message: 'Project updated successfully',
      updated: Object.keys(updateData)
    });
  } catch (error) {
    console.error('[API /projects/:id] Error updating project:', error);
    return NextResponse.json(
      {
        error: 'Failed to update project',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,62 @@
/**
* Manual Extraction Trigger
*
* Endpoint to manually run backend extraction for a project.
* Useful for testing or re-running extraction.
*/
import { NextResponse } from 'next/server';
import { getAdminAuth } from '@/lib/firebase/admin';
import { runBackendExtractionForProject } from '@/lib/server/backend-extractor';
// Allow up to 5 minutes — extraction can be slow for large projects.
export const maxDuration = 300; // 5 minutes for extraction

/**
 * POST /api/projects/[projectId]/extract
 *
 * Manually triggers backend extraction for a project and waits for it to
 * finish. Requires a valid Firebase ID token.
 *
 * `context.params` is typed as either a promise or a plain object (older and
 * newer Next.js route conventions); `await` resolves both forms, so the
 * previous explicit `'then' in context.params` check was unnecessary.
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
  try {
    // Verify auth: a Bearer Firebase ID token is required.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    try {
      await adminAuth.verifyIdToken(idToken);
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // `await` on a non-promise value simply yields it, so this handles both
    // the async and the plain-object params shapes.
    const { projectId } = await context.params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    console.log(`[API] Manual extraction triggered for project ${projectId}`);
    // Run extraction synchronously; the response is sent only on completion.
    await runBackendExtractionForProject(projectId);

    return NextResponse.json({
      success: true,
      message: 'Extraction completed successfully',
    });
  } catch (error) {
    console.error('[API] Extraction failed:', error);
    return NextResponse.json(
      {
        error: 'Extraction failed',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,70 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminStorage } from '@/lib/firebase/admin';
/**
 * GET /api/projects/[projectId]/storage/files
 *
 * Lists up to 100 files stored under `projects/{projectId}/` in the default
 * Firebase Storage bucket. Storage failures (e.g. missing admin credentials)
 * are deliberately swallowed and reported as an empty file list so the UI
 * degrades gracefully.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // List files in Firebase Storage for this project. Explicitly typed
    // (instead of an implicitly-typed evolving `[]`) so the response shape
    // is visible at the declaration; metadata fields are passed through
    // from the GCS SDK untouched.
    let fileList: Array<Record<string, unknown>> = [];
    try {
      const storage = getAdminStorage();
      const bucket = storage.bucket();
      const [files] = await bucket.getFiles({
        prefix: `projects/${projectId}/`,
        maxResults: 100,
      });
      fileList = files.map(file => ({
        name: file.name.split('/').pop() || file.name, // basename of the object path
        fullPath: file.name,
        size: file.metadata.size,
        contentType: file.metadata.contentType,
        timeCreated: file.metadata.timeCreated,
        updated: file.metadata.updated,
      }));
      console.log('[API /storage/files] Found', fileList.length, 'files');
    } catch (storageError) {
      console.error('[API /storage/files] Firebase Storage query failed:', storageError);
      console.error('[API /storage/files] This is likely due to missing Firebase Admin credentials');
      // Return empty array instead of failing — best-effort listing.
      fileList = [];
    }

    return NextResponse.json({
      success: true,
      files: fileList,
      count: fileList.length,
    });
  } catch (error) {
    console.error('[API] Error fetching storage files:', error);
    return NextResponse.json(
      { error: 'Failed to fetch storage files' },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,310 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
import { getApiUrl } from '@/lib/utils/api-url';
/**
 * Timeline View Data
 *
 * Structures MVP checklist pages with their development sessions on a
 * timeline. Loads the project doc plus three sibling APIs (mvp-checklist,
 * git-history, activity) in parallel, correlates each checklist page with
 * extension sessions and git commits via fuzzy name matching, and returns
 * the sorted work items with an overall date range and summary counts.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Load project data, MVP checklist, git history, and activity in parallel
    const db = admin.firestore();
    const projectRef = db.collection('projects').doc(projectId);
    const [projectDoc, checklistResponse, gitResponse, activityResponse] = await Promise.all([
      projectRef.get(),
      fetch(getApiUrl(`/api/projects/${projectId}/mvp-checklist`, request)),
      fetch(getApiUrl(`/api/projects/${projectId}/git-history`, request)),
      fetch(getApiUrl(`/api/projects/${projectId}/activity`, request))
    ]);
    const projectData = projectDoc.exists ? projectDoc.data() : null;
    const checklist = await checklistResponse.json();
    const git = await gitResponse.json();
    const activity = await activityResponse.json();

    // If no valid checklist exists yet, return an empty-but-well-formed
    // payload so the UI can render a call-to-action instead of erroring.
    if (!checklist || checklist.error || !checklist.mvpChecklist || !Array.isArray(checklist.mvpChecklist)) {
      return NextResponse.json({
        workItems: [],
        timeline: {
          start: new Date().toISOString(),
          end: new Date().toISOString(),
          totalDays: 0
        },
        summary: {
          totalWorkItems: 0,
          withActivity: 0,
          noActivity: 0,
          built: 0,
          missing: 0
        },
        projectCreator: projectData?.createdBy || projectData?.owner || 'You',
        message: 'No MVP checklist generated yet. Click "Regenerate Plan" to create one.'
      });
    }

    // Build lightweight history object with just what we need: a flat list
    // of git commits and extension sessions, tagged by type.
    const history = {
      chronologicalEvents: [
        // Add git commits
        ...(git.commits || []).map((commit: any) => ({
          type: 'git_commit',
          timestamp: new Date(commit.date).toISOString(),
          data: {
            hash: commit.hash,
            message: commit.message,
            filesChanged: commit.filesChanged,
            insertions: commit.insertions,
            deletions: commit.deletions
          }
        })),
        // Add extension sessions
        ...(activity.sessions || []).map((session: any) => ({
          type: 'extension_session',
          timestamp: session.startTime,
          data: {
            duration: session.duration,
            filesModified: session.filesModified
          }
        }))
      ]
    };

    // Map pages to work items with session data. Each item gets its related
    // sessions/commits plus a derived start/end date and activity counts.
    const workItems = [];
    for (const category of checklist.mvpChecklist) {
      for (const item of category.pages) {
        const relatedSessions = findRelatedSessions(item, history);
        const relatedCommits = findRelatedCommits(item, history);
        const hasActivity = relatedSessions.length > 0 || relatedCommits.length > 0;
        const startDate = hasActivity
          ? getEarliestDate([...relatedSessions, ...relatedCommits])
          : null;
        const endDate = hasActivity
          ? getLatestDate([...relatedSessions, ...relatedCommits])
          : null;
        workItems.push({
          id: `${category.category.toLowerCase().replace(/\s+/g, '-')}-${item.title.toLowerCase().replace(/\s+/g, '-')}`,
          title: item.title,
          category: category.category,
          path: item.path,
          status: item.status,
          priority: item.priority,
          startDate,
          endDate,
          duration: calculateDuration(startDate, endDate),
          sessionsCount: relatedSessions.length,
          commitsCount: relatedCommits.length,
          totalActivity: relatedSessions.length + relatedCommits.length,
          sessions: relatedSessions,
          commits: relatedCommits,
          requirements: generateRequirements(item, { name: category.category }),
          evidence: item.evidence || [],
          note: item.note
        });
      }
    }

    // Sort by category order and status
    // Priority: Core Features -> Marketing -> Social -> Content -> Settings
    const categoryOrder = [
      'Core Features',
      'Marketing',
      'Social',
      'Content',
      'Settings'
    ];
    workItems.sort((a, b) => {
      // First by category
      const catCompare = categoryOrder.indexOf(a.category) - categoryOrder.indexOf(b.category);
      if (catCompare !== 0) return catCompare;
      // Then by status (built first, then in_progress, then missing).
      // BUGFIX: use `??` rather than `||` — 'built' maps to 0, which is
      // falsy, so `|| 3` wrongly demoted built items to the unknown rank.
      const statusOrder = { 'built': 0, 'in_progress': 1, 'missing': 2 };
      return (statusOrder[a.status as keyof typeof statusOrder] ?? 3) -
             (statusOrder[b.status as keyof typeof statusOrder] ?? 3);
    });

    // Calculate timeline range from every dated work item; the end of the
    // timeline is always "today".
    const allDates = workItems
      .filter(w => w.startDate)
      .flatMap(w => [w.startDate, w.endDate].filter(Boolean))
      .map(d => new Date(d!));
    const timelineStart = allDates.length > 0
      ? new Date(Math.min(...allDates.map(d => d.getTime())))
      : new Date();
    const timelineEnd = new Date(); // Today

    return NextResponse.json({
      workItems,
      timeline: {
        start: timelineStart.toISOString(),
        end: timelineEnd.toISOString(),
        totalDays: Math.ceil((timelineEnd.getTime() - timelineStart.getTime()) / (1000 * 60 * 60 * 24))
      },
      summary: {
        totalWorkItems: workItems.length,
        withActivity: workItems.filter(w => w.totalActivity > 0).length,
        noActivity: workItems.filter(w => w.totalActivity === 0).length,
        built: workItems.filter(w => w.status === 'built').length,
        missing: workItems.filter(w => w.status === 'missing').length
      },
      projectCreator: projectData?.createdBy || projectData?.owner || 'You'
    });
  } catch (error) {
    console.error('Error generating timeline view:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate timeline view',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
 * Returns the extension sessions in `history.chronologicalEvents` whose
 * modified files look related to the given checklist page — a file matches
 * when its lowercased path contains the page path, the hyphenated page
 * title, or any of the page's evidence strings.
 */
function findRelatedSessions(page: any, history: any) {
  const pathNeedle = page.path.toLowerCase();
  const titleNeedle = page.title.toLowerCase().replace(/\s+/g, '-');

  const fileMatchesPage = (file: string): boolean => {
    const lowered = file.toLowerCase();
    if (lowered.includes(pathNeedle) || lowered.includes(titleNeedle)) return true;
    return Boolean(
      page.evidence &&
      page.evidence.some((ev: string) => lowered.includes(ev.toLowerCase()))
    );
  };

  const related = [];
  for (const event of history.chronologicalEvents) {
    if (event.type !== 'extension_session') continue;
    const filesModified: string[] = event.data.filesModified || [];
    if (filesModified.some(fileMatchesPage)) {
      related.push({
        timestamp: event.timestamp,
        duration: event.data.duration,
        filesModified: event.data.filesModified
      });
    }
  }
  return related;
}
/**
 * Returns the git commits in `history.chronologicalEvents` whose commit
 * message looks related to the page — the lowercased message must contain
 * the page path, the whitespace-normalized page title, or one of the page's
 * evidence strings.
 */
function findRelatedCommits(page: any, history: any) {
  const pathNeedle = page.path.toLowerCase();
  const titleNeedle = page.title.toLowerCase().replace(/\s+/g, ' ');

  const related = [];
  for (const event of history.chronologicalEvents) {
    if (event.type !== 'git_commit') continue;
    const message = event.data.message.toLowerCase();
    const matched =
      message.includes(pathNeedle) ||
      message.includes(titleNeedle) ||
      Boolean(
        page.evidence &&
        page.evidence.some((ev: string) => message.includes(ev.toLowerCase()))
      );
    if (matched) {
      related.push({
        timestamp: event.timestamp,
        hash: event.data.hash,
        message: event.data.message,
        insertions: event.data.insertions,
        deletions: event.data.deletions
      });
    }
  }
  return related;
}
/** ISO string of the earliest `timestamp` among events, or null if empty. */
function getEarliestDate(events: any[]) {
  if (!events.length) return null;
  let earliestMs = Infinity;
  for (const event of events) {
    earliestMs = Math.min(earliestMs, new Date(event.timestamp).getTime());
  }
  return new Date(earliestMs).toISOString();
}
/** ISO string of the latest `timestamp` among events, or null if empty. */
function getLatestDate(events: any[]) {
  if (!events.length) return null;
  let latestMs = -Infinity;
  for (const event of events) {
    latestMs = Math.max(latestMs, new Date(event.timestamp).getTime());
  }
  return new Date(latestMs).toISOString();
}
/**
 * Whole days (rounded up) between two ISO timestamps; 0 when either bound
 * is missing/empty.
 */
function calculateDuration(startDate: string | null, endDate: string | null): number {
  if (!startDate || !endDate) return 0;
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const elapsedMs = new Date(endDate).getTime() - new Date(startDate).getTime();
  return Math.ceil(elapsedMs / MS_PER_DAY);
}
/**
 * Produces a hard-coded requirement checklist for a page based on keyword
 * matches in its title (with generic fallbacks keyed off `page.status`).
 * `category` is accepted for signature compatibility but not consulted.
 */
function generateRequirements(page: any, category: any): any[] {
  const req = (id: number, text: string, status: string) => ({ id, text, status });
  const title: string = page.title;

  if (title.includes('Sign In') || title.includes('Sign Up')) {
    return [
      req(1, 'Email/password authentication', 'built'),
      req(2, 'GitHub OAuth integration', 'built'),
      req(3, 'Password reset flow', 'missing'),
      req(4, 'Session management', 'built')
    ];
  }
  if (title.includes('Checklist')) {
    return [
      req(1, 'Display generated tasks from API', 'missing'),
      req(2, 'Mark tasks as complete', 'missing'),
      req(3, 'Drag-and-drop reordering', 'missing'),
      req(4, 'Save checklist state', 'missing'),
      req(5, 'Export to markdown/PDF', 'missing')
    ];
  }
  if (title.includes('Vision') || title.includes('Mission')) {
    return [
      req(1, 'Capture product vision text', 'missing'),
      req(2, 'AI-assisted vision refinement', 'missing'),
      req(3, 'Upload supporting documents', 'missing'),
      req(4, 'Save vision to project metadata', 'built')
    ];
  }
  if (title.includes('Marketing Automation')) {
    return [
      req(1, 'Connect to /plan/marketing API', 'missing'),
      req(2, 'Generate landing page copy', 'missing'),
      req(3, 'Generate email sequences', 'missing'),
      req(4, 'Export marketing materials', 'missing')
    ];
  }
  if (title.includes('Communication Automation')) {
    return [
      req(1, 'Email template builder', 'missing'),
      req(2, 'Slack integration', 'missing'),
      req(3, 'Automated project updates', 'missing'),
      req(4, 'Team notifications', 'missing')
    ];
  }
  if (title.includes('Import') && title.includes('Modal')) {
    return [
      req(1, 'Start from scratch option', 'built'),
      req(2, 'Import from GitHub', 'built'),
      req(3, 'Import from local folder', 'missing'),
      req(4, 'Auto-detect project type', 'missing'),
      req(5, 'Trigger Cursor import', 'built'),
      req(6, 'Create .vibn file', 'built')
    ];
  }
  if (page.status === 'built') {
    return [
      req(1, 'Page built and accessible', 'built'),
      req(2, 'Connected to backend API', 'built')
    ];
  }
  // Generic fallback for pages not yet started.
  return [
    req(1, 'Design page layout', 'missing'),
    req(2, 'Implement core functionality', 'missing'),
    req(3, 'Connect to backend API', 'missing'),
    req(4, 'Add error handling', 'missing')
  ];
}

View File

@@ -0,0 +1,397 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { exec } from 'child_process';
import { promisify } from 'util';
// Promisified child_process.exec — used below to shell out to `git log`.
const execAsync = promisify(exec);
/**
 * One calendar day on the unified activity timeline, aggregating the three
 * data sources this route merges: git commits, VS Code extension sessions,
 * and Cursor chat messages.
 */
interface TimelineDay {
  date: string; // YYYY-MM-DD format
  dayOfWeek: string; // e.g. "Monday" (en-US long weekday name)
  // Commits made on this day, parsed from `git log --numstat`.
  gitCommits: Array<{
    hash: string; // abbreviated (8-char) commit hash
    time: string;
    author: string;
    message: string;
    filesChanged: number;
    insertions: number;
    deletions: number;
  }>;
  // Extension coding sessions overlapping this day (bucketed by start time).
  extensionSessions: Array<{
    startTime: string;
    endTime: string;
    duration: number; // minutes
    filesModified: string[];
    conversationSummary?: string;
  }>;
  // Individual chat messages from Cursor conversations and session logs.
  cursorMessages: Array<{
    time: string;
    type: 'user' | 'assistant';
    conversationName: string;
    preview: string; // First 100 chars
  }>;
  // Per-day aggregate counters derived from the arrays above.
  summary: {
    totalGitCommits: number;
    totalExtensionSessions: number;
    totalCursorMessages: number;
    linesAdded: number;
    linesRemoved: number;
    uniqueFilesModified: number;
  };
}
/**
 * Response payload for the unified-timeline endpoint: every day in the
 * project's activity range (even empty ones), plus per-source availability
 * metadata so the client can show which data sources contributed.
 */
interface UnifiedTimeline {
  projectId: string;
  // Overall range spanned by all three sources combined.
  dateRange: {
    earliest: string;
    latest: string;
    totalDays: number;
  };
  days: TimelineDay[];
  // Availability and coverage info for each of the three sources.
  dataSources: {
    git: { available: boolean; firstDate: string | null; lastDate: string | null; totalRecords: number };
    extension: { available: boolean; firstDate: string | null; lastDate: string | null; totalRecords: number };
    cursor: { available: boolean; firstDate: string | null; lastDate: string | null; totalRecords: number };
  };
}
/**
 * GET /api/projects/[projectId]/unified-timeline
 *
 * Builds a day-by-day activity timeline for a project by merging three
 * sources: git commits (shelling out to `git log`), extension sessions
 * (Firestore `sessions` collection), and Cursor chat messages (Firestore
 * `cursorConversations` subcollections plus per-session conversation
 * arrays). Each source is loaded best-effort — a failure is logged and that
 * source is simply reported as unavailable in `dataSources`.
 *
 * NOTE(review): no authentication is performed here, unlike sibling routes —
 * confirm whether that is intended.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // 1. Load Git commits
    // NOTE(review): this repo path is hardcoded to a developer machine;
    // presumably it should come from the project's workspacePath — confirm.
    const repoPath = '/Users/markhenderson/ai-proxy';
    let gitCommits: any[] = [];
    let gitFirstDate: string | null = null;
    let gitLastDate: string | null = null;
    try {
      // One-shot log across all refs; --numstat emits per-file
      // "<insertions>\t<deletions>\t<file>" lines after each commit header.
      const { stdout: commitsOutput } = await execAsync(
        `cd "${repoPath}" && git log --all --pretty=format:"%H|%ai|%an|%s" --numstat`,
        { maxBuffer: 10 * 1024 * 1024 }
      );
      if (commitsOutput.trim()) {
        const lines = commitsOutput.split('\n');
        let currentCommit: any = null;
        for (const line of lines) {
          // Header lines contain '|' separators; numstat lines do not.
          // NOTE(review): a commit subject containing '|' would still parse
          // (extra fields are dropped by destructuring), but a numstat
          // filename containing '|' would be misread as a header — unlikely
          // but possible.
          if (line.includes('|')) {
            if (currentCommit) {
              gitCommits.push(currentCommit);
            }
            const [hash, date, author, message] = line.split('|');
            currentCommit = {
              hash: hash.substring(0, 8),
              date,
              author,
              message,
              filesChanged: 0,
              insertions: 0,
              deletions: 0
            };
          } else if (line.trim() && currentCommit) {
            // Accumulate numstat totals; '-' marks binary files (counted 0).
            const parts = line.trim().split('\t');
            if (parts.length === 3) {
              const [insertStr, delStr] = parts;
              const insertions = insertStr === '-' ? 0 : parseInt(insertStr, 10) || 0;
              const deletions = delStr === '-' ? 0 : parseInt(delStr, 10) || 0;
              currentCommit.filesChanged++;
              currentCommit.insertions += insertions;
              currentCommit.deletions += deletions;
            }
          }
        }
        // Flush the final commit being accumulated.
        if (currentCommit) {
          gitCommits.push(currentCommit);
        }
        gitCommits.sort((a, b) => new Date(a.date).getTime() - new Date(b.date).getTime());
        if (gitCommits.length > 0) {
          gitFirstDate = gitCommits[0].date;
          gitLastDate = gitCommits[gitCommits.length - 1].date;
        }
      }
    } catch (error) {
      console.log('⚠️ Could not load Git commits:', error);
    }
    // 2. Load Extension sessions
    let extensionSessions: any[] = [];
    let extensionFirstDate: string | null = null;
    let extensionLastDate: string | null = null;
    try {
      // Try to find sessions by projectId first
      let sessionsSnapshot = await adminDb
        .collection('sessions')
        .where('projectId', '==', projectId)
        .get();
      // If no sessions found by projectId, try by workspacePath
      // NOTE(review): same hardcoded developer path as repoPath above.
      if (sessionsSnapshot.empty) {
        const workspacePath = '/Users/markhenderson/ai-proxy';
        sessionsSnapshot = await adminDb
          .collection('sessions')
          .where('workspacePath', '==', workspacePath)
          .get();
      }
      extensionSessions = sessionsSnapshot.docs.map(doc => {
        const data = doc.data();
        // Handles both Firestore Timestamp (has .toDate) and raw date values.
        const startTime = data.startTime?.toDate?.() || new Date(data.startTime);
        const endTime = data.endTime?.toDate?.() || new Date(data.endTime);
        return {
          startTime,
          endTime,
          filesModified: data.filesModified || [],
          conversationSummary: data.conversationSummary || '',
          conversation: data.conversation || []
        };
      });
      extensionSessions.sort((a, b) =>
        new Date(a.startTime).getTime() - new Date(b.startTime).getTime()
      );
      if (extensionSessions.length > 0) {
        extensionFirstDate = extensionSessions[0].startTime.toISOString();
        extensionLastDate = extensionSessions[extensionSessions.length - 1].endTime.toISOString();
      }
    } catch (error) {
      console.log('⚠️ Could not load extension sessions:', error);
    }
    // 3. Load Cursor messages (from both cursorConversations and extension sessions)
    let cursorMessages: any[] = [];
    let cursorFirstDate: string | null = null;
    let cursorLastDate: string | null = null;
    try {
      // Load from cursorConversations (backfilled historical data)
      const conversationsSnapshot = await adminDb
        .collection('projects')
        .doc(projectId)
        .collection('cursorConversations')
        .get();
      // NOTE(review): one messages query per conversation (N+1 pattern) —
      // acceptable for small counts, could be slow for large histories.
      for (const convDoc of conversationsSnapshot.docs) {
        const conv = convDoc.data();
        const messagesSnapshot = await adminDb
          .collection('projects')
          .doc(projectId)
          .collection('cursorConversations')
          .doc(convDoc.id)
          .collection('messages')
          .orderBy('createdAt', 'asc')
          .get();
        const messages = messagesSnapshot.docs.map(msgDoc => {
          const msg = msgDoc.data();
          return {
            createdAt: msg.createdAt,
            // msg.type uses Cursor's numeric convention: 1 = user message.
            type: msg.type === 1 ? 'user' : 'assistant',
            text: msg.text || '',
            conversationName: conv.name || 'Untitled'
          };
        });
        cursorMessages = cursorMessages.concat(messages);
      }
      // Also load from extension sessions conversation data
      for (const session of extensionSessions) {
        if (session.conversation && Array.isArray(session.conversation)) {
          for (const msg of session.conversation) {
            cursorMessages.push({
              // Fall back to the session start when a message lacks its own
              // timestamp.
              createdAt: msg.timestamp || session.startTime.toISOString(),
              type: msg.role === 'user' ? 'user' : 'assistant',
              text: msg.message || '',
              conversationName: 'Extension Session'
            });
          }
        }
      }
      cursorMessages.sort((a, b) =>
        new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
      );
      if (cursorMessages.length > 0) {
        cursorFirstDate = cursorMessages[0].createdAt;
        cursorLastDate = cursorMessages[cursorMessages.length - 1].createdAt;
      }
    } catch (error) {
      console.log('⚠️ Could not load Cursor messages:', error);
    }
    // 4. Find overall date range
    const allFirstDates = [
      gitFirstDate ? new Date(gitFirstDate) : null,
      extensionFirstDate ? new Date(extensionFirstDate) : null,
      cursorFirstDate ? new Date(cursorFirstDate) : null
    ].filter(d => d !== null) as Date[];
    const allLastDates = [
      gitLastDate ? new Date(gitLastDate) : null,
      extensionLastDate ? new Date(extensionLastDate) : null,
      cursorLastDate ? new Date(cursorLastDate) : null
    ].filter(d => d !== null) as Date[];
    // No data from any source: return an empty-but-well-formed payload.
    if (allFirstDates.length === 0 && allLastDates.length === 0) {
      return NextResponse.json({
        error: 'No timeline data available',
        projectId,
        dateRange: { earliest: null, latest: null, totalDays: 0 },
        days: [],
        dataSources: {
          git: { available: false, firstDate: null, lastDate: null, totalRecords: 0 },
          extension: { available: false, firstDate: null, lastDate: null, totalRecords: 0 },
          cursor: { available: false, firstDate: null, lastDate: null, totalRecords: 0 }
        }
      });
    }
    const earliestDate = new Date(Math.min(...allFirstDates.map(d => d.getTime())));
    const latestDate = new Date(Math.max(...allLastDates.map(d => d.getTime())));
    const totalDays = Math.ceil((latestDate.getTime() - earliestDate.getTime()) / (1000 * 60 * 60 * 24)) + 1;
    // 5. Group data by day
    // Days are keyed by the UTC date portion of toISOString() —
    // NOTE(review): events near midnight may land on a different day than
    // in the user's local timezone; confirm UTC bucketing is intended.
    const dayMap = new Map<string, TimelineDay>();
    // Initialize all days (including empty ones) so the client gets a
    // contiguous range.
    for (let i = 0; i < totalDays; i++) {
      const date = new Date(earliestDate);
      date.setDate(date.getDate() + i);
      const dateKey = date.toISOString().split('T')[0];
      const dayOfWeek = date.toLocaleDateString('en-US', { weekday: 'long' });
      dayMap.set(dateKey, {
        date: dateKey,
        dayOfWeek,
        gitCommits: [],
        extensionSessions: [],
        cursorMessages: [],
        summary: {
          totalGitCommits: 0,
          totalExtensionSessions: 0,
          totalCursorMessages: 0,
          linesAdded: 0,
          linesRemoved: 0,
          uniqueFilesModified: 0
        }
      });
    }
    // Add Git commits to days
    for (const commit of gitCommits) {
      // `%ai` format is "YYYY-MM-DD HH:MM:SS +ZZZZ" — take the date part.
      const dateKey = commit.date.split(' ')[0];
      const day = dayMap.get(dateKey);
      if (day) {
        day.gitCommits.push({
          hash: commit.hash,
          time: commit.date,
          author: commit.author,
          message: commit.message,
          filesChanged: commit.filesChanged,
          insertions: commit.insertions,
          deletions: commit.deletions
        });
        day.summary.totalGitCommits++;
        day.summary.linesAdded += commit.insertions;
        day.summary.linesRemoved += commit.deletions;
      }
    }
    // Add Extension sessions to days (bucketed by their start time)
    for (const session of extensionSessions) {
      const dateKey = new Date(session.startTime).toISOString().split('T')[0];
      const day = dayMap.get(dateKey);
      if (day) {
        const startTime = new Date(session.startTime);
        const endTime = new Date(session.endTime);
        const duration = Math.round((endTime.getTime() - startTime.getTime()) / (1000 * 60));
        day.extensionSessions.push({
          startTime: session.startTime.toISOString(),
          endTime: session.endTime.toISOString(),
          duration,
          filesModified: session.filesModified,
          conversationSummary: session.conversationSummary
        });
        day.summary.totalExtensionSessions++;
        // Track unique files
        // NOTE(review): uniqueness is per-session, then summed — the same
        // file touched in two sessions on one day counts twice.
        const uniqueFiles = new Set([...session.filesModified]);
        day.summary.uniqueFilesModified += uniqueFiles.size;
      }
    }
    // Add Cursor messages to days
    for (const message of cursorMessages) {
      const dateKey = new Date(message.createdAt).toISOString().split('T')[0];
      const day = dayMap.get(dateKey);
      if (day) {
        day.cursorMessages.push({
          time: message.createdAt,
          type: message.type,
          conversationName: message.conversationName,
          preview: message.text.substring(0, 100)
        });
        day.summary.totalCursorMessages++;
      }
    }
    // Convert to array and sort by date
    const days = Array.from(dayMap.values()).sort((a, b) =>
      new Date(a.date).getTime() - new Date(b.date).getTime()
    );
    const timeline: UnifiedTimeline = {
      projectId,
      dateRange: {
        earliest: earliestDate.toISOString(),
        latest: latestDate.toISOString(),
        totalDays
      },
      days,
      dataSources: {
        git: {
          available: gitCommits.length > 0,
          firstDate: gitFirstDate,
          lastDate: gitLastDate,
          totalRecords: gitCommits.length
        },
        extension: {
          available: extensionSessions.length > 0,
          firstDate: extensionFirstDate,
          lastDate: extensionLastDate,
          totalRecords: extensionSessions.length
        },
        cursor: {
          available: cursorMessages.length > 0,
          firstDate: cursorFirstDate,
          lastDate: cursorLastDate,
          totalRecords: cursorMessages.length
        }
      }
    };
    return NextResponse.json(timeline);
  } catch (error) {
    console.error('Error generating unified timeline:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate unified timeline',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,68 @@
import { NextRequest, NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
* Save vision answers to Firestore
*/
export async function POST(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
const body = await request.json();
const { visionAnswers } = body;
if (!visionAnswers || !visionAnswers.q1 || !visionAnswers.q2 || !visionAnswers.q3) {
return NextResponse.json(
{ error: 'All 3 vision answers are required' },
{ status: 400 }
);
}
const adminDb = getAdminDb();
// Save vision answers and mark ready for MVP
await adminDb.collection('projects').doc(projectId).set(
{
visionAnswers: {
q1: visionAnswers.q1,
q2: visionAnswers.q2,
q3: visionAnswers.q3,
allAnswered: true,
updatedAt: visionAnswers.updatedAt || new Date().toISOString(),
},
readyForMVP: true,
currentPhase: 'mvp',
phaseStatus: 'ready',
},
{ merge: true }
);
console.log(`[Vision API] Saved vision answers for project ${projectId}`);
// Trigger MVP generation (async - don't wait)
console.log(`[Vision API] Triggering MVP generation for project ${projectId}...`);
fetch(new URL(`/api/projects/${projectId}/mvp-checklist`, request.url).toString(), {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
}).catch((error) => {
console.error(`[Vision API] Failed to trigger MVP generation:`, error);
});
return NextResponse.json({
success: true,
message: 'Vision answers saved and MVP generation triggered',
});
} catch (error) {
console.error('[Vision API] Error saving vision answers:', error);
return NextResponse.json(
{
error: 'Failed to save vision answers',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,172 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
 * Post a new message/comment on a work item.
 *
 * Creates a message document under the work item's `messages` subcollection
 * and bumps the item's message counter / last-activity timestamp in
 * `workItemStates`.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const { message, author, authorId, type } = await request.json();

    // Both the message text and an author display name are mandatory.
    if (!message || !author) {
      return NextResponse.json(
        { error: 'Message and author are required' },
        { status: 400 }
      );
    }

    const db = admin.firestore();
    const projectRef = db.collection('projects').doc(projectId);

    // Create the message document with a generated id.
    const messageRef = projectRef
      .collection('workItems')
      .doc(itemId)
      .collection('messages')
      .doc();
    await messageRef.set({
      message,
      author,
      authorId: authorId || 'anonymous',
      type: type || 'comment', // comment, feedback, question, etc.
      createdAt: admin.firestore.FieldValue.serverTimestamp(),
      reactions: [],
    });

    // Bump the per-item counter and last-activity timestamp (merge so the
    // state doc is created on first message).
    const stateRef = projectRef.collection('workItemStates').doc(itemId);
    await stateRef.set(
      {
        messageCount: admin.firestore.FieldValue.increment(1),
        lastMessageAt: admin.firestore.FieldValue.serverTimestamp(),
      },
      { merge: true }
    );

    return NextResponse.json({
      success: true,
      messageId: messageRef.id,
    });
  } catch (error) {
    console.error('Error posting message:', error);
    return NextResponse.json(
      {
        error: 'Failed to post message',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Get all messages/comments for a work item, newest first.
 * Firestore Timestamps are serialized to ISO strings for the client.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const db = admin.firestore();

    const snapshot = await db
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('messages')
      .orderBy('createdAt', 'desc')
      .get();

    const messages = snapshot.docs.map((doc) => {
      const data = doc.data();
      return {
        id: doc.id,
        ...data,
        // createdAt may be unset while the server timestamp is pending.
        createdAt: data.createdAt?.toDate().toISOString(),
      };
    });

    return NextResponse.json({ messages, count: messages.length });
  } catch (error) {
    console.error('Error fetching messages:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch messages',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Delete a message from a work item.
 *
 * Query params: messageId (required).
 *
 * Fix: the message's existence is now checked before deleting, and the
 * cached messageCount is only decremented when a document was actually
 * removed — previously repeated deletes of the same id (or a bogus id)
 * drove the counter negative.
 */
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const { searchParams } = new URL(request.url);
    const messageId = searchParams.get('messageId');
    if (!messageId) {
      return NextResponse.json(
        { error: 'Message ID is required' },
        { status: 400 }
      );
    }
    const db = admin.firestore();
    const messageRef = db
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('messages')
      .doc(messageId);

    // Verify the message exists so the counter only moves on real deletes.
    const messageDoc = await messageRef.get();
    if (!messageDoc.exists) {
      return NextResponse.json(
        { error: 'Message not found' },
        { status: 404 }
      );
    }

    // Delete message
    await messageRef.delete();

    // Update message count on the work item state doc.
    await db
      .collection('projects')
      .doc(projectId)
      .collection('workItemStates')
      .doc(itemId)
      .set(
        {
          messageCount: admin.firestore.FieldValue.increment(-1),
        },
        { merge: true }
      );

    return NextResponse.json({
      success: true,
    });
  } catch (error) {
    console.error('Error deleting message:', error);
    return NextResponse.json(
      {
        error: 'Failed to delete message',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,94 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
 * Update a work item's state to 'draft' or 'final'.
 *
 * Work items themselves are derived from the MVP checklist, so state is
 * kept in a dedicated workItemStates subcollection rather than on the item.
 */
export async function PATCH(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const { state } = await request.json();

    const validStates = ['draft', 'final'];
    if (!state || !validStates.includes(state)) {
      return NextResponse.json(
        { error: 'Invalid state. Must be "draft" or "final"' },
        { status: 400 }
      );
    }

    const db = admin.firestore();
    const stateRef = db
      .collection('projects')
      .doc(projectId)
      .collection('workItemStates')
      .doc(itemId);
    await stateRef.set(
      {
        state,
        updatedAt: admin.firestore.FieldValue.serverTimestamp(),
      },
      { merge: true }
    );

    return NextResponse.json({ success: true, state });
  } catch (error) {
    console.error('Error updating work item state:', error);
    return NextResponse.json(
      {
        error: 'Failed to update state',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Get a work item's state; defaults to 'draft' when no state doc exists.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const db = admin.firestore();

    const stateDoc = await db
      .collection('projects')
      .doc(projectId)
      .collection('workItemStates')
      .doc(itemId)
      .get();

    if (!stateDoc.exists) {
      // No explicit state recorded yet — treat as draft.
      return NextResponse.json({
        state: 'draft',
      });
    }

    const data = stateDoc.data();
    return NextResponse.json({
      state: data?.state || 'draft',
      updatedAt: data?.updatedAt,
    });
  } catch (error) {
    console.error('Error fetching work item state:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch state',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,106 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
 * Create a new version of a work item.
 *
 * Body: { description?, changes?, createdBy? } — all optional with defaults.
 *
 * Fix: the read-latest-then-write sequence is now wrapped in a Firestore
 * transaction so two concurrent requests cannot both observe the same
 * latest version and write duplicate versionNumbers.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const { description, changes, createdBy } = await request.json();
    const db = admin.firestore();

    const versionsRef = db
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('versions');

    // Allocate the next version number and write the doc atomically.
    const { versionId, versionNumber } = await db.runTransaction(async (tx) => {
      const latest = await tx.get(
        versionsRef.orderBy('versionNumber', 'desc').limit(1)
      );
      const current = latest.empty ? 0 : latest.docs[0].data().versionNumber;
      const next = current + 1;
      const versionRef = versionsRef.doc();
      tx.set(versionRef, {
        versionNumber: next,
        description: description || `Version ${next}`,
        changes: changes || {},
        createdBy: createdBy || 'system',
        createdAt: admin.firestore.FieldValue.serverTimestamp(),
      });
      return { versionId: versionRef.id, versionNumber: next };
    });

    return NextResponse.json({
      success: true,
      versionId,
      versionNumber,
    });
  } catch (error) {
    console.error('Error creating version:', error);
    return NextResponse.json(
      {
        error: 'Failed to create version',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Get the version history for a work item, newest first.
 * Firestore Timestamps are serialized to ISO strings for the client.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const db = admin.firestore();

    const snapshot = await db
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('versions')
      .orderBy('versionNumber', 'desc')
      .get();

    const versions = snapshot.docs.map((doc) => {
      const data = doc.data();
      return {
        id: doc.id,
        ...data,
        // createdAt may be unset while the server timestamp is pending.
        createdAt: data.createdAt?.toDate().toISOString(),
      };
    });

    return NextResponse.json({ versions, count: versions.length });
  } catch (error) {
    console.error('Error fetching versions:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch versions',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,139 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
import type { ProjectPhaseData, ProjectPhaseScores } from '@/lib/types/project-artifacts';
/**
 * Create a new project for the authenticated user.
 *
 * Auth: Firebase ID token via Authorization: Bearer header.
 * Body: projectName, slug and product.name are required; vision,
 * workspacePath, chatgptUrl and the github* fields are optional.
 * When workspacePath is given, any sessions flagged as needing project
 * association for that workspace are linked to the new project.
 *
 * Fix: required fields are now validated up front and reported as 400s —
 * previously a missing `product` object threw a TypeError when reading
 * `product.name` and surfaced as an opaque 500.
 */
export async function POST(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const body = await request.json();
    const {
      projectName,
      projectType,
      slug,
      vision,
      product,
      workspacePath, // Optional: if coming from association prompt
      chatgptUrl, // Optional: if from ChatGPT
      githubRepo, // Optional: if from GitHub
      githubRepoId,
      githubRepoUrl,
      githubDefaultBranch,
    } = body;
    // Validate required fields before touching Firestore.
    if (!projectName || !slug || !product?.name) {
      return NextResponse.json(
        { error: 'projectName, slug and product.name are required' },
        { status: 400 }
      );
    }
    // Check if slug is available
    const existingProject = await adminDb
      .collection('projects')
      .where('slug', '==', slug)
      .limit(1)
      .get();
    if (!existingProject.empty) {
      return NextResponse.json(
        { error: 'Project slug already exists' },
        { status: 400 }
      );
    }
    // Get user data (workspace name falls back to a default).
    const userDoc = await adminDb.collection('users').doc(userId).get();
    const userData = userDoc.data();
    const workspace = userData?.workspace || 'my-workspace';
    // Create project
    const projectRef = adminDb.collection('projects').doc();
    await projectRef.set({
      id: projectRef.id,
      name: projectName,
      slug,
      userId,
      workspace,
      projectType,
      productName: product.name,
      productVision: vision || '',
      isForClient: product.isForClient || false,
      hasLogo: product.hasLogo || false,
      hasDomain: product.hasDomain || false,
      hasWebsite: product.hasWebsite || false,
      hasGithub: !!githubRepo,
      hasChatGPT: !!chatgptUrl,
      workspacePath: workspacePath || null,
      workspaceName: workspacePath ? workspacePath.split('/').pop() : null,
      // GitHub data
      githubRepo: githubRepo || null,
      githubRepoId: githubRepoId || null,
      githubRepoUrl: githubRepoUrl || null,
      githubDefaultBranch: githubDefaultBranch || null,
      // ChatGPT data
      chatgptUrl: chatgptUrl || null,
      // Extension tracking
      extensionLinked: false,
      status: 'active',
      // Pipeline tracking
      currentPhase: 'collector',
      phaseStatus: 'not_started',
      phaseData: {} as ProjectPhaseData,
      phaseScores: {} as ProjectPhaseScores,
      createdAt: FieldValue.serverTimestamp(),
      updatedAt: FieldValue.serverTimestamp(),
    });
    console.log(`[API] Created project ${projectRef.id} (${slug})`);
    // If workspacePath provided, associate existing orphaned sessions.
    if (workspacePath) {
      const sessionsSnapshot = await adminDb
        .collection('sessions')
        .where('userId', '==', userId)
        .where('workspacePath', '==', workspacePath)
        .where('needsProjectAssociation', '==', true)
        .get();
      if (!sessionsSnapshot.empty) {
        // Batch all session updates into a single write.
        const batch = adminDb.batch();
        sessionsSnapshot.docs.forEach((doc) => {
          batch.update(doc.ref, {
            projectId: projectRef.id,
            needsProjectAssociation: false,
            updatedAt: FieldValue.serverTimestamp(),
          });
        });
        await batch.commit();
        console.log(`[API] Associated ${sessionsSnapshot.size} sessions with project`);
      }
    }
    return NextResponse.json({
      success: true,
      projectId: projectRef.id,
      slug,
      workspace,
    });
  } catch (error) {
    console.error('Error creating project:', error);
    return NextResponse.json(
      {
        error: 'Failed to create project',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,93 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
/**
 * Delete a project (soft delete - keeps sessions intact)
 * Sessions will remain in the database but projectId will be set to null
 *
 * Auth: Firebase ID token via the Authorization header; only the project
 * owner may delete.
 *
 * Returns { success, message, sessionsPreserved } on success; 401 for a
 * missing/invalid token, 400 for a missing projectId, 404 if the project
 * does not exist, 403 if it belongs to another user, 500 otherwise.
 */
export async function POST(request: Request) {
  try {
    // Require a Bearer token before doing any work.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      // Token failed verification (expired, malformed, wrong project, ...).
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    const { projectId } = await request.json();
    if (!projectId) {
      return NextResponse.json(
        { error: 'Project ID is required' },
        { status: 400 }
      );
    }
    // Verify project belongs to user
    const projectDoc = await adminDb.collection('projects').doc(projectId).get();
    if (!projectDoc.exists) {
      return NextResponse.json(
        { error: 'Project not found' },
        { status: 404 }
      );
    }
    if (projectDoc.data()?.userId !== userId) {
      return NextResponse.json(
        { error: 'Unauthorized to delete this project' },
        { status: 403 }
      );
    }
    // Delete the project document
    await adminDb.collection('projects').doc(projectId).delete();
    // Optional: Update sessions to remove project reference
    // This makes sessions "orphaned" but keeps all the data
    const sessionsSnapshot = await adminDb
      .collection('sessions')
      .where('projectId', '==', projectId)
      .get();
    if (!sessionsSnapshot.empty) {
      // Detach every session in a single batched write.
      const batch = adminDb.batch();
      sessionsSnapshot.docs.forEach((doc) => {
        batch.update(doc.ref, {
          projectId: null,
          // Flag these as needing reassignment if user wants to link them later
          needsProjectAssociation: true,
          updatedAt: FieldValue.serverTimestamp(),
        });
      });
      await batch.commit();
    }
    return NextResponse.json({
      success: true,
      message: 'Project deleted successfully',
      sessionsPreserved: sessionsSnapshot.size,
    });
  } catch (error) {
    console.error('[Project Delete] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to delete project',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,166 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
import type { ProjectPhase, PhaseStatus } from '@/lib/types/phases';
/**
 * GET - Return the current phase, status, phase data and history for a
 * project (POST below performs phase updates).
 *
 * Auth: Firebase ID token via Authorization header.
 * Query params: projectId (required).
 */
export async function GET(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];

    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    await adminAuth.verifyIdToken(idToken);

    const projectId = new URL(request.url).searchParams.get('projectId');
    if (!projectId) {
      return NextResponse.json({ error: 'Project ID required' }, { status: 400 });
    }

    const projectDoc = await adminDb.collection('projects').doc(projectId).get();
    if (!projectDoc.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectDoc.data();

    // Defaults cover projects created before phase tracking existed.
    return NextResponse.json({
      currentPhase: projectData?.currentPhase || 'gathering',
      phaseStatus: projectData?.phaseStatus || 'not_started',
      phaseData: projectData?.phaseData || {},
      phaseHistory: projectData?.phaseHistory || []
    });
  } catch (error) {
    console.error('Error getting project phase:', error);
    return NextResponse.json(
      { error: 'Failed to get phase', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}
/**
 * POST - Update a project's phase.
 *
 * Actions:
 *  - start:       begin a new phase (requires `phase`)
 *  - complete:    mark the current phase completed and append to history
 *  - save_data:   store arbitrary data under the current phase
 *  - add_insight: append a gathering insight (requires data.insight)
 *
 * Fix: the `complete` action previously placed FieldValue.serverTimestamp()
 * inside FieldValue.arrayUnion(); Firestore rejects server timestamps
 * inside array elements, so completing a phase always failed. The history
 * entry now records a concrete Date. Also drops the unused `userId` local.
 */
export async function POST(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    await adminAuth.verifyIdToken(idToken);
    const body = await request.json();
    const { projectId, action, phase, data } = body;
    if (!projectId || !action) {
      return NextResponse.json(
        { error: 'projectId and action are required' },
        { status: 400 }
      );
    }
    const projectRef = adminDb.collection('projects').doc(projectId);
    const projectDoc = await projectRef.get();
    if (!projectDoc.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectDoc.data();
    // Handle different actions
    switch (action) {
      case 'start': {
        // Start a new phase
        if (!phase) {
          return NextResponse.json({ error: 'phase required for start action' }, { status: 400 });
        }
        await projectRef.update({
          currentPhase: phase,
          phaseStatus: 'in_progress',
          [`phaseData.${phase}.startedAt`]: FieldValue.serverTimestamp(),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Started ${phase} for project ${projectId}`);
        return NextResponse.json({ success: true, phase, status: 'in_progress' });
      }
      case 'complete': {
        // Complete current phase
        const currentPhase = projectData?.currentPhase || 'gathering';
        await projectRef.update({
          phaseStatus: 'completed',
          [`phaseData.${currentPhase}.completedAt`]: FieldValue.serverTimestamp(),
          phaseHistory: FieldValue.arrayUnion({
            phase: currentPhase,
            // serverTimestamp() is not permitted inside array elements,
            // so the history entry uses a concrete client-side timestamp.
            completedAt: new Date()
          }),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Completed ${currentPhase} for project ${projectId}`);
        return NextResponse.json({ success: true, phase: currentPhase, status: 'completed' });
      }
      case 'save_data': {
        // Save phase-specific data (insights, vision board, etc.)
        const currentPhase = projectData?.currentPhase || 'gathering';
        await projectRef.update({
          [`phaseData.${currentPhase}.data`]: data,
          [`phaseData.${currentPhase}.lastUpdated`]: FieldValue.serverTimestamp(),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Saved data for ${currentPhase} in project ${projectId}`);
        return NextResponse.json({ success: true, phase: currentPhase });
      }
      case 'add_insight': {
        // Add a gathering insight
        if (!data || !data.insight) {
          return NextResponse.json({ error: 'insight data required' }, { status: 400 });
        }
        await projectRef.update({
          'phaseData.gathering.insights': FieldValue.arrayUnion(data),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Added insight to project ${projectId}`);
        return NextResponse.json({ success: true });
      }
      default:
        return NextResponse.json({ error: 'Invalid action' }, { status: 400 });
    }
  } catch (error) {
    console.error('Error updating project phase:', error);
    return NextResponse.json(
      { error: 'Failed to update phase', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,73 @@
import { NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
/**
 * Associate all unassigned sessions from a workspace with a project, and
 * backfill the project's workspacePath when it is not yet set.
 *
 * Body: { workspacePath, projectId, userId } — all required.
 *
 * NOTE(review): userId is taken from the request body rather than from a
 * verified auth token — confirm this route is only reachable by trusted
 * callers, otherwise any caller could act on another user's id.
 */
export async function POST(request: Request) {
  try {
    const body = await request.json();
    const { workspacePath, projectId, userId } = body;
    if (!workspacePath || !projectId || !userId) {
      return NextResponse.json(
        { error: 'Missing required fields' },
        { status: 400 }
      );
    }
    // Verify the project belongs to the user
    const projectDoc = await adminDb.collection('projects').doc(projectId).get();
    if (!projectDoc.exists || projectDoc.data()?.userId !== userId) {
      return NextResponse.json(
        { error: 'Project not found or unauthorized' },
        { status: 403 }
      );
    }
    // Update all sessions with this workspace path to associate with the project
    const sessionsSnapshot = await adminDb
      .collection('sessions')
      .where('userId', '==', userId)
      .where('workspacePath', '==', workspacePath)
      .where('needsProjectAssociation', '==', true)
      .get();
    // Apply all session updates in a single batched write.
    const batch = adminDb.batch();
    let count = 0;
    sessionsSnapshot.docs.forEach((doc: FirebaseFirestore.QueryDocumentSnapshot) => {
      batch.update(doc.ref, {
        projectId,
        needsProjectAssociation: false,
        updatedAt: FieldValue.serverTimestamp(),
      });
      count++;
    });
    await batch.commit();
    // Update the project's workspace path if not set
    if (!projectDoc.data()?.workspacePath) {
      await projectDoc.ref.update({
        workspacePath,
        updatedAt: FieldValue.serverTimestamp(),
      });
    }
    return NextResponse.json({
      success: true,
      sessionsUpdated: count,
      message: `Associated ${count} sessions with project`,
    });
  } catch (error) {
    console.error('Error associating sessions:', error);
    return NextResponse.json(
      {
        error: 'Failed to associate sessions',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

62
app/api/sessions/route.ts Normal file
View File

@@ -0,0 +1,62 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
 * List recent sessions, optionally filtered by projectId.
 *
 * Query params:
 *  - projectId: restrict results to one project
 *  - limit: maximum number of rows (default 10)
 *
 * Fix: the `limit` query param is now validated — a non-numeric or
 * non-positive value previously produced `.limit(NaN)`, which threw and
 * made the route silently return an empty list.
 *
 * On any error this deliberately returns [] (best-effort endpoint for the
 * dashboard) rather than an error status.
 */
export async function GET(request: Request) {
  try {
    const { searchParams } = new URL(request.url);
    const projectId = searchParams.get('projectId');
    // Guard against NaN / non-positive limits; fall back to the default.
    const rawLimit = parseInt(searchParams.get('limit') || '10', 10);
    const limit = Number.isFinite(rawLimit) && rawLimit > 0 ? rawLimit : 10;
    console.log(`[API] Fetching sessions for project ${projectId}, limit ${limit}`);
    const adminDb = getAdminDb();
    let sessionsQuery = adminDb.collection('sessions');
    // Filter by projectId if provided
    if (projectId) {
      sessionsQuery = sessionsQuery.where('projectId', '==', projectId) as any;
    }
    const sessionsSnapshot = await sessionsQuery
      .orderBy('createdAt', 'desc')
      .limit(limit)
      .get();
    // Map Firestore docs onto the dashboard's session shape (several fields
    // are duplicated under legacy names, e.g. tokensUsed/total_tokens).
    const sessions = sessionsSnapshot.docs.map(doc => {
      const data = doc.data();
      return {
        id: doc.id,
        session_id: doc.id,
        projectId: data.projectId,
        userId: data.userId,
        workspacePath: data.workspacePath,
        workspaceName: data.workspaceName,
        startTime: data.startTime,
        endTime: data.endTime,
        duration: data.duration,
        duration_minutes: data.duration ? Math.round(data.duration / 60) : 0,
        tokensUsed: data.tokensUsed || 0,
        total_tokens: data.tokensUsed || 0,
        cost: data.cost || 0,
        estimated_cost_usd: data.cost || 0,
        model: data.model || 'unknown',
        primary_ai_model: data.model || 'unknown',
        filesModified: data.filesModified || [],
        summary: data.conversationSummary || null,
        message_count: data.messageCount || 0,
        ide_name: 'Cursor',
        github_branch: data.githubBranch || null,
        conversation: data.conversation || [],
        file_changes: data.fileChanges || [],
        createdAt: data.createdAt,
        last_updated: data.updatedAt || data.createdAt,
      };
    });
    console.log(`[API] Found ${sessions.length} sessions from Firebase`);
    return NextResponse.json(sessions);
  } catch (error) {
    console.error('[API] Error fetching sessions:', error);
    return NextResponse.json([]);
  }
}

View File

@@ -0,0 +1,112 @@
import { NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { FieldValue, Timestamp } from 'firebase-admin/firestore';
/**
 * Ingest a tracked IDE session from the extension (API-key authenticated).
 *
 * Body: { apiKey, sessionData } — sessionData.startTime must be a parseable
 * date. Sessions are auto-associated with a project when the workspace path
 * matches one of the user's projects; otherwise they are flagged for manual
 * association.
 *
 * Fix: sessionData and its startTime are now validated up front — a missing
 * or unparseable startTime previously made Timestamp.fromMillis(NaN) throw
 * and surfaced as a 500 instead of a 400. An unparseable endTime is stored
 * as null instead of throwing.
 */
export async function POST(request: Request) {
  try {
    const body = await request.json();
    const { apiKey, sessionData } = body;
    if (!apiKey) {
      return NextResponse.json(
        { error: 'API key is required' },
        { status: 401 }
      );
    }
    if (!sessionData || typeof sessionData !== 'object') {
      return NextResponse.json(
        { error: 'sessionData is required' },
        { status: 400 }
      );
    }
    const startMillis = new Date(sessionData.startTime).getTime();
    if (!Number.isFinite(startMillis)) {
      return NextResponse.json(
        { error: 'sessionData.startTime must be a valid date' },
        { status: 400 }
      );
    }
    // Verify API key and get userId
    const keyDoc = await adminDb.collection('apiKeys').doc(apiKey).get();
    if (!keyDoc.exists || !keyDoc.data()?.isActive) {
      return NextResponse.json(
        { error: 'Invalid or inactive API key' },
        { status: 401 }
      );
    }
    const userId = keyDoc.data()!.userId;
    if (!userId) {
      return NextResponse.json(
        { error: 'User not found for API key' },
        { status: 401 }
      );
    }
    // Update last used timestamp
    await keyDoc.ref.update({
      lastUsed: FieldValue.serverTimestamp(),
    });
    // Check if workspace has an associated project
    let projectId = sessionData.projectId || null;
    let needsProjectAssociation = false;
    if (!projectId && sessionData.workspacePath) {
      // Try to find a project with this workspace path
      const projectsSnapshot = await adminDb
        .collection('projects')
        .where('userId', '==', userId)
        .where('workspacePath', '==', sessionData.workspacePath)
        .limit(1)
        .get();
      if (!projectsSnapshot.empty) {
        // Found a matching project, auto-associate
        projectId = projectsSnapshot.docs[0].id;
        console.log(`✅ Auto-associated session with project: ${projectId}`);
      } else {
        // No matching project found, flag for user action
        needsProjectAssociation = true;
        console.log(`⚠️ New workspace detected: ${sessionData.workspacePath}`);
      }
    }
    // Guard endTime the same way: ignore unparseable/absent values.
    const endMillis = sessionData.endTime
      ? new Date(sessionData.endTime).getTime()
      : NaN;
    // Create session document
    const sessionRef = adminDb.collection('sessions').doc();
    await sessionRef.set({
      id: sessionRef.id,
      userId,
      projectId,
      // Session data
      startTime: Timestamp.fromMillis(startMillis),
      endTime: Number.isFinite(endMillis) ? Timestamp.fromMillis(endMillis) : null,
      duration: sessionData.duration || null,
      // Project context
      workspacePath: sessionData.workspacePath || null,
      workspaceName: sessionData.workspacePath ? sessionData.workspacePath.split('/').pop() : null,
      needsProjectAssociation,
      // AI usage
      model: sessionData.model || 'unknown',
      tokensUsed: sessionData.tokensUsed || 0,
      cost: sessionData.cost || 0,
      // Context
      filesModified: sessionData.filesModified || [],
      conversationSummary: sessionData.conversationSummary || null,
      conversation: sessionData.conversation || [],
      messageCount: sessionData.conversation?.length || 0,
      createdAt: FieldValue.serverTimestamp(),
    });
    return NextResponse.json({
      success: true,
      sessionId: sessionRef.id,
      message: 'Session tracked successfully',
    });
  } catch (error) {
    console.error('Error tracking session:', error);
    return NextResponse.json(
      {
        error: 'Failed to track session',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

70
app/api/stats/route.ts Normal file
View File

@@ -0,0 +1,70 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import type { DashboardStats } from '@/lib/types';
/**
 * Aggregate dashboard stats (sessions, cost, tokens, duration, completed
 * work items), optionally scoped to one project via the projectId query
 * param. On failure this fails soft and returns zeroed stats.
 */
export async function GET(request: Request) {
  try {
    const { searchParams } = new URL(request.url);
    const projectId = searchParams.get('projectId');
    console.log(`[API] Fetching stats for project ${projectId}`);
    const adminDb = getAdminDb();

    // Query sessions, scoped to the project when one was given.
    let sessionsQuery = adminDb.collection('sessions');
    if (projectId) {
      sessionsQuery = sessionsQuery.where('projectId', '==', projectId) as any;
    }
    const sessionsSnapshot = await sessionsQuery.get();

    // Sum usage across all matching sessions.
    let totalSessions = 0;
    let totalCost = 0;
    let totalTokens = 0;
    let totalDuration = 0;
    for (const doc of sessionsSnapshot.docs) {
      const data = doc.data();
      totalSessions += 1;
      totalCost += data.cost || 0;
      totalTokens += data.tokensUsed || 0;
      totalDuration += data.duration || 0;
    }

    // Completed work items, scoped the same way.
    let workQuery = adminDb.collection('workCompleted');
    if (projectId) {
      workQuery = workQuery.where('projectId', '==', projectId) as any;
    }
    const workSnapshot = await workQuery.get();
    const workCompleted = workSnapshot.size;

    const stats: DashboardStats = {
      totalSessions,
      totalCost,
      totalTokens,
      totalFeatures: workCompleted,
      completedFeatures: workCompleted,
      totalDuration: Math.round(totalDuration / 60), // Convert to minutes
    };
    console.log(`[API] Stats fetched successfully:`, stats);
    return NextResponse.json(stats);
  } catch (error) {
    console.error('[API] Error fetching stats:', error);
    // Fail soft: the dashboard renders zeros rather than an error state.
    const emptyStats: DashboardStats = {
      totalSessions: 0,
      totalCost: 0,
      totalTokens: 0,
      totalFeatures: 0,
      completedFeatures: 0,
      totalDuration: 0,
    };
    return NextResponse.json(emptyStats);
  }
}

View File

@@ -0,0 +1,37 @@
import { NextResponse } from 'next/server';
import { auth } from '@/lib/firebase/config';
import { adminAuth } from '@/lib/firebase/admin';
/**
 * Debug endpoint: check that a client ID token round-trips through the
 * Admin SDK and that both sides agree on the user's uid.
 *
 * NOTE(review): this reads the *client* SDK's auth.currentUser inside a
 * server route handler. On the server there is no signed-in client user,
 * so auth.currentUser is expected to be null and this route will return
 * 401 in practice — confirm intent; a server-side check would normally
 * read the token from the Authorization header instead.
 */
export async function GET() {
  try {
    // Get current user from client-side auth
    const user = auth.currentUser;
    if (!user) {
      return NextResponse.json({ error: 'Not authenticated' }, { status: 401 });
    }
    // Get ID token
    const token = await user.getIdToken();
    console.log('Token length:', token.length);
    console.log('User UID:', user.uid);
    // Try to verify it with Admin SDK
    const decodedToken = await adminAuth.verifyIdToken(token);
    return NextResponse.json({
      success: true,
      clientUid: user.uid,
      decodedUid: decodedToken.uid,
      // True when client and Admin SDK agree on the user identity.
      match: user.uid === decodedToken.uid,
    });
  } catch (error) {
    console.error('Token verification error:', error);
    return NextResponse.json({
      error: 'Token verification failed',
      details: error instanceof Error ? error.message : String(error),
    }, { status: 500 });
  }
}

View File

@@ -0,0 +1,76 @@
import { NextResponse } from 'next/server';
import { adminAuth, adminDb } from '@/lib/firebase/admin';
import { v4 as uuidv4 } from 'uuid';
import { FieldValue } from 'firebase-admin/firestore';
/**
 * Get (or lazily create) the caller's extension API key.
 *
 * Auth: Firebase ID token via the Authorization: Bearer header. Returns an
 * existing key from the user doc when present; otherwise mints a
 * `vibn_<uuid>` key, stores it in apiKeys, and references it on the user.
 *
 * Fix: the newly minted API key is no longer written to the server logs —
 * API keys are long-lived credentials and must not end up in log storage.
 */
export async function GET(request: Request) {
  try {
    console.log('[API] Getting API key...');
    // Get the authorization header
    const authHeader = request.headers.get('authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      console.error('[API] No authorization header');
      return NextResponse.json(
        { error: 'No authorization token provided' },
        { status: 401 }
      );
    }
    const token = authHeader.substring(7);
    console.log('[API] Token received, verifying...');
    // Verify the Firebase ID token
    const decodedToken = await adminAuth.verifyIdToken(token);
    const userId = decodedToken.uid;
    console.log('[API] Token verified, userId:', userId);
    // Check if user already has an API key
    console.log('[API] Checking for existing API key...');
    const userDoc = await adminDb.collection('users').doc(userId).get();
    if (userDoc.exists && userDoc.data()?.apiKey) {
      console.log('[API] Found existing API key');
      return NextResponse.json({
        apiKey: userDoc.data()!.apiKey,
      });
    }
    // Generate new API key
    console.log('[API] Generating new API key...');
    const apiKey = `vibn_${uuidv4().replace(/-/g, '')}`;
    // Store API key document
    console.log('[API] Storing API key in Firestore...');
    await adminDb.collection('apiKeys').doc(apiKey).set({
      key: apiKey,
      userId,
      createdAt: FieldValue.serverTimestamp(),
      isActive: true,
    });
    // Update user document with API key reference (or create if doesn't exist)
    console.log('[API] Updating user document...');
    await adminDb.collection('users').doc(userId).set({
      apiKey,
      updatedAt: FieldValue.serverTimestamp(),
    }, { merge: true });
    // Deliberately do NOT log the key itself — it is a credential.
    console.log('[API] API key created successfully');
    return NextResponse.json({
      apiKey,
      isNew: true,
    });
  } catch (error) {
    console.error('[API] Error getting/creating API key:', error);
    console.error('[API] Error stack:', error instanceof Error ? error.stack : 'No stack trace');
    return NextResponse.json(
      {
        error: 'Failed to get API key',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,76 @@
import { NextResponse } from 'next/server';
import { v0 } from 'v0-sdk';
/**
 * Generate a UI design by creating a new v0.dev chat.
 *
 * Body: { prompt (required), style?, projectId? }. The style keyword picks
 * one of a few canned system-prompt flavors; unknown styles fall back to a
 * generic "modern, professional" instruction.
 *
 * NOTE(review): V0_API_KEY is checked for presence but never passed to the
 * SDK call — presumably the v0 SDK reads it from the environment itself;
 * confirm. `projectId` is accepted but currently unused in this handler.
 */
export async function POST(request: Request) {
  try {
    const { prompt, style, projectId } = await request.json();
    if (!prompt) {
      return NextResponse.json(
        { error: 'Prompt is required' },
        { status: 400 }
      );
    }
    // Check for API key
    const apiKey = process.env.V0_API_KEY;
    if (!apiKey) {
      return NextResponse.json(
        { error: 'V0_API_KEY not configured' },
        { status: 500 }
      );
    }
    // Create system message based on style
    const styleInstructions: Record<string, string> = {
      modern: 'Create a modern, sleek design with clean lines and contemporary aesthetics',
      minimal: 'Create a minimal, clean design with lots of whitespace and simple elements',
      colorful: 'Create a vibrant, colorful design with bold colors and energetic feel',
      dark: 'Create a dark mode design with dark backgrounds and light text',
      glassmorphism: 'Create a glassmorphism design with frosted glass effects and transparency',
    };
    const styleInstruction = style && styleInstructions[style]
      ? styleInstructions[style]
      : 'Create a modern, professional design';
    const systemMessage = `You are an expert React and Tailwind CSS developer. ${styleInstruction}. Use Next.js conventions and best practices. Make it responsive and accessible.`;
    // Create a new chat with v0
    const chat = await v0.chats.create({
      message: prompt,
      system: systemMessage,
    });
    // Type guard to check if chat has the expected structure
    if (!('id' in chat) || !('messages' in chat)) {
      throw new Error('Unexpected response format from v0 API');
    }
    console.log(`[v0] Chat created: ${chat.id}`);
    // Extract the generated code from the latest message
    const latestMessage = chat.messages[chat.messages.length - 1];
    const code = latestMessage?.content || '';
    // Return the chat details
    return NextResponse.json({
      success: true,
      chatId: chat.id,
      webUrl: chat.webUrl,
      code,
      message: latestMessage,
    });
  } catch (error) {
    console.error('[v0] Error generating design:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate design',
        details: error instanceof Error ? error.message : 'Unknown error'
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,101 @@
import { NextResponse } from 'next/server';
import { v0 } from 'v0-sdk';
/**
 * POST /api/v0/import-chat
 *
 * Saves a reference to an existing v0.dev chat. Accepts either a raw
 * `chatId` or a full `chatUrl`; when only a URL is given, the id is parsed
 * out of its `/chat/<id>` path segment.
 *
 * Note: the v0 SDK offers no endpoint for fetching chat history, so this
 * handler only records the link — it does not pull any chat content.
 */
export async function POST(request: Request) {
  try {
    const { chatId, chatUrl } = await request.json();

    // Reject requests that identify no chat at all.
    if (!chatId && !chatUrl) {
      return NextResponse.json(
        { error: 'Either chatId or chatUrl is required' },
        { status: 400 }
      );
    }

    // The route is unusable without server-side v0 credentials.
    if (!process.env.V0_API_KEY) {
      return NextResponse.json(
        { error: 'V0_API_KEY not configured' },
        { status: 500 }
      );
    }

    // Prefer an explicit id; otherwise pull it from the URL.
    // v0.dev URLs look like: https://v0.dev/chat/abc123xyz
    let extractedChatId = chatId;
    if (!chatId && chatUrl) {
      const idMatch = /\/chat\/([^/?]+)/.exec(chatUrl);
      if (!idMatch) {
        return NextResponse.json(
          { error: 'Invalid v0 chat URL format' },
          { status: 400 }
        );
      }
      extractedChatId = idMatch[1];
    }

    console.log(`[v0] Attempting to import chat: ${extractedChatId}`);

    // The v0 SDK doesn't support retrieving individual chats by ID;
    // we only persist a link the user can open in the browser.
    const fullChatUrl = chatUrl || `https://v0.dev/chat/${extractedChatId}`;
    console.log(`[v0] Importing chat reference: ${extractedChatId}`);

    return NextResponse.json({
      success: true,
      chat: {
        id: extractedChatId,
        webUrl: fullChatUrl,
        message: 'Chat link saved. You can access it via the web URL.',
        note: 'The v0 API does not currently support retrieving chat history via API. Use the web URL to view and continue the conversation.'
      },
      message: 'Chat reference saved successfully'
    });
  } catch (error) {
    console.error('[v0] Error importing chat:', error);
    return NextResponse.json(
      {
        error: 'Failed to import chat',
        details: error instanceof Error ? error.message : 'Unknown error'
      },
      { status: 500 }
    );
  }
}
// Also support GET for testing
export async function GET(request: Request) {
  const params = new URL(request.url).searchParams;
  const chatId = params.get('chatId');
  const chatUrl = params.get('chatUrl');

  // With no identifier at all, answer with self-documenting usage help.
  if (!chatId && !chatUrl) {
    return NextResponse.json({
      message: 'Import a v0 chat',
      usage: 'POST /api/v0/import-chat with { "chatId": "abc123" } or { "chatUrl": "https://v0.dev/chat/abc123" }',
      example: {
        method: 'POST',
        body: {
          chatUrl: 'https://v0.dev/chat/your-chat-id'
        }
      }
    });
  }

  // Forward to the POST handler by synthesizing an equivalent JSON request.
  const forwarded = new Request(request.url, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ chatId, chatUrl }),
  });
  return POST(forwarded);
}

View File

@@ -0,0 +1,41 @@
import { NextResponse } from 'next/server';
import { v0 } from 'v0-sdk';
/**
 * POST /api/v0/iterate
 *
 * Accepts a follow-up message for an existing v0 chat. Because the v0 SDK
 * has no API for continuing a conversation, this handler simply echoes the
 * message back together with the chat's web URL so the user can iterate on
 * v0.dev directly.
 */
export async function POST(request: Request) {
  try {
    // Callers may also send `projectId`; it is not used by this handler.
    const { chatId, message } = await request.json();

    if (!chatId || !message) {
      return NextResponse.json(
        { error: 'Missing required fields: chatId and message' },
        { status: 400 }
      );
    }

    console.log(`[API] Iterate request for chat ${chatId}`);

    // The v0 SDK doesn't support sending follow-up messages via API;
    // users need to continue the conversation on v0.dev.
    return NextResponse.json({
      success: true,
      chatId: chatId,
      webUrl: `https://v0.dev/chat/${chatId}`,
      message: 'To continue this conversation, please visit the chat on v0.dev',
      note: 'The v0 API does not currently support sending follow-up messages. Use the web interface to iterate on your design.',
      yourMessage: message,
    });
  } catch (error) {
    console.error('[API] Error processing iteration request:', error);
    return NextResponse.json(
      {
        error: error instanceof Error ? error.message : 'Failed to process request',
        details: error instanceof Error ? error.stack : undefined,
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,68 @@
import { NextResponse } from 'next/server';
import { v0 } from 'v0-sdk';
/**
 * GET /api/v0/chats
 *
 * Best-effort discovery endpoint: probes the v0 SDK for a way to list
 * existing chats and, failing that, projects. Neither method is part of the
 * typed SDK surface (only chats.create() is documented), hence the runtime
 * probing with @ts-ignore and nested try/catch.
 */
export async function GET(request: Request) {
  try {
    // The route is unusable without server-side v0 credentials.
    if (!process.env.V0_API_KEY) {
      return NextResponse.json(
        { error: 'V0_API_KEY not configured' },
        { status: 500 }
      );
    }

    console.log('[v0] Attempting to list chats...');

    // First probe: chats.list() — may not exist at runtime.
    try {
      // @ts-ignore - Checking if this method exists
      const chats = await v0.chats.list();
      console.log('[v0] Chats retrieved:', chats);
      return NextResponse.json({
        success: true,
        chats,
        count: chats?.length || 0,
      });
    } catch (listError) {
      console.error('[v0] List method error:', listError);

      // Second probe: fall back to listing projects instead of chats.
      try {
        // @ts-ignore - Checking if this method exists
        const projects = await v0.projects?.list();
        console.log('[v0] Projects retrieved:', projects);
        return NextResponse.json({
          success: true,
          projects,
          count: projects?.length || 0,
          note: 'Retrieved projects instead of chats'
        });
      } catch (projectError) {
        // Both probes failed — report the original list error to the caller.
        console.error('[v0] Projects error:', projectError);
        return NextResponse.json({
          error: 'Unable to list chats or projects',
          details: 'The v0 SDK may not support listing existing chats via API',
          suggestion: 'You may need to manually provide chat IDs to import existing designs',
          sdkError: listError instanceof Error ? listError.message : 'Unknown error'
        }, { status: 400 });
      }
    }
  } catch (error) {
    console.error('[v0] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to access v0 API',
        details: error instanceof Error ? error.message : 'Unknown error',
        tip: 'The v0 SDK primarily supports creating new chats. To import existing designs, you may need to provide specific chat IDs or URLs.'
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,120 @@
import { NextRequest, NextResponse } from "next/server";
import { adminDb } from "@/lib/firebase/admin";
import { getAuth } from "firebase-admin/auth";
/**
 * Partial patch applied to a project's vision board document.
 *
 * Every section and every field is optional so callers can send only the
 * answers that changed; the POST handler below merges each provided section
 * into the stored `visionBoard/current` document.
 */
interface VisionBoardUpdate {
  /** Why the project exists and what change it aims to create. */
  vision?: {
    problemSolving?: string;
    changeCreated?: string;
  };
  /** Who the product is for and where to reach them. */
  targetUser?: {
    who?: string;
    whereTheyHangOut?: string;
  };
  /** The user need being addressed and the benefit delivered. */
  needs?: {
    problemSolved?: string;
    benefitProvided?: string;
  };
  /** What the product is and how it differs from alternatives. */
  product?: {
    description?: string;
    differentiation?: string;
  };
  /** Early validation milestones and the pricing hypothesis. */
  validationGoals?: {
    firstUser?: string;
    pathTo10Users?: string;
    pricing?: string;
  };
}
/**
 * POST — update a project's vision board.
 *
 * Verifies the caller's Firebase ID token, checks they own the project, then
 * applies a partial {@link VisionBoardUpdate} to the project's
 * `visionBoard/current` document. Each section is shallow-merged so a caller
 * can change one answer without wiping its siblings; if the document does
 * not exist yet it is created with empty defaults for omitted fields.
 *
 * Responses: 401 missing/invalid Bearer token, 400 bad body,
 * 404 unknown project, 403 project owned by another user, 500 on failure.
 */
export async function POST(request: NextRequest) {
  try {
    // --- Authentication: require a Firebase ID token in the Bearer header ---
    const authHeader = request.headers.get("authorization");
    if (!authHeader?.startsWith("Bearer ")) {
      return NextResponse.json({ error: "Unauthorized" }, { status: 401 });
    }
    const token = authHeader.split("Bearer ")[1];
    const decodedToken = await getAuth().verifyIdToken(token);
    const userId = decodedToken.uid;

    const { projectId, updates } = (await request.json()) as {
      projectId: string;
      updates: VisionBoardUpdate;
    };
    if (!projectId || !updates) {
      return NextResponse.json(
        { error: "Missing projectId or updates" },
        { status: 400 }
      );
    }

    // --- Authorization: project must exist and belong to the caller ---
    const projectRef = adminDb.collection("projects").doc(projectId);
    const projectSnap = await projectRef.get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: "Project not found" }, { status: 404 });
    }
    const projectData = projectSnap.data();
    if (projectData?.userId !== userId) {
      return NextResponse.json({ error: "Forbidden" }, { status: 403 });
    }

    // --- Merge into (or create) the vision board document ---
    const visionRef = projectRef.collection("visionBoard").doc("current");
    const visionSnap = await visionRef.get();
    const now = new Date();

    // BUG FIX: in firebase-admin, DocumentSnapshot.exists is a *property*
    // (unlike the client SDK where it is a method). The previous
    // `visionSnap.exists()` threw a TypeError at runtime, so updating an
    // existing board always fell through to the 500 handler. This also
    // matches the `projectSnap.exists` usage above.
    if (visionSnap.exists) {
      // Merge updates section-by-section so untouched fields survive.
      const currentData = visionSnap.data();
      const mergedData = {
        ...currentData,
        ...updates,
        vision: { ...currentData?.vision, ...updates.vision },
        targetUser: { ...currentData?.targetUser, ...updates.targetUser },
        needs: { ...currentData?.needs, ...updates.needs },
        product: { ...currentData?.product, ...updates.product },
        validationGoals: {
          ...currentData?.validationGoals,
          ...updates.validationGoals,
        },
        updatedAt: now,
      };
      await visionRef.update(mergedData);
      return NextResponse.json({ success: true, data: mergedData });
    } else {
      // Create a new vision board, filling omitted sections with blanks.
      const newData = {
        vision: updates.vision || { problemSolving: "", changeCreated: "" },
        targetUser: updates.targetUser || { who: "", whereTheyHangOut: "" },
        needs: updates.needs || { problemSolved: "", benefitProvided: "" },
        product: updates.product || { description: "", differentiation: "" },
        validationGoals: updates.validationGoals || {
          firstUser: "",
          pathTo10Users: "",
          pricing: "",
        },
        createdAt: now,
        updatedAt: now,
      };
      await visionRef.set(newData);
      return NextResponse.json({ success: true, data: newData });
    }
  } catch (error) {
    console.error("Vision board update error:", error);
    return NextResponse.json(
      {
        error: "Failed to update vision board",
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,42 @@
import { NextResponse } from 'next/server';
import { query } from '@/lib/db';
import type { WorkCompleted } from '@/lib/types';
/**
 * GET /api/work-completed?projectId=<id>&limit=<n>
 *
 * Returns the most recent work_completed rows for a project, joined with
 * their originating session, newest first.
 *
 * Query params:
 *  - projectId: project id; defaults to 1 when absent.
 *  - limit: max rows to return. Non-numeric or non-positive values now fall
 *    back to 20 — previously the raw string was passed to Postgres verbatim,
 *    so `?limit=abc` produced a database error and a 500 response.
 */
export async function GET(request: Request) {
  try {
    const { searchParams } = new URL(request.url);
    const projectId = searchParams.get('projectId');

    // Sanitize the limit: accept only a positive integer, else default to 20.
    const requestedLimit = Number.parseInt(searchParams.get('limit') ?? '', 10);
    const limit =
      Number.isInteger(requestedLimit) && requestedLimit > 0 ? requestedLimit : 20;

    const workItems = await query<WorkCompleted>(
      `SELECT
        wc.*,
        s.session_id,
        s.primary_ai_model,
        s.duration_minutes
      FROM work_completed wc
      LEFT JOIN sessions s ON wc.session_id = s.id
      WHERE wc.project_id = $1
      ORDER BY wc.completed_at DESC
      LIMIT $2`,
      [projectId || 1, limit]
    );

    // files_modified may arrive as a JSON string from Postgres; decode it so
    // clients always receive the parsed value. (The typeof guard narrows the
    // value, so the previous `as any` cast was unnecessary.)
    const parsedWork = workItems.map(item => ({
      ...item,
      files_modified: typeof item.files_modified === 'string'
        ? JSON.parse(item.files_modified)
        : item.files_modified,
    }));

    return NextResponse.json(parsedWork);
  } catch (error) {
    console.error('Error fetching work completed:', error);
    return NextResponse.json(
      { error: 'Failed to fetch work completed' },
      { status: 500 }
    );
  }
}