VIBN Frontend for Coolify deployment

This commit is contained in:
2026-02-15 19:25:52 -08:00
commit 40bf8428cd
398 changed files with 76513 additions and 0 deletions

View File

@@ -0,0 +1,84 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * GET handler: returns every tracked extension session for a project plus
 * derived activity metrics (top 50 edited files, sessions per day).
 *
 * Response shape: { totalSessions, sessions, fileActivity, dailyActivity }.
 * NOTE(review): unlike the associate-github-sessions route, no Firebase ID
 * token / ownership check is performed here — confirm this endpoint is
 * protected upstream (middleware or deployment config).
 */
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// Get all sessions for this project
const sessionsSnapshot = await adminDb
.collection('sessions')
.where('projectId', '==', projectId)
.get();
// Normalize each session doc. Firestore Timestamps are converted via
// toDate() when present; plain (already-serialized) values pass through.
const sessions = sessionsSnapshot.docs
.map(doc => {
const data = doc.data();
return {
id: doc.id,
startTime: data.startTime?.toDate?.() || data.startTime,
endTime: data.endTime?.toDate?.() || data.endTime,
duration: data.duration || 0,
filesModified: data.filesModified || [],
conversationSummary: data.conversationSummary || '',
workspacePath: data.workspacePath || '',
conversation: data.conversation || []
};
})
// Oldest first; sessions with no startTime sort to the front (epoch 0).
.sort((a, b) => {
const aTime = a.startTime ? new Date(a.startTime).getTime() : 0;
const bTime = b.startTime ? new Date(b.startTime).getTime() : 0;
return aTime - bTime;
});
// Analyze activity
// fileActivity: per-file edit count plus the distinct (UTC) days touched.
// dailyActivity: number of sessions that started on each calendar day.
const fileActivity: Record<string, { count: number; dates: string[] }> = {};
const dailyActivity: Record<string, number> = {};
sessions.forEach(session => {
if (!session.startTime) return; // undated sessions are excluded from metrics
const date = new Date(session.startTime).toISOString().split('T')[0];
dailyActivity[date] = (dailyActivity[date] || 0) + 1;
session.filesModified.forEach((file: string) => {
if (!fileActivity[file]) {
fileActivity[file] = { count: 0, dates: [] };
}
fileActivity[file].count++;
if (!fileActivity[file].dates.includes(date)) {
fileActivity[file].dates.push(date);
}
});
});
// Get top files
// Top 50 files by total edit count, descending.
const topFiles = Object.entries(fileActivity)
.map(([file, data]) => ({ file, ...data }))
.sort((a, b) => b.count - a.count)
.slice(0, 50);
return NextResponse.json({
totalSessions: sessions.length,
sessions,
fileActivity: topFiles,
dailyActivity: Object.entries(dailyActivity)
.map(([date, count]) => ({ date, sessionCount: count }))
.sort((a, b) => a.date.localeCompare(b.date))
});
} catch (error) {
console.error('Error fetching activity:', error);
return NextResponse.json(
{
error: 'Failed to fetch activity',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,28 @@
import { NextResponse } from 'next/server';
import { buildCanonicalProductModel } from '@/lib/server/product-model';
export async function POST(
_request: Request,
{ params }: { params: Promise<{ projectId: string }> },
) {
try {
const { projectId } = await params;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const canonicalProductModel = await buildCanonicalProductModel(projectId);
return NextResponse.json({ canonicalProductModel });
} catch (error) {
console.error('[aggregate] Failed to build canonical product model', error);
return NextResponse.json(
{
error: 'Failed to aggregate product signals',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,190 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
/**
 * Associate existing sessions with a project when GitHub is connected
 * Matches sessions by:
 * 1. githubRepo field (from Cursor extension)
 * 2. workspacePath (if repo name matches)
 *
 * Auth: requires a Firebase ID token (`Authorization: Bearer <token>`); the
 * project must belong to the authenticated user (403 otherwise).
 * Body: { githubRepo: "owner/repo", githubRepoUrl?: string }
 *   NOTE(review): githubRepoUrl is destructured but never used below.
 * Side effect: every matched session doc gets projectId set and
 * needsProjectAssociation cleared, in a single batched write.
 */
export async function POST(
request: Request,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// Reject requests without a bearer token before doing any work.
const authHeader = request.headers.get('Authorization');
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const idToken = authHeader.split('Bearer ')[1];
const adminAuth = getAdminAuth();
const adminDb = getAdminDb();
let userId: string;
try {
const decodedToken = await adminAuth.verifyIdToken(idToken);
userId = decodedToken.uid;
} catch (error) {
// Any verification failure (expired, malformed, wrong project) → 401.
return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
}
const { githubRepo, githubRepoUrl } = await request.json();
if (!githubRepo) {
return NextResponse.json(
{ error: 'githubRepo is required' },
{ status: 400 }
);
}
// Verify project belongs to user
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists || projectDoc.data()?.userId !== userId) {
return NextResponse.json(
{ error: 'Project not found or unauthorized' },
{ status: 403 }
);
}
const projectData = projectDoc.data();
const projectWorkspacePath = projectData?.workspacePath;
console.log(`[Associate GitHub Sessions] Project: ${projectId}`);
console.log(`[Associate GitHub Sessions] GitHub repo: ${githubRepo}`);
console.log(`[Associate GitHub Sessions] Project workspace path: ${projectWorkspacePath || 'not set'}`);
console.log(`[Associate GitHub Sessions] User ID: ${userId}`);
// Strategy 1: Match by exact githubRepo field in sessions
// (This requires the Cursor extension to send githubRepo with sessions)
// Only sessions still flagged needsProjectAssociation are candidates.
const sessionsSnapshot1 = await adminDb
.collection('sessions')
.where('userId', '==', userId)
.where('githubRepo', '==', githubRepo)
.where('needsProjectAssociation', '==', true)
.get();
console.log(`[Associate GitHub Sessions] Found ${sessionsSnapshot1.size} sessions with exact githubRepo match`);
// Strategy 2: Match by exact workspacePath (if project has one set)
let matchedByPath: any[] = [];
if (projectWorkspacePath) {
console.log(`[Associate GitHub Sessions] Strategy 2A: Exact workspace path match`);
console.log(`[Associate GitHub Sessions] Looking for sessions from: ${projectWorkspacePath}`);
const pathMatchSnapshot = await adminDb
.collection('sessions')
.where('userId', '==', userId)
.where('workspacePath', '==', projectWorkspacePath)
.where('needsProjectAssociation', '==', true)
.get();
matchedByPath = pathMatchSnapshot.docs;
console.log(`[Associate GitHub Sessions] Found ${matchedByPath.length} sessions with exact workspace path match`);
} else {
// Fallback: Match by repo name (less reliable but better than nothing)
console.log(`[Associate GitHub Sessions] Strategy 2B: Fuzzy match by repo folder name (project has no workspace path set)`);
const repoName = githubRepo.split('/')[1]; // Extract "my-app" from "username/my-app"
console.log(`[Associate GitHub Sessions] Looking for folders ending with: ${repoName}`);
// Firestore cannot query "path ends with <folder>", so the fuzzy match
// scans every unassociated session for this user and filters in memory.
const allUnassociatedSessions = await adminDb
.collection('sessions')
.where('userId', '==', userId)
.where('needsProjectAssociation', '==', true)
.get();
console.log(`[Associate GitHub Sessions] Total unassociated sessions for user: ${allUnassociatedSessions.size}`);
// A session matches when the last segment of its workspacePath equals
// the repo folder name.
matchedByPath = allUnassociatedSessions.docs.filter(doc => {
const workspacePath = doc.data().workspacePath;
if (!workspacePath) return false;
const pathSegments = workspacePath.split('/');
const lastSegment = pathSegments[pathSegments.length - 1];
const matches = lastSegment === repoName;
if (matches) {
console.log(`[Associate GitHub Sessions] ✅ Fuzzy match: ${workspacePath} ends with ${repoName}`);
}
return matches;
});
console.log(`[Associate GitHub Sessions] Found ${matchedByPath.length} sessions with fuzzy folder name match`);
// Debug: Log some example workspace paths to help diagnose
if (matchedByPath.length === 0 && allUnassociatedSessions.size > 0) {
console.log(`[Associate GitHub Sessions] Debug - Example workspace paths in unassociated sessions:`);
allUnassociatedSessions.docs.slice(0, 5).forEach(doc => {
const path = doc.data().workspacePath;
const folder = path ? path.split('/').pop() : 'null';
console.log(`  - ${path} (folder: ${folder})`);
});
console.log(`[Associate GitHub Sessions] Tip: Set project.workspacePath for accurate matching`);
}
}
// Combine both strategies (deduplicate by session ID)
const allMatchedSessions = new Map();
// Add exact matches
sessionsSnapshot1.docs.forEach(doc => {
allMatchedSessions.set(doc.id, doc);
});
// Add path matches
matchedByPath.forEach(doc => {
allMatchedSessions.set(doc.id, doc);
});
// Batch update all matched sessions
// NOTE(review): a Firestore batch is limited to 500 writes — confirm
// matched-session counts stay under that bound.
if (allMatchedSessions.size > 0) {
const batch = adminDb.batch();
let count = 0;
allMatchedSessions.forEach((doc) => {
batch.update(doc.ref, {
projectId,
needsProjectAssociation: false,
updatedAt: FieldValue.serverTimestamp(),
});
count++;
});
await batch.commit();
console.log(`[Associate GitHub Sessions] Successfully associated ${count} sessions with project ${projectId}`);
return NextResponse.json({
success: true,
sessionsAssociated: count,
message: `Found and linked ${count} existing chat sessions from this repository`,
details: {
exactMatches: sessionsSnapshot1.size,
pathMatches: matchedByPath.length,
}
});
}
return NextResponse.json({
success: true,
sessionsAssociated: 0,
message: 'No matching sessions found for this repository',
});
} catch (error) {
console.error('[Associate GitHub Sessions] Error:', error);
return NextResponse.json(
{
error: 'Failed to associate sessions',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,505 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { getApiUrl } from '@/lib/utils/api-url';
// Types
/**
 * One contiguous block of chat activity. Messages separated by more than
 * 4 hours are split into separate sessions (see analyzeTimeline below).
 */
interface WorkSession {
sessionId: string;
date: string; // YYYY-MM-DD (UTC) of the session start
startTime: Date;
endTime: Date;
duration: number; // minutes
messageCount: number;
userMessages: number;
aiMessages: number;
topics: string[]; // up to 3 distinct conversation names, in message order
filesWorkedOn: string[]; // unique attached file paths across the session
}
/** Aggregate chronology and velocity metrics across all messages. */
interface TimelineAnalysis {
firstActivity: Date | null; // null when there are no messages at all
lastActivity: Date | null;
totalDays: number; // calendar span between first and last activity
activeDays: number; // distinct days with at least one message
totalSessions: number;
sessions: WorkSession[];
velocity: {
messagesPerDay: number;
averageSessionLength: number; // minutes, rounded
peakProductivityHours: number[]; // up to 3 busiest hours of day (0-23)
};
}
/** Token and USD cost estimates derived from raw message text lengths. */
interface CostAnalysis {
messageStats: {
totalMessages: number;
userMessages: number;
aiMessages: number;
avgMessageLength: number; // characters, rounded
};
estimatedTokens: {
input: number;
output: number;
total: number;
};
costs: {
inputCost: number; // rounded to cents
outputCost: number;
totalCost: number;
currency: string;
};
model: string;
pricing: {
inputPer1M: number; // USD per 1M input tokens
outputPer1M: number; // USD per 1M output tokens
};
}
/** Catalogue entry describing a product feature and its routes. */
interface Feature {
name: string;
description: string;
pages: string[];
apis: string[];
status: 'complete' | 'in-progress' | 'planned';
}
/**
 * POST handler: generates a comprehensive audit report for a project.
 *
 * Data sources:
 *  - Cursor conversations + messages (Firestore subcollections) — required;
 *    404 when the project has none imported.
 *  - Extension activity sessions, Git history and the unified timeline —
 *    best-effort; failures are logged and the report continues without them.
 *
 * Derived outputs: session/velocity timeline analysis, estimated AI token
 * costs, static feature list and tech stack, plus a roll-up summary.
 */
export async function POST(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// 1. Load conversations from Firestore
console.log(`🔍 Loading conversations for project ${projectId}...`);
const conversationsSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.get();
if (conversationsSnapshot.empty) {
return NextResponse.json({
error: 'No conversations found for this project',
suggestion: 'Import Cursor conversations first'
}, { status: 404 });
}
const conversations = conversationsSnapshot.docs.map(doc => ({
id: doc.id,
...doc.data()
}));
console.log(`✅ Found ${conversations.length} conversations`);
// 2. Load all messages for each conversation
// NOTE(review): one sequential query per conversation (N+1); acceptable
// for small imports, consider Promise.all if conversation counts grow.
let allMessages: any[] = [];
for (const conv of conversations) {
const messagesSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.doc(conv.id)
.collection('messages')
.orderBy('createdAt', 'asc')
.get();
// Each message is tagged with its parent conversation for later grouping.
const messages = messagesSnapshot.docs.map(doc => ({
...doc.data(),
conversationId: conv.id,
conversationName: conv.name
}));
allMessages = allMessages.concat(messages);
}
console.log(`✅ Loaded ${allMessages.length} total messages`);
// 3. Load extension activity data (files edited, sessions)
// Best-effort: any failure leaves extensionActivity null.
let extensionActivity: any = null;
try {
const activitySnapshot = await adminDb
.collection('sessions')
.where('projectId', '==', projectId)
.get();
// Normalize Firestore Timestamps and sort oldest-first (undated → front).
const extensionSessions = activitySnapshot.docs
.map(doc => {
const data = doc.data();
return {
startTime: data.startTime?.toDate?.() || data.startTime,
endTime: data.endTime?.toDate?.() || data.endTime,
filesModified: data.filesModified || [],
conversationSummary: data.conversationSummary || ''
};
})
.sort((a, b) => {
const aTime = a.startTime ? new Date(a.startTime).getTime() : 0;
const bTime = b.startTime ? new Date(b.startTime).getTime() : 0;
return aTime - bTime;
});
// Analyze file activity
const fileActivity: Record<string, number> = {};
extensionSessions.forEach(session => {
session.filesModified.forEach((file: string) => {
fileActivity[file] = (fileActivity[file] || 0) + 1;
});
});
// Top 20 files by edit count, descending.
const topFiles = Object.entries(fileActivity)
.map(([file, count]) => ({ file, editCount: count }))
.sort((a, b) => b.editCount - a.editCount)
.slice(0, 20);
extensionActivity = {
totalSessions: extensionSessions.length,
uniqueFilesEdited: Object.keys(fileActivity).length,
topFiles,
earliestActivity: extensionSessions[0]?.startTime || null,
latestActivity: extensionSessions[extensionSessions.length - 1]?.endTime || null
};
console.log(`✅ Loaded ${extensionSessions.length} extension activity sessions`);
} catch (error) {
console.log(`⚠️ Could not load extension activity: ${error}`);
}
// 4. Load Git commit history
// Best-effort internal API call; non-OK responses leave gitHistory null.
let gitHistory: any = null;
try {
const gitResponse = await fetch(getApiUrl(`/api/projects/${projectId}/git-history`, request));
if (gitResponse.ok) {
gitHistory = await gitResponse.json();
console.log(`✅ Loaded ${gitHistory.totalCommits} Git commits`);
}
} catch (error) {
console.log(`⚠️ Could not load Git history: ${error}`);
}
// 4b. Load unified timeline (combines all data sources by day)
let unifiedTimeline: any = null;
try {
const timelineResponse = await fetch(getApiUrl(`/api/projects/${projectId}/timeline`, request));
if (timelineResponse.ok) {
unifiedTimeline = await timelineResponse.json();
console.log(`✅ Loaded unified timeline with ${unifiedTimeline.days.length} days`);
}
} catch (error) {
console.log(`⚠️ Could not load unified timeline: ${error}`);
}
// 5. Analyze timeline
const timeline = analyzeTimeline(allMessages);
// 6. Calculate costs
const costs = calculateCosts(allMessages);
// 7. Extract features from codebase (static list for now)
const features = getFeaturesList();
// 8. Get tech stack
const techStack = getTechStack();
// 9. Generate report
const report = {
projectId,
generatedAt: new Date().toISOString(),
timeline,
costs,
features,
techStack,
extensionActivity,
gitHistory,
unifiedTimeline,
summary: {
totalConversations: conversations.length,
totalMessages: allMessages.length,
developmentPeriod: timeline.totalDays,
estimatedCost: costs.costs.totalCost,
extensionSessions: extensionActivity?.totalSessions || 0,
filesEdited: extensionActivity?.uniqueFilesEdited || 0,
gitCommits: gitHistory?.totalCommits || 0,
linesAdded: gitHistory?.totalInsertions || 0,
linesRemoved: gitHistory?.totalDeletions || 0,
// NOTE(review): assumes `days` exists whenever the timeline endpoint
// returned ok — `.days.length` is not optional-chained here.
timelineDays: unifiedTimeline?.days.length || 0
}
};
console.log(`✅ Audit report generated successfully`);
return NextResponse.json(report);
} catch (error) {
console.error('Error generating audit report:', error);
return NextResponse.json(
{
error: 'Failed to generate audit report',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}
/**
 * Derives work sessions and velocity metrics from a flat list of chat
 * messages (each carrying a `createdAt` timestamp). Messages more than
 * 4 hours apart are split into separate sessions.
 */
function analyzeTimeline(messages: any[]): TimelineAnalysis {
  // Empty input: return a fully-zeroed analysis.
  if (messages.length === 0) {
    return {
      firstActivity: null,
      lastActivity: null,
      totalDays: 0,
      activeDays: 0,
      totalSessions: 0,
      sessions: [],
      velocity: {
        messagesPerDay: 0,
        averageSessionLength: 0,
        peakProductivityHours: []
      }
    };
  }

  // Work on a time-sorted copy; the caller's array is left untouched.
  const chronological = [...messages].sort(
    (a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
  );
  const firstActivity = new Date(chronological[0].createdAt);
  const lastActivity = new Date(chronological[chronological.length - 1].createdAt);
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const totalDays = Math.ceil((lastActivity.getTime() - firstActivity.getTime()) / MS_PER_DAY);

  // A gap of more than 4 hours between consecutive messages starts a new session.
  const SESSION_GAP_MS = 4 * 60 * 60 * 1000;
  const sessions: WorkSession[] = [];
  let open: any = null;
  for (const message of chronological) {
    const when = new Date(message.createdAt).getTime();
    if (open && when - open.endTime <= SESSION_GAP_MS) {
      // Continue the session currently being accumulated.
      open.messages.push(message);
      open.endTime = when;
    } else {
      // Close the previous session (if any) and open a fresh one.
      if (open) sessions.push(formatSession(open));
      open = {
        messages: [message],
        startTime: when,
        endTime: when,
        date: new Date(when).toISOString().split('T')[0]
      };
    }
  }
  if (open) sessions.push(formatSession(open)); // flush the trailing session

  // Velocity metrics: distinct active (UTC) days and mean session length.
  const activeDays = new Set(
    chronological.map(m => new Date(m.createdAt).toISOString().split('T')[0])
  ).size;
  const minutesInSessions = sessions.reduce((sum, s) => sum + s.duration, 0);
  const averageSessionLength = sessions.length > 0 ? minutesInSessions / sessions.length : 0;

  // Tally messages per hour-of-day, then keep the three busiest hours
  // (reported in ascending hour order).
  const perHour = new Map<number, number>();
  for (const m of chronological) {
    const hour = new Date(m.createdAt).getHours();
    perHour.set(hour, (perHour.get(hour) || 0) + 1);
  }
  const peakProductivityHours = [...perHour.entries()]
    .sort((a, b) => b[1] - a[1])
    .slice(0, 3)
    .map(([hour]) => hour)
    .sort((a, b) => a - b);

  return {
    firstActivity,
    lastActivity,
    totalDays,
    activeDays,
    totalSessions: sessions.length,
    sessions,
    velocity: {
      messagesPerDay: messages.length / activeDays,
      averageSessionLength: Math.round(averageSessionLength),
      peakProductivityHours
    }
  };
}
/**
 * Converts an accumulated raw session (messages plus start/end epoch
 * milliseconds and a YYYY-MM-DD date) into the WorkSession report shape.
 */
function formatSession(sessionData: any): WorkSession {
  const { messages, startTime, endTime, date } = sessionData;

  // Duration in whole minutes, rounded up.
  const minutes = Math.ceil((endTime - startTime) / 60000);

  // Count message roles: type 1 = user, type 2 = AI.
  let userMessages = 0;
  let aiMessages = 0;
  for (const m of messages) {
    if (m.type === 1) userMessages++;
    else if (m.type === 2) aiMessages++;
  }

  // Topics: first 3 distinct conversation names, in message order.
  const topics = [...new Set<string>(messages.map((m: any) => m.conversationName))].slice(0, 3);
  // Unique file paths attached across all messages in the session.
  const filesWorkedOn = [...new Set<string>(messages.flatMap((m: any) => m.attachedFiles || []))];

  return {
    sessionId: `session-${date}-${startTime}`,
    date,
    startTime: new Date(startTime),
    endTime: new Date(endTime),
    duration: minutes,
    messageCount: messages.length,
    userMessages,
    aiMessages,
    topics,
    filesWorkedOn
  };
}
/**
 * Estimates token usage and USD cost from raw message text lengths, using
 * the rough heuristic 1 token ≈ 4 characters and Claude Sonnet 3.5
 * pricing ($3 / $15 per million input/output tokens, Nov 2024).
 */
function calculateCosts(messages: any[]): CostAnalysis {
  const INPUT_PER_1M = 3.0;
  const OUTPUT_PER_1M = 15.0;
  const charsOf = (m: any) => m.text?.length || 0;
  const toTokens = (chars: number) => Math.ceil(chars / 4); // ~4 chars/token
  const round2 = (x: number) => Math.round(x * 100) / 100;  // cents precision

  // Split by role: type 1 = user (input), type 2 = AI (output).
  const userMessages = messages.filter(m => m.type === 1);
  const aiMessages = messages.filter(m => m.type === 2);

  const totalChars = messages.reduce((sum, m) => sum + charsOf(m), 0);
  const avgMessageLength = messages.length > 0 ? Math.round(totalChars / messages.length) : 0;

  const inputTokens = toTokens(userMessages.reduce((sum, m) => sum + charsOf(m), 0));
  const outputTokens = toTokens(aiMessages.reduce((sum, m) => sum + charsOf(m), 0));

  const inputCost = (inputTokens / 1_000_000) * INPUT_PER_1M;
  const outputCost = (outputTokens / 1_000_000) * OUTPUT_PER_1M;

  return {
    messageStats: {
      totalMessages: messages.length,
      userMessages: userMessages.length,
      aiMessages: aiMessages.length,
      avgMessageLength
    },
    estimatedTokens: {
      input: inputTokens,
      output: outputTokens,
      total: inputTokens + outputTokens
    },
    costs: {
      inputCost: round2(inputCost),
      outputCost: round2(outputCost),
      totalCost: round2(inputCost + outputCost),
      currency: 'USD'
    },
    model: 'Claude Sonnet 3.5',
    pricing: {
      inputPer1M: INPUT_PER_1M,
      outputPer1M: OUTPUT_PER_1M
    }
  };
}
/**
 * Static catalogue of product features with their pages, API routes and
 * delivery status. Hard-coded for now — no runtime discovery.
 */
function getFeaturesList(): Feature[] {
  // Small factory keeps each entry on a compact, uniform shape.
  const feature = (
    name: string,
    description: string,
    pages: string[],
    apis: string[],
    status: Feature['status'],
  ): Feature => ({ name, description, pages, apis, status });

  return [
    feature(
      "Project Management",
      "Create, manage, and organize AI-coded projects",
      ["/projects", "/project/[id]/overview", "/project/[id]/settings"],
      ["/api/projects/create", "/api/projects/[id]", "/api/projects/delete"],
      "complete",
    ),
    feature(
      "AI Chat Integration",
      "Real-time chat with AI assistants for development",
      ["/project/[id]/v_ai_chat"],
      ["/api/ai/chat", "/api/ai/conversation"],
      "complete",
    ),
    feature(
      "Cursor Import",
      "Import historical conversations from Cursor IDE",
      [],
      ["/api/cursor/backfill", "/api/cursor/tag-sessions"],
      "complete",
    ),
    feature(
      "GitHub Integration",
      "Connect GitHub repositories and browse code",
      ["/connections"],
      ["/api/github/connect", "/api/github/repos", "/api/github/repo-tree"],
      "complete",
    ),
    feature(
      "Session Tracking",
      "Track development sessions and activity",
      ["/project/[id]/sessions"],
      ["/api/sessions/track", "/api/sessions/associate-project"],
      "complete",
    ),
    feature(
      "Knowledge Base",
      "Document and organize project knowledge",
      ["/project/[id]/context"],
      ["/api/projects/[id]/knowledge/*"],
      "complete",
    ),
    feature(
      "Planning & Automation",
      "Generate development plans and automate workflows",
      ["/project/[id]/plan", "/project/[id]/automation"],
      ["/api/projects/[id]/plan/mvp", "/api/projects/[id]/plan/marketing"],
      "in-progress",
    ),
    feature(
      "Analytics & Costs",
      "Track development costs and project analytics",
      ["/project/[id]/analytics", "/costs"],
      ["/api/stats", "/api/projects/[id]/aggregate"],
      "in-progress",
    ),
  ];
}
/**
 * Static description of the technology stack this application runs on.
 * Versions are snapshots maintained by hand, not read from package.json.
 */
function getTechStack() {
  const frontend = {
    framework: "Next.js 16.0.1",
    react: "19.2.0",
    typescript: "5.x",
    styling: "Tailwind CSS 4",
    uiComponents: "Radix UI + shadcn/ui",
    icons: "Lucide React",
    fonts: "Geist Sans, Geist Mono"
  };

  const backend = {
    runtime: "Next.js API Routes",
    database: "Firebase Firestore",
    auth: "Firebase Auth",
    storage: "Firebase Storage"
  };

  const integrations = [
    "Google Vertex AI",
    "Google Generative AI",
    "GitHub OAuth",
    "v0.dev SDK"
  ];

  return { frontend, backend, integrations };
}

View File

@@ -0,0 +1,165 @@
import { NextRequest, NextResponse } from 'next/server';
import { getApiUrl } from '@/lib/utils/api-url';
/**
 * Complete Chronological History
 * Returns ALL project data in a single chronological timeline
 * Optimized for AI consumption - no truncation, no summaries
 *
 * Merges three event types — git commits, extension sessions and Cursor
 * conversations — into one time-sorted stream, also grouped per day.
 * NOTE(review): despite the header, message/summary text IS truncated to
 * short snippets (100-200 chars) below — reconcile doc vs behavior.
 */
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// Load all three data sources
// All four internal API calls run in parallel; a non-OK response
// degrades the corresponding source to null.
const [contextRes, gitRes, activityRes, timelineRes] = await Promise.all([
fetch(getApiUrl(`/api/projects/${projectId}/context`, request)),
fetch(getApiUrl(`/api/projects/${projectId}/git-history`, request)),
fetch(getApiUrl(`/api/projects/${projectId}/activity`, request)),
fetch(getApiUrl(`/api/projects/${projectId}/timeline`, request))
]);
const context = contextRes.ok ? await contextRes.json() : null;
const git = gitRes.ok ? await gitRes.json() : null;
const activity = activityRes.ok ? await activityRes.json() : null;
// NOTE(review): `timeline` is fetched and parsed but never used below —
// consider dropping the request or including it in the response.
const timeline = timelineRes.ok ? await timelineRes.json() : null;
// Build complete chronological event stream
const events: any[] = [];
// Add all Git commits as events
if (git?.commits) {
for (const commit of git.commits) {
events.push({
type: 'git_commit',
timestamp: new Date(commit.date).toISOString(),
// assumes commit.date starts with "YYYY-MM-DD " — TODO confirm the
// format emitted by the git-history route
date: commit.date.split(' ')[0],
data: {
hash: commit.hash,
author: commit.author,
message: commit.message,
filesChanged: commit.filesChanged,
insertions: commit.insertions,
deletions: commit.deletions
}
});
}
}
// Add all extension sessions as events
if (activity?.sessions) {
for (const session of activity.sessions) {
events.push({
type: 'extension_session',
timestamp: session.startTime,
date: new Date(session.startTime).toISOString().split('T')[0],
data: {
id: session.id,
startTime: session.startTime,
endTime: session.endTime,
duration: session.duration,
filesModified: session.filesModified,
// Only short previews are kept to bound payload size.
conversationSummary: session.conversationSummary?.substring(0, 200),
conversationSnippets: (session.conversation || []).slice(0, 5).map((msg: any) => ({
role: msg.role,
message: msg.message?.substring(0, 100),
timestamp: msg.timestamp
}))
}
});
}
}
// Add Cursor conversations (from recent conversations in context)
if (context?.activity?.recentConversations) {
for (const conv of context.activity.recentConversations) {
events.push({
type: 'cursor_conversation',
timestamp: conv.createdAt,
date: new Date(conv.createdAt).toISOString().split('T')[0],
data: {
id: conv.id,
name: conv.name,
createdAt: conv.createdAt,
messageCount: conv.recentMessages?.length || 0,
recentMessages: conv.recentMessages?.map((msg: any) => ({
type: msg.type,
text: msg.text?.substring(0, 150),
createdAt: msg.createdAt
}))
}
});
}
}
// Sort everything chronologically
events.sort((a, b) =>
new Date(a.timestamp).getTime() - new Date(b.timestamp).getTime()
);
// Group by date for easier consumption
const eventsByDate: Record<string, any[]> = {};
for (const event of events) {
if (!eventsByDate[event.date]) {
eventsByDate[event.date] = [];
}
eventsByDate[event.date].push(event);
}
// Build response
const completeHistory = {
project: {
id: projectId,
name: context?.project?.name,
vision: context?.project?.vision,
githubRepo: context?.project?.githubRepo
},
summary: {
totalEvents: events.length,
dateRange: {
// events is sorted, so first/last give the chronological bounds.
earliest: events[0]?.date,
latest: events[events.length - 1]?.date,
totalDays: Object.keys(eventsByDate).length
},
breakdown: {
gitCommits: events.filter(e => e.type === 'git_commit').length,
extensionSessions: events.filter(e => e.type === 'extension_session').length,
cursorConversations: events.filter(e => e.type === 'cursor_conversation').length
}
},
chronologicalEvents: events,
eventsByDate: Object.keys(eventsByDate)
.sort()
.map(date => ({
date,
dayOfWeek: new Date(date).toLocaleDateString('en-US', { weekday: 'long' }),
eventCount: eventsByDate[date].length,
events: eventsByDate[date]
})),
metadata: {
generatedAt: new Date().toISOString(),
dataComplete: true,
includesFullHistory: true
}
};
return NextResponse.json(completeHistory);
} catch (error) {
console.error('Error generating complete history:', error);
return NextResponse.json(
{
error: 'Failed to generate complete history',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,254 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { getApiUrl } from '@/lib/utils/api-url';
/**
* Complete Project Context API
* Returns everything an AI needs to understand the project state
*/
export async function GET(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// 1. Load project metadata
const projectDoc = await adminDb
.collection('projects')
.doc(projectId)
.get();
if (!projectDoc.exists) {
return NextResponse.json(
{ error: 'Project not found' },
{ status: 404 }
);
}
const projectData = projectDoc.data();
// 2. Load timeline data
const timelineResponse = await fetch(
getApiUrl(`/api/projects/${projectId}/timeline`, request)
);
const timeline = timelineResponse.ok ? await timelineResponse.json() : null;
// 3. Load Git history summary
const gitResponse = await fetch(
getApiUrl(`/api/projects/${projectId}/git-history`, request)
);
const gitHistory = gitResponse.ok ? await gitResponse.json() : null;
// 4. Load extension activity
const activityResponse = await fetch(
getApiUrl(`/api/projects/${projectId}/activity`, request)
);
const activity = activityResponse.ok ? await activityResponse.json() : null;
// 5. Load uploaded documents
const documentsSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('documents')
.orderBy('uploadedAt', 'desc')
.get();
const documents = documentsSnapshot.docs.map(doc => ({
id: doc.id,
...doc.data()
}));
// 6. Get recent conversations (last 7 days)
const sevenDaysAgo = new Date();
sevenDaysAgo.setDate(sevenDaysAgo.getDate() - 7);
const conversationsSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.where('createdAt', '>=', sevenDaysAgo.toISOString())
.orderBy('createdAt', 'desc')
.limit(10)
.get();
const recentConversations = [];
for (const convDoc of conversationsSnapshot.docs) {
const conv = convDoc.data();
const messagesSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.doc(convDoc.id)
.collection('messages')
.orderBy('createdAt', 'desc')
.limit(5)
.get();
recentConversations.push({
id: convDoc.id,
name: conv.name,
createdAt: conv.createdAt,
recentMessages: messagesSnapshot.docs.map(m => ({
type: m.data().type === 1 ? 'user' : 'assistant',
text: m.data().text?.substring(0, 200) + '...',
createdAt: m.data().createdAt
}))
});
}
// 7. Calculate key metrics
const activeDays = timeline?.days?.filter((d: any) =>
d.summary.totalGitCommits > 0 ||
d.summary.totalExtensionSessions > 0 ||
d.summary.totalCursorMessages > 0
).length || 0;
const topFiles = activity?.fileActivity?.slice(0, 10) || [];
// 8. Extract key milestones (commits with significant changes)
const keyMilestones = gitHistory?.commits
?.filter((c: any) => c.insertions + c.deletions > 1000)
.slice(0, 5)
.map((c: any) => ({
date: c.date,
message: c.message,
author: c.author,
impact: `+${c.insertions}/-${c.deletions} lines`
})) || [];
// 9. Generate AI-friendly summary
const context = {
project: {
id: projectId,
name: projectData?.name || 'Untitled Project',
vision: projectData?.vision || null,
description: projectData?.description || null,
createdAt: projectData?.createdAt || null,
githubRepo: projectData?.githubRepo || null
},
timeline: {
dateRange: {
earliest: timeline?.dateRange?.earliest,
latest: timeline?.dateRange?.latest,
totalDays: timeline?.dateRange?.totalDays || 0,
activeDays
},
dataSources: {
git: {
available: timeline?.dataSources?.git?.available || false,
totalCommits: timeline?.dataSources?.git?.totalRecords || 0,
dateRange: {
first: timeline?.dataSources?.git?.firstDate,
last: timeline?.dataSources?.git?.lastDate
}
},
extension: {
available: timeline?.dataSources?.extension?.available || false,
totalSessions: timeline?.dataSources?.extension?.totalRecords || 0,
dateRange: {
first: timeline?.dataSources?.extension?.firstDate,
last: timeline?.dataSources?.extension?.lastDate
}
},
cursor: {
available: timeline?.dataSources?.cursor?.available || false,
totalMessages: timeline?.dataSources?.cursor?.totalRecords || 0,
dateRange: {
first: timeline?.dataSources?.cursor?.firstDate,
last: timeline?.dataSources?.cursor?.lastDate
}
}
}
},
codebase: {
totalCommits: gitHistory?.totalCommits || 0,
totalLinesAdded: gitHistory?.totalInsertions || 0,
totalLinesRemoved: gitHistory?.totalDeletions || 0,
contributors: gitHistory?.authors || [],
topFiles: gitHistory?.topFiles?.slice(0, 20) || []
},
activity: {
totalSessions: activity?.totalSessions || 0,
uniqueFilesEdited: activity?.fileActivity?.length || 0,
topEditedFiles: topFiles,
recentConversations
},
milestones: keyMilestones,
documents: documents.map(doc => ({
id: doc.id,
title: doc.title,
type: doc.type,
uploadedAt: doc.uploadedAt,
contentPreview: doc.content?.substring(0, 500) + '...'
})),
summary: generateProjectSummary({
projectData,
timeline,
gitHistory,
activity,
documents
})
};
return NextResponse.json(context);
} catch (error) {
console.error('Error loading project context:', error);
return NextResponse.json(
{
error: 'Failed to load project context',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}
// Helper to generate human-readable summary
/**
 * Builds a one-line, pipe-separated summary of the project from whatever
 * data sources are available. Missing sections are simply omitted; with no
 * data at all the result is the empty string.
 */
function generateProjectSummary(data: any): string {
  const { projectData, timeline, gitHistory, activity, documents } = data;
  const parts: string[] = [];

  // Project basics
  if (projectData?.name) {
    parts.push(`Project: ${projectData.name}`);
  }
  if (projectData?.vision) {
    parts.push(`Vision: ${projectData.vision}`);
  }

  // Timeline
  if (timeline?.dateRange?.totalDays) {
    parts.push(`Development span: ${timeline.dateRange.totalDays} days`);
  }

  // Git stats. Bug fix: totalCommits can be present while the line counts
  // are missing, and `.toLocaleString()` on undefined previously threw —
  // default the counts to 0 before formatting.
  if (gitHistory?.totalCommits) {
    const added = (gitHistory.totalInsertions ?? 0).toLocaleString();
    const removed = (gitHistory.totalDeletions ?? 0).toLocaleString();
    parts.push(`Code: ${gitHistory.totalCommits} commits, +${added}/-${removed} lines`);
  }

  // Activity
  if (activity?.totalSessions) {
    parts.push(`Activity: ${activity.totalSessions} development sessions`);
  }

  // Documents
  if (documents?.length) {
    parts.push(`Documentation: ${documents.length} documents uploaded`);
  }

  return parts.join(' | ');
}

View File

@@ -0,0 +1,59 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
export async function GET(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const adminDb = getAdminDb();
// Get ALL knowledge items for this project
const knowledgeSnapshot = await adminDb
.collection('knowledge_items')
.where('projectId', '==', projectId)
.get();
const items = knowledgeSnapshot.docs.map(doc => {
const data = doc.data();
return {
id: doc.id,
title: data.title,
sourceType: data.sourceType,
contentLength: data.content?.length || 0,
createdAt: data.createdAt,
tags: data.sourceMeta?.tags || [],
};
});
// Get project info
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
const projectData = projectDoc.data();
return NextResponse.json({
projectId,
projectName: projectData?.name,
currentPhase: projectData?.currentPhase,
totalKnowledgeItems: items.length,
items,
extractionHandoff: projectData?.phaseData?.phaseHandoffs?.extraction,
});
} catch (error) {
console.error('[debug-knowledge] Error:', error);
return NextResponse.json(
{
error: 'Failed to debug knowledge',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,158 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
/**
 * POST — uploads a single document into the project's `documents`
 * subcollection, recording word/char counts and upload time.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const body = await request.json();
    const { title, content, type, source } = body;

    // Validate type as well as presence: a non-string `content` previously
    // passed the guard and threw at `content.split(...)`, surfacing as a
    // 500 instead of a 400.
    if (typeof title !== 'string' || !title || typeof content !== 'string' || !content) {
      return NextResponse.json(
        { error: 'Title and content are required' },
        { status: 400 }
      );
    }

    // Create document
    const docRef = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('documents')
      .add({
        title,
        content,
        type: type || 'text', // text, markdown, pdf, etc.
        source: source || 'manual_upload', // chatgpt, slack, manual_upload, etc.
        uploadedAt: new Date().toISOString(),
        // Trim + filter empty tokens so surrounding whitespace doesn't
        // inflate the word count.
        wordCount: content.trim().split(/\s+/).filter(Boolean).length,
        charCount: content.length
      });

    console.log(`✅ Document uploaded: ${title} (${docRef.id})`);

    return NextResponse.json({
      success: true,
      documentId: docRef.id,
      message: 'Document uploaded successfully'
    });
  } catch (error) {
    console.error('Error uploading document:', error);
    return NextResponse.json(
      {
        error: 'Failed to upload document',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
// Batch upload multiple documents
/**
 * PUT — uploads many documents at once. Entries without a usable string
 * title/content are skipped rather than failing the whole batch.
 */
export async function PUT(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const body = await request.json();
    const { documents } = body;

    if (!Array.isArray(documents) || documents.length === 0) {
      return NextResponse.json(
        { error: 'Documents array is required' },
        { status: 400 }
      );
    }

    // Drop invalid entries up front. Checking `typeof` prevents a
    // non-string `content` from throwing at `.split` and 500-ing the batch.
    const validDocs = documents.filter(
      (doc) =>
        doc &&
        typeof doc.title === 'string' && doc.title &&
        typeof doc.content === 'string' && doc.content
    );

    const collectionRef = adminDb
      .collection('projects')
      .doc(projectId)
      .collection('documents');

    // Write documents in parallel instead of awaiting one at a time;
    // Promise.all preserves input order in the result array.
    const uploadedDocs = await Promise.all(
      validDocs.map(async (doc) => {
        const docRef = await collectionRef.add({
          title: doc.title,
          content: doc.content,
          type: doc.type || 'text',
          source: doc.source || 'batch_upload',
          uploadedAt: new Date().toISOString(),
          // Trim + filter empty tokens for an accurate word count.
          wordCount: doc.content.trim().split(/\s+/).filter(Boolean).length,
          charCount: doc.content.length,
          metadata: doc.metadata || {}
        });
        return {
          id: docRef.id,
          title: doc.title
        };
      })
    );

    console.log(`✅ Batch uploaded ${uploadedDocs.length} documents`);

    return NextResponse.json({
      success: true,
      uploadedCount: uploadedDocs.length,
      documents: uploadedDocs
    });
  } catch (error) {
    console.error('Error batch uploading documents:', error);
    return NextResponse.json(
      {
        error: 'Failed to batch upload documents',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
// Get all documents
/**
 * GET — lists every uploaded document for the project, newest first.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Newest uploads first.
    const snapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('documents')
      .orderBy('uploadedAt', 'desc')
      .get();

    const documents = snapshot.docs.map((doc) => {
      return { id: doc.id, ...doc.data() };
    });

    return NextResponse.json({
      total: documents.length,
      documents
    });
  } catch (error) {
    console.error('Error fetching documents:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch documents',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,98 @@
import { NextResponse } from 'next/server';
import { FieldValue } from 'firebase-admin/firestore';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { runChatExtraction } from '@/lib/ai/chat-extractor';
import { getKnowledgeItem } from '@/lib/server/knowledge';
import { createChatExtraction } from '@/lib/server/chat-extraction';
import { getAdminDb } from '@/lib/firebase/admin';
import type { ProjectPhaseScores } from '@/lib/types/project-artifacts';
// Request payload for the single-item chat-extraction endpoint.
interface ExtractFromChatRequest {
  // ID of the knowledge_items document whose transcript should be analyzed.
  knowledgeItemId?: string;
}
// Increase Vercel/Next timeout for large transcripts
export const maxDuration = 60;
/**
 * POST — runs LLM extraction over one imported chat transcript.
 *
 * Looks up the knowledge item, runs Gemini-based extraction, persists the
 * result as a chat_extraction record, then advances the project phase to
 * 'analyzed' and records the run in phaseScores/phaseHistory.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const body = (await request.json()) as ExtractFromChatRequest;
    const knowledgeItemId = body.knowledgeItemId?.trim();
    if (!knowledgeItemId) {
      return NextResponse.json({ error: 'knowledgeItemId is required' }, { status: 400 });
    }
    const knowledgeItem = await getKnowledgeItem(projectId, knowledgeItemId);
    if (!knowledgeItem) {
      return NextResponse.json({ error: 'Knowledge item not found' }, { status: 404 });
    }
    console.log(`[extract-from-chat] Starting extraction for knowledgeItemId=${knowledgeItemId}, content length=${knowledgeItem.content.length}`);
    // Run the LLM extraction; this is the slow step maxDuration guards.
    const llm = new GeminiLlmClient();
    const extractionData = await runChatExtraction(knowledgeItem, llm);
    console.log(`[extract-from-chat] Extraction complete for knowledgeItemId=${knowledgeItemId}`);
    const overallCompletion = extractionData.summary_scores.overall_completion ?? 0;
    const overallConfidence = extractionData.summary_scores.overall_confidence ?? 0;
    // Persist the extraction record before mutating project phase state.
    const extraction = await createChatExtraction({
      projectId,
      knowledgeItemId,
      data: extractionData,
      overallCompletion,
      overallConfidence,
    });
    const adminDb = getAdminDb();
    const projectRef = adminDb.collection('projects').doc(projectId);
    const snapshot = await projectRef.get();
    const docData = snapshot.data() ?? {};
    // Read-modify-write of phaseScores/phaseHistory (not transactional;
    // concurrent extractions could race — acceptable for this flow).
    const existingScores = (docData.phaseScores ?? {}) as ProjectPhaseScores;
    const phaseHistory = Array.isArray(docData.phaseHistory) ? [...docData.phaseHistory] : [];
    phaseHistory.push({
      phase: 'extractor',
      status: 'completed',
      knowledgeItemId,
      timestamp: new Date().toISOString(),
    });
    existingScores.extractor = {
      knowledgeItemId,
      overallCompletion,
      overallConfidence,
      updatedAt: new Date().toISOString(),
    };
    await projectRef.set(
      {
        currentPhase: 'analyzed',
        phaseScores: existingScores,
        phaseStatus: 'in_progress',
        phaseHistory,
        updatedAt: FieldValue.serverTimestamp(),
      },
      { merge: true },
    );
    return NextResponse.json({ extraction });
  } catch (error) {
    console.error('[extract-from-chat] Extraction failed', error);
    return NextResponse.json(
      {
        error: 'Failed to extract product signals',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,110 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
* Extract vision answers from chat history and save to project
* This is a helper endpoint to migrate from AI chat-based vision collection
* to the structured visionAnswers field
*/
/**
 * POST — extracts vision answers from the project's AI chat history and
 * stores them on the project as structured `visionAnswers`.
 *
 * NOTE(review): assumes the first three user messages in the `ai_chat`
 * conversation are the answers to vision questions Q1–Q3 in order — this
 * is purely positional; confirm the chat flow guarantees that ordering.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const db = admin.firestore();
    console.log(`[Extract Vision] Extracting vision answers from chat for project ${projectId}`);
    // Get chat messages
    const conversationRef = db
      .collection('projects')
      .doc(projectId)
      .collection('conversations')
      .doc('ai_chat');
    const messagesSnapshot = await conversationRef
      .collection('messages')
      .orderBy('createdAt', 'asc')
      .get();
    if (messagesSnapshot.empty) {
      return NextResponse.json(
        { error: 'No chat messages found' },
        { status: 404 }
      );
    }
    const messages = messagesSnapshot.docs.map(doc => ({
      id: doc.id,
      ...doc.data()
    }));
    console.log(`[Extract Vision] Found ${messages.length} total messages`);
    // Extract user messages (answers to the 3 vision questions)
    const userMessages = messages.filter((m: any) => m.role === 'user');
    console.log(`[Extract Vision] Found ${userMessages.length} user messages`);
    if (userMessages.length < 3) {
      return NextResponse.json(
        {
          error: 'Not enough answers found',
          details: `Found ${userMessages.length} answers, need 3`,
          userMessages: userMessages.map((m: any) => m.content?.substring(0, 100))
        },
        { status: 400 }
      );
    }
    // The first 3 user messages should be the answers to Q1, Q2, Q3
    const visionAnswers = {
      q1: userMessages[0].content,
      q2: userMessages[1].content,
      q3: userMessages[2].content,
      allAnswered: true,
      updatedAt: new Date().toISOString(),
    };
    console.log(`[Extract Vision] Extracted vision answers:`, {
      q1: visionAnswers.q1.substring(0, 50) + '...',
      q2: visionAnswers.q2.substring(0, 50) + '...',
      q3: visionAnswers.q3.substring(0, 50) + '...',
    });
    // Save to project — also flips the project into the MVP phase.
    await db.collection('projects').doc(projectId).set(
      {
        visionAnswers,
        readyForMVP: true,
        currentPhase: 'mvp',
        phaseStatus: 'ready',
      },
      { merge: true }
    );
    console.log(`[Extract Vision] ✅ Vision answers saved for project ${projectId}`);
    // Only previews of the answers are returned; full text lives on the project doc.
    return NextResponse.json({
      success: true,
      message: 'Vision answers extracted and saved',
      visionAnswers: {
        q1: visionAnswers.q1.substring(0, 100) + '...',
        q2: visionAnswers.q2.substring(0, 100) + '...',
        q3: visionAnswers.q3.substring(0, 100) + '...',
      }
    });
  } catch (error) {
    console.error('[Extract Vision] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to extract vision answers',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,115 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
export async function GET(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
// Handle async params
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const adminDb = getAdminDb();
// Fetch project to get extraction handoff
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const projectData = projectDoc.data();
const extractionHandoff = projectData?.phaseData?.phaseHandoffs?.extraction;
if (!extractionHandoff) {
return NextResponse.json({ error: 'No extraction results found' }, { status: 404 });
}
return NextResponse.json({
handoff: extractionHandoff,
});
} catch (error) {
console.error('[extraction-handoff] Error:', error);
return NextResponse.json(
{
error: 'Failed to fetch extraction handoff',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}
export async function PATCH(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
// Handle async params
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const body = await request.json();
const { confirmed } = body;
if (!confirmed) {
return NextResponse.json({ error: 'Missing confirmed data' }, { status: 400 });
}
const adminDb = getAdminDb();
// Fetch current handoff
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const projectData = projectDoc.data();
const currentHandoff = projectData?.phaseData?.phaseHandoffs?.extraction;
if (!currentHandoff) {
return NextResponse.json({ error: 'No extraction handoff found' }, { status: 404 });
}
// Update the handoff with edited data
const updatedHandoff = {
...currentHandoff,
confirmed: {
...currentHandoff.confirmed,
...confirmed,
},
updatedAt: new Date().toISOString(),
};
// Save to Firestore
await adminDb.collection('projects').doc(projectId).update({
'phaseData.phaseHandoffs.extraction': updatedHandoff,
updatedAt: new Date().toISOString(),
});
return NextResponse.json({
success: true,
handoff: updatedHandoff,
});
} catch (error) {
console.error('[extraction-handoff] PATCH error:', error);
return NextResponse.json(
{
error: 'Failed to update extraction handoff',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,169 @@
import { NextRequest, NextResponse } from 'next/server';
import { exec } from 'child_process';
import { promisify } from 'util';
const execAsync = promisify(exec);
// One parsed commit from `git log --numstat`.
interface GitCommit {
  hash: string;          // abbreviated (8-char) commit hash
  date: string;          // author date in `%ai` format, e.g. "2024-01-01 12:00:00 -0800"
  author: string;        // author name (`%an`)
  message: string;       // commit subject line (`%s`)
  filesChanged: number;  // number of numstat lines attributed to this commit
  insertions: number;    // total lines added across files ('-' for binary counts as 0)
  deletions: number;     // total lines removed across files
}
// Aggregated repository statistics returned by the git-history endpoint.
interface GitStats {
  totalCommits: number;
  firstCommit: string | null;  // date of the oldest commit, null for empty repos
  lastCommit: string | null;   // date of the newest commit
  totalFilesChanged: number;   // sum of per-commit file-change counts (not unique files)
  totalInsertions: number;
  totalDeletions: number;
  commits: GitCommit[];        // truncated to the 50 most recent for display
  topFiles: Array<{ filePath: string; changeCount: number }>;  // top 20 by churn
  commitsByDay: Record<string, number>;  // "YYYY-MM-DD" -> commit count
  authors: Array<{ name: string; commitCount: number }>;       // sorted desc
}
/**
 * GET — collects commit statistics for the project's repository.
 *
 * Runs a single `git log --all --numstat` pass and aggregates per-commit
 * stats, most-churned files, commits per day, and per-author counts.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    // Currently unused; kept so the route can later look up a per-project
    // repo path from project metadata.
    const { projectId } = await params;

    // Repo path is configurable via env; falls back to the original
    // hard-coded development workspace so existing deployments keep working.
    const repoPath = process.env.GIT_REPO_PATH || '/Users/markhenderson/ai-proxy';

    // Get all commits with detailed stats
    const { stdout: commitsOutput } = await execAsync(
      `cd "${repoPath}" && git log --all --pretty=format:"%H|%ai|%an|%s" --numstat`,
      { maxBuffer: 10 * 1024 * 1024 } // 10MB buffer for large repos
    );

    if (!commitsOutput.trim()) {
      return NextResponse.json({
        totalCommits: 0,
        firstCommit: null,
        lastCommit: null,
        totalFilesChanged: 0,
        totalInsertions: 0,
        totalDeletions: 0,
        commits: [],
        topFiles: [],
        commitsByDay: {},
        authors: []
      });
    }

    // Parse commit data
    const commits: GitCommit[] = [];
    const fileChangeCounts = new Map<string, number>();
    const commitsByDay: Record<string, number> = {};
    const authorCounts = new Map<string, number>();
    let totalFilesChanged = 0;
    let totalInsertions = 0;
    let totalDeletions = 0;

    // A header line always starts with the full 40-hex commit hash followed
    // by '|'. Matching on that (instead of `line.includes('|')`) prevents
    // numstat lines whose file path contains a pipe from being
    // misclassified as commit headers.
    const headerRe = /^[0-9a-f]{40}\|/;

    const lines = commitsOutput.split('\n');
    let currentCommit: Partial<GitCommit> | null = null;

    for (const line of lines) {
      if (headerRe.test(line)) {
        // This is a commit header line
        if (currentCommit) {
          commits.push(currentCommit as GitCommit);
        }
        // Re-join the tail so subject lines containing '|' are not truncated.
        const [hash, date, author, ...rest] = line.split('|');
        const message = rest.join('|');
        currentCommit = {
          hash: hash.substring(0, 8),
          date,
          author,
          message,
          filesChanged: 0,
          insertions: 0,
          deletions: 0
        };
        // Count commits by day ("%ai" is "YYYY-MM-DD HH:MM:SS TZ").
        const day = date.split(' ')[0];
        commitsByDay[day] = (commitsByDay[day] || 0) + 1;
        // Count commits by author
        authorCounts.set(author, (authorCounts.get(author) || 0) + 1);
      } else if (line.trim() && currentCommit) {
        // numstat line: "<insertions>\t<deletions>\t<path>"; '-' marks binary files.
        const parts = line.trim().split('\t');
        if (parts.length === 3) {
          const [insertStr, delStr, filepath] = parts;
          const insertions = insertStr === '-' ? 0 : parseInt(insertStr, 10) || 0;
          const deletions = delStr === '-' ? 0 : parseInt(delStr, 10) || 0;
          currentCommit.filesChanged!++;
          currentCommit.insertions! += insertions;
          currentCommit.deletions! += deletions;
          totalFilesChanged++;
          totalInsertions += insertions;
          totalDeletions += deletions;
          fileChangeCounts.set(filepath, (fileChangeCounts.get(filepath) || 0) + 1);
        }
      }
    }
    // Push the last commit
    if (currentCommit) {
      commits.push(currentCommit as GitCommit);
    }

    // Sort commits by date (most recent first)
    commits.sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime());
    const firstCommit = commits.length > 0 ? commits[commits.length - 1].date : null;
    const lastCommit = commits.length > 0 ? commits[0].date : null;

    // Get top 20 most changed files
    const topFiles = Array.from(fileChangeCounts.entries())
      .sort(([, countA], [, countB]) => countB - countA)
      .slice(0, 20)
      .map(([filePath, changeCount]) => ({ filePath, changeCount }));

    // Get author stats
    const authors = Array.from(authorCounts.entries())
      .sort(([, countA], [, countB]) => countB - countA)
      .map(([name, commitCount]) => ({ name, commitCount }));

    const stats: GitStats = {
      totalCommits: commits.length,
      firstCommit,
      lastCommit,
      totalFilesChanged,
      totalInsertions,
      totalDeletions,
      commits: commits.slice(0, 50), // Return last 50 commits for display
      topFiles,
      commitsByDay,
      authors
    };
    return NextResponse.json(stats);
  } catch (error) {
    console.error('Error loading Git history:', error);
    return NextResponse.json(
      {
        error: 'Could not load Git history',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,196 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { runChatExtraction } from '@/lib/ai/chat-extractor';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { createChatExtraction } from '@/lib/server/chat-extraction';
import { FieldValue } from 'firebase-admin/firestore';
import type { ProjectPhaseScores } from '@/lib/types/project-artifacts';
import type { KnowledgeItem } from '@/lib/types/knowledge';
export const maxDuration = 300; // 5 minutes for batch processing
// Outcome of processing one knowledge item within a batch extraction run.
interface BatchExtractionResult {
  knowledgeItemId: string;
  success: boolean;
  // Populated only when success is false.
  error?: string;
}
/**
 * POST — runs chat extraction over every not-yet-processed knowledge item
 * of a project, then advances the project to the 'analyzed' phase if at
 * least one extraction succeeded.
 *
 * projectId is resolved from route params, falling back to the URL path
 * (segment after 'projects') and finally the ?projectId query param.
 */
export async function POST(
  request: Request,
  context: { params?: Promise<{ projectId?: string }> | { projectId?: string } } = {},
) {
  try {
    // Await params if it's a Promise (Next.js 15+)
    const params = context.params instanceof Promise ? await context.params : context.params;
    const url = new URL(request.url);
    const pathSegments = url.pathname.split('/');
    const projectsIndex = pathSegments.indexOf('projects');
    const projectIdFromPath =
      projectsIndex !== -1 ? pathSegments[projectsIndex + 1] : undefined;
    const projectId =
      (params?.projectId ?? projectIdFromPath ?? url.searchParams.get('projectId') ?? '').trim();
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const adminDb = getAdminDb();
    // Get all knowledge_items for this project
    const knowledgeSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('knowledge_items')
      .get();
    if (knowledgeSnapshot.empty) {
      return NextResponse.json({
        message: 'No knowledge items to extract',
        results: []
      });
    }
    const knowledgeItems = knowledgeSnapshot.docs.map(doc => ({
      id: doc.id,
      ...doc.data()
    })) as KnowledgeItem[];
    // Get existing extractions to avoid re-processing
    const extractionsSnapshot = await adminDb
      .collection('projects')
      .doc(projectId)
      .collection('chat_extractions')
      .get();
    const processedKnowledgeIds = new Set(
      extractionsSnapshot.docs.map(doc => doc.data().knowledgeItemId)
    );
    // Filter to only unprocessed items
    const itemsToProcess = knowledgeItems.filter(
      item => !processedKnowledgeIds.has(item.id)
    );
    if (itemsToProcess.length === 0) {
      return NextResponse.json({
        message: 'All knowledge items already extracted',
        results: []
      });
    }
    console.log(`[batch-extract] Processing ${itemsToProcess.length} knowledge items for project ${projectId}`);
    const llm = new GeminiLlmClient();
    const results: BatchExtractionResult[] = [];
    let successCount = 0;
    let lastSuccessfulExtraction = null;
    // Process each item sequentially — presumably to avoid hammering the
    // LLM with parallel calls; maxDuration=300 budgets for this.
    for (const knowledgeItem of itemsToProcess) {
      try {
        console.log(`[batch-extract] Extracting from knowledgeItemId=${knowledgeItem.id}`);
        const extractionData = await runChatExtraction(knowledgeItem, llm);
        const overallCompletion = extractionData.summary_scores.overall_completion ?? 0;
        const overallConfidence = extractionData.summary_scores.overall_confidence ?? 0;
        const extraction = await createChatExtraction({
          projectId,
          knowledgeItemId: knowledgeItem.id,
          data: extractionData,
          overallCompletion,
          overallConfidence,
        });
        lastSuccessfulExtraction = extraction;
        successCount++;
        results.push({
          knowledgeItemId: knowledgeItem.id,
          success: true
        });
        console.log(`[batch-extract] Successfully extracted from knowledgeItemId=${knowledgeItem.id}`);
        // Also chunk and embed this item (fire-and-forget)
        // NOTE: this floating promise is intentionally not awaited; errors
        // are caught and logged inside, so it cannot reject unhandled.
        (async () => {
          try {
            const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
            await writeKnowledgeChunksForItem({
              id: knowledgeItem.id,
              projectId: knowledgeItem.projectId,
              content: knowledgeItem.content,
              sourceMeta: knowledgeItem.sourceMeta,
            });
          } catch (chunkError) {
            console.error(`[batch-extract] Failed to chunk item ${knowledgeItem.id}:`, chunkError);
          }
        })();
      } catch (error) {
        // A single failed item does not abort the batch.
        console.error(`[batch-extract] Failed to extract from knowledgeItemId=${knowledgeItem.id}:`, error);
        results.push({
          knowledgeItemId: knowledgeItem.id,
          success: false,
          error: error instanceof Error ? error.message : String(error)
        });
      }
    }
    // Update project phase if we had any successful extractions
    if (successCount > 0 && lastSuccessfulExtraction) {
      const projectRef = adminDb.collection('projects').doc(projectId);
      const snapshot = await projectRef.get();
      const docData = snapshot.data() ?? {};
      const existingScores = (docData.phaseScores ?? {}) as ProjectPhaseScores;
      const phaseHistory = Array.isArray(docData.phaseHistory) ? [...docData.phaseHistory] : [];
      phaseHistory.push({
        phase: 'extractor',
        status: 'completed',
        knowledgeItemId: 'batch_extraction',
        timestamp: new Date().toISOString(),
      });
      // Use the last extraction's scores as representative
      const lastData = lastSuccessfulExtraction.data as { summary_scores?: { overall_completion?: number; overall_confidence?: number } };
      existingScores.extractor = {
        knowledgeItemId: 'batch_extraction',
        overallCompletion: lastData.summary_scores?.overall_completion ?? 0,
        overallConfidence: lastData.summary_scores?.overall_confidence ?? 0,
        updatedAt: new Date().toISOString(),
      };
      await projectRef.set(
        {
          currentPhase: 'analyzed',
          phaseScores: existingScores,
          phaseStatus: 'in_progress',
          phaseHistory,
          updatedAt: FieldValue.serverTimestamp(),
        },
        { merge: true },
      );
      console.log(`[batch-extract] Updated project phase to 'analyzed' for project ${projectId}`);
    }
    return NextResponse.json({
      message: `Processed ${itemsToProcess.length} items: ${successCount} succeeded, ${results.filter(r => !r.success).length} failed`,
      results,
      successCount,
      totalProcessed: itemsToProcess.length
    });
  } catch (error) {
    console.error('[batch-extract] Batch extraction failed:', error);
    return NextResponse.json(
      {
        error: 'Failed to batch extract knowledge items',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,118 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import { writeKnowledgeChunksForItem } from '@/lib/server/vector-memory';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// Allow extra time for synchronous chunking/embedding of large insights.
export const maxDuration = 60;
// Payload for storing a user-confirmed insight as a knowledge item.
interface ChunkInsightRequest {
  content: string;
  title?: string;
  importance?: 'primary' | 'supporting' | 'irrelevant';
  tags?: string[];
  // Optional back-reference to the knowledge item the insight came from.
  sourceKnowledgeItemId?: string;
  // NOTE(review): metadata appears unused by the handler below — confirm.
  metadata?: Record<string, any>;
}
/**
 * POST — stores a user-confirmed insight as a knowledge item and
 * synchronously chunks/embeds it for vector search.
 *
 * Requires a Firebase ID token in the Authorization header. Embedding
 * failures are logged but do not fail the request (the Firestore record
 * is already saved).
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
  try {
    // Verify auth
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    // Handle async params in Next.js 16
    const params = 'then' in context.params ? await context.params : context.params;
    const projectId = params.projectId;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const body = await request.json() as ChunkInsightRequest;
    if (!body.content || body.content.trim().length === 0) {
      return NextResponse.json({ error: 'Content is required' }, { status: 400 });
    }
    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    console.log(`[chunk-insight] Creating confirmed insight for project ${projectId}`);
    // Create source metadata; tags encode provenance (who confirmed it and
    // which knowledge item it was extracted from).
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'vibn',
      createdAtOriginal: new Date().toISOString(),
      importance: body.importance || 'primary',
      tags: [
        'extracted_insight',
        'user_confirmed',
        'extracted_by:' + userId,
        ...(body.sourceKnowledgeItemId ? [`source:${body.sourceKnowledgeItemId}`] : []),
        ...(body.tags || [])
      ],
    };
    // Store the confirmed insight as a knowledge_item
    const knowledgeItem = await createKnowledgeItem({
      projectId,
      sourceType: 'other',
      title: body.title || 'Extracted Insight',
      content: body.content,
      sourceMeta,
    });
    console.log(`[chunk-insight] Created knowledge_item ${knowledgeItem.id}`);
    // Chunk and embed in AlloyDB (synchronous for this endpoint)
    try {
      await writeKnowledgeChunksForItem({
        id: knowledgeItem.id,
        projectId: knowledgeItem.projectId,
        content: knowledgeItem.content,
        sourceMeta: knowledgeItem.sourceMeta,
      });
      console.log(`[chunk-insight] Successfully chunked and embedded insight`);
    } catch (chunkError) {
      console.error(`[chunk-insight] Failed to chunk item ${knowledgeItem.id}:`, chunkError);
      // Don't fail the request, item is still saved in Firestore
    }
    return NextResponse.json({
      success: true,
      knowledgeItemId: knowledgeItem.id,
      message: 'Insight chunked and stored successfully',
    });
  } catch (error) {
    console.error('[chunk-insight] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to store insight',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,75 @@
import { NextResponse } from 'next/server';
import { getAdminAuth } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
/**
 * GET — returns up to 100 of the newest knowledge chunks for a project
 * from AlloyDB. If AlloyDB is unavailable, responds with an empty list
 * rather than an error.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Authentication (skip in development if no auth header)
    // Auth is enforced when running in production OR when any Authorization
    // header was supplied. NOTE(review): in development a non-Bearer header
    // also skips verification entirely — confirm that's intended.
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }
    // Fetch knowledge chunks from AlloyDB
    let chunks = [];
    let count = 0;
    try {
      const pool = await getAlloyDbClient();
      // Parameterized query — projectId is passed via $1, never interpolated.
      const result = await pool.query(
        `SELECT
          id,
          chunk_index,
          content,
          source_type,
          importance,
          created_at
        FROM knowledge_chunks
        WHERE project_id = $1
        ORDER BY created_at DESC
        LIMIT 100`,
        [projectId]
      );
      chunks = result.rows;
      count = result.rowCount || 0;
      console.log('[API /knowledge/chunks] Found', count, 'chunks');
    } catch (dbError) {
      console.error('[API /knowledge/chunks] AlloyDB query failed:', dbError);
      console.error('[API /knowledge/chunks] This is likely due to AlloyDB not being configured or connected');
      // Return empty array instead of failing
      chunks = [];
      count = 0;
    }
    return NextResponse.json({
      success: true,
      chunks,
      count,
    });
  } catch (error) {
    console.error('[API] Error fetching knowledge chunks:', error);
    return NextResponse.json(
      { error: 'Failed to fetch knowledge chunks' },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,90 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// Recognized chat providers; anything else is normalized to 'other'.
// NOTE(review): despite the MAP name this is a Set — consider renaming.
const PROVIDER_MAP = new Set(['chatgpt', 'gemini', 'claude', 'cursor', 'vibn', 'other']);
// Payload for importing an external AI chat transcript as a knowledge item.
interface ImportAiChatRequest {
  title?: string;
  // Provider name, matched case-insensitively against PROVIDER_MAP.
  provider?: string;
  transcript?: string;
  // Optional link back to the original conversation.
  sourceLink?: string | null;
  // Original creation timestamp of the conversation, if known.
  createdAtOriginal?: string | null;
}
export async function POST(
request: Request,
{ params }: { params: Promise<{ projectId: string }> },
) {
try {
const { projectId } = await params;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
const body = (await request.json()) as ImportAiChatRequest;
const transcript = body.transcript?.trim();
const provider = body.provider?.toLowerCase();
if (!transcript) {
return NextResponse.json({ error: 'transcript is required' }, { status: 400 });
}
const adminDb = getAdminDb();
const projectSnap = await adminDb.collection('projects').doc(projectId).get();
if (!projectSnap.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const origin = PROVIDER_MAP.has(provider ?? '') ? provider : 'other';
const sourceMeta: KnowledgeSourceMeta = {
origin: (origin as KnowledgeSourceMeta['origin']) ?? 'other',
url: body.sourceLink ?? null,
filename: body.title ?? null,
createdAtOriginal: body.createdAtOriginal ?? null,
importance: 'primary',
tags: ['ai_chat'],
};
const knowledgeItem = await createKnowledgeItem({
projectId,
sourceType: 'imported_ai_chat',
title: body.title ?? null,
content: transcript,
sourceMeta,
});
// Chunk and embed in background (don't block response)
// This populates AlloyDB knowledge_chunks for vector search
(async () => {
try {
const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
await writeKnowledgeChunksForItem({
id: knowledgeItem.id,
projectId: knowledgeItem.projectId,
content: knowledgeItem.content,
sourceMeta: knowledgeItem.sourceMeta,
});
} catch (error) {
// Log but don't fail the request
console.error('[import-ai-chat] Failed to chunk/embed knowledge_item:', error);
}
})();
return NextResponse.json({ knowledgeItem });
} catch (error) {
console.error('[import-ai-chat] Failed to import chat', error);
return NextResponse.json(
{
error: 'Failed to import AI chat transcript',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 },
);
}
}

View File

@@ -0,0 +1,136 @@
import { NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
import { chunkDocument } from '@/lib/utils/document-chunker';
/** JSON payload accepted by the document-import POST handler below. */
interface ImportDocumentRequest {
  filename?: string; // original file name; required at runtime (validated in POST)
  content?: string;  // raw document text; required at runtime (validated in POST)
  mimeType?: string; // optional MIME type, stored only in contextSources metadata
}
// Allow up to 30s of execution: chunking plus one Firestore write per chunk.
export const maxDuration = 30;
/**
 * Imports a plain-text document into a project's knowledge base.
 *
 * Splits the text into overlapping chunks, stores each chunk as a separate
 * knowledge_item, kicks off background embedding into AlloyDB for vector
 * search, and records a summary entry in the project's `contextSources`
 * subcollection for UI display.
 *
 * Request body: ImportDocumentRequest (filename and content required).
 * Responses: 200 { success, filename, chunkCount, knowledgeItemIds },
 *            400 missing input, 404 unknown project, 500 unexpected failure.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const body = (await request.json()) as ImportDocumentRequest;
    const content = body.content?.trim();
    const filename = body.filename?.trim();
    if (!content || !filename) {
      return NextResponse.json({ error: 'filename and content are required' }, { status: 400 });
    }

    // Verify the target project exists before writing anything.
    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }

    // BUG FIX: logs and chunk titles previously interpolated the literal text
    // "$(unknown)" instead of the uploaded filename.
    console.log(`[import-document] Processing ${filename}, length=${content.length}`);

    // Chunk the document into overlapping, structure-preserving segments.
    const chunks = chunkDocument(content, {
      maxChunkSize: 2000,
      chunkOverlap: 200,
      preserveParagraphs: true,
      preserveCodeBlocks: true,
    });
    console.log(`[import-document] Created ${chunks.length} chunks for ${filename}`);

    // Source metadata is identical for every chunk, so build the base once
    // (previously rebuilt inside the loop with a fresh timestamp per chunk).
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'other',
      url: null,
      filename,
      createdAtOriginal: new Date().toISOString(),
      importance: 'primary',
      tags: ['document', 'chunked'],
    };

    // Store each chunk as a separate knowledge_item.
    const knowledgeItemIds: string[] = [];
    for (const chunk of chunks) {
      // Disambiguate multi-chunk documents in the title.
      const chunkTitle = chunks.length > 1
        ? `${filename} (chunk ${chunk.metadata.chunkIndex + 1}/${chunk.metadata.totalChunks})`
        : filename;
      const knowledgeItem = await createKnowledgeItem({
        projectId,
        sourceType: 'imported_document',
        title: chunkTitle,
        content: chunk.content,
        sourceMeta: {
          ...sourceMeta,
          chunkMetadata: {
            chunkIndex: chunk.metadata.chunkIndex,
            totalChunks: chunk.metadata.totalChunks,
            startChar: chunk.metadata.startChar,
            endChar: chunk.metadata.endChar,
            tokenCount: chunk.metadata.tokenCount,
          },
        },
      });
      knowledgeItemIds.push(knowledgeItem.id);

      // Embed into AlloyDB in the background (fire-and-forget); failures are
      // logged but never fail the import itself.
      void (async () => {
        try {
          const { writeKnowledgeChunksForItem } = await import('@/lib/server/vector-memory');
          await writeKnowledgeChunksForItem({
            id: knowledgeItem.id,
            projectId: knowledgeItem.projectId,
            content: knowledgeItem.content,
            sourceMeta: knowledgeItem.sourceMeta,
          });
        } catch (error) {
          console.error(`[import-document] Failed to chunk item ${knowledgeItem.id}:`, error);
        }
      })();
    }

    // Also create a summary record in contextSources for UI display.
    const contextSourcesRef = adminDb.collection('projects').doc(projectId).collection('contextSources');
    await contextSourcesRef.add({
      type: 'document',
      name: filename,
      summary: `Document with ${chunks.length} chunks (${content.length} characters)`,
      connectedAt: new Date(),
      metadata: {
        chunkCount: chunks.length,
        totalChars: content.length,
        mimeType: body.mimeType,
        knowledgeItemIds,
      },
    });

    return NextResponse.json({
      success: true,
      filename,
      chunkCount: chunks.length,
      knowledgeItemIds,
    });
  } catch (error) {
    console.error('[import-document] Failed to import document', error);
    return NextResponse.json(
      {
        error: 'Failed to import document',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,81 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
export async function GET(
request: Request,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// Authentication (skip in development if no auth header)
const authHeader = request.headers.get('Authorization');
const isDevelopment = process.env.NODE_ENV === 'development';
if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const token = authHeader.substring(7);
const auth = getAdminAuth();
const decoded = await auth.verifyIdToken(token);
if (!decoded?.uid) {
return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
}
}
// Fetch knowledge items from Firestore
console.log('[API /knowledge/items] Fetching items for project:', projectId);
let items = [];
try {
const adminDb = getAdminDb();
const knowledgeSnapshot = await adminDb
.collection('knowledge')
.where('projectId', '==', projectId)
.orderBy('createdAt', 'desc')
.limit(100)
.get();
console.log('[API /knowledge/items] Found', knowledgeSnapshot.size, 'items');
items = knowledgeSnapshot.docs.map(doc => {
const data = doc.data();
return {
id: doc.id,
title: data.title || data.content?.substring(0, 50) || 'Untitled',
sourceType: data.sourceType,
content: data.content,
sourceMeta: data.sourceMeta,
createdAt: data.createdAt?.toDate?.()?.toISOString() || data.createdAt,
updatedAt: data.updatedAt?.toDate?.()?.toISOString() || data.updatedAt,
};
});
} catch (firestoreError) {
console.error('[API /knowledge/items] Firestore query failed:', firestoreError);
console.error('[API /knowledge/items] This is likely due to missing Firebase Admin credentials or Firestore not being set up');
// Return empty array instead of failing - the UI will show "No chats yet" and "No images yet"
items = [];
}
return NextResponse.json({
success: true,
items,
count: items.length,
});
} catch (error) {
console.error('[API /knowledge/items] Error fetching knowledge items:', error);
console.error('[API /knowledge/items] Error stack:', error instanceof Error ? error.stack : 'No stack trace');
return NextResponse.json(
{
error: 'Failed to fetch knowledge items',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,105 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { z } from 'zod';
// Structured-output schema for the Gemini call in POST below: a list of
// themes, each naming the insight IDs (echoed from the prompt) it groups.
// The .describe() strings double as field-level prompting for the model.
const ThemeGroupingSchema = z.object({
  themes: z.array(z.object({
    theme: z.string().describe('A short, descriptive theme name (2-4 words)'),
    description: z.string().describe('A brief description of what this theme represents'),
    insightIds: z.array(z.string()).describe('Array of insight IDs that belong to this theme'),
  })),
});
/**
 * Groups the insights supplied in the request body into named themes via a
 * Gemini structured call. Auth is skipped in development when no
 * Authorization header is present. If the AI call fails, the route degrades
 * to a single catch-all theme instead of returning an error.
 *
 * Request body: { insights: Array<{ id, content?, title? }> }.
 * Responses: 200 { success, themes }, 401 bad/missing token, 500 on error.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // Get insights from request body.
    // ROBUSTNESS: also guard against a non-array `insights` payload, which
    // previously fell through and crashed on `.length` / `.map` (a 500).
    const { insights } = await request.json();
    if (!Array.isArray(insights) || insights.length === 0) {
      return NextResponse.json({
        success: true,
        themes: [],
      });
    }
    console.log('[API /knowledge/themes] Grouping', insights.length, 'insights into themes');

    // Prepare insights for AI analysis: one "[id] content-preview" line each.
    // (Dropped the unused `index` map parameter.)
    const insightsContext = insights.map((insight: any) =>
      `[${insight.id}] ${insight.content?.substring(0, 200) || insight.title}`
    ).join('\n\n');

    // Use AI to group insights into themes.
    const llm = new GeminiLlmClient();
    const systemPrompt = `You are an expert at analyzing and categorizing information. Given a list of insights/knowledge chunks, group them into meaningful themes. Each theme should represent a coherent topic or concept. Aim for 3-7 themes depending on the diversity of content.`;
    const userPrompt = `Analyze these insights and group them into themes:
${insightsContext}
Group these insights into themes. Each insight ID is in brackets at the start of each line. Return the themes with their associated insight IDs.`;
    try {
      const result = await llm.structuredCall({
        model: 'gemini',
        systemPrompt,
        messages: [{ role: 'user', content: userPrompt }],
        schema: ThemeGroupingSchema,
        temperature: 0.3, // low temperature: grouping should be stable, not creative
      });
      console.log('[API /knowledge/themes] Generated', result.themes.length, 'themes');
      return NextResponse.json({
        success: true,
        themes: result.themes,
      });
    } catch (aiError) {
      console.error('[API /knowledge/themes] AI grouping failed:', aiError);
      // Fallback: create a single "Ungrouped" theme with all insights
      return NextResponse.json({
        success: true,
        themes: [{
          theme: 'All Insights',
          description: 'Ungrouped insights',
          insightIds: insights.map((i: any) => i.id),
        }],
      });
    }
  } catch (error) {
    console.error('[API /knowledge/themes] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to group insights into themes',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,146 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { createKnowledgeItem } from '@/lib/server/knowledge';
import type { KnowledgeSourceMeta } from '@/lib/types/knowledge';
// import { chunkDocument } from '@/lib/utils/document-chunker'; // Not needed - Extractor AI handles chunking
import { getStorage } from 'firebase-admin/storage';
// Allow up to 60s of execution: file read + Storage upload + Firestore writes.
export const maxDuration = 60;
/**
 * Uploads a document into a project: verifies the caller's Firebase ID token,
 * saves the original bytes to Firebase Storage (made public), stores the
 * decoded text as a single knowledge_item (no chunking — "Extractor AI"
 * chunks later, per the comment below), and records a contextSources summary
 * row for the UI.
 *
 * Responses: 200 { success, filename, url, knowledgeItemId, status, message },
 *            400 missing projectId/file, 401 bad auth, 404 unknown project,
 *            500 unexpected failure.
 */
export async function POST(
  request: Request,
  context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
  try {
    // Verify auth
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    // Handle async params in Next.js 16
    const params = 'then' in context.params ? await context.params : context.params;
    const projectId = params.projectId;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }

    // Parse multipart form data.
    // ROBUSTNESS: formData.get() can return a plain string entry; the previous
    // `as File` assertion let that through the `!file` check and crashed later
    // at file.arrayBuffer(). Validate with instanceof instead.
    const formData = await request.formData();
    const file = formData.get('file');
    if (!(file instanceof File)) {
      return NextResponse.json({ error: 'No file provided' }, { status: 400 });
    }

    // Verify the target project exists before writing anything.
    const adminDb = getAdminDb();
    const projectSnap = await adminDb.collection('projects').doc(projectId).get();
    if (!projectSnap.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    console.log(`[upload-document] Uploading ${file.name}, size=${file.size}`);

    // Read file content.
    // NOTE(review): bytes are decoded as UTF-8 unconditionally; binary formats
    // (e.g. PDF, DOCX) will yield garbage text — confirm callers restrict
    // uploads to text files.
    const arrayBuffer = await file.arrayBuffer();
    const buffer = Buffer.from(arrayBuffer);
    const content = buffer.toString('utf-8');

    // Upload original file to Firebase Storage under a timestamped path.
    const storage = getStorage();
    const bucket = storage.bucket();
    const storagePath = `projects/${projectId}/documents/${Date.now()}_${file.name}`;
    const fileRef = bucket.file(storagePath);
    await fileRef.save(buffer, {
      metadata: {
        contentType: file.type,
        metadata: {
          uploadedBy: userId,
          projectId,
          originalFilename: file.name,
          uploadedAt: new Date().toISOString(),
        },
      },
    });
    // Make file publicly accessible (or use signed URLs if you want private)
    await fileRef.makePublic();
    const publicUrl = `https://storage.googleapis.com/${bucket.name}/${storagePath}`;
    console.log(`[upload-document] File saved to Storage: ${publicUrl}`);

    // Store whole document as single knowledge_item (no chunking)
    // Extractor AI will collaboratively chunk important sections later
    const sourceMeta: KnowledgeSourceMeta = {
      origin: 'other',
      url: publicUrl,
      filename: file.name,
      createdAtOriginal: new Date().toISOString(),
      importance: 'primary',
      tags: ['document', 'uploaded', 'pending_extraction'],
    };
    const knowledgeItem = await createKnowledgeItem({
      projectId,
      sourceType: 'imported_document',
      title: file.name,
      content: content,
      sourceMeta,
    });
    console.log(`[upload-document] Stored whole document as knowledge_item: ${knowledgeItem.id}`);

    // Create a summary record in contextSources for UI display.
    // (Removed the unused `knowledgeItemIds` local; the singular id is used.)
    const contextSourcesRef = adminDb.collection('projects').doc(projectId).collection('contextSources');
    await contextSourcesRef.add({
      type: 'document',
      name: file.name,
      summary: `Document (${content.length} characters) - pending extraction`,
      url: publicUrl,
      connectedAt: new Date(),
      metadata: {
        totalChars: content.length,
        fileSize: file.size,
        mimeType: file.type,
        storagePath,
        knowledgeItemId: knowledgeItem.id,
        uploadedBy: userId,
        status: 'pending_extraction',
      },
    });

    return NextResponse.json({
      success: true,
      filename: file.name,
      url: publicUrl,
      knowledgeItemId: knowledgeItem.id,
      status: 'stored',
      message: 'Document stored. Extractor AI will review and chunk important sections.',
    });
  } catch (error) {
    console.error('[upload-document] Failed to upload document', error);
    return NextResponse.json(
      {
        error: 'Failed to upload document',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,222 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { getAlloyDbClient } from '@/lib/db/alloydb';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { z } from 'zod';
// Structured-output schema for the mission-framework Gemini call in POST
// below. The .describe() strings double as field-level prompting for the
// model; min/max bounds on arrays enforce category/product counts.
const MissionFrameworkSchema = z.object({
  targetCustomer: z.object({
    primaryAudience: z.string().describe('Primary narrow target segment (include geography/region if mentioned in context)'),
    theirSituation: z.string().describe('What situation or context they are in'),
    relatedMarkets: z.array(z.string()).describe('2-4 additional related market segments or customer types that could benefit'),
  }),
  existingSolutions: z.array(z.object({
    category: z.string().describe('Category of solution (e.g., "Legacy EMR Systems", "AI Scribes", "Practice Management", "Open Source")'),
    description: z.string().describe('Description of this category and its limitations'),
    products: z.array(z.object({
      name: z.string().describe('Product/company name'),
      url: z.string().optional().describe('Website URL if known'),
    })).min(5).max(20).describe('Comprehensive list of 5-20 specific products in this category. Include all major players and notable solutions.'),
  })).min(4).max(7).describe('4-7 categories of existing solutions with comprehensive product lists. ALWAYS include an "Open Source" category if applicable to the market.'),
  innovations: z.array(z.object({
    title: z.string().describe('Short title for this innovation (3-5 words)'),
    description: z.string().describe('How this makes you different and better'),
  })).describe('3 key innovations or differentiators'),
  ideaValidation: z.array(z.object({
    title: z.string().describe('Name of this validation metric'),
    description: z.string().describe('What success looks like for this metric'),
  })).describe('3 ways to validate the idea is sound'),
  financialSuccess: z.object({
    subscribers: z.number().describe('Target number of subscribers (Year 1)'),
    pricePoint: z.number().describe('Monthly price per subscriber in dollars'),
    retentionRate: z.number().describe('Target monthly retention rate as a percentage (0-100)'),
  }),
});
/**
 * Generates a "mission framework" (target customer, existing solutions,
 * innovations, validation metrics, financial targets) for the project via a
 * Gemini structured call, grounded in AlloyDB insight chunks, Firestore
 * knowledge items, and the project document — each fetched best-effort.
 *
 * Auth is skipped in development when no Authorization header is sent.
 * Responses: 200 { success, framework }, 401 bad/missing token, 500 on error.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }
    console.log('[API /mission/generate] Generating mission framework for project:', projectId);
    // Fetch insights from AlloyDB (best-effort: failure is swallowed and
    // simply leaves `insights` empty).
    let insights: any[] = [];
    try {
      const pool = await getAlloyDbClient();
      const result = await pool.query(
        `SELECT content, source_type, importance, created_at
         FROM knowledge_chunks
         WHERE project_id = $1
         ORDER BY importance DESC, created_at DESC
         LIMIT 50`,
        [projectId]
      );
      insights = result.rows;
      console.log('[API /mission/generate] Found', insights.length, 'insights');
    } catch (dbError) {
      console.log('[API /mission/generate] No AlloyDB insights available');
    }
    // Fetch knowledge items from Firestore (best-effort, same pattern).
    let knowledgeItems: any[] = [];
    try {
      const adminDb = getAdminDb();
      const knowledgeSnapshot = await adminDb
        .collection('knowledge')
        .where('projectId', '==', projectId)
        .orderBy('createdAt', 'desc')
        .limit(20)
        .get();
      knowledgeItems = knowledgeSnapshot.docs.map(doc => doc.data());
      console.log('[API /mission/generate] Found', knowledgeItems.length, 'knowledge items');
    } catch (firestoreError) {
      console.log('[API /mission/generate] No Firestore knowledge available');
    }
    // Get project data (best-effort; `projectData` stays {} on failure).
    let projectData: any = {};
    try {
      const adminDb = getAdminDb();
      const projectDoc = await adminDb.collection('projects').doc(projectId).get();
      if (projectDoc.exists) {
        projectData = projectDoc.data();
      }
    } catch (error) {
      console.log('[API /mission/generate] Could not fetch project data');
    }
    // Build context from available data: vision, canonical product model
    // fields, top-10 insight texts, then top-5 knowledge titles/previews.
    const contextParts = [];
    if (projectData?.productVision) {
      contextParts.push(`Product Vision: ${projectData.productVision}`);
    }
    if (projectData?.phaseData?.canonicalProductModel) {
      const model = projectData.phaseData.canonicalProductModel;
      if (model.oneLiner) contextParts.push(`Product Description: ${model.oneLiner}`);
      if (model.problem) contextParts.push(`Problem: ${model.problem}`);
      if (model.targetUser) contextParts.push(`Target User: ${model.targetUser}`);
      if (model.coreSolution) contextParts.push(`Solution: ${model.coreSolution}`);
    }
    if (insights.length > 0) {
      const insightTexts = insights.slice(0, 10).map(i => i.content).join('\n- ');
      contextParts.push(`Key Insights:\n- ${insightTexts}`);
    }
    if (knowledgeItems.length > 0) {
      const knowledgeTexts = knowledgeItems.slice(0, 5)
        .map(k => k.title || k.content?.substring(0, 100))
        .filter(Boolean)
        .join('\n- ');
      if (knowledgeTexts) {
        contextParts.push(`Additional Context:\n- ${knowledgeTexts}`);
      }
    }
    // Fall back to a generic-framework instruction when no context exists.
    const context = contextParts.length > 0
      ? contextParts.join('\n\n')
      : 'No project context available yet. Please create a generic framework based on best practices for new product development.';
    console.log('[API /mission/generate] Context length:', context.length);
    // Use AI to generate the mission framework
    const llm = new GeminiLlmClient();
    const systemPrompt = `You are a product strategy expert. Based on the provided project information, create a comprehensive mission framework that helps the founder clearly articulate their product vision, market position, and success metrics.
CRITICAL: For Target Customer, be VERY SPECIFIC and NARROW:
- Look for geographic/regional targeting in the context (country, state, city, region)
- Look for specific customer segments, verticals, or niches
- Avoid broad generalizations like "all doctors" or "businesses everywhere"
- If region is mentioned, ALWAYS include it in the primary audience
- Target the smallest viable market segment that can sustain the business
Be specific and actionable. Use the project context to inform your recommendations.`;
    const userPrompt = `Based on this project information, generate a complete mission framework:
${context}
Create a structured mission framework that includes:
1. Target Customer:
- Primary Audience: Be EXTREMELY SPECIFIC and narrow (include geography if mentioned)
Example: "Solo family practice physicians in rural Oregon" NOT "Primary care doctors"
- Their Situation: What problem/context they face
- Related Markets: List 2-4 other related customer segments that could also benefit
Example: ["Urgent care clinics", "Pediatric specialists in small practices", "Telemedicine providers"]
2. Existing Solutions: Group into 4-7 CATEGORIES (e.g., "Legacy EMR Systems", "AI Medical Scribes", "Open Source", etc.)
- For each category: provide a description of what they do and their limitations
- List 5-20 specific PRODUCTS/COMPANIES in each category with website URLs if you know them
- Be COMPREHENSIVE - include all major players, notable solutions, and emerging alternatives
- ALWAYS include an "Open Source" category listing relevant open-source alternatives (GitHub, frameworks, libraries, tools)
- Include direct competitors, adjacent solutions, and legacy approaches
3. Your Innovations (3 key differentiators)
4. Idea Validation (3 validation metrics)
5. Financial Success (subscribers, price point, retention rate)
Be comprehensive with existing solutions. Be specific and narrow with primary target, but show the range of related markets.`;
    // Schema-constrained call; `result` matches MissionFrameworkSchema.
    const result = await llm.structuredCall({
      model: 'gemini',
      systemPrompt,
      messages: [{ role: 'user', content: userPrompt }],
      schema: MissionFrameworkSchema,
      temperature: 0.7,
    });
    console.log('[API /mission/generate] Successfully generated mission framework');
    // Store the generated framework in Firestore (best-effort: a save failure
    // is logged but the framework is still returned to the caller).
    try {
      const adminDb = getAdminDb();
      await adminDb.collection('projects').doc(projectId).update({
        'phaseData.missionFramework': result,
        'phaseData.missionFrameworkUpdatedAt': new Date(),
      });
      console.log('[API /mission/generate] Saved framework to Firestore');
    } catch (saveError) {
      console.error('[API /mission/generate] Could not save framework:', saveError);
    }
    return NextResponse.json({
      success: true,
      framework: result,
    });
  } catch (error) {
    console.error('[API /mission/generate] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate mission framework',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,966 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
import { GoogleGenerativeAI } from '@google/generative-ai';
import { getApiUrl } from '@/lib/utils/api-url';
import fs from 'fs';
import path from 'path';
/**
* MVP Page & Feature Checklist Generator (AI-Powered)
* Uses Gemini AI with the Vibn MVP Planner agent spec to generate intelligent,
* context-aware plans from project vision answers and existing work
*/
/**
 * Returns the project's MVP checklist.
 *
 * Serves the cached copy from Firestore unless the `regenerate` query param
 * is set; with no cache and no `regenerate`, returns an empty-state payload.
 * Only when `regenerate` is set does it load history + analysis, call the AI
 * generator, and persist the result.
 *
 * NOTE(review): unlike sibling routes, this handler performs no auth check —
 * confirm that is intentional.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const db = admin.firestore();
    // Check if we have a saved plan
    const projectDoc = await db.collection('projects').doc(projectId).get();
    const projectData = projectDoc.data();
    // Cached checklist wins unless the caller explicitly asks to regenerate.
    if (projectData?.mvpChecklist && !request.nextUrl.searchParams.get('regenerate')) {
      console.log('Loading saved MVP checklist');
      return NextResponse.json({
        ...projectData.mvpChecklist,
        cached: true,
        cachedAt: projectData.mvpChecklistGeneratedAt
      });
    }
    // If no checklist exists and not forcing regeneration, return empty state
    if (!projectData?.mvpChecklist && !request.nextUrl.searchParams.get('regenerate')) {
      console.log('[MVP Generation] No checklist exists - returning empty state');
      return NextResponse.json({
        error: 'No MVP checklist generated yet',
        message: 'Click "Regenerate Plan" to create your MVP checklist',
        mvpChecklist: [],
        summary: { totalPages: 0, estimatedDays: 0 }
      });
    }
    // Only reachable when ?regenerate is set.
    console.log('[MVP Generation] 🚀 Starting MVP checklist generation...');
    // Load complete history via an internal API call.
    console.log('[MVP Generation] 📊 Loading project history...');
    const historyResponse = await fetch(
      getApiUrl(`/api/projects/${projectId}/complete-history`, request)
    );
    const history = await historyResponse.json();
    console.log('[MVP Generation] ✅ History loaded');
    // Load intelligent analysis (with fallback if project doesn't have codebase access)
    console.log('[MVP Generation] 🧠 Running intelligent analysis...');
    let analysis = null;
    try {
      const analysisResponse = await fetch(
        getApiUrl(`/api/projects/${projectId}/plan/intelligent`, request)
      );
      if (analysisResponse.ok) {
        analysis = await analysisResponse.json();
        console.log('[MVP Generation] ✅ Analysis complete');
      } else {
        console.log('[MVP Generation] ⚠️ Analysis failed (project may lack codebase access), using fallback');
        analysis = { codebaseAnalysis: null, intelligentPlan: null };
      }
    } catch (error) {
      console.log('[MVP Generation] ⚠️ Analysis error:', error instanceof Error ? error.message : String(error));
      analysis = { codebaseAnalysis: null, intelligentPlan: null };
    }
    // Generate MVP checklist using AI
    console.log('[MVP Generation] 🤖 Calling AI to generate MVP plan...');
    const checklist = await generateAIMVPChecklist(projectId, history, analysis, projectData);
    console.log('[MVP Generation] ✅ MVP plan generated!');
    // Save to Firestore (filter out undefined values to avoid Firestore errors)
    const cleanChecklist = JSON.parse(JSON.stringify(checklist, (key, value) =>
      value === undefined ? null : value
    ));
    await db.collection('projects').doc(projectId).update({
      mvpChecklist: cleanChecklist,
      mvpChecklistGeneratedAt: admin.firestore.FieldValue.serverTimestamp()
    });
    console.log('[MVP Generation] ✅ MVP checklist saved to Firestore');
    return NextResponse.json(checklist);
  } catch (error) {
    console.error('Error generating MVP checklist:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate MVP checklist',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
* POST to force regeneration of the checklist
*/
/**
 * Forces a fresh regeneration of the MVP checklist, persists it to Firestore,
 * and returns it with `regenerated: true`. Mirrors the GET handler's
 * generation path but never serves the cached copy.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    const firestore = admin.firestore();
    console.log('[MVP Generation] 🚀 Starting MVP checklist regeneration...');

    // Re-fetch project data
    const snapshot = await firestore.collection('projects').doc(projectId).get();
    const projectData = snapshot.data();

    // Load complete history
    console.log('[MVP Generation] 📊 Loading project history...');
    const historyUrl = getApiUrl(`/api/projects/${projectId}/complete-history`, request);
    const history = await (await fetch(historyUrl)).json();
    console.log('[MVP Generation] ✅ History loaded');

    // Load intelligent analysis (with fallback if project doesn't have codebase access)
    console.log('[MVP Generation] 🧠 Running intelligent analysis...');
    const fallbackAnalysis = { codebaseAnalysis: null, intelligentPlan: null };
    let analysis: any = null;
    try {
      const analysisUrl = getApiUrl(`/api/projects/${projectId}/plan/intelligent`, request);
      const analysisRes = await fetch(analysisUrl);
      if (!analysisRes.ok) {
        console.log('[MVP Generation] ⚠️ Analysis failed (project may lack codebase access), using fallback');
        analysis = fallbackAnalysis;
      } else {
        analysis = await analysisRes.json();
        console.log('[MVP Generation] ✅ Analysis complete');
      }
    } catch (error) {
      console.log('[MVP Generation] ⚠️ Analysis error:', error instanceof Error ? error.message : String(error));
      analysis = fallbackAnalysis;
    }

    // Generate MVP checklist using AI
    console.log('[MVP Generation] 🤖 Calling AI to generate MVP plan...');
    const plan = await generateAIMVPChecklist(projectId, history, analysis, projectData);
    console.log('[MVP Generation] ✅ MVP plan generated!');
    console.log('[MVP Generation] 📊 Summary:', JSON.stringify(plan.summary, null, 2));

    // Save to Firestore (filter out undefined values to avoid Firestore errors)
    const replaceUndefined = (_key: string, value: any) => (value === undefined ? null : value);
    const sanitizedPlan = JSON.parse(JSON.stringify(plan, replaceUndefined));
    await firestore.collection('projects').doc(projectId).update({
      mvpChecklist: sanitizedPlan,
      mvpChecklistGeneratedAt: admin.firestore.FieldValue.serverTimestamp()
    });
    console.log('[MVP Generation] ✅ MVP checklist saved to Firestore');

    return NextResponse.json({
      ...plan,
      regenerated: true
    });
  } catch (error) {
    console.error('[MVP Generation] ❌ Error regenerating MVP checklist:', error);
    return NextResponse.json(
      {
        error: 'Failed to regenerate MVP checklist',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
* Generate AI-powered MVP Checklist using Gemini and the Vibn MVP Planner agent spec
*/
async function generateAIMVPChecklist(
projectId: string,
history: any,
analysis: any,
projectData: any
) {
try {
// Check for Gemini API key
const geminiApiKey = process.env.GEMINI_API_KEY;
if (!geminiApiKey) {
console.warn('[MVP Generation] ⚠️ No GEMINI_API_KEY found, falling back to template-based generation');
return generateFallbackChecklist(history, analysis);
}
console.log('[MVP Generation] 🔑 GEMINI_API_KEY found, using AI generation');
// Load the agent spec
const agentSpecPath = path.join(process.cwd(), '..', 'vibn-vision', 'initial-questions.json');
const agentSpec = JSON.parse(fs.readFileSync(agentSpecPath, 'utf-8'));
console.log('[MVP Generation] 📋 Agent spec loaded');
// Initialize Gemini
const genAI = new GoogleGenerativeAI(geminiApiKey);
const model = genAI.getGenerativeModel({
model: "gemini-2.0-flash-exp",
generationConfig: {
temperature: 0.4,
topP: 0.95,
topK: 40,
maxOutputTokens: 8192,
responseMimeType: "application/json",
},
});
console.log('[MVP Generation] 🤖 Gemini model initialized (gemini-2.0-flash-exp)');
// Prepare vision input from project data
const visionInput = prepareVisionInput(projectData, history);
console.log('[MVP Generation] 📝 Vision input prepared:', {
q1: visionInput.q1_who_and_problem.raw_answer?.substring(0, 50) + '...',
q2: visionInput.q2_story.raw_answer?.substring(0, 50) + '...',
q3: visionInput.q3_improvement.raw_answer?.substring(0, 50) + '...'
});
// Log what data we have vs missing
console.log('[MVP Generation] 📊 Data availability check:');
console.log(' ✅ Vision answers:', !!projectData.visionAnswers);
console.log(' ✅ GitHub repo:', projectData.githubRepo || 'None');
console.log(' ⚠️ GitHub userId:', projectData.userId || 'MISSING - cannot load repo code');
console.log(' ✅ Git commits:', history.gitSummary?.totalCommits || 0);
console.log(' ✅ Cursor sessions:', history.summary?.breakdown?.extensionSessions || 0);
console.log(' ✅ Codebase analysis:', analysis.codebaseAnalysis?.builtFeatures?.length || 0, 'features found');
// Load Cursor conversation history from Firestore
console.log('[MVP Generation] 💬 Loading Cursor conversation history...');
const adminDb = admin.firestore();
let cursorConversations: any[] = [];
let cursorMessageCount = 0;
try {
const conversationsSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.orderBy('lastUpdatedAt', 'desc')
.limit(10) // Get most recent 10 conversations
.get();
for (const convDoc of conversationsSnapshot.docs) {
const convData = convDoc.data();
const messagesSnapshot = await adminDb
.collection('projects')
.doc(projectId)
.collection('cursorConversations')
.doc(convDoc.id)
.collection('messages')
.orderBy('createdAt', 'asc')
.limit(50) // Limit messages per conversation to avoid token bloat
.get();
const messages = messagesSnapshot.docs.map(msgDoc => {
const msg = msgDoc.data();
return {
role: msg.type === 1 ? 'user' : 'assistant',
text: msg.text || '',
createdAt: msg.createdAt
};
});
cursorMessageCount += messages.length;
cursorConversations.push({
name: convData.name || 'Untitled',
messageCount: messages.length,
messages: messages,
createdAt: convData.createdAt,
lastUpdatedAt: convData.lastUpdatedAt
});
}
console.log('[MVP Generation] ✅ Loaded', cursorConversations.length, 'Cursor conversations with', cursorMessageCount, 'messages');
} catch (error) {
console.error('[MVP Generation] ⚠️ Failed to load Cursor conversations:', error);
}
// Prepare work_to_date context with all available data
const githubSummary = history.gitSummary
? `${history.gitSummary.totalCommits || 0} commits, ${history.gitSummary.filesChanged || 0} files changed`
: 'No Git history available';
const codebaseSummary = analysis.codebaseAnalysis?.summary
|| (analysis.codebaseAnalysis?.builtFeatures?.length > 0
? `Built: ${analysis.codebaseAnalysis.builtFeatures.map((f: any) => f.name).join(', ')}`
: 'No codebase analysis available');
const cursorSessionsSummary = cursorConversations.length > 0
? `${cursorConversations.length} Cursor conversations with ${cursorMessageCount} messages imported from Cursor IDE`
: 'No Cursor conversation history available';
// Format Cursor conversations for the prompt
const cursorContextText = cursorConversations.length > 0
? cursorConversations.map(conv =>
`Conversation: "${conv.name}" (${conv.messageCount} messages)\n` +
conv.messages.slice(0, 10).map((m: any) => ` ${m.role}: ${m.text.substring(0, 200)}`).join('\n')
).join('\n\n')
: '';
const workToDate = {
code_summary: codebaseSummary,
github_summary: githubSummary,
cursor_sessions_summary: cursorSessionsSummary,
cursor_conversations: cursorContextText, // Include actual conversation snippets
existing_assets_notes: `Built features: ${analysis.codebaseAnalysis?.builtFeatures?.length || 0}, Missing: ${analysis.codebaseAnalysis?.missingFeatures?.length || 0}`
};
console.log('[MVP Generation] 🔍 Work context prepared:', {
...workToDate,
cursor_conversations: cursorContextText.length > 0 ? `${cursorContextText.length} chars from conversations` : 'None'
});
// Build the prompt with agent spec instructions
const prompt = `${agentSpec.agent_spec.instructions_for_model}
Here is the input data:
${JSON.stringify({
vision_input: visionInput,
work_to_date: workToDate
}, null, 2)}
Return ONLY valid JSON matching the output schema, with no additional text or markdown.`;
console.log('[MVP Generation] 📤 Sending prompt to Gemini (length:', prompt.length, 'chars)');
// Call Gemini
const result = await model.generateContent(prompt);
const response = result.response;
const text = response.text();
console.log('[MVP Generation] 📥 Received AI response (length:', text.length, 'chars)');
// Parse AI response (Gemini returns JSON directly with responseMimeType set)
const aiResponse = JSON.parse(text);
console.log('[MVP Generation] ✅ AI response parsed successfully');
console.log('[MVP Generation] 🔍 AI Response structure:', JSON.stringify({
has_journey_tree: !!aiResponse.journey_tree,
has_touchpoints_tree: !!aiResponse.touchpoints_tree,
has_system_tree: !!aiResponse.system_tree,
journey_nodes: aiResponse.journey_tree?.nodes?.length || 0,
touchpoints_nodes: aiResponse.touchpoints_tree?.nodes?.length || 0,
system_nodes: aiResponse.system_tree?.nodes?.length || 0,
summary: aiResponse.summary
}, null, 2));
// Transform AI trees into our existing format
const checklist = transformAIResponseToChecklist(aiResponse, history, analysis);
console.log('[MVP Generation] ✅ Checklist transformed, total pages:', checklist.summary?.totalPages || 0);
return checklist;
} catch (error) {
console.error('[MVP Generation] ❌ Error generating AI MVP checklist:', error);
console.warn('[MVP Generation] ⚠️ Falling back to template-based generation');
return generateFallbackChecklist(history, analysis);
}
}
/**
 * Fallback to template-based generation if AI fails.
 *
 * Builds a static taxonomy of expected MVP pages/APIs and marks each entry
 * built/missing using commit messages and codebase-analysis features as
 * evidence. Returns the same shape as the AI path, with empty trees.
 *
 * @param history  Project history: `project` info plus `chronologicalEvents`.
 * @param analysis Codebase analysis with built/missing features.
 */
function generateFallbackChecklist(history: any, analysis: any) {
  const vision = history.project.vision || '';
  const builtFeatures = analysis.codebaseAnalysis?.builtFeatures || [];
  const missingFeatures = analysis.codebaseAnalysis?.missingFeatures || [];
  // Scan commit messages for evidence of pages.
  // Guarded with `|| []` for parity with transformAIResponseToChecklist,
  // which treats chronologicalEvents as optional.
  const commitMessages = (history.chronologicalEvents || [])
    .filter((e: any) => e.type === 'git_commit')
    .map((e: any) => e.data.message);
  // Simple flat taxonomy structure (existing template)
  const corePages = [
    {
      category: 'Core Features',
      pages: [
        {
          path: '/auth',
          title: 'Authentication',
          status: detectPageStatus('auth', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('auth', commitMessages)
        },
        {
          path: '/[workspace]',
          title: 'Workspace Selector',
          status: detectPageStatus('workspace', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('workspace', commitMessages)
        },
        {
          path: '/[workspace]/projects',
          title: 'Projects List',
          status: detectPageStatus('projects page', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('projects list', commitMessages)
        },
        {
          path: '/project/[id]/overview',
          title: 'Project Dashboard',
          status: detectPageStatus('overview', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('overview', commitMessages)
        },
        {
          path: '/project/[id]/mission',
          title: 'Vision/Mission Screen',
          status: detectPageStatus('mission|vision', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('vision|mission', commitMessages)
        },
        {
          path: '/project/[id]/audit',
          title: 'Project History & Audit',
          status: detectPageStatus('audit', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('audit', commitMessages)
        },
        {
          path: '/project/[id]/timeline-plan',
          title: 'MVP Timeline & Checklist',
          status: detectPageStatus('timeline-plan', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('timeline-plan', commitMessages)
        },
        {
          path: '/api/github/oauth',
          title: 'GitHub OAuth API',
          status: detectPageStatus('github/oauth', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('github oauth', commitMessages)
        },
        {
          path: '/api/projects',
          title: 'Project Management APIs',
          status: detectPageStatus('api/projects', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('project api', commitMessages)
        },
        {
          path: '/api/projects/[id]/mvp-checklist',
          title: 'MVP Checklist Generation API',
          status: detectPageStatus('mvp-checklist', commitMessages, builtFeatures),
          priority: 'critical',
          evidence: findEvidence('mvp-checklist', commitMessages)
        }
      ]
    },
    {
      category: 'Flows',
      pages: [
        {
          path: 'flow/onboarding',
          title: 'User Onboarding Flow',
          status: 'in_progress',
          priority: 'critical',
          evidence: [],
          note: 'Sign Up → Workspace Creation → Connect GitHub'
        },
        {
          path: 'flow/project-creation',
          title: 'Project Creation Flow',
          status: 'in_progress',
          priority: 'critical',
          evidence: findEvidence('project creation', commitMessages),
          note: 'Import/New Project → Repository → History Import → Vision Setup'
        },
        {
          path: 'flow/plan-generation',
          title: 'Plan Generation Flow',
          status: 'in_progress',
          priority: 'critical',
          evidence: findEvidence('plan', commitMessages),
          note: 'Context Analysis → MVP Checklist → Timeline View'
        }
      ]
    },
    {
      category: 'Marketing',
      pages: [
        {
          path: '/project/[id]/marketing',
          title: 'Marketing Dashboard',
          status: 'missing',
          priority: 'high',
          evidence: [],
          note: 'Have /plan/marketing API but no UI'
        },
        {
          path: '/api/projects/[id]/plan/marketing',
          title: 'Marketing Plan Generation API',
          status: detectPageStatus('marketing api', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('marketing', commitMessages)
        },
        {
          path: '/',
          title: 'Marketing Landing Page',
          status: detectPageStatus('marketing page', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('marketing site|landing', commitMessages)
        }
      ]
    },
    {
      category: 'Social',
      pages: [
        {
          path: '/[workspace]/connections',
          title: 'Social Connections & Integrations',
          status: detectPageStatus('connections', commitMessages, builtFeatures),
          priority: 'medium',
          evidence: findEvidence('connections', commitMessages)
        }
      ]
    },
    {
      category: 'Content',
      pages: [
        {
          path: '/docs',
          title: 'Documentation Pages',
          status: 'missing',
          priority: 'medium',
          evidence: []
        },
        {
          path: '/project/[id]/getting-started',
          title: 'Getting Started Guide',
          status: detectPageStatus('getting-started', commitMessages, builtFeatures),
          priority: 'medium',
          evidence: findEvidence('getting-started|onboarding', commitMessages)
        }
      ]
    },
    {
      category: 'Settings',
      pages: [
        {
          path: '/project/[id]/settings',
          title: 'Project Settings',
          status: detectPageStatus('settings', commitMessages, builtFeatures),
          priority: 'high',
          evidence: findEvidence('settings', commitMessages)
        },
        {
          path: '/[workspace]/settings',
          title: 'User Settings',
          status: detectPageStatus('settings', commitMessages, builtFeatures),
          priority: 'medium',
          evidence: findEvidence('settings', commitMessages)
        }
      ]
    }
  ];
  // Calculate statistics (corePages is static and non-empty, so the
  // percentage division is safe here).
  const allPages = corePages.flatMap(c => c.pages);
  const builtCount = allPages.filter(p => p.status === 'built').length;
  const inProgressCount = allPages.filter(p => p.status === 'in_progress').length;
  const missingCount = allPages.filter(p => p.status === 'missing').length;
  return {
    project: {
      name: history.project.name,
      vision: history.project.vision,
      githubRepo: history.project.githubRepo
    },
    summary: {
      totalPages: allPages.length,
      built: builtCount,
      inProgress: inProgressCount,
      missing: missingCount,
      completionPercentage: Math.round((builtCount / allPages.length) * 100)
    },
    visionSummary: extractVisionPillars(vision),
    mvpChecklist: corePages,
    nextSteps: generateNextSteps(corePages, missingFeatures),
    generatedAt: new Date().toISOString(),
    // Empty trees for fallback (will be populated when AI generation works)
    journeyTree: { label: "Journey", nodes: [] },
    touchpointsTree: { label: "Touchpoints", nodes: [] },
    systemTree: { label: "System", nodes: [] },
  };
}
/**
 * Decide whether a template page is already 'built' or still 'missing'.
 *
 * @param pagePath       Search term(s); alternatives separated by '|'.
 * @param commitMessages Commit messages scanned for each term (case-insensitive).
 * @param builtFeatures  Features from codebase analysis; entries are expected
 *                       to carry `name` and `evidence`, but either may be absent.
 * @returns 'built' when any term appears in a commit or feature, else 'missing'.
 */
function detectPageStatus(pagePath: string, commitMessages: string[], builtFeatures: any[]): string {
  const searchTerms = pagePath.split('|');
  for (const term of searchTerms) {
    const needle = term.toLowerCase();
    const hasCommit = commitMessages.some(msg =>
      msg.toLowerCase().includes(needle)
    );
    // Optional chaining for parity with detectAINodeStatus: feature entries
    // may lack `name` or `evidence`, and previously crashed here.
    const hasFeature = builtFeatures.some(f =>
      f.name?.toLowerCase().includes(needle) ||
      f.evidence?.some((e: string) => e.toLowerCase().includes(needle))
    );
    if (hasCommit || hasFeature) {
      return 'built';
    }
  }
  return 'missing';
}
/**
 * Collect up to two matching commit messages per search term.
 * `searchTerm` may contain several alternatives separated by '|'; matching
 * is case-insensitive and results keep the terms' order.
 */
function findEvidence(searchTerm: string, commitMessages: string[]): string[] {
  const collected: string[] = [];
  for (const alternative of searchTerm.split('|')) {
    const needle = alternative.toLowerCase();
    const hits = commitMessages.filter(message =>
      message.toLowerCase().includes(needle)
    );
    collected.push(...hits.slice(0, 2));
  }
  return collected;
}
/**
 * Derive high-level product pillars from the raw vision text.
 * A pillar is included when any of its trigger phrases appears verbatim
 * (case-sensitive, matching the original heuristic).
 */
function extractVisionPillars(vision: string): string[] {
  const pillarTriggers: Array<[string[], string]> = [
    [['start from scratch', 'import'], 'Project ingestion (start from scratch or import existing work)'],
    [['understand', 'vision'], 'Project understanding (vision, history, structure, metadata)'],
    [['plan', 'checklist'], 'Project planning (auto-generated v1 roadmap/checklist)'],
    [['marketing', 'communication', 'automation'], 'Automation + AI support (marketing, chat, context-aware support)'],
  ];
  return pillarTriggers
    .filter(([triggers]) => triggers.some(trigger => vision.includes(trigger)))
    .map(([, pillar]) => pillar);
}
/**
 * Build a short, ordered list of next actions: up to three critical pages
 * that are still missing (priority 1), then up to two known feature gaps
 * (priority 2).
 */
function generateNextSteps(corePages: any[], missingFeatures: any[]): any[] {
  const steps: any[] = [];
  // Critical pages not yet built come first.
  corePages
    .flatMap(category => category.pages)
    .filter(page => page.status === 'missing' && page.priority === 'critical')
    .slice(0, 3)
    .forEach(page => {
      steps.push({
        priority: 1,
        task: `Build ${page.title}`,
        path: page.path || '',
        reason: page.note || 'Critical for MVP launch'
      });
    });
  // Then feature gaps reported by the codebase analysis.
  if (Array.isArray(missingFeatures)) {
    for (const feature of missingFeatures.slice(0, 2)) {
      if (feature && (feature.feature || feature.task)) {
        steps.push({
          priority: 2,
          task: feature.feature || feature.task || 'Complete missing feature',
          reason: feature.reason || 'Important for MVP'
        });
      }
    }
  }
  return steps;
}
/**
 * Prepare vision input from project data.
 * Maps the free-form project vision onto the 3-question format expected by
 * the planning agent, preferring explicit `visionAnswers` when present and
 * falling back to heuristic extraction from the vision text.
 */
function prepareVisionInput(projectData: any, history: any) {
  const vision = projectData.vision || history.project?.vision || '';
  const answers = projectData.visionAnswers || {};
  return {
    q1_who_and_problem: {
      prompt: "Who has the problem you want to fix and what is it?",
      raw_answer: answers.q1 || extractProblemFromVision(vision) || vision
    },
    q2_story: {
      prompt: "Tell me a story of this person using your tool and experiencing your vision?",
      raw_answer: answers.q2 || vision
    },
    q3_improvement: {
      prompt: "How much did that improve things for them?",
      raw_answer: answers.q3 || extractImprovementFromVision(vision) || 'Significantly faster and more efficient workflow'
    }
  };
}
/**
 * Extract problem statement from unstructured vision.
 *
 * Heuristic: the first sentence mentioning a pain-point keyword is taken as
 * the problem; otherwise the first sentence of the vision (or the whole
 * vision when it has no sentences).
 */
function extractProblemFromVision(vision: string): string {
  const problemKeywords = ['problem', 'struggle', 'difficult', 'challenge', 'pain', 'need'];
  const sentences = vision.split(/[.!?]+/);
  for (const sentence of sentences) {
    const lowerSentence = sentence.toLowerCase();
    if (problemKeywords.some(keyword => lowerSentence.includes(keyword))) {
      return sentence.trim();
    }
  }
  // Fall back to the first sentence — reuse the split above instead of
  // re-splitting the vision a second time (previous redundant work).
  return sentences[0]?.trim() || vision;
}
/**
 * Extract improvement/value from unstructured vision.
 * Returns the first sentence containing a benefit keyword, trimmed, or the
 * empty string when none matches.
 */
function extractImprovementFromVision(vision: string): string {
  const valueKeywords = ['faster', 'better', 'easier', 'save', 'improve', 'automate', 'help'];
  const match = vision.split(/[.!?]+/).find(sentence => {
    const lowered = sentence.toLowerCase();
    return valueKeywords.some(keyword => lowered.includes(keyword));
  });
  return match ? match.trim() : '';
}
/**
 * Transform AI response trees into our existing checklist format.
 *
 * Touchpoint and system nodes are grouped into categories; journey/design/
 * tech trees are passed through unchanged for the tree views.
 *
 * @param aiResponse Parsed Gemini output with journey/touchpoints/system trees.
 * @param history    Project history providing commit evidence and metadata.
 * @param analysis   Codebase analysis providing built-feature evidence.
 */
function transformAIResponseToChecklist(aiResponse: any, history: any, analysis: any) {
  const { journey_tree, touchpoints_tree, system_tree, summary } = aiResponse;
  // Scan commit messages for evidence
  const commitMessages = history.chronologicalEvents
    ?.filter((e: any) => e.type === 'git_commit')
    ?.map((e: any) => e.data.message) || [];
  const builtFeatures = analysis.codebaseAnalysis?.builtFeatures || [];
  // Combine touchpoints and system into categories
  const categories: any[] = [];
  if (touchpoints_tree?.nodes) {
    categories.push(...groupAssetsByCategory(
      touchpoints_tree.nodes,
      'touchpoint',
      commitMessages,
      builtFeatures
    ));
  }
  if (system_tree?.nodes) {
    categories.push(...groupAssetsByCategory(
      system_tree.nodes,
      'system',
      commitMessages,
      builtFeatures
    ));
  }
  // Calculate statistics
  const allPages = categories.flatMap(c => c.pages);
  const builtCount = allPages.filter((p: any) => p.status === 'built').length;
  const inProgressCount = allPages.filter((p: any) => p.status === 'in_progress').length;
  const missingCount = allPages.filter((p: any) => p.status === 'missing').length;
  return {
    project: {
      name: history.project.name,
      vision: history.project.vision,
      githubRepo: history.project.githubRepo
    },
    summary: {
      totalPages: allPages.length,
      built: builtCount,
      inProgress: inProgressCount,
      missing: missingCount,
      // Fixed: guard against NaN when the AI returned no nodes (0 / 0).
      completionPercentage: allPages.length > 0
        ? Math.round((builtCount / allPages.length) * 100)
        : 0
    },
    visionSummary: [summary || 'AI-generated MVP plan'],
    mvpChecklist: categories,
    nextSteps: generateNextStepsFromAI(allPages),
    generatedAt: new Date().toISOString(),
    aiGenerated: true,
    // Include raw trees for Journey/Design/Tech views
    journeyTree: journey_tree,
    touchpointsTree: touchpoints_tree,
    systemTree: system_tree,
  };
}
/**
 * Group asset nodes into { category, pages } buckets.
 * Each node becomes a page entry whose status/evidence are derived from
 * commit messages and the codebase analysis; category order follows first
 * appearance in `nodes`.
 */
function groupAssetsByCategory(
  nodes: any[],
  listType: 'touchpoint' | 'system',
  commitMessages: string[],
  builtFeatures: any[]
) {
  const buckets = new Map<string, any[]>();
  for (const node of nodes) {
    const entry = {
      id: node.id,
      path: inferPath(node),
      title: node.name,
      status: detectAINodeStatus(node, commitMessages, builtFeatures),
      priority: node.must_have_for_v1 ? 'critical' : 'medium',
      evidence: findEvidenceForNode(node, commitMessages),
      note: node.asset_metadata?.why_it_exists,
      metadata: node.asset_metadata,
      requirements: flattenChildrenToRequirements(node.children)
    };
    const bucketKey = inferCategory(node, listType);
    const bucket = buckets.get(bucketKey);
    if (bucket) {
      bucket.push(entry);
    } else {
      buckets.set(bucketKey, [entry]);
    }
  }
  return [...buckets.entries()].map(([category, pages]) => ({ category, pages }));
}
/**
 * Infer the checklist category for a node from its asset type and, for
 * touchpoints without a direct type mapping, its journey stage.
 */
function inferCategory(node: any, listType: 'touchpoint' | 'system'): string {
  const assetType = node.asset_type;
  if (listType === 'system') {
    // System assets: APIs and services are core; everything else (incl.
    // integrations) lands in Settings.
    return assetType === 'api_endpoint' || assetType === 'service'
      ? 'Core Features'
      : 'Settings';
  }
  // Touchpoints: direct type → category mapping first.
  const byType = new Map<string, string>([
    ['flow', 'Flows'],
    ['social_post', 'Social'],
    ['document', 'Content'],
    ['email', 'Marketing'],
  ]);
  const mapped = byType.get(assetType);
  if (mapped) {
    return mapped;
  }
  // Awareness/discovery-stage touchpoints count as Marketing.
  const stage = (node.asset_metadata?.journey_stage || '').toLowerCase();
  return stage.includes('aware') || stage.includes('discover')
    ? 'Marketing'
    : 'Core Features';
}
/**
 * Infer a route-like path for a node: prefer an explicit path embedded in
 * the implementation notes, otherwise derive a slug from the asset name.
 */
function inferPath(node: any): string {
  const notes = node.asset_metadata?.implementation_notes || '';
  const embedded = notes.match(/\/[\w\-\/\[\]]+/);
  if (embedded) {
    return embedded[0];
  }
  // Slugify: lowercase, spaces → dashes, strip anything non [a-z0-9-].
  const slug = node.name
    .toLowerCase()
    .replace(/\s+/g, '-')
    .replace(/[^a-z0-9\-]/g, '');
  switch (node.asset_type) {
    case 'api_endpoint':
      return `/api/${slug}`;
    case 'flow':
      return `flow/${slug}`;
    default:
      return `/${slug}`;
  }
}
/**
 * Detect status of AI node based on existing work.
 *
 * A node counts as 'built' when its name or inferred path appears in a
 * commit message, or when its name matches a feature (or feature evidence)
 * from the codebase analysis; otherwise it is 'missing'.
 */
function detectAINodeStatus(node: any, commitMessages: string[], builtFeatures: any[]): string {
  const name = node.name.toLowerCase();
  const path = inferPath(node).toLowerCase();
  // Check commit messages
  const hasCommit = commitMessages.some(msg =>
    msg.toLowerCase().includes(name) || msg.toLowerCase().includes(path)
  );
  // Check built features
  const hasFeature = builtFeatures.some((f: any) =>
    f.name?.toLowerCase().includes(name) ||
    f.evidence?.some((e: string) => e.toLowerCase().includes(name))
  );
  // Fixed: the original ended with `node.must_have_for_v1 ? 'missing' : 'missing'`
  // — both branches identical, so the ternary was dead code.
  return hasCommit || hasFeature ? 'built' : 'missing';
}
/**
 * Find up to two commit messages mentioning the node's name
 * (case-insensitive), preserving commit order.
 */
function findEvidenceForNode(node: any, commitMessages: string[]): string[] {
  const needle = node.name.toLowerCase();
  const matches: string[] = [];
  for (const message of commitMessages) {
    if (matches.length === 2) {
      break;
    }
    if (message.toLowerCase().includes(needle)) {
      matches.push(message);
    }
  }
  return matches;
}
/**
 * Flatten child nodes into numbered requirement entries, all starting as
 * 'missing'. Returns an empty list for absent or empty children.
 */
function flattenChildrenToRequirements(children: any[]): any[] {
  if (!children?.length) {
    return [];
  }
  return children.map((child, position) => ({
    id: position + 1,
    text: child.name,
    status: 'missing'
  }));
}
/**
 * Generate next steps from AI-generated pages: the first five pages that
 * are both missing and critical, numbered in order.
 */
function generateNextStepsFromAI(pages: any[]): any[] {
  const steps: any[] = [];
  for (const page of pages) {
    if (steps.length === 5) {
      break;
    }
    if (page.status !== 'missing' || page.priority !== 'critical') {
      continue;
    }
    steps.push({
      priority: steps.length + 1,
      task: `Build ${page.title}`,
      path: page.path || '',
      reason: page.note || 'Critical for MVP V1'
    });
  }
  return steps;
}

View File

@@ -0,0 +1,346 @@
import { NextRequest, NextResponse } from 'next/server';
import { exec } from 'child_process';
import { promisify } from 'util';
import { getApiUrl } from '@/lib/utils/api-url';
const execAsync = promisify(exec);
/**
 * Intelligent V1 Launch Planning.
 * Analyzes the ACTUAL checked-out codebase (page/API/component counts via
 * `find`) plus the stored project context to generate specific tasks.
 *
 * NOTE(review): this endpoint shells out against a local repository path and
 * only works where that checkout exists — confirm intended for dev use only.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // 1. Load project context
    const contextResponse = await fetch(
      getApiUrl(`/api/projects/${projectId}/context`, request)
    );
    const context = await contextResponse.json();
    // 2. Scan actual codebase structure.
    // Fixed: the repo path was hard-coded to one developer's machine; allow
    // overriding via environment, keeping the old value as the default.
    const repoPath = process.env.VIBN_REPO_PATH || '/Users/markhenderson/ai-proxy';
    const { stdout: pagesOutput } = await execAsync(
      `cd "${repoPath}" && find vibn-frontend/app -name "*.tsx" | grep "page.tsx" | wc -l`
    );
    const { stdout: apiOutput } = await execAsync(
      `cd "${repoPath}" && find vibn-frontend/app/api -name "route.ts" | wc -l`
    );
    const { stdout: componentsOutput } = await execAsync(
      `cd "${repoPath}" && find vibn-frontend/components -name "*.tsx" 2>/dev/null | wc -l || echo 0`
    );
    const codebaseStats = {
      totalPages: parseInt(pagesOutput.trim(), 10),
      totalAPIs: parseInt(apiOutput.trim(), 10),
      totalComponents: parseInt(componentsOutput.trim(), 10)
    };
    // 3. Analyze what's ACTUALLY built vs vision
    const analysis = await analyzeRealCodebase(context, codebaseStats, repoPath);
    // 4. Generate intelligent, specific tasks
    const intelligentPlan = generateIntelligentPlan(context, analysis);
    return NextResponse.json({
      projectContext: {
        name: context.project.name,
        vision: context.project.vision
      },
      codebaseAnalysis: analysis,
      intelligentPlan,
      confidence: analysis.confidence
    });
  } catch (error) {
    console.error('Error generating intelligent plan:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate intelligent plan',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
/**
 * Inspect the checked-out repository and compare what is actually built
 * against the project vision.
 *
 * @param context  Project context loaded from /api/projects/[id]/context.
 * @param stats    Aggregate page/API/component counts (currently unused in
 *                 the body; kept for call-site compatibility).
 * @param repoPath Absolute path of the repository scanned with `find`.
 * @returns Analysis with builtFeatures, missingFeatures, confidence, and
 *          specificInsights derived from recent activity.
 */
async function analyzeRealCodebase(context: any, stats: any, repoPath: string) {
  const analysis: any = {
    builtFeatures: [],
    missingFeatures: [],
    confidence: 'high',
    specificInsights: []
  };
  // List every Next.js page as a relative route path (e.g. "project/[id]/audit").
  const { stdout: pagesListOutput } = await execAsync(
    `cd "${repoPath}" && find vibn-frontend/app -name "page.tsx" | sed 's|vibn-frontend/app/||' | sed 's|/page.tsx||'`
  );
  const actualPages = pagesListOutput.trim().split('\n').filter(p => p);
  // List every API route the same way.
  const { stdout: apiListOutput } = await execAsync(
    `cd "${repoPath}" && find vibn-frontend/app/api -name "route.ts" | sed 's|vibn-frontend/app/api/||' | sed 's|/route.ts||'`
  );
  const actualAPIs = apiListOutput.trim().split('\n').filter(a => a);
  // Analyze based on vision: "VIBN gets your project to v1 launch and beyond"
  const vision = context.project.vision || '';
  // (Removed an unused `visionKeywords` lookup table that was never read.)
  // --- Built features, inferred from files actually present on disk ---
  if (actualPages.some(p => p.includes('plan') || p.includes('getting-started'))) {
    analysis.builtFeatures.push({
      name: 'Project Planning System',
      evidence: actualPages.filter(p => p.includes('plan')).slice(0, 3),
      status: 'built'
    });
  }
  if (actualPages.some(p => p.includes('v_ai_chat')) && actualAPIs.some(a => a.includes('ai/chat'))) {
    analysis.builtFeatures.push({
      name: 'AI Chat Interface',
      evidence: ['v_ai_chat page', 'ai/chat API'],
      status: 'built'
    });
  }
  if (actualAPIs.some(a => a.includes('github/'))) {
    analysis.builtFeatures.push({
      name: 'GitHub Integration',
      evidence: actualAPIs.filter(a => a.includes('github/')),
      status: 'built'
    });
  }
  if (actualAPIs.some(a => a.includes('cursor/'))) {
    analysis.builtFeatures.push({
      name: 'Cursor History Import',
      evidence: actualAPIs.filter(a => a.includes('cursor/')),
      status: 'built'
    });
  }
  if (actualAPIs.some(a => a.includes('sessions/'))) {
    analysis.builtFeatures.push({
      name: 'Session Tracking',
      evidence: ['sessions/track', 'sessions/associate-project'],
      status: 'built'
    });
  }
  if (actualPages.some(p => p.includes('audit'))) {
    analysis.builtFeatures.push({
      name: 'Project Audit Report',
      evidence: ['audit page', 'audit/generate API'],
      status: 'built'
    });
  }
  // --- Gaps: features named in the vision with no matching files ---
  if (vision.includes('marketing automation') && !actualPages.some(p => p.includes('marketing'))) {
    analysis.missingFeatures.push({
      name: 'Marketing Automation UI',
      reason: 'Mentioned in vision but no UI found',
      priority: 'high'
    });
  }
  if (vision.includes('communication automation')) {
    const hasCommAutomation = actualAPIs.some(a =>
      a.includes('email') || a.includes('slack') || a.includes('notification')
    );
    if (!hasCommAutomation) {
      analysis.missingFeatures.push({
        name: 'Communication Automation',
        reason: 'Mentioned in vision but no APIs found',
        priority: 'high'
      });
    }
  }
  // Production readiness: expect a health/status endpoint for monitoring.
  if (!actualAPIs.some(a => a.includes('health') || a.includes('status'))) {
    analysis.missingFeatures.push({
      name: 'Health Check Endpoint',
      reason: 'Needed for production monitoring',
      priority: 'medium'
    });
  }
  // Onboarding: built if any getting-started/onboarding page exists.
  const hasOnboarding = actualPages.some(p => p.includes('getting-started') || p.includes('onboarding'));
  if (hasOnboarding) {
    analysis.builtFeatures.push({
      name: 'User Onboarding Flow',
      evidence: actualPages.filter(p => p.includes('getting-started')),
      status: 'built'
    });
  } else {
    analysis.missingFeatures.push({
      name: 'User Onboarding Tutorial',
      reason: 'Critical for first-time users',
      priority: 'high'
    });
  }
  // Task management: plan APIs exist but no UI surfaces them.
  const hasTaskUI = actualPages.some(p => p.includes('task') || p.includes('checklist') || p.includes('todo'));
  if (!hasTaskUI && actualAPIs.some(a => a.includes('plan/'))) {
    analysis.missingFeatures.push({
      name: 'Task Management UI',
      reason: 'Have plan APIs but no UI to track tasks',
      priority: 'high'
    });
  }
  // Specific insights from commit history
  const recentCommits = context.codebase?.topFiles || [];
  if (recentCommits.length > 0) {
    analysis.specificInsights.push(
      `Recently worked on: ${recentCommits.slice(0, 3).map((f: any) => f.filePath.split('/').pop()).join(', ')}`
    );
  }
  // Activity insights
  const topFiles = context.activity?.topEditedFiles || [];
  if (topFiles.length > 0) {
    const topFile = topFiles[0].file.split('/').pop();
    analysis.specificInsights.push(`Most edited: ${topFile} (${topFiles[0].count} times)`);
  }
  return analysis;
}
/**
 * Turn the codebase analysis into a concrete launch plan with up to three
 * categories: missing product work, polish of what's already built, and
 * always-on launch-readiness tasks. Gap names must match those emitted by
 * analyzeRealCodebase.
 */
function generateIntelligentPlan(context: any, analysis: any) {
  const plan = {
    summary: `Based on ${analysis.builtFeatures.length} built features and ${analysis.missingFeatures.length} identified gaps`,
    categories: [] as any[]
  };
  // Task blueprint per known gap name; status/priority are filled per gap.
  const gapBlueprints = new Map<string, { id: string; title: string; description: string; specificTo: string }>([
    ['Task Management UI', {
      id: 'prod-task-ui',
      title: 'Build Task Management UI',
      description: 'You have plan/simulate API but no UI. Create a checklist interface to show and track V1 launch tasks.',
      specificTo: 'Your codebase has the backend but missing frontend'
    }],
    ['Marketing Automation UI', {
      id: 'prod-mkt-ui',
      title: 'Build Marketing Automation Dashboard',
      description: 'Your vision mentions marketing automation. Create UI for /plan/marketing API to manage campaigns.',
      specificTo: 'Mentioned in your vision statement'
    }],
    ['Communication Automation', {
      id: 'prod-comm-auto',
      title: 'Add Communication Automation',
      description: 'Build email/Slack notification system for project updates and milestones.',
      specificTo: 'Core to your vision: "communication automation"'
    }],
    ['User Onboarding Tutorial', {
      id: 'prod-onboard',
      title: 'Create Interactive Onboarding',
      description: 'Guide new users through: 1) New vs existing project, 2) GitHub connect, 3) Run Cursor import, 4) Define vision.',
      specificTo: 'Your vision flow from earlier conversation'
    }]
  ]);
  // Product Completion: one task per recognized gap, in gap order.
  const productTasks: any[] = [];
  for (const gap of analysis.missingFeatures) {
    const blueprint = gapBlueprints.get(gap.name);
    if (!blueprint) {
      continue;
    }
    productTasks.push({
      id: blueprint.id,
      title: blueprint.title,
      description: blueprint.description,
      status: 'pending',
      priority: gap.priority,
      specificTo: blueprint.specificTo
    });
  }
  if (productTasks.length > 0) {
    plan.categories.push({
      name: 'Product Completion',
      status: 'in_progress',
      description: 'Missing features identified from your codebase and vision',
      tasks: productTasks
    });
  }
  // Polish & Integration: wire existing planning APIs to their pages.
  const polishTasks: any[] = [];
  for (const feature of analysis.builtFeatures) {
    if (feature.name !== 'Project Planning System') {
      continue;
    }
    polishTasks.push({
      id: 'polish-plan',
      title: 'Connect Planning APIs to UI',
      description: `You have /plan/mvp, /plan/marketing, /plan/simulate APIs. Ensure they're all wired to your ${feature.evidence.length} planning pages.`,
      status: 'in_progress',
      priority: 'high',
      specificTo: `Found ${feature.evidence.length} planning pages in your codebase`
    });
  }
  if (polishTasks.length > 0) {
    plan.categories.push({
      name: 'Polish & Integration',
      status: 'in_progress',
      description: 'Connect your existing features together',
      tasks: polishTasks
    });
  }
  // Launch Preparation is always present.
  plan.categories.push({
    name: 'Launch Preparation',
    status: 'pending',
    description: 'Get ready for public launch',
    tasks: [
      {
        id: 'launch-monitoring',
        title: 'Add Production Monitoring',
        description: `Add health check endpoint and error tracking for your ${context.codebase?.totalCommits} commits of code.`,
        status: 'pending',
        priority: 'high',
        specificTo: 'Your 104k lines of code need monitoring'
      },
      {
        id: 'launch-docs',
        title: 'Document All Features',
        description: `Create docs for your ${analysis.builtFeatures.length} built features: ${analysis.builtFeatures.map((f: any) => f.name).join(', ')}.`,
        status: 'pending',
        priority: 'medium',
        specificTo: `Specific to your ${analysis.builtFeatures.length} features`
      },
      {
        id: 'launch-demo',
        title: 'Create Demo Video',
        description: 'Show: GitHub import → Cursor analysis → AI chat → Launch plan. Highlight your unique value.',
        status: 'pending',
        priority: 'high',
        specificTo: 'Your specific user journey'
      }
    ]
  });
  return plan;
}

View File

@@ -0,0 +1,30 @@
import { NextResponse } from 'next/server';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { runMarketingPlanning } from '@/lib/ai/marketing-agent';
/**
 * POST /api/projects/[projectId]/plan/marketing
 * Runs the marketing-planning agent against Gemini and returns its plan.
 */
export async function POST(
  _request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const marketingPlan = await runMarketingPlanning(projectId, new GeminiLlmClient());
    return NextResponse.json({ marketingPlan });
  } catch (err) {
    console.error('[plan/marketing] Failed to generate marketing plan', err);
    const details = err instanceof Error ? err.message : String(err);
    return NextResponse.json(
      { error: 'Failed to generate marketing plan', details },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,30 @@
import { NextResponse } from 'next/server';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { runMvpPlanning } from '@/lib/ai/mvp-agent';
/**
 * POST /api/projects/[projectId]/plan/mvp
 * Runs the MVP-planning agent against Gemini and returns its plan.
 */
export async function POST(
  _request: Request,
  { params }: { params: Promise<{ projectId: string }> },
) {
  try {
    const { projectId } = await params;
    if (!projectId) {
      return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
    }
    const mvpPlan = await runMvpPlanning(projectId, new GeminiLlmClient());
    return NextResponse.json({ mvpPlan });
  } catch (err) {
    console.error('[plan/mvp] Failed to generate MVP plan', err);
    const details = err instanceof Error ? err.message : String(err);
    return NextResponse.json(
      { error: 'Failed to generate MVP plan', details },
      { status: 500 },
    );
  }
}

View File

@@ -0,0 +1,403 @@
import { NextRequest, NextResponse } from 'next/server';
import { getApiUrl } from '@/lib/utils/api-url';
/**
 * Simulates AI-powered V1 Launch Planning.
 * Loads the full project context, runs the heuristic analysis, and returns
 * the launch plan together with a compact historical-data summary.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Pull the aggregated context for this project.
    const contextResponse = await fetch(
      getApiUrl(`/api/projects/${projectId}/context`, request)
    );
    if (!contextResponse.ok) {
      return NextResponse.json(
        { error: 'Failed to load project context' },
        { status: 500 }
      );
    }
    const context = await contextResponse.json();
    // Heuristic "AI" analysis followed by plan generation.
    const aiAnalysis = analyzeProjectForV1Launch(context);
    const launchPlan = generateV1LaunchPlan(context, aiAnalysis);
    const historicalData = {
      totalDays: context.timeline.dateRange.totalDays,
      activeDays: context.timeline.dateRange.activeDays,
      commits: context.codebase.totalCommits,
      sessions: context.activity.totalSessions,
      messages: context.timeline.dataSources.cursor.totalMessages
    };
    return NextResponse.json({
      projectContext: {
        name: context.project.name,
        vision: context.project.vision,
        historicalData
      },
      aiAnalysis,
      launchPlan,
      nextSteps: generateNextSteps(launchPlan)
    });
  } catch (error) {
    console.error('Error simulating launch plan:', error);
    const details = error instanceof Error ? error.message : String(error);
    return NextResponse.json(
      { error: 'Failed to simulate launch plan', details },
      { status: 500 }
    );
  }
}
// Analyze project state for V1 launch readiness
// Explicit result shape — without it the empty array literals infer as
// never[] under strict TS and every push(string) below is a type error.
type V1ReadinessAnalysis = {
  currentState: string;
  strengths: string[];
  gaps: string[];
  estimatedCompleteness: number;
  recommendations: string[];
};

/**
 * Analyzes project state for V1 launch readiness.
 * Scores completeness 0-100 from four coarse signals (vision, commits,
 * sessions, documents) and collects human-readable strengths/gaps.
 */
function analyzeProjectForV1Launch(context: any): V1ReadinessAnalysis {
  const analysis: V1ReadinessAnalysis = {
    currentState: determineCurrentState(context),
    strengths: [],
    gaps: [],
    estimatedCompleteness: 0,
    recommendations: []
  };

  // Analyze codebase maturity — report the project's real figures instead of
  // the previously hard-coded "63 commits" / "~104k lines" sample values.
  if (context.codebase.totalCommits > 50) {
    analysis.strengths.push(`Active development with ${context.codebase.totalCommits} commits`);
  }
  if (context.codebase.totalLinesAdded > 100000) {
    analysis.strengths.push(`Substantial codebase (~${Math.round(context.codebase.totalLinesAdded / 1000)}k lines added)`);
  }

  // Analyze development activity
  if (context.activity.totalSessions > 100) {
    analysis.strengths.push(`Consistent development (${context.activity.totalSessions} sessions)`);
  }

  // Check for gaps
  if (!context.project.vision) {
    analysis.gaps.push('Product vision not documented');
  } else {
    analysis.strengths.push('Clear product vision defined');
  }
  if (context.documents.length === 0) {
    analysis.gaps.push('No documentation uploaded (specs, PRDs, designs)');
  }

  // Check Git history span
  const daysSinceStart = context.timeline.dateRange.totalDays;
  if (daysSinceStart < 30) {
    analysis.gaps.push('Project is in early stages (< 30 days old)');
  } else if (daysSinceStart > 90) {
    analysis.strengths.push('Mature project (90+ days of development)');
  }

  // Estimate completeness (fixed weights: 20 + 40 + 20 + 20 = 100 max).
  const hasVision = context.project.vision ? 20 : 0;
  const hasCode = context.codebase.totalCommits > 20 ? 40 : 20;
  const hasActivity = context.activity.totalSessions > 50 ? 20 : 10;
  const hasDocs = context.documents.length > 0 ? 20 : 0;
  analysis.estimatedCompleteness = hasVision + hasCode + hasActivity + hasDocs;

  // Generate recommendations by completeness band.
  if (analysis.estimatedCompleteness < 60) {
    analysis.recommendations.push('Focus on core functionality before launch');
    analysis.recommendations.push('Document key features and user flows');
  } else if (analysis.estimatedCompleteness < 80) {
    analysis.recommendations.push('Prepare for beta testing');
    analysis.recommendations.push('Set up monitoring and analytics');
  } else {
    analysis.recommendations.push('Ready for soft launch preparation');
  }

  return analysis;
}

/**
 * Determines a coarse project lifecycle label from commit count and age.
 * Thresholds are heuristic (20/50/100 commits, 60 days).
 */
function determineCurrentState(context: any): string {
  const commits = context.codebase.totalCommits;
  const days = context.timeline.dateRange.totalDays;
  if (commits < 20) return 'Initial Development';
  if (commits < 50) return 'Alpha Stage';
  if (commits < 100 && days < 60) return 'Active Development';
  return 'Pre-Launch';
}
// Generate the V1 launch checklist.
// Produces six fixed categories of tasks. Most task statuses are static
// placeholders ('pending'); a few are inferred from coarse repo signals
// (commit count, lines added). NOTE(review): the numeric thresholds
// (40 commits, 50k lines, 30 commits) look heuristic — confirm with owners.
function generateV1LaunchPlan(context: any, analysis: any) {
  const plan = {
    phase: analysis.currentState,
    estimatedCompletion: `${analysis.estimatedCompleteness}%`,
    categories: [
      {
        name: 'Product Development',
        // Category flips to in_progress once overall readiness passes 60.
        status: analysis.estimatedCompleteness > 60 ? 'in_progress' : 'pending',
        tasks: [
          {
            id: 'pd-1',
            title: 'Core Feature Implementation',
            status: context.codebase.totalCommits > 40 ? 'complete' : 'in_progress',
            description: 'Build primary user-facing features',
            dependencies: []
          },
          {
            id: 'pd-2',
            title: 'User Authentication & Authorization',
            status: 'in_progress',
            description: 'Secure login, signup, and permission system',
            dependencies: ['pd-1']
          },
          {
            id: 'pd-3',
            title: 'Database Schema & Models',
            status: context.codebase.totalLinesAdded > 50000 ? 'complete' : 'in_progress',
            description: 'Define data structures and relationships',
            dependencies: []
          },
          {
            id: 'pd-4',
            title: 'API Endpoints',
            status: 'in_progress',
            description: 'REST/GraphQL APIs for frontend communication',
            dependencies: ['pd-3']
          },
          {
            id: 'pd-5',
            title: 'Error Handling & Logging',
            status: 'pending',
            description: 'Comprehensive error management and monitoring',
            dependencies: ['pd-4']
          }
        ]
      },
      {
        name: 'Testing & Quality',
        status: 'pending',
        tasks: [
          {
            id: 'tq-1',
            title: 'Unit Tests',
            status: 'pending',
            description: 'Test individual components and functions',
            dependencies: ['pd-1']
          },
          {
            id: 'tq-2',
            title: 'Integration Tests',
            status: 'pending',
            description: 'Test system interactions',
            dependencies: ['pd-4']
          },
          {
            id: 'tq-3',
            title: 'User Acceptance Testing',
            status: 'pending',
            description: 'Beta testing with real users',
            dependencies: ['tq-1', 'tq-2']
          },
          {
            id: 'tq-4',
            title: 'Performance Testing',
            status: 'pending',
            description: 'Load testing and optimization',
            dependencies: ['tq-2']
          }
        ]
      },
      {
        name: 'Documentation',
        // Uploaded documents are taken as evidence docs work has started.
        status: context.documents.length > 0 ? 'in_progress' : 'pending',
        tasks: [
          {
            id: 'doc-1',
            title: 'User Guide',
            status: 'pending',
            description: 'End-user documentation',
            dependencies: ['pd-1']
          },
          {
            id: 'doc-2',
            title: 'API Documentation',
            status: 'pending',
            description: 'Developer-facing API docs',
            dependencies: ['pd-4']
          },
          {
            id: 'doc-3',
            title: 'Onboarding Flow',
            status: 'pending',
            description: 'New user tutorial and setup',
            dependencies: ['doc-1']
          }
        ]
      },
      {
        name: 'Infrastructure',
        status: 'in_progress',
        tasks: [
          {
            id: 'infra-1',
            title: 'Production Environment Setup',
            status: context.codebase.totalCommits > 30 ? 'complete' : 'in_progress',
            description: 'Deploy to production servers',
            dependencies: []
          },
          {
            id: 'infra-2',
            title: 'CI/CD Pipeline',
            status: 'pending',
            description: 'Automated testing and deployment',
            dependencies: ['infra-1']
          },
          {
            id: 'infra-3',
            title: 'Monitoring & Alerts',
            status: 'pending',
            description: 'System health monitoring',
            dependencies: ['infra-1']
          },
          {
            id: 'infra-4',
            title: 'Backup & Recovery',
            status: 'pending',
            description: 'Data backup strategy',
            dependencies: ['infra-1']
          }
        ]
      },
      {
        name: 'Marketing & Launch',
        status: 'pending',
        tasks: [
          {
            id: 'mkt-1',
            title: 'Landing Page',
            status: 'pending',
            description: 'Public-facing marketing site',
            dependencies: []
          },
          {
            id: 'mkt-2',
            title: 'Email Marketing Setup',
            status: 'pending',
            description: 'Email campaigns and automation',
            dependencies: ['mkt-1']
          },
          {
            id: 'mkt-3',
            title: 'Analytics Integration',
            status: 'pending',
            description: 'Track user behavior and metrics',
            dependencies: ['pd-1']
          },
          {
            id: 'mkt-4',
            title: 'Launch Strategy',
            status: 'pending',
            description: 'Product Hunt, social media, PR',
            dependencies: ['mkt-1', 'doc-1']
          }
        ]
      },
      {
        name: 'Legal & Compliance',
        status: 'pending',
        tasks: [
          {
            id: 'legal-1',
            title: 'Privacy Policy',
            status: 'pending',
            description: 'GDPR/CCPA compliant privacy policy',
            dependencies: []
          },
          {
            id: 'legal-2',
            title: 'Terms of Service',
            status: 'pending',
            description: 'User agreement and terms',
            dependencies: []
          },
          {
            id: 'legal-3',
            title: 'Security Audit',
            status: 'pending',
            description: 'Third-party security review',
            dependencies: ['pd-5']
          }
        ]
      }
    ],
    // Fixed milestone ladder; only the total day estimate varies with readiness.
    timeline: {
      estimated_days_to_v1: calculateEstimatedDays(analysis),
      recommended_milestones: [
        {
          name: 'Alpha Release',
          description: 'Internal testing with core features',
          target: 'Week 1-2'
        },
        {
          name: 'Beta Release',
          description: 'Limited external user testing',
          target: 'Week 3-4'
        },
        {
          name: 'Soft Launch',
          description: 'Public but limited announcement',
          target: 'Week 5-6'
        },
        {
          name: 'V1 Launch',
          description: 'Full public launch',
          target: 'Week 7-8'
        }
      ]
    }
  };
  return plan;
}
// Calculate estimated days to V1
// Maps a readiness score (0-100) to a rough calendar estimate in days.
function calculateEstimatedDays(analysis: any): number {
  const score = analysis.estimatedCompleteness;
  // Bands are exclusive at the lower bound: >80, >60, >40, else.
  const bands: Array<[number, number]> = [
    [80, 14], // ~2 weeks
    [60, 30], // ~1 month
    [40, 60], // ~2 months
  ];
  for (const [threshold, days] of bands) {
    if (score > threshold) return days;
  }
  return 90; // ~3 months
}
// Generate immediate next steps
// Picks the first actionable (pending or in-progress) task from each
// category, in category order, capped at five suggestions.
function generateNextSteps(plan: any) {
  const MAX_STEPS = 5;
  const nextSteps: Array<{
    category: string;
    task: string;
    priority: number;
    description: string;
  }> = [];

  for (const category of plan.categories) {
    if (nextSteps.length >= MAX_STEPS) break;
    const actionable = category.tasks.find(
      (t: any) => t.status === 'pending' || t.status === 'in_progress'
    );
    if (!actionable) continue;
    nextSteps.push({
      category: category.name,
      task: actionable.title,
      // Priority is simply the 1-based position in the suggestion list.
      priority: nextSteps.length + 1,
      description: actionable.description,
    });
  }
  return nextSteps;
}

View File

@@ -0,0 +1,199 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { GeminiLlmClient } from '@/lib/ai/gemini-client';
import { z } from 'zod';
// Structured output contract for the market-research LLM call.
// The Gemini client validates the model's JSON against this schema.
const MarketResearchSchema = z.object({
  // Underserved segments where the product could win.
  targetNiches: z.array(z.object({
    name: z.string(),
    description: z.string(),
    marketSize: z.string(),
    competitionLevel: z.enum(['low', 'medium', 'high']),
    opportunity: z.string(),
  })),
  // Named competitors with a SWOT-style breakdown.
  competitors: z.array(z.object({
    name: z.string(),
    positioning: z.string(),
    strengths: z.array(z.string()),
    weaknesses: z.array(z.string()),
  })),
  // Unmet needs in the market, each with an impact rating and rationale.
  marketGaps: z.array(z.object({
    gap: z.string(),
    impact: z.enum(['low', 'medium', 'high']),
    reasoning: z.string(),
  })),
  recommendations: z.array(z.string()),
  // Citations the model claims to have used (free-form strings, unverified).
  sources: z.array(z.string()),
});
/**
 * POST /api/projects/[projectId]/market-research
 *
 * Runs an LLM-backed market-research pass for a project:
 * 1. verifies the caller's Firebase ID token,
 * 2. builds an idea summary from the project's canonical model / vision,
 * 3. asks Gemini for a structured analysis (MarketResearchSchema),
 * 4. persists the full result to `marketResearch` and denormalizes each
 *    niche and gap into the `knowledge` collection for vector search.
 *
 * Returns 401 on auth failure, 404 when the project is missing, 500 otherwise.
 */
export async function POST(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Require a Bearer token and verify it with Firebase Admin.
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }
    const token = authHeader.substring(7);
    const auth = getAdminAuth();
    const decoded = await auth.verifyIdToken(token);
    if (!decoded?.uid) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }
    // Get project data
    const adminDb = getAdminDb();
    const projectRef = adminDb.collection('projects').doc(projectId);
    const projectDoc = await projectRef.get();
    if (!projectDoc.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectDoc.data();
    const productVision = projectData?.productVision || '';
    const productName = projectData?.productName || '';
    const phaseData = projectData?.phaseData || {};
    const canonicalModel = phaseData.canonicalProductModel || {};
    // Build context for the agent: prefer the canonical one-liner, then the
    // vision, then a generic fallback built from the product name.
    const ideaContext = canonicalModel.oneLiner || productVision ||
      `${productName}: Help users build and launch products faster`;
    console.log('[Market Research] Starting research for:', ideaContext);
    // Initialize LLM client
    const llm = new GeminiLlmClient();
    // Conduct market research using the agent
    const systemPrompt = `You are a market research analyst specializing in finding product-market fit and identifying underserved niches.
Your task is to analyze the given product idea and conduct comprehensive market research to:
1. Identify specific target niches that would benefit most from this product
2. Analyze competitors and their positioning
3. Find market gaps and opportunities
4. Provide actionable recommendations
Be specific, data-driven, and focused on actionable insights.`;
    const userPrompt = `Analyze this product idea and conduct market research:
Product Idea: "${ideaContext}"
${canonicalModel.problem ? `Problem Being Solved: ${canonicalModel.problem}` : ''}
${canonicalModel.targetUser ? `Target User: ${canonicalModel.targetUser}` : ''}
${canonicalModel.coreSolution ? `Core Solution: ${canonicalModel.coreSolution}` : ''}
Provide a comprehensive market research analysis including:
- Target niches with high potential
- Competitor analysis
- Market gaps and opportunities
- Strategic recommendations
Focus on finding specific, underserved niches where this product can win.`;
    // NOTE(review): model id 'gemini' — confirm GeminiLlmClient maps this
    // to a concrete model version. Temperature 0.7 trades determinism for
    // variety in the generated research.
    const research = await llm.structuredCall({
      model: 'gemini',
      systemPrompt,
      messages: [
        {
          role: 'user',
          content: userPrompt,
        },
      ],
      schema: MarketResearchSchema,
      temperature: 0.7,
    });
    console.log('[Market Research] Research completed:', {
      niches: research.targetNiches.length,
      competitors: research.competitors.length,
      gaps: research.marketGaps.length,
    });
    // Store research results in Firestore (auto-generated doc id).
    const researchRef = adminDb.collection('marketResearch').doc();
    await researchRef.set({
      id: researchRef.id,
      projectId,
      userId: decoded.uid,
      research,
      ideaContext,
      createdAt: new Date(),
      updatedAt: new Date(),
    });
    // Also store as knowledge items for vector search. Writes are issued in
    // parallel and awaited together below; a failure of any one rejects all.
    const knowledgePromises = [];
    // Store each niche as a knowledge item
    for (const niche of research.targetNiches) {
      const nicheRef = adminDb.collection('knowledge').doc();
      knowledgePromises.push(
        nicheRef.set({
          id: nicheRef.id,
          projectId,
          userId: decoded.uid,
          sourceType: 'research',
          title: `Target Niche: ${niche.name}`,
          content: `${niche.description}\n\nMarket Size: ${niche.marketSize}\nCompetition: ${niche.competitionLevel}\n\nOpportunity: ${niche.opportunity}`,
          sourceMeta: {
            origin: 'vibn',
            researchType: 'market_niche',
            researchId: researchRef.id,
          },
          createdAt: new Date(),
          updatedAt: new Date(),
        })
      );
    }
    // Store market gaps
    for (const gap of research.marketGaps) {
      const gapRef = adminDb.collection('knowledge').doc();
      knowledgePromises.push(
        gapRef.set({
          id: gapRef.id,
          projectId,
          userId: decoded.uid,
          sourceType: 'research',
          // Title is truncated to keep list views readable.
          title: `Market Gap: ${gap.gap.substring(0, 50)}`,
          content: `${gap.gap}\n\nImpact: ${gap.impact}\n\nReasoning: ${gap.reasoning}`,
          sourceMeta: {
            origin: 'vibn',
            researchType: 'market_gap',
            researchId: researchRef.id,
          },
          createdAt: new Date(),
          updatedAt: new Date(),
        })
      );
    }
    await Promise.all(knowledgePromises);
    console.log('[Market Research] Stored', knowledgePromises.length, 'knowledge items');
    return NextResponse.json({
      success: true,
      research,
      researchId: researchRef.id,
      knowledgeItemsCreated: knowledgePromises.length,
    });
  } catch (error) {
    console.error('[Market Research] Error:', error);
    return NextResponse.json(
      {
        error: 'Failed to conduct market research',
        details: error instanceof Error ? error.message : 'Unknown error'
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,115 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
export async function GET(
request: Request,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
// Authentication (skip in development if no auth header)
const authHeader = request.headers.get('Authorization');
const isDevelopment = process.env.NODE_ENV === 'development';
if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const token = authHeader.substring(7);
const auth = getAdminAuth();
const decoded = await auth.verifyIdToken(token);
if (!decoded?.uid) {
return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
}
}
// Fetch project from Firestore
const adminDb = getAdminDb();
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const projectData = projectDoc.data();
return NextResponse.json({
success: true,
project: {
id: projectDoc.id,
...projectData,
},
});
} catch (error) {
console.error('[API /projects/:id] Error fetching project:', error);
return NextResponse.json(
{
error: 'Failed to fetch project',
details: error instanceof Error ? error.message : String(error)
},
{ status: 500 }
);
}
}
/**
 * PATCH /api/projects/[projectId]
 * Partially updates a project document. Only the whitelisted fields
 * (vision, description, name, githubRepo) are written; `updatedAt` is
 * always refreshed. Returns the list of updated field names.
 */
export async function PATCH(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Parse the JSON body explicitly: a malformed body is a client error
    // (400), not the generic 500 it previously fell through to.
    let body: any;
    try {
      body = await request.json();
    } catch {
      return NextResponse.json({ error: 'Invalid JSON body' }, { status: 400 });
    }

    // Authentication (skip in development if no auth header)
    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const token = authHeader.substring(7);
      const auth = getAdminAuth();
      const decoded = await auth.verifyIdToken(token);
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // Update project in Firestore — only copy fields that were provided.
    const adminDb = getAdminDb();
    const updateData: Record<string, unknown> = {};
    if (body.vision !== undefined) updateData.vision = body.vision;
    if (body.description !== undefined) updateData.description = body.description;
    if (body.name !== undefined) updateData.name = body.name;
    if (body.githubRepo !== undefined) updateData.githubRepo = body.githubRepo;
    updateData.updatedAt = new Date().toISOString();

    await adminDb.collection('projects').doc(projectId).update(updateData);

    return NextResponse.json({
      success: true,
      message: 'Project updated successfully',
      updated: Object.keys(updateData)
    });
  } catch (error) {
    console.error('[API /projects/:id] Error updating project:', error);
    return NextResponse.json(
      {
        error: 'Failed to update project',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,62 @@
/**
* Manual Extraction Trigger
*
* Endpoint to manually run backend extraction for a project.
* Useful for testing or re-running extraction.
*/
import { NextResponse } from 'next/server';
import { getAdminAuth } from '@/lib/firebase/admin';
import { runBackendExtractionForProject } from '@/lib/server/backend-extractor';
export const maxDuration = 300; // 5 minutes for extraction
export async function POST(
request: Request,
context: { params: Promise<{ projectId: string }> | { projectId: string } }
) {
try {
// Verify auth
const authHeader = request.headers.get('Authorization');
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const idToken = authHeader.split('Bearer ')[1];
const adminAuth = getAdminAuth();
try {
await adminAuth.verifyIdToken(idToken);
} catch (error) {
return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
}
// Handle async params
const params = 'then' in context.params ? await context.params : context.params;
const projectId = params.projectId;
if (!projectId) {
return NextResponse.json({ error: 'Missing projectId' }, { status: 400 });
}
console.log(`[API] Manual extraction triggered for project ${projectId}`);
// Run extraction
await runBackendExtractionForProject(projectId);
return NextResponse.json({
success: true,
message: 'Extraction completed successfully',
});
} catch (error) {
console.error('[API] Extraction failed:', error);
return NextResponse.json(
{
error: 'Extraction failed',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,70 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminStorage } from '@/lib/firebase/admin';
/**
 * GET /api/projects/[projectId]/storage/files
 * Lists up to 100 objects under `projects/{projectId}/` in Firebase Storage.
 * Storage failures degrade to an empty list (e.g. missing admin credentials)
 * rather than a 500. Dev requests without an auth header skip verification.
 */
export async function GET(
  request: Request,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    const authHeader = request.headers.get('Authorization');
    const isDevelopment = process.env.NODE_ENV === 'development';
    if (!isDevelopment || authHeader?.startsWith('Bearer ')) {
      if (!authHeader?.startsWith('Bearer ')) {
        return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
      }
      const decoded = await getAdminAuth().verifyIdToken(authHeader.substring(7));
      if (!decoded?.uid) {
        return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
      }
    }

    // Enumerate this project's storage objects, mapping each to a flat
    // metadata record (display name = last path segment).
    let entries: unknown[] = [];
    try {
      const bucket = getAdminStorage().bucket();
      const [objects] = await bucket.getFiles({
        prefix: `projects/${projectId}/`,
        maxResults: 100,
      });
      entries = objects.map((obj) => ({
        name: obj.name.split('/').pop() || obj.name,
        fullPath: obj.name,
        size: obj.metadata.size,
        contentType: obj.metadata.contentType,
        timeCreated: obj.metadata.timeCreated,
        updated: obj.metadata.updated,
      }));
      console.log('[API /storage/files] Found', entries.length, 'files');
    } catch (storageError) {
      console.error('[API /storage/files] Firebase Storage query failed:', storageError);
      console.error('[API /storage/files] This is likely due to missing Firebase Admin credentials');
      // Return empty array instead of failing
      entries = [];
    }

    return NextResponse.json({
      success: true,
      files: entries,
      count: entries.length,
    });
  } catch (error) {
    console.error('[API] Error fetching storage files:', error);
    return NextResponse.json(
      { error: 'Failed to fetch storage files' },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,310 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
import { getApiUrl } from '@/lib/utils/api-url';
/**
* Timeline View Data
* Structures MVP checklist pages with their development sessions on a timeline
*/
/**
 * GET /api/projects/[projectId]/timeline-view
 *
 * Builds timeline data for the MVP checklist: each checklist page becomes a
 * work item annotated with the extension sessions and git commits that look
 * related to it (matched by path/title/evidence substrings), plus an overall
 * date range and summary counts.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;
    // Load project data, MVP checklist, git history, and activity in parallel.
    // NOTE(review): the three fetch responses are .json()'d without checking
    // .ok — a failing sub-endpoint surfaces here as a parse error / 500.
    const db = admin.firestore();
    const projectRef = db.collection('projects').doc(projectId);
    const [projectDoc, checklistResponse, gitResponse, activityResponse] = await Promise.all([
      projectRef.get(),
      fetch(getApiUrl(`/api/projects/${projectId}/mvp-checklist`, request)),
      fetch(getApiUrl(`/api/projects/${projectId}/git-history`, request)),
      fetch(getApiUrl(`/api/projects/${projectId}/activity`, request))
    ]);
    const projectData = projectDoc.exists ? projectDoc.data() : null;
    const checklist = await checklistResponse.json();
    const git = await gitResponse.json();
    const activity = await activityResponse.json();
    // No checklist yet: return an empty-but-well-formed payload so the UI can
    // render a call-to-action instead of erroring.
    if (!checklist || checklist.error || !checklist.mvpChecklist || !Array.isArray(checklist.mvpChecklist)) {
      return NextResponse.json({
        workItems: [],
        timeline: {
          start: new Date().toISOString(),
          end: new Date().toISOString(),
          totalDays: 0
        },
        summary: {
          totalWorkItems: 0,
          withActivity: 0,
          noActivity: 0,
          built: 0,
          missing: 0
        },
        projectCreator: projectData?.createdBy || projectData?.owner || 'You',
        message: 'No MVP checklist generated yet. Click "Regenerate Plan" to create one.'
      });
    }
    // Build a lightweight event stream merging git commits and extension
    // sessions; the findRelated* helpers filter it per work item.
    const history = {
      chronologicalEvents: [
        // Add git commits
        ...(git.commits || []).map((commit: any) => ({
          type: 'git_commit',
          timestamp: new Date(commit.date).toISOString(),
          data: {
            hash: commit.hash,
            message: commit.message,
            filesChanged: commit.filesChanged,
            insertions: commit.insertions,
            deletions: commit.deletions
          }
        })),
        // Add extension sessions
        ...(activity.sessions || []).map((session: any) => ({
          type: 'extension_session',
          timestamp: session.startTime,
          data: {
            duration: session.duration,
            filesModified: session.filesModified
          }
        }))
      ]
    };
    // Map checklist pages to work items, attaching the matched activity and
    // its date bounds (null when the page has no activity at all).
    const workItems = [];
    for (const category of checklist.mvpChecklist) {
      for (const item of category.pages) {
        const relatedSessions = findRelatedSessions(item, history);
        const relatedCommits = findRelatedCommits(item, history);
        const hasActivity = relatedSessions.length > 0 || relatedCommits.length > 0;
        const startDate = hasActivity
          ? getEarliestDate([...relatedSessions, ...relatedCommits])
          : null;
        const endDate = hasActivity
          ? getLatestDate([...relatedSessions, ...relatedCommits])
          : null;
        workItems.push({
          // Slug id: "category-title" with whitespace collapsed to hyphens.
          id: `${category.category.toLowerCase().replace(/\s+/g, '-')}-${item.title.toLowerCase().replace(/\s+/g, '-')}`,
          title: item.title,
          category: category.category,
          path: item.path,
          status: item.status,
          priority: item.priority,
          startDate,
          endDate,
          duration: calculateDuration(startDate, endDate),
          sessionsCount: relatedSessions.length,
          commitsCount: relatedCommits.length,
          totalActivity: relatedSessions.length + relatedCommits.length,
          sessions: relatedSessions,
          commits: relatedCommits,
          requirements: generateRequirements(item, { name: category.category }),
          evidence: item.evidence || [],
          note: item.note
        });
      }
    }
    // Sort by category order and status
    // Priority: Core Features -> Marketing -> Social -> Content -> Settings
    // NOTE(review): categories not in this list get indexOf() === -1 and sort
    // before 'Core Features' — confirm that's intended.
    const categoryOrder = [
      'Core Features',
      'Marketing',
      'Social',
      'Content',
      'Settings'
    ];
    workItems.sort((a, b) => {
      // First by category
      const catCompare = categoryOrder.indexOf(a.category) - categoryOrder.indexOf(b.category);
      if (catCompare !== 0) return catCompare;
      // Then by status (built first, then in_progress, then missing).
      // Must use '??', not '||': 'built' maps to 0, which is falsy, so '||'
      // sent built items to the unknown-status bucket (3) and sorted them LAST.
      const statusOrder = { 'built': 0, 'in_progress': 1, 'missing': 2 };
      return (statusOrder[a.status as keyof typeof statusOrder] ?? 3) -
        (statusOrder[b.status as keyof typeof statusOrder] ?? 3);
    });
    // Calculate timeline range from the items that actually have activity.
    const allDates = workItems
      .filter(w => w.startDate)
      .flatMap(w => [w.startDate, w.endDate].filter(Boolean))
      .map(d => new Date(d!));
    const timelineStart = allDates.length > 0
      ? new Date(Math.min(...allDates.map(d => d.getTime())))
      : new Date();
    const timelineEnd = new Date(); // Today
    return NextResponse.json({
      workItems,
      timeline: {
        start: timelineStart.toISOString(),
        end: timelineEnd.toISOString(),
        totalDays: Math.ceil((timelineEnd.getTime() - timelineStart.getTime()) / (1000 * 60 * 60 * 24))
      },
      summary: {
        totalWorkItems: workItems.length,
        withActivity: workItems.filter(w => w.totalActivity > 0).length,
        noActivity: workItems.filter(w => w.totalActivity === 0).length,
        built: workItems.filter(w => w.status === 'built').length,
        missing: workItems.filter(w => w.status === 'missing').length
      },
      projectCreator: projectData?.createdBy || projectData?.owner || 'You'
    });
  } catch (error) {
    console.error('Error generating timeline view:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate timeline view',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}
// Finds extension sessions whose modified files look related to this page:
// a file matches if it contains the page path, the hyphenated page title,
// or any of the page's evidence strings (all compared lowercase).
function findRelatedSessions(page: any, history: any) {
  const pathNeedle = page.path.toLowerCase();
  const titleNeedle = page.title.toLowerCase().replace(/\s+/g, '-');
  const evidenceNeedles: string[] = (page.evidence || []).map(
    (ev: string) => ev.toLowerCase()
  );

  const matchesFile = (file: string): boolean => {
    const candidate = file.toLowerCase();
    if (candidate.includes(pathNeedle) || candidate.includes(titleNeedle)) {
      return true;
    }
    return evidenceNeedles.some((needle) => candidate.includes(needle));
  };

  const related = [];
  for (const event of history.chronologicalEvents) {
    if (event.type !== 'extension_session') continue;
    const files: string[] = event.data.filesModified || [];
    if (!files.some(matchesFile)) continue;
    related.push({
      timestamp: event.timestamp,
      duration: event.data.duration,
      filesModified: event.data.filesModified
    });
  }
  return related;
}
// Finds git commits whose message mentions this page: the lowercased message
// must contain the page path, the (whitespace-normalized) page title, or one
// of the page's evidence strings.
function findRelatedCommits(page: any, history: any) {
  const pathNeedle = page.path.toLowerCase();
  const titleNeedle = page.title.toLowerCase().replace(/\s+/g, ' ');
  const evidenceNeedles: string[] = (page.evidence || []).map(
    (ev: string) => ev.toLowerCase()
  );

  const isRelated = (message: string): boolean =>
    message.includes(pathNeedle) ||
    message.includes(titleNeedle) ||
    evidenceNeedles.some((needle) => message.includes(needle));

  const commits = [];
  for (const event of history.chronologicalEvents) {
    if (event.type !== 'git_commit') continue;
    if (!isRelated(event.data.message.toLowerCase())) continue;
    commits.push({
      timestamp: event.timestamp,
      hash: event.data.hash,
      message: event.data.message,
      insertions: event.data.insertions,
      deletions: event.data.deletions
    });
  }
  return commits;
}
// Earliest event timestamp as an ISO string, or null for an empty list.
function getEarliestDate(events: any[]) {
  if (!events.length) return null;
  const earliestMs = events.reduce(
    (min, event) => Math.min(min, new Date(event.timestamp).getTime()),
    Infinity
  );
  return new Date(earliestMs).toISOString();
}
// Latest event timestamp as an ISO string, or null for an empty list.
function getLatestDate(events: any[]) {
  if (!events.length) return null;
  const latestMs = events.reduce(
    (max, event) => Math.max(max, new Date(event.timestamp).getTime()),
    -Infinity
  );
  return new Date(latestMs).toISOString();
}
// Whole-day span between two ISO dates, rounded up.
// Returns 0 when either bound is missing (null or empty string).
function calculateDuration(startDate: string | null, endDate: string | null): number {
  if (!startDate || !endDate) return 0;
  const MS_PER_DAY = 1000 * 60 * 60 * 24;
  const elapsedMs = Date.parse(endDate) - Date.parse(startDate);
  return Math.ceil(elapsedMs / MS_PER_DAY);
}
// Builds a canned requirement checklist for a page, chosen by keywords in
// its title (first match wins) and falling back on the page's build status.
// `category` is part of the call signature but unused by the current rules.
function generateRequirements(page: any, category: any): any[] {
  const title: string = page.title;

  if (title.includes('Sign In') || title.includes('Sign Up')) {
    return [
      { id: 1, text: 'Email/password authentication', status: 'built' },
      { id: 2, text: 'GitHub OAuth integration', status: 'built' },
      { id: 3, text: 'Password reset flow', status: 'missing' },
      { id: 4, text: 'Session management', status: 'built' }
    ];
  }
  if (title.includes('Checklist')) {
    return [
      { id: 1, text: 'Display generated tasks from API', status: 'missing' },
      { id: 2, text: 'Mark tasks as complete', status: 'missing' },
      { id: 3, text: 'Drag-and-drop reordering', status: 'missing' },
      { id: 4, text: 'Save checklist state', status: 'missing' },
      { id: 5, text: 'Export to markdown/PDF', status: 'missing' }
    ];
  }
  if (title.includes('Vision') || title.includes('Mission')) {
    return [
      { id: 1, text: 'Capture product vision text', status: 'missing' },
      { id: 2, text: 'AI-assisted vision refinement', status: 'missing' },
      { id: 3, text: 'Upload supporting documents', status: 'missing' },
      { id: 4, text: 'Save vision to project metadata', status: 'built' }
    ];
  }
  if (title.includes('Marketing Automation')) {
    return [
      { id: 1, text: 'Connect to /plan/marketing API', status: 'missing' },
      { id: 2, text: 'Generate landing page copy', status: 'missing' },
      { id: 3, text: 'Generate email sequences', status: 'missing' },
      { id: 4, text: 'Export marketing materials', status: 'missing' }
    ];
  }
  if (title.includes('Communication Automation')) {
    return [
      { id: 1, text: 'Email template builder', status: 'missing' },
      { id: 2, text: 'Slack integration', status: 'missing' },
      { id: 3, text: 'Automated project updates', status: 'missing' },
      { id: 4, text: 'Team notifications', status: 'missing' }
    ];
  }
  if (title.includes('Import') && title.includes('Modal')) {
    return [
      { id: 1, text: 'Start from scratch option', status: 'built' },
      { id: 2, text: 'Import from GitHub', status: 'built' },
      { id: 3, text: 'Import from local folder', status: 'missing' },
      { id: 4, text: 'Auto-detect project type', status: 'missing' },
      { id: 5, text: 'Trigger Cursor import', status: 'built' },
      { id: 6, text: 'Create .vibn file', status: 'built' }
    ];
  }
  // No keyword matched: a built page gets a minimal "done" list, anything
  // else gets the generic build-from-scratch list.
  if (page.status === 'built') {
    return [
      { id: 1, text: 'Page built and accessible', status: 'built' },
      { id: 2, text: 'Connected to backend API', status: 'built' }
    ];
  }
  return [
    { id: 1, text: 'Design page layout', status: 'missing' },
    { id: 2, text: 'Implement core functionality', status: 'missing' },
    { id: 3, text: 'Connect to backend API', status: 'missing' },
    { id: 4, text: 'Add error handling', status: 'missing' }
  ];
}

View File

@@ -0,0 +1,397 @@
import { NextRequest, NextResponse } from 'next/server';
import { adminDb } from '@/lib/firebase/admin';
import { exec } from 'child_process';
import { promisify } from 'util';
const execAsync = promisify(exec);
/**
 * One calendar day of merged project activity, combining three sources:
 * git commits, extension (IDE plugin) sessions, and Cursor chat messages.
 */
interface TimelineDay {
  date: string; // YYYY-MM-DD format
  dayOfWeek: string;
  // Commits authored on this day, with per-commit diff stats.
  gitCommits: Array<{
    hash: string;
    time: string;
    author: string;
    message: string;
    filesChanged: number;
    insertions: number;
    deletions: number;
  }>;
  // Extension work sessions that started on this day.
  extensionSessions: Array<{
    startTime: string;
    endTime: string;
    duration: number; // minutes
    filesModified: string[];
    conversationSummary?: string;
  }>;
  // Cursor chat messages exchanged on this day.
  cursorMessages: Array<{
    time: string;
    type: 'user' | 'assistant';
    conversationName: string;
    preview: string; // First 100 chars
  }>;
  // Per-day aggregate counts across all three sources.
  summary: {
    totalGitCommits: number;
    totalExtensionSessions: number;
    totalCursorMessages: number;
    linesAdded: number;
    linesRemoved: number;
    uniqueFilesModified: number;
  };
}
/**
 * Full timeline payload for a project: the per-day activity buckets plus
 * availability/date-range metadata for each underlying data source.
 */
interface UnifiedTimeline {
  projectId: string;
  // Overall span covered by the timeline (across all sources).
  dateRange: {
    earliest: string;
    latest: string;
    totalDays: number;
  };
  days: TimelineDay[];
  // Which sources contributed data, and each source's own first/last dates.
  dataSources: {
    git: { available: boolean; firstDate: string | null; lastDate: string | null; totalRecords: number };
    extension: { available: boolean; firstDate: string | null; lastDate: string | null; totalRecords: number };
    cursor: { available: boolean; firstDate: string | null; lastDate: string | null; totalRecords: number };
  };
}
/**
 * Build a unified, day-by-day activity timeline for a project by merging
 * three sources: local git history, extension sessions stored in Firestore,
 * and Cursor chat messages. Each source is loaded best-effort — a failure
 * in one source logs a warning and leaves that source empty rather than
 * failing the whole request.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string }> }
) {
  try {
    const { projectId } = await params;

    // Local checkout used both for `git log` and as the fallback
    // workspacePath when matching extension sessions. Overridable via env
    // so the route is not hard-wired to one developer's machine.
    const repoPath = process.env.TIMELINE_REPO_PATH ?? '/Users/markhenderson/ai-proxy';

    // ---- 1. Load Git commits ----
    let gitCommits: any[] = [];
    let gitFirstDate: string | null = null;
    let gitLastDate: string | null = null;

    try {
      const { stdout: commitsOutput } = await execAsync(
        `cd "${repoPath}" && git log --all --pretty=format:"%H|%ai|%an|%s" --numstat`,
        { maxBuffer: 10 * 1024 * 1024 }
      );

      if (commitsOutput.trim()) {
        const lines = commitsOutput.split('\n');
        // A commit header line starts with the full 40-hex hash (%H)
        // followed by '|'. Matching on that — rather than `line.includes('|')`
        // — keeps numstat lines whose filename contains '|' from being
        // misread as commit headers.
        const headerRe = /^[0-9a-f]{40}\|/;
        let currentCommit: any = null;

        for (const line of lines) {
          if (headerRe.test(line)) {
            if (currentCommit) {
              gitCommits.push(currentCommit);
            }
            // Re-join the tail so commit subjects containing '|' are not
            // truncated at the first pipe.
            const [hash, date, author, ...subjectParts] = line.split('|');
            currentCommit = {
              hash: hash.substring(0, 8),
              date,
              author,
              message: subjectParts.join('|'),
              filesChanged: 0,
              insertions: 0,
              deletions: 0
            };
          } else if (line.trim() && currentCommit) {
            // numstat line: "<insertions>\t<deletions>\t<path>"; git emits
            // '-' for binary files.
            const parts = line.trim().split('\t');
            if (parts.length === 3) {
              const [insertStr, delStr] = parts;
              const insertions = insertStr === '-' ? 0 : parseInt(insertStr, 10) || 0;
              const deletions = delStr === '-' ? 0 : parseInt(delStr, 10) || 0;
              currentCommit.filesChanged++;
              currentCommit.insertions += insertions;
              currentCommit.deletions += deletions;
            }
          }
        }
        if (currentCommit) {
          gitCommits.push(currentCommit);
        }

        gitCommits.sort((a, b) => new Date(a.date).getTime() - new Date(b.date).getTime());
        if (gitCommits.length > 0) {
          gitFirstDate = gitCommits[0].date;
          gitLastDate = gitCommits[gitCommits.length - 1].date;
        }
      }
    } catch (error) {
      console.log('⚠️ Could not load Git commits:', error);
    }

    // ---- 2. Load Extension sessions ----
    let extensionSessions: any[] = [];
    let extensionFirstDate: string | null = null;
    let extensionLastDate: string | null = null;

    try {
      // Prefer sessions explicitly tagged with this projectId...
      let sessionsSnapshot = await adminDb
        .collection('sessions')
        .where('projectId', '==', projectId)
        .get();

      // ...falling back to sessions matched by workspace path.
      if (sessionsSnapshot.empty) {
        sessionsSnapshot = await adminDb
          .collection('sessions')
          .where('workspacePath', '==', repoPath)
          .get();
      }

      extensionSessions = sessionsSnapshot.docs.map(doc => {
        const data = doc.data();
        // Firestore Timestamps expose toDate(); anything else (string /
        // epoch number) is normalized through the Date constructor.
        const startTime = data.startTime?.toDate?.() || new Date(data.startTime);
        const endTime = data.endTime?.toDate?.() || new Date(data.endTime);
        return {
          startTime,
          endTime,
          filesModified: data.filesModified || [],
          conversationSummary: data.conversationSummary || '',
          conversation: data.conversation || []
        };
      });

      extensionSessions.sort((a, b) =>
        new Date(a.startTime).getTime() - new Date(b.startTime).getTime()
      );

      if (extensionSessions.length > 0) {
        extensionFirstDate = extensionSessions[0].startTime.toISOString();
        extensionLastDate = extensionSessions[extensionSessions.length - 1].endTime.toISOString();
      }
    } catch (error) {
      console.log('⚠️ Could not load extension sessions:', error);
    }

    // ---- 3. Load Cursor messages ----
    // Two sources: the backfilled `cursorConversations` subcollection, and
    // conversation transcripts embedded in extension sessions.
    let cursorMessages: any[] = [];
    let cursorFirstDate: string | null = null;
    let cursorLastDate: string | null = null;

    try {
      const conversationsSnapshot = await adminDb
        .collection('projects')
        .doc(projectId)
        .collection('cursorConversations')
        .get();

      // NOTE(review): one messages query per conversation (N+1). Acceptable
      // for small conversation counts; revisit if this collection grows.
      for (const convDoc of conversationsSnapshot.docs) {
        const conv = convDoc.data();
        const messagesSnapshot = await adminDb
          .collection('projects')
          .doc(projectId)
          .collection('cursorConversations')
          .doc(convDoc.id)
          .collection('messages')
          .orderBy('createdAt', 'asc')
          .get();

        const messages = messagesSnapshot.docs.map(msgDoc => {
          const msg = msgDoc.data();
          return {
            createdAt: msg.createdAt,
            // type === 1 maps to a user turn; everything else is treated
            // as the assistant.
            type: msg.type === 1 ? 'user' : 'assistant',
            text: msg.text || '',
            conversationName: conv.name || 'Untitled'
          };
        });
        cursorMessages = cursorMessages.concat(messages);
      }

      // Merge in per-session transcripts captured by the extension.
      for (const session of extensionSessions) {
        if (session.conversation && Array.isArray(session.conversation)) {
          for (const msg of session.conversation) {
            cursorMessages.push({
              createdAt: msg.timestamp || session.startTime.toISOString(),
              type: msg.role === 'user' ? 'user' : 'assistant',
              text: msg.message || '',
              conversationName: 'Extension Session'
            });
          }
        }
      }

      cursorMessages.sort((a, b) =>
        new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime()
      );

      if (cursorMessages.length > 0) {
        cursorFirstDate = cursorMessages[0].createdAt;
        cursorLastDate = cursorMessages[cursorMessages.length - 1].createdAt;
      }
    } catch (error) {
      console.log('⚠️ Could not load Cursor messages:', error);
    }

    // ---- 4. Overall date range across whichever sources produced data ----
    const allFirstDates = [
      gitFirstDate ? new Date(gitFirstDate) : null,
      extensionFirstDate ? new Date(extensionFirstDate) : null,
      cursorFirstDate ? new Date(cursorFirstDate) : null
    ].filter(d => d !== null) as Date[];
    const allLastDates = [
      gitLastDate ? new Date(gitLastDate) : null,
      extensionLastDate ? new Date(extensionLastDate) : null,
      cursorLastDate ? new Date(cursorLastDate) : null
    ].filter(d => d !== null) as Date[];

    // Nothing anywhere: return an explicit empty payload rather than a 500.
    if (allFirstDates.length === 0 && allLastDates.length === 0) {
      return NextResponse.json({
        error: 'No timeline data available',
        projectId,
        dateRange: { earliest: null, latest: null, totalDays: 0 },
        days: [],
        dataSources: {
          git: { available: false, firstDate: null, lastDate: null, totalRecords: 0 },
          extension: { available: false, firstDate: null, lastDate: null, totalRecords: 0 },
          cursor: { available: false, firstDate: null, lastDate: null, totalRecords: 0 }
        }
      });
    }

    const earliestDate = new Date(Math.min(...allFirstDates.map(d => d.getTime())));
    const latestDate = new Date(Math.max(...allLastDates.map(d => d.getTime())));
    const totalDays = Math.ceil((latestDate.getTime() - earliestDate.getTime()) / (1000 * 60 * 60 * 24)) + 1;

    // ---- 5. Bucket everything by calendar day ----
    const dayMap = new Map<string, TimelineDay>();

    // Pre-seed every day in the range so gaps show up as empty days.
    for (let i = 0; i < totalDays; i++) {
      const date = new Date(earliestDate);
      date.setDate(date.getDate() + i);
      const dateKey = date.toISOString().split('T')[0];
      const dayOfWeek = date.toLocaleDateString('en-US', { weekday: 'long' });
      dayMap.set(dateKey, {
        date: dateKey,
        dayOfWeek,
        gitCommits: [],
        extensionSessions: [],
        cursorMessages: [],
        summary: {
          totalGitCommits: 0,
          totalExtensionSessions: 0,
          totalCursorMessages: 0,
          linesAdded: 0,
          linesRemoved: 0,
          uniqueFilesModified: 0
        }
      });
    }

    // Git commits: keyed on the date portion of %ai, which is in the
    // commit's own timezone, while sessions/messages below bucket in UTC —
    // TODO confirm this mixed bucketing is intended.
    for (const commit of gitCommits) {
      const dateKey = commit.date.split(' ')[0];
      const day = dayMap.get(dateKey);
      if (day) {
        day.gitCommits.push({
          hash: commit.hash,
          time: commit.date,
          author: commit.author,
          message: commit.message,
          filesChanged: commit.filesChanged,
          insertions: commit.insertions,
          deletions: commit.deletions
        });
        day.summary.totalGitCommits++;
        day.summary.linesAdded += commit.insertions;
        day.summary.linesRemoved += commit.deletions;
      }
    }

    // Extension sessions: bucketed by UTC start date.
    for (const session of extensionSessions) {
      const dateKey = new Date(session.startTime).toISOString().split('T')[0];
      const day = dayMap.get(dateKey);
      if (day) {
        const startTime = new Date(session.startTime);
        const endTime = new Date(session.endTime);
        const duration = Math.round((endTime.getTime() - startTime.getTime()) / (1000 * 60));
        day.extensionSessions.push({
          startTime: session.startTime.toISOString(),
          endTime: session.endTime.toISOString(),
          duration,
          filesModified: session.filesModified,
          conversationSummary: session.conversationSummary
        });
        day.summary.totalExtensionSessions++;
        // Files are de-duplicated within a session only; the same file in
        // two sessions on the same day is counted twice.
        const uniqueFiles = new Set([...session.filesModified]);
        day.summary.uniqueFilesModified += uniqueFiles.size;
      }
    }

    // Cursor messages: bucketed by UTC creation date.
    for (const message of cursorMessages) {
      const dateKey = new Date(message.createdAt).toISOString().split('T')[0];
      const day = dayMap.get(dateKey);
      if (day) {
        day.cursorMessages.push({
          time: message.createdAt,
          type: message.type,
          conversationName: message.conversationName,
          preview: message.text.substring(0, 100)
        });
        day.summary.totalCursorMessages++;
      }
    }

    // ---- 6. Assemble the response ----
    const days = Array.from(dayMap.values()).sort((a, b) =>
      new Date(a.date).getTime() - new Date(b.date).getTime()
    );

    const timeline: UnifiedTimeline = {
      projectId,
      dateRange: {
        earliest: earliestDate.toISOString(),
        latest: latestDate.toISOString(),
        totalDays
      },
      days,
      dataSources: {
        git: {
          available: gitCommits.length > 0,
          firstDate: gitFirstDate,
          lastDate: gitLastDate,
          totalRecords: gitCommits.length
        },
        extension: {
          available: extensionSessions.length > 0,
          firstDate: extensionFirstDate,
          lastDate: extensionLastDate,
          totalRecords: extensionSessions.length
        },
        cursor: {
          available: cursorMessages.length > 0,
          firstDate: cursorFirstDate,
          lastDate: cursorLastDate,
          totalRecords: cursorMessages.length
        }
      }
    };

    return NextResponse.json(timeline);
  } catch (error) {
    console.error('Error generating unified timeline:', error);
    return NextResponse.json(
      {
        error: 'Failed to generate unified timeline',
        details: error instanceof Error ? error.message : String(error)
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,68 @@
import { NextRequest, NextResponse } from 'next/server';
import { getAdminDb } from '@/lib/firebase/admin';
/**
* Save vision answers to Firestore
*/
export async function POST(
request: NextRequest,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
const body = await request.json();
const { visionAnswers } = body;
if (!visionAnswers || !visionAnswers.q1 || !visionAnswers.q2 || !visionAnswers.q3) {
return NextResponse.json(
{ error: 'All 3 vision answers are required' },
{ status: 400 }
);
}
const adminDb = getAdminDb();
// Save vision answers and mark ready for MVP
await adminDb.collection('projects').doc(projectId).set(
{
visionAnswers: {
q1: visionAnswers.q1,
q2: visionAnswers.q2,
q3: visionAnswers.q3,
allAnswered: true,
updatedAt: visionAnswers.updatedAt || new Date().toISOString(),
},
readyForMVP: true,
currentPhase: 'mvp',
phaseStatus: 'ready',
},
{ merge: true }
);
console.log(`[Vision API] Saved vision answers for project ${projectId}`);
// Trigger MVP generation (async - don't wait)
console.log(`[Vision API] Triggering MVP generation for project ${projectId}...`);
fetch(new URL(`/api/projects/${projectId}/mvp-checklist`, request.url).toString(), {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
}).catch((error) => {
console.error(`[Vision API] Failed to trigger MVP generation:`, error);
});
return NextResponse.json({
success: true,
message: 'Vision answers saved and MVP generation triggered',
});
} catch (error) {
console.error('[Vision API] Error saving vision answers:', error);
return NextResponse.json(
{
error: 'Failed to save vision answers',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,172 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
 * Post a new message/comment on a work item
 *
 * Stores the message in the work item's `messages` subcollection and bumps
 * the denormalized message counter on the matching workItemStates document.
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const body = await request.json();
    const { message, author, authorId, type } = body;

    if (!message || !author) {
      return NextResponse.json(
        { error: 'Message and author are required' },
        { status: 400 }
      );
    }

    const firestore = admin.firestore();
    const projectRef = firestore.collection('projects').doc(projectId);

    // Write the message under an auto-generated id.
    const messageRef = projectRef
      .collection('workItems')
      .doc(itemId)
      .collection('messages')
      .doc();
    await messageRef.set({
      message,
      author,
      authorId: authorId || 'anonymous',
      type: type || 'comment', // comment, feedback, question, etc.
      createdAt: admin.firestore.FieldValue.serverTimestamp(),
      reactions: [],
    });

    // Keep the per-item message count and last-activity metadata in sync.
    const stateRef = projectRef.collection('workItemStates').doc(itemId);
    await stateRef.set(
      {
        messageCount: admin.firestore.FieldValue.increment(1),
        lastMessageAt: admin.firestore.FieldValue.serverTimestamp(),
      },
      { merge: true }
    );

    return NextResponse.json({
      success: true,
      messageId: messageRef.id,
    });
  } catch (error) {
    console.error('Error posting message:', error);
    return NextResponse.json(
      {
        error: 'Failed to post message',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Get messages/comments for a work item
 *
 * Returns messages newest-first with Firestore timestamps serialized to
 * ISO-8601 strings.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;

    const snapshot = await admin
      .firestore()
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('messages')
      .orderBy('createdAt', 'desc')
      .get();

    const messages = snapshot.docs.map(doc => {
      const data = doc.data();
      return {
        id: doc.id,
        ...data,
        createdAt: data.createdAt?.toDate().toISOString(),
      };
    });

    return NextResponse.json({
      messages,
      count: messages.length,
    });
  } catch (error) {
    console.error('Error fetching messages:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch messages',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Delete a message
 *
 * Removes the message document and decrements the denormalized
 * messageCount on the matching workItemStates document. Returns 404 if
 * the message does not exist.
 */
export async function DELETE(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const { searchParams } = new URL(request.url);
    const messageId = searchParams.get('messageId');

    if (!messageId) {
      return NextResponse.json(
        { error: 'Message ID is required' },
        { status: 400 }
      );
    }

    const db = admin.firestore();
    const messageRef = db
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('messages')
      .doc(messageId);

    // Verify the message exists first. Firestore's delete() is a no-op for
    // missing documents, and decrementing the counter for a message that
    // was never there would drive messageCount negative.
    const messageDoc = await messageRef.get();
    if (!messageDoc.exists) {
      return NextResponse.json(
        { error: 'Message not found' },
        { status: 404 }
      );
    }

    await messageRef.delete();

    // Keep the denormalized message count in sync.
    await db
      .collection('projects')
      .doc(projectId)
      .collection('workItemStates')
      .doc(itemId)
      .set(
        {
          messageCount: admin.firestore.FieldValue.increment(-1),
        },
        { merge: true }
      );

    return NextResponse.json({
      success: true,
    });
  } catch (error) {
    console.error('Error deleting message:', error);
    return NextResponse.json(
      {
        error: 'Failed to delete message',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,94 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
 * Update work item state (draft/final)
 *
 * States live in the `workItemStates` subcollection because work items
 * themselves are generated from the MVP checklist rather than stored
 * directly.
 */
export async function PATCH(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const body = await request.json();
    const nextState = body.state;

    const allowedStates = ['draft', 'final'];
    if (!nextState || !allowedStates.includes(nextState)) {
      return NextResponse.json(
        { error: 'Invalid state. Must be "draft" or "final"' },
        { status: 400 }
      );
    }

    const stateRef = admin
      .firestore()
      .collection('projects')
      .doc(projectId)
      .collection('workItemStates')
      .doc(itemId);

    await stateRef.set(
      {
        state: nextState,
        updatedAt: admin.firestore.FieldValue.serverTimestamp(),
      },
      { merge: true }
    );

    return NextResponse.json({
      success: true,
      state: nextState,
    });
  } catch (error) {
    console.error('Error updating work item state:', error);
    return NextResponse.json(
      {
        error: 'Failed to update state',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Get work item state
 *
 * Falls back to 'draft' when no explicit state has been stored yet.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;

    const stateDoc = await admin
      .firestore()
      .collection('projects')
      .doc(projectId)
      .collection('workItemStates')
      .doc(itemId)
      .get();

    if (!stateDoc.exists) {
      return NextResponse.json({
        state: 'draft', // Default state
      });
    }

    const data = stateDoc.data();
    return NextResponse.json({
      state: data?.state || 'draft',
      updatedAt: data?.updatedAt,
    });
  } catch (error) {
    console.error('Error fetching work item state:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch state',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,106 @@
import { NextRequest, NextResponse } from 'next/server';
import admin from '@/lib/firebase/admin';
/**
 * Create a new version of a work item
 *
 * The next version number is computed inside a Firestore transaction so
 * two concurrent requests cannot both read the same "latest" version and
 * write duplicate version numbers (the original read-then-write pattern
 * had that race).
 */
export async function POST(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;
    const { description, changes, createdBy } = await request.json();

    const db = admin.firestore();
    const versionsCollection = db
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('versions');

    // Atomically read the latest versionNumber and write the next one.
    // The Admin SDK allows queries inside transactions via transaction.get.
    const { versionId, versionNumber } = await db.runTransaction(async (tx) => {
      const latest = await tx.get(
        versionsCollection.orderBy('versionNumber', 'desc').limit(1)
      );
      const currentVersion = latest.empty ? 0 : latest.docs[0].data().versionNumber;
      const newVersionNumber = currentVersion + 1;

      const versionRef = versionsCollection.doc();
      tx.set(versionRef, {
        versionNumber: newVersionNumber,
        description: description || `Version ${newVersionNumber}`,
        changes: changes || {},
        createdBy: createdBy || 'system',
        createdAt: admin.firestore.FieldValue.serverTimestamp(),
      });

      return { versionId: versionRef.id, versionNumber: newVersionNumber };
    });

    return NextResponse.json({
      success: true,
      versionId,
      versionNumber,
    });
  } catch (error) {
    console.error('Error creating version:', error);
    return NextResponse.json(
      {
        error: 'Failed to create version',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}
/**
 * Get version history for a work item
 *
 * Returns versions newest-first with Firestore timestamps serialized to
 * ISO-8601 strings.
 */
export async function GET(
  request: NextRequest,
  { params }: { params: Promise<{ projectId: string; itemId: string }> }
) {
  try {
    const { projectId, itemId } = await params;

    const snapshot = await admin
      .firestore()
      .collection('projects')
      .doc(projectId)
      .collection('workItems')
      .doc(itemId)
      .collection('versions')
      .orderBy('versionNumber', 'desc')
      .get();

    const versions = snapshot.docs.map(doc => {
      const data = doc.data();
      return {
        id: doc.id,
        ...data,
        createdAt: data.createdAt?.toDate().toISOString(),
      };
    });

    return NextResponse.json({
      versions,
      count: versions.length,
    });
  } catch (error) {
    console.error('Error fetching versions:', error);
    return NextResponse.json(
      {
        error: 'Failed to fetch versions',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,139 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
import type { ProjectPhaseData, ProjectPhaseScores } from '@/lib/types/project-artifacts';
/**
 * Create a new project
 *
 * Requires a Firebase ID token. Validates the payload, enforces slug
 * uniqueness, writes the project document, and — when a workspacePath is
 * supplied — re-associates any orphaned sessions from that workspace.
 */
export async function POST(request: Request) {
  try {
    // ---- Auth: require a valid Firebase ID token ----
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();

    let userId: string;
    try {
      const decodedToken = await adminAuth.verifyIdToken(idToken);
      userId = decodedToken.uid;
    } catch (error) {
      return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
    }

    const body = await request.json();
    const {
      projectName,
      projectType,
      slug,
      vision,
      product,
      workspacePath, // Optional: if coming from association prompt
      chatgptUrl, // Optional: if from ChatGPT
      githubRepo, // Optional: if from GitHub
      githubRepoId,
      githubRepoUrl,
      githubDefaultBranch,
    } = body;

    // Validate required fields up front. Previously a missing `product` or
    // `slug` surfaced as an opaque 500 (`product.name` throwing, or the
    // slug query running against undefined); reject bad payloads with 400.
    if (!projectName || !slug || !product?.name) {
      return NextResponse.json(
        { error: 'projectName, slug, and product.name are required' },
        { status: 400 }
      );
    }

    // Check if slug is available
    const existingProject = await adminDb
      .collection('projects')
      .where('slug', '==', slug)
      .limit(1)
      .get();
    if (!existingProject.empty) {
      return NextResponse.json(
        { error: 'Project slug already exists' },
        { status: 400 }
      );
    }

    // Get user data (workspace name defaults when the user doc is sparse)
    const userDoc = await adminDb.collection('users').doc(userId).get();
    const userData = userDoc.data();
    const workspace = userData?.workspace || 'my-workspace';

    // Create project
    const projectRef = adminDb.collection('projects').doc();
    await projectRef.set({
      id: projectRef.id,
      name: projectName,
      slug,
      userId,
      workspace,
      projectType,
      productName: product.name,
      productVision: vision || '',
      isForClient: product.isForClient || false,
      hasLogo: product.hasLogo || false,
      hasDomain: product.hasDomain || false,
      hasWebsite: product.hasWebsite || false,
      hasGithub: !!githubRepo,
      hasChatGPT: !!chatgptUrl,
      workspacePath: workspacePath || null,
      workspaceName: workspacePath ? workspacePath.split('/').pop() : null,
      // GitHub data
      githubRepo: githubRepo || null,
      githubRepoId: githubRepoId || null,
      githubRepoUrl: githubRepoUrl || null,
      githubDefaultBranch: githubDefaultBranch || null,
      // ChatGPT data
      chatgptUrl: chatgptUrl || null,
      // Extension tracking
      extensionLinked: false,
      status: 'active',
      // Pipeline tracking
      currentPhase: 'collector',
      phaseStatus: 'not_started',
      phaseData: {} as ProjectPhaseData,
      phaseScores: {} as ProjectPhaseScores,
      createdAt: FieldValue.serverTimestamp(),
      updatedAt: FieldValue.serverTimestamp(),
    });
    console.log(`[API] Created project ${projectRef.id} (${slug})`);

    // If workspacePath provided, associate existing orphaned sessions
    // from that workspace with the new project.
    if (workspacePath) {
      const sessionsSnapshot = await adminDb
        .collection('sessions')
        .where('userId', '==', userId)
        .where('workspacePath', '==', workspacePath)
        .where('needsProjectAssociation', '==', true)
        .get();
      if (!sessionsSnapshot.empty) {
        const batch = adminDb.batch();
        sessionsSnapshot.docs.forEach((doc) => {
          batch.update(doc.ref, {
            projectId: projectRef.id,
            needsProjectAssociation: false,
            updatedAt: FieldValue.serverTimestamp(),
          });
        });
        await batch.commit();
        console.log(`[API] Associated ${sessionsSnapshot.size} sessions with project`);
      }
    }

    return NextResponse.json({
      success: true,
      projectId: projectRef.id,
      slug,
      workspace,
    });
  } catch (error) {
    console.error('Error creating project:', error);
    return NextResponse.json(
      {
        error: 'Failed to create project',
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}

View File

@@ -0,0 +1,93 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
/**
* Delete a project (soft delete - keeps sessions intact)
* Sessions will remain in the database but projectId will be set to null
*/
export async function POST(request: Request) {
try {
const authHeader = request.headers.get('Authorization');
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const idToken = authHeader.split('Bearer ')[1];
const adminAuth = getAdminAuth();
const adminDb = getAdminDb();
let userId: string;
try {
const decodedToken = await adminAuth.verifyIdToken(idToken);
userId = decodedToken.uid;
} catch (error) {
return NextResponse.json({ error: 'Invalid token' }, { status: 401 });
}
const { projectId } = await request.json();
if (!projectId) {
return NextResponse.json(
{ error: 'Project ID is required' },
{ status: 400 }
);
}
// Verify project belongs to user
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json(
{ error: 'Project not found' },
{ status: 404 }
);
}
if (projectDoc.data()?.userId !== userId) {
return NextResponse.json(
{ error: 'Unauthorized to delete this project' },
{ status: 403 }
);
}
// Delete the project document
await adminDb.collection('projects').doc(projectId).delete();
// Optional: Update sessions to remove project reference
// This makes sessions "orphaned" but keeps all the data
const sessionsSnapshot = await adminDb
.collection('sessions')
.where('projectId', '==', projectId)
.get();
if (!sessionsSnapshot.empty) {
const batch = adminDb.batch();
sessionsSnapshot.docs.forEach((doc) => {
batch.update(doc.ref, {
projectId: null,
// Flag these as needing reassignment if user wants to link them later
needsProjectAssociation: true,
updatedAt: FieldValue.serverTimestamp(),
});
});
await batch.commit();
}
return NextResponse.json({
success: true,
message: 'Project deleted successfully',
sessionsPreserved: sessionsSnapshot.size,
});
} catch (error) {
console.error('[Project Delete] Error:', error);
return NextResponse.json(
{
error: 'Failed to delete project',
details: error instanceof Error ? error.message : String(error),
},
{ status: 500 }
);
}
}

View File

@@ -0,0 +1,166 @@
import { NextResponse } from 'next/server';
import { getAdminAuth, getAdminDb } from '@/lib/firebase/admin';
import { FieldValue } from 'firebase-admin/firestore';
import type { ProjectPhase, PhaseStatus } from '@/lib/types/phases';
/**
* GET - Get current phase for a project
* POST - Update phase (start, complete, or add data)
*/
export async function GET(request: Request) {
try {
const authHeader = request.headers.get('Authorization');
if (!authHeader?.startsWith('Bearer ')) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const idToken = authHeader.split('Bearer ')[1];
const adminAuth = getAdminAuth();
const adminDb = getAdminDb();
await adminAuth.verifyIdToken(idToken);
const { searchParams } = new URL(request.url);
const projectId = searchParams.get('projectId');
if (!projectId) {
return NextResponse.json({ error: 'Project ID required' }, { status: 400 });
}
// Get project phase data
const projectDoc = await adminDb.collection('projects').doc(projectId).get();
if (!projectDoc.exists) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
const projectData = projectDoc.data();
// Return current phase info
return NextResponse.json({
currentPhase: projectData?.currentPhase || 'gathering',
phaseStatus: projectData?.phaseStatus || 'not_started',
phaseData: projectData?.phaseData || {},
phaseHistory: projectData?.phaseHistory || []
});
} catch (error) {
console.error('Error getting project phase:', error);
return NextResponse.json(
{ error: 'Failed to get phase', details: error instanceof Error ? error.message : String(error) },
{ status: 500 }
);
}
}
/**
 * Update a project's phase. Supported actions:
 *  - start:       begin a new phase (requires `phase`)
 *  - complete:    mark the current phase completed and append to history
 *  - save_data:   store arbitrary data under the current phase
 *  - add_insight: append a gathering insight (requires `data.insight`)
 */
export async function POST(request: Request) {
  try {
    const authHeader = request.headers.get('Authorization');
    if (!authHeader?.startsWith('Bearer ')) {
      return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
    }

    const idToken = authHeader.split('Bearer ')[1];
    const adminAuth = getAdminAuth();
    const adminDb = getAdminDb();
    // Only the verification side effect is needed; the uid itself is unused.
    await adminAuth.verifyIdToken(idToken);

    const body = await request.json();
    const { projectId, action, phase, data } = body;

    if (!projectId || !action) {
      return NextResponse.json(
        { error: 'projectId and action are required' },
        { status: 400 }
      );
    }

    const projectRef = adminDb.collection('projects').doc(projectId);
    const projectDoc = await projectRef.get();
    if (!projectDoc.exists) {
      return NextResponse.json({ error: 'Project not found' }, { status: 404 });
    }
    const projectData = projectDoc.data();

    // Handle different actions
    switch (action) {
      case 'start': {
        // Start a new phase
        if (!phase) {
          return NextResponse.json({ error: 'phase required for start action' }, { status: 400 });
        }
        await projectRef.update({
          currentPhase: phase,
          phaseStatus: 'in_progress',
          [`phaseData.${phase}.startedAt`]: FieldValue.serverTimestamp(),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Started ${phase} for project ${projectId}`);
        return NextResponse.json({ success: true, phase, status: 'in_progress' });
      }
      case 'complete': {
        // Complete current phase
        const currentPhase = projectData?.currentPhase || 'gathering';
        await projectRef.update({
          phaseStatus: 'completed',
          [`phaseData.${currentPhase}.completedAt`]: FieldValue.serverTimestamp(),
          // FieldValue.serverTimestamp() is not allowed inside array
          // elements — arrayUnion containing it throws at runtime — so the
          // history entry records a concrete ISO timestamp instead.
          phaseHistory: FieldValue.arrayUnion({
            phase: currentPhase,
            completedAt: new Date().toISOString()
          }),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Completed ${currentPhase} for project ${projectId}`);
        return NextResponse.json({ success: true, phase: currentPhase, status: 'completed' });
      }
      case 'save_data': {
        // Save phase-specific data (insights, vision board, etc.)
        const currentPhase = projectData?.currentPhase || 'gathering';
        await projectRef.update({
          [`phaseData.${currentPhase}.data`]: data,
          [`phaseData.${currentPhase}.lastUpdated`]: FieldValue.serverTimestamp(),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Saved data for ${currentPhase} in project ${projectId}`);
        return NextResponse.json({ success: true, phase: currentPhase });
      }
      case 'add_insight': {
        // Add a gathering insight
        if (!data || !data.insight) {
          return NextResponse.json({ error: 'insight data required' }, { status: 400 });
        }
        await projectRef.update({
          'phaseData.gathering.insights': FieldValue.arrayUnion(data),
          updatedAt: FieldValue.serverTimestamp()
        });
        console.log(`[Phase] Added insight to project ${projectId}`);
        return NextResponse.json({ success: true });
      }
      default:
        return NextResponse.json({ error: 'Invalid action' }, { status: 400 });
    }
  } catch (error) {
    console.error('Error updating project phase:', error);
    return NextResponse.json(
      { error: 'Failed to update phase', details: error instanceof Error ? error.message : String(error) },
      { status: 500 }
    );
  }
}