Files
vibn-frontend/app/api/projects/[projectId]/analyze-repo/route.ts
Mark Henderson ab100f2e76 feat: implement 4 project type flows with unique AI experiences
- New multi-step CreateProjectFlow replaces 2-step modal with TypeSelector
  and 4 setup components (Fresh Idea, Chat Import, Code Import, Migrate)
- overview/page.tsx routes to unique main component per creationMode
- FreshIdeaMain: wraps AtlasChat with post-discovery decision banner
  (Generate PRD vs Plan MVP Test)
- ChatImportMain: 3-stage flow (intake → extracting → review) with
  editable insight buckets (decisions, ideas, questions, architecture, users)
- CodeImportMain: 4-stage flow (input → cloning → mapping → surfaces)
  with architecture map and surface selection
- MigrateMain: 5-stage flow with audit, review, planning, and migration
  plan doc with checkbox-tracked tasks and non-destructive warning banner
- New API routes: analyze-chats, analyze-repo, analysis-status,
  generate-migration-plan (all using Gemini)
- ProjectShell: accepts creationMode prop, filters/renames tabs per type
  (code-import hides PRD, migration hides PRD/Grow/Insights, renames Atlas tab)
- Right panel adapts content based on creationMode

Made-with: Cursor
2026-03-06 12:48:28 -08:00

217 lines
7.8 KiB
TypeScript

import { execFileSync, execSync } from 'child_process';
import { existsSync, readdirSync, readFileSync, rmSync, statSync } from 'fs';
import { join } from 'path';
import { NextResponse } from 'next/server';
import { getServerSession } from 'next-auth';
import { authOptions } from '@/lib/auth/authOptions';
import { query } from '@/lib/db-postgres';
// Allow long-running requests: the shallow clone plus multi-stage Gemini
// analysis can take well over the default serverless time budget.
export const maxDuration = 120;
// Gemini REST configuration. With an unset GOOGLE_API_KEY the key is '',
// and every generateContent call will come back without candidates.
const GEMINI_API_KEY = process.env.GOOGLE_API_KEY || '';
const GEMINI_MODEL = process.env.GEMINI_MODEL || 'gemini-2.0-flash-exp';
const GEMINI_BASE_URL = 'https://generativelanguage.googleapis.com/v1beta/models';
/**
 * Sends a single-turn prompt to the Gemini generateContent endpoint and
 * returns the first candidate's text.
 *
 * @param prompt - Full prompt text, sent as a single user part.
 * @returns The model's text output, or '' when the response carries no
 *   candidates (e.g. safety-blocked).
 * @throws Error on a non-2xx HTTP response (bad key, quota, unknown model),
 *   so the caller's error handling records the failure instead of silently
 *   treating it as an empty completion.
 */
async function callGemini(prompt: string): Promise<string> {
  const res = await fetch(`${GEMINI_BASE_URL}/${GEMINI_MODEL}:generateContent?key=${GEMINI_API_KEY}`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      contents: [{ parts: [{ text: prompt }] }],
      generationConfig: { temperature: 0.2, maxOutputTokens: 6000 },
    }),
  });
  if (!res.ok) {
    // Previously a failed request fell through to '' and the caller parsed
    // garbage; surface the HTTP status instead.
    throw new Error(`Gemini request failed: ${res.status} ${res.statusText}`);
  }
  const data = (await res.json()) as {
    candidates?: { content?: { parts?: { text?: string }[] } }[];
  };
  return data?.candidates?.[0]?.content?.parts?.[0]?.text ?? '';
}
/**
 * Parses a JSON payload that may arrive wrapped in a Markdown code fence
 * (``` or ```json), as LLM output often is.
 *
 * @throws SyntaxError when the unwrapped text is not valid JSON.
 */
function parseJsonBlock(raw: string): unknown {
  let text = raw.trim();
  if (text.startsWith('```')) {
    // Strip the opening fence (with optional "json" tag) and the closing fence.
    text = text
      .replace(/^```(?:json)?/i, '')
      .replace(/```$/, '')
      .trim();
  }
  return JSON.parse(text);
}
// Best-effort file read: returns at most `maxBytes` characters (note: slice
// counts UTF-16 characters, not bytes) and '' when the file is missing or
// unreadable. Never throws.
function safeRead(path: string, maxBytes = 8000): string {
  try {
    if (!existsSync(path)) return '';
    return readFileSync(path, 'utf8').slice(0, maxBytes);
  } catch {
    return '';
  }
}
/**
 * Recursively lists a directory as root-relative paths (directories get a
 * trailing '/'), skipping dotfiles/dot-dirs, node_modules and __pycache__.
 * Recursion is capped at `maxDepth` to keep the listing small for huge repos.
 * Unreadable directories are skipped silently.
 *
 * @param dir - Directory to walk.
 * @param depth - Current recursion depth (internal).
 * @param maxDepth - Maximum recursion depth.
 * @param acc - Accumulator the paths are appended to (also returned).
 * @param prefix - Root-relative prefix for entries at this level (internal;
 *   new backward-compatible parameter — previously nested entries lost their
 *   parent path because `rel` was derived from the immediate parent via a
 *   fragile String.replace).
 */
function walkDir(dir: string, depth = 0, maxDepth = 4, acc: string[] = [], prefix = ''): string[] {
  if (depth > maxDepth) return acc;
  try {
    for (const entry of readdirSync(dir, { withFileTypes: true })) {
      // Skip hidden entries (covers .git) and well-known bulky directories.
      if (entry.name.startsWith('.') || entry.name === 'node_modules' || entry.name === '__pycache__') continue;
      const rel = prefix + entry.name;
      if (entry.isDirectory()) {
        acc.push(rel + '/');
        walkDir(join(dir, entry.name), depth + 1, maxDepth, acc, rel + '/');
      } else {
        acc.push(rel);
      }
    }
  } catch { /* skip unreadable directory */ }
  return acc;
}
/**
 * Persists a new analysis stage into the project's JSONB blob (also bumping
 * updatedAt) and returns the merged object so the caller can keep threading it.
 */
async function updateStage(projectId: string, currentData: Record<string, unknown>, stage: string) {
  const next: Record<string, unknown> = {
    ...currentData,
    analysisStage: stage,
    updatedAt: new Date().toISOString(),
  };
  await query(
    `UPDATE fs_projects SET data = $2::jsonb WHERE id = $1::text`,
    [projectId, JSON.stringify(next)]
  );
  return next;
}
export async function POST(
req: Request,
{ params }: { params: Promise<{ projectId: string }> }
) {
try {
const { projectId } = await params;
const session = await getServerSession(authOptions);
if (!session?.user?.email) {
return NextResponse.json({ error: 'Unauthorized' }, { status: 401 });
}
const body = await req.json() as { repoUrl?: string };
const repoUrl = body.repoUrl?.trim() || '';
if (!repoUrl.startsWith('http')) {
return NextResponse.json({ error: 'Invalid repository URL' }, { status: 400 });
}
// Verify ownership
const rows = await query<{ data: Record<string, unknown> }>(
`SELECT p.data FROM fs_projects p
JOIN fs_users u ON u.id = p.user_id
WHERE p.id = $1::text AND u.data->>'email' = $2::text LIMIT 1`,
[projectId, session.user.email]
);
if (rows.length === 0) {
return NextResponse.json({ error: 'Project not found' }, { status: 404 });
}
let currentData = rows[0].data ?? {};
currentData = await updateStage(projectId, currentData, 'cloning');
// Clone repo into temp dir (fire and forget — status is polled separately)
const tmpDir = `/tmp/vibn-${projectId}`;
// Run async so the request returns quickly and client can poll
setImmediate(async () => {
try {
// Clean up any existing clone
if (existsSync(tmpDir)) {
rmSync(tmpDir, { recursive: true, force: true });
}
execSync(`git clone --depth=1 "${repoUrl}" "${tmpDir}"`, {
timeout: 60_000,
stdio: 'ignore',
});
let data = { ...currentData };
data = await updateStage(projectId, data, 'reading');
// Read key files
const manifest: Record<string, string> = {};
const keyFiles = [
'package.json', 'package-lock.json', 'yarn.lock', 'pnpm-lock.yaml',
'requirements.txt', 'Pipfile', 'pyproject.toml',
'Dockerfile', 'docker-compose.yml', 'docker-compose.yaml',
'README.md', '.env.example', '.env.sample',
'next.config.js', 'next.config.ts', 'next.config.mjs',
'vite.config.ts', 'vite.config.js',
'tsconfig.json',
'prisma/schema.prisma', 'schema.prisma',
];
for (const f of keyFiles) {
const content = safeRead(join(tmpDir, f));
if (content) manifest[f] = content;
}
const fileListing = walkDir(tmpDir).slice(0, 300).join('\n');
data = await updateStage(projectId, data, 'analyzing');
const analysisPrompt = `You are a senior full-stack architect. Analyse this repository and return a structured architecture map.
File listing (top-level):
${fileListing}
Key file contents:
${Object.entries(manifest).map(([k, v]) => `\n### ${k}\n${v}`).join('')}
Return ONLY valid JSON with this structure:
{
"summary": "1-2 sentence project summary",
"rows": [
{ "category": "Tech Stack", "item": "Next.js 15", "status": "found", "detail": "next.config.ts present" },
{ "category": "Database", "item": "PostgreSQL", "status": "found", "detail": "prisma/schema.prisma detected" },
{ "category": "Auth", "item": "Authentication", "status": "missing", "detail": "No auth library detected" }
],
"suggestedSurfaces": ["marketing", "admin"]
}
Categories to cover: Tech Stack, Infrastructure, Database, API Surface, Frontend, Auth, Third-party, Missing / Gaps
Status values: "found", "partial", "missing"
suggestedSurfaces should only include items from: ["marketing", "web-app", "admin", "api"]
Suggest surfaces that are MISSING or incomplete in the current codebase.
Return only the JSON:`;
const raw = await callGemini(analysisPrompt);
let analysisResult;
try {
analysisResult = parseJsonBlock(raw);
} catch {
analysisResult = {
summary: 'Could not fully parse the repository structure.',
rows: [{ category: 'Tech Stack', item: 'Repository detected', status: 'found', detail: fileListing.split('\n').slice(0, 5).join(', ') }],
suggestedSurfaces: ['marketing'],
};
}
// Save result and mark done
const finalData = {
...data,
analysisStage: 'done',
analysisResult,
creationStage: 'mapping',
sourceData: { ...(data.sourceData as object || {}), repoUrl },
updatedAt: new Date().toISOString(),
};
await query(
`UPDATE fs_projects SET data = $2::jsonb WHERE id = $1::text`,
[projectId, JSON.stringify(finalData)]
);
} catch (err) {
console.error('[analyze-repo] background error', err);
await query(
`UPDATE fs_projects SET data = $2::jsonb WHERE id = $1::text`,
[projectId, JSON.stringify({ ...currentData, analysisStage: 'error', analysisError: String(err) })]
);
} finally {
// Clean up
try { if (existsSync(tmpDir)) rmSync(tmpDir, { recursive: true, force: true }); } catch { /* ok */ }
}
});
return NextResponse.json({ started: true });
} catch (err) {
console.error('[analyze-repo]', err);
return NextResponse.json({ error: 'Internal error' }, { status: 500 });
}
}