diff --git a/src/agents/import-analyzer.ts b/src/agents/import-analyzer.ts
new file mode 100644
index 0000000..c40f084
--- /dev/null
+++ b/src/agents/import-analyzer.ts
@@ -0,0 +1,12 @@
+import { registerAgent, pick } from './registry';
+
+registerAgent({
+  name: 'ImportAnalyzer',
+  description: 'Reads an imported codebase end-to-end and produces CODEBASE_MAP.md and MIGRATION_PLAN.md',
+  model: 'B',
+  promptId: 'import-analyzer',
+  tools: pick([
+    'read_file', 'write_file', 'list_directory', 'find_files', 'search_code',
+    'git_commit_and_push',
+  ])
+});
diff --git a/src/agents/index.ts b/src/agents/index.ts
index d6a2440..6a55ecf 100644
--- a/src/agents/index.ts
+++ b/src/agents/index.ts
@@ -4,6 +4,7 @@ import '../prompts/coder';
 import '../prompts/pm';
 import '../prompts/marketing';
 import '../prompts/atlas';
+import '../prompts/import-analyzer';
 
 // Import agent files — side effects register each agent into the registry
 import './orchestrator';
@@ -11,6 +12,7 @@ import './coder';
 import './pm';
 import './marketing';
 import './atlas';
+import './import-analyzer';
 
 // Re-export public API
 export { AgentConfig, AGENTS, getAgent, allAgents, pick } from './registry';
diff --git a/src/prompts/import-analyzer.ts b/src/prompts/import-analyzer.ts
new file mode 100644
index 0000000..09f7efe
--- /dev/null
+++ b/src/prompts/import-analyzer.ts
@@ -0,0 +1,97 @@
+import { registerPrompt } from './loader';
+
+registerPrompt('import-analyzer', `
+You are a senior software architect performing a codebase audit on a newly imported project.
+Your job is to thoroughly read the entire codebase, understand what it does and how it's built,
+then produce two documents: CODEBASE_MAP.md and MIGRATION_PLAN.md.
+
+## Your goal
+
+The founder who owns this project is non-technical. They need to understand what they have
+before deciding what to do with it. Write everything in plain language — no jargon, no
+assumptions that they know what "Docker" or "BigQuery" means without a brief explanation.
+
+## Step 1 — Explore the full codebase
+
+Use list_directory and find_files to map every folder and file.
+Use read_file to read key files:
+ - README files (any depth)
+ - package.json, requirements.txt, pyproject.toml (understand dependencies)
+ - next.config.*, vite.config.*, Dockerfile, docker-compose.yml (understand deployment)
+ - Any existing .md documentation
+ - Main entry point files (index.ts, main.py, app.py, server.ts, etc.)
+ - Environment variable files (.env.example — NEVER read actual .env files)
+
+Do NOT read every file. Read enough to understand the purpose and structure of each component.
+
+## Step 2 — Write CODEBASE_MAP.md
+
+Create this file at the root of the repo. Structure it like this:
+
+# Codebase Map
+
+## What this project does
+[1–2 sentences in plain language explaining what the product is]
+
+## Components
+
+### [Component name] — [folder path]
+**Type:** [Web app / API server / Background job / AI agent / Scripts / etc.]
+**Language/Framework:** [e.g. Next.js 14 + TypeScript]
+**What it does:** [1–2 sentences plain language]
+**Status:** [Active / Incomplete / Legacy / Unknown]
+**Can deploy to Coolify:** [Yes / No / Maybe — with brief reason]
+
+[repeat for each component]
+
+## External Services Required
+[List every external service the project depends on, with a plain-language explanation of what it does]
+- **[Service name]**: [What it is, e.g. "Google BigQuery — stores all the analytics data"]
+
+## Tech Stack Summary
+[Bullet list of languages and key frameworks]
+
+## What's missing
+[Any obvious gaps: no tests, no CI, missing config files, etc.]
+
+## Step 3 — Write MIGRATION_PLAN.md
+
+Create this file at the root of the repo. Structure it like this:
+
+# Migration Plan
+
+## Summary
+[2–3 sentences: what's in good shape, what needs work, overall recommendation]
+
+## Recommended Actions
+
+### Deploy immediately (ready as-is)
+[List components that can be deployed to Coolify right now, with the folder path and any config notes]
+
+### Keep on existing infrastructure
+[List components that should stay where they are and why — e.g. GCP Cloud Functions that depend on BigQuery]
+
+### Migrate with work required
+[List components that could move to Coolify but need changes first]
+
+### Archive or remove
+[Anything that looks abandoned, duplicate, or no longer needed]
+
+## First steps
+[Numbered list of the 3–5 most important things to do, in order, written for a non-technical founder]
+
+## Open questions
+[Things I couldn't determine from the code alone that the founder should clarify]
+
+## Step 4 — Commit both files
+
+Once both documents are written, commit them with:
+ message: "docs: add CODEBASE_MAP and MIGRATION_PLAN from import analysis"
+
+## Important rules
+- Never modify any existing files — only create the two new .md files
+- Never read .env files or files with credentials
+- Write for a non-technical founder — explain everything plainly
+- If you can't understand something, say so honestly in the document
+- Be specific: name actual files, folders, line counts, frameworks
+`.trim());
diff --git a/src/server.ts b/src/server.ts
index c747f43..69f51a6 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -92,6 +92,73 @@
 app.get('/health', (_req: Request, res: Response) => {
   res.json({ status: 'ok', timestamp: new Date().toISOString() });
 });
+// ---------------------------------------------------------------------------
+// GitHub mirror — clone a public GitHub repo and push to Gitea as-is
+// ---------------------------------------------------------------------------
+
+app.post('/api/mirror', async (req: Request, res: Response) => {
+  const { github_url, gitea_repo, project_name } = req.body as {
+    github_url?: string;
+    gitea_repo?: string; // e.g. "mark/opsos"
+    project_name?: string;
+  };
+
+  if (!github_url || !gitea_repo) {
+    res.status(400).json({ error: '"github_url" and "gitea_repo" are required' });
+    return;
+  }
+
+  const { execSync } = await import('child_process');
+  const fs = await import('fs');
+  const path = await import('path');
+  const os = await import('os');
+
+  const mirrorId = `mirror_${Date.now()}`;
+  const tmpDir = path.join(os.tmpdir(), mirrorId);
+
+  const gitea = {
+    apiUrl: process.env.GITEA_API_URL || '',
+    apiToken: process.env.GITEA_API_TOKEN || '',
+    username: process.env.GITEA_USERNAME || ''
+  };
+
+  try {
+    // Build authenticated Gitea push URL
+    // GITEA_API_URL is like https://git.vibnai.com — strip /api/v1 if present
+    const giteaBase = gitea.apiUrl.replace(/\/api\/v1\/?$/, '');
+    const authedPushUrl = `${giteaBase}/${gitea_repo}.git`
+      .replace('https://', `https://${gitea.username}:${gitea.apiToken}@`);
+
+    console.log(`[mirror] Cloning ${github_url} → ${tmpDir}`);
+    fs.mkdirSync(tmpDir, { recursive: true });
+
+    // Mirror-clone the GitHub repo (preserves all branches + tags)
+    execSync(`git clone --mirror "${github_url}" "${tmpDir}/.git"`, {
+      stdio: 'pipe',
+      timeout: 120_000
+    });
+    execSync(`git config --bool core.bare false`, { cwd: tmpDir, stdio: 'pipe' });
+    execSync(`git checkout`, { cwd: tmpDir, stdio: 'pipe' });
+
+    // Point origin at Gitea and push all refs
+    execSync(`git remote set-url origin "${authedPushUrl}"`, { cwd: tmpDir, stdio: 'pipe' });
+    execSync(`git push --mirror origin`, { cwd: tmpDir, stdio: 'pipe', timeout: 120_000 });
+
+    console.log(`[mirror] Pushed ${gitea_repo} successfully`);
+    res.json({ success: true, gitea_repo, github_url });
+  } catch (err) {
+    const msg = err instanceof Error ? err.message : String(err);
+    console.error(`[mirror] Failed:`, msg);
+    res.status(500).json({ error: 'Mirror failed', details: msg });
+  } finally {
+    // Clean up temp dir
+    try {
+      const { execSync: rm } = await import('child_process');
+      rm(`rm -rf "${tmpDir}"`, { stdio: 'pipe' });
+    } catch { /* best effort */ }
+  }
+});
+
 // List available agents
 app.get('/api/agents', (_req: Request, res: Response) => {
   const agents = Object.values(AGENTS).map(a => ({