Wire up /agent/execute and /agent/stop endpoints

- Add runSessionAgent: streaming variant of runAgent that PATCHes VIBN DB
  after every LLM turn and tool call so frontend can poll live output
- Track changed files from write_file / replace_in_file tool calls
- Add /agent/execute: receives sessionId + giteaRepo + task, clones repo,
  scopes workspace to appPath, runs Coder agent async (returns 202 immediately)
- Add /agent/stop: sets stopped flag; agent checks between turns and exits cleanly
- Agent does NOT commit on completion — leaves changes for user review/approval

Made-with: Cursor
This commit is contained in:
2026-03-06 18:01:30 -08:00
parent 335c7a7e97
commit 5aeddace91
13 changed files with 850 additions and 5 deletions

126
dist/server.js vendored
View File

@@ -44,10 +44,12 @@ const crypto = __importStar(require("crypto"));
const child_process_1 = require("child_process");
const job_store_1 = require("./job-store");
const agent_runner_1 = require("./agent-runner");
const agent_session_runner_1 = require("./agent-session-runner");
const agents_1 = require("./agents");
const security_1 = require("./tools/security");
const orchestrator_1 = require("./orchestrator");
const atlas_1 = require("./atlas");
const llm_1 = require("./llm");
const app = (0, express_1.default)();
app.use((0, cors_1.default)());
const startTime = new Date();
@@ -219,7 +221,7 @@ app.delete('/orchestrator/sessions/:id', (req, res) => {
// Atlas — PRD discovery agent
// ---------------------------------------------------------------------------
app.post('/atlas/chat', async (req, res) => {
const { message, session_id, history } = req.body;
const { message, session_id, history, is_init, } = req.body;
if (!message) {
res.status(400).json({ error: '"message" is required' });
return;
@@ -227,7 +229,10 @@ app.post('/atlas/chat', async (req, res) => {
const sessionId = session_id || `atlas_${Date.now()}`;
const ctx = buildContext();
try {
const result = await (0, atlas_1.atlasChat)(sessionId, message, ctx, { preloadedHistory: history });
const result = await (0, atlas_1.atlasChat)(sessionId, message, ctx, {
preloadedHistory: history,
isInit: is_init,
});
res.json(result);
}
catch (err) {
@@ -320,6 +325,123 @@ app.post('/webhook/gitea', (req, res) => {
});
});
// ---------------------------------------------------------------------------
// Agent Execute — VIBN Build > Code > Agent tab
//
// Receives a task from the VIBN frontend, runs the Coder agent against
// the project's Gitea repo, and streams progress back to the VIBN DB
// via PATCH /api/projects/[id]/agent/sessions/[sid].
//
// This endpoint returns immediately (202) and runs the agent async so
// the browser can close without killing the loop.
// ---------------------------------------------------------------------------
// Track active sessions so /agent/stop can signal a running loop to halt.
// Keyed by sessionId; each entry holds a mutable { stopped } flag that the
// agent's isStopped() callback reads between turns.
const activeSessions = new Map();
app.post('/agent/execute', async (req, res) => {
    const { sessionId, projectId, appPath, giteaRepo, task } = req.body;
    if (!sessionId || !projectId || !appPath || !task) {
        res.status(400).json({ error: 'sessionId, projectId, appPath and task are required' });
        return;
    }
    const path = require('path');
    // appPath comes from the client and is joined into the workspace root
    // below — reject absolute paths and ".." segments so it cannot escape
    // the cloned repo directory.
    if (path.isAbsolute(appPath) || appPath.split(/[\\/]/).includes('..')) {
        res.status(400).json({ error: 'appPath must be a relative path inside the workspace' });
        return;
    }
    const vibnApiUrl = process.env.VIBN_API_URL ?? 'https://vibnai.com';
    // Best-effort PATCH of session state to the VIBN DB. Errors are swallowed
    // deliberately: by the time these fire the 202 has already been sent, so
    // there is no caller left to report a notification failure to.
    const patchSession = (body) => fetch(`${vibnApiUrl}/api/projects/${projectId}/agent/sessions/${sessionId}`, {
        method: 'PATCH',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify(body),
    }).catch(() => { });
    // Register session as active so /agent/stop can find it
    const sessionState = { stopped: false };
    activeSessions.set(sessionId, sessionState);
    // Respond immediately — execution is async so the browser can close
    // without killing the loop.
    res.status(202).json({ sessionId, status: 'running' });
    // Build workspace context — clone/update the Gitea repo if provided
    let ctx;
    try {
        ctx = buildContext(giteaRepo);
    }
    catch (err) {
        const msg = err instanceof Error ? err.message : String(err);
        console.error('[agent/execute] buildContext failed:', msg);
        patchSession({ status: 'failed', error: msg });
        activeSessions.delete(sessionId);
        return;
    }
    // Scope workspace to the app subdirectory so the agent works there
    // naturally. appPath is guaranteed non-empty and traversal-free by the
    // 400 checks above.
    ctx.workspaceRoot = path.join(ctx.workspaceRoot, appPath);
    require('fs').mkdirSync(ctx.workspaceRoot, { recursive: true });
    const agentConfig = agents_1.AGENTS['Coder'];
    if (!agentConfig) {
        patchSession({ status: 'failed', error: 'Coder agent not registered' });
        activeSessions.delete(sessionId);
        return;
    }
    // Run the streaming agent loop (fire and forget — the 202 is already out)
    (0, agent_session_runner_1.runSessionAgent)(agentConfig, task, ctx, {
        sessionId,
        projectId,
        vibnApiUrl,
        appPath,
        isStopped: () => sessionState.stopped,
    })
        .catch(err => {
        const msg = err instanceof Error ? err.message : String(err);
        console.error(`[agent/execute] session ${sessionId} crashed:`, msg);
        patchSession({ status: 'failed', error: msg });
    })
        .finally(() => {
        // Always unregister, whether the run succeeded, failed, or was stopped
        activeSessions.delete(sessionId);
    });
});
app.post('/agent/stop', (req, res) => {
    // Signal a running agent session to halt; the agent polls the flag
    // between steps, so shutdown is cooperative rather than immediate.
    const { sessionId } = req.body;
    if (!sessionId) {
        res.status(400).json({ error: 'sessionId required' });
        return;
    }
    const state = activeSessions.get(sessionId);
    if (!state) {
        // Session may have already finished
        res.json({ ok: true, message: 'Session not active (may have already completed).' });
        return;
    }
    state.stopped = true;
    res.json({ ok: true, message: 'Stop signal sent — agent will halt after current step.' });
});
// ---------------------------------------------------------------------------
// Generate — thin structured-generation endpoint (no session, no system prompt)
// Use this for one-shot tasks like architecture recommendations.
// ---------------------------------------------------------------------------
app.post('/generate', async (req, res) => {
    // One-shot structured generation: no session state, no system prompt.
    const { prompt, model } = req.body;
    if (!prompt) {
        res.status(400).json({ error: '"prompt" is required' });
        return;
    }
    try {
        // Model 'A' is the fallback when the caller does not pick one
        const llm = (0, llm_1.createLLM)(model ?? 'A', { temperature: 0.3 });
        const response = await llm.chat([{ role: 'user', content: prompt }], [], 8192);
        res.json({ reply: response.content ?? '' });
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        res.status(500).json({ error: message });
    }
});
// ---------------------------------------------------------------------------
// Error handler
// ---------------------------------------------------------------------------
app.use((err, _req, res, _next) => {