When is_init=true, no user message was being added to history before calling the LLM. Gemini requires at least one user turn — without it the API returned "contents are required" and Atlas never sent its opening greeting. Now adds the init message marked internally so it's sent to the LLM but filtered out of returned/stored history. Made-with: Cursor
176 lines
6.0 KiB
TypeScript
176 lines
6.0 KiB
TypeScript
import { createLLM, toOAITools, LLMMessage } from './llm';
|
|
import { ALL_TOOLS, executeTool, ToolContext } from './tools';
|
|
import { resolvePrompt } from './prompts/loader';
|
|
import { prdStore } from './tools/prd';
|
|
|
|
// Upper bound on LLM round-trips per atlasChat() call.
// Atlas is conversational — low turn count, no deep tool loops
const MAX_TURNS = 10;
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Session store
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/** One in-memory Atlas conversation, keyed by session id in `sessions`. */
interface AtlasSession {
  // Caller-supplied session identifier (also the Map key).
  id: string;
  // Full LLM conversation history, including internal init turns and tool results.
  history: LLMMessage[];
  // Finalized PRD markdown once finalize_prd has run; null until then.
  prdContent: string | null;
  // ISO-8601 timestamp set when the session is first created.
  createdAt: string;
  // ISO-8601 timestamp refreshed on every getOrCreateSession() lookup.
  lastActiveAt: string;
}
|
|
|
|
// In-memory session registry keyed by session id.
// NOTE(review): entries are only removed via clearAtlasSession(), so a
// long-running process accumulates sessions — confirm an eviction policy exists upstream.
const sessions = new Map<string, AtlasSession>();
|
|
|
|
function getOrCreateSession(sessionId: string): AtlasSession {
|
|
if (!sessions.has(sessionId)) {
|
|
sessions.set(sessionId, {
|
|
id: sessionId,
|
|
history: [],
|
|
prdContent: null,
|
|
createdAt: new Date().toISOString(),
|
|
lastActiveAt: new Date().toISOString()
|
|
});
|
|
}
|
|
const session = sessions.get(sessionId)!;
|
|
session.lastActiveAt = new Date().toISOString();
|
|
return session;
|
|
}
|
|
|
|
export function clearAtlasSession(sessionId: string): void {
|
|
sessions.delete(sessionId);
|
|
}
|
|
|
|
export function listAtlasSessions() {
|
|
return Array.from(sessions.values()).map(s => ({
|
|
id: s.id,
|
|
messages: s.history.length,
|
|
prdReady: s.prdContent !== null,
|
|
createdAt: s.createdAt,
|
|
lastActiveAt: s.lastActiveAt
|
|
}));
|
|
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Atlas chat result
|
|
// ---------------------------------------------------------------------------
|
|
|
|
/** Result of one atlasChat() turn. */
export interface AtlasChatResult {
  // Final assistant text; '' when the turn budget ran out with tool calls pending.
  reply: string;
  // Echo of the session id the turn ran under.
  sessionId: string;
  // Cleaned history: init-marker turns and empty assistant turns removed, capped at 60 entries.
  history: LLMMessage[];
  /** Set when Atlas has called finalize_prd — contains the full PRD markdown */
  prdContent: string | null;
  // Identifier of the model that produced the reply.
  model: string;
}
|
|
|
|
// ---------------------------------------------------------------------------
|
|
// Main chat handler
|
|
// ---------------------------------------------------------------------------
|
|
|
|
// Atlas only exposes a conversational subset of the full tool registry.
const ATLAS_TOOLS = ALL_TOOLS.filter(t => t.name === 'finalize_prd' || t.name === 'web_search');
|
|
|
|
export async function atlasChat(
|
|
sessionId: string,
|
|
userMessage: string,
|
|
ctx: ToolContext,
|
|
opts?: {
|
|
preloadedHistory?: LLMMessage[];
|
|
/** When true, the user message is an internal init trigger and should not be stored in history */
|
|
isInit?: boolean;
|
|
}
|
|
): Promise<AtlasChatResult> {
|
|
const llm = createLLM(process.env.ATLAS_MODEL ?? 'A', { temperature: 0.5 });
|
|
const session = getOrCreateSession(sessionId);
|
|
|
|
// Seed from DB history if this is a fresh in-memory session
|
|
if (opts?.preloadedHistory && opts.preloadedHistory.length > 0 && session.history.length === 0) {
|
|
session.history = [...opts.preloadedHistory];
|
|
}
|
|
|
|
const oaiTools = toOAITools(ATLAS_TOOLS);
|
|
const systemPrompt = resolvePrompt('atlas');
|
|
|
|
// Always push the user message so Gemini gets a valid conversation (requires at least one user turn).
|
|
// For init triggers, we mark it so we can strip it from the returned history — it's an internal
|
|
// prompt, not a real user message, and shouldn't appear in the conversation UI or DB.
|
|
const INIT_MARKER = '__atlas_init_marker__';
|
|
session.history.push({
|
|
role: 'user',
|
|
content: opts?.isInit ? INIT_MARKER + userMessage : userMessage
|
|
});
|
|
|
|
const buildMessages = (): LLMMessage[] => [
|
|
{ role: 'system', content: systemPrompt },
|
|
...session.history.slice(-60).map(m =>
|
|
// Strip the init marker before sending to the LLM
|
|
m.role === 'user' && typeof m.content === 'string' && m.content.startsWith(INIT_MARKER)
|
|
? { ...m, content: m.content.slice(INIT_MARKER.length) }
|
|
: m
|
|
)
|
|
];
|
|
|
|
let turn = 0;
|
|
let finalReply = '';
|
|
let prdContent: string | null = session.prdContent;
|
|
|
|
while (turn < MAX_TURNS) {
|
|
turn++;
|
|
|
|
const response = await llm.chat(buildMessages(), oaiTools, 4096);
|
|
|
|
const hasContent = response.content !== null && response.content !== '';
|
|
const hasToolCalls = response.tool_calls.length > 0;
|
|
|
|
if (hasContent || hasToolCalls) {
|
|
session.history.push({
|
|
role: 'assistant',
|
|
content: response.content,
|
|
tool_calls: hasToolCalls ? response.tool_calls : undefined
|
|
});
|
|
}
|
|
|
|
if (!hasToolCalls) {
|
|
finalReply = response.content ?? '';
|
|
break;
|
|
}
|
|
|
|
// Execute tool calls (only finalize_prd for Atlas)
|
|
for (const tc of response.tool_calls) {
|
|
let fnArgs: Record<string, unknown> = {};
|
|
try { fnArgs = JSON.parse(tc.function.arguments || '{}'); } catch { /* bad JSON */ }
|
|
|
|
let result: unknown;
|
|
try {
|
|
result = await executeTool(tc.function.name, fnArgs, ctx);
|
|
} catch (err) {
|
|
result = { error: err instanceof Error ? err.message : String(err) };
|
|
}
|
|
|
|
// Check if PRD was just saved
|
|
const stored = prdStore.get(ctx.workspaceRoot);
|
|
if (stored && !prdContent) {
|
|
prdContent = stored;
|
|
session.prdContent = stored;
|
|
prdStore.delete(ctx.workspaceRoot); // consume it
|
|
}
|
|
|
|
session.history.push({
|
|
role: 'tool',
|
|
tool_call_id: tc.id,
|
|
name: tc.function.name,
|
|
content: typeof result === 'string' ? result : JSON.stringify(result)
|
|
});
|
|
}
|
|
}
|
|
|
|
return {
|
|
reply: finalReply,
|
|
sessionId,
|
|
history: session.history
|
|
// Drop the internal init user turn — it's not a real user message
|
|
.filter(m => !(m.role === 'user' && typeof m.content === 'string' && m.content.startsWith(INIT_MARKER)))
|
|
.filter(m => m.role !== 'assistant' || m.content || m.tool_calls?.length)
|
|
.slice(-60),
|
|
prdContent,
|
|
model: llm.modelId
|
|
};
|
|
}
|