import type { LlmClient } from '@/lib/ai/llm-client';
import { ChatExtractionSchema } from '@/lib/ai/chat-extraction-types';
import type { ChatExtractionData } from '@/lib/ai/chat-extraction-types';
import type { KnowledgeItem } from '@/lib/types/knowledge';

// System prompt pinning the model to grounded, schema-exact JSON output
// (no prose, no code fences, no invented data).
const SYSTEM_PROMPT = `
You are the Product Chat Signal Extractor for stalled SaaS projects.
- Read the provided transcript carefully.
- Extract grounded signals about the product, market, users, execution status, and unknowns.
- Never invent data. Use "null" or empty arrays when the transcript lacks information.
- Respond with valid JSON that matches the provided schema exactly. Do not include prose or code fences.
`.trim();

/**
 * Extracts structured product signals from a chat transcript via the LLM.
 *
 * Wraps the transcript in TRANSCRIPT_START/TRANSCRIPT_END markers and issues a
 * single structured call validated against {@link ChatExtractionSchema}.
 *
 * @param knowledgeItem - Item whose `content` field holds the raw transcript text.
 * @param llm - Client used to perform the schema-constrained model call.
 * @returns The extraction result conforming to {@link ChatExtractionSchema}.
 *   Fixed: the original declared a bare `Promise` (missing type argument — a
 *   compile error under strict TS) while importing `ChatExtractionData` unused;
 *   the return type is now `Promise<ChatExtractionData>`.
 */
export async function runChatExtraction(
  knowledgeItem: KnowledgeItem,
  llm: LlmClient,
): Promise<ChatExtractionData> {
  const transcript = knowledgeItem.content.trim();

  const userMessage = `
You will analyze the following transcript.
Use message references when listing evidence (e.g., msg_1).
Focus on actionable product-building insights.

TRANSCRIPT_START
${transcript}
TRANSCRIPT_END`.trim();

  // Low temperature: extraction should be deterministic-ish and grounded,
  // not creative.
  return llm.structuredCall({
    model: 'gemini',
    systemPrompt: SYSTEM_PROMPT,
    messages: [
      {
        role: 'user',
        content: userMessage,
      },
    ],
    schema: ChatExtractionSchema,
    temperature: 0.2,
  });
}