VIBN Frontend for Coolify deployment
lib/ai/chat-extractor.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
import type { LlmClient } from '@/lib/ai/llm-client';
import { ChatExtractionSchema } from '@/lib/ai/chat-extraction-types';
import type { ChatExtractionData } from '@/lib/ai/chat-extraction-types';
import type { KnowledgeItem } from '@/lib/types/knowledge';

const SYSTEM_PROMPT = `
You are the Product Chat Signal Extractor for stalled SaaS projects.
- Read the provided transcript carefully.
- Extract grounded signals about the product, market, users, execution status, and unknowns.
- Never invent data. Use "null" or empty arrays when the transcript lacks information.
- Respond with valid JSON that matches the provided schema exactly. Do not include prose or code fences.
`.trim();

export async function runChatExtraction(
  knowledgeItem: KnowledgeItem,
  llm: LlmClient,
): Promise<ChatExtractionData> {
  const transcript = knowledgeItem.content.trim();

  const userMessage = `
You will analyze the following transcript. Use message references when listing evidence (e.g., msg_1).
Focus on actionable product-building insights.

TRANSCRIPT_START
${transcript}
TRANSCRIPT_END`.trim();

  return llm.structuredCall<ChatExtractionData>({
    model: 'gemini',
    systemPrompt: SYSTEM_PROMPT,
    messages: [
      {
        role: 'user',
        content: userMessage,
      },
    ],
    schema: ChatExtractionSchema,
    temperature: 0.2,
  });
}
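For reference, a minimal sketch of how this extractor could be called. The real LlmClient implementation and the full KnowledgeItem type live elsewhere in the repo, so everything below beyond runChatExtraction's signature is an assumption: the stub client and the item fields other than content are hypothetical stand-ins for testing.

import { runChatExtraction } from '@/lib/ai/chat-extractor';
import type { LlmClient } from '@/lib/ai/llm-client';
import type { KnowledgeItem } from '@/lib/types/knowledge';

// Stub client for local testing. It only mirrors the structuredCall usage seen
// in the extractor above; the real LlmClient interface may differ.
const stubLlm = {
  async structuredCall<T>(_request: unknown): Promise<T> {
    return {} as T; // canned response instead of a real Gemini call
  },
} as unknown as LlmClient;

async function demo() {
  // Hypothetical knowledge item; only `content` is read by runChatExtraction,
  // so other KnowledgeItem fields are omitted here via the cast.
  const item = {
    content: 'msg_1: We launched the beta in March.\nmsg_2: Activation stalled at 12%.',
  } as KnowledgeItem;

  const extraction = await runChatExtraction(item, stubLlm);
  // Typed as ChatExtractionData; the real client is expected to enforce
  // ChatExtractionSchema on the model output.
  console.log(extraction);
}

void demo();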