- Dockerfile now runs tsc during build so the committed dist/ is never stale
- ChatResult interface was missing the history[] and memoryUpdates[] fields
- Re-add the missing MemoryUpdate import in orchestrator.ts
- Rebuild dist/ with all new fields included

Made-with: Cursor
68 lines
1.8 KiB
TypeScript
export interface LLMMessage {
|
|
role: 'system' | 'user' | 'assistant' | 'tool';
|
|
content: string | null;
|
|
tool_calls?: LLMToolCall[];
|
|
tool_call_id?: string;
|
|
name?: string;
|
|
}
|
|
export interface LLMToolCall {
|
|
id: string;
|
|
type: 'function';
|
|
function: {
|
|
name: string;
|
|
arguments: string;
|
|
};
|
|
}
|
|
export interface LLMTool {
|
|
type: 'function';
|
|
function: {
|
|
name: string;
|
|
description: string;
|
|
parameters: Record<string, unknown>;
|
|
};
|
|
}
|
|
export interface LLMResponse {
|
|
content: string | null;
|
|
reasoning: string | null;
|
|
tool_calls: LLMToolCall[];
|
|
finish_reason: string;
|
|
usage?: {
|
|
prompt_tokens: number;
|
|
completion_tokens: number;
|
|
total_tokens: number;
|
|
};
|
|
}
|
|
export interface LLMClient {
|
|
modelId: string;
|
|
chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
|
|
}
|
|
export declare class VertexOpenAIClient implements LLMClient {
|
|
modelId: string;
|
|
private projectId;
|
|
private region;
|
|
private temperature;
|
|
constructor(modelId: string, opts?: {
|
|
projectId?: string;
|
|
region?: string;
|
|
temperature?: number;
|
|
});
|
|
chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
|
|
}
|
|
export declare class GeminiClient implements LLMClient {
|
|
modelId: string;
|
|
private temperature;
|
|
constructor(modelId?: string, opts?: {
|
|
temperature?: number;
|
|
});
|
|
chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
|
|
}
|
|
export type ModelTier = 'A' | 'B' | 'C';
|
|
export declare function createLLM(modelOrTier: string | ModelTier, opts?: {
|
|
temperature?: number;
|
|
}): LLMClient;
|
|
export declare function toOAITools(tools: Array<{
|
|
name: string;
|
|
description: string;
|
|
parameters: Record<string, unknown>;
|
|
}>): LLMTool[];
|