fix: compile dist from source in Docker, fix ChatResult interface

- Dockerfile now runs tsc during build so committed dist/ is never stale
- ChatResult interface was missing history[] and memoryUpdates[] fields
- Re-add missing MemoryUpdate import in orchestrator.ts
- Rebuild dist/ with all new fields included

Made-with: Cursor
This commit is contained in:
2026-02-27 19:27:42 -08:00
parent 837b6e8b8d
commit d9368e4abd
14 changed files with 675 additions and 259 deletions

67
dist/llm.d.ts vendored Normal file
View File

@@ -0,0 +1,67 @@
/**
 * A single chat message in the OpenAI-compatible wire format
 * (field names `tool_calls` / `tool_call_id` match the Chat Completions API).
 */
export interface LLMMessage {
    /** Originator of the message. */
    role: 'system' | 'user' | 'assistant' | 'tool';
    /** Text content; null is allowed (presumably for assistant turns that carry only tool calls — confirm in llm.ts). */
    content: string | null;
    /** Tool invocations attached to an assistant message, if any. */
    tool_calls?: LLMToolCall[];
    /** For role 'tool': id of the tool call this message responds to. */
    tool_call_id?: string;
    /** Optional name field (e.g. tool name); semantics follow the OpenAI message schema. */
    name?: string;
}
/**
 * A tool (function) invocation requested by the model, in the
 * OpenAI-compatible shape: arguments arrive as a raw JSON string.
 */
export interface LLMToolCall {
    /** Unique id for this call; echoed back via LLMMessage.tool_call_id. */
    id: string;
    /** Only 'function' calls are modeled. */
    type: 'function';
    function: {
        /** Name of the tool/function to invoke. */
        name: string;
        /** JSON-encoded argument object — callers must parse (and should validate) it. */
        arguments: string;
    };
}
/**
 * A tool definition advertised to the model, in the OpenAI-compatible
 * "function tool" shape. See toOAITools for converting plain tool specs
 * into this wrapper.
 */
export interface LLMTool {
    /** Only 'function' tools are modeled. */
    type: 'function';
    function: {
        /** Name the model will use to invoke the tool. */
        name: string;
        /** Human/model-readable description of what the tool does. */
        description: string;
        /** JSON Schema for the tool's arguments (opaque to this interface). */
        parameters: Record<string, unknown>;
    };
}
/**
 * Normalized result of one chat completion, shared across all LLMClient
 * implementations.
 */
export interface LLMResponse {
    /** Assistant text, or null when the model returned only tool calls. */
    content: string | null;
    /** Model "reasoning"/thinking text when the provider exposes it, else null. */
    reasoning: string | null;
    /** Tool calls requested by the model; empty array when none (non-optional, unlike LLMMessage.tool_calls). */
    tool_calls: LLMToolCall[];
    /** Provider finish reason (e.g. 'stop', 'tool_calls' — exact values depend on the backend). */
    finish_reason: string;
    /** Token accounting, when the provider reports it. */
    usage?: {
        prompt_tokens: number;
        completion_tokens: number;
        total_tokens: number;
    };
}
/**
 * Minimal provider-agnostic chat client contract implemented by
 * VertexOpenAIClient and GeminiClient.
 */
export interface LLMClient {
    /** Identifier of the underlying model (provider-specific string). */
    modelId: string;
    /**
     * Run one chat completion.
     * @param messages Conversation so far, in wire order.
     * @param tools Optional tool definitions the model may call.
     * @param maxTokens Optional completion-token cap.
     */
    chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
}
/**
 * LLMClient backed by Google Cloud Vertex AI, presumably via its
 * OpenAI-compatible endpoint (the name suggests so — confirm in llm.ts).
 * modelId is required; project/region/temperature are optional overrides,
 * with defaults resolved in the implementation (not visible here).
 */
export declare class VertexOpenAIClient implements LLMClient {
    /** Vertex model identifier passed at construction. */
    modelId: string;
    private projectId;
    private region;
    private temperature;
    constructor(modelId: string, opts?: {
        /** GCP project id override; default comes from the implementation/environment. */
        projectId?: string;
        /** GCP region override. */
        region?: string;
        /** Sampling temperature override. */
        temperature?: number;
    });
    /** See LLMClient.chat. */
    chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
}
/**
 * LLMClient backed by a Gemini API (per the class name — confirm which
 * endpoint in llm.ts). Unlike VertexOpenAIClient, modelId itself is
 * optional here, so the implementation supplies a default model.
 */
export declare class GeminiClient implements LLMClient {
    /** Resolved model identifier (defaulted when omitted at construction). */
    modelId: string;
    private temperature;
    constructor(modelId?: string, opts?: {
        /** Sampling temperature override. */
        temperature?: number;
    });
    /** See LLMClient.chat. */
    chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
}
export type ModelTier = 'A' | 'B' | 'C';
/**
 * Factory for an LLMClient.
 * @param modelOrTier Either a concrete model id string or a ModelTier
 *   ('A' | 'B' | 'C'); note the union overlaps, so tier letters are
 *   distinguished from model ids at runtime by the implementation.
 * @param opts Optional sampling temperature.
 * @returns A client for the selected backend (Vertex or Gemini — selection logic lives in llm.ts).
 */
export declare function createLLM(modelOrTier: string | ModelTier, opts?: {
    temperature?: number;
}): LLMClient;
/**
 * Wrap plain tool specs ({ name, description, parameters }) into the
 * OpenAI-style LLMTool envelope ({ type: 'function', function: {...} }).
 * @param tools Flat tool definitions with JSON-Schema parameters.
 * @returns The same tools in LLMTool form, suitable for LLMClient.chat.
 */
export declare function toOAITools(tools: Array<{
    name: string;
    description: string;
    parameters: Record<string, unknown>;
}>): LLMTool[];