/**
 * A single chat message in the OpenAI-compatible message format.
 * `tool_calls` / `tool_call_id` / `name` are optional fields used by the
 * tool-calling round-trip (assistant requests a call; a 'tool' message answers it).
 */
export interface LLMMessage {
  role: 'system' | 'user' | 'assistant' | 'tool';
  content: string | null;
  tool_calls?: LLMToolCall[];
  tool_call_id?: string;
  name?: string;
}

/** One tool invocation requested by the model; `arguments` is a JSON string. */
export interface LLMToolCall {
  id: string;
  type: 'function';
  function: {
    name: string;
    arguments: string;
  };
}

/**
 * A tool definition advertised to the model.
 * NOTE(fix): `parameters` was the bare generic `Record`, which is a compile
 * error (Record requires two type arguments). It holds a JSON-Schema-shaped
 * object, so `Record<string, unknown>` is the correct, safe type.
 */
export interface LLMTool {
  type: 'function';
  function: {
    name: string;
    description: string;
    parameters: Record<string, unknown>;
  };
}

/** Normalized model reply: text and/or tool calls, plus optional token usage. */
export interface LLMResponse {
  content: string | null;
  reasoning: string | null;
  tool_calls: LLMToolCall[];
  finish_reason: string;
  usage?: {
    prompt_tokens: number;
    completion_tokens: number;
    total_tokens: number;
  };
}

/**
 * Minimal chat-completion client contract.
 * NOTE(fix): `chat` previously returned the bare generic `Promise` (a compile
 * error — Promise requires a type argument). It resolves to `LLMResponse`,
 * the only response type declared in this module.
 */
export interface LLMClient {
  modelId: string;
  chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
}

/** OpenAI-compatible client backed by Vertex AI (project/region scoped). */
export declare class VertexOpenAIClient implements LLMClient {
  modelId: string;
  private projectId;
  private region;
  private temperature;
  constructor(modelId: string, opts?: {
    projectId?: string;
    region?: string;
    temperature?: number;
  });
  chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
}

/** Gemini API client; model id defaults internally when omitted. */
export declare class GeminiClient implements LLMClient {
  modelId: string;
  private temperature;
  constructor(modelId?: string, opts?: {
    temperature?: number;
  });
  chat(messages: LLMMessage[], tools?: LLMTool[], maxTokens?: number): Promise<LLMResponse>;
}

/** Coarse model-capability tier accepted by {@link createLLM}. */
export type ModelTier = 'A' | 'B' | 'C';

/** Factory: resolves a concrete model id or a {@link ModelTier} to a client. */
export declare function createLLM(modelOrTier: string | ModelTier, opts?: {
  temperature?: number;
}): LLMClient;

/** Converts plain tool descriptors into the OpenAI `LLMTool` wire shape. */
export declare function toOAITools(tools: Array<{
  name: string;
  description: string;
  parameters: Record<string, unknown>;
}>): LLMTool[];