deploy: current vibn theia state
Some checks failed
Playwright Tests / Playwright Tests (ubuntu-22.04, Node.js 22.x) (push) Has been cancelled
3PP License Check / 3PP License Check (11, 22.x, ubuntu-22.04) (push) Has been cancelled
Publish packages to NPM / Perform Publishing (push) Has been cancelled

Made-with: Cursor
This commit is contained in:
2026-02-27 12:01:08 -08:00
commit 8bb5110148
3782 changed files with 640947 additions and 0 deletions

View File

@@ -0,0 +1,10 @@
/** @type {import('eslint').Linter.Config} */
const eslintConfig = {
    // Inherit the shared build-time lint rules of the monorepo.
    extends: [
        '../../configs/build.eslintrc.json'
    ],
    parserOptions: {
        // Resolve the 'project' tsconfig relative to this package directory.
        tsconfigRootDir: __dirname,
        project: 'tsconfig.json'
    }
};

module.exports = eslintConfig;

View File

@@ -0,0 +1,32 @@
<div align='center'>
<br />
<img src='https://raw.githubusercontent.com/eclipse-theia/theia/master/logo/theia.svg?sanitize=true' alt='theia-ext-logo' width='100px' />
<h2>ECLIPSE THEIA - VERCEL AI SDK INTEGRATION</h2>
<hr />
</div>
## Description
The `@theia/ai-vercel-ai` extension integrates Vercel's models with Theia AI.
The Vercel AI API key and the models to use can be configured via preferences.
## Additional Information
- [API documentation for `@theia/ai-vercel-ai`](https://eclipse-theia.github.io/theia/docs/next/modules/_theia_ai-vercel-ai.html)
- [Theia - GitHub](https://github.com/eclipse-theia/theia)
- [Theia - Website](https://theia-ide.org/)
## License
- [Eclipse Public License 2.0](http://www.eclipse.org/legal/epl-2.0/)
- [(Secondary) GNU General Public License, version 2 with the GNU Classpath Exception](https://projects.eclipse.org/license/secondary-gpl-2.0-cp)
## Trademark
"Theia" is a trademark of the Eclipse Foundation
<https://www.eclipse.org/theia>

View File

@@ -0,0 +1,55 @@
{
"name": "@theia/ai-vercel-ai",
"version": "1.68.0",
"description": "Theia - Vercel AI SDK Integration",
"dependencies": {
"@theia/ai-core": "1.68.0",
"@theia/core": "1.68.0",
"@theia/filesystem": "1.68.0",
"@theia/workspace": "1.68.0",
"ai": "^4.3.13",
"@ai-sdk/provider": "^1.1.3",
"@ai-sdk/openai": "^1.3.21",
"@ai-sdk/anthropic": "^1.2.10",
"tslib": "^2.6.2"
},
"publishConfig": {
"access": "public"
},
"theiaExtensions": [
{
"frontend": "lib/browser/vercel-ai-frontend-module",
"backend": "lib/node/vercel-ai-backend-module"
}
],
"keywords": [
"theia-extension"
],
"license": "EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0",
"repository": {
"type": "git",
"url": "https://github.com/eclipse-theia/theia.git"
},
"bugs": {
"url": "https://github.com/eclipse-theia/theia/issues"
},
"homepage": "https://github.com/eclipse-theia/theia",
"files": [
"lib",
"src"
],
"scripts": {
"build": "theiaext build",
"clean": "theiaext clean",
"compile": "theiaext compile",
"lint": "theiaext lint",
"test": "theiaext test",
"watch": "theiaext watch"
},
"devDependencies": {
"@theia/ext-scripts": "1.68.0"
},
"nyc": {
"extends": "../../configs/nyc.json"
}
}

View File

@@ -0,0 +1,226 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { FrontendApplicationContribution } from '@theia/core/lib/browser';
import { inject, injectable } from '@theia/core/shared/inversify';
import { VercelAiLanguageModelsManager, VercelAiModelDescription, VercelAiProvider } from '../common';
import { ANTHROPIC_API_KEY_PREF, CUSTOM_ENDPOINTS_PREF, MODELS_PREF, OPENAI_API_KEY_PREF, VERCEL_AI_PROVIDER_ID } from '../common/vercel-ai-preferences';
import { AICorePreferences, PREFERENCE_NAME_MAX_RETRIES } from '@theia/ai-core/lib/common/ai-core-preferences';
import { PreferenceService, PreferenceChange } from '@theia/core';
/**
 * Shape of one entry of the official-models preference
 * (`ai-features.vercelAi.officialModels`).
 */
interface ModelConfig {
    /** Unique identifier shown in the UI, e.g. 'vercel/openai/gpt-4o'. */
    id: string;
    /** Model name as understood by the Vercel AI SDK provider. */
    model: string;
    /** Which provider serves this model ('openai' or 'anthropic'). */
    provider: VercelAiProvider;
}
@injectable()
export class VercelAiFrontendApplicationContribution implements FrontendApplicationContribution {

    @inject(PreferenceService)
    protected preferenceService: PreferenceService;

    @inject(VercelAiLanguageModelsManager)
    protected manager: VercelAiLanguageModelsManager;

    @inject(AICorePreferences)
    protected aiCorePreferences: AICorePreferences;

    /** Snapshot of the official-models preference, used to diff change events. */
    protected previousModels: ModelConfig[] = [];

    /** Snapshot of the custom-models preference, used to diff change events. */
    protected previousCustomModels: Partial<VercelAiModelDescription>[] = [];

    /**
     * Once preferences are ready, pushes the provider API keys and the initial
     * model descriptions to the manager and wires up listeners that keep the
     * manager in sync with later preference changes.
     */
    onStart(): void {
        this.preferenceService.ready.then(() => {
            // Set up provider-specific API keys
            const openaiApiKey = this.preferenceService.get<string>(OPENAI_API_KEY_PREF, undefined);
            const anthropicApiKey = this.preferenceService.get<string>(ANTHROPIC_API_KEY_PREF, undefined);
            if (openaiApiKey) {
                this.manager.setProviderConfig('openai', { provider: 'openai', apiKey: openaiApiKey });
            }
            if (anthropicApiKey) {
                this.manager.setProviderConfig('anthropic', { provider: 'anthropic', apiKey: anthropicApiKey });
            }

            // Initial setup of models. Seed the previous* snapshots as well: they
            // previously stayed empty, so a model removed from the preference
            // before any other change was never diffed as a removal and remained
            // registered in the manager.
            const models = this.ensureModelConfigArray(this.preferenceService.get(MODELS_PREF, []));
            this.previousModels = models;
            this.manager.createOrUpdateLanguageModels(...models.map(model => this.createVercelAiModelDescription(model)));

            const customModels = this.ensureCustomModelArray(this.preferenceService.get(CUSTOM_ENDPOINTS_PREF, []));
            this.previousCustomModels = customModels;
            this.manager.createOrUpdateLanguageModels(...this.createCustomModelDescriptionsFromPreferences(customModels));

            // Set up listeners for preference changes
            this.preferenceService.onPreferenceChanged(this.handlePreferenceChange.bind(this));
            this.aiCorePreferences.onPreferenceChanged(event => {
                if (event.preferenceName === PREFERENCE_NAME_MAX_RETRIES) {
                    this.updateAllModels();
                }
            });
        });
    }

    /** Dispatches a preference change to the matching update handler. */
    protected handlePreferenceChange(event: PreferenceChange): void {
        switch (event.preferenceName) {
            case OPENAI_API_KEY_PREF:
                this.manager.setProviderConfig('openai', { provider: 'openai', apiKey: this.preferenceService.get<string>(OPENAI_API_KEY_PREF, undefined) });
                this.updateAllModels();
                break;
            case ANTHROPIC_API_KEY_PREF:
                this.manager.setProviderConfig('anthropic', { provider: 'anthropic', apiKey: this.preferenceService.get<string>(ANTHROPIC_API_KEY_PREF, undefined) });
                this.updateAllModels();
                break;
            case MODELS_PREF:
                this.handleModelChanges(event);
                break;
            case CUSTOM_ENDPOINTS_PREF:
                this.handleCustomModelChanges(event);
                break;
        }
    }

    /**
     * Diffs the official-models preference against the previous snapshot and
     * applies removals, additions and in-place edits to the manager.
     */
    protected handleModelChanges(event: PreferenceChange): void {
        const newModels = this.ensureModelConfigArray(this.preferenceService.get(MODELS_PREF, []));
        const oldModels = this.previousModels;
        this.previousModels = newModels;

        const oldModelsById = new Map(oldModels.map(model => [model.id, model]));
        const newModelIds = new Set(newModels.map(model => model.id));

        const modelsToRemove = oldModels.filter(model => !newModelIds.has(model.id)).map(model => model.id);
        // Also pick up entries whose id is unchanged but whose model/provider was
        // edited; the previous implementation diffed by id only, so such edits
        // were silently ignored until the next restart (handleCustomModelChanges
        // already compared field-by-field).
        const modelsToAddOrUpdate = newModels.filter(model => {
            const previous = oldModelsById.get(model.id);
            return !previous || previous.model !== model.model || previous.provider !== model.provider;
        });

        this.manager.removeLanguageModels(...modelsToRemove);
        this.manager.createOrUpdateLanguageModels(...modelsToAddOrUpdate.map(model => this.createVercelAiModelDescription(model)));
    }

    /**
     * Diffs the custom-models preference against the previous snapshot and
     * applies removals and additions/updates to the manager.
     */
    protected handleCustomModelChanges(event: PreferenceChange): void {
        const newCustomModels = this.ensureCustomModelArray(this.preferenceService.get(CUSTOM_ENDPOINTS_PREF, []));
        const oldCustomModels = this.previousCustomModels;
        this.previousCustomModels = newCustomModels;

        const oldModels = this.createCustomModelDescriptionsFromPreferences(oldCustomModels);
        const newModels = this.createCustomModelDescriptionsFromPreferences(newCustomModels);

        const modelsToRemove = oldModels.filter(model => !newModels.some(newModel => newModel.id === model.id));
        const modelsToAddOrUpdate = newModels.filter(newModel =>
            !oldModels.some(model =>
                model.id === newModel.id &&
                model.model === newModel.model &&
                model.url === newModel.url &&
                model.apiKey === newModel.apiKey &&
                model.supportsStructuredOutput === newModel.supportsStructuredOutput &&
                model.enableStreaming === newModel.enableStreaming &&
                model.provider === newModel.provider));

        this.manager.removeLanguageModels(...modelsToRemove.map(model => model.id));
        this.manager.createOrUpdateLanguageModels(...modelsToAddOrUpdate);
    }

    /** Defensively narrows an untyped preference value to a `ModelConfig[]`. */
    protected ensureModelConfigArray(value: unknown): ModelConfig[] {
        if (!value || !Array.isArray(value)) {
            return [];
        }
        return value.filter(item =>
            item &&
            typeof item === 'object' &&
            'id' in item &&
            'model' in item &&
            'provider' in item &&
            typeof item.id === 'string' &&
            typeof item.model === 'string' &&
            (typeof item.provider === 'string' || item.provider === undefined)
        ) as ModelConfig[];
    }

    /** Defensively narrows an untyped preference value to an array of partial model descriptions. */
    protected ensureCustomModelArray(value: unknown): Partial<VercelAiModelDescription>[] {
        if (!value || !Array.isArray(value)) {
            return [];
        }
        return value.filter(item =>
            item &&
            typeof item === 'object'
        ) as Partial<VercelAiModelDescription>[];
    }

    /** Re-pushes all configured models, e.g. after an API-key or max-retries change. */
    protected updateAllModels(): void {
        const models = this.ensureModelConfigArray(this.preferenceService.get(MODELS_PREF, []));
        this.manager.createOrUpdateLanguageModels(...models.map(model => this.createVercelAiModelDescription(model)));
        const customModels = this.ensureCustomModelArray(this.preferenceService.get(CUSTOM_ENDPOINTS_PREF, []));
        this.manager.createOrUpdateLanguageModels(...this.createCustomModelDescriptionsFromPreferences(customModels));
    }

    /** Converts an official-model preference entry into a full model description. */
    protected createVercelAiModelDescription(modelInfo: ModelConfig): VercelAiModelDescription {
        const maxRetries = this.aiCorePreferences.get(PREFERENCE_NAME_MAX_RETRIES) ?? 3;
        // The model ID already includes the 'vercel' prefix from preferences
        return {
            id: modelInfo.id,
            model: modelInfo.model,
            provider: modelInfo.provider,
            apiKey: true,
            enableStreaming: true,
            supportsStructuredOutput: modelsSupportingStructuredOutput.includes(modelInfo.model),
            maxRetries: maxRetries
        };
    }

    /**
     * Converts the custom-models preference entries into full model
     * descriptions, skipping entries without a valid `model` and `url`.
     */
    protected createCustomModelDescriptionsFromPreferences(
        preferences: Partial<VercelAiModelDescription>[]
    ): VercelAiModelDescription[] {
        const maxRetries = this.aiCorePreferences.get(PREFERENCE_NAME_MAX_RETRIES) ?? 3;
        return preferences.reduce<VercelAiModelDescription[]>((acc, pref) => {
            if (!pref.model || !pref.url || typeof pref.model !== 'string' || typeof pref.url !== 'string') {
                return acc;
            }
            const modelId = pref.id && typeof pref.id === 'string' ? pref.id : pref.model;
            // Ensure custom model IDs carry a provider prefix. Both the actual
            // provider prefix 'vercel-ai/' and the legacy 'vercel/' prefix (used
            // by the official model defaults) count as already prefixed. The
            // previous check only recognized 'vercel/' while prefixing with
            // VERCEL_AI_PROVIDER_ID ('vercel-ai'), so re-processing an id that
            // was already 'vercel-ai/foo' double-prefixed it to
            // 'vercel-ai/vercel-ai/foo'.
            const alreadyPrefixed = modelId.startsWith(`${VERCEL_AI_PROVIDER_ID}/`) || modelId.startsWith('vercel/');
            const prefixedId = alreadyPrefixed ? modelId : `${VERCEL_AI_PROVIDER_ID}/${modelId}`;
            return [
                ...acc,
                {
                    id: prefixedId,
                    model: pref.model,
                    url: pref.url,
                    provider: pref.provider || 'openai',
                    apiKey: typeof pref.apiKey === 'string' || pref.apiKey === true ? pref.apiKey : undefined,
                    supportsStructuredOutput: pref.supportsStructuredOutput ?? true,
                    enableStreaming: pref.enableStreaming ?? true,
                    maxRetries: pref.maxRetries ?? maxRetries
                }
            ];
        }, []);
    }
}
// Model names for which `supportsStructuredOutput` is set on the generated
// model descriptions (i.e. models known to support JSON-schema output).
const openAiStructuredOutputModels = [
    'gpt-4.1',
    'gpt-4.1-mini',
    'gpt-4.1-nano',
    'gpt-4o',
    'gpt-4o-mini',
    'gpt-4-turbo'
];
const anthropicStructuredOutputModels = [
    'claude-3-7-sonnet-20250219',
    'claude-3-5-haiku-20241022',
    'claude-3-opus-20240229'
];
const modelsSupportingStructuredOutput = [
    ...openAiStructuredOutputModels,
    ...anthropicStructuredOutputModels
];

View File

@@ -0,0 +1,32 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { ContainerModule } from '@theia/core/shared/inversify';
import { VercelAiPreferencesSchema } from '../common/vercel-ai-preferences';
import { FrontendApplicationContribution, RemoteConnectionProvider, ServiceConnectionProvider } from '@theia/core/lib/browser';
import { VercelAiFrontendApplicationContribution } from './vercel-ai-frontend-application-contribution';
import { VERCEL_AI_LANGUAGE_MODELS_MANAGER_PATH, VercelAiLanguageModelsManager } from '../common';
import { PreferenceContribution } from '@theia/core';
/**
 * Frontend DI module of the Vercel AI integration: contributes the preference
 * schema, registers the application contribution, and exposes the backend
 * language-models manager through an RPC proxy.
 */
export default new ContainerModule(bind => {
    bind(PreferenceContribution).toConstantValue({ schema: VercelAiPreferencesSchema });
    bind(VercelAiFrontendApplicationContribution).toSelf().inSingletonScope();
    bind(FrontendApplicationContribution).toService(VercelAiFrontendApplicationContribution);
    // The manager lives in the backend; create a singleton RPC proxy for it.
    bind(VercelAiLanguageModelsManager)
        .toDynamicValue(({ container }) => container
            .get<ServiceConnectionProvider>(RemoteConnectionProvider)
            .createProxy<VercelAiLanguageModelsManager>(VERCEL_AI_LANGUAGE_MODELS_MANAGER_PATH))
        .inSingletonScope();
});

View File

@@ -0,0 +1,16 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
// Re-export the shared Vercel AI protocol types and the RPC service path.
export * from './vercel-ai-language-models-manager';

View File

@@ -0,0 +1,67 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
/** RPC path under which the backend language models manager is exposed to the frontend. */
export const VERCEL_AI_LANGUAGE_MODELS_MANAGER_PATH = '/services/vercel-ai/language-model-manager';

/** Providers supported by the Vercel AI integration. */
export type VercelAiProvider = 'openai' | 'anthropic';

/** Provider-level configuration shared by all models of one provider. */
export interface VercelAiProviderConfig {
    provider: VercelAiProvider;
    /** API key to use for this provider. */
    apiKey?: string;
    /** Optional base URL overriding the provider's default endpoint. */
    baseURL?: string;
}

/** Description of a single language model served through the Vercel AI SDK. */
export interface VercelAiModelDescription {
    /**
     * The identifier of the model which will be shown in the UI.
     */
    id: string;
    /**
     * The model ID as used by the Vercel AI SDK.
     */
    model: string;
    /**
     * The provider of the model (openai, anthropic, etc.)
     */
    provider?: VercelAiProvider;
    /**
     * The API base URL where the model is hosted. If not provided the default provider endpoint will be used.
     */
    url?: string;
    /**
     * The key for the model. If 'true' is provided the global provider API key will be used.
     */
    apiKey: string | true | undefined;
    /**
     * Controls whether streaming is enabled for this model.
     */
    enableStreaming: boolean;
    /**
     * Flag to configure whether the model supports structured output. Default is `true`.
     */
    supportsStructuredOutput: boolean;
    /**
     * Maximum number of retry attempts when a request fails. Default is 3.
     */
    maxRetries: number;
}

/** DI symbol for the manager service. */
export const VercelAiLanguageModelsManager = Symbol('VercelAiLanguageModelsManager');

/**
 * Manages the set of Vercel AI language models. Implemented in the backend and
 * proxied to the frontend over RPC (see the frontend/backend modules).
 */
export interface VercelAiLanguageModelsManager {
    // NOTE(review): presumably the globally configured fallback API key —
    // confirm against the backend implementation.
    apiKey: string | undefined;
    /** Sets or updates the configuration of the given provider. */
    setProviderConfig(provider: VercelAiProvider, config: Partial<VercelAiProviderConfig>): void;
    /** Creates new language models or updates existing ones with the same id. */
    createOrUpdateLanguageModels(...models: VercelAiModelDescription[]): Promise<void>;
    /** Removes the language models with the given ids. */
    removeLanguageModels(...modelIds: string[]): void;
}

View File

@@ -0,0 +1,141 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { AI_CORE_PREFERENCES_TITLE } from '@theia/ai-core/lib/common/ai-core-preferences';
import { nls, PreferenceSchema } from '@theia/core';
/** Preference key for the OpenAI API key used by the Vercel AI SDK integration. */
export const OPENAI_API_KEY_PREF = 'ai-features.vercelAi.openaiApiKey';
/** Preference key for the Anthropic API key used by the Vercel AI SDK integration. */
export const ANTHROPIC_API_KEY_PREF = 'ai-features.vercelAi.anthropicApiKey';
/** Preference key listing the official models offered by this integration. */
export const MODELS_PREF = 'ai-features.vercelAi.officialModels';
/** Preference key listing user-defined custom model endpoints. */
export const CUSTOM_ENDPOINTS_PREF = 'ai-features.vercelAi.customModels';
/** Provider id; used to prefix custom model ids contributed by this integration. */
export const VERCEL_AI_PROVIDER_ID = 'vercel-ai';
/**
 * Preference schema of the Vercel AI integration: provider API keys, the list
 * of official models, and user-defined custom model endpoints. All settings
 * are tagged 'experimental'.
 */
export const VercelAiPreferencesSchema: PreferenceSchema = {
    properties: {
        [OPENAI_API_KEY_PREF]: {
            type: 'string',
            markdownDescription: nls.localize('theia/ai/vercelai/openaiApiKey/mdDescription',
                'Enter an API Key for OpenAI models used by the Vercel AI SDK. \
**Please note:** By using this preference the API key will be stored in clear text \
on the machine running Theia. Use the environment variable `OPENAI_API_KEY` to set the key securely.'),
            title: AI_CORE_PREFERENCES_TITLE,
            tags: ['experimental']
        },
        [ANTHROPIC_API_KEY_PREF]: {
            type: 'string',
            markdownDescription: nls.localize('theia/ai/vercelai/anthropicApiKey/mdDescription',
                'Enter an API Key for Anthropic models used by the Vercel AI SDK. \
**Please note:** By using this preference the API key will be stored in clear text \
on the machine running Theia. Use the environment variable `ANTHROPIC_API_KEY` to set the key securely.'),
            title: AI_CORE_PREFERENCES_TITLE,
            tags: ['experimental']
        },
        [MODELS_PREF]: {
            type: 'array',
            description: nls.localize('theia/ai/vercelai/models/description', 'Official models to use with Vercel AI SDK'),
            title: AI_CORE_PREFERENCES_TITLE,
            // Default model set; ids carry the 'vercel/<provider>/' prefix shown in the UI.
            default: [
                { id: 'vercel/openai/gpt-4.1', model: 'gpt-4.1', provider: 'openai' },
                { id: 'vercel/openai/gpt-4.1-nano', model: 'gpt-4.1-nano', provider: 'openai' },
                { id: 'vercel/openai/gpt-4.1-mini', model: 'gpt-4.1-mini', provider: 'openai' },
                { id: 'vercel/openai/gpt-4-turbo', model: 'gpt-4-turbo', provider: 'openai' },
                { id: 'vercel/openai/gpt-4o', model: 'gpt-4o', provider: 'openai' },
                { id: 'vercel/openai/gpt-4o-mini', model: 'gpt-4o-mini', provider: 'openai' },
                { id: 'vercel/anthropic/claude-3-7-sonnet-20250219', model: 'claude-3-7-sonnet-20250219', provider: 'anthropic' },
                { id: 'vercel/anthropic/claude-3-5-haiku-20241022', model: 'claude-3-5-haiku-20241022', provider: 'anthropic' },
                { id: 'vercel/anthropic/claude-3-opus-20240229', model: 'claude-3-opus-20240229', provider: 'anthropic' }
            ],
            items: {
                type: 'object',
                properties: {
                    id: {
                        type: 'string',
                        title: nls.localize('theia/ai/vercelai/models/id/title', 'Model ID')
                    },
                    model: {
                        type: 'string',
                        title: nls.localize('theia/ai/vercelai/models/model/title', 'Model Name')
                    },
                    provider: {
                        type: 'string',
                        enum: ['openai', 'anthropic'],
                        title: nls.localizeByDefault('Provider')
                    }
                },
                required: ['id', 'model', 'provider']
            },
            tags: ['experimental']
        },
        [CUSTOM_ENDPOINTS_PREF]: {
            type: 'array',
            title: AI_CORE_PREFERENCES_TITLE,
            markdownDescription: nls.localize('theia/ai/vercelai/customEndpoints/mdDescription',
                'Integrate custom models compatible with the Vercel AI SDK. The required attributes are `model` and `url`.\
\n\
Optionally, you can\
\n\
- specify a unique `id` to identify the custom model in the UI. If none is given `model` will be used as `id`.\
\n\
- provide an `apiKey` to access the API served at the given url. Use `true` to indicate the use of the global API key.\
\n\
- specify `supportsStructuredOutput: false` to indicate that structured output shall not be used.\
\n\
- specify `enableStreaming: false` to indicate that streaming shall not be used.\
\n\
- specify `provider` to indicate which provider the model is from (openai, anthropic).'),
            default: [],
            items: {
                type: 'object',
                properties: {
                    model: {
                        type: 'string',
                        title: nls.localize('theia/ai/vercelai/customEndpoints/modelId/title', 'Model ID')
                    },
                    url: {
                        type: 'string',
                        title: nls.localize('theia/ai/vercelai/customEndpoints/url/title', 'The API endpoint where the model is hosted')
                    },
                    id: {
                        type: 'string',
                        title: nls.localize('theia/ai/vercelai/customEndpoints/id/title', 'A unique identifier which is used in the UI to identify the custom model'),
                    },
                    provider: {
                        type: 'string',
                        enum: ['openai', 'anthropic'],
                        title: nls.localizeByDefault('Provider')
                    },
                    apiKey: {
                        type: ['string', 'boolean'],
                        title: nls.localize('theia/ai/vercelai/customEndpoints/apiKey/title',
                            'Either the key to access the API served at the given url or `true` to use the global API key'),
                    },
                    supportsStructuredOutput: {
                        type: 'boolean',
                        title: nls.localize('theia/ai/vercelai/customEndpoints/supportsStructuredOutput/title',
                            'Indicates whether the model supports structured output. `true` by default.'),
                    },
                    enableStreaming: {
                        type: 'boolean',
                        title: nls.localize('theia/ai/vercelai/customEndpoints/enableStreaming/title',
                            'Indicates whether the streaming API shall be used. `true` by default.'),
                    }
                }
            },
            tags: ['experimental']
        }
    }
};

View File

@@ -0,0 +1,37 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { ContainerModule } from '@theia/core/shared/inversify';
import { VERCEL_AI_LANGUAGE_MODELS_MANAGER_PATH, VercelAiLanguageModelsManager } from '../common/vercel-ai-language-models-manager';
import { ConnectionHandler, PreferenceContribution, RpcConnectionHandler } from '@theia/core';
import { VercelAiLanguageModelsManagerImpl } from './vercel-ai-language-models-manager-impl';
import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';
import { VercelAiLanguageModelFactory } from './vercel-ai-language-model-factory';
import { VercelAiPreferencesSchema } from '../common/vercel-ai-preferences';
/**
 * Per-connection module: binds the language models manager implementation and
 * its factory once per frontend connection and serves the manager over RPC.
 * Only `bind` is destructured from the registry — the previously destructured
 * `bindBackendService`/`bindFrontendService` callbacks were unused.
 */
const vercelAiConnectionModule = ConnectionContainerModule.create(({ bind }) => {
    bind(VercelAiLanguageModelsManagerImpl).toSelf().inSingletonScope();
    bind(VercelAiLanguageModelsManager).toService(VercelAiLanguageModelsManagerImpl);
    bind(VercelAiLanguageModelFactory).toSelf().inSingletonScope();
    bind(ConnectionHandler).toDynamicValue(ctx =>
        new RpcConnectionHandler(VERCEL_AI_LANGUAGE_MODELS_MANAGER_PATH, () => ctx.container.get(VercelAiLanguageModelsManager))
    ).inSingletonScope();
});

/**
 * Backend main-container module: contributes the preference schema and the
 * per-connection module above.
 */
export default new ContainerModule(bind => {
    bind(PreferenceContribution).toConstantValue({ schema: VercelAiPreferencesSchema });
    bind(ConnectionContainerModule).toConstantValue(vercelAiConnectionModule);
});

View File

@@ -0,0 +1,82 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { createAnthropic } from '@ai-sdk/anthropic';
import { createOpenAI } from '@ai-sdk/openai';
import { LanguageModelV1 } from '@ai-sdk/provider';
import { injectable } from '@theia/core/shared/inversify';
import { VercelAiModelDescription } from '../common';
// NOTE(review): these declarations duplicate VercelAiProvider and
// VercelAiProviderConfig in '../common/vercel-ai-language-models-manager';
// consider re-exporting from there instead to avoid the two copies drifting.
/** Providers supported by the Vercel AI integration. */
export type VercelAiProvider = 'openai' | 'anthropic';
/** Provider-level connection settings (API key and optional endpoint override). */
export interface VercelAiProviderConfig {
    provider: VercelAiProvider;
    /** API key; when absent, the provider's environment variable is consulted. */
    apiKey?: string;
    /** Optional base URL overriding the provider's default endpoint. */
    baseURL?: string;
}
@injectable()
export class VercelAiLanguageModelFactory {

    /**
     * Instantiates a Vercel AI SDK language model for the given description.
     *
     * @param modelDescription the model to create (name, optional URL, API key)
     * @param providerConfig provider-level settings used as fallback for key and base URL
     * @throws if no API key can be resolved or the provider is not supported
     */
    createLanguageModel(modelDescription: VercelAiModelDescription, providerConfig: VercelAiProviderConfig): LanguageModelV1 {
        const apiKey = this.resolveApiKey(modelDescription, providerConfig);
        if (!apiKey) {
            throw new Error(`Please provide an API key for ${providerConfig.provider} in preferences or via environment variable`);
        }
        // A model-specific URL wins over the provider-wide base URL.
        const baseURL = modelDescription.url || providerConfig.baseURL;
        if (providerConfig.provider === 'openai') {
            return createOpenAI({
                apiKey,
                baseURL,
                compatibility: 'strict'
            }).languageModel(modelDescription.model);
        }
        if (providerConfig.provider === 'anthropic') {
            return createAnthropic({
                apiKey,
                baseURL
            }).languageModel(modelDescription.model);
        }
        throw new Error(`Unsupported provider: ${providerConfig.provider}`);
    }

    /**
     * Picks the effective API key: a concrete key on the model wins; `true` or
     * an absent key falls back to the provider configuration / environment.
     */
    private resolveApiKey(modelDescription: VercelAiModelDescription, providerConfig: VercelAiProviderConfig): string | undefined {
        const modelKey = modelDescription.apiKey;
        if (typeof modelKey === 'string' && modelKey) {
            return modelKey;
        }
        return this.getApiKeyBasedOnProvider(providerConfig);
    }

    /** Returns the provider-level key, falling back to the provider's environment variable. */
    private getApiKeyBasedOnProvider(providerConfig: VercelAiProviderConfig): string | undefined {
        if (providerConfig.apiKey) {
            return providerConfig.apiKey;
        }
        const environmentKeys: Record<string, string | undefined> = {
            openai: process.env.OPENAI_API_KEY,
            anthropic: process.env.ANTHROPIC_API_KEY
        };
        return environmentKeys[providerConfig.provider];
    }
}

View File

@@ -0,0 +1,413 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { LanguageModelV1 } from '@ai-sdk/provider';
import {
LanguageModel,
LanguageModelMessage,
LanguageModelParsedResponse,
LanguageModelRequest,
LanguageModelResponse,
LanguageModelStatus,
LanguageModelStreamResponse,
LanguageModelStreamResponsePart,
LanguageModelTextResponse,
TokenUsageService,
ToolCall,
UserRequest,
} from '@theia/ai-core';
import { CancellationToken, Disposable, ILogger } from '@theia/core';
import {
CoreMessage,
generateObject,
GenerateObjectResult,
generateText,
GenerateTextResult,
jsonSchema,
StepResult,
streamText,
TextStreamPart,
tool,
ToolExecutionOptions,
ToolResultPart,
ToolSet
} from 'ai';
import { VercelAiLanguageModelFactory, VercelAiProviderConfig } from './vercel-ai-language-model-factory';
/**
 * Cancellation handle combining Theia's CancellationToken with an AbortSignal
 * for the Vercel AI SDK; disposing releases the underlying listener.
 */
interface VercelCancellationToken extends Disposable {
    signal: AbortSignal;
    cancellationToken: CancellationToken;
    isCancellationRequested: boolean;
}
/**
 * Loosely typed view of the stream parts emitted by the SDK's full stream,
 * covering the text, tool-call and usage variants consumed by the transformer.
 */
type StreamPart = ToolResultPart | {
    type: string;
    textDelta?: string;
    toolCallId?: string;
    toolName?: string;
    args?: object | string;
    argsTextDelta?: string;
    usage?: { promptTokens: number; completionTokens: number };
    signature?: string;
};
/** The SDK's async full stream plus its `cancel` handle. */
interface VercelAiStream extends AsyncIterable<TextStreamPart<ToolSet>> {
    cancel: () => void;
}
/** Services handed to the stream transformer. */
interface StreamContext {
    logger: ILogger;
    cancellationToken?: VercelCancellationToken;
}
export class VercelAiStreamTransformer {

    /** Tool calls seen so far in the current stream, keyed by tool-call id. */
    private toolCallsMap = new Map<string, ToolCall>();

    constructor(
        protected readonly fullStream: VercelAiStream,
        protected readonly context: StreamContext
    ) { }

    /**
     * Converts the Vercel AI SDK stream into Theia language-model stream parts.
     * Text deltas are forwarded as `content` parts; tool-related parts are
     * folded into the id-keyed map and re-emitted as a full `tool_calls`
     * snapshot whenever the map changes. Cancellation stops the stream; stream
     * errors are logged and end the stream without throwing.
     */
    async *transform(): AsyncGenerator<LanguageModelStreamResponsePart> {
        this.toolCallsMap.clear();
        try {
            for await (const part of this.fullStream) {
                this.context.logger.trace('Received stream part:', part);
                if (this.context.cancellationToken?.isCancellationRequested) {
                    this.context.logger.debug('Cancellation requested, stopping stream');
                    this.fullStream.cancel();
                    break;
                }
                if (part.type === 'text-delta') {
                    if (part.textDelta) {
                        yield { content: part.textDelta };
                    }
                } else if (this.applyToolPart(part) && this.toolCallsMap.size > 0) {
                    yield { tool_calls: Array.from(this.toolCallsMap.values()) };
                }
            }
        } catch (error) {
            this.context.logger.error('Error in AI SDK stream:', error);
        }
    }

    /**
     * Folds a single non-text stream part into the tool-call map.
     * @returns `true` when the map changed and a snapshot should be emitted.
     */
    private applyToolPart(part: StreamPart): boolean {
        if (part.type === 'tool-call') {
            if (!part.toolCallId || !part.toolName) {
                return false;
            }
            const args = typeof part.args === 'object' ? JSON.stringify(part.args) : (part.args || '');
            return this.registerToolCall(part.toolCallId, part.toolName, args);
        }
        if (part.type === 'tool-call-streaming-start') {
            return part.toolCallId && part.toolName ? this.registerToolCall(part.toolCallId, part.toolName) : false;
        }
        if (part.type === 'tool-call-delta') {
            return part.toolCallId && part.argsTextDelta ? this.appendToolCallArguments(part.toolCallId, part.argsTextDelta) : false;
        }
        if (this.isToolResultPart(part)) {
            return this.completeToolCall(part);
        }
        return false;
    }

    private isToolResultPart(part: StreamPart): part is ToolResultPart {
        return part.type === 'tool-result';
    }

    /** Records (or overwrites) a not-yet-finished tool call with the given id. */
    private registerToolCall(id: string, name: string, args?: string): boolean {
        this.toolCallsMap.set(id, {
            id,
            function: { name, arguments: args ? args : '' },
            finished: false
        });
        return true;
    }

    /** Appends streamed argument text to a previously registered tool call. */
    private appendToolCallArguments(id: string, argsTextDelta: string): boolean {
        const pendingCall = this.toolCallsMap.get(id);
        if (!pendingCall?.function) {
            return false;
        }
        pendingCall.function.arguments = (pendingCall.function.arguments || '') + argsTextDelta;
        return true;
    }

    /** Stores the tool result on the matching call and marks it finished. */
    private completeToolCall(part: ToolResultPart): boolean {
        if (!part.toolCallId) {
            return false;
        }
        const matchingCall = this.toolCallsMap.get(part.toolCallId);
        if (!matchingCall) {
            return false;
        }
        matchingCall.result = part.result as string;
        matchingCall.finished = true;
        return true;
    }
}
/**
 * Theia {@link LanguageModel} implementation backed by the Vercel AI SDK
 * (`generateText`, `generateObject`, `streamText`).
 *
 * A fresh SDK model instance is created per request through the injected
 * {@link VercelAiLanguageModelFactory}, so provider configuration changes
 * (API key, base URL) take effect without recreating this wrapper.
 */
export class VercelAiModel implements LanguageModel {
    constructor(
        /** Unique model id used in Theia's language model registry. */
        public readonly id: string,
        /** Provider-specific model name (e.g. 'gpt-4o'). */
        public model: string,
        /** Current availability status of this model. */
        public status: LanguageModelStatus,
        /** Whether streaming responses are enabled for this model. */
        public enableStreaming: boolean,
        /** Whether the model can produce structured (JSON schema) output. */
        public supportsStructuredOutput: boolean,
        /** Optional custom endpoint URL; undefined means the provider default. */
        public url: string | undefined,
        protected readonly logger: ILogger,
        protected readonly languageModelFactory: VercelAiLanguageModelFactory,
        /** Read lazily on each request so provider-config updates apply immediately. */
        protected providerConfig: () => VercelAiProviderConfig,
        /** Maximum retry count forwarded to the SDK. */
        public maxRetries: number = 3,
        /** Optional service used to record prompt/completion token usage. */
        protected readonly tokenUsageService?: TokenUsageService
    ) { }
    /** Returns the per-request settings object, or an empty object if none were provided. */
    protected getSettings(request: LanguageModelRequest): Record<string, unknown> {
        return request.settings ?? {};
    }
    /**
     * Handles a user request by creating an SDK model and dispatching to:
     * - structured output, when the request asks for `json_schema` and the model supports it;
     * - non-streaming, when streaming is disabled per model or via `settings.stream === false`;
     * - streaming, otherwise.
     *
     * @throws rethrows any error raised while dispatching, after logging it.
     */
    async request(request: UserRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse> {
        const settings = this.getSettings(request);
        // Create a fresh SDK model so the latest provider config is used for this request.
        const model = this.languageModelFactory.createLanguageModel(
            {
                id: this.id,
                model: this.model,
                url: this.url,
                apiKey: true, // We'll use the provider's API key
                enableStreaming: this.enableStreaming,
                supportsStructuredOutput: this.supportsStructuredOutput,
                maxRetries: this.maxRetries
            },
            this.providerConfig()
        );
        const cancel = this.createCancellationToken(cancellationToken);
        try {
            if (request.response_format?.type === 'json_schema' && this.supportsStructuredOutput) {
                return this.handleStructuredOutputRequest(model, request, cancel);
            }
            if (!this.enableStreaming || (typeof settings.stream === 'boolean' && !settings.stream)) {
                return this.handleNonStreamingRequest(model, request, cancel);
            }
            return this.handleStreamingRequest(model, request, cancel);
        } catch (error) {
            this.logger.error('Error in Vercel AI model request:', error);
            throw error;
        } finally {
            // NOTE(review): the handlers above are returned without `await`, so this
            // dispose() runs as soon as the promise is handed back and the catch block
            // does not observe rejections of that promise. dispose() only removes the
            // token->abort listener (the signal object stays valid) — confirm that
            // cancelling an in-flight streaming request still aborts as intended.
            cancel.dispose();
        }
    }
    /**
     * Bridges Theia's {@link CancellationToken} to an AbortSignal for the SDK.
     * Aborts immediately if the token is already cancelled; otherwise registers
     * a listener that forwards a later cancellation to the AbortController.
     */
    protected createCancellationToken(cancellationToken?: CancellationToken): VercelCancellationToken {
        const abortController = new AbortController();
        const abortSignal = abortController.signal;
        if (cancellationToken?.isCancellationRequested) {
            abortController.abort();
        }
        const cancellationListener = cancellationToken ?
            cancellationToken.onCancellationRequested(() => {
                abortController.abort();
            }) : undefined;
        return {
            signal: abortSignal,
            cancellationToken: cancellationToken ?? CancellationToken.None,
            // Falls back to the signal state when no token was supplied.
            get isCancellationRequested(): boolean {
                return cancellationToken?.isCancellationRequested ?? abortSignal.aborted;
            },
            // Removes the listener registered above, if any.
            dispose: () => cancellationListener?.dispose()
        };
    }
    /**
     * Issues a single non-streaming `generateText` call and returns the full text.
     * Token usage is recorded after the call completes.
     */
    protected async handleNonStreamingRequest(
        model: LanguageModelV1,
        request: UserRequest,
        cancellationToken?: VercelCancellationToken
    ): Promise<LanguageModelTextResponse> {
        const settings = this.getSettings(request);
        const messages = this.processMessages(request.messages);
        const tools = this.createTools(request);
        const abortSignal = cancellationToken?.signal;
        const response = await generateText({
            model,
            messages,
            tools,
            toolChoice: 'auto',
            abortSignal,
            // Caller-provided settings are spread last and may override the values above.
            ...settings
        });
        await this.recordTokenUsage(response, request);
        return { text: response.text };
    }
    /**
     * Converts Theia tool requests into the SDK's {@link ToolSet}.
     * Tool handlers receive their arguments as a JSON string and their results are
     * serialized back to JSON; handler errors are caught and returned as an error
     * object instead of being rethrown into the SDK.
     */
    protected createTools(request: UserRequest): ToolSet | undefined {
        if (!request.tools) {
            return undefined;
        }
        const toolSet: ToolSet = {};
        for (const toolRequest of request.tools) {
            toolSet[toolRequest.name] = tool({
                description: toolRequest.description,
                parameters: jsonSchema(toolRequest.parameters),
                execute: async (args: object, options: ToolExecutionOptions) => {
                    try {
                        const result = await toolRequest.handler(JSON.stringify(args), options);
                        return JSON.stringify(result);
                    } catch (error) {
                        this.logger.error(`Error executing tool (${toolRequest.name}):`, error);
                        return { status: 'error', error: 'Tool execution failed', details: error };
                    }
                }
            });
        }
        return toolSet;
    }
    /**
     * Handles a `json_schema` request via `generateObject`, returning both the
     * serialized JSON string and the parsed object.
     *
     * @throws when the request's response format is not a json_schema with a schema,
     *         or when schema conversion yields nothing.
     */
    protected async handleStructuredOutputRequest(
        model: LanguageModelV1,
        request: UserRequest,
        cancellationToken?: VercelCancellationToken
    ): Promise<LanguageModelParsedResponse | LanguageModelStreamResponse> {
        if (request.response_format?.type !== 'json_schema' || !request.response_format.json_schema.schema) {
            throw Error('Invalid response format for structured output request');
        }
        const schema = jsonSchema(request.response_format.json_schema.schema);
        if (!schema) {
            throw new Error('Schema extraction failed.');
        }
        const settings = this.getSettings(request);
        const messages = this.processMessages(request.messages);
        const abortSignal = cancellationToken?.signal;
        const response = await generateObject<unknown>({
            model,
            output: 'object',
            messages,
            schema,
            abortSignal,
            ...settings
        });
        await this.recordTokenUsage(response, request);
        return {
            content: JSON.stringify(response.object),
            parsed: response.object
        };
    }
    /**
     * Records prompt/completion token counts for non-streaming calls.
     * Skipped when no token usage service is configured or the SDK reported NaN counts.
     */
    private async recordTokenUsage(
        result: GenerateObjectResult<unknown> | GenerateTextResult<ToolSet, unknown>,
        request: UserRequest
    ): Promise<void> {
        if (this.tokenUsageService && !isNaN(result.usage.completionTokens) && !isNaN(result.usage.promptTokens)) {
            await this.tokenUsageService.recordTokenUsage(
                this.id,
                {
                    inputTokens: result.usage.promptTokens,
                    outputTokens: result.usage.completionTokens,
                    requestId: request.requestId
                }
            );
        }
    }
    /**
     * Starts a streaming `streamText` call (with streamed tool calls) and wraps the
     * SDK's full stream in a {@link VercelAiStreamTransformer} that yields Theia
     * stream parts. Token usage is recorded per finished step.
     */
    protected async handleStreamingRequest(
        model: LanguageModelV1,
        request: UserRequest,
        cancellationToken?: VercelCancellationToken
    ): Promise<LanguageModelStreamResponse> {
        const settings = this.getSettings(request);
        const messages = this.processMessages(request.messages);
        const tools = this.createTools(request);
        const abortSignal = cancellationToken?.signal;
        const { fullStream } = streamText({
            model,
            messages,
            tools,
            toolChoice: 'auto',
            // Upper bound on tool-call round trips within one request.
            maxSteps: 100,
            maxRetries: this.maxRetries,
            toolCallStreaming: true,
            abortSignal,
            onStepFinish: (stepResult: StepResult<ToolSet>) => {
                if (!isNaN(stepResult.usage.completionTokens) && !isNaN(stepResult.usage.promptTokens)) {
                    // NOTE(review): fire-and-forget — the returned promise is neither
                    // awaited nor caught, so a rejection would surface as unhandled.
                    this.tokenUsageService?.recordTokenUsage(this.id, {
                        inputTokens: stepResult.usage.promptTokens,
                        outputTokens: stepResult.usage.completionTokens,
                        requestId: request.requestId
                    });
                }
            },
            ...settings
        });
        const transformer = new VercelAiStreamTransformer(
            fullStream, { cancellationToken, logger: this.logger }
        );
        return {
            stream: transformer.transform()
        };
    }
    /**
     * Maps Theia messages to the SDK's {@link CoreMessage} format.
     * Only text content is forwarded (non-text messages become empty strings);
     * unknown actors default to the 'user' role.
     */
    protected processMessages(messages: LanguageModelMessage[]): Array<CoreMessage> {
        return messages.map(message => {
            const content = LanguageModelMessage.isTextMessage(message) ? message.text : '';
            let role: 'user' | 'assistant' | 'system';
            switch (message.actor) {
                case 'user':
                    role = 'user';
                    break;
                case 'ai':
                    role = 'assistant';
                    break;
                case 'system':
                    role = 'system';
                    break;
                default:
                    role = 'user';
            }
            return {
                role,
                content,
            };
        });
    }
}

View File

@@ -0,0 +1,115 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { LanguageModelRegistry, LanguageModelStatus, TokenUsageService } from '@theia/ai-core';
import { inject, injectable, named } from '@theia/core/shared/inversify';
import { VercelAiModel } from './vercel-ai-language-model';
import { VercelAiLanguageModelsManager, VercelAiModelDescription } from '../common';
import { ILogger } from '@theia/core';
import { VercelAiLanguageModelFactory, VercelAiProvider, VercelAiProviderConfig } from './vercel-ai-language-model-factory';
/**
 * Manages the lifecycle of Vercel AI language models in Theia's registry:
 * creates new {@link VercelAiModel} instances, patches existing ones, removes
 * them, and keeps per-provider configuration (API keys, etc.).
 */
@injectable()
export class VercelAiLanguageModelsManagerImpl implements VercelAiLanguageModelsManager {
    apiKey: string | undefined;
    protected providerConfigs: Map<VercelAiProvider, VercelAiProviderConfig> = new Map();
    @inject(LanguageModelRegistry)
    protected readonly languageModelRegistry: LanguageModelRegistry;
    @inject(TokenUsageService)
    protected readonly tokenUsageService: TokenUsageService;
    @inject(ILogger) @named('vercel-ai')
    protected readonly logger: ILogger;
    @inject(VercelAiLanguageModelFactory)
    protected readonly languageModelFactory: VercelAiLanguageModelFactory;
    // Triggered from frontend. In case you want to use the models on the backend
    // without a frontend then call this yourself
    protected calculateStatus(effectiveApiKey: string | undefined): LanguageModelStatus {
        if (effectiveApiKey) {
            return { status: 'ready' };
        }
        return { status: 'unavailable', message: 'No Vercel AI API key set' };
    }
    /**
     * Registers each described model, or patches it in place when a model with
     * the same id already exists. Non-Vercel models with a matching id are left
     * untouched (with a warning).
     */
    async createOrUpdateLanguageModels(...modelDescriptions: VercelAiModelDescription[]): Promise<void> {
        for (const description of modelDescriptions) {
            this.logger.info(`Vercel AI: Creating or updating model ${description.id}`);
            const existing = await this.languageModelRegistry.getLanguageModel(description.id);
            const provider = this.determineProvider(description);
            const providerConfig = this.getProviderConfig(provider);
            // A provider-specific key takes precedence over the global one.
            const status = this.calculateStatus(providerConfig.apiKey || this.apiKey);
            if (!existing) {
                this.languageModelRegistry.addLanguageModels([this.buildModel(description, status, provider)]);
                continue;
            }
            if (!(existing instanceof VercelAiModel)) {
                this.logger.warn(`Vercel AI: model ${description.id} is not a Vercel AI model`);
                continue;
            }
            await this.languageModelRegistry.patchLanguageModel<VercelAiModel>(description.id, {
                model: description.model,
                enableStreaming: description.enableStreaming,
                url: description.url,
                supportsStructuredOutput: description.supportsStructuredOutput,
                status,
                maxRetries: description.maxRetries
            });
            this.providerConfigs.set(provider, providerConfig);
        }
    }
    /** Constructs a fresh VercelAiModel for the given description. */
    private buildModel(description: VercelAiModelDescription, status: LanguageModelStatus, provider: VercelAiProvider): VercelAiModel {
        return new VercelAiModel(
            description.id,
            description.model,
            status,
            description.enableStreaming,
            description.supportsStructuredOutput,
            description.url,
            this.logger,
            this.languageModelFactory,
            () => this.getProviderConfig(provider),
            description.maxRetries,
            this.tokenUsageService
        );
    }
    removeLanguageModels(...modelIds: string[]): void {
        this.languageModelRegistry.removeLanguageModels(modelIds);
    }
    /** Merges the given partial config into the stored config for the provider. */
    setProviderConfig(provider: VercelAiProvider, config: Partial<VercelAiProviderConfig>): void {
        const base = this.providerConfigs.get(provider) || { provider };
        this.providerConfigs.set(provider, { ...base, ...config });
    }
    private determineProvider(modelDescription: VercelAiModelDescription): VercelAiProvider {
        // Use the provider from the model description or default to OpenAI
        return modelDescription.provider || 'openai';
    }
    /** Returns the cached config for the provider, creating and storing a default one on first use. */
    private getProviderConfig(provider: VercelAiProvider): VercelAiProviderConfig {
        const cached = this.providerConfigs.get(provider);
        if (cached) {
            return cached;
        }
        const created: VercelAiProviderConfig = { provider, apiKey: this.apiKey };
        this.providerConfigs.set(provider, created);
        return created;
    }
}

View File

@@ -0,0 +1,27 @@
// *****************************************************************************
// Copyright (C) 2025 EclipseSource GmbH and others.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
/* note: this bogus test file is required so that
we are able to run mocha unit tests on this
package, without having any actual unit tests in it.
This way a coverage report will be generated,
showing 0% coverage, instead of no report.
This file can be removed once we have real unit
tests in place. */
// Placeholder suite so mocha produces a (0%) coverage report for this package.
describe('ai-vercel-ai package', function (): void {
    it('support code coverage statistics', function (): boolean {
        return true;
    });
});

View File

@@ -0,0 +1,25 @@
{
"extends": "../../configs/base.tsconfig",
"compilerOptions": {
"composite": true,
"rootDir": "src",
"outDir": "lib"
},
"include": [
"src"
],
"references": [
{
"path": "../ai-core"
},
{
"path": "../core"
},
{
"path": "../filesystem"
},
{
"path": "../workspace"
}
]
}