deploy: current vibe theia state
Some checks failed
Playwright Tests / Playwright Tests (ubuntu-22.04, Node.js 22.x) (push) Has been cancelled
3PP License Check / 3PP License Check (11, 22.x, ubuntu-22.04) (push) Has been cancelled
Publish packages to NPM / Perform Publishing (push) Has been cancelled

Made-with: Cursor
This commit is contained in:
2026-02-27 12:01:08 -08:00
commit 8bb5110148
3782 changed files with 640947 additions and 0 deletions

View File

@@ -0,0 +1,16 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
// Re-export the OpenAI language models manager protocol (service path, DI symbol,
// model description types) so consumers can import from the package root.
export * from './openai-language-models-manager';

View File

@@ -0,0 +1,79 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
// RPC service path under which the OpenAI language models manager is exposed
// between frontend and backend.
export const OPENAI_LANGUAGE_MODELS_MANAGER_PATH = '/services/open-ai/language-model-manager';
// DI symbol for binding and retrieving the OpenAiLanguageModelsManager service
// (merges with the interface of the same name below).
export const OpenAiLanguageModelsManager = Symbol('OpenAiLanguageModelsManager');
// Provider id under which OpenAI models are registered.
export const OPENAI_PROVIDER_ID = 'openai';
/**
 * Describes a single OpenAI (or OpenAI-API-compatible) language model configuration,
 * covering both official OpenAI models and custom/Azure-hosted endpoints.
 */
export interface OpenAiModelDescription {
    /**
     * The identifier of the model which will be shown in the UI.
     */
    id: string;
    /**
     * The model ID as used by the OpenAI API.
     */
    model: string;
    /**
     * The OpenAI API compatible endpoint where the model is hosted. If not provided the default OpenAI endpoint will be used.
     */
    url?: string;
    /**
     * The key for the model. If 'true' is provided the global OpenAI API key will be used.
     */
    apiKey: string | true | undefined;
    /**
     * The version for the api. If 'true' is provided the global OpenAI version will be used.
     */
    apiVersion: string | true | undefined;
    /**
     * Optional deployment name for Azure OpenAI.
     */
    deployment?: string;
    /**
     * Indicate whether the streaming API shall be used.
     */
    enableStreaming: boolean;
    /**
     * Property to configure the developer message of the model. Setting this property to 'user', 'system', or 'developer' will use that string as the role for the system message.
     * Setting it to 'mergeWithFollowingUserMessage' will prefix the following user message with the system message or convert the system message to user if the following message
     * is not a user message. 'skip' will remove the system message altogether.
     * Defaults to 'developer'.
     */
    developerMessageSettings?: 'user' | 'system' | 'developer' | 'mergeWithFollowingUserMessage' | 'skip';
    /**
     * Flag to configure whether the OpenAI model supports structured output. Default is `true`.
     */
    supportsStructuredOutput: boolean;
    /**
     * Maximum number of retry attempts when a request fails. Default is 3.
     */
    maxRetries: number;
    /**
     * Flag to configure whether to use the newer OpenAI Response API instead of the Chat Completion API.
     * For official OpenAI models, this defaults to `true`. For custom providers, users must explicitly enable it.
     * Default is `false` for custom models.
     */
    useResponseApi?: boolean;
}
/**
 * Service managing the registration and global configuration of OpenAI language models.
 * Bound via the `OpenAiLanguageModelsManager` symbol and proxied over
 * `OPENAI_LANGUAGE_MODELS_MANAGER_PATH`.
 */
export interface OpenAiLanguageModelsManager {
    /** The currently configured global OpenAI API key, if any. */
    apiKey: string | undefined;
    /** Sets (or clears, with `undefined`) the global OpenAI API key. */
    setApiKey(key: string | undefined): void;
    /** Sets (or clears) the global OpenAI API version (e.g. for Azure-hosted endpoints). */
    setApiVersion(version: string | undefined): void;
    /** Sets (or clears) the proxy URL used for OpenAI requests. */
    setProxyUrl(proxyUrl: string | undefined): void;
    /** Registers the given models, updating any already registered under the same id. */
    createOrUpdateLanguageModels(...models: OpenAiModelDescription[]): Promise<void>;
    // Fixed: terminating semicolon was missing, inconsistent with the other members.
    /** Removes the language models registered under the given ids. */
    removeLanguageModels(...modelIds: string[]): void;
}

View File

@@ -0,0 +1,152 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { AI_CORE_PREFERENCES_TITLE } from '@theia/ai-core/lib/common/ai-core-preferences';
import { nls, PreferenceSchema } from '@theia/core';
// Preference key for the official OpenAI API key (stored in clear text; the
// OPENAI_API_KEY environment variable is the secure alternative).
export const API_KEY_PREF = 'ai-features.openAiOfficial.openAiApiKey';
// Preference key for the list of official OpenAI model ids to register.
export const MODELS_PREF = 'ai-features.openAiOfficial.officialOpenAiModels';
// Preference key toggling the Response API (vs. Chat Completion API) for official models.
export const USE_RESPONSE_API_PREF = 'ai-features.openAiOfficial.useResponseApi';
// Preference key for custom OpenAI-API-compatible model endpoint definitions.
export const CUSTOM_ENDPOINTS_PREF = 'ai-features.openAiCustom.customOpenAiModels';
/**
 * Preference schema contributing the OpenAI AI settings: the official API key,
 * the list of official models, the Response API toggle, and custom
 * OpenAI-API-compatible endpoint definitions.
 *
 * Fixes in this revision:
 * - `useResponseApi` title: the two concatenated string parts had no separating
 *   space, rendering "…custom providers.Note: …".
 * - `developerMessageSettings` title: removed a stray unmatched closing
 *   parenthesis after "system message".
 */
export const OpenAiPreferencesSchema: PreferenceSchema = {
    properties: {
        [API_KEY_PREF]: {
            type: 'string',
            markdownDescription: nls.localize('theia/ai/openai/apiKey/mdDescription',
                'Enter an API Key of your official OpenAI Account. **Please note:** By using this preference the Open AI API key will be stored in clear text \
on the machine running Theia. Use the environment variable `OPENAI_API_KEY` to set the key securely.'),
            title: AI_CORE_PREFERENCES_TITLE,
        },
        [MODELS_PREF]: {
            type: 'array',
            description: nls.localize('theia/ai/openai/models/description', 'Official OpenAI models to use'),
            title: AI_CORE_PREFERENCES_TITLE,
            // Default set of official OpenAI models registered out of the box.
            default: [
                'gpt-5.2',
                'gpt-5.2-pro',
                'gpt-5.1',
                'gpt-5',
                'gpt-5-mini',
                'gpt-4.1',
                'gpt-4.1-mini',
                'gpt-4o'
            ],
            items: {
                type: 'string'
            }
        },
        [USE_RESPONSE_API_PREF]: {
            type: 'boolean',
            default: false,
            title: AI_CORE_PREFERENCES_TITLE,
            markdownDescription: nls.localize('theia/ai/openai/useResponseApi/mdDescription',
                'Use the newer OpenAI Response API instead of the Chat Completion API for official OpenAI models.\
\
This setting only applies to official OpenAI models - custom providers must configure this individually.\
\
Note that for the response API, tool call definitions must satisfy Open AI\'s [strict schema definition](https://platform.openai.com/docs/guides/function-calling#strict-mode).\
Best effort is made to convert non-conformant schemas, but errors are still possible.')
        },
        [CUSTOM_ENDPOINTS_PREF]: {
            type: 'array',
            title: AI_CORE_PREFERENCES_TITLE,
            markdownDescription: nls.localize('theia/ai/openai/customEndpoints/mdDescription',
                'Integrate custom models compatible with the OpenAI API, for example via `vllm`. The required attributes are `model` and `url`.\
\n\
Optionally, you can\
\n\
- specify a unique `id` to identify the custom model in the UI. If none is given `model` will be used as `id`.\
\n\
- provide an `apiKey` to access the API served at the given url. Use `true` to indicate the use of the global OpenAI API key.\
\n\
- provide an `apiVersion` to access the API served at the given url in Azure. Use `true` to indicate the use of the global OpenAI API version.\
\n\
- provide a `deployment` name for your Azure deployment.\
\n\
- set `developerMessageSettings` to one of `user`, `system`, `developer`, `mergeWithFollowingUserMessage`, or `skip` to control how the developer message is\
included (where `user`, `system`, and `developer` will be used as a role, `mergeWithFollowingUserMessage` will prefix the following user message with the system\
message or convert the system message to user message if the next message is not a user message. `skip` will just remove the system message).\
Defaulting to `developer`.\
\n\
- specify `supportsStructuredOutput: false` to indicate that structured output shall not be used.\
\n\
- specify `enableStreaming: false` to indicate that streaming shall not be used.\
\n\
- specify `useResponseApi: true` to use the newer OpenAI Response API instead of the Chat Completion API (requires compatible endpoint).\
\n\
Refer to [our documentation](https://theia-ide.org/docs/user_ai/#openai-compatible-models-eg-via-vllm) for more information.'),
            default: [],
            items: {
                type: 'object',
                properties: {
                    model: {
                        type: 'string',
                        title: nls.localize('theia/ai/openai/customEndpoints/modelId/title', 'Model ID')
                    },
                    url: {
                        type: 'string',
                        title: nls.localize('theia/ai/openai/customEndpoints/url/title', 'The Open AI API compatible endpoint where the model is hosted')
                    },
                    id: {
                        type: 'string',
                        title: nls.localize('theia/ai/openai/customEndpoints/id/title', 'A unique identifier which is used in the UI to identify the custom model'),
                    },
                    apiKey: {
                        // Accepts either a concrete key or `true` to reuse the global key.
                        type: ['string', 'boolean'],
                        title: nls.localize('theia/ai/openai/customEndpoints/apiKey/title',
                            'Either the key to access the API served at the given url or `true` to use the global OpenAI API key'),
                    },
                    apiVersion: {
                        type: ['string', 'boolean'],
                        title: nls.localize('theia/ai/openai/customEndpoints/apiVersion/title',
                            'Either the version to access the API served at the given url in Azure or `true` to use the global OpenAI API version'),
                    },
                    deployment: {
                        type: 'string',
                        title: nls.localize('theia/ai/openai/customEndpoints/deployment/title',
                            'The deployment name to access the API served at the given url in Azure'),
                    },
                    developerMessageSettings: {
                        type: 'string',
                        enum: ['user', 'system', 'developer', 'mergeWithFollowingUserMessage', 'skip'],
                        default: 'developer',
                        title: nls.localize('theia/ai/openai/customEndpoints/developerMessageSettings/title',
                            'Controls the handling of system messages: `user`, `system`, and `developer` will be used as a role, `mergeWithFollowingUserMessage` will prefix\
the following user message with the system message or convert the system message to user message if the next message is not a user message.\
`skip` will just remove the system message, defaulting to `developer`.')
                    },
                    supportsStructuredOutput: {
                        type: 'boolean',
                        title: nls.localize('theia/ai/openai/customEndpoints/supportsStructuredOutput/title',
                            'Indicates whether the model supports structured output. `true` by default.'),
                    },
                    enableStreaming: {
                        type: 'boolean',
                        title: nls.localize('theia/ai/openai/customEndpoints/enableStreaming/title',
                            'Indicates whether the streaming API shall be used. `true` by default.'),
                    },
                    useResponseApi: {
                        type: 'boolean',
                        title: nls.localize('theia/ai/openai/customEndpoints/useResponseApi/title',
                            'Use the newer OpenAI Response API instead of the Chat Completion API. `false` by default for custom providers. '
                            + 'Note: Will automatically fall back to Chat Completions API when tools are used.'),
                    }
                }
            }
        }
    }
};