deploy: current vibn theia state
Made-with: Cursor
This commit is contained in:
62
packages/ai-llamafile/src/common/llamafile-preferences.ts
Normal file
62
packages/ai-llamafile/src/common/llamafile-preferences.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
// *****************************************************************************
|
||||
// Copyright (C) 2024 EclipseSource GmbH.
|
||||
//
|
||||
// This program and the accompanying materials are made available under the
|
||||
// terms of the Eclipse Public License v. 2.0 which is available at
|
||||
// http://www.eclipse.org/legal/epl-2.0.
|
||||
//
|
||||
// This Source Code may also be made available under the following Secondary
|
||||
// Licenses when the conditions for such availability set forth in the Eclipse
|
||||
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
|
||||
// with the GNU Classpath Exception which is available at
|
||||
// https://www.gnu.org/software/classpath/license.html.
|
||||
//
|
||||
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
|
||||
// *****************************************************************************
|
||||
|
||||
import { nls, PreferenceContribution, PreferenceSchema } from '@theia/core';
|
||||
import { interfaces } from '@theia/core/shared/inversify';
|
||||
|
||||
// Human-readable section title for the LlamaFile settings, localized via nls.
export const AI_LLAMAFILE_PREFERENCES_TITLE = nls.localize('theia/ai/llamaFile/prefs/title', '✨ AI LlamaFile');
// Preference key under which the array of configured LlamaFile model entries is stored.
export const PREFERENCE_LLAMAFILE = 'ai-features.llamafile.llamafiles';
|
||||
|
||||
export const aiLlamafilePreferencesSchema: PreferenceSchema = {
|
||||
properties: {
|
||||
[PREFERENCE_LLAMAFILE]: {
|
||||
title: AI_LLAMAFILE_PREFERENCES_TITLE,
|
||||
markdownDescription: nls.localize('theia/ai/llamaFile/prefs/mdDescription', 'This setting allows you to configure and manage LlamaFile models in Theia IDE.\
|
||||
\n\
|
||||
Each entry requires a user-friendly `name`, the file `uri` pointing to your LlamaFile, and the `port` on which it will run.\
|
||||
\n\
|
||||
To start a LlamaFile, use the "Start LlamaFile" command, which enables you to select the desired model.\
|
||||
\n\
|
||||
If you edit an entry (e.g., change the port), any running instance will stop, and you will need to manually start it again.\
|
||||
\n\
|
||||
[Learn more about configuring and managing LlamaFiles in the Theia IDE documentation](https://theia-ide.org/docs/user_ai/#llamafile-models).'),
|
||||
type: 'array',
|
||||
default: [],
|
||||
items: {
|
||||
type: 'object',
|
||||
properties: {
|
||||
name: {
|
||||
type: 'string',
|
||||
description: nls.localize('theia/ai/llamaFile/prefs/name/description', 'The model name to use for this Llamafile.')
|
||||
},
|
||||
uri: {
|
||||
type: 'string',
|
||||
description: nls.localize('theia/ai/llamaFile/prefs/uri/description', 'The file uri to the Llamafile.')
|
||||
},
|
||||
port: {
|
||||
type: 'number',
|
||||
description: nls.localize('theia/ai/llamaFile/prefs/port/description', 'The port to use to start the server.')
|
||||
}
|
||||
}
|
||||
},
|
||||
tags: ['experimental']
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export function bindAILlamafilePreferences(bind: interfaces.Bind): void {
|
||||
bind(PreferenceContribution).toConstantValue({ schema: aiLlamafilePreferencesSchema });
|
||||
}
|
||||
Reference in New Issue
Block a user