deploy: current vibn theia state
Some checks failed
Playwright Tests / Playwright Tests (ubuntu-22.04, Node.js 22.x) (push) Has been cancelled
3PP License Check / 3PP License Check (11, 22.x, ubuntu-22.04) (push) Has been cancelled
Publish packages to NPM / Perform Publishing (push) Has been cancelled

Made-with: Cursor
This commit is contained in:
2026-02-27 12:01:08 -08:00
commit 8bb5110148
3782 changed files with 640947 additions and 0 deletions

View File

@@ -0,0 +1,10 @@
/** @type {import('eslint').Linter.Config} */
// ESLint configuration for this package: extends the repository's shared build
// config and points the TypeScript parser at this package's tsconfig so that
// type-aware lint rules can resolve project types.
module.exports = {
    extends: [
        '../../configs/build.eslintrc.json'
    ],
    parserOptions: {
        // Resolve 'project' relative to this directory.
        tsconfigRootDir: __dirname,
        project: 'tsconfig.json'
    }
};

View File

@@ -0,0 +1,57 @@
# AI Llamafile Integration
The AI Llamafile package provides an integration that allows users to manage and interact with Llamafile language models within the Theia IDE.
## Features
- Start and stop Llamafile language servers.
## Commands
### Start Llamafile
- **Command ID:** `llamafile.start`
- **Label:** `Start Llamafile`
- **Functionality:** Allows you to start a Llamafile language server by selecting from a list of configured Llamafiles.
### Stop Llamafile
- **Command ID:** `llamafile.stop`
- **Label:** `Stop Llamafile`
- **Functionality:** Allows you to stop a running Llamafile language server by selecting from a list of currently running Llamafiles.
## Usage
1. **Starting a Llamafile Language Server:**
- Use the command palette to invoke `Start Llamafile`.
- A quick pick menu will appear with a list of configured Llamafiles.
- Select a Llamafile to start its language server.
2. **Stopping a Llamafile Language Server:**
- Use the command palette to invoke `Stop Llamafile`.
- A quick pick menu will display a list of currently running Llamafiles.
- Select a Llamafile to stop its language server.
## Dependencies
This extension depends on the `@theia/ai-core` package for AI-related services and functionalities.
## Configuration
Make sure to configure your Llamafiles properly within the preference settings.
This setting is an array of objects, where each object defines a llamafile with a user-friendly name, the file URI, and the port to start the server on.
Example Configuration:
```json
{
"ai-features.llamafile.llamafiles": [
{
"name": "MyLlamaFile",
"uri": "file:///path/to/my.llamafile",
"port": 30000
}
]
}
```

View File

@@ -0,0 +1,51 @@
{
"name": "@theia/ai-llamafile",
"version": "1.68.0",
"description": "Theia - Llamafile Integration",
"dependencies": {
"@theia/ai-core": "1.68.0",
"@theia/core": "1.68.0",
"@theia/output": "1.68.0",
"tslib": "^2.6.2"
},
"publishConfig": {
"access": "public"
},
"theiaExtensions": [
{
"frontend": "lib/browser/llamafile-frontend-module",
"backend": "lib/node/llamafile-backend-module"
}
],
"keywords": [
"theia-extension"
],
"license": "EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0",
"repository": {
"type": "git",
"url": "https://github.com/eclipse-theia/theia.git"
},
"bugs": {
"url": "https://github.com/eclipse-theia/theia/issues"
},
"homepage": "https://github.com/eclipse-theia/theia",
"files": [
"lib",
"src"
],
"scripts": {
"build": "theiaext build",
"clean": "theiaext clean",
"compile": "theiaext compile",
"lint": "theiaext lint",
"test": "theiaext test",
"watch": "theiaext watch"
},
"devDependencies": {
"@theia/ext-scripts": "1.68.0"
},
"nyc": {
"extends": "../../configs/nyc.json"
},
"gitHead": "21358137e41342742707f660b8e222f940a27652"
}

View File

@@ -0,0 +1,103 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { AICommandHandlerFactory } from '@theia/ai-core/lib/browser/ai-command-handler-factory';
import { CommandContribution, CommandRegistry, MessageService, nls, PreferenceService } from '@theia/core';
import { QuickInputService } from '@theia/core/lib/browser';
import { inject, injectable } from '@theia/core/shared/inversify';
import { LlamafileManager } from '../common/llamafile-manager';
import { PREFERENCE_LLAMAFILE } from '../common/llamafile-preferences';
import { LlamafileEntry } from './llamafile-frontend-application-contribution';
// Command to start a configured Llamafile language server, selected via quick pick.
export const StartLlamafileCommand = {
    id: 'llamafile.start',
    label: nls.localize('theia/ai/llamaFile/start', 'Start Llamafile'),
};
// Command to stop a currently running Llamafile language server, selected via quick pick.
export const StopLlamafileCommand = {
    id: 'llamafile.stop',
    label: nls.localize('theia/ai/llamaFile/stop', 'Stop Llamafile'),
};
/**
 * Contributes the 'Start Llamafile' and 'Stop Llamafile' commands.
 * Both commands present a quick pick (configured llamafiles resp. running
 * llamafiles) and delegate to the {@link LlamafileManager} backend service.
 */
@injectable()
export class LlamafileCommandContribution implements CommandContribution {
    @inject(QuickInputService)
    protected readonly quickInputService: QuickInputService;
    @inject(AICommandHandlerFactory)
    protected readonly commandHandlerFactory: AICommandHandlerFactory;
    @inject(PreferenceService)
    protected preferenceService: PreferenceService;
    @inject(MessageService)
    protected messageService: MessageService;
    @inject(LlamafileManager)
    protected llamafileManager: LlamafileManager;

    registerCommands(commandRegistry: CommandRegistry): void {
        commandRegistry.registerCommand(StartLlamafileCommand, this.commandHandlerFactory({
            execute: async () => {
                try {
                    const llamaFiles = this.preferenceService.get<LlamafileEntry[]>(PREFERENCE_LLAMAFILE);
                    if (llamaFiles === undefined || llamaFiles.length === 0) {
                        this.messageService.error(nls.localize('theia/ai/llamafile/error/noConfigured', 'No Llamafiles configured.'));
                        return;
                    }
                    const options = llamaFiles.map(llamaFile => ({ label: llamaFile.name }));
                    const result = await this.quickInputService.showQuickPick(options);
                    if (result === undefined) {
                        // User dismissed the quick pick.
                        return;
                    }
                    // Await the start so that rejections (e.g. an unknown llamafile
                    // name) are caught below instead of surfacing as unhandled
                    // promise rejections that never reach the user.
                    await this.llamafileManager.startServer(result.label);
                } catch (error) {
                    console.error('Something went wrong during the llamafile start.', error);
                    this.messageService.error(
                        nls.localize(
                            'theia/ai/llamafile/error/startFailed',
                            'Something went wrong during the llamafile start: {0}.\nFor more information, see the console.',
                            error.message
                        ));
                }
            }
        }));
        commandRegistry.registerCommand(StopLlamafileCommand, this.commandHandlerFactory({
            execute: async () => {
                try {
                    const llamaFiles = await this.llamafileManager.getStartedLlamafiles();
                    if (llamaFiles === undefined || llamaFiles.length === 0) {
                        this.messageService.error(nls.localize('theia/ai/llamafile/error/noRunning', 'No Llamafiles running.'));
                        return;
                    }
                    const options = llamaFiles.map(llamaFile => ({ label: llamaFile }));
                    const result = await this.quickInputService.showQuickPick(options);
                    if (result === undefined) {
                        // User dismissed the quick pick.
                        return;
                    }
                    this.llamafileManager.stopServer(result.label);
                } catch (error) {
                    console.error('Something went wrong during the llamafile stop.', error);
                    this.messageService.error(
                        nls.localize(
                            'theia/ai/llamafile/error/stopFailed',
                            'Something went wrong during the llamafile stop: {0}.\nFor more information, see the console.',
                            error.message
                        ));
                }
            }
        }));
    }
}

View File

@@ -0,0 +1,104 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { FrontendApplicationContribution } from '@theia/core/lib/browser';
import { inject, injectable } from '@theia/core/shared/inversify';
import { LlamafileManager, LlamafileModelDescription } from '../common/llamafile-manager';
import { PREFERENCE_LLAMAFILE } from '../common/llamafile-preferences';
import { PreferenceService } from '@theia/core';
/**
 * Keeps the llamafile language-model registrations in sync with the
 * `ai-features.llamafile.llamafiles` preference: valid entries are registered
 * on startup, and subsequent preference changes add/remove models accordingly.
 */
@injectable()
export class LlamafileFrontendApplicationContribution implements FrontendApplicationContribution {
    @inject(PreferenceService)
    protected preferenceService: PreferenceService;
    @inject(LlamafileManager)
    protected llamafileManager: LlamafileManager;
    // Snapshot of the currently registered entries, keyed by name.
    // Used to diff against new preference values in handleLlamaFilePreferenceChange.
    private _knownLlamaFiles: Map<string, LlamafileEntry> = new Map();
    onStart(): void {
        this.preferenceService.ready.then(() => {
            const llamafiles = this.preferenceService.get<LlamafileEntry[]>(PREFERENCE_LLAMAFILE, []);
            // Malformed entries are silently dropped via the type guard.
            const validLlamafiles = llamafiles.filter(LlamafileEntry.is);
            const LlamafileModelDescriptions = this.getLLamaFileModelDescriptions(validLlamafiles);
            this.llamafileManager.addLanguageModels(LlamafileModelDescriptions);
            validLlamafiles.forEach(model => this._knownLlamaFiles.set(model.name, model));
            this.preferenceService.onPreferenceChanged(event => {
                if (event.preferenceName === PREFERENCE_LLAMAFILE) {
                    const currentLlamafiles = this.preferenceService.get<LlamafileEntry[]>(PREFERENCE_LLAMAFILE, []);
                    const newModels = currentLlamafiles.filter(LlamafileEntry.is);
                    this.handleLlamaFilePreferenceChange(newModels);
                }
            });
        });
    }
    /** Maps preference entries to the model descriptions consumed by the manager. */
    protected getLLamaFileModelDescriptions(llamafiles: LlamafileEntry[]): LlamafileModelDescription[] {
        return llamafiles.map(llamafile => ({
            name: llamafile.name,
            uri: llamafile.uri,
            port: llamafile.port
        }));
    }
    /**
     * Syncs model registrations with the new preference value.
     * An entry that is new, or whose uri/port changed, is (re-)added;
     * an entry that no longer appears unchanged in the new value is removed.
     * Note: a *modified* entry matches both filters, so it is removed first
     * and then re-added with the new configuration.
     */
    protected handleLlamaFilePreferenceChange(newModels: LlamafileEntry[]): void {
        const llamafilesToAdd = newModels.filter(llamafile =>
            !this._knownLlamaFiles.has(llamafile.name) ||
            !LlamafileEntry.equals(this._knownLlamaFiles.get(llamafile.name)!, llamafile));
        const llamafileIdsToRemove = [...this._knownLlamaFiles.values()].filter(llamafile =>
            !newModels.find(newModel => LlamafileEntry.equals(newModel, llamafile)))
            .map(llamafile => llamafile.name);
        // Remove before adding so a changed entry's old registration is gone
        // before the new one is created.
        this.llamafileManager.removeLanguageModels(llamafileIdsToRemove);
        llamafileIdsToRemove.forEach(id => this._knownLlamaFiles.delete(id));
        this.llamafileManager.addLanguageModels(this.getLLamaFileModelDescriptions(llamafilesToAdd));
        llamafilesToAdd.forEach(model => this._knownLlamaFiles.set(model.name, model));
    }
}
/**
 * A llamafile configuration entry as stored in the preferences:
 * a display name, the file URI of the llamafile, and the server port.
 */
export interface LlamafileEntry {
    name: string;
    uri: string;
    port: number;
}
namespace LlamafileEntry {
    /** Structural equality over all three fields. */
    export function equals(a: LlamafileEntry, b: LlamafileEntry): boolean {
        return a.name === b.name
            && a.uri === b.uri
            && a.port === b.port;
    }
    /** Runtime type guard for values read from the (untyped) preference store. */
    export function is(entry: unknown): entry is LlamafileEntry {
        // eslint-disable-next-line no-null/no-null
        if (typeof entry !== 'object' || entry === null) {
            return false;
        }
        const candidate = entry as Partial<LlamafileEntry>;
        return typeof candidate.name === 'string'
            && typeof candidate.uri === 'string'
            && typeof candidate.port === 'number';
    }
}

View File

@@ -0,0 +1,45 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { CommandContribution } from '@theia/core';
import { FrontendApplicationContribution, RemoteConnectionProvider, ServiceConnectionProvider } from '@theia/core/lib/browser';
import { ContainerModule } from '@theia/core/shared/inversify';
import { OutputChannelManager, OutputChannelSeverity } from '@theia/output/lib/browser/output-channel';
import { LlamafileManager, LlamafileManagerPath, LlamafileServerManagerClient } from '../common/llamafile-manager';
import { LlamafileCommandContribution } from './llamafile-command-contribution';
import { LlamafileFrontendApplicationContribution } from './llamafile-frontend-application-contribution';
import { bindAILlamafilePreferences } from '../common/llamafile-preferences';
export default new ContainerModule(bind => {
bind(FrontendApplicationContribution).to(LlamafileFrontendApplicationContribution).inSingletonScope();
bind(CommandContribution).to(LlamafileCommandContribution).inSingletonScope();
bind(LlamafileManager).toDynamicValue(ctx => {
const connection = ctx.container.get<ServiceConnectionProvider>(RemoteConnectionProvider);
const outputChannelManager = ctx.container.get(OutputChannelManager);
const client: LlamafileServerManagerClient = {
error: (llamafileName, message) => {
const channel = outputChannelManager.getChannel(`${llamafileName}-llamafile`);
channel.appendLine(message, OutputChannelSeverity.Error);
},
log: (llamafileName, message) => {
const channel = outputChannelManager.getChannel(`${llamafileName}-llamafile`);
channel.appendLine(message, OutputChannelSeverity.Info);
}
};
return connection.createProxy<LlamafileManager>(LlamafileManagerPath, client);
}).inSingletonScope();
bindAILlamafilePreferences(bind);
});

View File

@@ -0,0 +1,134 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { LanguageModel, LanguageModelMessage, LanguageModelRequest, LanguageModelResponse, LanguageModelStatus, LanguageModelStreamResponsePart } from '@theia/ai-core';
import { CancellationToken } from '@theia/core';
/**
 * Extracts the plain-text content of a message.
 * Non-text messages yield `undefined` and are skipped by callers.
 */
const createMessageContent = (message: LanguageModelMessage): string | undefined =>
    LanguageModelMessage.isTextMessage(message) ? message.text : undefined;
/**
 * A `LanguageModel` implementation backed by a llamafile server running on
 * localhost. Requests are POSTed to the server's `/completion` endpoint and
 * the answer is returned as an async stream of response parts.
 */
export class LlamafileLanguageModel implements LanguageModel {
    readonly providerId = 'llamafile';
    readonly vendor: string = 'Mozilla';
    /**
     * @param name the unique name for this language model. It will be used to identify the model in the UI.
     * @param status the current availability status of this model.
     * @param uri the URI pointing to the Llamafile model location.
     * @param port the port on which the Llamafile model server operates.
     */
    constructor(
        public readonly name: string,
        public status: LanguageModelStatus,
        public readonly uri: string,
        public readonly port: number,
    ) { }
    // The model id is simply its configured name.
    get id(): string {
        return this.name;
    }
    /**
     * Completion settings sent to the server; request-specific settings
     * override the defaults below.
     */
    protected getSettings(request: LanguageModelRequest): Record<string, unknown> {
        return {
            n_predict: 200,
            stream: true,
            stop: ['</s>', 'Llama:', 'User:', '<|eot_id|>'],
            cache_prompt: true,
            ...(request.settings ?? {})
        };
    }
    /**
     * Turns the request's text messages into a single chat-style prompt,
     * POSTs it to the local server, and streams the completion back.
     *
     * @param request the request; non-text messages are skipped.
     * @param cancellationToken when cancelled, the response stream is terminated.
     * @returns a streamed response, or a plain `text` error response if the
     *   HTTP request fails.
     */
    async request(request: LanguageModelRequest, cancellationToken?: CancellationToken): Promise<LanguageModelResponse> {
        const settings = this.getSettings(request);
        try {
            // Build a plain-text transcript: user/ai turns get a speaker prefix,
            // system messages are inlined with double newlines collapsed.
            let prompt = request.messages.map(message => {
                const content = createMessageContent(message);
                if (content === undefined) {
                    return undefined;
                }
                switch (message.actor) {
                    case 'user':
                        return `User: ${content}`;
                    case 'ai':
                        return `Llama: ${content}`;
                    case 'system':
                        return `${content.replace(/\n\n/g, '\n')}`;
                }
            }).filter(m => m !== undefined).join('\n');
            // Cue the model to answer in the 'Llama:' role.
            prompt += '\nLlama:';
            const response = await fetch(`http://localhost:${this.port}/completion`, {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json',
                },
                body: JSON.stringify({
                    prompt: prompt,
                    ...settings
                }),
            });
            if (!response.ok) {
                throw new Error(`HTTP error! status: ${response.status}`);
            }
            if (!response.body) {
                throw new Error('Response body is undefined');
            }
            const reader = response.body.getReader();
            const decoder = new TextDecoder();
            return {
                stream: {
                    [Symbol.asyncIterator](): AsyncIterator<LanguageModelStreamResponsePart> {
                        return {
                            // Pull-based iterator: each next() reads one chunk from
                            // the HTTP body and folds its lines into a single part.
                            async next(): Promise<IteratorResult<LanguageModelStreamResponsePart>> {
                                if (cancellationToken?.isCancellationRequested) {
                                    reader.cancel();
                                    return { value: undefined, done: true };
                                }
                                const { value, done } = await reader.read();
                                if (done) {
                                    return { value: undefined, done: true };
                                }
                                const read = decoder.decode(value, { stream: true });
                                // Each non-empty line is assumed to be an SSE-style
                                // 'data: {json}' line; substring(6) strips that prefix.
                                // NOTE(review): lines without the prefix fail JSON.parse
                                // and are dropped by the catch below — TODO confirm the
                                // server never splits one JSON object across reads.
                                const chunk = read.split('\n').filter(l => l.length !== 0).reduce((acc, line) => {
                                    try {
                                        const parsed = JSON.parse(line.substring(6));
                                        acc += parsed.content;
                                        return acc;
                                    } catch (error) {
                                        console.error('Error parsing JSON:', error);
                                        return acc;
                                    }
                                }, '');
                                return { value: { content: chunk }, done: false };
                            }
                        };
                    }
                }
            };
        } catch (error) {
            console.error('Error:', error);
            // Fall back to a non-streaming error response.
            return {
                text: `Error: ${error}`
            };
        }
    }
}

View File

@@ -0,0 +1,37 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
/** DI service identifier for the {@link LlamafileManager}. */
export const LlamafileManager = Symbol('LlamafileManager');
/** RPC path under which the backend LlamafileManager is published to the frontend. */
export const LlamafileManagerPath = '/services/llamafilemanager';
/** Description of a configured llamafile model. */
export interface LlamafileModelDescription {
    /** The model name; also used as the model id. */
    name: string;
    /** File URI pointing to the llamafile. */
    uri: string;
    /** Port on which the llamafile server is started. */
    port: number;
}
/**
 * Backend service managing llamafile server processes and the corresponding
 * language model registrations.
 */
export interface LlamafileManager {
    /** Starts the server process for the named llamafile; rejects if no such model is registered. */
    startServer(name: string): Promise<void>;
    /** Stops the server process for the named llamafile, if running. */
    stopServer(name: string): void;
    /** Returns the ids of all llamafile models whose server is currently running. */
    getStartedLlamafiles(): Promise<string[]>;
    /** Sets the client that receives server process log/error output. */
    setClient(client: LlamafileServerManagerClient): void;
    /** Registers a language model for each description not already present. */
    addLanguageModels(llamaFiles: LlamafileModelDescription[]): Promise<void>;
    /** Stops (if running) and unregisters the models with the given ids. */
    removeLanguageModels(modelIds: string[]): void;
}
/** Frontend callback receiving log and error output of llamafile server processes. */
export interface LlamafileServerManagerClient {
    log(llamafileName: string, message: string): void;
    error(llamafileName: string, message: string): void;
}

View File

@@ -0,0 +1,62 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { nls, PreferenceContribution, PreferenceSchema } from '@theia/core';
import { interfaces } from '@theia/core/shared/inversify';
/** Section title for the llamafile preferences, shown in the preferences UI. */
export const AI_LLAMAFILE_PREFERENCES_TITLE = nls.localize('theia/ai/llamaFile/prefs/title', '✨ AI LlamaFile');
/** Preference key under which the array of configured llamafiles is stored. */
export const PREFERENCE_LLAMAFILE = 'ai-features.llamafile.llamafiles';
/**
 * Preference schema: an array of objects, each with a `name`, a file `uri`,
 * and a `port` (see the per-property descriptions below).
 */
export const aiLlamafilePreferencesSchema: PreferenceSchema = {
    properties: {
        [PREFERENCE_LLAMAFILE]: {
            title: AI_LLAMAFILE_PREFERENCES_TITLE,
            markdownDescription: nls.localize('theia/ai/llamaFile/prefs/mdDescription', 'This setting allows you to configure and manage LlamaFile models in Theia IDE.\
\n\
Each entry requires a user-friendly `name`, the file `uri` pointing to your LlamaFile, and the `port` on which it will run.\
\n\
To start a LlamaFile, use the "Start LlamaFile" command, which enables you to select the desired model.\
\n\
If you edit an entry (e.g., change the port), any running instance will stop, and you will need to manually start it again.\
\n\
[Learn more about configuring and managing LlamaFiles in the Theia IDE documentation](https://theia-ide.org/docs/user_ai/#llamafile-models).'),
            type: 'array',
            default: [],
            items: {
                type: 'object',
                properties: {
                    name: {
                        type: 'string',
                        description: nls.localize('theia/ai/llamaFile/prefs/name/description', 'The model name to use for this Llamafile.')
                    },
                    uri: {
                        type: 'string',
                        description: nls.localize('theia/ai/llamaFile/prefs/uri/description', 'The file uri to the Llamafile.')
                    },
                    port: {
                        type: 'number',
                        description: nls.localize('theia/ai/llamaFile/prefs/port/description', 'The port to use to start the server.')
                    }
                }
            },
            tags: ['experimental']
        }
    }
};
/** Registers the llamafile preference schema with the preference system. */
export function bindAILlamafilePreferences(bind: interfaces.Bind): void {
    bind(PreferenceContribution).toConstantValue({ schema: aiLlamafilePreferencesSchema });
}

View File

@@ -0,0 +1,40 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { ContainerModule } from '@theia/core/shared/inversify';
import { LlamafileManagerImpl } from './llamafile-manager-impl';
import { LlamafileManager, LlamafileServerManagerClient, LlamafileManagerPath } from '../common/llamafile-manager';
import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';
import { ConnectionContainerModule } from '@theia/core/lib/node/messaging/connection-container-module';
import { bindAILlamafilePreferences } from '../common/llamafile-preferences';
// AI services are bound in a connection-scoped module so that every frontend
// connection gets its own LlamafileManager instance with its own client.
const llamafileConnectionModule = ConnectionContainerModule.create(({ bind, bindBackendService, bindFrontendService }) => {
    bind(LlamafileManager).to(LlamafileManagerImpl).inSingletonScope();
    bind(ConnectionHandler).toDynamicValue(context => new RpcConnectionHandler<LlamafileServerManagerClient>(
        LlamafileManagerPath,
        frontendClient => {
            // Wire the frontend client into the manager before handing it out.
            const manager = context.container.get<LlamafileManager>(LlamafileManager);
            manager.setClient(frontendClient);
            return manager;
        }
    )).inSingletonScope();
});
// Backend DI wiring: preference schema plus the per-connection module above.
export default new ContainerModule(bind => {
    bindAILlamafilePreferences(bind);
    bind(ConnectionContainerModule).toConstantValue(llamafileConnectionModule);
});

View File

@@ -0,0 +1,152 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { LanguageModelRegistry, LanguageModelStatus } from '@theia/ai-core';
import { inject, injectable } from '@theia/core/shared/inversify';
import { ChildProcessWithoutNullStreams, spawn } from 'child_process';
import { basename, dirname } from 'path';
import { fileURLToPath } from 'url';
import { LlamafileLanguageModel } from '../common/llamafile-language-model';
import { LlamafileManager, LlamafileModelDescription, LlamafileServerManagerClient } from '../common/llamafile-manager';
/**
 * Backend implementation of {@link LlamafileManager}: registers llamafile
 * language models, spawns/kills the llamafile server processes, forwards
 * their output to the client, and keeps each model's status up to date.
 */
@injectable()
export class LlamafileManagerImpl implements LlamafileManager {
    @inject(LanguageModelRegistry)
    protected languageModelRegistry: LanguageModelRegistry;
    // Running llamafile server processes, keyed by model name.
    private processMap: Map<string, ChildProcessWithoutNullStreams> = new Map();
    // RPC client used to forward process output to the frontend.
    private client: LlamafileServerManagerClient;

    /**
     * Registers a language model for every description that is not already
     * present in the registry. New models start in 'unavailable' status until
     * their server is started.
     */
    // Parameter renamed from 'LlamafileModelDescriptions' (PascalCase locals read
    // as types); positional call sites are unaffected.
    async addLanguageModels(modelDescriptions: LlamafileModelDescription[]): Promise<void> {
        for (const llamafile of modelDescriptions) {
            const model = await this.languageModelRegistry.getLanguageModel(llamafile.name);
            if (model) {
                if (!(model instanceof LlamafileLanguageModel)) {
                    console.warn(`Llamafile: model ${model.id} is not a Llamafile model`);
                    continue;
                } else {
                    // This can happen during the initializing of more than one frontends, changes are handled in the frontend
                    console.info(`Llamafile: skip creating or updating model ${llamafile.name} because it already exists.`);
                }
            } else {
                this.languageModelRegistry.addLanguageModels([
                    new LlamafileLanguageModel(
                        llamafile.name,
                        this.calculateStatus(false),
                        llamafile.uri,
                        llamafile.port
                    )
                ]);
            }
        }
    }

    /** Stops any running servers for the given ids, then unregisters the models. */
    removeLanguageModels(modelIds: string[]): void {
        modelIds.filter(modelId => this.isStarted(modelId)).forEach(modelId => this.stopServer(modelId));
        this.languageModelRegistry.removeLanguageModels(modelIds);
    }

    /** Returns the ids of all registered llamafile models whose server process is running. */
    async getStartedLlamafiles(): Promise<string[]> {
        const models = await this.languageModelRegistry.getLanguageModels();
        return models.filter(model => model instanceof LlamafileLanguageModel && this.isStarted(model.name)).map(model => model.id);
    }

    /**
     * Spawns the server process for the named llamafile and marks the model
     * as ready. No-op if the server is already running; rejects if no model
     * with that name is registered.
     */
    async startServer(name: string): Promise<void> {
        if (this.processMap.has(name)) {
            return;
        }
        const llm = await this.getLlamafileModel(name);
        if (!llm) {
            return Promise.reject(`Llamafile ${name} not found`);
        }
        const currentProcess = this.spawnLlamafileProcess(llm);
        this.processMap.set(name, currentProcess);
        await this.updateLanguageModelStatus(name, true);
        this.attachProcessHandlers(name, currentProcess);
    }

    /** Looks up the registered LlamafileLanguageModel with the given name, if any. */
    protected async getLlamafileModel(name: string): Promise<LlamafileLanguageModel | undefined> {
        const models = await this.languageModelRegistry.getLanguageModels();
        return models.find(model => model.id === name && model instanceof LlamafileLanguageModel) as LlamafileLanguageModel | undefined;
    }

    /**
     * Spawns the llamafile as a server process in its own directory.
     * NOTE(review): uses a POSIX-style './file' invocation — presumably not
     * Windows-compatible; confirm supported platforms.
     */
    protected spawnLlamafileProcess(llm: LlamafileLanguageModel): ChildProcessWithoutNullStreams {
        const filePath = fileURLToPath(llm.uri);
        const dir = dirname(filePath);
        const fileName = basename(filePath);
        return spawn(`./${fileName}`, ['--port', '' + llm.port, '--server', '--nobrowser'], { cwd: dir });
    }

    /** Forwards process output to the client and cleans up when the process ends. */
    protected attachProcessHandlers(name: string, currentProcess: ChildProcessWithoutNullStreams): void {
        currentProcess.stdout.on('data', (data: Buffer) => {
            this.client.log(name, data.toString());
        });
        currentProcess.stderr.on('data', (data: Buffer) => {
            this.client.error(name, data.toString());
        });
        currentProcess.on('close', code => {
            this.client.log(name, `LlamaFile process for file ${name} exited with code ${code}`);
            this.processMap.delete(name);
            // Set status to 'unavailable' when server stops
            this.updateLanguageModelStatus(name, false);
        });
        currentProcess.on('error', error => {
            this.client.error(name, `Error starting LlamaFile process for file ${name}: ${error.message}`);
            this.processMap.delete(name);
            // Set status to 'unavailable' on error
            this.updateLanguageModelStatus(name, false, error.message);
        });
    }

    /** Patches the registered model's status to reflect whether its server runs. */
    protected async updateLanguageModelStatus(modelId: string, hasStarted: boolean, message?: string): Promise<void> {
        const status: LanguageModelStatus = this.calculateStatus(hasStarted, message);
        await this.languageModelRegistry.patchLanguageModel<LlamafileLanguageModel>(modelId, {
            status
        });
    }

    /** Maps a started flag (and optional message) to a LanguageModelStatus. */
    protected calculateStatus(started: boolean, message?: string): LanguageModelStatus {
        if (started) {
            return { status: 'ready' };
        } else {
            return { status: 'unavailable', message: message || 'Llamafile server is not running' };
        }
    }

    /** Kills the named server process, if running, and marks the model unavailable. */
    stopServer(name: string): void {
        if (this.processMap.has(name)) {
            const currentProcess = this.processMap.get(name);
            currentProcess!.kill();
            this.processMap.delete(name);
            // Set status to 'unavailable' when server is stopped
            this.updateLanguageModelStatus(name, false);
        }
    }

    /** Whether a server process for the named llamafile is currently tracked. */
    isStarted(name: string): boolean {
        return this.processMap.has(name);
    }

    setClient(client: LlamafileServerManagerClient): void {
        this.client = client;
    }
}

View File

@@ -0,0 +1,27 @@
// *****************************************************************************
// Copyright (C) 2024 TypeFox GmbH and others.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
/* note: this bogus test file is required so that
we are able to run mocha unit tests on this
package, without having any actual unit tests in it.
This way a coverage report will be generated,
showing 0% coverage, instead of no report.
This file can be removed once we have real unit
tests in place. */
describe('ai-llamafile package', function () {
    it('support code coverage statistics', function () { return true; });
});

View File

@@ -0,0 +1,22 @@
{
"extends": "../../configs/base.tsconfig",
"compilerOptions": {
"composite": true,
"rootDir": "src",
"outDir": "lib"
},
"include": [
"src"
],
"references": [
{
"path": "../ai-core"
},
{
"path": "../core"
},
{
"path": "../output"
}
]
}