incorporate review comments
eneufeld committed Oct 10, 2024
1 parent e466f26 commit 2a1a58e
Showing 11 changed files with 104 additions and 122 deletions.
9 changes: 3 additions & 6 deletions packages/ai-llamafile/package.json
@@ -3,13 +3,10 @@
"version": "1.54.0",
"description": "Theia - Llamafile Integration",
"dependencies": {
"@theia/core": "1.54.0",
"@theia/filesystem": "1.54.0",
"@theia/workspace": "1.54.0",
"minimatch": "^5.1.0",
"tslib": "^2.6.2",
"@theia/ai-core": "1.54.0",
"@theia/output": "1.54.0"
"@theia/core": "1.54.0",
"@theia/output": "1.54.0",
"tslib": "^2.6.2"
},
"publishConfig": {
"access": "public"
packages/ai-llamafile/src/browser/llamafile-command-contribution.ts
@@ -13,13 +13,12 @@
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
- import { LanguageModelRegistry } from '@theia/ai-core';
import { AICommandHandlerFactory } from '@theia/ai-core/lib/browser/ai-command-handler-factory';
import { CommandContribution, CommandRegistry, MessageService } from '@theia/core';
import { PreferenceService, QuickInputService } from '@theia/core/lib/browser';
import { inject, injectable } from '@theia/core/shared/inversify';
- import { LlamafileLanguageModel } from '../common/llamafile-language-model';
- import { LlamafileEntry, PREFERENCE_LLAMAFILE } from './llamafile-preferences';
+ import { LlamafileEntry, LlamafileManager } from '../common/llamafile-manager';
+ import { PREFERENCE_LLAMAFILE } from './llamafile-preferences';

export const StartLlamafileCommand = {
id: 'llamafile.start',
@@ -45,8 +44,8 @@ export class LlamafileCommandContribution implements CommandContribution {
@inject(MessageService)
protected messageService: MessageService;

- @inject(LanguageModelRegistry)
- protected languageModelRegistry: LanguageModelRegistry;
+ @inject(LlamafileManager)
+ protected llamafileManager: LlamafileManager;

registerCommands(commandRegistry: CommandRegistry): void {
commandRegistry.registerCommand(StartLlamafileCommand, this.commandHandlerFactory({
@@ -62,12 +61,7 @@
if (result === undefined) {
return;
}
- const model = await this.getLanguageModelForItem(result.label);
- if (model === undefined) {
- this.messageService.error('No fitting Llamafile model found.');
- return;
- }
- model.startServer();
+ this.llamafileManager.startServer(result.label);
} catch (error) {
console.error('Something went wrong during the llamafile start.', error);
}
@@ -76,39 +70,21 @@
commandRegistry.registerCommand(StopLlamafileCommand, this.commandHandlerFactory({
execute: async () => {
try {
- const llamaFiles = await this.getStartedLlamafiles();
+ const llamaFiles = await this.llamafileManager.getStartedLlamafiles();
if (llamaFiles === undefined || llamaFiles.length === 0) {
this.messageService.error('No Llamafiles running.');
return;
}
- const options = llamaFiles.map(llamaFile => ({ label: llamaFile.name }));
+ const options = llamaFiles.map(llamaFile => ({ label: llamaFile }));
const result = await this.quickInputService.showQuickPick(options);
if (result === undefined) {
return;
}
- const model = llamaFiles.find(llamaFile => llamaFile.name === result.label);
- if (model === undefined) {
- this.messageService.error('No fitting Llamafile model found.');
- return;
- }
- model.killServer();
+ this.llamafileManager.stopServer(result.label);
} catch (error) {
console.error('Something went wrong during the llamafile stop.', error);
}
}
}));
}

- private async getLanguageModelForItem(name: string): Promise<LlamafileLanguageModel | undefined> {
- const result = await this.languageModelRegistry.getLanguageModel(name);
- if (result instanceof LlamafileLanguageModel) {
- return result;
- } else {
- return undefined;
- }
- }
- private async getStartedLlamafiles(): Promise<LlamafileLanguageModel[]> {
- const models = await this.languageModelRegistry.getLanguageModels();
- return models.filter(model => model instanceof LlamafileLanguageModel && model.isStarted) as LlamafileLanguageModel[];
- }
}
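With the helpers above removed, the frontend command no longer resolves LlamafileLanguageModel instances itself; it hands the selected name to the backend LlamafileManager. A minimal sketch of the resulting start flow, using only the types visible in this diff (the standalone function is illustrative, not part of the commit):

    import { QuickInputService } from '@theia/core/lib/browser';
    import { LlamafileEntry, LlamafileManager } from '../common/llamafile-manager';

    // Sketch: start a llamafile chosen from the configured entries.
    async function startSelectedLlamafile(
        quickInput: QuickInputService,
        manager: LlamafileManager,
        entries: LlamafileEntry[]
    ): Promise<void> {
        const options = entries.map(entry => ({ label: entry.name }));
        const result = await quickInput.showQuickPick(options);
        if (result === undefined) {
            return; // user dismissed the quick pick
        }
        // The backend resolves the name to a registered model and spawns the process.
        await manager.startServer(result.label);
    }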
packages/ai-llamafile/src/browser/llamafile-frontend-application-contribution.ts
@@ -14,48 +14,38 @@
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************

- import { LanguageModelRegistry } from '@theia/ai-core';
import { FrontendApplicationContribution, PreferenceService } from '@theia/core/lib/browser';
import { inject, injectable } from '@theia/core/shared/inversify';
- import { LlamafileLanguageModel } from '../common/llamafile-language-model';
- import { LlamafileServerManager } from '../common/llamafile-server-manager';
- import { LlamafileEntry, PREFERENCE_LLAMAFILE } from './llamafile-preferences';
+ import { LlamafileEntry, LlamafileManager } from '../common/llamafile-manager';
+ import { PREFERENCE_LLAMAFILE } from './llamafile-preferences';

@injectable()
export class LlamafileFrontendApplicationContribution implements FrontendApplicationContribution {

@inject(PreferenceService)
protected preferenceService: PreferenceService;

- @inject(LanguageModelRegistry)
- protected languageModelRegistry: LanguageModelRegistry;
-
- @inject(LlamafileServerManager)
- protected llamafileServerManager: LlamafileServerManager;
+ @inject(LlamafileManager)
+ protected llamafileManager: LlamafileManager;

onStart(): void {
this.preferenceService.ready.then(() => {
const llamafiles = this.preferenceService.get<LlamafileEntry[]>(PREFERENCE_LLAMAFILE, []);

- const models = llamafiles.map(llamafile =>
- LlamafileLanguageModel.createNewLlamafileLanguageModel(llamafile.name, llamafile.uri, llamafile.port, this.llamafileServerManager));
- this.languageModelRegistry.addLanguageModels(models);
+ this.llamafileManager.addLanguageModels(llamafiles);

this.preferenceService.onPreferenceChanged(event => {
if (event.preferenceName === PREFERENCE_LLAMAFILE) {
// old models in the preference
- const oldModels = new Set((event.oldValue as LlamafileEntry[]).map(v => v.name));
+ const oldModels = new Set(((event.oldValue ?? []) as LlamafileEntry[]).map(v => v.name));
// new models in the preference, kept as a map from name to entry, so added models can be detected and their language models created and registered
const newModels = (event.newValue as LlamafileEntry[]).reduce((acc, v) => { acc.set(v.name, v); return acc; }, new Map<string, LlamafileEntry>());

const modelsToRemove = [...oldModels.values()].filter(model => !newModels.has(model));
- const modelDescriptionsToAdd = [...newModels.values()].filter(model => !oldModels.has(model.name));
+ this.llamafileManager.removeLanguageModels(modelsToRemove);

- this.languageModelRegistry.removeLanguageModels(modelsToRemove);
-
- const modelsToAdd = modelDescriptionsToAdd.map(llamafile =>
- LlamafileLanguageModel.createNewLlamafileLanguageModel(llamafile.name, llamafile.uri, llamafile.port, this.llamafileServerManager));
- this.languageModelRegistry.addLanguageModels(modelsToAdd);
+ const modelDescriptionsToAdd = [...newModels.values()].filter(model => !oldModels.has(model.name));
+ this.llamafileManager.addLanguageModels(modelDescriptionsToAdd);
}
});
});
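The handler above diffs the old and new preference values by model name. Extracted as a standalone sketch (LlamafileEntry as defined in llamafile-manager.ts; the helper name is illustrative):

    import { LlamafileEntry } from '../common/llamafile-manager';

    // Sketch: compute which models to remove and which entries to add.
    function diffLlamafileEntries(
        oldValue: LlamafileEntry[] | undefined,
        newValue: LlamafileEntry[] | undefined
    ): { toRemove: string[]; toAdd: LlamafileEntry[] } {
        const oldNames = new Set((oldValue ?? []).map(v => v.name));
        const newByName = new Map((newValue ?? []).map(v => [v.name, v] as const));
        return {
            toRemove: [...oldNames].filter(name => !newByName.has(name)),    // names that disappeared
            toAdd: [...newByName.values()].filter(v => !oldNames.has(v.name)) // names that are new
        };
    }

Note that this name-based diff does not react to an entry whose name stays the same while its uri or port changes; such an entry is neither removed nor re-added.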
packages/ai-llamafile/src/browser/llamafile-frontend-module.ts
@@ -17,15 +17,15 @@ import { CommandContribution } from '@theia/core';
import { FrontendApplicationContribution, RemoteConnectionProvider, ServiceConnectionProvider } from '@theia/core/lib/browser';
import { ContainerModule } from '@theia/core/shared/inversify';
import { OutputChannelManager, OutputChannelSeverity } from '@theia/output/lib/browser/output-channel';
- import { LlamafileServerManager, LlamafileServerManagerClient, LlamafileServerManagerPath } from '../common/llamafile-server-manager';
+ import { LlamafileManager, LlamafileManagerPath, LlamafileServerManagerClient } from '../common/llamafile-manager';
import { LlamafileCommandContribution } from './llamafile-command-contribution';
import { LlamafileFrontendApplicationContribution } from './llamafile-frontend-application-contribution';
import { bindAILlamafilePreferences } from './llamafile-preferences';

export default new ContainerModule(bind => {
bind(FrontendApplicationContribution).to(LlamafileFrontendApplicationContribution).inSingletonScope();
bind(CommandContribution).to(LlamafileCommandContribution).inSingletonScope();
- bind(LlamafileServerManager).toDynamicValue(ctx => {
+ bind(LlamafileManager).toDynamicValue(ctx => {
const connection = ctx.container.get<ServiceConnectionProvider>(RemoteConnectionProvider);
const outputChannelManager = ctx.container.get(OutputChannelManager);
const client: LlamafileServerManagerClient = {
@@ -38,8 +38,8 @@ export default new ContainerModule(bind => {
channel.appendLine(message, OutputChannelSeverity.Info);
}
};
- return connection.createProxy<LlamafileServerManager>(LlamafileServerManagerPath, client);
- });
+ return connection.createProxy<LlamafileManager>(LlamafileManagerPath, client);
+ }).inSingletonScope();

bindAILlamafilePreferences(bind);
});
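The newly added .inSingletonScope() on the dynamic binding is significant: without it, every injection of LlamafileManager would run the factory again and create a fresh RPC proxy, registering yet another client object on the backend. A reduced sketch of the pattern, assuming the identifiers from the imports above (the client bodies are placeholders):

    // Sketch: one shared proxy for the whole frontend; the client object is the
    // callback surface the backend uses to push log/error lines back.
    bind(LlamafileManager).toDynamicValue(ctx => {
        const connection = ctx.container.get<ServiceConnectionProvider>(RemoteConnectionProvider);
        const client: LlamafileServerManagerClient = {
            log: (name, message) => { /* append to the per-llamafile output channel */ },
            error: (name, message) => { /* append with OutputChannelSeverity.Error */ }
        };
        return connection.createProxy<LlamafileManager>(LlamafileManagerPath, client);
    }).inSingletonScope(); // one proxy, one registered client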
7 changes: 1 addition & 6 deletions packages/ai-llamafile/src/browser/llamafile-preferences.ts
@@ -27,7 +27,7 @@ export const aiLlamafilePreferencesSchema: PreferenceSchema = {
title: AI_LLAMAFILE_PREFERENCES_TITLE,
markdownDescription: '❗ This setting allows you to add llamafiles.\
\n\
- You need to provide a user friendly name, the path to the llamafile and the port to use.\
+ You need to provide a user friendly `name`, the file `uri` to the llamafile and the `port` to use.\
\n\
In order to start your llamafile, you have to call the "Start Llamafile" command, where you can then select the llamafile to start.',
type: 'array',
@@ -52,11 +52,6 @@ export const aiLlamafilePreferencesSchema: PreferenceSchema = {
}
}
};
- export interface LlamafileEntry {
- name: string;
- uri: string;
- port: number;
- }

export function bindAILlamafilePreferences(bind: interfaces.Bind): void {
bind(PreferenceContribution).toConstantValue({ schema: aiLlamafilePreferencesSchema });
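For reference, a configuration matching this schema could look as follows in settings.json. The top-level key is whatever PREFERENCE_LLAMAFILE resolves to; the key shown here is illustrative, and the uri must be a file URI because the backend converts it with fileURLToPath:

    {
        "ai-features.llamafiles": [
            {
                "name": "my-llama",
                "uri": "file:///home/user/models/my-model.llamafile",
                "port": 30000
            }
        ]
    }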
31 changes: 1 addition & 30 deletions packages/ai-llamafile/src/common/llamafile-language-model.ts
@@ -15,26 +15,13 @@
// *****************************************************************************

import { LanguageModel, LanguageModelRequest, LanguageModelResponse, LanguageModelStreamResponsePart } from '@theia/ai-core';
- import { LlamafileServerManager } from './llamafile-server-manager';

export class LlamafileLanguageModel implements LanguageModel {

readonly providerId = 'llamafile';
readonly vendor: string = 'Mozilla';

- constructor(readonly name: string, readonly uri: string, readonly port: number, readonly serverManager: LlamafileServerManager) {
- }
-
- startServer(): void {
- this.serverManager.startServer(this.name, this.uri, this.port);
- }
-
- killServer(): void {
- this.serverManager.killServer(this.name);
- }
-
- get isStarted(): boolean {
- return this.serverManager.isStarted(this.name);
+ constructor(readonly name: string, readonly uri: string, readonly port: number) {
}

get id(): string {
@@ -72,7 +59,6 @@ export class LlamafileLanguageModel implements LanguageModel {
throw new Error(`HTTP error! status: ${response.status}`);
}

- // TODO: Get the stream working
if (!response.body) {
throw new Error('Response body is undefined');
}
@@ -105,17 +91,6 @@
}
}
};

- // const data = await response.json();
- // if (data && data.content) {
- // return {
- // text: data.content
- // };
- // } else {
- // return {
- // text: 'No content field found in the response.'
- // };
- // }
} catch (error) {
console.error('Error:', error);
return {
Expand All @@ -124,8 +99,4 @@ export class LlamafileLanguageModel implements LanguageModel {
}
}

- static createNewLlamafileLanguageModel(name: string, uri: string, port: number, serverManager: LlamafileServerManager): LlamafileLanguageModel {
- return new LlamafileLanguageModel(name, uri, port, serverManager);
- }

}
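The surviving request implementation streams the response body instead of awaiting a single JSON payload (the deleted commented-out block above was the old non-streaming fallback). A self-contained sketch of that kind of consumption; the /completion endpoint and the 'data: ' line framing follow the llama.cpp server that llamafiles embed, and may differ from the exact code elided in this hunk:

    // Sketch: turn a streamed llamafile completion into incremental text parts.
    async function* streamCompletion(port: number, prompt: string): AsyncGenerator<string> {
        const response = await fetch(`http://localhost:${port}/completion`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ prompt, stream: true })
        });
        if (!response.ok) { throw new Error(`HTTP error! status: ${response.status}`); }
        if (!response.body) { throw new Error('Response body is undefined'); }
        const reader = response.body.getReader();
        const decoder = new TextDecoder();
        for (;;) {
            const { value, done } = await reader.read();
            if (done) { break; }
            for (const line of decoder.decode(value).split('\n')) {
                if (line.startsWith('data: ')) {
                    // each data line carries a JSON object with a 'content' field
                    yield JSON.parse(line.substring('data: '.length)).content;
                }
            }
        }
    }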
packages/ai-llamafile/src/common/llamafile-manager.ts
@@ -13,17 +13,25 @@
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
- export const LlamafileServerManager = Symbol('LlamafileServerManager');
+ export const LlamafileManager = Symbol('LlamafileManager');

- export const LlamafileServerManagerPath = '/services/llamafileservermanager';
+ export const LlamafileManagerPath = '/services/llamafilemanager';

- export interface LlamafileServerManager {
- startServer(name: string, uri: string, port: number): void;
- killServer(name: string): void;
- isStarted(name: string): boolean;
+ export interface LlamafileManager {
+ startServer(name: string): Promise<void>;
+ stopServer(name: string): void;
+ getStartedLlamafiles(): Promise<string[]>;
setClient(client: LlamafileServerManagerClient): void;
+ addLanguageModels(llamaFiles: LlamafileEntry[]): void;
+ removeLanguageModels(modelIds: string[]): void;
}
export interface LlamafileServerManagerClient {
log(llamafileName: string, message: string): void;
error(llamafileName: string, message: string): void;
}

+ export interface LlamafileEntry {
+ name: string;
+ uri: string;
+ port: number;
+ }
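Taken together, the interface now covers both model registration and process lifecycle, so the frontend needs no LlamafileLanguageModel instances at all. A usage sketch (the entry values are examples):

    // Sketch: typical frontend usage; every call travels over RPC to the backend.
    async function example(manager: LlamafileManager): Promise<void> {
        manager.addLanguageModels([{ name: 'my-llama', uri: 'file:///models/my.llamafile', port: 30000 }]);
        await manager.startServer('my-llama');                 // spawns the process on the backend
        const running = await manager.getStartedLlamafiles();  // e.g. ['my-llama']
        running.forEach(id => manager.stopServer(id));         // kills the spawned processes
        manager.removeLanguageModels(running);                 // unregisters the models again
    }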
10 changes: 5 additions & 5 deletions packages/ai-llamafile/src/node/llamafile-backend-module.ts
@@ -15,16 +15,16 @@
// *****************************************************************************

import { ContainerModule } from '@theia/core/shared/inversify';
- import { LlamafileServerManagerImpl } from './llamafile-server-manager-impl';
- import { LlamafileServerManager, LlamafileServerManagerClient, LlamafileServerManagerPath } from '../common/llamafile-server-manager';
+ import { LlamafileManagerImpl } from './llamafile-manager-impl';
+ import { LlamafileManager, LlamafileServerManagerClient, LlamafileManagerPath } from '../common/llamafile-manager';
import { ConnectionHandler, RpcConnectionHandler } from '@theia/core';

export default new ContainerModule(bind => {
- bind(LlamafileServerManager).to(LlamafileServerManagerImpl).inSingletonScope();
+ bind(LlamafileManager).to(LlamafileManagerImpl).inSingletonScope();
bind(ConnectionHandler).toDynamicValue(ctx => new RpcConnectionHandler<LlamafileServerManagerClient>(
- LlamafileServerManagerPath,
+ LlamafileManagerPath,
client => {
- const service = ctx.container.get<LlamafileServerManager>(LlamafileServerManager);
+ const service = ctx.container.get<LlamafileManager>(LlamafileManager);
service.setClient(client);
return service;
}
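With this wiring, each frontend connection hands its client to the shared LlamafileManagerImpl via setClient, which is what lets the backend push process output back into the frontend's output channels. The resulting round trip, roughly (assumed flow, not code from the commit):

    await manager.startServer('my-llama');        // frontend -> RPC proxy -> LlamafileManagerImpl
    // ...the backend spawns the process and, as output arrives, calls back:
    // client.log('my-llama', '<stdout line>');   // backend -> frontend output channel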
packages/ai-llamafile/src/node/llamafile-manager-impl.ts
@@ -13,26 +13,50 @@
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
- import { injectable } from '@theia/core/shared/inversify';
+ import { LanguageModelRegistry } from '@theia/ai-core';
+ import { inject, injectable } from '@theia/core/shared/inversify';
import { ChildProcessWithoutNullStreams, spawn } from 'child_process';
- import { dirname, basename } from 'path';
- import { LlamafileServerManager, LlamafileServerManagerClient } from '../common/llamafile-server-manager';
+ import { basename, dirname } from 'path';
import { fileURLToPath } from 'url';
+ import { LlamafileLanguageModel } from '../common/llamafile-language-model';
+ import { LlamafileEntry, LlamafileManager, LlamafileServerManagerClient } from '../common/llamafile-manager';

@injectable()
- export class LlamafileServerManagerImpl implements LlamafileServerManager {
+ export class LlamafileManagerImpl implements LlamafileManager {

+ @inject(LanguageModelRegistry)
+ protected languageModelRegistry: LanguageModelRegistry;

private processMap: Map<string, ChildProcessWithoutNullStreams> = new Map();
private client: LlamafileServerManagerClient;

- startServer(name: string, uri: string, port: number): void {
+ addLanguageModels(llamaFiles: LlamafileEntry[]): void {
+ const models = llamaFiles.map(llamafile =>
+ new LlamafileLanguageModel(llamafile.name, llamafile.uri, llamafile.port));
+ this.languageModelRegistry.addLanguageModels(models);
+ }
+ removeLanguageModels(modelIds: string[]): void {
+ this.languageModelRegistry.removeLanguageModels(modelIds);
+ }
+
+ async getStartedLlamafiles(): Promise<string[]> {
+ const models = await this.languageModelRegistry.getLanguageModels();
+ return models.filter(model => model instanceof LlamafileLanguageModel && this.isStarted(model.name)).map(model => model.id);
+ }
+
+ async startServer(name: string): Promise<void> {
if (!this.processMap.has(name)) {
- const filePath = fileURLToPath(uri);
+ const models = await this.languageModelRegistry.getLanguageModels();
+ const llm = models.find(model => model.id === name && model instanceof LlamafileLanguageModel) as LlamafileLanguageModel | undefined;
+ if (llm === undefined) {
+ return Promise.reject(`Llamafile ${name} not found`);
+ }
+ const filePath = fileURLToPath(llm.uri);

// Extract the directory and file name
const dir = dirname(filePath);
const fileName = basename(filePath);
- const currentProcess = spawn(`./${fileName}`, ['--port', '' + port, '--server', '--nobrowser'], { cwd: dir });
+ const currentProcess = spawn(`./${fileName}`, ['--port', '' + llm.port, '--server', '--nobrowser'], { cwd: dir });
this.processMap.set(name, currentProcess);
currentProcess.stdout.on('data', (data: Buffer) => {
const output = data.toString();
@@ -49,7 +73,7 @@ export class LlamafileServerManagerImpl implements LlamafileServerManager {
}
}

- killServer(name: string): void {
+ stopServer(name: string): void {
if (this.processMap.has(name)) {
const currentProcess = this.processMap.get(name);
currentProcess!.kill();
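The implementation resolves the model by id, converts its file URI to a path, and runs the llamafile from its own directory. A condensed, self-contained sketch of that spawning logic; stderr handling is cut off in the hunk above, so it is omitted here too:

    import { ChildProcessWithoutNullStreams, spawn } from 'child_process';
    import { basename, dirname } from 'path';
    import { fileURLToPath } from 'url';

    // Sketch: spawn a llamafile server. A llamafile is a self-executing archive,
    // so it is invoked directly, with cwd set to its own directory.
    function spawnLlamafile(uri: string, port: number): ChildProcessWithoutNullStreams {
        const filePath = fileURLToPath(uri); // file:///models/my.llamafile -> /models/my.llamafile
        const proc = spawn(`./${basename(filePath)}`,
            ['--port', String(port), '--server', '--nobrowser'],
            { cwd: dirname(filePath) });
        proc.stdout.on('data', (data: Buffer) => console.log(data.toString()));
        return proc; // the manager keeps this in processMap, keyed by model name, to kill it later
    }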