Skip to content

Commit

Permalink
Introduce basic Llamafile support
Browse files Browse the repository at this point in the history
Added a view for displaying all the configured llamafiles.
Configured llamafiles can be started and killed.
One llamafile can be set as active, which is then used in the chat.
The chat integration is currently hardcoded to use the active llamafile language model.
This should be changed as soon as the chat integration has a dropdown to select the language model (#42).
A follow-up issue will be created to describe the next steps.
  • Loading branch information
sgraband committed Jul 30, 2024
1 parent 07a5f95 commit 39dfd11
Show file tree
Hide file tree
Showing 19 changed files with 724 additions and 29 deletions.
1 change: 1 addition & 0 deletions examples/browser/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
"@theia/ai-history-ui": "1.49.0",
"@theia/ai-llamafile": "1.49.0",
"@theia/ai-openai": "1.49.0",
"@theia/ai-terminal": "1.49.0",
"@theia/api-provider-sample": "1.49.0",
"@theia/api-samples": "1.49.0",
"@theia/bulk-edit": "1.49.0",
Expand Down
3 changes: 3 additions & 0 deletions examples/browser/tsconfig.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@
{
"path": "../../packages/ai-history-ui"
},
{
"path": "../../packages/ai-llamafile"
},
{
"path": "../../packages/ai-openai"
},
Expand Down
1 change: 1 addition & 0 deletions examples/electron/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,7 @@
"@theia/ai-history-ui": "1.49.0",
"@theia/ai-llamafile": "1.49.0",
"@theia/ai-openai": "1.49.0",
"@theia/ai-terminal": "1.49.0",
"@theia/api-provider-sample": "1.49.0",
"@theia/api-samples": "1.49.0",
"@theia/bulk-edit": "1.49.0",
Expand Down
3 changes: 3 additions & 0 deletions examples/electron/tsconfig.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@
{
"path": "../../packages/ai-history-ui"
},
{
"path": "../../packages/ai-llamafile"
},
{
"path": "../../packages/ai-openai"
},
Expand Down
61 changes: 33 additions & 28 deletions packages/ai-core/src/browser/frontend-language-model-registry.ts
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,36 @@ export class FrontendLanguageModelRegistryImpl
@inject(AISettingsService)
protected settingsService: AISettingsService;

override addLanguageModels(models: LanguageModelMetaData[] | LanguageModel[]): void {
models.map(model => {
if (LanguageModel.is(model)) {
this.languageModels.push(
new Proxy(
model,
languageModelOutputHandler(
this.outputChannelManager.getChannel(
model.id
)
)
)
);
} else {
this.languageModels.push(
new Proxy(
this.createFrontendLanguageModel(
model
),
languageModelOutputHandler(
this.outputChannelManager.getChannel(
model.id
)
)
)
);
}
});
}

@postConstruct()
protected override init(): void {
this.client.setReceiver(this);
Expand All @@ -97,25 +127,12 @@ export class FrontendLanguageModelRegistryImpl
const promises = contributions.map(provider => provider());
const backendDescriptions =
this.registryDelegate.getLanguageModelDescriptions();

Promise.allSettled([backendDescriptions, ...promises]).then(
results => {
const backendDescriptionsResult = results[0];
if (backendDescriptionsResult.status === 'fulfilled') {
this.languageModels.push(
...backendDescriptionsResult.value.map(
description =>
new Proxy(
this.createFrontendLanguageModel(
description
),
languageModelOutputHandler(
this.outputChannelManager.getChannel(
description.id
)
)
)
)
);
this.addLanguageModels(backendDescriptionsResult.value);
} else {
this.logger.error(
'Failed to add language models contributed from the backend',
Expand All @@ -128,19 +145,7 @@ export class FrontendLanguageModelRegistryImpl
| PromiseRejectedResult
| PromiseFulfilledResult<LanguageModel[]>;
if (languageModelResult.status === 'fulfilled') {
this.languageModels.push(
...languageModelResult.value.map(
languageModel =>
new Proxy(
languageModel,
languageModelOutputHandler(
this.outputChannelManager.getChannel(
languageModel.id
)
)
)
)
);
this.addLanguageModels(languageModelResult.value);
} else {
this.logger.error(
'Failed to add some language models:',
Expand Down
20 changes: 19 additions & 1 deletion packages/ai-core/src/common/language-model.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************

import { ContributionProvider, ILogger } from '@theia/core';
import { ContributionProvider, ILogger, isFunction, isObject } from '@theia/core';
import { inject, injectable, named, postConstruct } from '@theia/core/shared/inversify';

export type ChatActor = 'user' | 'ai';
Expand Down Expand Up @@ -89,10 +89,22 @@ export interface LanguageModelMetaData {
readonly maxOutputTokens?: number;
}

export namespace LanguageModelMetaData {
    /**
     * Type guard checking the minimal structural contract of
     * {@link LanguageModelMetaData}: an object carrying `id` and `providerId`.
     */
    export function is(arg: unknown): arg is LanguageModelMetaData {
        return isObject(arg) && 'id' in arg && 'providerId' in arg;
    }
}

/** A language model that, beyond its metadata, can actually serve requests. */
export interface LanguageModel extends LanguageModelMetaData {
    /** Sends the given request to the underlying model and resolves with its response. */
    request(request: LanguageModelRequest): Promise<LanguageModelResponse>;
}

export namespace LanguageModel {
    /**
     * Type guard for {@link LanguageModel}: the metadata shape of
     * {@link LanguageModelMetaData} plus a callable `request` member.
     */
    export function is(arg: unknown): arg is LanguageModel {
        return isObject(arg) && 'id' in arg && 'providerId' in arg && isFunction(arg.request);
    }
}

// See also VS Code `ILanguageModelChatSelector`
interface VsCodeLanguageModelSelector {
readonly identifier?: string;
Expand All @@ -110,6 +122,7 @@ export interface LanguageModelSelector extends VsCodeLanguageModelSelector {

export const LanguageModelRegistry = Symbol('LanguageModelRegistry');
export interface LanguageModelRegistry {
addLanguageModels(models: LanguageModel[]): void;
getLanguageModels(): Promise<LanguageModel[]>;
getLanguageModel(id: string): Promise<LanguageModel | undefined>;
selectLanguageModels(request: LanguageModelSelector): Promise<LanguageModel[]>;
Expand Down Expand Up @@ -143,6 +156,11 @@ export class DefaultLanguageModelRegistryImpl implements LanguageModelRegistry {
});
}

addLanguageModels(models: LanguageModel[]): void {
models.map(model => this.languageModels.push(model));
// TODO: notify frontend about new models
}

async getLanguageModels(): Promise<LanguageModel[]> {
await this.initialized;
return this.languageModels;
Expand Down
10 changes: 10 additions & 0 deletions packages/ai-llamafile/.eslintrc.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
/** @type {import('eslint').Linter.Config} */
module.exports = {
    extends: [
        '../../configs/build.eslintrc.json'
    ],
    parserOptions: {
        // Resolve 'project' relative to this package directory so type-aware
        // lint rules work regardless of where the lint process is started.
        tsconfigRootDir: __dirname,
        project: 'tsconfig.json'
    }
};
1 change: 1 addition & 0 deletions packages/ai-llamafile/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
# AI Llamafile integration
52 changes: 52 additions & 0 deletions packages/ai-llamafile/package.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
{
"name": "@theia/ai-llamafile",
"version": "1.49.0",
"description": "Theia - Llamafile Integration",
"dependencies": {
"@theia/ai-core": "1.49.0",
"@theia/core": "1.49.0",
"@theia/filesystem": "1.49.0",
"@theia/workspace": "1.49.0",
"minimatch": "^5.1.0",
"tslib": "^2.6.2"
},
"publishConfig": {
"access": "public"
},
"theiaExtensions": [
{
"frontend": "lib/browser/ai-llamafile-frontend-module",
"backend": "lib/node/ai-llamafile-backend-module"
}
],
"keywords": [
"theia-extension"
],
"license": "EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0",
"repository": {
"type": "git",
"url": "https://github.com/eclipse-theia/theia.git"
},
"bugs": {
"url": "https://github.com/eclipse-theia/theia/issues"
},
"homepage": "https://github.com/eclipse-theia/theia",
"files": [
"lib",
"src"
],
"scripts": {
"build": "theiaext build",
"clean": "theiaext clean",
"compile": "theiaext compile",
"lint": "theiaext lint",
"test": "theiaext test",
"watch": "theiaext watch"
},
"devDependencies": {
"@theia/ext-scripts": "1.49.0"
},
"nyc": {
"extends": "../../configs/nyc.json"
}
}
38 changes: 38 additions & 0 deletions packages/ai-llamafile/src/browser/ai-llamafile-frontend-module.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { CommandContribution } from '@theia/core';
import { ContainerModule } from '@theia/core/shared/inversify';
import { LlamafileCommandContribution, NewLlamafileConfigQuickInputProvider } from './llamafile-command-contribution';
import { bindViewContribution, WidgetFactory } from '@theia/core/lib/browser';
import { LlamafileViewContribution } from './llamafile-view-contribution';
import { LlamafileListWidget } from './llamafile-list-widget';
import { LlamafileServerManager, LlamafileServerManagerPath } from '../common/llamafile-server-manager';
import { RemoteConnectionProvider, ServiceConnectionProvider } from '@theia/core/lib/browser/messaging/service-connection-provider';

export default new ContainerModule(bind => {
    bind(NewLlamafileConfigQuickInputProvider).toSelf().inSingletonScope();
    // Deliberately transient: the WidgetFactory below must create a fresh
    // widget instance per request.
    bind(LlamafileListWidget).toSelf();
    bind(WidgetFactory).toDynamicValue(context => ({
        id: LlamafileListWidget.ID,
        createWidget: () => context.container.get<LlamafileListWidget>(LlamafileListWidget),
    })).inSingletonScope();
    bind(CommandContribution).to(LlamafileCommandContribution).inSingletonScope();
    bindViewContribution(bind, LlamafileViewContribution);
    // Singleton scope so all injections share a single RPC proxy to the
    // backend server manager instead of creating one proxy per injection.
    bind(LlamafileServerManager).toDynamicValue(ctx => {
        const connection = ctx.container.get<ServiceConnectionProvider>(RemoteConnectionProvider);
        return connection.createProxy<LlamafileServerManager>(LlamafileServerManagerPath);
    }).inSingletonScope();
});
109 changes: 109 additions & 0 deletions packages/ai-llamafile/src/browser/llamafile-command-contribution.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,109 @@
// *****************************************************************************
// Copyright (C) 2024 EclipseSource GmbH.
//
// This program and the accompanying materials are made available under the
// terms of the Eclipse Public License v. 2.0 which is available at
// http://www.eclipse.org/legal/epl-2.0.
//
// This Source Code may also be made available under the following Secondary
// Licenses when the conditions for such availability set forth in the Eclipse
// Public License v. 2.0 are satisfied: GNU General Public License, version 2
// with the GNU Classpath Exception which is available at
// https://www.gnu.org/software/classpath/license.html.
//
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-only WITH Classpath-exception-2.0
// *****************************************************************************
import { CommandContribution, CommandRegistry } from '@theia/core';
import { inject, injectable } from '@theia/core/shared/inversify';
import { QuickInputService } from '@theia/core/lib/browser';
import { FileDialogService, OpenFileDialogProps } from '@theia/filesystem/lib/browser';
import { LlamafileListItem } from './llamafile-list-widget';

// NOTE(review): this command is not registered anywhere in this file, and its
// id uses the 'core.keyboard.' prefix rather than 'llamafile.' — presumably a
// leftover from an earlier iteration; confirm before removing.
export const CREATE_LANGUAGE_MODEL = {
    id: 'core.keyboard.languagemodel',
    label: 'Create Language Model',
};

/** Command that prompts the user for a new llamafile configuration entry (name, path, port). */
export const NewLlamafileEntryInput = {
    id: 'llamafile.input.new.entry',
    label: 'New Llamafile Entry',
};

@injectable()
export class NewLlamafileConfigQuickInputProvider {

    @inject(QuickInputService)
    protected readonly quickInputService: QuickInputService;

    @inject(FileDialogService)
    protected readonly fileDialogService: FileDialogService;

    /**
     * Collects a new llamafile configuration from the user: a display name,
     * the path to the llamafile, and the local port to serve it on.
     *
     * @returns a new, not-yet-started and inactive list item
     * @throws Error if the user cancels any input or enters an invalid port
     */
    async askForNameAndPath(): Promise<LlamafileListItem> {
        const name = await this.quickInputService.input({
            prompt: 'Enter a name'
        });
        if (!name) {
            throw new Error('Name input was canceled.');
        }

        // Select the llamafile via a file system picker.
        const path = await this.askForPath();
        if (!path) {
            throw new Error('Path selection was canceled.');
        }

        const portInput = await this.quickInputService.input({
            prompt: 'Enter a port'
        });
        const port = Number(portInput);
        // Reject cancellation, empty input, non-numeric input, and values
        // outside the valid TCP port range. The previous message claimed the
        // input "was canceled" even when it was merely not a number.
        if (!portInput || !Number.isInteger(port) || port < 1 || port > 65535) {
            throw new Error('Port input was canceled or is not a valid port number.');
        }

        return { name, path, port, started: false, active: false };
    }

    /** Opens a file dialog filtered to '*.llamafile' files; resolves with the selected URI as string, or undefined on cancel. */
    private async askForPath(): Promise<string | undefined> {
        const props: OpenFileDialogProps = {
            title: 'Select a file',
            canSelectFiles: true,
            canSelectFolders: false,
            filters: {
                'Llamafile': ['llamafile']
            },
            canSelectMany: false
        };
        const uri = await this.fileDialogService.showOpenDialog(props);
        return uri?.toString();
    }
}

@injectable()
export class LlamafileCommandContribution implements CommandContribution {

    @inject(NewLlamafileConfigQuickInputProvider)
    protected readonly quickInputProvider: NewLlamafileConfigQuickInputProvider;

    /**
     * Registers the command that asks the user for a new llamafile entry.
     * Cancellation or failure is logged and yields an undefined result.
     */
    registerCommands(commandRegistry: CommandRegistry): void {
        commandRegistry.registerCommand(NewLlamafileEntryInput, {
            execute: () =>
                this.quickInputProvider.askForNameAndPath().catch(error => {
                    console.error('Input process was canceled or failed.', error);
                })
        });
    }
}
Loading

0 comments on commit 39dfd11

Please sign in to comment.