Merge pull request #1041 from getappmap/develop
Release
dustinbyrne authored Oct 17, 2024
2 parents eadf011 + 8a697e0 commit 899f75a
Showing 15 changed files with 357 additions and 82 deletions.
package.json (5 changes: 3 additions & 2 deletions)
@@ -309,7 +309,8 @@
},
"appMap.navie.useVSCodeLM": {
"type": "boolean",
"description": "Use VSCode language model API for Navie AI if available.\nRequires a recent VSCode version and (currently) GitHub Copilot extension."
"description": "Use GitHub Copilot as Navie backend if available.\nRequires a recent VSCode version and GitHub Copilot extension.",
"default": true
},
"appMap.navie.rpcPort": {
"type": "number",
@@ -705,7 +706,7 @@
"dependencies": {
"@appland/appmap": "^3.129.0",
"@appland/client": "^1.14.1",
"@appland/components": "^4.38.3",
"@appland/components": "^4.39.0",
"@appland/diagrams": "^1.8.0",
"@appland/models": "^2.10.2",
"@appland/rpc": "^1.15.0",
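
For reference, the renamed, now default-true appMap.navie.useVSCodeLM setting above would typically be read through the standard VS Code configuration API. The accessor below is only a sketch; the extension's real ExtensionSettings module is not part of this diff.

import * as vscode from 'vscode';

// Sketch only: mirrors the "default": true added above, using the stock configuration API.
function useVsCodeLM(): boolean {
  return vscode.workspace.getConfiguration('appMap.navie').get<boolean>('useVSCodeLM', true);
}
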
src/extension.ts (12 changes: 8 additions & 4 deletions)
@@ -225,10 +225,13 @@ export async function activate(context: vscode.ExtensionContext): Promise<AppMap

const processService = new NodeProcessService(context);

ChatCompletion.initialize(context);
await ChatCompletion.initialize(context);

AssetService.register(context);
const dependenciesInstalled = AssetService.updateAll();
const dependenciesInstalled = ExtensionSettings.appMapCommandLineToolsPath
? // do not try to download if we're using local tools anyway
Promise.resolve()
: AssetService.updateAll();
const chatSearchWebview: Promise<ChatSearchWebview> = (async () => {
await dependenciesInstalled;

@@ -243,13 +246,14 @@ export async function activate(context: vscode.ExtensionContext): Promise<AppMap

context.subscriptions.push(
vscode.workspace.onDidChangeConfiguration((e) => {
if (e.affectsConfiguration('appMap.commandLineEnvironment')) rpcService.scheduleRestart();
if (e.affectsConfiguration('appMap.commandLineEnvironment'))
rpcService.debouncedRestart();
}),
vscode.commands.registerCommand('appmap.rpc.restart', async () => {
await rpcService.restartServer();
vscode.window.showInformationMessage('Navie restarted successfully.');
}),
ChatCompletion.onSettingsChanged(rpcService.scheduleRestart, rpcService)
ChatCompletion.onSettingsChanged(rpcService.debouncedRestart, rpcService)
);

const webview = ChatSearchWebview.register(
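
The hunks above replace rpcService.scheduleRestart with rpcService.debouncedRestart, presumably so that a burst of configuration or settings-changed events results in a single restart. The RPC service itself is not shown in this diff; the snippet below is a hypothetical sketch of what such debouncing might look like, with the one-second delay chosen arbitrarily.

// Hypothetical sketch, not the extension's actual RPC process service.
class DebouncedRestarter {
  private timer: ReturnType<typeof setTimeout> | undefined;

  constructor(private readonly restartServer: () => Promise<void>, private readonly delayMs = 1000) {}

  // Coalesces rapid calls: only the last request within delayMs results in a restart.
  debouncedRestart(): void {
    if (this.timer) clearTimeout(this.timer);
    this.timer = setTimeout(() => {
      this.timer = undefined;
      void this.restartServer();
    }, this.delayMs);
  }
}
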
src/lib/once.ts (21 changes: 21 additions & 0 deletions)
@@ -0,0 +1,21 @@
import * as vscode from 'vscode';

/**
* Returns true the first time it is called with the given key, and updates the global state so later calls return false.
* @param context
* @param key
*/
export default function once(context: vscode.ExtensionContext, key: string): boolean {
const hasBeenShown = context.globalState.get(key, false);

if (!hasBeenShown) {
context.globalState.update(key, true);
return true;
}

return false;
}

once.reset = function (context: vscode.ExtensionContext, key: string) {
context.globalState.update(key, undefined);
};
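
The helper is used later in this commit (see chatCompletion.ts below) to gate one-time notifications. A minimal usage sketch, assuming the helper is imported from this new module; the key and message text here are illustrative only.

import * as vscode from 'vscode';
import once from './once';

function warnOnceAboutMissingLmApi(context: vscode.ExtensionContext): void {
  // Shows the notification only the first time this key is seen.
  if (once(context, 'no-lm-api-available'))
    vscode.window.showInformationMessage('AppMap: the LanguageModel API is not available.');
}

// Later, once the condition clears, the notification can be re-armed:
// once.reset(context, 'no-lm-api-available');
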
src/services/chatCompletion.ts (155 changes: 105 additions & 50 deletions)
@@ -16,6 +16,7 @@ import vscode, {
} from 'vscode';

import ExtensionSettings from '../configuration/extensionSettings';
import once from '../lib/once';

const debug = debuglog('appmap-vscode:chat-completion');

@@ -29,7 +30,11 @@ let instance: Promise<ChatCompletion> | undefined;
export default class ChatCompletion implements Disposable {
public readonly server: Server;

constructor(private portNumber = 0, public readonly key = randomKey()) {
constructor(
private portNumber = 0,
public readonly key = randomKey(),
public readonly host = '127.0.0.1'
) {
this.server = createServer(async (req, res) => {
try {
await this.handleRequest(req, res);
@@ -44,7 +49,7 @@ export default class ChatCompletion implements Disposable {
res.end(isNativeError(e) && e.message);
}
});
this.server.listen(portNumber);
this.server.listen(portNumber, host);
const listening = new Promise<ChatCompletion>((resolve, reject) =>
this.server
.on('listening', () => {
@@ -57,7 +62,10 @@
.on('error', reject)
);
this.server.on('error', (e) => warn(`Chat completion server error: ${e}`));
instance ??= listening;
if (!instance) {
instance = listening;
ChatCompletion.settingsChanged.fire();
}
}

get ready(): Promise<void> {
@@ -71,13 +79,14 @@
}

get url(): string {
return `http://localhost:${this.port}/vscode/copilot`;
return `http://${this.host}:${this.port}/vscode/copilot`;
}

get env(): Record<string, string> {
const pref = ChatCompletion.preferredModel;
if (!pref) return {};

const modelTokenLimit = pref?.maxInputTokens ?? 3926;
const modelTokenLimit = pref.maxInputTokens;
const tokenLimitSetting = ExtensionSettings.navieContextTokenLimit;
const tokenLimits = [modelTokenLimit, tokenLimitSetting].filter(
(limit) => limit && limit > 0
@@ -86,7 +95,8 @@
const env: Record<string, string> = {
OPENAI_API_KEY: this.key,
OPENAI_BASE_URL: this.url,
APPMAP_NAVIE_MODEL: pref?.family ?? 'gpt-4o',
APPMAP_NAVIE_MODEL: pref.family,
APPMAP_NAVIE_COMPLETION_BACKEND: 'openai',
};

if (tokenLimits.length) {
@@ -102,16 +112,16 @@
return ChatCompletion.models[0];
}

static async refreshModels(): Promise<void> {
static async refreshModels(): Promise<boolean> {
const previousBest = this.preferredModel?.id;
ChatCompletion.models = (await vscode.lm.selectChatModels()).sort(
(a, b) => b.maxInputTokens - a.maxInputTokens + b.family.localeCompare(a.family)
);
if (this.preferredModel?.id !== previousBest) this.settingsChanged.fire();
return this.preferredModel?.id !== previousBest;
}

static get instance(): Promise<ChatCompletion | undefined> {
if (!instance) return Promise.resolve(undefined);
static get instance(): Promise<ChatCompletion> | undefined {
if (!instance) return undefined;
return instance;
}

@@ -201,67 +211,112 @@
}

async dispose(): Promise<void> {
if ((await instance) === this) instance = undefined;
if ((await instance) === this) {
instance = undefined;
ChatCompletion.settingsChanged.fire();
}
this.server.close();
}

private static settingsChanged = new vscode.EventEmitter<void>();
static onSettingsChanged = ChatCompletion.settingsChanged.event;

static initialize(context: ExtensionContext) {
// TODO: make the messages and handling generic for all LM extensions
const hasLM = 'lm' in vscode && 'selectChatModels' in vscode.lm;

if (ExtensionSettings.useVsCodeLM && checkAvailability())
context.subscriptions.push(new ChatCompletion());
static async initialize(context: ExtensionContext) {
if (await this.checkConfiguration(context))
context.subscriptions.push(
vscode.lm.onDidChangeChatModels(() => this.checkConfiguration(context))
);

context.subscriptions.push(
vscode.workspace.onDidChangeConfiguration(async (e) => {
if (e.affectsConfiguration('appMap.navie.useVSCodeLM')) {
const instance = await ChatCompletion.instance;
if (!ExtensionSettings.useVsCodeLM && instance) await instance.dispose();
else if (ExtensionSettings.useVsCodeLM && checkAvailability())
context.subscriptions.push(new ChatCompletion());
this.settingsChanged.fire();
}
})
vscode.workspace.onDidChangeConfiguration(
(e) =>
e.affectsConfiguration('appMap.navie.useVSCodeLM') &&
this.checkConfiguration(context, true)
)
);
}

if (hasLM) {
ChatCompletion.refreshModels();
vscode.lm.onDidChangeChatModels(
ChatCompletion.refreshModels,
undefined,
context.subscriptions
);
static async checkConfiguration(context: ExtensionContext, switched = false): Promise<boolean> {
// TODO: make the messages and handling generic for all LM extensions
const hasLM = 'lm' in vscode && 'selectChatModels' in vscode.lm;
const wantsLM = ExtensionSettings.useVsCodeLM;

if (!hasLM) {
if (wantsLM) {
if (switched)
vscode.window.showErrorMessage(
'AppMap: Copilot backend for Navie is enabled, but the LanguageModel API is not available.\nPlease update your VS Code to the latest version.'
);
else if (once(context, 'no-lm-api-available'))
vscode.window.showInformationMessage(
'AppMap: Navie can use Copilot, but the LanguageModel API is not available.\nPlease update your VS Code to the latest version if you want to use it.'
);
}
return hasLM;
}
once.reset(context, 'no-lm-api-available');

if (!wantsLM) {
if (instance) {
await instance.then((i) => i.dispose());
// must have been switched, so show message
vscode.window.showInformationMessage('AppMap: Copilot backend for Navie is disabled.');
once.reset(context, 'chat-completion-ready');
once.reset(context, 'chat-completion-no-models');
}
return hasLM;
}

function checkAvailability() {
if (!hasLM)
vscode.window.showErrorMessage(
'AppMap: VS Code LM backend for Navie is enabled, but the LanguageModel API is not available.\nPlease update your VS Code to the latest version.'
// now it's hasLM and wantsLM
const changed = await this.refreshModels();
if (this.preferredModel) {
if (!instance) {
context.subscriptions.push(new this());
await this.instance;
} else if (changed) ChatCompletion.settingsChanged.fire();
if (switched)
vscode.window.showInformationMessage(
`AppMap: Copilot backend for Navie is enabled, using model: ${this.preferredModel.name}`
);
else if (once(context, 'chat-completion-ready'))
vscode.window.showInformationMessage(
`AppMap: Copilot backend for Navie is ready. Model: ${this.preferredModel.name}`
);
else if (!vscode.extensions.getExtension('github.copilot')) {
once.reset(context, 'chat-completion-no-models');
} else {
if (instance) await instance.then((i) => i.dispose());
if (switched)
vscode.window
.showErrorMessage(
'AppMap: VS Code LM backend for Navie is enabled, but the GitHub Copilot extension is not installed.\nPlease install it from the marketplace and reload the window.',
'AppMap: Copilot backend for Navie is enabled, but no compatible models were found.\nInstall Copilot to continue.',
'Install Copilot'
)
.then((selection) => {
if (selection === 'Install Copilot') {
const odc = vscode.lm.onDidChangeChatModels(() => {
context.subscriptions.push(new ChatCompletion());
ChatCompletion.settingsChanged.fire();
odc.dispose();
});
.then(
(selection) =>
selection === 'Install Copilot' &&
vscode.commands.executeCommand(
'workbench.extensions.installExtension',
'github.copilot'
);
}
});
} else return true;
)
);
else if (once(context, 'chat-completion-no-models'))
vscode.window
.showInformationMessage(
'AppMap: Navie can use Copilot, but no compatible models were found.\nYou can install Copilot to use this feature.',
'Install Copilot'
)
.then(
(selection) =>
selection === 'Install Copilot' &&
vscode.commands.executeCommand(
'workbench.extensions.installExtension',
'github.copilot'
)
);
once.reset(context, 'chat-completion-ready');
}

return hasLM;
}
}

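The env getter above now pins APPMAP_NAVIE_MODEL to the preferred Copilot model and adds APPMAP_NAVIE_COMPLETION_BACKEND=openai, while the proxy server binds to 127.0.0.1. The launcher that consumes these variables lives elsewhere in the extension and is not part of this diff; the sketch below only illustrates, under that assumption, how they might be merged into the spawned Navie process environment.

import { spawn } from 'node:child_process';
import ChatCompletion from './chatCompletion';

// Sketch only: command and args stand in for whatever the extension actually launches.
async function launchNavie(command: string, args: string[]): Promise<void> {
  const chatCompletion = await ChatCompletion.instance; // undefined when the Copilot backend is off
  const env = { ...process.env, ...(chatCompletion?.env ?? {}) };
  // env now carries OPENAI_BASE_URL=http://127.0.0.1:<port>/vscode/copilot, OPENAI_API_KEY,
  // APPMAP_NAVIE_MODEL and APPMAP_NAVIE_COMPLETION_BACKEND=openai for the child process.
  spawn(command, args, { env });
}
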
src/services/navieConfigurationService.ts (9 changes: 9 additions & 0 deletions)
@@ -39,6 +39,15 @@ export default function navieConfigurationService(context: vscode.ExtensionConte
);
}

export async function openAIApiKeyEquals(
extensionContext: vscode.ExtensionContext,
key: string | undefined
): Promise<boolean> {
const { secrets } = extensionContext;
const storedKey = await secrets.get(OPENAI_API_KEY);
return key === storedKey;
}

export async function setOpenAIApiKey(
extensionContext: vscode.ExtensionContext,
key: string | undefined
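
The new openAIApiKeyEquals helper above compares a candidate key against the secret already stored in the extension context. A plausible use, sketched here as an assumption rather than taken from this diff and written as if it lived in this module alongside its existing imports, is to avoid rewriting an unchanged secret.

// Hypothetical caller: only touches secret storage when the key actually changes.
async function updateOpenAIApiKey(
  context: vscode.ExtensionContext,
  key: string | undefined
): Promise<void> {
  if (await openAIApiKeyEquals(context, key)) return;
  await setOpenAIApiKey(context, key);
}
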
src/services/processWatcher.ts (7 changes: 7 additions & 0 deletions)
@@ -76,6 +76,7 @@ export class ProcessWatcher implements vscode.Disposable {

protected _onError: vscode.EventEmitter<Error> = new vscode.EventEmitter<Error>();
protected _onAbort: vscode.EventEmitter<Error> = new vscode.EventEmitter<Error>();
protected _onBeforeRestart: vscode.EventEmitter<void> = new vscode.EventEmitter<void>();

protected shouldRun = false;
protected hasAborted = false;
@@ -101,6 +102,11 @@
return this._onAbort.event;
}

// Fired just before the process is restarted.
public get onBeforeRestart(): vscode.Event<void> {
return this._onBeforeRestart.event;
}

public get id(): ProcessId {
return this.options.id;
}
@@ -169,6 +175,7 @@
}

async restart(): Promise<void> {
this._onBeforeRestart.fire();
await this.stop();
await this.start();
}
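
The new onBeforeRestart event gives consumers a hook that fires before the stop/start cycle. A minimal subscription sketch, with the import path assumed and the callback body purely illustrative:

import * as vscode from 'vscode';
import { ProcessWatcher } from './processWatcher';

function logRestarts(context: vscode.ExtensionContext, watcher: ProcessWatcher): void {
  context.subscriptions.push(
    watcher.onBeforeRestart(() => {
      // Runs before the watched process is stopped and started again.
      console.log(`Process ${watcher.id} is about to restart`);
    })
  );
}
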