Skip to content

Commit

Permalink
set default maxTokens for autocomplete in CompletionProvider and llmFromDescription
Browse files Browse the repository at this point in the history
  • Loading branch information
ferenci84 committed Mar 3, 2025
1 parent ceede79 commit 7eeb5c6
Show file tree
Hide file tree
Showing 3 changed files with 23 additions and 8 deletions.
20 changes: 14 additions & 6 deletions core/autocomplete/CompletionProvider.ts
Original file line number Diff line number Diff line change
Expand Up @@ -175,12 +175,20 @@ export class CompletionProvider {
this.ide.getWorkspaceDirs(),
]);

const { prompt, prefix, suffix, completionOptions } = renderPrompt({
const { prompt, prefix, suffix, completionOptions: _completionOptions } = renderPrompt({
snippetPayload,
workspaceDirs,
helper,
});

// Default maxTokens for autocomplete set in core/llm/llms/index.ts llmFromDescription()
const completionOptions = {
..._completionOptions,
maxTokens: _completionOptions?.maxTokens ||
llm.completionOptions.autoCompleteMaxTokens ||
llm.completionOptions.maxTokens
};

// Completion
let completion: string | undefined = "";

Expand Down Expand Up @@ -220,11 +228,11 @@ export class CompletionProvider {

const processedCompletion = helper.options.transform
? postprocessCompletion({
completion,
prefix: helper.prunedPrefix,
suffix: helper.prunedSuffix,
llm,
})
completion,
prefix: helper.prunedPrefix,
suffix: helper.prunedSuffix,
llm,
})
: completion;

completion = processedCompletion;
Expand Down
1 change: 1 addition & 0 deletions core/index.d.ts
Original file line number Diff line number Diff line change
Expand Up @@ -909,6 +909,7 @@ export interface BaseCompletionOptions {
mirostat?: number;
stop?: string[];
maxTokens?: number;
autoCompleteMaxTokens?: number;
numThreads?: number;
useMmap?: boolean;
keepAlive?: number;
Expand Down
10 changes: 8 additions & 2 deletions core/llm/llms/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ import Cohere from "./Cohere";
import DeepInfra from "./DeepInfra";
import Deepseek from "./Deepseek";
import Fireworks from "./Fireworks";
import NCompass from "./NCompass";
import Flowise from "./Flowise";
import FreeTrial from "./FreeTrial";
import FunctionNetwork from "./FunctionNetwork";
Expand All @@ -35,7 +34,9 @@ import Mistral from "./Mistral";
import MockLLM from "./Mock";
import Moonshot from "./Moonshot";
import Msty from "./Msty";
import NCompass from "./NCompass";
import Nebius from "./Nebius";
import Novita from "./Novita";
import Nvidia from "./Nvidia";
import Ollama from "./Ollama";
import OpenAI from "./OpenAI";
Expand All @@ -49,7 +50,6 @@ import ContinueProxy from "./stubs/ContinueProxy";
import TestLLM from "./Test";
import TextGenWebUI from "./TextGenWebUI";
import Together from "./Together";
import Novita from "./Novita";
import VertexAI from "./VertexAI";
import Vllm from "./Vllm";
import WatsonX from "./WatsonX";
Expand Down Expand Up @@ -112,6 +112,7 @@ export async function llmFromDescription(
writeLog: (log: string) => Promise<void>,
completionOptions?: BaseCompletionOptions,
systemMessage?: string,
isAutocomplete = false
): Promise<BaseLLM | undefined> {
const cls = LLMClasses.find((llm) => llm.providerName === desc.provider);

Expand All @@ -137,6 +138,11 @@ export async function llmFromDescription(
maxTokens:
finalCompletionOptions.maxTokens ??
cls.defaultOptions?.completionOptions?.maxTokens,
autoCompleteMaxTokens:
finalCompletionOptions.autoCompleteMaxTokens ??
finalCompletionOptions.maxTokens ??
cls.defaultOptions?.completionOptions?.autoCompleteMaxTokens ??
256
},
systemMessage,
writeLog,
Expand Down

0 comments on commit 7eeb5c6

Please sign in to comment.