diff --git a/vscode/src/chat/agentic/CodyTool.ts b/vscode/src/chat/agentic/CodyTool.ts index 942398a52e2b..b10b2f33607a 100644 --- a/vscode/src/chat/agentic/CodyTool.ts +++ b/vscode/src/chat/agentic/CodyTool.ts @@ -183,7 +183,9 @@ class SearchTool extends CodyTool { const context = await this.contextRetriever.retrieveContext( toStructuredMentions([repo]), PromptString.unsafe_fromLLMResponse(query), - span + span, + undefined, + true ) // Store the search query to avoid running the same query again. this.performedSearch.add(query) diff --git a/vscode/src/chat/agentic/DeepCody.ts b/vscode/src/chat/agentic/DeepCody.ts index 9302ef57164c..0332eaed1917 100644 --- a/vscode/src/chat/agentic/DeepCody.ts +++ b/vscode/src/chat/agentic/DeepCody.ts @@ -4,7 +4,6 @@ import { PromptString, isDefined, logDebug, - modelsService, ps, telemetryRecorder, } from '@sourcegraph/cody-shared' @@ -45,10 +44,7 @@ export class DeepCodyAgent extends CodyChatAgent { chatAbortSignal: AbortSignal, maxLoops = 2 ): Promise { - const fastChatModel = modelsService.getModelByID( - 'anthropic::2024-10-22::claude-3-5-haiku-latest' - ) - this.models.review = fastChatModel?.id ?? this.chatBuilder.selectedModel + this.models.review = this.chatBuilder.selectedModel const startTime = performance.now() const count = await this.reviewLoop(span, chatAbortSignal, maxLoops) diff --git a/vscode/src/chat/agentic/prompts.ts b/vscode/src/chat/agentic/prompts.ts index 7821c33316b5..cb7eca9545e8 100644 --- a/vscode/src/chat/agentic/prompts.ts +++ b/vscode/src/chat/agentic/prompts.ts @@ -29,7 +29,9 @@ In this environment you have access to this set of tools you can use to fetch co 4. The user is working in the VS Code editor on ${getOSPromptString()}. [GOAL] -Determine if you can answer the question with the given context, or if you need more information. 
The output will be processed by a bot to gather the necessary context for the user's question, so skip answering the question directly or comments.` +- Determine if you can answer the question with the given context, or if you need more information. +- Do not provide the actual answer or comments in this step. This is an auto-generated message. +- Your response should only contain the word "CONTEXT_SUFFICIENT" or the appropriate tag(s) and nothing else.` export const CODYAGENT_PROMPTS = { review: REVIEW_PROMPT, diff --git a/vscode/src/chat/chat-view/ContextRetriever.ts b/vscode/src/chat/chat-view/ContextRetriever.ts index 3165595528c7..f38002b6f482 100644 --- a/vscode/src/chat/chat-view/ContextRetriever.ts +++ b/vscode/src/chat/chat-view/ContextRetriever.ts @@ -169,22 +169,32 @@ export class ContextRetriever implements vscode.Disposable { mentions: StructuredMentions, inputTextWithoutContextChips: PromptString, span: Span, - signal?: AbortSignal + signal?: AbortSignal, + skipQueryRewrite = false ): Promise<ContextItem[]> { const roots = await codebaseRootsFromMentions(mentions, signal) - return await this._retrieveContext(roots, inputTextWithoutContextChips, span, signal) + return await this._retrieveContext( roots, inputTextWithoutContextChips, span, signal, skipQueryRewrite ) } private async _retrieveContext( roots: Root[], query: PromptString, span: Span, - signal?: AbortSignal + signal?: AbortSignal, + skipQueryRewrite = false ): Promise<ContextItem[]> { if (roots.length === 0) { return [] } - const rewritten = await rewriteKeywordQuery(this.llms, query, signal) + const rewritten = skipQueryRewrite ? query.toString() : await rewriteKeywordQuery(this.llms, query, signal) const rewrittenQuery = { ...query, rewritten,