Commit c106223
add ChatController tests for sending, followups, editing, and errors
No behavior change, just the addition of tests.
sqs committed Oct 5, 2024
1 parent 711d939 commit c106223
Showing 9 changed files with 549 additions and 154 deletions.
6 changes: 5 additions & 1 deletion lib/shared/src/chat/transcript/index.ts
@@ -25,7 +25,11 @@ export interface SerializedChatInteraction {

 export function serializeChatMessage(chatMessage: ChatMessage): SerializedChatMessage {
     return {
-        ...chatMessage,
+        speaker: chatMessage.speaker,
+        model: chatMessage.model,
+        contextFiles: chatMessage.contextFiles,
+        editorState: chatMessage.editorState,
+        error: chatMessage.error,
         text: chatMessage.text ? chatMessage.text.toString() : undefined,
     }
 }
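The spread is replaced with an explicit field list, so the serialized shape is spelled out at the call site. A minimal sketch of a test that pins that shape down, assuming Vitest, the ps tagged-template helper from prompt-string, and these relative import paths (none of which are part of this commit):

// Sketch only: import paths and the ps helper are assumptions.
import { describe, expect, it } from 'vitest'
import { ps } from '../../prompt/prompt-string'
import { serializeChatMessage } from './index'

describe('serializeChatMessage', () => {
    it('serializes the message with text as a plain string', () => {
        expect(serializeChatMessage({ speaker: 'human', text: ps`hello`, model: 'my-model' })).toEqual({
            speaker: 'human',
            model: 'my-model',
            text: 'hello',
        })
    })
})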
3 changes: 3 additions & 0 deletions lib/shared/src/editor/editorState.ts
@@ -5,6 +5,9 @@ export function setEditorWindowIsFocused(editorWindowIsFocused: () => boolean):
 }
 
 export function editorWindowIsFocused(): boolean {
+    if (process.env.VITEST) {
+        return true
+    }
     if (!_editorWindowIsFocused) {
         throw new Error('must call setEditorWindowIsFocused first')
     }
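The new guard short-circuits under Vitest, which sets process.env.VITEST, so tests that reach editorWindowIsFocused() no longer need to call setEditorWindowIsFocused first. A minimal sketch, with an assumed relative import path:

// Sketch only; the import path is an assumption.
import { expect, it } from 'vitest'
import { editorWindowIsFocused } from './editorState'

it('reports focus without prior setup when running under Vitest', () => {
    expect(editorWindowIsFocused()).toBe(true)
})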
8 changes: 7 additions & 1 deletion lib/shared/src/index.ts
@@ -47,6 +47,7 @@ export {
     modelTier,
     parseModelRef,
     toLegacyModel,
+    FIXTURE_MODEL,
 } from './models/model'
 export {
     type EditModel,
@@ -328,7 +329,12 @@
     type ContextMentionProviderID,
     type ContextMentionProviderMetadata,
 } from './mentions/api'
-export { TokenCounter, getTokenCounterUtils, TokenCounterUtils } from './token/counter'
+export {
+    TokenCounter,
+    getTokenCounterUtils,
+    TokenCounterUtils,
+    useFakeTokenCounterUtils,
+} from './token/counter'
 export { CORPUS_CONTEXT_ALLOCATION as ENHANCED_CONTEXT_ALLOCATION } from './token/constants'
 export { tokensToChars, charsToTokens } from './token/utils'
 export * from './prompt/prompt-string'
6 changes: 6 additions & 0 deletions lib/shared/src/models/model.ts
@@ -242,3 +242,9 @@ export function getServerModelTags(
     }
     return tags
 }
+
+export const FIXTURE_MODEL = createModel({
+    id: 'my-model',
+    usage: [ModelUsage.Chat],
+    tags: [ModelTag.Enterprise],
+})
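FIXTURE_MODEL gives tests a ready-made chat-capable enterprise model without each test calling createModel itself. A sketch of how a test might reference it, assuming the package is consumed as @sourcegraph/cody-shared and that createModel preserves the id, usage, and tags fields on the returned Model:

// Sketch only; the assertions simply restate the fixture defined above.
import { expect, it } from 'vitest'
import { FIXTURE_MODEL, ModelTag, ModelUsage } from '@sourcegraph/cody-shared'

it('exposes a chat-capable enterprise fixture model', () => {
    expect(FIXTURE_MODEL.id).toBe('my-model')
    expect(FIXTURE_MODEL.usage).toContain(ModelUsage.Chat)
    expect(FIXTURE_MODEL.tags).toContain(ModelTag.Enterprise)
})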
37 changes: 35 additions & 2 deletions lib/shared/src/token/counter.ts
@@ -10,14 +10,47 @@ export interface TokenCounterUtils {
     decode(encoded: number[]): string
     countTokens(text: string): number
     countPromptString(text: PromptString): number
-    getMessagesTokenCount(messages: Message[]): number
-    getTokenCountForMessage(message: Message): number
+    getMessagesTokenCount(messages: (Message | undefined)[]): number
+    getTokenCountForMessage(message: Message | undefined): number
 }
 
+let _useFakeTokenCounterUtils: TokenCounterUtils | undefined
+
+/**
+ * @internal For testing only. Importing the weights for the token counter is slow and is not
+ * necessary for most tests.
+ */
+export function useFakeTokenCounterUtils(): void {
+    _useFakeTokenCounterUtils = {
+        encode(text) {
+            return text.split(' ').map(word => word.length)
+        },
+        decode(encoded) {
+            return encoded.map(n => ' '.repeat(n)).join('')
+        },
+        countTokens(text) {
+            return text.split(' ').length
+        },
+        countPromptString(text) {
+            return text.split(' ').length
+        },
+        getMessagesTokenCount(messages) {
+            return messages.reduce((acc, m) => acc + (m?.text?.split(' ').length ?? 0), 0)
+        },
+        getTokenCountForMessage(message) {
+            return message?.text?.split(' ').length ?? 0
+        },
+    }
+}
+
 /**
  * Get the tokenizer, which is lazily-loaded it because it requires reading ~1 MB of tokenizer data.
  */
 export async function getTokenCounterUtils(): Promise<TokenCounterUtils> {
+    if (_useFakeTokenCounterUtils) {
+        return _useFakeTokenCounterUtils
+    }
+
     // This could have been implemented in a separate file that is wholly async-imported, but that
     // carries too much risk of accidental non-async importing.
     if (!_tokenCounterUtilsPromise) {
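Because getTokenCounterUtils now returns the fake when one is installed, a suite can opt out of loading the ~1 MB tokenizer entirely; the whitespace-splitting fake counts 'hello big world' as 3 tokens. A minimal sketch of that flow, assuming the functions are consumed via the @sourcegraph/cody-shared entry point (the re-exports are shown in lib/shared/src/index.ts above):

// Sketch only; the package import path is an assumption.
import { beforeAll, expect, it } from 'vitest'
import { getTokenCounterUtils, useFakeTokenCounterUtils } from '@sourcegraph/cody-shared'

beforeAll(() => {
    // Install the fake before anything asks for the real tokenizer.
    useFakeTokenCounterUtils()
})

it('counts tokens by splitting on spaces', async () => {
    const counter = await getTokenCounterUtils()
    expect(counter.countTokens('hello big world')).toBe(3)
})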
7 changes: 5 additions & 2 deletions vscode/src/chat/agentic/CodyTool.ts
@@ -97,7 +97,7 @@ class SearchTool extends CodyTool {
     }
 
     constructor(
-        private contextRetriever: ContextRetriever,
+        private contextRetriever: Pick<ContextRetriever, 'retrieveContext'>,
         private span: Span
     ) {
         super()
@@ -133,6 +133,9 @@
     }
 }
 
-export function getCodyTools(contextRetriever: ContextRetriever, span: Span): CodyTool[] {
+export function getCodyTools(
+    contextRetriever: Pick<ContextRetriever, 'retrieveContext'>,
+    span: Span
+): CodyTool[] {
     return [new SearchTool(contextRetriever, span), new CliTool(), new FileTool()]
 }
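Narrowing the parameter to Pick<ContextRetriever, 'retrieveContext'> means a test no longer has to construct a full ContextRetriever; any object with a retrieveContext method type-checks. A sketch under the assumptions that retrieveContext resolves to an array of context items, that ContextRetriever lives at ../chat-view/ContextRetriever, and that a no-op OpenTelemetry span is acceptable to the tools under test:

// Sketch only; the import paths and the empty result are assumptions, not part of this commit.
import { trace } from '@opentelemetry/api'
import { getCodyTools } from './CodyTool'
import type { ContextRetriever } from '../chat-view/ContextRetriever'

// The fake only needs the one method the tools call.
const fakeRetriever: Pick<ContextRetriever, 'retrieveContext'> = {
    retrieveContext: async () => [],
}
const tools = getCodyTools(fakeRetriever, trace.getTracer('test').startSpan('test'))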
2 changes: 1 addition & 1 deletion vscode/src/chat/agentic/DeepCody.ts
@@ -49,7 +49,7 @@ export class DeepCodyAgent {

     constructor(
         private readonly chatBuilder: ChatBuilder,
-        private readonly chatClient: ChatClient,
+        private readonly chatClient: Pick<ChatClient, 'chat'>,
         private readonly tools: CodyTool[],
         mentions: ContextItem[] = []
     ) {
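The same narrowing applies to DeepCodyAgent: with chatClient reduced to Pick<ChatClient, 'chat'>, a test can hand it a stub rather than a real client. A sketch, assuming ChatClient is exported from @sourcegraph/cody-shared; Parameters/ReturnType keep the stub aligned with the real method signature, which this diff does not show:

// Sketch only; replace the throwing body with a canned-response implementation in a real test.
import type { ChatClient } from '@sourcegraph/cody-shared'

const fakeChatClient: Pick<ChatClient, 'chat'> = {
    chat: (..._args: Parameters<ChatClient['chat']>): ReturnType<ChatClient['chat']> => {
        throw new Error('stub: not expected to be called in this sketch')
    },
}

The stub then takes the place of the real ChatClient as the second constructor argument.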