Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

828 streamline chat model configuration info message network call #875

Merged
Show file tree
Hide file tree
Changes from 18 commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
0ebbc2e
convert model_config_ids away from enum
pmarsh-scottlogic Mar 22, 2024
978bd40
rejig the chat models folder and use type instead of interface
pmarsh-scottlogic Mar 22, 2024
104f8dc
convert Model Config ID away from enum in frontend
pmarsh-scottlogic Mar 22, 2024
bc49402
rename to MODEL_CONFIG_ID
pmarsh-scottlogic Mar 22, 2024
f9382ce
adds detailed backend validation on handleConfigureModel
pmarsh-scottlogic Mar 22, 2024
644cd38
adds first modelController unit test
pmarsh-scottlogic Mar 22, 2024
b6ed9be
clarifies error response message
pmarsh-scottlogic Mar 22, 2024
f5975b5
fills in unhappy path tests
pmarsh-scottlogic Mar 22, 2024
47097a0
deletes repeated unit test
pmarsh-scottlogic Mar 22, 2024
309efe0
now returns resulting info message
pmarsh-scottlogic Mar 22, 2024
b4f9885
update modelController test according to its new function
pmarsh-scottlogic Mar 22, 2024
b466448
chatService now extracts the resulting chat info message and returns it
pmarsh-scottlogic Mar 22, 2024
2fa7519
makes frontend use the resultingChatInfoMessage
pmarsh-scottlogic Mar 22, 2024
2ac42df
renames resultingChatMessage to chatInfoMessage
pmarsh-scottlogic Mar 25, 2024
0de0076
move maxValue calculation below validation for configId
pmarsh-scottlogic Mar 25, 2024
7839a32
defines chatModelConfigurations according to existing type
pmarsh-scottlogic Mar 25, 2024
e907458
check response status before trying to convert json
pmarsh-scottlogic Mar 25, 2024
b782656
merge dev
pmarsh-scottlogic Mar 25, 2024
b245977
removes redundant else statement
pmarsh-scottlogic Mar 26, 2024
1a4431c
merge dev
pmarsh-scottlogic Mar 26, 2024
a70205b
combine two identical response types to become chatInfoMessageResponse
pmarsh-scottlogic Mar 26, 2024
c51a48d
fixes type export
pmarsh-scottlogic Mar 27, 2024
ebac4a0
Merge branch 'dev' into 828-streamline-chat-model-configuration-info-…
pmarsh-scottlogic Mar 27, 2024
3961cd5
merge dev
pmarsh-scottlogic Mar 28, 2024
cff8d7d
fix import
pmarsh-scottlogic Mar 28, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
53 changes: 45 additions & 8 deletions backend/src/controller/modelController.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,12 @@ import { Response } from 'express';

import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
import { OpenAiSetModelRequest } from '@src/models/api/OpenAiSetModelRequest';
import { MODEL_CONFIG } from '@src/models/chat';
import { MODEL_CONFIG_ID, modelConfigIds } from '@src/models/chat';
import { ChatInfoMessage } from '@src/models/chatMessage';
import { LEVEL_NAMES } from '@src/models/level';
import { pushMessageToHistory } from '@src/utils/chat';

import { sendErrorResponse } from './handleError';

function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
const { model } = req.body;
Expand All @@ -19,16 +24,48 @@ function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
}

function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
const configId = req.body.configId as MODEL_CONFIG | undefined;
const configId = req.body.configId as MODEL_CONFIG_ID | undefined;
const value = req.body.value;
const maxValue = configId === MODEL_CONFIG.TOP_P ? 1 : 2;

if (configId && value !== undefined && value >= 0 && value <= maxValue) {
req.session.chatModel.configuration[configId] = value;
res.status(200).send();
} else {
res.status(400).send();
if (!configId) {
pmarsh-scottlogic marked this conversation as resolved.
Show resolved Hide resolved
sendErrorResponse(res, 400, 'Missing configId');
return;
}

if (!modelConfigIds.includes(configId)) {
sendErrorResponse(res, 400, 'Invalid configId');
return;
}

if (!Number.isFinite(value) || value === undefined) {
sendErrorResponse(res, 400, 'Missing or invalid value');
return;
}

const maxValue = configId === 'topP' ? 1 : 2;

if (value < 0 || value > maxValue) {
sendErrorResponse(
res,
400,
`Value should be between 0 and ${maxValue} for ${configId}`
);
return;
}

req.session.chatModel.configuration[configId] = value;

const chatInfoMessage = {
infoMessage: `changed ${configId} to ${value}`,
chatMessageType: 'GENERIC_INFO',
} as ChatInfoMessage;
req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory =
pushMessageToHistory(
req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory,
chatInfoMessage
);

res.status(200).send({ chatInfoMessage });
}

export { handleSetModel, handleConfigureModel };
4 changes: 3 additions & 1 deletion backend/src/models/api/OpenAiConfigureModelRequest.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
import { Request } from 'express';

import { ChatMessage } from '@src/models/chatMessage';

export type OpenAiConfigureModelRequest = Request<
never,
never,
null | { chatInfoMessage: ChatMessage },
{
configId?: string;
value?: number;
Expand Down
36 changes: 18 additions & 18 deletions backend/src/models/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,24 +17,23 @@ enum CHAT_MODELS {
GPT_3_5_TURBO_16K_0613 = 'gpt-3.5-turbo-16k-0613',
}

enum MODEL_CONFIG {
TEMPERATURE = 'temperature',
TOP_P = 'topP',
FREQUENCY_PENALTY = 'frequencyPenalty',
PRESENCE_PENALTY = 'presencePenalty',
}

interface ChatModel {
type ChatModel = {
id: CHAT_MODELS;
configuration: ChatModelConfiguration;
}
configuration: ChatModelConfigurations;
};

interface ChatModelConfiguration {
temperature: number;
topP: number;
frequencyPenalty: number;
presencePenalty: number;
}
const modelConfigIds = [
pmarsh-scottlogic marked this conversation as resolved.
Show resolved Hide resolved
'temperature',
'topP',
'frequencyPenalty',
'presencePenalty',
] as const;

type MODEL_CONFIG_ID = (typeof modelConfigIds)[number];

type ChatModelConfigurations = {
Copy link
Member

@chriswilty chriswilty Mar 26, 2024

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

There must be a reason you pluralised this, right? Is there a clash with another type, or did you feel it made more sense as a plural? Or did you do so to match the frontend code?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I prefer it that way.

If each of top p, `frequency penalty etc are one configuration, then the set of all of them should be called configurations.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That's fine, but then export it as that - currently you're exporting it as ChatModelConfiguration.

[key in MODEL_CONFIG_ID]: number;
};

interface DefenceReport {
blockedReason: string | null;
Expand Down Expand Up @@ -122,7 +121,7 @@ export type {
ChatGptReply,
ChatMalicious,
ChatModel,
ChatModelConfiguration,
ChatModelConfigurations as ChatModelConfiguration,
ChatResponse,
LevelHandlerResponse,
ChatHttpResponse,
Expand All @@ -131,5 +130,6 @@ export type {
ToolCallResponse,
MessageTransformation,
SingleDefenceReport,
MODEL_CONFIG_ID,
};
export { CHAT_MODELS, MODEL_CONFIG, defaultChatModel };
export { CHAT_MODELS, defaultChatModel, modelConfigIds };
156 changes: 156 additions & 0 deletions backend/test/unit/controller/modelController.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,156 @@
import { expect, jest, test, describe } from '@jest/globals';
import { Response } from 'express';

import { handleConfigureModel } from '@src/controller/modelController';
import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
import { modelConfigIds } from '@src/models/chat';
import { ChatMessage } from '@src/models/chatMessage';
import { LEVEL_NAMES, LevelState } from '@src/models/level';

/**
 * Builds a minimal Express Response test double: `send` records its
 * argument, `status` chains (returns the mock itself) like the real thing.
 */
function responseMock() {
	const mockedResponse = {
		send: jest.fn(),
		status: jest.fn().mockReturnThis(),
	};
	return mockedResponse as unknown as Response;
}

describe('handleConfigureModel', () => {
	test('WHEN passed sensible parameters THEN configures model AND adds info message to chat history AND responds with info message', () => {
		// Only the sandbox level (last entry) needs a real chatHistory here.
		const request = {
			body: {
				configId: 'topP',
				value: 0.5,
			},
			session: {
				chatModel: {
					configuration: {
						temperature: 0.0,
						topP: 0.0,
						presencePenalty: 0.0,
						frequencyPenalty: 0.0,
					},
				},
				levelState: [{}, {}, {}, { chatHistory: [] } as unknown as LevelState],
			},
		} as OpenAiConfigureModelRequest;
		const response = responseMock();

		handleConfigureModel(request, response);

		expect(response.status).toHaveBeenCalledWith(200);
		expect(request.session.chatModel.configuration.topP).toBe(0.5);

		const expectedInfoMessage = {
			infoMessage: 'changed topP to 0.5',
			chatMessageType: 'GENERIC_INFO',
		} as ChatMessage;
		expect(
			request.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory.at(-1)
		).toEqual(expectedInfoMessage);
		expect(response.send).toHaveBeenCalledWith({
			chatInfoMessage: expectedInfoMessage,
		});
	});

	test('WHEN missing configId THEN does not configure model', () => {
		const request = {
			body: {
				value: 0.5,
			},
		} as OpenAiConfigureModelRequest;
		const response = responseMock();

		handleConfigureModel(request, response);

		expect(response.status).toHaveBeenCalledWith(400);
		expect(response.send).toHaveBeenCalledWith('Missing configId');
	});

	test('WHEN configId is invalid THEN does not configure model', () => {
		const request = {
			body: {
				configId: 'invalid config id',
				value: 0.5,
			},
		} as OpenAiConfigureModelRequest;
		const response = responseMock();

		handleConfigureModel(request, response);

		expect(response.status).toHaveBeenCalledWith(400);
		expect(response.send).toHaveBeenCalledWith('Invalid configId');
	});

	test('WHEN value is missing THEN does not configure model', () => {
		const request = {
			body: {
				configId: 'topP',
			},
		} as unknown as OpenAiConfigureModelRequest;
		const response = responseMock();

		handleConfigureModel(request, response);

		expect(response.status).toHaveBeenCalledWith(400);
		expect(response.send).toHaveBeenCalledWith('Missing or invalid value');
	});

	test('WHEN value is invalid THEN does not configure model', () => {
		const request = {
			body: {
				configId: 'topP',
				value: 'invalid value',
			},
		} as unknown as OpenAiConfigureModelRequest;
		const response = responseMock();

		handleConfigureModel(request, response);

		expect(response.status).toHaveBeenCalledWith(400);
		expect(response.send).toHaveBeenCalledWith('Missing or invalid value');
	});

	test.each(modelConfigIds)(
		'GIVEN configId is %s WHEN value is below range THEN does not configure model',
		(configId) => {
			const request = {
				body: {
					configId,
					value: -1,
				},
			} as unknown as OpenAiConfigureModelRequest;
			const response = responseMock();

			handleConfigureModel(request, response);

			// topP is capped at 1; every other setting allows up to 2.
			const expectedMaxValue = configId === 'topP' ? 1 : 2;

			expect(response.status).toHaveBeenCalledWith(400);
			expect(response.send).toHaveBeenCalledWith(
				`Value should be between 0 and ${expectedMaxValue} for ${configId}`
			);
		}
	);

	test.each(modelConfigIds)(
		'GIVEN configId is %s WHEN value is above range THEN does not configure model',
		(configId) => {
			// topP is capped at 1; every other setting allows up to 2.
			const expectedMaxValue = configId === 'topP' ? 1 : 2;

			const request = {
				body: {
					configId,
					value: expectedMaxValue + 1,
				},
			} as unknown as OpenAiConfigureModelRequest;
			const response = responseMock();

			handleConfigureModel(request, response);

			expect(response.status).toHaveBeenCalledWith(400);
			expect(response.send).toHaveBeenCalledWith(
				`Value should be between 0 and ${expectedMaxValue} for ${configId}`
			);
		}
	);
});
5 changes: 4 additions & 1 deletion frontend/src/components/ControlPanel/ControlPanel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ import DefenceBox from '@src/components/DefenceBox/DefenceBox';
import DocumentViewButton from '@src/components/DocumentViewer/DocumentViewButton';
import ModelBox from '@src/components/ModelBox/ModelBox';
import DetailElement from '@src/components/ThemedButtons/DetailElement';
import { ChatModel } from '@src/models/chat';
import { ChatMessage, ChatModel } from '@src/models/chat';
import {
DEFENCE_ID,
DefenceConfigItem,
Expand All @@ -25,6 +25,7 @@ function ControlPanel({
setDefenceConfiguration,
openDocumentViewer,
addInfoMessage,
addChatMessage,
}: {
currentLevel: LEVEL_NAMES;
defences: Defence[];
Expand All @@ -42,6 +43,7 @@ function ControlPanel({
) => Promise<boolean>;
openDocumentViewer: () => void;
addInfoMessage: (message: string) => void;
addChatMessage: (chatMessage: ChatMessage) => void;
}) {
const configurableDefences =
currentLevel === LEVEL_NAMES.SANDBOX
Expand Down Expand Up @@ -100,6 +102,7 @@ function ControlPanel({
setChatModelId={setChatModelId}
chatModelOptions={chatModelOptions}
addInfoMessage={addInfoMessage}
addChatMessage={addChatMessage}
/>
)}
</DetailElement>
Expand Down
1 change: 1 addition & 0 deletions frontend/src/components/MainComponent/MainBody.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ function MainBody({
setDefenceConfiguration={setDefenceConfiguration}
openDocumentViewer={openDocumentViewer}
addInfoMessage={addInfoMessage}
addChatMessage={addChatMessage}
/>
</div>
<div className="centre-area">
Expand Down
6 changes: 4 additions & 2 deletions frontend/src/components/ModelBox/ModelBox.tsx
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { ChatModel } from '@src/models/chat';
import { ChatMessage, ChatModel } from '@src/models/chat';

import ModelConfiguration from './ModelConfiguration';
import ModelSelection from './ModelSelection';
Expand All @@ -10,11 +10,13 @@ function ModelBox({
setChatModelId,
chatModelOptions,
addInfoMessage,
addChatMessage,
}: {
chatModel?: ChatModel;
setChatModelId: (modelId: string) => void;
chatModelOptions: string[];
addInfoMessage: (message: string) => void;
addChatMessage: (chatMessage: ChatMessage) => void;
}) {
return (
<div className="model-box">
Expand All @@ -26,7 +28,7 @@ function ModelBox({
/>
<ModelConfiguration
chatModel={chatModel}
addInfoMessage={addInfoMessage}
addChatMessage={addChatMessage}
/>
</div>
);
Expand Down
Loading
Loading