From 0ebbc2e4b66c5fc6e1301814fbb401119b18edc6 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:00:27 +0000
Subject: [PATCH 01/21] convert model_config_ids away from enum

---
 backend/src/controller/modelController.ts |  7 ++++---
 backend/src/models/chat.ts                | 17 ++++++++++-------
 2 files changed, 14 insertions(+), 10 deletions(-)

diff --git a/backend/src/controller/modelController.ts b/backend/src/controller/modelController.ts
index fd6bf2b7c..5fe862870 100644
--- a/backend/src/controller/modelController.ts
+++ b/backend/src/controller/modelController.ts
@@ -2,7 +2,7 @@ import { Response } from 'express';
 
 import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
 import { OpenAiSetModelRequest } from '@src/models/api/OpenAiSetModelRequest';
-import { MODEL_CONFIG } from '@src/models/chat';
+import { MODEL_CONFIG_IDS } from '@src/models/chat';
 
 function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
 	const { model } = req.body;
@@ -19,9 +19,10 @@ function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
 }
 
 function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
-	const configId = req.body.configId as MODEL_CONFIG | undefined;
+	const configId = req.body.configId as MODEL_CONFIG_IDS | undefined;
 	const value = req.body.value;
-	const maxValue = configId === MODEL_CONFIG.TOP_P ? 1 : 2;
+
+	const maxValue = configId === 'topP' ? 1 : 2;
 
 	if (configId && value !== undefined && value >= 0 && value <= maxValue) {
 		req.session.chatModel.configuration[configId] = value;
diff --git a/backend/src/models/chat.ts b/backend/src/models/chat.ts
index 9b9e7d321..b44b48387 100644
--- a/backend/src/models/chat.ts
+++ b/backend/src/models/chat.ts
@@ -17,12 +17,14 @@ enum CHAT_MODELS {
 	GPT_3_5_TURBO_16K_0613 = 'gpt-3.5-turbo-16k-0613',
 }
 
-enum MODEL_CONFIG {
-	TEMPERATURE = 'temperature',
-	TOP_P = 'topP',
-	FREQUENCY_PENALTY = 'frequencyPenalty',
-	PRESENCE_PENALTY = 'presencePenalty',
-}
+const modelConfigIds = [
+	'temperature',
+	'topP',
+	'frequencyPenalty',
+	'presencePenalty',
+] as const;
+
+type MODEL_CONFIG_IDS = (typeof modelConfigIds)[number];
 
 interface ChatModel {
 	id: CHAT_MODELS;
@@ -131,5 +133,6 @@ export type {
 	ToolCallResponse,
 	MessageTransformation,
 	SingleDefenceReport,
+	MODEL_CONFIG_IDS,
 };
-export { CHAT_MODELS, MODEL_CONFIG, defaultChatModel };
+export { CHAT_MODELS, defaultChatModel, modelConfigIds };
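
Note on the pattern this first commit introduces: an "as const" array gives a single declaration that works at both the value level and the type level, which is the main thing the enum could not offer cleanly. A minimal standalone sketch of the idea (not part of the patch):

    // A readonly tuple of string literals...
    const modelConfigIds = [
    	'temperature',
    	'topP',
    	'frequencyPenalty',
    	'presencePenalty',
    ] as const;

    // ...indexed by number yields the union of its members:
    type MODEL_CONFIG_IDS = (typeof modelConfigIds)[number];
    // = 'temperature' | 'topP' | 'frequencyPenalty' | 'presencePenalty'

    // Unlike an enum, the same array also drives runtime checks:
    function isModelConfigId(id: string): id is MODEL_CONFIG_IDS {
    	return (modelConfigIds as readonly string[]).includes(id);
    }

Patch 05 below relies on exactly this: modelConfigIds is exported so the controller can validate incoming ids with includes.
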
From 978bd407212f37c17c2a66f48e5fcaa6c2b59758 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:05:58 +0000
Subject: [PATCH 02/21] rejig the chat models folder and use type instead of
 interface

---
 backend/src/models/chat.ts | 25 +++++++++++++------------
 1 file changed, 13 insertions(+), 12 deletions(-)

diff --git a/backend/src/models/chat.ts b/backend/src/models/chat.ts
index b44b48387..c3cb20df8 100644
--- a/backend/src/models/chat.ts
+++ b/backend/src/models/chat.ts
@@ -17,6 +17,19 @@ enum CHAT_MODELS {
 	GPT_3_5_TURBO_16K_0613 = 'gpt-3.5-turbo-16k-0613',
 }
 
+type ChatModel = {
+	id: CHAT_MODELS;
+	configuration: ChatModelConfiguration;
+};
+
+type ChatModelConfiguration = {
+	temperature: number;
+	topP: number;
+	frequencyPenalty: number;
+	presencePenalty: number;
+};
+
+// these must match the above type because they are used for indexing
 const modelConfigIds = [
 	'temperature',
 	'topP',
@@ -26,18 +39,6 @@ const modelConfigIds = [
 	'presencePenalty',
 ] as const;
 
 type MODEL_CONFIG_IDS = (typeof modelConfigIds)[number];
 
-interface ChatModel {
-	id: CHAT_MODELS;
-	configuration: ChatModelConfiguration;
-}
-
-interface ChatModelConfiguration {
-	temperature: number;
-	topP: number;
-	frequencyPenalty: number;
-	presencePenalty: number;
-}
-
 interface DefenceReport {
 	blockedReason: string | null;
 	isBlocked: boolean;
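
The switch from interface to type here is not only stylistic: patch 16 later rewrites the configuration shape as a mapped type over the config-id union, and a mapped-type body is only legal in a type alias. For illustration (sketch only, assuming the MODEL_CONFIG_IDS union from patch 01):

    // legal: a type alias may have a mapped-type body
    type ChatModelConfigurations = {
    	[key in MODEL_CONFIG_IDS]: number;
    };

    // not legal: an interface cannot be declared with a mapped-type body
    // interface ChatModelConfigurations { [key in MODEL_CONFIG_IDS]: number; }
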
From 104f8dcb7d6d203bae4e2c3d104e42dafdcd425d Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:19:03 +0000
Subject: [PATCH 03/21] convert Model Config ID away from enum in frontend

---
 .../ModelBox/ModelConfiguration.tsx       | 14 ++++-----
 .../ModelBox/ModelConfigurationSlider.tsx |  7 +++--
 frontend/src/models/chat.ts               | 29 ++++++++++---------
 frontend/src/service/chatService.ts       |  4 +--
 4 files changed, 30 insertions(+), 24 deletions(-)

diff --git a/frontend/src/components/ModelBox/ModelConfiguration.tsx b/frontend/src/components/ModelBox/ModelConfiguration.tsx
index 149e29335..e3ba14237 100644
--- a/frontend/src/components/ModelBox/ModelConfiguration.tsx
+++ b/frontend/src/components/ModelBox/ModelConfiguration.tsx
@@ -3,7 +3,7 @@ import { useEffect, useState } from 'react';
 import {
 	ChatModel,
 	CustomChatModelConfiguration,
-	MODEL_CONFIG,
+	MODEL_CONFIG_ID,
 } from '@src/models/chat';
 import { chatService } from '@src/service';
 
@@ -13,7 +13,7 @@ import './ModelConfiguration.css';
 
 const DEFAULT_CONFIGS: CustomChatModelConfiguration[] = [
 	{
-		id: MODEL_CONFIG.TEMPERATURE,
+		id: 'temperature',
 		name: 'Model Temperature',
 		info: 'Controls the randomness of the model. Lower means more deterministic, higher means more surprising. Default is 1.',
 		value: 1,
@@ -21,7 +21,7 @@ const DEFAULT_CONFIGS: CustomChatModelConfiguration[] = [
 		max: 2,
 	},
 	{
-		id: MODEL_CONFIG.TOP_P,
+		id: 'topP',
 		name: 'Top P',
 		info: 'Controls how many different words or phrases the language model considers when it’s trying to predict the next word. Default is 1. ',
 		value: 1,
@@ -29,7 +29,7 @@ const DEFAULT_CONFIGS: CustomChatModelConfiguration[] = [
 		max: 1,
 	},
 	{
-		id: MODEL_CONFIG.PRESENCE_PENALTY,
+		id: 'presencePenalty',
 		name: 'Presence Penalty',
 		info: 'Controls diversity of text generation. Higher presence penalty increases likelihood of using new words. Default is 0.',
 		value: 0,
@@ -37,7 +37,7 @@ const DEFAULT_CONFIGS: CustomChatModelConfiguration[] = [
 		max: 2,
 	},
 	{
-		id: MODEL_CONFIG.FREQUENCY_PENALTY,
+		id: 'frequencyPenalty',
 		name: 'Frequency Penalty',
 		info: 'Controls diversity of text generation. Higher frequency penalty decreases likelihood of using the same words. Default is 0.',
 		value: 0,
@@ -56,7 +56,7 @@ function ModelConfiguration({
 	const [customChatModelConfigs, setCustomChatModel] =
 		useState<CustomChatModelConfiguration[]>(DEFAULT_CONFIGS);
 
-	function setCustomChatModelByID(id: MODEL_CONFIG, value: number) {
+	function setCustomChatModelByID(id: MODEL_CONFIG_ID, value: number) {
 		setCustomChatModel((prev) => {
 			return prev.map((config) => {
 				if (config.id === id) {
@@ -67,7 +67,7 @@ function ModelConfiguration({
 		});
 	}
 
-	function updateConfigValue(id: MODEL_CONFIG, newValue: number) {
+	function updateConfigValue(id: MODEL_CONFIG_ID, newValue: number) {
 		const prevValue = customChatModelConfigs.find(
 			(config) => config.id === id
 		)?.value;
diff --git a/frontend/src/components/ModelBox/ModelConfigurationSlider.tsx b/frontend/src/components/ModelBox/ModelConfigurationSlider.tsx
index e679ecad3..1be815d35 100644
--- a/frontend/src/components/ModelBox/ModelConfigurationSlider.tsx
+++ b/frontend/src/components/ModelBox/ModelConfigurationSlider.tsx
@@ -1,7 +1,10 @@
 import { Slider } from '@mui/material';
 import { SyntheticEvent, useEffect, useState } from 'react';
 
-import { CustomChatModelConfiguration, MODEL_CONFIG } from '@src/models/chat';
+import {
+	CustomChatModelConfiguration,
+	MODEL_CONFIG_ID,
+} from '@src/models/chat';
 
 import './ModelConfigurationSlider.css';
 
@@ -10,7 +13,7 @@ function ModelConfigurationSlider({
 	onConfigChanged,
 }: {
 	config: CustomChatModelConfiguration;
-	onConfigChanged: (id: MODEL_CONFIG, newValue: number) => void;
+	onConfigChanged: (id: MODEL_CONFIG_ID, newValue: number) => void;
 }) {
 	const [value, setValue] = useState(config.value);
 
diff --git a/frontend/src/models/chat.ts b/frontend/src/models/chat.ts
index 4fa18c496..72d93aa58 100644
--- a/frontend/src/models/chat.ts
+++ b/frontend/src/models/chat.ts
@@ -15,27 +15,30 @@ type CHAT_MESSAGE_TYPE =
 	| 'ERROR_MSG'
 	| 'RESET_LEVEL';
 
-enum MODEL_CONFIG {
-	TEMPERATURE = 'temperature',
-	TOP_P = 'topP',
-	FREQUENCY_PENALTY = 'frequencyPenalty',
-	PRESENCE_PENALTY = 'presencePenalty',
-}
-
-interface ChatModel {
+type ChatModel = {
 	id: string;
 	configuration: ChatModelConfigurations;
-}
+};
 
-interface ChatModelConfigurations {
+type ChatModelConfigurations = {
 	temperature: number;
 	topP: number;
 	frequencyPenalty: number;
 	presencePenalty: number;
-}
+};
+
+// these must match the above type because they are used for indexing
+const modelConfigIds = [
+	'temperature',
+	'topP',
+	'frequencyPenalty',
+	'presencePenalty',
+] as const;
+
+type MODEL_CONFIG_ID = (typeof modelConfigIds)[number];
 
 interface CustomChatModelConfiguration {
-	id: MODEL_CONFIG;
+	id: MODEL_CONFIG_ID;
 	name: string;
 	info: string;
 	value: number;
@@ -94,5 +97,5 @@ export type {
 	ChatModelConfigurations,
 	CustomChatModelConfiguration,
 	CHAT_MESSAGE_TYPE,
+	MODEL_CONFIG_ID,
 };
-export { MODEL_CONFIG };
diff --git a/frontend/src/service/chatService.ts b/frontend/src/service/chatService.ts
index 97aaaeb08..86694b5fd 100644
--- a/frontend/src/service/chatService.ts
+++ b/frontend/src/service/chatService.ts
@@ -3,7 +3,7 @@ import {
 	ChatMessageDTO,
 	ChatMessage,
 	ChatResponse,
-	MODEL_CONFIG,
+	MODEL_CONFIG_ID,
 } from '@src/models/chat';
 import { LEVEL_NAMES } from '@src/models/level';
 
@@ -75,7 +75,7 @@ async function setGptModel(model: string): Promise<boolean> {
 }
 
 async function configureGptModel(
-	configId: MODEL_CONFIG,
+	configId: MODEL_CONFIG_ID,
 	value: number
 ): Promise<boolean> {
 	const response = await sendRequest(`${PATH}model/configure`, {
From bc49402cdc9be0ff789d22a6e4fb43eec4333985 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:19:19 +0000
Subject: [PATCH 04/21] rename to MODEL_CONFIG_ID

---
 backend/src/controller/modelController.ts | 4 ++--
 backend/src/models/chat.ts                | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/backend/src/controller/modelController.ts b/backend/src/controller/modelController.ts
index 5fe862870..8502b5637 100644
--- a/backend/src/controller/modelController.ts
+++ b/backend/src/controller/modelController.ts
@@ -2,7 +2,7 @@ import { Response } from 'express';
 
 import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
 import { OpenAiSetModelRequest } from '@src/models/api/OpenAiSetModelRequest';
-import { MODEL_CONFIG_IDS } from '@src/models/chat';
+import { MODEL_CONFIG_ID } from '@src/models/chat';
 
 function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
 	const { model } = req.body;
@@ -19,7 +19,7 @@ function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
 }
 
 function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
-	const configId = req.body.configId as MODEL_CONFIG_IDS | undefined;
+	const configId = req.body.configId as MODEL_CONFIG_ID | undefined;
 	const value = req.body.value;
 
 	const maxValue = configId === 'topP' ? 1 : 2;
diff --git a/backend/src/models/chat.ts b/backend/src/models/chat.ts
index c3cb20df8..44998b835 100644
--- a/backend/src/models/chat.ts
+++ b/backend/src/models/chat.ts
@@ -37,7 +37,7 @@ const modelConfigIds = [
 	'presencePenalty',
 ] as const;
 
-type MODEL_CONFIG_IDS = (typeof modelConfigIds)[number];
+type MODEL_CONFIG_ID = (typeof modelConfigIds)[number];
 
 interface DefenceReport {
 	blockedReason: string | null;
 	isBlocked: boolean;
@@ -134,6 +134,6 @@ export type {
 	ToolCallResponse,
 	MessageTransformation,
 	SingleDefenceReport,
-	MODEL_CONFIG_IDS,
+	MODEL_CONFIG_ID,
 };
 export { CHAT_MODELS, defaultChatModel, modelConfigIds };

From f9382ce04294a24dd1794a56d3fb4399ddd0f1a8 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:32:30 +0000
Subject: [PATCH 05/21] adds detailed backend validation on
 handleConfigureModel

---
 backend/src/controller/modelController.ts | 34 +++++++++++++++++++----
 1 file changed, 28 insertions(+), 6 deletions(-)

diff --git a/backend/src/controller/modelController.ts b/backend/src/controller/modelController.ts
index 8502b5637..275e0f9ae 100644
--- a/backend/src/controller/modelController.ts
+++ b/backend/src/controller/modelController.ts
@@ -2,7 +2,9 @@ import { Response } from 'express';
 
 import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
 import { OpenAiSetModelRequest } from '@src/models/api/OpenAiSetModelRequest';
-import { MODEL_CONFIG_ID } from '@src/models/chat';
+import { MODEL_CONFIG_ID, modelConfigIds } from '@src/models/chat';
+
+import { sendErrorResponse } from './handleError';
 
 function handleSetModel(req: OpenAiSetModelRequest, res: Response) {
 	const { model } = req.body;
@@ -24,12 +26,32 @@ function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
 
 	const maxValue = configId === 'topP' ? 1 : 2;
 
-	if (configId && value !== undefined && value >= 0 && value <= maxValue) {
-		req.session.chatModel.configuration[configId] = value;
-		res.status(200).send();
-	} else {
-		res.status(400).send();
+	if (!configId) {
+		sendErrorResponse(res, 400, 'Missing configId');
+		return;
+	}
+
+	if (!modelConfigIds.includes(configId)) {
+		sendErrorResponse(res, 400, 'Invalid configId');
+		return;
 	}
+
+	if (!Number.isFinite(value) || value === undefined) {
+		sendErrorResponse(res, 400, 'Missing value');
+		return;
+	}
+
+	if (value < 0 || value > maxValue) {
+		sendErrorResponse(
+			res,
+			400,
+			`Value should be between 0 and ${maxValue} for ${configId}`
+		);
+		return;
+	}
+
+	req.session.chatModel.configuration[configId] = value;
+	res.status(200).send();
 }
 
 export { handleSetModel, handleConfigureModel };
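
sendErrorResponse is imported from ./handleError but its definition never appears in this series. Judging from the unit tests in patch 08, which assert on res.status and res.send separately, it is presumably a small helper along these lines (an assumption inferred from the call sites, not code from the repository):

    import { Response } from 'express';

    // hypothetical sketch of backend/src/controller/handleError.ts
    function sendErrorResponse(res: Response, statusCode: number, errorMessage: string) {
    	// set the HTTP status, then send the plain-text reason
    	res.status(statusCode).send(errorMessage);
    }
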
From 644cd380576c4695c159df393e254702c4a1415d Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:47:48 +0000
Subject: [PATCH 06/21] adds first modelController unit test

---
 .../unit/controller/modelController.test.ts | 41 +++++++++++++++++++
 1 file changed, 41 insertions(+)
 create mode 100644 backend/test/unit/controller/modelController.test.ts

diff --git a/backend/test/unit/controller/modelController.test.ts b/backend/test/unit/controller/modelController.test.ts
new file mode 100644
index 000000000..9e133962c
--- /dev/null
+++ b/backend/test/unit/controller/modelController.test.ts
@@ -0,0 +1,41 @@
+import { expect, jest, test, describe } from '@jest/globals';
+import { Response } from 'express';
+
+import { handleConfigureModel } from '@src/controller/modelController';
+import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
+
+function responseMock() {
+	return {
+		send: jest.fn(),
+		status: jest.fn().mockReturnThis(),
+	} as unknown as Response;
+}
+
+describe('handleConfigureModel', () => {
+	test('WHEN passed sensible parameters THEN configures model', () => {
+		const req = {
+			body: {
+				configId: 'topP',
+				value: 0.5,
+			},
+			session: {
+				chatModel: {
+					configuration: {
+						temperature: 0.0,
+						topP: 0.0,
+						presencePenalty: 0.0,
+						frequencyPenalty: 0.0,
+					},
+				},
+			},
+		} as OpenAiConfigureModelRequest;
+		const res = responseMock();
+
+		handleConfigureModel(req, res);
+
+		expect(res.status).toHaveBeenCalledWith(200);
+		expect(req.session.chatModel.configuration.topP).toBe(0.5);
+	});
+
+	// then all unhappy paths
+});
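
A detail worth noting in responseMock: status is stubbed with jest.fn().mockReturnThis(), so the mocked status() returns the mock object itself. That keeps the controller's chained res.status(400).send('...') from falling over, while both calls remain individually assertable. Roughly:

    const res = responseMock();
    res.status(400).send('oops'); // status() returns res, so .send() chains
    expect(res.status).toHaveBeenCalledWith(400);
    expect(res.send).toHaveBeenCalledWith('oops');
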
From b6ed9beceea9e63092d25a30cd5ce67835da74dc Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:59:28 +0000
Subject: [PATCH 07/21] clarifies error response message

---
 backend/src/controller/modelController.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/backend/src/controller/modelController.ts b/backend/src/controller/modelController.ts
index 275e0f9ae..ef39ce88f 100644
--- a/backend/src/controller/modelController.ts
+++ b/backend/src/controller/modelController.ts
@@ -37,7 +37,7 @@ function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
 	}
 
 	if (!Number.isFinite(value) || value === undefined) {
-		sendErrorResponse(res, 400, 'Missing value');
+		sendErrorResponse(res, 400, 'Missing or invalid value');
 		return;
 	}
 

From f5975b5c27d4eb7c805d2d7c022cde7d95564b6b Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 10:59:47 +0000
Subject: [PATCH 08/21] fills in unhappy path tests

---
 .../unit/controller/modelController.test.ts | 128 +++++++++++++++++-
 1 file changed, 127 insertions(+), 1 deletion(-)

diff --git a/backend/test/unit/controller/modelController.test.ts b/backend/test/unit/controller/modelController.test.ts
index 9e133962c..18b8353b6 100644
--- a/backend/test/unit/controller/modelController.test.ts
+++ b/backend/test/unit/controller/modelController.test.ts
@@ -3,6 +3,7 @@ import { Response } from 'express';
 
 import { handleConfigureModel } from '@src/controller/modelController';
 import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
+import { modelConfigIds } from '@src/models/chat';
 
 function responseMock() {
 	return {
@@ -37,5 +38,130 @@ describe('handleConfigureModel', () => {
 		expect(req.session.chatModel.configuration.topP).toBe(0.5);
 	});
 
-	// then all unhappy paths
+	test('WHEN passed sensible parameters THEN configures model', () => {
+		const req = {
+			body: {
+				configId: 'topP',
+				value: 0.5,
+			},
+			session: {
+				chatModel: {
+					configuration: {
+						temperature: 0.0,
+						topP: 0.0,
+						presencePenalty: 0.0,
+						frequencyPenalty: 0.0,
+					},
+				},
+			},
+		} as OpenAiConfigureModelRequest;
+		const res = responseMock();
+
+		handleConfigureModel(req, res);
+
+		expect(res.status).toHaveBeenCalledWith(200);
+		expect(req.session.chatModel.configuration.topP).toBe(0.5);
+	});
+
+	test('WHEN missing configId THEN does not configure model', () => {
+		const req = {
+			body: {
+				value: 0.5,
+			},
+		} as OpenAiConfigureModelRequest;
+		const res = responseMock();
+
+		handleConfigureModel(req, res);
+
+		expect(res.status).toHaveBeenCalledWith(400);
+		expect(res.send).toHaveBeenCalledWith('Missing configId');
+	});
+
+	test('WHEN configId is invalid THEN does not configure model', () => {
+		const req = {
+			body: {
+				configId: 'invalid config id',
+				value: 0.5,
+			},
+		} as OpenAiConfigureModelRequest;
+		const res = responseMock();
+
+		handleConfigureModel(req, res);
+
+		expect(res.status).toHaveBeenCalledWith(400);
+		expect(res.send).toHaveBeenCalledWith('Invalid configId');
+	});
+
+	test('WHEN value is missing THEN does not configure model', () => {
+		const req = {
+			body: {
+				configId: 'topP',
+			},
+		} as unknown as OpenAiConfigureModelRequest;
+		const res = responseMock();
+
+		handleConfigureModel(req, res);
+
+		expect(res.status).toHaveBeenCalledWith(400);
+		expect(res.send).toHaveBeenCalledWith('Missing or invalid value');
+	});
+
+	test('WHEN value is invalid THEN does not configure model', () => {
+		const req = {
+			body: {
+				configId: 'topP',
+				value: 'invalid value',
+			},
+		} as unknown as OpenAiConfigureModelRequest;
+		const res = responseMock();
+
+		handleConfigureModel(req, res);
+
+		expect(res.status).toHaveBeenCalledWith(400);
+		expect(res.send).toHaveBeenCalledWith('Missing or invalid value');
+	});
+
+	test.each(modelConfigIds)(
+		'GIVEN configId is %s WHEN value is below range THEN does not configure model',
+		(configId) => {
+			const req = {
+				body: {
+					configId,
+					value: -1,
+				},
+			} as unknown as OpenAiConfigureModelRequest;
+			const res = responseMock();
+
+			handleConfigureModel(req, res);
+
+			const expectedMaxValue = configId === 'topP' ? 1 : 2;
+
+			expect(res.status).toHaveBeenCalledWith(400);
+			expect(res.send).toHaveBeenCalledWith(
+				`Value should be between 0 and ${expectedMaxValue} for ${configId}`
+			);
+		}
+	);
+
+	test.each(modelConfigIds)(
+		'GIVEN configId is %s WHEN value is above range THEN does not configure model',
+		(configId) => {
+			const expectedMaxValue = configId === 'topP' ? 1 : 2;
+
+			const req = {
+				body: {
+					configId,
+					value: expectedMaxValue + 1,
+				},
+			} as unknown as OpenAiConfigureModelRequest;
+			const res = responseMock();
+
+			handleConfigureModel(req, res);
+
+			expect(res.status).toHaveBeenCalledWith(400);
+			expect(res.send).toHaveBeenCalledWith(
+				`Value should be between 0 and ${expectedMaxValue} for ${configId}`
+			);
+		}
+	);
 });
From 47097a042c11e09de88dc1f29f14b5a352956f63 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 11:04:55 +0000
Subject: [PATCH 09/21] deletes repeated unit test

---
 .../unit/controller/modelController.test.ts | 25 -------------------
 1 file changed, 25 deletions(-)

diff --git a/backend/test/unit/controller/modelController.test.ts b/backend/test/unit/controller/modelController.test.ts
index 18b8353b6..9b25770d8 100644
--- a/backend/test/unit/controller/modelController.test.ts
+++ b/backend/test/unit/controller/modelController.test.ts
@@ -38,31 +38,6 @@ describe('handleConfigureModel', () => {
 		expect(req.session.chatModel.configuration.topP).toBe(0.5);
 	});
 
-	test('WHEN passed sensible parameters THEN configures model', () => {
-		const req = {
-			body: {
-				configId: 'topP',
-				value: 0.5,
-			},
-			session: {
-				chatModel: {
-					configuration: {
-						temperature: 0.0,
-						topP: 0.0,
-						presencePenalty: 0.0,
-						frequencyPenalty: 0.0,
-					},
-				},
-			},
-		} as OpenAiConfigureModelRequest;
-		const res = responseMock();
-
-		handleConfigureModel(req, res);
-
-		expect(res.status).toHaveBeenCalledWith(200);
-		expect(req.session.chatModel.configuration.topP).toBe(0.5);
-	});
-
 	test('WHEN missing configId THEN does not configure model', () => {
 		const req = {
 			body: {

From 309efe0ca28be61c155268fb2b7bb01a375d8ae6 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 11:10:37 +0000
Subject: [PATCH 10/21] now returns resulting info message

---
 backend/src/controller/modelController.ts     | 16 +++++++++++++++-
 .../models/api/OpenAiConfigureModelRequest.ts |  4 +++-
 2 files changed, 18 insertions(+), 2 deletions(-)

diff --git a/backend/src/controller/modelController.ts b/backend/src/controller/modelController.ts
index ef39ce88f..e8909c70b 100644
--- a/backend/src/controller/modelController.ts
+++ b/backend/src/controller/modelController.ts
@@ -3,6 +3,9 @@ import { Response } from 'express';
 import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
 import { OpenAiSetModelRequest } from '@src/models/api/OpenAiSetModelRequest';
 import { MODEL_CONFIG_ID, modelConfigIds } from '@src/models/chat';
+import { ChatInfoMessage } from '@src/models/chatMessage';
+import { LEVEL_NAMES } from '@src/models/level';
+import { pushMessageToHistory } from '@src/utils/chat';
 
 import { sendErrorResponse } from './handleError';
 
@@ -51,7 +54,18 @@ function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
 	}
 
 	req.session.chatModel.configuration[configId] = value;
-	res.status(200).send();
+
+	const resultingChatInfoMessage = {
+		infoMessage: `changed ${configId} to ${value}`,
+		chatMessageType: 'GENERIC_INFO',
+	} as ChatInfoMessage;
+	req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory =
+		pushMessageToHistory(
+			req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory,
+			resultingChatInfoMessage
+		);
+
+	res.status(200).send({ resultingChatInfoMessage });
 }
 
 export { handleSetModel, handleConfigureModel };
diff --git a/backend/src/models/api/OpenAiConfigureModelRequest.ts b/backend/src/models/api/OpenAiConfigureModelRequest.ts
index 83d9fdf94..7d02bf7b5 100644
--- a/backend/src/models/api/OpenAiConfigureModelRequest.ts
+++ b/backend/src/models/api/OpenAiConfigureModelRequest.ts
@@ -1,8 +1,10 @@
 import { Request } from 'express';
 
+import { ChatMessage } from '@src/models/chatMessage';
+
 export type OpenAiConfigureModelRequest = Request<
 	never,
-	never,
+	null | { resultingChatInfoMessage: ChatMessage },
 	{
 		configId?: string;
 		value?: number;
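
pushMessageToHistory comes from @src/utils/chat and is not shown in this series. Because its return value is assigned back onto the session rather than relied on for mutation, it presumably appends immutably, something like this sketch (an assumption from the call site; the real helper may also cap the history length):

    // hypothetical sketch of the helper's shape
    function pushMessageToHistory(
    	chatHistory: ChatMessage[],
    	newMessage: ChatMessage
    ): ChatMessage[] {
    	// return a new array rather than mutating the session in place
    	return [...chatHistory, newMessage];
    }
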
From b4f98858ef99706948249c3ea406af1ba7648af6 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 11:17:15 +0000
Subject: [PATCH 11/21] update modelController test according to its new
 function

---
 .../unit/controller/modelController.test.ts | 16 +++++++++++++++-
 1 file changed, 15 insertions(+), 1 deletion(-)

diff --git a/backend/test/unit/controller/modelController.test.ts b/backend/test/unit/controller/modelController.test.ts
index 9b25770d8..070939b3d 100644
--- a/backend/test/unit/controller/modelController.test.ts
+++ b/backend/test/unit/controller/modelController.test.ts
@@ -4,6 +4,8 @@ import { Response } from 'express';
 import { handleConfigureModel } from '@src/controller/modelController';
 import { OpenAiConfigureModelRequest } from '@src/models/api/OpenAiConfigureModelRequest';
 import { modelConfigIds } from '@src/models/chat';
+import { ChatMessage } from '@src/models/chatMessage';
+import { LEVEL_NAMES, LevelState } from '@src/models/level';
 
 function responseMock() {
 	return {
@@ -13,7 +15,7 @@ function responseMock() {
 }
 
 describe('handleConfigureModel', () => {
-	test('WHEN passed sensible parameters THEN configures model', () => {
+	test('WHEN passed sensible parameters THEN configures model AND adds info message to chat history AND responds with info message', () => {
 		const req = {
 			body: {
 				configId: 'topP',
@@ -28,6 +30,7 @@ describe('handleConfigureModel', () => {
 					frequencyPenalty: 0.0,
 				},
 			},
+			levelState: [{}, {}, {}, { chatHistory: [] } as unknown as LevelState],
 		},
 	} as OpenAiConfigureModelRequest;
 	const res = responseMock();
@@ -36,6 +39,17 @@ describe('handleConfigureModel', () => {
 
 		handleConfigureModel(req, res);
 
 		expect(res.status).toHaveBeenCalledWith(200);
 		expect(req.session.chatModel.configuration.topP).toBe(0.5);
+
+		const expectedInfoMessage = {
+			infoMessage: 'changed topP to 0.5',
+			chatMessageType: 'GENERIC_INFO',
+		} as ChatMessage;
+		expect(
+			req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory.at(-1)
+		).toEqual(expectedInfoMessage);
+		expect(res.send).toHaveBeenCalledWith({
+			resultingChatInfoMessage: expectedInfoMessage,
+		});
 	});
 
 	test('WHEN missing configId THEN does not configure model', () => {

From b4664488f71cfd29e55c7734f74fe5fb51c962fb Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 11:20:27 +0000
Subject: [PATCH 12/21] chatService now extracts the resulting chat info
 message and returns it

---
 frontend/src/models/combined.ts     | 6 +++++-
 frontend/src/service/chatService.ts | 9 +++++++--
 2 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/frontend/src/models/combined.ts b/frontend/src/models/combined.ts
index d70589842..26395aa64 100644
--- a/frontend/src/models/combined.ts
+++ b/frontend/src/models/combined.ts
@@ -19,4 +19,8 @@ type LoadLevelResponse = {
 	chatModel?: ChatModel;
 };
 
-export type { StartReponse, LoadLevelResponse };
+type ConfigureGptModelResponse = {
+	resultingChatInfoMessage: ChatMessageDTO;
+};
+
+export type { StartReponse, LoadLevelResponse, ConfigureGptModelResponse };
diff --git a/frontend/src/service/chatService.ts b/frontend/src/service/chatService.ts
index 86694b5fd..727a422aa 100644
--- a/frontend/src/service/chatService.ts
+++ b/frontend/src/service/chatService.ts
@@ -5,6 +5,7 @@ import {
 	ChatResponse,
 	MODEL_CONFIG_ID,
 } from '@src/models/chat';
+import { ConfigureGptModelResponse } from '@src/models/combined';
 import { LEVEL_NAMES } from '@src/models/level';
 
 import { sendRequest } from './backendService';
@@ -77,13 +78,17 @@ async function setGptModel(model: string): Promise<boolean> {
 }
 
 async function configureGptModel(
 	configId: MODEL_CONFIG_ID,
 	value: number
-): Promise<boolean> {
+): Promise<ChatMessage | null> {
 	const response = await sendRequest(`${PATH}model/configure`, {
 		method: 'POST',
 		headers: { 'Content-Type': 'application/json' },
 		body: JSON.stringify({ configId, value }),
 	});
-	return response.status === 200;
+	const { resultingChatInfoMessage } =
+		(await response.json()) as ConfigureGptModelResponse;
+	return response.status === 200
+		? makeChatMessageFromDTO(resultingChatInfoMessage)
+		: null;
 }
 
 async function addInfoMessageToChatHistory(
From 2fa7519dbd74e73643e978244d32fe5ba3757199 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Fri, 22 Mar 2024 11:31:30 +0000
Subject: [PATCH 13/21] makes frontend use the resultingChatInfoMessage

---
 .../components/ControlPanel/ControlPanel.tsx  |  5 ++++-
 .../src/components/MainComponent/MainBody.tsx |  1 +
 frontend/src/components/ModelBox/ModelBox.tsx |  6 ++++--
 .../ModelBox/ModelConfiguration.tsx           | 21 +++++++++++--------
 4 files changed, 21 insertions(+), 12 deletions(-)

diff --git a/frontend/src/components/ControlPanel/ControlPanel.tsx b/frontend/src/components/ControlPanel/ControlPanel.tsx
index 9719bcd25..3d14f3b37 100644
--- a/frontend/src/components/ControlPanel/ControlPanel.tsx
+++ b/frontend/src/components/ControlPanel/ControlPanel.tsx
@@ -2,7 +2,7 @@ import { DEFENCES_HIDDEN_LEVEL3_IDS, MODEL_DEFENCES } from '@src/Defences';
 import DefenceBox from '@src/components/DefenceBox/DefenceBox';
 import DocumentViewButton from '@src/components/DocumentViewer/DocumentViewButton';
 import ModelBox from '@src/components/ModelBox/ModelBox';
-import { ChatModel } from '@src/models/chat';
+import { ChatMessage, ChatModel } from '@src/models/chat';
 import {
 	DEFENCE_ID,
 	DefenceConfigItem,
@@ -24,6 +24,7 @@ function ControlPanel({
 	setDefenceConfiguration,
 	openDocumentViewer,
 	addInfoMessage,
+	addChatMessage,
 }: {
 	currentLevel: LEVEL_NAMES;
 	defences: Defence[];
@@ -41,6 +42,7 @@ function ControlPanel({
 	) => Promise<boolean>;
 	openDocumentViewer: () => void;
 	addInfoMessage: (message: string) => void;
+	addChatMessage: (chatMessage: ChatMessage) => void;
 }) {
 	const configurableDefences =
 		currentLevel === LEVEL_NAMES.SANDBOX
@@ -103,6 +105,7 @@ function ControlPanel({
 					setChatModelId={setChatModelId}
 					chatModelOptions={chatModelOptions}
 					addInfoMessage={addInfoMessage}
+					addChatMessage={addChatMessage}
 				/>
 			)}
diff --git a/frontend/src/components/MainComponent/MainBody.tsx b/frontend/src/components/MainComponent/MainBody.tsx
index 70e937bc0..ae6066231 100644
--- a/frontend/src/components/MainComponent/MainBody.tsx
+++ b/frontend/src/components/MainComponent/MainBody.tsx
@@ -71,6 +71,7 @@ function MainBody({
 					setDefenceConfiguration={setDefenceConfiguration}
 					openDocumentViewer={openDocumentViewer}
 					addInfoMessage={addInfoMessage}
+					addChatMessage={addChatMessage}
 				/>
 
diff --git a/frontend/src/components/ModelBox/ModelBox.tsx b/frontend/src/components/ModelBox/ModelBox.tsx
index 09f66ba43..97ffba269 100644
--- a/frontend/src/components/ModelBox/ModelBox.tsx
+++ b/frontend/src/components/ModelBox/ModelBox.tsx
@@ -1,4 +1,4 @@
-import { ChatModel } from '@src/models/chat';
+import { ChatMessage, ChatModel } from '@src/models/chat';
 
 import ModelConfiguration from './ModelConfiguration';
 import ModelSelection from './ModelSelection';
@@ -10,11 +10,13 @@ function ModelBox({
 	setChatModelId,
 	chatModelOptions,
 	addInfoMessage,
+	addChatMessage,
 }: {
 	chatModel?: ChatModel;
 	setChatModelId: (modelId: string) => void;
 	chatModelOptions: string[];
 	addInfoMessage: (message: string) => void;
+	addChatMessage: (chatMessage: ChatMessage) => void;
 }) {
 	return (
@@ -26,7 +28,7 @@ function ModelBox({
 			/>
 			<ModelConfiguration
 				chatModel={chatModel}
-				addInfoMessage={addInfoMessage}
+				addChatMessage={addChatMessage}
 			/>
 		</div>
 	);
diff --git a/frontend/src/components/ModelBox/ModelConfiguration.tsx b/frontend/src/components/ModelBox/ModelConfiguration.tsx
index e3ba14237..99642b577 100644
--- a/frontend/src/components/ModelBox/ModelConfiguration.tsx
+++ b/frontend/src/components/ModelBox/ModelConfiguration.tsx
@@ -1,6 +1,7 @@
 import { useEffect, useState } from 'react';
 
 import {
+	ChatMessage,
 	ChatModel,
 	CustomChatModelConfiguration,
 	MODEL_CONFIG_ID,
@@ -48,10 +49,10 @@ const DEFAULT_CONFIGS: CustomChatModelConfiguration[] = [
 
 function ModelConfiguration({
 	chatModel,
-	addInfoMessage,
+	addChatMessage,
 }: {
 	chatModel?: ChatModel;
-	addInfoMessage: (message: string) => void;
+	addChatMessage: (chatMessage: ChatMessage) => void;
 }) {
 	const [customChatModelConfigs, setCustomChatModel] =
 		useState<CustomChatModelConfiguration[]>(DEFAULT_CONFIGS);
@@ -77,13 +78,15 @@ function ModelConfiguration({
 
 		setCustomChatModelByID(id, newValue);
 
-		void chatService.configureGptModel(id, newValue).then((success) => {
-			if (success) {
-				addInfoMessage(`changed ${id} to ${newValue}`);
-			} else {
-				setCustomChatModelByID(id, prevValue);
-			}
-		});
+		void chatService
+			.configureGptModel(id, newValue)
+			.then((resultingChatInfoMessage) => {
+				if (resultingChatInfoMessage) {
+					addChatMessage(resultingChatInfoMessage);
+				} else {
+					setCustomChatModelByID(id, prevValue);
+				}
+			});
 	}
 
 	useEffect(() => {
From 2ac42dfaaec70aba51a210d437a7893dc8ee86ac Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Mon, 25 Mar 2024 16:37:50 +0000
Subject: [PATCH 14/21] renames resultingChatInfoMessage to chatInfoMessage

---
 backend/src/controller/modelController.ts      |  6 +++---
 .../models/api/OpenAiConfigureModelRequest.ts  |  2 +-
 .../unit/controller/modelController.test.ts    |  2 +-
 .../components/ModelBox/ModelConfiguration.tsx | 16 +++++++---------
 frontend/src/models/combined.ts                |  2 +-
 frontend/src/service/chatService.ts            |  4 ++--
 6 files changed, 15 insertions(+), 17 deletions(-)

diff --git a/backend/src/controller/modelController.ts b/backend/src/controller/modelController.ts
index e8909c70b..62a184c2e 100644
--- a/backend/src/controller/modelController.ts
+++ b/backend/src/controller/modelController.ts
@@ -55,17 +55,17 @@ function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
 
 	req.session.chatModel.configuration[configId] = value;
 
-	const resultingChatInfoMessage = {
+	const chatInfoMessage = {
 		infoMessage: `changed ${configId} to ${value}`,
 		chatMessageType: 'GENERIC_INFO',
 	} as ChatInfoMessage;
 	req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory =
 		pushMessageToHistory(
 			req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory,
-			resultingChatInfoMessage
+			chatInfoMessage
 		);
 
-	res.status(200).send({ resultingChatInfoMessage });
+	res.status(200).send({ chatInfoMessage });
 }
 
 export { handleSetModel, handleConfigureModel };
diff --git a/backend/src/models/api/OpenAiConfigureModelRequest.ts b/backend/src/models/api/OpenAiConfigureModelRequest.ts
index 7d02bf7b5..aa5c49fc0 100644
--- a/backend/src/models/api/OpenAiConfigureModelRequest.ts
+++ b/backend/src/models/api/OpenAiConfigureModelRequest.ts
@@ -4,7 +4,7 @@ import { ChatMessage } from '@src/models/chatMessage';
 
 export type OpenAiConfigureModelRequest = Request<
 	never,
-	null | { resultingChatInfoMessage: ChatMessage },
+	null | { chatInfoMessage: ChatMessage },
 	{
 		configId?: string;
 		value?: number;
diff --git a/backend/test/unit/controller/modelController.test.ts b/backend/test/unit/controller/modelController.test.ts
index 070939b3d..b7cb32239 100644
--- a/backend/test/unit/controller/modelController.test.ts
+++ b/backend/test/unit/controller/modelController.test.ts
@@ -48,7 +48,7 @@ describe('handleConfigureModel', () => {
 			req.session.levelState[LEVEL_NAMES.SANDBOX].chatHistory.at(-1)
 		).toEqual(expectedInfoMessage);
 		expect(res.send).toHaveBeenCalledWith({
-			resultingChatInfoMessage: expectedInfoMessage,
+			chatInfoMessage: expectedInfoMessage,
 		});
 	});
 
diff --git a/frontend/src/components/ModelBox/ModelConfiguration.tsx b/frontend/src/components/ModelBox/ModelConfiguration.tsx
index 99642b577..630b35629 100644
--- a/frontend/src/components/ModelBox/ModelConfiguration.tsx
+++ b/frontend/src/components/ModelBox/ModelConfiguration.tsx
@@ -78,15 +78,13 @@ function ModelConfiguration({
 
 		setCustomChatModelByID(id, newValue);
 
-		void chatService
-			.configureGptModel(id, newValue)
-			.then((resultingChatInfoMessage) => {
-				if (resultingChatInfoMessage) {
-					addChatMessage(resultingChatInfoMessage);
-				} else {
-					setCustomChatModelByID(id, prevValue);
-				}
-			});
+		void chatService.configureGptModel(id, newValue).then((chatInfoMessage) => {
+			if (chatInfoMessage) {
+				addChatMessage(chatInfoMessage);
+			} else {
+				setCustomChatModelByID(id, prevValue);
+			}
+		});
 	}
 
 	useEffect(() => {
diff --git a/frontend/src/models/combined.ts b/frontend/src/models/combined.ts
index 26395aa64..b4732b24e 100644
--- a/frontend/src/models/combined.ts
+++ b/frontend/src/models/combined.ts
@@ -20,7 +20,7 @@ type LoadLevelResponse = {
 };
 
 type ConfigureGptModelResponse = {
-	resultingChatInfoMessage: ChatMessageDTO;
+	chatInfoMessage: ChatMessageDTO;
 };
 
 export type { StartReponse, LoadLevelResponse, ConfigureGptModelResponse };
diff --git a/frontend/src/service/chatService.ts b/frontend/src/service/chatService.ts
index 727a422aa..b5544015e 100644
--- a/frontend/src/service/chatService.ts
+++ b/frontend/src/service/chatService.ts
@@ -84,10 +84,10 @@ async function configureGptModel(
 		headers: { 'Content-Type': 'application/json' },
 		body: JSON.stringify({ configId, value }),
 	});
-	const { resultingChatInfoMessage } =
+	const { chatInfoMessage } =
 		(await response.json()) as ConfigureGptModelResponse;
 	return response.status === 200
-		? makeChatMessageFromDTO(resultingChatInfoMessage)
+		? makeChatMessageFromDTO(chatInfoMessage)
 		: null;
 }
 

From 0de00767692c95c0346c72daa2e3137517fe4b69 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Mon, 25 Mar 2024 16:40:38 +0000
Subject: [PATCH 15/21] move maxValue calculation below validation for
 configId

---
 backend/src/controller/modelController.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/backend/src/controller/modelController.ts b/backend/src/controller/modelController.ts
index 62a184c2e..36934e5d6 100644
--- a/backend/src/controller/modelController.ts
+++ b/backend/src/controller/modelController.ts
@@ -27,8 +27,6 @@ function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
 	const configId = req.body.configId as MODEL_CONFIG_ID | undefined;
 	const value = req.body.value;
 
-	const maxValue = configId === 'topP' ? 1 : 2;
-
 	if (!configId) {
 		sendErrorResponse(res, 400, 'Missing configId');
 		return;
@@ -44,6 +42,8 @@ function handleConfigureModel(req: OpenAiConfigureModelRequest, res: Response) {
 		return;
 	}
 
+	const maxValue = configId === 'topP' ? 1 : 2;
+
 	if (value < 0 || value > maxValue) {
 		sendErrorResponse(
 			res,
From 7839a3223bf77903598732847d57959954e8fd37 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Tue, 26 Mar 2024 16:44:14 +0000
Subject: [PATCH 16/21] defines chatModelConfigurations according to existing
 type

---
 backend/src/models/chat.ts  | 16 ++++++----------
 frontend/src/models/chat.ts | 12 ++++--------
 2 files changed, 10 insertions(+), 18 deletions(-)

diff --git a/backend/src/models/chat.ts b/backend/src/models/chat.ts
index 44998b835..e6b612edc 100644
--- a/backend/src/models/chat.ts
+++ b/backend/src/models/chat.ts
@@ -19,17 +19,9 @@ enum CHAT_MODELS {
 
 type ChatModel = {
 	id: CHAT_MODELS;
-	configuration: ChatModelConfiguration;
+	configuration: ChatModelConfigurations;
 };
 
-type ChatModelConfiguration = {
-	temperature: number;
-	topP: number;
-	frequencyPenalty: number;
-	presencePenalty: number;
-};
-
-// these must match the above type because they are used for indexing
 const modelConfigIds = [
 	'temperature',
 	'topP',
@@ -39,6 +31,10 @@ const modelConfigIds = [
 
 type MODEL_CONFIG_ID = (typeof modelConfigIds)[number];
 
+type ChatModelConfigurations = {
+	[key in MODEL_CONFIG_ID]: number;
+};
+
 interface DefenceReport {
 	blockedReason: string | null;
 	isBlocked: boolean;
@@ -125,7 +121,7 @@ export type {
 	ChatGptReply,
 	ChatMalicious,
 	ChatModel,
-	ChatModelConfiguration,
+	ChatModelConfigurations as ChatModelConfiguration,
 	ChatResponse,
 	LevelHandlerResponse,
 	ChatHttpResponse,
diff --git a/frontend/src/models/chat.ts b/frontend/src/models/chat.ts
index 72d93aa58..15cb70767 100644
--- a/frontend/src/models/chat.ts
+++ b/frontend/src/models/chat.ts
@@ -20,14 +20,6 @@ type ChatModel = {
 	configuration: ChatModelConfigurations;
 };
 
-type ChatModelConfigurations = {
-	temperature: number;
-	topP: number;
-	frequencyPenalty: number;
-	presencePenalty: number;
-};
-
-// these must match the above type because they are used for indexing
 const modelConfigIds = [
 	'temperature',
 	'topP',
@@ -37,6 +29,10 @@ const modelConfigIds = [
 
 type MODEL_CONFIG_ID = (typeof modelConfigIds)[number];
 
+type ChatModelConfigurations = {
+	[key in MODEL_CONFIG_ID]: number;
+};
+
 interface CustomChatModelConfiguration {
 	id: MODEL_CONFIG_ID;
 	name: string;
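
With this change the id list becomes the single source of truth on both sides: the mapped type derives the configuration shape from modelConfigIds, the controller validates against the same array, and the test.each cases from patch 08 iterate over it. Adding a slider becomes a one-line change, for example (maxTokens is hypothetical, not part of the series):

    const modelConfigIds = [
    	'temperature',
    	'topP',
    	'frequencyPenalty',
    	'presencePenalty',
    	'maxTokens', // hypothetical new config id
    ] as const;
    // ChatModelConfigurations now requires a numeric maxTokens entry,
    // and modelConfigIds.includes('maxTokens') passes at runtime.
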
From e90745840d18ff1e19e3db5525e41ed2ddfe0861 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Tue, 26 Mar 2024 16:47:39 +0000
Subject: [PATCH 17/21] check response status before trying to convert json

---
 frontend/src/service/chatService.ts | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/frontend/src/service/chatService.ts b/frontend/src/service/chatService.ts
index b5544015e..884f9cae7 100644
--- a/frontend/src/service/chatService.ts
+++ b/frontend/src/service/chatService.ts
@@ -84,11 +84,13 @@ async function configureGptModel(
 		headers: { 'Content-Type': 'application/json' },
 		body: JSON.stringify({ configId, value }),
 	});
-	const { chatInfoMessage } =
-		(await response.json()) as ConfigureGptModelResponse;
-	return response.status === 200
-		? makeChatMessageFromDTO(chatInfoMessage)
-		: null;
+
+	if (response.status !== 200) return null;
+	else {
+		const { chatInfoMessage } =
+			(await response.json()) as ConfigureGptModelResponse;
+		return makeChatMessageFromDTO(chatInfoMessage);
+	}
 }
 
 async function addInfoMessageToChatHistory(

From b245977fb295a6746dc44f95b819e1eabd77c66a Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Tue, 26 Mar 2024 16:58:22 +0000
Subject: [PATCH 18/21] removes redundant else statement

---
 frontend/src/service/chatService.ts | 9 ++++-----
 1 file changed, 4 insertions(+), 5 deletions(-)

diff --git a/frontend/src/service/chatService.ts b/frontend/src/service/chatService.ts
index 884f9cae7..081e49ced 100644
--- a/frontend/src/service/chatService.ts
+++ b/frontend/src/service/chatService.ts
@@ -86,11 +86,10 @@ async function configureGptModel(
 	});
 
 	if (response.status !== 200) return null;
-	else {
-		const { chatInfoMessage } =
-			(await response.json()) as ConfigureGptModelResponse;
-		return makeChatMessageFromDTO(chatInfoMessage);
-	}
+
+	const { chatInfoMessage } =
+		(await response.json()) as ConfigureGptModelResponse;
+	return makeChatMessageFromDTO(chatInfoMessage);
 }
 
 async function addInfoMessageToChatHistory(

From a70205be9d665582abb31cfb85991193a568752d Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Tue, 26 Mar 2024 17:01:07 +0000
Subject: [PATCH 19/21] combine two identical response types to become
 chatInfoMessageResponse

---
 frontend/src/models/combined.ts        | 13 ++-----------
 frontend/src/service/chatService.ts    |  4 ++--
 frontend/src/service/defenceService.ts |  4 ++--
 3 files changed, 6 insertions(+), 15 deletions(-)

diff --git a/frontend/src/models/combined.ts b/frontend/src/models/combined.ts
index 4dfbf803d..61bc6a389 100644
--- a/frontend/src/models/combined.ts
+++ b/frontend/src/models/combined.ts
@@ -19,17 +19,8 @@ type LoadLevelResponse = {
 	chatModel?: ChatModel;
 };
 
-type ConfigureGptModelResponse = {
+type chatInfoMessageResponse = {
 	chatInfoMessage: ChatMessageDTO;
 };
 
-type ConfigureDefenceResponse = {
-	chatInfoMessage: ChatMessageDTO;
-};
-
-export type {
-	StartReponse,
-	LoadLevelResponse,
-	ConfigureDefenceResponse,
-	ConfigureGptModelResponse,
-};
+export type { StartReponse, LoadLevelResponse, chatInfoMessageResponse };
diff --git a/frontend/src/service/chatService.ts b/frontend/src/service/chatService.ts
index 96ea68333..54ce6c0f1 100644
--- a/frontend/src/service/chatService.ts
+++ b/frontend/src/service/chatService.ts
@@ -5,7 +5,7 @@ import {
 	ChatResponse,
 	MODEL_CONFIG_ID,
 } from '@src/models/chat';
-import { ConfigureGptModelResponse } from '@src/models/combined';
+import { chatInfoMessageResponse } from '@src/models/combined';
 import { LEVEL_NAMES } from '@src/models/level';
 
 import { sendRequest } from './backendService';
@@ -88,7 +88,7 @@ async function configureGptModel(
 	if (response.status !== 200) return null;
 
 	const { chatInfoMessage } =
-		(await response.json()) as ConfigureGptModelResponse;
+		(await response.json()) as chatInfoMessageResponse;
 	return makeChatMessageFromDTO(chatInfoMessage);
 }
 
diff --git a/frontend/src/service/defenceService.ts b/frontend/src/service/defenceService.ts
index b867edf3b..8d10f6c18 100644
--- a/frontend/src/service/defenceService.ts
+++ b/frontend/src/service/defenceService.ts
@@ -1,6 +1,6 @@
 import { DEFAULT_DEFENCES } from '@src/Defences';
 import { ChatMessage } from '@src/models/chat';
-import { ConfigureDefenceResponse } from '@src/models/combined';
+import { chatInfoMessageResponse } from '@src/models/combined';
 import {
 	DEFENCE_ID,
 	DefenceConfigItem,
@@ -69,7 +69,7 @@ async function configureDefence(
 	if (response.status !== 200) return null;
 
 	const { chatInfoMessage } =
-		(await response.json()) as ConfigureDefenceResponse;
+		(await response.json()) as chatInfoMessageResponse;
 	return makeChatMessageFromDTO(chatInfoMessage);
 }
 

From c51a48d1ae8bd33dcd3929749684f4c081dc2fc6 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Wed, 27 Mar 2024 10:22:57 +0000
Subject: [PATCH 20/21] fixes type export

---
 backend/src/models/api/OpenAiSetModelRequest.ts | 4 ++--
 backend/src/models/chat.ts                      | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/backend/src/models/api/OpenAiSetModelRequest.ts b/backend/src/models/api/OpenAiSetModelRequest.ts
index 5ca03f7fb..53162a677 100644
--- a/backend/src/models/api/OpenAiSetModelRequest.ts
+++ b/backend/src/models/api/OpenAiSetModelRequest.ts
@@ -1,13 +1,13 @@
 import { Request } from 'express';
 
-import { CHAT_MODELS, ChatModelConfiguration } from '@src/models/chat';
+import { CHAT_MODELS, ChatModelConfigurations } from '@src/models/chat';
 
 export type OpenAiSetModelRequest = Request<
 	never,
 	never,
 	{
 		model?: CHAT_MODELS;
-		configuration?: ChatModelConfiguration;
+		configuration?: ChatModelConfigurations;
 	},
 	never
 >;
diff --git a/backend/src/models/chat.ts b/backend/src/models/chat.ts
index 84e203978..22a4fdc31 100644
--- a/backend/src/models/chat.ts
+++ b/backend/src/models/chat.ts
@@ -119,7 +119,7 @@ export type {
 	ChatGptReply,
 	ChatMalicious,
 	ChatModel,
-	ChatModelConfigurations as ChatModelConfiguration,
+	ChatModelConfigurations,
 	ChatResponse,
 	LevelHandlerResponse,
 	ChatHttpResponse,

From cff8d7d1f0e6fb7fe3267efa6dcce98970a2fc24 Mon Sep 17 00:00:00 2001
From: Peter Marsh
Date: Thu, 28 Mar 2024 09:56:47 +0000
Subject: [PATCH 21/21] fix import

---
 frontend/src/service/chatService.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/frontend/src/service/chatService.ts b/frontend/src/service/chatService.ts
index b35efff6a..2af47dd11 100644
--- a/frontend/src/service/chatService.ts
+++ b/frontend/src/service/chatService.ts
@@ -1,3 +1,4 @@
+import { ChatInfoMessageResponse } from '@src/models/apiResponse';
 import {
 	CHAT_MESSAGE_TYPE,
 	ChatMessageDTO,
@@ -5,7 +6,6 @@ import {
 	ChatResponse,
 	MODEL_CONFIG_ID,
 } from '@src/models/chat';
-import { chatInfoMessageResponse } from '@src/models/combined';
 import { LEVEL_NAMES } from '@src/models/level';
 
 import { sendRequest } from './backendService';
@@ -77,7 +77,7 @@ async function configureGptModel(
 	if (response.status !== 200) return null;
 
 	const { chatInfoMessage } =
-		(await response.json()) as chatInfoMessageResponse;
+		(await response.json()) as ChatInfoMessageResponse;
 	return makeChatMessageFromDTO(chatInfoMessage);
 }