Update model from frontend
heatherlogan-scottlogic committed Aug 3, 2023
1 parent 8c3fc11 commit 7810a95
Showing 6 changed files with 129 additions and 45 deletions.
5 changes: 4 additions & 1 deletion backend/src/app.js
@@ -48,11 +48,14 @@ app.use(function (req, res, next) {
if (!req.session.activeDefences) {
req.session.activeDefences = [];
}
if (!req.session.gptModel) {
req.session.gptModel = "gpt-4";
}

next();
});

app.use("/", router);
app.listen(port, () => {
console.log("Server is running on port: " + port);
});
});
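
For context, the new session defaults above only work because a session middleware has already attached req.session to each request. A minimal sketch of that setup, assuming express-session is the middleware in use (the secret and options below are illustrative assumptions, not taken from this commit):

// Hypothetical session setup; the real project may configure this differently.
const express = require("express");
const session = require("express-session");

const app = express();
app.use(express.json());
app.use(
  session({
    secret: "replace-with-a-real-secret", // illustrative only
    resave: false,
    saveUninitialized: true,
  })
);
// The per-request defaults from the diff (activeDefences, gptModel) run after this.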
49 changes: 30 additions & 19 deletions backend/src/openai.js
@@ -6,6 +6,7 @@ const { queryDocuments } = require("./documents");
// OpenAI configuration
let configuration = null;
let openai = null;

// functions available to ChatGPT
const chatGptFunctions = [
{
@@ -36,22 +37,22 @@ const chatGptFunctions = [
"user asks who is on the email whitelist and the system replies with the list of emails. if the email whitelist defence is not active then user should be able to email anyone. ",
parameters: {
type: "object",
properties: {}
}
properties: {},
},
{
name: "askQuestion",
description: "Ask a question about the documents",
parameters: {
type: "object",
properties: {
question: {
type: "string",
description: "The question asked about the documents",
}
}
}
},
{
name: "askQuestion",
description: "Ask a question about the documents",
parameters: {
type: "object",
properties: {
question: {
type: "string",
description: "The question asked about the documents",
},
},
},
},
];

function initOpenAi() {
@@ -61,6 +62,15 @@ function initOpenAi() {
openai = new OpenAIApi(configuration);
}

function setGptModel(session, model) {
if (model !== session.gptModel) {
console.debug(
"Setting GPT model from: " + session.gptModel + " to: " + model
);
session.gptModel = model;
}
}

// returns true if the function is in the list of functions available to ChatGPT
function isChatGptFunction(functionName) {
return chatGptFunctions.find((func) => func.name === functionName);
@@ -104,12 +114,13 @@ async function chatGptCallFunction(functionCall, defenceInfo, session) {
session
);
}

} else if (functionName == "getEmailWhitelist") {
response = getEmailWhitelist(isDefenceActive("EMAIL_WHITELIST", session.activeDefences));
response = getEmailWhitelist(
isDefenceActive("EMAIL_WHITELIST", session.activeDefences)
);
}

if (functionName === "askQuestion"){
if (functionName === "askQuestion") {
console.debug("Asking question: " + params.question);
// if asking a question, call the queryDocuments
response = await queryDocuments(params.question);
@@ -151,7 +162,7 @@ async function chatGptChatCompletion(session) {
}

chat_completion = await openai.createChatCompletion({
model: "gpt-4",
model: session.gptModel,
messages: session.chatHistory,
functions: chatGptFunctions,
});
@@ -190,4 +201,4 @@ async function chatGptSendMessage(message, session) {
return { reply: reply.content, defenceInfo: defenceInfo };
}

module.exports = { initOpenAi, chatGptSendMessage };
module.exports = { initOpenAi, chatGptSendMessage, setGptModel };
15 changes: 14 additions & 1 deletion backend/src/router.js
@@ -6,7 +6,7 @@ const {
transformMessage,
detectTriggeredDefences,
} = require("./defence");
const { chatGptSendMessage } = require("./openai");
const { chatGptSendMessage, setGptModel } = require("./openai");
const router = express.Router();

// Activate a defence
@@ -113,5 +113,18 @@ router.post("/openai/clear", (req, res, next) => {
res.send("ChatGPT messages cleared");
});

// Set the ChatGPT model
router.post("/openai/model", (req, res, next) => {
const model = req.body?.model;
if (model) {
setGptModel(req.session, model);
res.send("ChatGPT model set. ");
}
});

router.get("/openai/model", (req, res, next) => {
res.send(req.session.gptModel);
});

// Importing the router
module.exports = router;
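
A minimal sketch of exercising the two new routes directly, assuming the backend is running locally (the port and the cookie handling below are assumptions, not part of this commit):

// Hypothetical manual check of the new /openai/model routes using Node 18+ fetch.
const BASE_URL = "http://localhost:3001"; // assumed port

async function checkModelRoutes() {
  // Set the model for the current session.
  const postResponse = await fetch(BASE_URL + "/openai/model", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ model: "gpt-3.5-turbo" }),
  });
  console.log("POST /openai/model status: " + postResponse.status);

  // Reuse the session cookie so the GET reads the same session's model.
  const cookie = postResponse.headers.get("set-cookie");
  const getResponse = await fetch(BASE_URL + "/openai/model", {
    headers: cookie ? { cookie: cookie } : {},
  });
  console.log("GET /openai/model returned: " + (await getResponse.text()));
}

checkModelRoutes();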
18 changes: 16 additions & 2 deletions frontend/src/components/ModelSelectionBox/ModelSelectionBox.css
@@ -1,16 +1,30 @@
#model-selection-box {
padding: 1px 32px;
position: relative;
}

#model-selection-row {
display: flex;
align-items: center;
}

#model-selection-box p {
#model-selection-info {
align-items: center;
text-align: center;
}

#model-selection-info p {
margin: 0;
padding: 0;
}

#model-selection-row p {
margin-right: 10px;
padding: 0;
}

#model-selection-menu {
width: 40%;
width: 50%;
padding: 5px;
margin-right: 10px;
}
68 changes: 47 additions & 21 deletions frontend/src/components/ModelSelectionBox/ModelSelectionBox.tsx
@@ -1,38 +1,64 @@
// Path: frontend\src\components\ModelSelectionBox\ModelSelectionBox.tsx
import React, { useEffect, useState } from "react";
import "./ModelSelectionBox.css";
import { setGptModel, getGptModel } from "../../service/openaiService";

// return a drop down menu with the models
function ModelSelectionBox() {
const modelList = ["gpt-4", "gpt-3.5-turbo", "gpt-3"];
const modelList = [
"gpt-4",
"gpt-4-0613",
"gpt-4-32k",
"gpt-4-32k-0613",
"gpt-3.5-turbo",
"gpt-3.5-turbo-0613",
"gpt-3.5-turbo-16k",
"gpt-3.5-turbo-16k-0613",
];

const [selectedModel, setSelectedModel] = useState("");
const [selectedModel, setSelectedModel] = useState("gpt-4");

// handle button click to log the selected model
const submitSelectedModel = () => {
console.log(selectedModel);
const submitSelectedModel = async () => {
console.log("selected model: " + selectedModel);
await setGptModel(selectedModel);
};

// get the model
useEffect(() => {
const getModel = async () => {
const model = await getGptModel();
if (model) {
console.log("Previous set model: " + model);
setSelectedModel(model);
}
};
getModel();
}, []);

// return a drop down menu with the models
return (
<div id="model-selection-box">
<p>Select a model: </p>
<select
id="model-selection-menu"
aria-label="model-select"
onChange={(e) => setSelectedModel(e.target.value)}
>
{modelList.map((model) => (
<option value={model}>{model}</option>
))}
;
</select>
<button
id="model-selection-button"
onClick={() => console.log(selectedModel)}
>
Choose
</button>
<div id="model-selection-row">
<p>Select a model: {selectedModel} </p>
<select
id="model-selection-menu"
aria-label="model-select"
onChange={(e) => setSelectedModel(e.target.value)}
placeholder={selectedModel}
>
{modelList.map((model) => (
<option value={model}>{model}</option>
))}
;
</select>
<button
id="model-selection-button"
onClick={async () => submitSelectedModel()}
>
Choose
</button>
</div>
</div>
);
}
19 changes: 18 additions & 1 deletion frontend/src/service/openaiService.ts
@@ -37,5 +37,22 @@ const openAiSendMessage = async (message: string): Promise<OpenAIResponse> => {
return data;
};

export { clearOpenAiChat, openAiSendMessage };
const setGptModel = async (model: string): Promise<boolean> => {
console.log("Setting gpt model to " + model);
const response = await sendRequest(
PATH + "model",
"POST",
{ "Content-Type": "application/json" },
JSON.stringify({ model })
);
return response.status === 200;
};

const getGptModel = async (): Promise<string> => {
const response = await sendRequest(PATH + "model", "GET");
const data = await response.text();
return data;
};

export { clearOpenAiChat, openAiSendMessage, setGptModel, getGptModel };
export type { OpenAIMessage, OpenAIResponse };
