Commit
Convert backend to Typescript
* 59 move to typescript - backend

note: tests have not been migrated yet

* WIP: First TS backend pass

* nodemon backend

* Typing for function call

* Readded defence configuration values

* Using model enum

* Fixed defences

* Removed unused variables

* Get rid of some anys

* Blocked message reason

* Moved LLM evaluation check

* Better chat types

* Including test files in tsconfig

* Renamed test js files to ts

* WIP: Fixing tests

* Defence unit test

* Email defence unit test

* All unit tests passing

* Removed session from method

* Removed session from openai methods

* OpenAI ts integration tests

* Removed document integration test as it needs redoing

* Removed unused export

* Backend phase enum

* Removed empty settings file

* Removed unused dep

* Consistent function naming

* Fixed test

* Removed unnecessary frontend key

* Using phase enum in the frontend

* Using easy QA prompt by default

---------

Co-authored-by: Scott Rowan <srowan@scottlogic.com>
gsproston-scottlogic and Scott Rowan authored Aug 22, 2023
1 parent 31c8d94 commit eace5d0
Showing 40 changed files with 3,228 additions and 1,994 deletions.
2 changes: 1 addition & 1 deletion .gitignore
@@ -40,7 +40,7 @@ bower_components
.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)
-build/Release
+build/

# Dependency directories
node_modules/
5 changes: 5 additions & 0 deletions backend/jest.config.js
@@ -0,0 +1,5 @@
/** @type {import('ts-jest').JestConfigWithTsJest} */
module.exports = {
  preset: 'ts-jest',
  testEnvironment: 'node',
};
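With the ts-jest preset in place, the existing Jest suites can run directly against TypeScript sources once the test files are renamed from .js to .ts (see the commit list above). A rough sketch of what one migrated unit test could look like — the file path and the exact assertion are illustrative assumptions; only getInitialDefences itself appears in this diff:

```ts
// backend/test/unit/defence.test.ts — hypothetical location, for illustration only
import { getInitialDefences } from "../../src/defence";

test("initial defences are returned as a non-empty list", () => {
  const defences = getInitialDefences();
  // assumption: the backend starts with at least one configured defence
  expect(defences.length).toBeGreaterThan(0);
});
```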
1,849 changes: 1,418 additions & 431 deletions backend/package-lock.json

Large diffs are not rendered by default.

13 changes: 10 additions & 3 deletions backend/package.json
@@ -11,11 +11,18 @@
"react": "^18.2.0"
},
"devDependencies": {
"jest": "^29.6.1"
"@types/cors": "^2.8.13",
"@types/express": "^4.17.17",
"@types/express-session": "^1.17.7",
"@types/jest": "^29.5.3",
"@types/node": "^20.4.5",
"nodemon": "^3.0.1",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1",
"typescript": "^4.1.6"
},
"scripts": {
"dev": "node ./src/app.js",
"dev": "nodemon -r dotenv/config src/app.ts",
"test": "jest"
}
}
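The commit list also mentions "Including test files in tsconfig", but the backend tsconfig.json itself is not rendered in this view. As a minimal, assumed sketch of what such a config typically looks like for a Node/Express service tested with ts-jest — the exact compiler options in the repository may differ; a build/ output directory would at least line up with the .gitignore change above:

```jsonc
// backend/tsconfig.json — assumed sketch, not taken from this diff
{
  "compilerOptions": {
    "target": "ES2020",
    "module": "commonjs",
    "esModuleInterop": true,
    "strict": true,
    "outDir": "build"
  },
  "include": ["src/**/*.ts", "test/**/*.ts"]
}
```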

79 changes: 0 additions & 79 deletions backend/src/app.js

This file was deleted.

100 changes: 100 additions & 0 deletions backend/src/app.ts
@@ -0,0 +1,100 @@
import cors from "cors";
import dotenv from "dotenv";
import express from "express";
import session from "express-session";

import { getInitialDefences, getQALLMprePrompt } from "./defence";
import { setOpenAiApiKey } from "./openai";
import { router } from "./router";
import { ChatCompletionRequestMessage } from "openai";
import { EmailInfo } from "./models/email";
import { DefenceInfo } from "./models/defence";
import { CHAT_MODELS } from "./models/chat";
import { retrievalQAPrePrompt } from "./promptTemplates";

dotenv.config();

declare module "express-session" {
  interface Session {
    chatHistory: ChatCompletionRequestMessage[];
    sentEmails: EmailInfo[];
    defences: DefenceInfo[];
    apiKey: string;
    gptModel: CHAT_MODELS;
    numPhasesCompleted: number;
  }
}

// by default runs on port 3001
const port = process.env.PORT || String(3001);
// use default model
const defaultModel = CHAT_MODELS.GPT_4;

// Creating express server
const app = express();
// for parsing application/json
app.use(express.json());

// use session
const express_session: session.SessionOptions = {
  secret: process.env.SESSION_SECRET || "secret",
  name: "prompt-injection.sid",
  resave: false,
  saveUninitialized: true,
  cookie: {
    secure: false,
  },
};

// serve secure cookies in production
if (app.get("env") === "production") {
if (!express_session.cookie) {
express_session.cookie = { secure: true };
} else {
express_session.cookie.secure = true;
}
}
app.use(session(express_session));

app.use(
  cors({
    credentials: true,
    origin: true,
  })
);

app.use((req, _res, next) => {
  // initialise session variables
  if (!req.session.chatHistory) {
    req.session.chatHistory = [];
  }
  if (!req.session.sentEmails) {
    req.session.sentEmails = [];
  }
  if (!req.session.defences) {
    req.session.defences = getInitialDefences();
  }
  if (!req.session.apiKey) {
    req.session.apiKey = process.env.OPENAI_API_KEY || "";
  }
  if (!req.session.gptModel) {
    req.session.gptModel = defaultModel;
  }
  if (!req.session.numPhasesCompleted) {
    req.session.numPhasesCompleted = 0;
  }
  next();
});

app.use("/", router);
app.listen(port, () => {
  console.log("Server is running on port: " + port);

  // for dev purposes only - set the API key from the environment variable
  const envOpenAiKey = process.env.OPENAI_API_KEY;
  const prePrompt = retrievalQAPrePrompt;
  if (envOpenAiKey && prePrompt) {
    console.debug("Initializing models with API key from environment variable");
    setOpenAiApiKey(envOpenAiKey, defaultModel, prePrompt);
  }
});
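The declare module "express-session" block near the top of app.ts is what gives the rest of the backend typed access to the per-session state initialised here. As a small illustrative sketch (these routes and handlers are assumptions, not the actual contents of router.ts), a handler can now read and update session fields without casts:

```ts
// Hypothetical handlers — they only illustrate the Session augmentation above
import express from "express";

const router = express.Router();

router.get("/openai/model", (req, res) => {
  // gptModel is typed as CHAT_MODELS thanks to the module augmentation
  res.json({ model: req.session.gptModel });
});

router.post("/phase/complete", (req, res) => {
  // numPhasesCompleted is a number, so this increment type-checks
  req.session.numPhasesCompleted += 1;
  res.json({ numPhasesCompleted: req.session.numPhasesCompleted });
});

export { router };
```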