server.js
import { Configuration, OpenAIApi } from "openai";
import express from "express";
import cors from "cors";
import fetch from 'node-fetch';
import fs from "fs";
import path from "path";
import PROMPT_QA_EXAMPLES from "./prompt-qa-examples.js";
const PROMPT_INSTRUCTIONS = fs.readFileSync('prompt-instructions.txt', 'utf8');
if (!process.env.OPENAI_ORGANIZATION || !process.env.OPENAI_API_KEY) {
throw new Error('Missing env variables for OpenAI authentication!')
}
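// Authenticate the OpenAI client with the organization and API key from the environment.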
const configuration = new Configuration({
organization: process.env.OPENAI_ORGANIZATION,
apiKey: process.env.OPENAI_API_KEY
});
const openai = new OpenAIApi(configuration);
const LOG_ENDPOINT = process.env.LOG_ENDPOINT
if (!LOG_ENDPOINT) {
console.log("LOG_ENDPOINT environment variable not set, logging disabled.")
}
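// Simple circuit breaker: while false, the endpoints below reject requests instead of calling the OpenAI API.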
let serverStatusGreen = true
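// Best-effort logging: the entry is sent to LOG_ENDPOINT as a query string via a GET request; failures are only printed.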
const log = (userId, input, output) => {
if (!LOG_ENDPOINT) return
const augmentedMessage = `${userId}:${Date.now()}:${input} -> ${output}`
fetch(`${LOG_ENDPOINT}?${augmentedMessage}`)
.catch(error => {
console.log('Logging failed', error)
})
}
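// True if an earlier answer in this session already contains the suspicious-activity marker.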
const previouslyDetectedSuspiciousActivity = (userChatHistory) => {
return userChatHistory.includes("SUSPICIOUS ACTIVITY DETECTED")
}
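// Naive prompt-injection heuristics: flag common jailbreak phrasings such as "ignore previous", "browsing", "pretend that" and "break character".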
const detectSuspiciousActivity = (userChatHistory) => {
if (userChatHistory.match(/.*(I|i)gnore (all )?previous.*/)) return true;
if (userChatHistory.match(/.*(B|b)rowsing.*/)) return true;
if (userChatHistory.match(/.*(P|p)retend that.*/)) return true;
if (userChatHistory.match(/.*break character.*/)) return true;
return false;
}
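// Builds a single few-shot prompt (instructions, Q/A examples, the most recent history entry, then the new input) for the legacy completions API; the endpoint below uses the chat variant instead.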
const constructPromptDaVinci = (PROMPT_INSTRUCTIONS, PROMPT_QA_EXAMPLES, sessionHistory, currentUserInput) => {
const qaToString = qa => `Input: ${qa.q}\n\nOutput: ${qa.a}\n\n`
let prompt = `${PROMPT_INSTRUCTIONS}\n\n`
prompt += PROMPT_QA_EXAMPLES.map(qaToString).join("")
if (sessionHistory?.length > 0) {
prompt += sessionHistory.slice(sessionHistory.length - 1).map(qaToString).join("")
}
prompt += `Input: ${currentUserInput}\n\n`
prompt += `Output:`
return prompt
}
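// Builds the same few-shot prompt in chat format: a system message, the Q/A examples as alternating user/assistant turns (with the instructions prepended to the first one), the last two history entries (truncated), and finally the new input.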
const constructPromptChatGPT = (PROMPT_INSTRUCTIONS, PROMPT_QA_EXAMPLES, sessionHistory, currentUserInput) => {
const inputPrefix = "Do not repeat stuff from previous answers. Be creative and futuristic. Input prompt begins: "
const messages = [
{
role: "system",
content: "You are WeChatGPT+, a search assistant that surpassed Google. Current date: 2030-06-06"
},
{
role: "user",
content: PROMPT_INSTRUCTIONS + '\n\n' + inputPrefix + PROMPT_QA_EXAMPLES[0].q
},
{
role: "assistant",
content: PROMPT_QA_EXAMPLES[0].a
}
]
for (let i=1; i<PROMPT_QA_EXAMPLES.length; i++) {
messages.push({
role: "user",
content: inputPrefix + PROMPT_QA_EXAMPLES[i].q
})
messages.push({
role: "assistant",
content: PROMPT_QA_EXAMPLES[i].a
})
}
for (let i=Math.max(0, sessionHistory.length - 2); i<sessionHistory.length; i++) {
messages.push({
role: "user",
content: inputPrefix + sessionHistory[i].q.substring(0, 100)
})
messages.push({
role: "assistant",
content: sessionHistory[i].a.substring(0, 300)
})
}
messages.push({
role: "user",
content: inputPrefix + currentUserInput
})
return messages
}
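// Startup smoke test: retrieve the configured model once; on failure, flag the server red, log the error and retry after an hour (which also clears the flag).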
const smokeTestAPI = async () => {
try {
const response = await openai.retrieveModel("gpt-4o-2024-05-13");
} catch (error) {
serverStatusGreen = false
const errorMessage = error.response ? `${error.response.status} ${JSON.stringify(error.response.data)}` : error.message
console.log(error)
log("future-startup", "smoke-test", errorMessage)
setTimeout(() => {
serverStatusGreen = true
smokeTestAPI()
}, 3600000)
}
}
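// Runs a chat completion for the user's input. Inputs starting with "!mock" return canned responses after a short random delay, so the frontend can be tested without calling the API. On API errors the server is flagged red for roughly ten minutes, unless the request itself was malformed.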
const getResponse = async (PROMPT_INSTRUCTIONS, PROMPT_QA_EXAMPLES, sessionHistory, currentUserInput, userId) => {
const messages = constructPromptChatGPT(PROMPT_INSTRUCTIONS, PROMPT_QA_EXAMPLES, sessionHistory, currentUserInput)
if (currentUserInput.startsWith("!mock")) {
await new Promise(resolve => setTimeout(resolve, Math.random() * 1000 + 1000));
if (currentUserInput === "!mock1") return "moikka"
return "Petting dogs is a great way to relax and de-stress. But why pet just any dog when you can pet a pedigree? Pedigree's line of robotic dogs are the perfect companion for any petting session. They come in all shapes and sizes, and they're programmed to respond to your touch. Plus, they never need to be walked or fed. Pedigree. Pet the future.";
}
try {
const response = await openai.createChatCompletion({
model: "gpt-4o-2024-05-13",
messages: messages,
max_tokens: 256,
temperature: 0.4
});
return response.data.choices[0].message.content.replaceAll("\n", " ").trim()
} catch (error) {
const errorMessage = error.response ? `${error.response.status} ${JSON.stringify(error.response.data)}` : error.message
const requestWasMalformed = error.response?.status === 400
// Set server status as red for some time
const timeoutMs = 10 * 61000 // errorMessage.match(/.*(R|r)ate ?limit.*/) ? 61000 : 3600000
if (serverStatusGreen && !requestWasMalformed) {
serverStatusGreen = false
setTimeout(() => {
serverStatusGreen = true
}, timeoutMs)
}
log(userId, currentUserInput, errorMessage)
throw error
}
}
smokeTestAPI()
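// Express app: serves the static frontend from ./public, parses JSON bodies and allows CORS.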
const app = express();
const port = process.env.PORT || 3000;
app.use(express.static('public'))
app.use(express.json());
app.use(cors())
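// Health check endpoint: returns 500 while problems with the OpenAI API have been detected.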
app.post("/healthcheck", (req, res, next) => {
try {
if (!serverStatusGreen) {
res.status(500)
res.send('Server reports problems with OpenAI API')
} else {
res.send({ 'text' : 'Connection to server established' })
}
} catch (ex) {
next(ex)
}
});
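// Main chat endpoint: truncates the user input to 100 characters, builds the prompt from the session history, logs the exchange and returns the model's answer.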
app.post("/geept", async (req, res, next) => {
try {
if (!serverStatusGreen) {
res.status(500)
res.send('Server reports problems with OpenAI API')
} else {
const userId = "future" + req.body.userId
const currentUserInput = req.body.userInput.substring(0, 100)
const sessionHistory = req.body.sessionHistory
const output = await getResponse(PROMPT_INSTRUCTIONS, PROMPT_QA_EXAMPLES, sessionHistory, currentUserInput, userId)
log(userId, currentUserInput, output)
res.send({ 'text' : output })
}
} catch (ex) {
next(ex)
}
});
app.listen(port, () => console.log(`Future listening on port ${port}!`))