Update: API request
Kazuki-tam committed Feb 6, 2023
1 parent 70162c9 commit aa16528
Showing 5 changed files with 105 additions and 36 deletions.
53 changes: 18 additions & 35 deletions src/openai/createCompletionByGpt3.ts
@@ -1,9 +1,11 @@
import { GptApiInfo, GptRequestOptions } from "../types/openai.ts";
import type { GptApiInfo, GptRequestOptions } from "../types/openai.ts";
import {
GptMaxTokensSchema,
GptModelSchema,
GptTemperatureSchema,
} from "../schemas/openaiSchema.ts";
import { httpRequestWithRetries } from "./httpRequestWithRetries.ts";
import { getPropertiesService } from "../utils/getPropertiesService.ts";

/**
* Create Text Completion with OpenAI GPT-3
@@ -14,8 +16,7 @@ import {
* @param {number} temperature Temperature
* @return Response text returned by GPT-3
*/

export const createCompletionByGpt3 = (
const createCompletionByGpt3 = (
prompt: GptRequestOptions["prompt"],
maxTokens = 100,
model = "text-davinci-003",
@@ -25,12 +26,6 @@ export const createCompletionByGpt3 = (
throw new Error("You have to input the prompt at the least.");
}

const OPENAI_API_KEY: string | null = PropertiesService.getScriptProperties()
.getProperty("OPENAI_API_KEY");
if (!OPENAI_API_KEY) {
throw new Error("You have to set your OpenAI API Key.");
}

if (maxTokens) {
GptMaxTokensSchema.parse(maxTokens);
}
@@ -43,31 +38,19 @@ GptTemperatureSchema.parse(temperature);
GptTemperatureSchema.parse(temperature);
}

const url = "https://api.openai.com/v1/completions";
const payload = {
model: model,
prompt: prompt,
suffix: null,
temperature: temperature,
max_tokens: maxTokens,
top_p: 1,
};

const fetchOptions = {
contentType: "application/json",
headers: { Authorization: "Bearer " + OPENAI_API_KEY },
payload: JSON.stringify(payload),
};

try {
const res = UrlFetchApp.fetch(url, fetchOptions);
if (res.getResponseCode() !== 200) {
throw new Error(`Error: ${res.getContentText()}`);
}
const parsedRes = JSON.parse(res.getContentText()) as GptApiInfo;
return parsedRes.choices[0].text.trim();
} catch (error) {
console.error(error);
throw new Error(`Error: ${error}`);
const OPENAI_API_KEY: string = getPropertiesService("OPENAI_API_KEY");
const response = httpRequestWithRetries(
OPENAI_API_KEY,
prompt,
model,
maxTokens,
temperature,
);
if (!response) {
throw new Error("Error: Response error.");
}
const parsedRes = JSON.parse(response.getContentText()) as GptApiInfo;
return parsedRes.choices[0].text.trim();
};

export { createCompletionByGpt3 };
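
A minimal usage sketch, not part of this commit: how the refactored createCompletionByGpt3 might be called from an Apps Script entry point once OPENAI_API_KEY is stored as a script property. The entry-file location, import path, and the function name main are assumptions, not code from this repository.

import { createCompletionByGpt3 } from "./openai/index.ts";

const main = (): void => {
  // Relies on the defaults for maxTokens (100), model ("text-davinci-003"), and temperature.
  const answer = createCompletionByGpt3("Translate 'hello' into Japanese.");
  Logger.log(answer);
};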
55 changes: 55 additions & 0 deletions src/openai/httpRequestWithRetries.ts
@@ -0,0 +1,55 @@
import { sleepForBackoff } from "../utils/sleepForBackoff.ts";

/**
* Execute HTTP requests and retry failed requests.
* @param {string} OPENAI_API_KEY OpenAI API key
* @param {string | string[]} prompt Prompt
* @param {string} model Model
* @param {number} maxTokens Max Tokens
* @param {number} temperature Temperature
* @return Response returned by GPT-3
*/

const httpRequestWithRetries = (
OPENAI_API_KEY: string,
prompt: string | string[],
model: string,
maxTokens: number,
temperature: number,
) => {
const url = "https://api.openai.com/v1/completions";
const payload = {
model: model,
prompt: prompt,
suffix: null,
temperature: temperature,
max_tokens: maxTokens,
top_p: 1,
};

const fetchOptions = {
contentType: "application/json",
headers: { Authorization: "Bearer " + OPENAI_API_KEY },
muteHttpExceptions: true,
payload: JSON.stringify(payload),
};

let response = null;
for (let numRetries = 0; numRetries < 5; numRetries++) {
const lastRequestTime = Date.now();
try {
Logger.log(`Sending HTTP request to ${url}`);
response = UrlFetchApp.fetch(url, fetchOptions);
const responseCode = response.getResponseCode();
if (responseCode !== 429 && responseCode < 500) {
return response;
}
} catch (error) {
throw new Error(`Error: ${error}`);
}
Logger.log(`Retrying after ${numRetries} failed requests.`);
sleepForBackoff(numRetries, lastRequestTime);
}
return response;
};
export { httpRequestWithRetries };
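
A hedged sketch of calling the new helper directly, not part of this commit; the wrapper name tryCompletionRequest, the prompt, and the parameter values are placeholders, and the sketch assumes it sits alongside the helper in src/openai. Only HTTP 429 and 5xx responses trigger another attempt, so a success or client error is returned immediately.

import { getPropertiesService } from "../utils/getPropertiesService.ts";
import { httpRequestWithRetries } from "./httpRequestWithRetries.ts";

const tryCompletionRequest = (): void => {
  const apiKey = getPropertiesService("OPENAI_API_KEY");
  // Up to five attempts; rate limits and server errors back off between attempts.
  const res = httpRequestWithRetries(apiKey, "Write a haiku about the sea.", "text-davinci-003", 64, 0.7);
  if (res && res.getResponseCode() === 200) {
    Logger.log(res.getContentText());
  }
};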
3 changes: 2 additions & 1 deletion src/openai/index.ts
@@ -1,3 +1,4 @@
import { createCompletionByGpt3 } from "./createCompletionByGpt3.ts";
import { httpRequestWithRetries } from "./httpRequestWithRetries.ts";

export { createCompletionByGpt3 };
export { createCompletionByGpt3, httpRequestWithRetries };
17 changes: 17 additions & 0 deletions src/utils/getPropertiesService.ts
@@ -0,0 +1,17 @@
/**
* Gets a property value from the script's property store, which all users can access, but only within this script.
* @param {string} propertyName The name of the script property to read
* @return The property value
* https://developers.google.com/apps-script/reference/properties/properties-service
*/

const getPropertiesService = (propertyName: string) => {
const propertyValue: string | null = PropertiesService.getScriptProperties()
.getProperty(propertyName);
if (!propertyValue) {
throw new Error(`Error: ${propertyName} is not defined.`);
}
return propertyValue;
};

export { getPropertiesService };
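
A short sketch, not part of this commit, of storing the API key once and then reading it back through the new helper; setUpApiKey is a hypothetical name and the key value is a placeholder. The sketch assumes it lives next to the helper in src/utils.

import { getPropertiesService } from "./getPropertiesService.ts";

const setUpApiKey = (): void => {
  // Store the key in script properties once so it never lives in source code.
  PropertiesService.getScriptProperties().setProperty("OPENAI_API_KEY", "sk-placeholder");
};

const readApiKey = (): string => {
  return getPropertiesService("OPENAI_API_KEY"); // Throws if the property has not been set.
};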
13 changes: 13 additions & 0 deletions src/utils/sleepForBackoff.ts
@@ -0,0 +1,13 @@
/**
* Sleep after failed requests.
* @param {number} numRetries The number of retries attempted so far
* @param {number} lastRequestTime The last request time
*/
const sleepForBackoff = (numRetries: number, lastRequestTime: number): void => {
const backoff = Math.min(1000 * (1.6 ** numRetries), 60000);
const jitter = 1 + 0.23 * (2 * Math.random() - 1); // Random value in [0.77, 1.23]
const sleepTime = Date.now() - lastRequestTime + backoff * jitter; // Jittered backoff plus the time already elapsed since the last request started
Utilities.sleep(sleepTime);
};

export { sleepForBackoff };
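
For reference, the deterministic part of the delay, 1000 * 1.6 ** numRetries capped at 60000 ms, works out to roughly 1000, 1600, 2560, 4096, and 6554 ms across the five attempts, each then scaled by the random jitter in [0.77, 1.23]. A tiny sketch, not part of this commit, that logs that schedule:

const logBackoffSchedule = (): void => {
  for (let numRetries = 0; numRetries < 5; numRetries++) {
    // Same formula as sleepForBackoff, with the jitter and elapsed-time terms omitted.
    const backoff = Math.min(1000 * (1.6 ** numRetries), 60000);
    Logger.log(`retry ${numRetries}: ~${Math.round(backoff)} ms`);
  }
};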
