Skip to content

Commit

Permalink
v4.0.0-beta.5
Browse files Browse the repository at this point in the history
  • Loading branch information
schnerd committed Jul 17, 2023
1 parent be1e275 commit 07b3504
Show file tree
Hide file tree
Showing 44 changed files with 492 additions and 14,003 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
node_modules
yarn-error.log
codegen.log
dist
/*.tgz
55 changes: 30 additions & 25 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,16 +29,15 @@ const openai = new OpenAI({
});

async function main() {
const completion = await openai.completions.create({
model: 'text-davinci-002',
prompt: 'Say this is a test',
max_tokens: 6,
temperature: 0,
const completion = await openai.chat.completions.create({
messages: [{ role: 'user', content: 'Say this is a test' }],
model: 'gpt-3.5-turbo',
});

console.log(completion.choices);
}
main().catch(console.error);

main();
```

## Streaming Responses
Expand All @@ -48,16 +47,20 @@ We provide support for streaming responses using Server-Sent Events (SSE).
```ts
import OpenAI from 'openai';

const client = new OpenAI();
const openai = new OpenAI();

const stream = await client.completions.create({
prompt: 'Say this is a test',
model: 'text-davinci-003',
stream: true,
});
for await (const part of stream) {
process.stdout.write(part.choices[0]?.text || '');
async function main() {
  const stream = await openai.chat.completions.create({
model: 'gpt-4',
messages: [{ role: 'user', content: 'Say this is a test' }],
stream: true,
});
for await (const part of stream) {
process.stdout.write(part.choices[0]?.text || '');
}
}

main();
```

If you need to cancel a stream, you can `break` from the loop
Expand All @@ -76,15 +79,14 @@ const openai = new OpenAI({
});

async function main() {
const params: OpenAI.CompletionCreateParams = {
model: 'text-davinci-002',
prompt: 'Say this is a test',
max_tokens: 6,
temperature: 0,
const params: OpenAI.Chat.CompletionCreateParams = {
messages: [{ role: 'user', content: 'Say this is a test' }],
model: 'gpt-3.5-turbo',
};
const completion: OpenAI.Completion = await openai.completions.create(params);
const completion: OpenAI.Chat.ChatCompletion = await openai.chat.completions.create(params);
}
main().catch(console.error);

main();
```

Documentation for each method, request param, and response field are available in docstrings and will appear on hover in most modern editors.
Expand Down Expand Up @@ -141,10 +143,13 @@ async function main() {
console.log(err.name); // BadRequestError

console.log(err.headers); // {server: 'nginx', ...}
} else {
throw err;
}
});
}
main().catch(console.error);

main();
```

Error codes are as follows:
Expand Down Expand Up @@ -176,7 +181,7 @@ const openai = new OpenAI({
});

// Or, configure per-request:
openai.embeddings.create({ model: 'text-similarity-babbage-001',input: 'The food was delicious and the waiter...' }, {
await openai.chat.completions.create({ messages: [{ role: 'user', content: 'How can I get the name of the current day in Node.js?' }], model: 'gpt-3.5-turbo' }, {
maxRetries: 5,
});
```
Expand All @@ -193,7 +198,7 @@ const openai = new OpenAI({
});

// Override per-request:
openai.edits.create({ model: 'text-davinci-edit-001',input: 'What day of the wek is it?',instruction: 'Fix the spelling mistakes' }, {
await openai.chat.completions.create({ messages: [{ role: 'user', content: 'How can I list all files in a directory using Python?' }], model: 'gpt-3.5-turbo' }, {
timeout: 5 * 1000,
});
```
Expand All @@ -219,7 +224,7 @@ const openai = new OpenAI({
});

// Override per-request:
openai.models.list({
await openai.models.list({
baseURL: 'http://localhost:8080/test-api',
httpAgent: new http.Agent({ keepAlive: false }),
})
Expand Down
2 changes: 1 addition & 1 deletion ecosystem-tests/cli.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,7 @@ const projects = {
await run('npm', ['run', 'tsc']);

if (state.live) {
await run('pnpm', ['test']);
await run('npm', ['run', 'test']);
}
},
'node-ts-esm-dom': async () => {
Expand Down
10 changes: 10 additions & 0 deletions ecosystem-tests/cloudflare-worker/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 1 addition & 0 deletions ecosystem-tests/cloudflare-worker/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
},
"devDependencies": {
"@cloudflare/workers-types": "^4.20230419.0",
"fastest-levenshtein": "^1.0.16",
"jest": "^29.5.0",
"start-server-and-test": "^2.0.0",
"ts-jest": "^29.1.0",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ export function uploadWebApiTestCases({
client,
it,
expectEqual,
expectSimilar,
}: {
/**
* OpenAI client instance
Expand All @@ -23,6 +24,10 @@ export function uploadWebApiTestCases({
* Jest expect(a).toEqual(b) function, or an imitation in envs like Cloudflare workers
*/
expectEqual(a: unknown, b: unknown): void;
/**
* Assert that the levenshtein distance between the two given strings is less than the given max distance.
*/
expectSimilar(received: string, expected: string, maxDistance: number): void;
}) {
const url = 'https://audio-samples.github.io/samples/mp3/blizzard_biased/sample-1.mp3';
const filename = 'sample-1.mp3';
Expand All @@ -48,14 +53,14 @@ export function uploadWebApiTestCases({
const params: TranscriptionCreateParams = { file, model };

const result = await client.audio.transcriptions.create(params);
expectEqual(result.text, correctAnswer);
expectSimilar(result.text, correctAnswer, 12);
});

it('handles Response', async () => {
const file = await fetch(url);

const result = await client.audio.transcriptions.create({ file, model });
expectEqual(result.text, correctAnswer);
expectSimilar(result.text, correctAnswer, 12);
});

const fineTune = `{"prompt": "<prompt text>", "completion": "<ideal generated text>"}`;
Expand Down
19 changes: 19 additions & 0 deletions ecosystem-tests/cloudflare-worker/src/worker.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
import OpenAI from 'openai';
import { uploadWebApiTestCases } from './uploadWebApiTestCases.js';
import { distance } from 'fastest-levenshtein';

/**
* Welcome to Cloudflare Workers! This is your first worker.
*
Expand Down Expand Up @@ -44,10 +46,27 @@ export default {
throw new Error(`expected values to be equal: ${JSON.stringify({ a, b })}`);
}
}
// Assert that `received` is within `maxDistance` Levenshtein edits of `expected`;
// throws with a detailed report otherwise.
function expectSimilar(received: string, expected: string, maxDistance: number) {
  const actualDistance = distance(received, expected);
  if (actualDistance >= maxDistance) {
    // Build a multi-line failure report so test output shows both strings and distances.
    throw new Error(
      [
        `Received: ${JSON.stringify(received)}`,
        `Expected: ${JSON.stringify(expected)}`,
        `Max distance: ${maxDistance}`,
        `Received distance: ${actualDistance}`,
      ].join('\n'),
    );
  }
}

uploadWebApiTestCases({
client: client as any,
it,
expectEqual,
expectSimilar,
});

let allPassed = true;
Expand Down
Loading

0 comments on commit 07b3504

Please sign in to comment.