diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 66443f777..5f8b241b2 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "4.40.2" + ".": "4.41.0" } diff --git a/CHANGELOG.md b/CHANGELOG.md index bb1e22da7..6671f0bf4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ # Changelog +## 4.41.0 (2024-05-05) + +Full Changelog: [v4.40.2...v4.41.0](https://github.com/openai/openai-node/compare/v4.40.2...v4.41.0) + +### Features + +* **client:** add Azure client ([#822](https://github.com/openai/openai-node/issues/822)) ([92f9049](https://github.com/openai/openai-node/commit/92f90499f0bbee79ba9c8342c8d58dbcaf88bdd1)) + ## 4.40.2 (2024-05-03) Full Changelog: [v4.40.1...v4.40.2](https://github.com/openai/openai-node/compare/v4.40.1...v4.40.2) diff --git a/README.md b/README.md index 657089070..d8e0fb0a5 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ You can import in Deno via: ```ts -import OpenAI from 'https://deno.land/x/openai@v4.40.2/mod.ts'; +import OpenAI from 'https://deno.land/x/openai@v4.41.0/mod.ts'; ``` @@ -361,14 +361,25 @@ Error codes are as followed: | >=500 | `InternalServerError` | | N/A | `APIConnectionError` | -### Azure OpenAI +## Microsoft Azure OpenAI -An example of using this library with Azure OpenAI can be found [here](https://github.com/openai/openai-node/blob/master/examples/azure.ts). +To use this library with [Azure OpenAI](https://learn.microsoft.com/en-us/azure/ai-services/openai/overview), use the `AzureOpenAI` +class instead of the `OpenAI` class. -Please note there are subtle differences in API shape & behavior between the Azure OpenAI API and the OpenAI API, -so using this library with Azure OpenAI may result in incorrect types, which can lead to bugs. +> [!IMPORTANT] +> The Azure API shape differs from the core API shape which means that the static types for responses / params +> won't always be correct. 
+ +```ts +const openai = new AzureOpenAI(); -See [`@azure/openai`](https://www.npmjs.com/package/@azure/openai) for an Azure-specific SDK provided by Microsoft. +const result = await openai.chat.completions.create({ + model: 'gpt-4-1106-preview', + messages: [{ role: 'user', content: 'Say hello!' }], +}); + +console.log(result.choices[0]!.message?.content); +``` ### Retries diff --git a/examples/azure.ts b/examples/azure.ts index a903cfd6e..7f57e45c3 100755 --- a/examples/azure.ts +++ b/examples/azure.ts @@ -1,35 +1,19 @@ #!/usr/bin/env -S npm run tsn -T -import OpenAI from 'openai'; +import { AzureOpenAI } from 'openai'; -// The name of your Azure OpenAI Resource. -// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/how-to/create-resource?pivots=web-portal#create-a-resource -const resource = ''; - -// Corresponds to your Model deployment within your OpenAI resource, e.g. my-gpt35-16k-deployment +// Corresponds to your Model deployment within your OpenAI resource, e.g. gpt-4-1106-preview // Navigate to the Azure OpenAI Studio to deploy a model. -const model = ''; - -// https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning -const apiVersion = '2023-06-01-preview'; +const deployment = 'gpt-4-1106-preview'; -const apiKey = process.env['AZURE_OPENAI_API_KEY']; -if (!apiKey) { - throw new Error('The AZURE_OPENAI_API_KEY environment variable is missing or empty.'); -} - -// Azure OpenAI requires a custom baseURL, api-version query param, and api-key header. -const openai = new OpenAI({ - apiKey, - baseURL: `https://${resource}.openai.azure.com/openai/deployments/${model}`, - defaultQuery: { 'api-version': apiVersion }, - defaultHeaders: { 'api-key': apiKey }, -}); +// Make sure to set both AZURE_OPENAI_ENDPOINT with the endpoint of your Azure resource and AZURE_OPENAI_API_KEY with the API key. +// You can find both information in the Azure Portal. 
+const openai = new AzureOpenAI(); async function main() { console.log('Non-streaming:'); const result = await openai.chat.completions.create({ - model, + model: deployment, messages: [{ role: 'user', content: 'Say hello!' }], }); console.log(result.choices[0]!.message?.content); @@ -37,7 +21,7 @@ async function main() { console.log(); console.log('Streaming:'); const stream = await openai.chat.completions.create({ - model, + model: deployment, messages: [{ role: 'user', content: 'Say hello!' }], stream: true, }); diff --git a/package.json b/package.json index 82962a2f0..b698340af 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "openai", - "version": "4.40.2", + "version": "4.41.0", "description": "The official TypeScript library for the OpenAI API", "author": "OpenAI ", "types": "dist/index.d.ts", diff --git a/scripts/build-deno b/scripts/build-deno index fe1712b97..8df5b0651 100755 --- a/scripts/build-deno +++ b/scripts/build-deno @@ -16,7 +16,7 @@ This is a build produced from https://github.com/openai/openai-node – please g Usage: \`\`\`ts -import OpenAI from "https://deno.land/x/openai@v4.40.2/mod.ts"; +import OpenAI from "https://deno.land/x/openai@v4.41.0/mod.ts"; const client = new OpenAI(); \`\`\` diff --git a/src/index.ts b/src/index.ts index 1741a4816..dbade2f86 100644 --- a/src/index.ts +++ b/src/index.ts @@ -2,7 +2,7 @@ import * as Core from './core'; import * as Errors from './error'; -import { type Agent } from './_shims/index'; +import { type Agent, type RequestInit } from './_shims/index'; import * as Uploads from './uploads'; import * as Pagination from 'openai/pagination'; import * as API from 'openai/resources/index'; @@ -310,4 +310,187 @@ export namespace OpenAI { export import FunctionParameters = API.FunctionParameters; } +// ---------------------- Azure ---------------------- + +/** API Client for interfacing with the Azure OpenAI API. 
*/ +export interface AzureClientOptions extends ClientOptions { + /** + * Defaults to process.env['OPENAI_API_VERSION']. + */ + apiVersion?: string | undefined; + + /** + * Your Azure endpoint, including the resource, e.g. `https://example-resource.openai.azure.com/` + */ + endpoint?: string | undefined; + + /** + * A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * Note: this means you won't be able to use non-deployment endpoints. Not supported with Assistants APIs. + */ + deployment?: string | undefined; + + /** + * Defaults to process.env['AZURE_OPENAI_API_KEY']. + */ + apiKey?: string | undefined; + + /** + * A function that returns an access token for Microsoft Entra (formerly known as Azure Active Directory), + * which will be invoked on every request. + */ + azureADTokenProvider?: (() => string) | undefined; +} + +/** API Client for interfacing with the Azure OpenAI API. */ +export class AzureOpenAI extends OpenAI { + private _azureADTokenProvider: (() => string) | undefined; + apiVersion: string = ''; + /** + * API Client for interfacing with the Azure OpenAI API. + * + * @param {string | undefined} [opts.apiVersion=process.env['OPENAI_API_VERSION'] ?? undefined] + * @param {string | undefined} [opts.endpoint=process.env['AZURE_OPENAI_ENDPOINT'] ?? undefined] - Your Azure endpoint, including the resource, e.g. `https://example-resource.openai.azure.com/` + * @param {string | undefined} [opts.apiKey=process.env['AZURE_OPENAI_API_KEY'] ?? undefined] + * @param {string | undefined} opts.deployment - A model deployment, if given, sets the base client URL to include `/deployments/{deployment}`. + * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null] + * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL']] - Sets the base URL for the API. 
+ * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out. + * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections. + * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation. + * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request. + * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API. + * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API. + * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers. + */ + constructor({ + baseURL = Core.readEnv('OPENAI_BASE_URL'), + apiKey = Core.readEnv('AZURE_OPENAI_API_KEY'), + apiVersion = Core.readEnv('OPENAI_API_VERSION'), + endpoint, + deployment, + azureADTokenProvider, + dangerouslyAllowBrowser, + ...opts + }: AzureClientOptions = {}) { + if (!apiVersion) { + throw new Errors.OpenAIError( + "The OPENAI_API_VERSION environment variable is missing or empty; either provide it, or instantiate the AzureOpenAI client with an apiVersion option, like new AzureOpenAI({ apiVersion: 'My API Version' }).", + ); + } + + if (typeof azureADTokenProvider === 'function') { + dangerouslyAllowBrowser = true; + } + + if (!azureADTokenProvider && !apiKey) { + throw new Errors.OpenAIError( + 'Missing credentials. 
Please pass one of `apiKey` and `azureADTokenProvider`, or set the `AZURE_OPENAI_API_KEY` environment variable.', + ); + } + + if (azureADTokenProvider && apiKey) { + throw new Errors.OpenAIError( + 'The `apiKey` and `azureADTokenProvider` arguments are mutually exclusive; only one can be passed at a time.', + ); + } + + // define a sentinel value to avoid any typing issues + apiKey ??= API_KEY_SENTINEL; + + opts.defaultQuery = { ...opts.defaultQuery, 'api-version': apiVersion }; + + if (!baseURL) { + if (!endpoint) { + endpoint = process.env['AZURE_OPENAI_ENDPOINT']; + } + + if (!endpoint) { + throw new Errors.OpenAIError( + 'Must provide one of the `baseURL` or `endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable', + ); + } + + if (deployment) { + baseURL = `${endpoint}/openai/deployments/${deployment}`; + } else { + baseURL = `${endpoint}/openai`; + } + } else { + if (endpoint) { + throw new Errors.OpenAIError('baseURL and endpoint are mutually exclusive'); + } + } + + super({ + apiKey, + baseURL, + ...opts, + ...(dangerouslyAllowBrowser !== undefined ? 
{ dangerouslyAllowBrowser } : {}), + }); + + this._azureADTokenProvider = azureADTokenProvider; + this.apiVersion = apiVersion; + } + + override buildRequest(options: Core.FinalRequestOptions): { + req: RequestInit; + url: string; + timeout: number; + } { + if (_deployments_endpoints.has(options.path) && options.method === 'post' && options.body !== undefined) { + if (!Core.isObj(options.body)) { + throw new Error('Expected request body to be an object'); + } + const model = options.body['model']; + delete options.body['model']; + if (model !== undefined && !this.baseURL.includes('/deployments')) { + options.path = `/deployments/${model}${options.path}`; + } + } + return super.buildRequest(options); + } + + private _getAzureADToken(): string | undefined { + if (typeof this._azureADTokenProvider === 'function') { + const token = this._azureADTokenProvider(); + if (!token || typeof token !== 'string') { + throw new Errors.OpenAIError( + `Expected 'azureADTokenProvider' argument to return a string but it returned ${token}`, + ); + } + return token; + } + return undefined; + } + + protected override authHeaders(opts: Core.FinalRequestOptions): Core.Headers { + if (opts.headers?.['Authorization'] || opts.headers?.['api-key']) { + return {}; + } + const token = this._getAzureADToken(); + if (token) { + return { Authorization: `Bearer ${token}` }; + } + if (this.apiKey !== API_KEY_SENTINEL) { + return { 'api-key': this.apiKey }; + } + throw new Errors.OpenAIError('Unable to handle auth'); + } +} + +const _deployments_endpoints = new Set([ + '/completions', + '/chat/completions', + '/embeddings', + '/audio/transcriptions', + '/audio/translations', + '/audio/speech', + '/images/generations', +]); + +const API_KEY_SENTINEL = ''; + +// ---------------------- End Azure ---------------------- + export default OpenAI; diff --git a/src/resources/beta/vector-stores/files.ts b/src/resources/beta/vector-stores/files.ts index f8f8cddc5..ff5094065 100644 --- 
a/src/resources/beta/vector-stores/files.ts +++ b/src/resources/beta/vector-stores/files.ts @@ -144,6 +144,7 @@ export class Files extends APIResource { /** * Upload a file to the `files` API and then attach it to the given vector store. + * * Note the file will be asynchronously processed (you can use the alternative * polling helper method to wait for processing to complete). */ diff --git a/src/version.ts b/src/version.ts index 14b8c36f9..1ab180911 100644 --- a/src/version.ts +++ b/src/version.ts @@ -1 +1 @@ -export const VERSION = '4.40.2'; // x-release-please-version +export const VERSION = '4.41.0'; // x-release-please-version diff --git a/tests/lib/azure.test.ts b/tests/lib/azure.test.ts new file mode 100644 index 000000000..e2b967903 --- /dev/null +++ b/tests/lib/azure.test.ts @@ -0,0 +1,395 @@ +import { AzureOpenAI } from 'openai'; +import { APIUserAbortError } from 'openai'; +import { Headers } from 'openai/core'; +import defaultFetch, { Response, type RequestInit, type RequestInfo } from 'node-fetch'; + +const apiVersion = '2024-02-15-preview'; + +describe('instantiate azure client', () => { + const env = process.env; + + beforeEach(() => { + jest.resetModules(); + process.env = { ...env }; + + console.warn = jest.fn(); + }); + + afterEach(() => { + process.env = env; + }); + + describe('defaultHeaders', () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/', + defaultHeaders: { 'X-My-Default-Header': '2' }, + apiKey: 'My API Key', + apiVersion, + }); + + test('they are used in the request', () => { + const { req } = client.buildRequest({ path: '/foo', method: 'post' }); + expect((req.headers as Headers)['x-my-default-header']).toEqual('2'); + }); + + test('can ignore `undefined` and leave the default', () => { + const { req } = client.buildRequest({ + path: '/foo', + method: 'post', + headers: { 'X-My-Default-Header': undefined }, + }); + expect((req.headers as Headers)['x-my-default-header']).toEqual('2'); + }); + + test('can be 
removed with `null`', () => { + const { req } = client.buildRequest({ + path: '/foo', + method: 'post', + headers: { 'X-My-Default-Header': null }, + }); + expect(req.headers as Headers).not.toHaveProperty('x-my-default-header'); + }); + }); + + describe('defaultQuery', () => { + test('with null query params given', () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/', + defaultQuery: { apiVersion: 'foo' }, + apiKey: 'My API Key', + apiVersion, + }); + expect(client.buildURL('/foo', null)).toEqual( + `http://localhost:5000/foo?apiVersion=foo&api-version=${apiVersion}`, + ); + }); + + test('multiple default query params', () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/', + defaultQuery: { apiVersion: 'foo', hello: 'world' }, + apiKey: 'My API Key', + apiVersion, + }); + expect(client.buildURL('/foo', null)).toEqual( + `http://localhost:5000/foo?apiVersion=foo&hello=world&api-version=${apiVersion}`, + ); + }); + + test('overriding with `undefined`', () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/', + defaultQuery: { hello: 'world' }, + apiKey: 'My API Key', + apiVersion, + }); + expect(client.buildURL('/foo', { hello: undefined })).toEqual( + `http://localhost:5000/foo?api-version=${apiVersion}`, + ); + }); + }); + + test('custom fetch', async () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/', + apiKey: 'My API Key', + apiVersion, + fetch: (url) => { + return Promise.resolve( + new Response(JSON.stringify({ url, custom: true }), { + headers: { 'Content-Type': 'application/json' }, + }), + ); + }, + }); + + const response = await client.get('/foo'); + expect(response).toEqual({ url: `http://localhost:5000/foo?api-version=${apiVersion}`, custom: true }); + }); + + test('custom signal', async () => { + const client = new AzureOpenAI({ + baseURL: process.env['TEST_API_BASE_URL'] ?? 
'http://127.0.0.1:4010', + apiKey: 'My API Key', + apiVersion, + fetch: (...args) => { + return new Promise((resolve, reject) => + setTimeout( + () => + defaultFetch(...args) + .then(resolve) + .catch(reject), + 300, + ), + ); + }, + }); + + const controller = new AbortController(); + setTimeout(() => controller.abort(), 200); + + const spy = jest.spyOn(client, 'request'); + + await expect(client.get('/foo', { signal: controller.signal })).rejects.toThrowError(APIUserAbortError); + expect(spy).toHaveBeenCalledTimes(1); + }); + + describe('baseUrl', () => { + test('trailing slash', () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/custom/path/', + apiKey: 'My API Key', + apiVersion, + }); + expect(client.buildURL('/foo', null)).toEqual( + `http://localhost:5000/custom/path/foo?api-version=${apiVersion}`, + ); + }); + + test('no trailing slash', () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/custom/path', + apiKey: 'My API Key', + apiVersion, + }); + expect(client.buildURL('/foo', null)).toEqual( + `http://localhost:5000/custom/path/foo?api-version=${apiVersion}`, + ); + }); + + afterEach(() => { + process.env['OPENAI_BASE_URL'] = undefined; + }); + + test('explicit option', () => { + const client = new AzureOpenAI({ baseURL: 'https://example.com', apiKey: 'My API Key', apiVersion }); + expect(client.baseURL).toEqual('https://example.com'); + }); + + test('env variable', () => { + process.env['OPENAI_BASE_URL'] = 'https://example.com/from_env'; + const client = new AzureOpenAI({ apiKey: 'My API Key', apiVersion }); + expect(client.baseURL).toEqual('https://example.com/from_env'); + }); + + test('empty baseUrl/endpoint env variable', () => { + process.env['OPENAI_BASE_URL'] = ''; // empty + expect(() => new AzureOpenAI({ apiKey: 'My API Key', apiVersion })).toThrow( + /Must provide one of the `baseURL` or `endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable/, + ); + }); + + test('blank 
baseUrl/endpoint env variable', () => { + process.env['OPENAI_BASE_URL'] = ' '; // blank + expect(() => new AzureOpenAI({ apiKey: 'My API Key', apiVersion })).toThrow( + /Must provide one of the `baseURL` or `endpoint` arguments, or the `AZURE_OPENAI_ENDPOINT` environment variable/, + ); + }); + }); + + test('maxRetries option is correctly set', () => { + const client = new AzureOpenAI({ + baseURL: 'https://example.com', + maxRetries: 4, + apiKey: 'My API Key', + apiVersion, + }); + expect(client.maxRetries).toEqual(4); + + // default + const client2 = new AzureOpenAI({ baseURL: 'https://example.com', apiKey: 'My API Key', apiVersion }); + expect(client2.maxRetries).toEqual(2); + }); + + test('with environment variable arguments', () => { + // set options via env var + process.env['OPENAI_BASE_URL'] = 'https://example.com'; + process.env['AZURE_OPENAI_API_KEY'] = 'My API Key'; + process.env['OPENAI_API_VERSION'] = 'My API Version'; + const client = new AzureOpenAI(); + expect(client.baseURL).toBe('https://example.com'); + expect(client.apiKey).toBe('My API Key'); + expect(client.apiVersion).toBe('My API Version'); + }); + + test('with overriden environment variable arguments', () => { + // set options via env var + process.env['AZURE_OPENAI_API_KEY'] = 'another My API Key'; + process.env['OPENAI_API_VERSION'] = 'another My API Version'; + const client = new AzureOpenAI({ baseURL: 'https://example.com', apiKey: 'My API Key', apiVersion }); + expect(client.apiKey).toBe('My API Key'); + expect(client.apiVersion).toBe(apiVersion); + }); + + describe('Azure Active Directory (AD)', () => { + test('with azureADTokenProvider', () => { + const client = new AzureOpenAI({ + baseURL: 'http://localhost:5000/', + azureADTokenProvider: () => 'my token', + apiVersion, + }); + expect(client.buildRequest({ method: 'post', path: 'https://example.com' }).req.headers).toHaveProperty( + 'authorization', + 'Bearer my token', + ); + }); + + test('apiKey and azureADTokenProvider cant be 
combined', () => { + expect( + () => + new AzureOpenAI({ + baseURL: 'http://localhost:5000/', + azureADTokenProvider: () => 'my token', + apiKey: 'My API Key', + apiVersion, + }), + ).toThrow( + /The `apiKey` and `azureADTokenProvider` arguments are mutually exclusive; only one can be passed at a time./, + ); + }); + }); + + test('with endpoint', () => { + const client = new AzureOpenAI({ endpoint: 'https://example.com', apiKey: 'My API Key', apiVersion }); + expect(client.baseURL).toEqual('https://example.com/openai'); + }); + + test('baseURL and endpoint are mutually exclusive', () => { + expect( + () => + new AzureOpenAI({ + endpoint: 'https://example.com', + baseURL: 'https://anotherexample.com', + apiKey: 'My API Key', + apiVersion, + }), + ).toThrow(/baseURL and endpoint are mutually exclusive/); + }); +}); + +describe('azure request building', () => { + const client = new AzureOpenAI({ baseURL: 'https://example.com', apiKey: 'My API Key', apiVersion }); + + describe('Content-Length', () => { + test('handles multi-byte characters', () => { + const { req } = client.buildRequest({ path: '/foo', method: 'post', body: { value: '—' } }); + expect((req.headers as Record)['content-length']).toEqual('20'); + }); + + test('handles standard characters', () => { + const { req } = client.buildRequest({ path: '/foo', method: 'post', body: { value: 'hello' } }); + expect((req.headers as Record)['content-length']).toEqual('22'); + }); + }); + + describe('custom headers', () => { + test('handles undefined', () => { + const { req } = client.buildRequest({ + path: '/foo', + method: 'post', + body: { value: 'hello' }, + headers: { 'X-Foo': 'baz', 'x-foo': 'bar', 'x-Foo': undefined, 'x-baz': 'bam', 'X-Baz': null }, + }); + expect((req.headers as Record)['x-foo']).toEqual('bar'); + expect((req.headers as Record)['x-Foo']).toEqual(undefined); + expect((req.headers as Record)['X-Foo']).toEqual(undefined); + expect((req.headers as Record)['x-baz']).toEqual(undefined); + }); + }); 
+}); + +describe('retries', () => { + test('retry on timeout', async () => { + let count = 0; + const testFetch = async (url: RequestInfo, { signal }: RequestInit = {}): Promise => { + if (count++ === 0) { + return new Promise( + (resolve, reject) => signal?.addEventListener('abort', () => reject(new Error('timed out'))), + ); + } + return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); + }; + + const client = new AzureOpenAI({ + baseURL: 'https://example.com', + apiKey: 'My API Key', + apiVersion, + timeout: 10, + fetch: testFetch, + }); + + expect(await client.request({ path: '/foo', method: 'get' })).toEqual({ a: 1 }); + expect(count).toEqual(2); + expect( + await client + .request({ path: '/foo', method: 'get' }) + .asResponse() + .then((r) => r.text()), + ).toEqual(JSON.stringify({ a: 1 })); + expect(count).toEqual(3); + }); + + test('retry on 429 with retry-after', async () => { + let count = 0; + const testFetch = async (url: RequestInfo, { signal }: RequestInit = {}): Promise => { + if (count++ === 0) { + return new Response(undefined, { + status: 429, + headers: { + 'Retry-After': '0.1', + }, + }); + } + return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); + }; + + const client = new AzureOpenAI({ + baseURL: 'https://example.com', + apiKey: 'My API Key', + apiVersion, + fetch: testFetch, + }); + + expect(await client.request({ path: '/foo', method: 'get' })).toEqual({ a: 1 }); + expect(count).toEqual(2); + expect( + await client + .request({ path: '/foo', method: 'get' }) + .asResponse() + .then((r) => r.text()), + ).toEqual(JSON.stringify({ a: 1 })); + expect(count).toEqual(3); + }); + + test('retry on 429 with retry-after-ms', async () => { + let count = 0; + const testFetch = async (url: RequestInfo, { signal }: RequestInit = {}): Promise => { + if (count++ === 0) { + return new Response(undefined, { + status: 429, + headers: { + 'Retry-After-Ms': '10', + }, + 
}); + } + return new Response(JSON.stringify({ a: 1 }), { headers: { 'Content-Type': 'application/json' } }); + }; + + const client = new AzureOpenAI({ + baseURL: 'https://example.com', + apiKey: 'My API Key', + apiVersion, + fetch: testFetch, + }); + + expect(await client.request({ path: '/foo', method: 'get' })).toEqual({ a: 1 }); + expect(count).toEqual(2); + expect( + await client + .request({ path: '/foo', method: 'get' }) + .asResponse() + .then((r) => r.text()), + ).toEqual(JSON.stringify({ a: 1 })); + expect(count).toEqual(3); + }); +});