diff --git a/packages/core/src/constants.ts b/packages/core/src/constants.ts
index 098334ca39..3c02fd3b01 100644
--- a/packages/core/src/constants.ts
+++ b/packages/core/src/constants.ts
@@ -247,6 +247,7 @@ export const MODEL_PROVIDERS: readonly {
     url: string
     seed?: boolean
     logit_bias?: boolean
+    top_p?: boolean
 }[] = Object.freeze([
     {
         id: MODEL_PROVIDER_OPENAI,
diff --git a/packages/core/src/openai.ts b/packages/core/src/openai.ts
index 03d18017c8..c8d94e2af0 100644
--- a/packages/core/src/openai.ts
+++ b/packages/core/src/openai.ts
@@ -150,6 +150,11 @@ export const OpenAIChatCompletion: ChatCompletionHandler = async (
         trace.itemValue(`logit_bias`, `disabled`)
         delete postReq.logit_bias // some providers do not support logit_bias
     }
+    if (!isNaN(postReq.top_p) && features?.top_p === false) {
+        logVerbose(`top_p: disabled, not supported by ${provider}`)
+        trace.itemValue(`top_p`, `disabled`)
+        delete postReq.top_p
+    }
 
     // stream_options fails in some cases
     if (model === "gpt-4-turbo-v" || /mistral/i.test(model))