Skip to content

Commit

Permalink
feat(langchain): add promptId parameter (#27)
Browse files — browse the repository at this point in the history
Signed-off-by: Tomáš Dvořák <toomas2d@gmail.com>
  • Loading branch information
Tomas2D authored Jun 28, 2023
1 parent 5bef63d commit 53b9853
Showing 1 changed file with 17 additions and 5 deletions.
22 changes: 17 additions & 5 deletions src/langchain/llm.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,24 +6,29 @@ import type { LLMResult, Generation } from 'langchain/schema';
import type { GenerateOutput } from '../client-types.js';
import { GenerateInput } from '../client-types.js';

export interface GenAIModelOptions {
modelId?: string;
/**
 * Options shared by every GenAI model configuration, regardless of whether
 * the caller targets a model by id or by prompt id (see GenAIModelOptions).
 */
interface BaseGenAIModelOptions {
/** When true, responses are streamed; coerced with Boolean() in the constructor. */
stream?: boolean;
/** Generation parameters forwarded to the service (e.g. `stop` sequences are merged from here). */
parameters?: Record<string, any>;
/** Request timeout in milliseconds — NOTE(review): unit assumed, confirm against Client. */
timeout?: number;
/** Client configuration — project-declared type; passed through to the underlying Client. */
configuration?: Configuration;
}

/**
 * Public options for constructing a GenAIModel.
 *
 * A discriminated union that makes `modelId` and `promptId` mutually
 * exclusive: the `?: never` on the counterpart field means the compiler
 * rejects any object that supplies both. Either an optional `modelId`
 * (first arm) or a required `promptId` (second arm) may be given, but
 * never the two together.
 */
export type GenAIModelOptions =
| (BaseGenAIModelOptions & { modelId?: string; promptId?: never })
| (BaseGenAIModelOptions & { modelId?: never; promptId: string });

export class GenAIModel extends BaseLLM {
#client: Client;

protected modelId?: string;
protected promptId?: string;
protected stream: boolean;
protected timeout: number | undefined;
protected parameters: Record<string, any>;

constructor({
modelId,
promptId,
stream = false,
parameters,
timeout,
Expand All @@ -33,6 +38,7 @@ export class GenAIModel extends BaseLLM {
super(baseParams ?? {});

this.modelId = modelId;
this.promptId = promptId;
this.timeout = timeout;
this.parameters = parameters || {};
this.stream = Boolean(stream);
Expand All @@ -46,9 +52,15 @@ export class GenAIModel extends BaseLLM {
const stopSequences = concatUnique(this.parameters.stop, options.stop);

return prompts.map((input) => ({
...(!isNullish(this.modelId) && {
model_id: this.modelId,
}),
...(!isNullish(this.promptId)
? {
prompt_id: this.promptId,
}
: !isNullish(this.modelId)
? {
model_id: this.modelId,
}
: {}),
input,
parameters: {
...this.parameters,
Expand Down

0 comments on commit 53b9853

Please sign in to comment.