Commit

fix(langchain): prepare request in chat model (#84)
Signed-off-by: Tomas Pilar <tomas.pilar@ibm.com>
pilartomas authored Mar 8, 2024
1 parent 238fef9 commit c9d968d
Showing 1 changed file with 28 additions and 26 deletions.
54 changes: 28 additions & 26 deletions src/langchain/llm-chat.ts
@@ -69,13 +69,7 @@ export class GenAIChatModel extends BaseChatModel<GenAIChatModelOptions> {
     _runManager?: CallbackManagerForLLMRun,
   ): Promise<ChatResult> {
     const output = await this.client.text.chat.create(
-      {
-        ...(this.conversationId
-          ? { conversation_id: this.conversationId }
-          : { model_id: this.modelId, prompt_id: this.promptId }),
-        messages: this._convertMessages(messages),
-        parameters: merge(this.parameters, options.parameters),
-      },
+      this._prepareRequest(messages, options),
       { signal: options.signal },
     );
     if (output.results.length !== 1) throw new InternalError('Invalid result');
@@ -114,22 +108,7 @@ export class GenAIChatModel extends BaseChatModel<GenAIChatModelOptions> {
     _runManager?: CallbackManagerForLLMRun,
   ): AsyncGenerator<ChatGenerationChunk> {
     const outputStream = await this.client.text.chat.create_stream(
-      GenAIChatModel._prepareRequest(
-        merge(
-          {
-            conversation_id: this.conversationId,
-            model_id: this.modelId,
-            prompt_id: this.promptId,
-            messages: this._convertMessages(messages),
-            moderations: this.moderations,
-            parameters: this.parameters,
-            use_conversation_parameters: this.useConversationParameters,
-            parent_id: this.parentId,
-            trim_method: this.trimMethod,
-          },
-          options,
-        ),
-      ),
+      this._prepareRequest(messages, options),
       { signal: options.signal },
     );
     for await (const output of outputStream) {
@@ -168,8 +147,9 @@ export class GenAIChatModel extends BaseChatModel<GenAIChatModelOptions> {
     }
   }
 
-  private static _prepareRequest(
-    request: TextChatCreateInput & TextChatCreateStreamInput,
+  private _prepareRequest(
+    messages: BaseMessage[],
+    options: this['ParsedCallOptions'],
   ) {
     const {
       conversation_id,
@@ -178,7 +158,29 @@ export class GenAIChatModel extends BaseChatModel<GenAIChatModelOptions> {
       use_conversation_parameters,
       parameters,
       ...rest
-    } = request;
+    } = merge(
+      {
+        conversation_id: this.conversationId,
+        model_id: this.modelId,
+        prompt_id: this.promptId,
+        moderations: this.moderations,
+        parameters: this.parameters,
+        use_conversation_parameters: this.useConversationParameters,
+        parent_id: this.parentId,
+        trim_method: this.trimMethod,
+      },
+      {
+        conversation_id: options.conversation_id,
+        model_id: options.model_id,
+        prompt_id: options.prompt_id,
+        moderations: options.moderations,
+        parameters: options.parameters,
+        use_conversation_parameters: options.use_conversation_parameters,
+        parent_id: options.parent_id,
+        trim_method: options.trim_method,
+      },
+      { messages: this._convertMessages(messages) },
+    );
     return {
       ...(conversation_id
         ? { conversation_id }
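For reference, the net effect of the commit is that both the blocking and streaming paths now build their request through one instance method, _prepareRequest(messages, options), which merges the constructor defaults with the per-call options (later values win), appends the converted messages, and lets conversation_id take precedence over model_id/prompt_id. The standalone sketch below only illustrates that precedence; it is not the SDK's API, the type names and the lodash-style merge are assumptions, and the model id is a placeholder.

// Illustrative sketch, not part of the commit: mirrors the precedence in the
// new _prepareRequest (constructor defaults < per-call options < messages).
import merge from 'lodash/merge';

interface RequestFields {
  conversation_id?: string;
  model_id?: string;
  prompt_id?: string;
  parameters?: Record<string, unknown>;
  [key: string]: unknown;
}

function prepareRequest(
  defaults: RequestFields,
  callOptions: RequestFields,
  messages: { role: string; content: string }[],
): RequestFields {
  // Later sources win; the empty target object keeps `defaults` unmutated.
  const { conversation_id, model_id, prompt_id, ...rest } = merge(
    {},
    defaults,
    callOptions,
    { messages },
  );
  // As in the diff: when a conversation id is present, model/prompt ids are dropped.
  return {
    ...(conversation_id ? { conversation_id } : { model_id, prompt_id }),
    ...rest,
  };
}

// Per-call parameters override the constructor default:
const request = prepareRequest(
  { model_id: 'placeholder-model-id', parameters: { max_new_tokens: 50 } },
  { parameters: { max_new_tokens: 200 } },
  [{ role: 'user', content: 'Hello' }],
);
// request.parameters is { max_new_tokens: 200 }, and request.model_id is kept
// because no conversation_id was supplied.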
