From f5050ab3b2a6af371250531fa5fb3b1e7db91261 Mon Sep 17 00:00:00 2001 From: Joel Mut Date: Wed, 4 Oct 2023 20:43:01 +0200 Subject: [PATCH] Add Http 202 (Accepted) for DLASE --- .../Session/StreamingSession.cs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/libraries/Microsoft.Bot.Connector.Streaming/Session/StreamingSession.cs b/libraries/Microsoft.Bot.Connector.Streaming/Session/StreamingSession.cs index 6d0027e414..15e78201d6 100644 --- a/libraries/Microsoft.Bot.Connector.Streaming/Session/StreamingSession.cs +++ b/libraries/Microsoft.Bot.Connector.Streaming/Session/StreamingSession.cs @@ -7,6 +7,7 @@ using System.Collections.Generic; using System.IO; using System.Linq; +using System.Net; using System.Runtime.InteropServices; using System.Text.Json; using System.Threading; @@ -91,7 +92,7 @@ public async Task SendRequestAsync(StreamingRequest request, Ca } } - return await responseCompletionSource.Task.DefaultTimeOutAsync().ConfigureAwait(false); + return await responseCompletionSource.Task.DefaultTimeOutAsync().ConfigureAwait(false); } public async Task SendResponseAsync(Header header, StreamingResponse response, CancellationToken cancellationToken) @@ -358,6 +359,8 @@ private void ProcessRequest(Guid id, ReceiveRequest request) { _ = Task.Run(async () => { + // Send an HTTP 202 (Accepted) response immediately; otherwise, under high streaming load, the conversation can time out because no response arrives within the request/response window. + await SendResponseAsync(new Header { Id = id, Type = PayloadTypes.Response }, new StreamingResponse { StatusCode = (int)HttpStatusCode.Accepted }, _connectionCancellationToken).ConfigureAwait(false); var streamingResponse = await _receiver.ProcessRequestAsync(request, null).ConfigureAwait(false); await SendResponseAsync(new Header() { Id = id, Type = PayloadTypes.Response }, streamingResponse, _connectionCancellationToken).ConfigureAwait(false);