From c584a7d601c2feaf93f9c5af490d94832d5b70e1 Mon Sep 17 00:00:00 2001
From: Kekma <136650032+KekmaTime@users.noreply.github.com>
Date: Mon, 25 Mar 2024 14:41:56 +0530
Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20feat(config):=20config=20option=20t?=
 =?UTF-8?q?o=20keep=20`tcp`=20connection=20alive=20for=20a=20certain=20per?=
 =?UTF-8?q?iod=20for=20subsequent=20requests=20(#548)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Added a new HTTP connection setting to the reqwest::ClientBuilder used for the requests that fetch search results from the upstream search engines.

* Provided a config option under the server section of the config to allow users to keep tcp connections alive for each request for a certain period of time.

* Update src/results/aggregator.rs

Co-authored-by: neon_arch

* Update src/results/aggregator.rs

Co-authored-by: neon_arch

* Fixed import issue in `tcp_connection_keepalive`

* Updated size to u8

Co-authored-by: neon_arch

* Fixed size error in `parser.rs`

---------

Co-authored-by: neon_arch
Co-authored-by: alamin655 <129589283+alamin655@users.noreply.github.com>
---
 src/config/parser.rs      | 3 +++
 src/results/aggregator.rs | 1 +
 websurfx/config.lua       | 1 +
 3 files changed, 5 insertions(+)

diff --git a/src/config/parser.rs b/src/config/parser.rs
index fa61ce00..5d476299 100644
--- a/src/config/parser.rs
+++ b/src/config/parser.rs
@@ -42,6 +42,8 @@ pub struct Config {
     /// It stores the level of safe search to be used for restricting content in the
     /// search results.
     pub safe_search: u8,
+    /// It stores the TCP connection keepalive duration in seconds.
+    pub tcp_connection_keepalive: u8,
 }
 
 impl Config {
@@ -131,6 +133,7 @@ impl Config {
             upstream_search_engines: globals
                 .get::<_, HashMap<String, bool>>("upstream_search_engines")?,
             request_timeout: globals.get::<_, u8>("request_timeout")?,
+            tcp_connection_keepalive: globals.get::<_, u8>("tcp_connection_keepalive")?,
             threads,
             rate_limiter: RateLimiter {
                 number_of_requests: rate_limiter["number_of_requests"],
diff --git a/src/results/aggregator.rs b/src/results/aggregator.rs
index 5244a769..d827b94a 100644
--- a/src/results/aggregator.rs
+++ b/src/results/aggregator.rs
@@ -77,6 +77,7 @@ pub async fn aggregate(
     let client = CLIENT.get_or_init(|| {
         ClientBuilder::new()
             .timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
+            .tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
             .connect_timeout(Duration::from_secs(config.request_timeout as u64)) // Add timeout to request to avoid DDOSing the server
             .https_only(true)
             .gzip(true)
diff --git a/websurfx/config.lua b/websurfx/config.lua
index 3b6c4ab9..548b4aaf 100644
--- a/websurfx/config.lua
+++ b/websurfx/config.lua
@@ -10,6 +10,7 @@ production_use = false -- whether to use production mode or not (in other words
 -- if production_use is set to true
 -- There will be a random delay before sending the request to the search engines, this is to prevent DDoSing the upstream search engines from a large number of simultaneous requests.
 request_timeout = 30 -- timeout for the search requests sent to the upstream search engines to be fetched (value in seconds).
+tcp_connection_keepalive = 30 -- the amount of time the tcp connection should remain alive (or connected to the server). (value in seconds).
 rate_limiter = {
     number_of_requests = 20, -- The number of request that are allowed within a provided time limit.
     time_limit = 3, -- The time limit in which the quantity of requests that should be accepted.
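
Reviewer note: below is a minimal, self-contained sketch of how the new `tcp_connection_keepalive` option feeds into reqwest's `ClientBuilder`, mirroring the change to `src/results/aggregator.rs` above. The `Config` struct and `build_client` helper here are simplified stand-ins for illustration, not the project's actual types.

```rust
use std::time::Duration;

use reqwest::ClientBuilder;

/// Illustrative stand-in for the project's parsed `Config`; only the two
/// fields relevant to this patch are shown.
struct Config {
    request_timeout: u8,
    tcp_connection_keepalive: u8,
}

fn build_client(config: &Config) -> reqwest::Result<reqwest::Client> {
    ClientBuilder::new()
        // Abort any upstream request that exceeds the configured timeout.
        .timeout(Duration::from_secs(config.request_timeout as u64))
        .connect_timeout(Duration::from_secs(config.request_timeout as u64))
        // Enable TCP keepalive probes with the configured interval so idle
        // connections to the upstream engines stay usable for later requests.
        .tcp_keepalive(Duration::from_secs(config.tcp_connection_keepalive as u64))
        .https_only(true)
        .build()
}

fn main() {
    // Values mirror the defaults added to websurfx/config.lua in this patch.
    let config = Config {
        request_timeout: 30,
        tcp_connection_keepalive: 30,
    };
    let client = build_client(&config).expect("failed to build HTTP client");
    let _ = client; // the aggregator caches and reuses this client across searches
}
```

One caveat worth noting: `tcp_keepalive` controls OS-level keepalive probes on pooled sockets; how long an idle connection is retained in reqwest's pool is governed separately by the pool's idle timeout settings.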