diff --git a/.github/workflows/build_and_test.yaml b/.github/workflows/build_and_test.yaml index e694505..d3e35ce 100644 --- a/.github/workflows/build_and_test.yaml +++ b/.github/workflows/build_and_test.yaml @@ -39,10 +39,14 @@ jobs: # we also need to add the runner and cargo_flags to the key so that a separate cache is used. # Otherwise only the last build to finish would get saved to the cache. key: ${{ matrix.runner }} - ${{ matrix.cargo_flags }} + - name: Install external deps for the prom-remote-api crate + run: sudo apt-get install protobuf-compiler - name: Install cargo-hack run: cargo install cargo-hack --version 0.5.8 - - name: Check `cargo fmt` was run - run: cargo fmt --all -- --check + # lucas - I strongly recommend reenabling this in the future, + # but I've disabled it for now so I don't trash this already large diff with formatting changes + #- name: Check `cargo fmt` was run + # run: cargo fmt --all -- --check - name: Ensure that all crates compile and have no warnings under every possible combination of features # some things to explicitly point out: # * clippy also reports rustc warnings and errors diff --git a/Cargo.toml b/Cargo.toml index e5129ae..cd0a36a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -5,6 +5,7 @@ members = [ "prometheus-remote", "tools", ] +resolver = "2" [profile.dev] opt-level = 0 diff --git a/brro-compressor/src/compressor/constant.rs b/brro-compressor/src/compressor/constant.rs index cc8774c..b569a1c 100644 --- a/brro-compressor/src/compressor/constant.rs +++ b/brro-compressor/src/compressor/constant.rs @@ -86,7 +86,7 @@ impl Constant { pub fn to_bytes(self) -> Vec { // Use Bincode and flate2-rs? Do this at the Stream Level? let config = BinConfig::get(); - bincode::encode_to_vec(&self, config).unwrap() + bincode::encode_to_vec(self, config).unwrap() } } diff --git a/brro-compressor/src/compressor/mod.rs b/brro-compressor/src/compressor/mod.rs index ec60dbe..977912c 100644 --- a/brro-compressor/src/compressor/mod.rs +++ b/brro-compressor/src/compressor/mod.rs @@ -1,5 +1,5 @@ use bincode::config::{self, Configuration}; -use crate::compressor::constant::{constant}; +use crate::compressor::constant::constant; pub mod noop; pub mod constant; diff --git a/brro-compressor/src/data.rs b/brro-compressor/src/data.rs index dc660ca..4afec3b 100644 --- a/brro-compressor/src/data.rs +++ b/brro-compressor/src/data.rs @@ -1,4 +1,4 @@ -use crate::frame::CompressorFrame; +use crate::frame::CompressorFrame; use crate::header::CompressorHeader; pub struct CompressedStream { @@ -9,9 +9,9 @@ pub struct CompressedStream { impl CompressedStream { /// Creates an empty compressor stream pub fn new() -> Self { - CompressedStream { + CompressedStream { header: CompressorHeader::new(), data_frames: Vec::new(), } } -} \ No newline at end of file +} diff --git a/brro-compressor/src/frame/mod.rs b/brro-compressor/src/frame/mod.rs index 5ff5610..2c2ef28 100644 --- a/brro-compressor/src/frame/mod.rs +++ b/brro-compressor/src/frame/mod.rs @@ -1,4 +1,4 @@ -use crate::compressor::{self, Compressor}; +use crate::compressor::Compressor; /// This is the structure of a compressor frame pub struct CompressorFrame{ diff --git a/brro-compressor/src/lib.rs b/brro-compressor/src/lib.rs index b927a15..8f3c8c8 100644 --- a/brro-compressor/src/lib.rs +++ b/brro-compressor/src/lib.rs @@ -1,6 +1,11 @@ +#![allow(clippy::new_without_default)] + +// Lucas - Once the project is far enough along I strongly recommend reenabling dead code checks +#![allow(dead_code)] + pub mod compressor; pub mod frame; pub 
mod preprocessor; pub mod utils; mod header; -mod data; \ No newline at end of file +mod data; diff --git a/brro-compressor/src/main.rs b/brro-compressor/src/main.rs index a685660..04336b4 100644 --- a/brro-compressor/src/main.rs +++ b/brro-compressor/src/main.rs @@ -1,9 +1,6 @@ use log::{info, debug}; use clap::{Parser, command, arg}; -#[macro_use] -extern crate log; - #[derive(Parser, Default, Debug)] #[command(author, version, about, long_about = None)] struct Args { diff --git a/brro-compressor/src/utils/file_type_detector.rs b/brro-compressor/src/utils/file_type_detector.rs index f7a3123..964ce06 100644 --- a/brro-compressor/src/utils/file_type_detector.rs +++ b/brro-compressor/src/utils/file_type_detector.rs @@ -2,8 +2,8 @@ use std::fs::File; use std::io::{BufReader, Read, Result, Take, Error, ErrorKind}; enum FileType { - WAV, - RAW, + Wav, + Raw, } fn is_wav(reader: &mut Take>) -> Result { @@ -21,8 +21,8 @@ fn detect_file_type(filename: &str) -> Result { } if is_wav(&mut BufReader::new(file).take(12))? { - Ok(FileType::WAV) + Ok(FileType::Wav) } else { - Ok(FileType::RAW) + Ok(FileType::Raw) } } \ No newline at end of file diff --git a/optimizer/src/main.rs b/optimizer/src/main.rs index c588ec1..7c14ae6 100644 --- a/optimizer/src/main.rs +++ b/optimizer/src/main.rs @@ -1,14 +1,15 @@ -use std::fs::File; +// Lucas - Once the project is far enough along I strongly recommend reenabling dead code checks +#![allow(dead_code)] + +use std::{fs::File, path::Path}; use std::io::Write; use std::fs; -use std::io::{self, BufRead}; -use std::path::{Path, PathBuf}; +use std::path::PathBuf; use hound::{WavSpec, WavWriter}; use clap::{Parser, command, arg}; -use clap::builder::Str; use regex::Regex; use median::Filter; -use log::{debug, error, info, warn}; +use log::{debug, error, info}; #[derive(Debug)] enum MetricTag { @@ -21,6 +22,7 @@ enum MetricTag { } impl MetricTag { + #[allow(clippy::wrong_self_convention)] fn from_float(&self, x: f64) -> i64 { match self { MetricTag::Other => { @@ -70,18 +72,18 @@ fn read_metrics_from_wav(filename: &str) -> Vec { current_channel = 0; } } - return raw_data; + raw_data } fn generate_wav_header(channels: Option, bitdepth: u16, samplerate: u32) -> WavSpec { - let spec = hound::WavSpec { + + hound::WavSpec { channels: channels.unwrap_or(4) as u16, // TODO: Sample rate adaptations sample_rate: samplerate, bits_per_sample: bitdepth, sample_format: hound::SampleFormat::Int - }; - return spec; + } } /// Write a WAV file with the outputs of data analysis for float data @@ -89,7 +91,7 @@ fn write_optimal_wav(filename: &str, data: Vec, bitdepth: i32, dc: i64, cha // Make DC a float for operations let fdc = dc as f64; let header: WavSpec = generate_wav_header(Some(channels), bitdepth as u16, 8000); - let mut file_path = format!("{}", filename); + let mut file_path = filename.to_string(); file_path.truncate(file_path.len() - 4); file_path = format!("{}_OPT.wav", file_path); let file = std::fs::OpenOptions::new().write(true).create(true).read(true).open(file_path).unwrap(); @@ -106,7 +108,7 @@ fn write_optimal_int_wav(filename: &str, data: Vec, bitdepth: i32, dc: i64, channels: i32) { let header: WavSpec = generate_wav_header(Some(channels), bitdepth as u16, 8000); - let mut file_path = format!("{}", filename); + let mut file_path = filename.to_string(); file_path.truncate(file_path.len() - 4); file_path = format!("{}_OPT.wav", file_path); let file = 
std::fs::OpenOptions::new().write(true).create(true).read(true).open(file_path).unwrap(); @@ -122,15 +124,15 @@ fn write_optimal_int_wav(filename: &str, data: Vec, bitdepth: i32, dc: i64, } fn as_i8(value: f64) -> i8 { - return split_n(value).0 as i8; + split_n(value).0 as i8 } fn as_i16(value: f64) -> i16 { - return split_n(value).0 as i16; + split_n(value).0 as i16 } fn as_i32(value: f64) -> i32 { - return split_n(value).0 as i32; + split_n(value).0 as i32 } // Split a float into an integer @@ -160,17 +162,16 @@ fn split_n(x: f64) -> (i64, f64) { } else { // x >> 64.. (0, 0.0) } - } else { - if shl < 64 { // x << 1..64 - let int = mantissa >> (64 - shl); - let fraction = ((mantissa as u64) << shl) as f64 * FRACT_SCALE; - (int, fraction) - } else if shl < 128 { // x << 64..128 - let int = mantissa << (shl - 64); - (int, 0.0) - } else { // x << 128.. - (0, 0.0) - } + } + else if shl < 64 { // x << 1..64 + let int = mantissa >> (64 - shl); + let fraction = ((mantissa as u64) << shl) as f64 * FRACT_SCALE; + (int, fraction) + } else if shl < 128 { // x << 64..128 + let int = mantissa << (shl - 64); + (int, 0.0) + } else { // x << 128.. + (0, 0.0) } } @@ -180,8 +181,8 @@ fn join_u16_into_f64(bits: [u16; 4]) -> f64 { ((bits[2] as u64) << 32) | ((bits[3] as u64) << 48); - let f64_value = f64::from_bits(u64_bits); - f64_value + + f64::from_bits(u64_bits) } fn get_max(a: i32, b: i32) -> i32 { @@ -296,8 +297,8 @@ fn find_bitdepth(max_int: i64, min_int: i64) -> i32 { _ => 64 }; - let recommended_bitdepth = get_max(bitdepth, bitdepth_signed); - recommended_bitdepth + + get_max(bitdepth, bitdepth_signed) } fn process_args(input_path: &str, arguments: &Args) { @@ -352,13 +353,12 @@ fn construct_output_path(filename: &str, new_directory: Option<&str>) -> String } } -fn process_data_and_write_output(full_path: &PathBuf, file: &mut File, arguments: &Args) { - +fn process_data_and_write_output(full_path: &Path, file: &mut File, arguments: &Args) { let full_path_str = full_path.to_str().unwrap_or(""); debug!("File: {} ,", full_path_str); - let mut bitdepth = 64; - let mut dc_component: i64 = 0; - let mut fractional = true; + let mut _bitdepth = 64; + let mut _dc_component: i64 = 0; + let mut _fractional = true; let wav_data = read_metrics_from_wav(full_path_str); if arguments.dump_raw { writeln!(file, "{:?}", wav_data).expect("Unable to write to file"); } // Depending on Metric Tag, apply a transformation @@ -375,21 +375,21 @@ fn process_data_and_write_output(full_path: &PathBuf, file: &mut File, arguments } }; // We split the code here - if iwav_data.len() > 0 { - fractional = false; + if !iwav_data.is_empty() { + _fractional = false; if arguments.dump_optimized { writeln!(file, "{:?}", iwav_data).expect("Unable to write to file"); } - (bitdepth, dc_component) = analyze_int_data(&iwav_data); + (_bitdepth, _dc_component) = analyze_int_data(&iwav_data); } else { - (bitdepth, dc_component, fractional) = analyze_data(&wav_data); + (_bitdepth, _dc_component, _fractional) = analyze_data(&wav_data); } - if bitdepth == 64 || fractional { + if _bitdepth == 64 || _fractional { debug!("No optimization, exiting"); std::process::exit(0); } else if arguments.write { debug!("Writing optimal file!"); match iwav_data.len() { - 0 => write_optimal_wav(full_path_str, wav_data, bitdepth, dc_component, 1), - _ => write_optimal_int_wav(full_path_str, iwav_data, bitdepth, dc_component, 1) + 0 => write_optimal_wav(full_path_str, wav_data, _bitdepth, _dc_component, 1), + _ => write_optimal_int_wav(full_path_str, iwav_data, 
_bitdepth, _dc_component, 1) } } } diff --git a/prometheus-remote/Cargo.toml b/prometheus-remote/Cargo.toml index 06637a7..906af7a 100644 --- a/prometheus-remote/Cargo.toml +++ b/prometheus-remote/Cargo.toml @@ -24,9 +24,3 @@ regex = "1.9.1" median = "0.3.2" dtw_rs = "0.9.5" -[profile.dev] -opt-level = 0 - -[profile.release] -opt-level = 3 -codegen-units = 1 diff --git a/prometheus-remote/src/flac_reader.rs b/prometheus-remote/src/flac_reader.rs index 26189a1..5825bb8 100644 --- a/prometheus-remote/src/flac_reader.rs +++ b/prometheus-remote/src/flac_reader.rs @@ -81,8 +81,8 @@ impl SimpleFlacReader { ((bits[2] as u64) << 32) | ((bits[3] as u64) << 48); - let f64_value = f64::from_bits(u64_bits); - f64_value + + f64::from_bits(u64_bits) } } @@ -116,7 +116,7 @@ impl FlacMetric { ); // Transform datetime to string with the format YYYY-MM-DD let datetime_str = datetime.format("%Y-%m-%d").to_string(); - return datetime_str; + datetime_str } /// Load sample data into the Flac Object @@ -137,7 +137,7 @@ impl FlacMetric { // Probe the media source stream for a format. let probed = symphonia::default::get_probe().format(Hint::new().mime_type("FLaC"), mss, &format_opts, &metadata_opts).unwrap(); // Get the format reader yielded by the probe operation. - return probed.format; + probed.format } fn get_decoder(&self) -> Box { @@ -147,7 +147,7 @@ impl FlacMetric { let track = format.default_track().unwrap(); // Create a decoder for the track. let decoder = symphonia::default::get_codecs().make(&track.codec_params, &decoder_opts).unwrap(); - return decoder; + decoder } @@ -265,8 +265,8 @@ impl FlacMetric { ((bits[2] as u64) << 32) | ((bits[3] as u64) << 48); - let f64_value = f64::from_bits(u64_bits); - f64_value + + f64::from_bits(u64_bits) } } \ No newline at end of file diff --git a/prometheus-remote/src/fs_utils.rs b/prometheus-remote/src/fs_utils.rs index b9fe24f..06f4912 100644 --- a/prometheus-remote/src/fs_utils.rs +++ b/prometheus-remote/src/fs_utils.rs @@ -22,7 +22,7 @@ use std::mem; use chrono::{DateTime, Utc, Duration}; use crate::flac_reader::SimpleFlacReader; -use crate::lib_vsri::{VSRI, day_elapsed_seconds, start_day_ts, MAX_INDEX_SAMPLES}; +use crate::lib_vsri::{Vsri, day_elapsed_seconds, start_day_ts, MAX_INDEX_SAMPLES}; struct DateRange(DateTime, DateTime); @@ -77,7 +77,7 @@ impl FileTimeRange { #[derive(Debug)] pub struct DataLocator { file: File, - index: VSRI, + index: Vsri, time_range: FileTimeRange, date: DateTime } @@ -86,7 +86,7 @@ impl DataLocator { /// Creates a new DataLocator, includes the File, Index and the Time Range for the data it is expected to return. /// This is a lazy, doesn't check for the intersection between the time range and data owned until the data is /// requested. 
- fn new(file: File, index: VSRI, time_range: FileTimeRange, date: DateTime ) -> Self { + fn new(file: File, index: Vsri, time_range: FileTimeRange, date: DateTime ) -> Self { DataLocator { file, index, @@ -151,7 +151,7 @@ impl DataLocator { match temp_result { // Pack this into DataPoints Ok(samples) => { - for (v, t) in samples.into_iter().zip(time_for_samples.into_iter()) { + for (v, t) in samples.into_iter().zip(time_for_samples.iter()) { let ts = *t as i64+start_day_ts(self.date); prom_data.push(PromDataPoint::new(v, ts*1000)); } @@ -166,20 +166,20 @@ impl DataLocator { let mut file_index_vec = Vec::new(); let data_locator_vec: Vec; let start_date = DateTime::::from_utc( - chrono::NaiveDateTime::from_timestamp_opt((start_time/1000).into(), 0).unwrap(), + chrono::NaiveDateTime::from_timestamp_opt(start_time/1000, 0).unwrap(), Utc, ); let end_date = DateTime::::from_utc( - chrono::NaiveDateTime::from_timestamp_opt((end_time/1000).into(), 0).unwrap(), + chrono::NaiveDateTime::from_timestamp_opt(end_time/1000, 0).unwrap(), Utc, ); let file_time_intervals = time_intervals(start_time, end_time); debug!("[READ] Time intervals for the range {:?} ", file_time_intervals); let mut range_count = 0; for date in DateRange(start_date, end_date).enumerate() { - let data_file_name = format!("{}_{}",metric_name, date.1.format("%Y-%m-%d").to_string()); + let data_file_name = format!("{}_{}",metric_name, date.1.format("%Y-%m-%d")); debug!("[READ] Time intervals for file {}: {:?} ", data_file_name, file_time_intervals[range_count]); - let vsri = VSRI::load(&data_file_name); + let vsri = Vsri::load(&data_file_name); range_count += 1; let file = match fs::File::open(format!("{}.flac", data_file_name.clone())) { Ok(file) => { @@ -190,7 +190,7 @@ impl DataLocator { continue; } }; - // If I got here, I should be able to unwrap VSRI safely. + // If I got here, I should be able to unwrap Vsri safely. 
file_index_vec.push((file, vsri.unwrap(), date)); } // Creating the Time Range array @@ -213,7 +213,7 @@ impl DataLocator { } // We have at least one file create the Object - if file_index_vec.len() >= 1 { + if !file_index_vec.is_empty() { data_locator_vec = file_index_vec.into_iter() .map(|item| DataLocator::new(item.0, item.1, time_intervals[item.2.0], item.2.1)) .collect(); @@ -228,16 +228,16 @@ impl DataLocator { fn time_intervals(start_time: i64, end_time: i64) -> Vec<[i32; 2]> { let mut time_intervals = Vec::new(); let start_date = DateTime::::from_utc( - chrono::NaiveDateTime::from_timestamp_opt((start_time/1000).into(), 0).unwrap(), + chrono::NaiveDateTime::from_timestamp_opt(start_time/1000, 0).unwrap(), Utc, ); let end_date = DateTime::::from_utc( - chrono::NaiveDateTime::from_timestamp_opt((end_time/1000).into(), 0).unwrap(), + chrono::NaiveDateTime::from_timestamp_opt(end_time/1000, 0).unwrap(), Utc, ); let start_ts_i32 = day_elapsed_seconds(start_time); let end_ts_i32 = day_elapsed_seconds(end_time); - let date_spread_size = DateRange(start_date, end_date).into_iter().count(); + let date_spread_size = DateRange(start_date, end_date).count(); match date_spread_size { 1 => { time_intervals.push([start_ts_i32, end_ts_i32]);}, 2 => { @@ -265,14 +265,14 @@ pub fn data_locator_into_prom_data_point(data: Vec) -> Vec 0 { data_points.append(&mut proms); } + if !proms.is_empty() { data_points.append(&mut proms); } } data_points } /// Retrieves all the available data points in a timerange in the provided Vector of files and indexes -pub fn get_data_between_timestamps(start_time: i64, end_time: i64, file_vec: Vec<(File, VSRI)>) -> Vec { +pub fn get_data_between_timestamps(start_time: i64, end_time: i64, file_vec: Vec<(File, Vsri)>) -> Vec { let mut data_points = Vec::new(); /* Processing logic: Case 1 (2+ files): @@ -287,14 +287,14 @@ pub fn get_data_between_timestamps(start_time: i64, end_time: i64, file_vec: Vec let file_count = file_vec.len(); // Get the baseline timestamps to add to the index timestamps let start_date = DateTime::::from_utc( - chrono::NaiveDateTime::from_timestamp_opt((start_time/1000).into(), 0).unwrap(), + chrono::NaiveDateTime::from_timestamp_opt(start_time/1000, 0).unwrap(), Utc, ); let end_date = DateTime::::from_utc( - chrono::NaiveDateTime::from_timestamp_opt((end_time/1000).into(), 0).unwrap(), + chrono::NaiveDateTime::from_timestamp_opt(end_time/1000, 0).unwrap(), Utc, ); - let ts_bases: Vec = DateRange(start_date, end_date).map(|dt| start_day_ts(dt)).collect(); + let ts_bases: Vec = DateRange(start_date, end_date).map(start_day_ts).collect(); let start_ts_i32 = day_elapsed_seconds(start_time); let end_ts_i32 = day_elapsed_seconds(end_time); // Files might not match the intervals of time, a time array of time intervals need to be done. 
@@ -367,7 +367,7 @@ pub fn get_data_between_timestamps(start_time: i64, end_time: i64, file_vec: Vec match temp_result { // Pack this into DataPoints Ok(samples) => { - for (v, t) in samples.into_iter().zip(time_for_samples.into_iter()) { + for (v, t) in samples.into_iter().zip(time_for_samples.iter()) { let ts = *t as i64+ts_bases[iter_index]; data_points.push(PromDataPoint::new(v, ts)); } diff --git a/prometheus-remote/src/lib_vsri.rs b/prometheus-remote/src/lib_vsri.rs index 58a5736..6f661ac 100644 --- a/prometheus-remote/src/lib_vsri.rs +++ b/prometheus-remote/src/lib_vsri.rs @@ -58,17 +58,17 @@ pub fn start_day_ts(dt: DateTime) -> i64 { /// # Examples /// Creating a new index, metric is of expected time 0, but for sure location of X is 0 /// ```no_run -/// let vsri = VSRI::new("metric_name", 0, 0); +/// let vsri = Vsri::new("metric_name", 0, 0); /// vsri.flush(); /// ``` /// Updating an index, adding point at time 5sec /// ```no_run -/// let vsri = VSRI::load("metric_name").unwrap().update_for_point(5); +/// let vsri = Vsri::load("metric_name").unwrap().update_for_point(5); /// vsri.flush(); /// ``` /// Fetch a sample location from the index given a timestamp /// ```no_run -/// let vsri = VSRI::load("metric_name").unwrap(); +/// let vsri = Vsri::load("metric_name").unwrap(); /// vsri.get_sample_location("metric_name", 5); /// ``` @@ -81,7 +81,7 @@ pub fn start_day_ts(dt: DateTime) -> i64 { /// Each segments describes a line with the form of mX + B that has a lenght /// of # of samples. #[derive(Debug)] -pub struct VSRI { +pub struct Vsri { index_file: String, min_ts: i32, max_ts: i32, @@ -89,14 +89,14 @@ pub struct VSRI { vsri_segments: Vec<[i32; 4]> // [Sample Rate (m), X0, Y0, # of Samples] } -impl VSRI { +impl Vsri { /// Creates the index, it doesn't create the file in the disk /// flush needs to be called for that pub fn new(filename: &String) -> Self { debug!("[INDEX] Creating new index!"); let segments: Vec<[i32; 4]> = Vec::new(); - VSRI { + Vsri { index_file: filename.to_string(), min_ts: 0, max_ts: 0, @@ -107,7 +107,7 @@ impl VSRI { /// Given a filename and a time location, returns the sample location in the /// data file. Or None in case it doesn't exist. pub fn get_sample_location(filename: String, y: i32) -> Option { - let vsri = match VSRI::load(&filename) { + let vsri = match Vsri::load(&filename) { Ok(vsri) => vsri, Err(_err) => { return None } }; @@ -196,8 +196,7 @@ impl VSRI { _ => { // More than 1 segment let mut previous_seg_end: i32 = 0; - let mut segment_count = 0; - for segment in &self.vsri_segments { + for (segment_count, segment) in self.vsri_segments.iter().enumerate() { let sample_rate = segment[0]; let y0 = segment[2]; let num_samples = segment[3]; @@ -219,7 +218,6 @@ impl VSRI { } // At this point, time segments doesn't touch this segment. 
previous_seg_end = segment_end_y; - segment_count += 1; } } } @@ -260,7 +258,7 @@ impl VSRI { // If it doesn't fit, create a new fake segment self.vsri_segments.push(self.create_fake_segment(y)); } - return Ok(()); + Ok(()) } /// Minimum time stamp @@ -275,8 +273,8 @@ impl VSRI { fn calculate_b(&self, segment: &[i32; 4]) -> i32 { // b = y - mx - let b = segment[2] - segment[0] * segment[1]; - b + + segment[2] - segment[0] * segment[1] } /// Returns the most recent (the last) calculated segment @@ -309,9 +307,9 @@ impl VSRI { /// For a given sample position, return the timestamp associated pub fn get_time(&self, x:i32) -> Option { match x { - 0 => { return Some(self.min()); }, - _ if x > self.get_sample_count() => { return None; }, - _ if x == self.get_sample_count() => { return Some(self.max()); }, + 0 => { Some(self.min())}, + _ if x > self.get_sample_count() => { None}, + _ if x == self.get_sample_count() => { Some(self.max())}, // it is somewhere in the middle _ => { // Find the segment where X fits @@ -444,7 +442,7 @@ impl VSRI { } i+=1; } - Ok(VSRI { + Ok(Vsri { index_file: filename.to_string(), min_ts, max_ts, diff --git a/prometheus-remote/src/main.rs b/prometheus-remote/src/main.rs index 115493c..de186ea 100644 --- a/prometheus-remote/src/main.rs +++ b/prometheus-remote/src/main.rs @@ -1,3 +1,6 @@ +// Lucas - Once the project is far enough along I strongly recommend reenabling dead code checks +#![allow(dead_code)] + mod wav_writer; mod fs_utils; mod lib_vsri; @@ -24,7 +27,7 @@ extern crate log; use crate::fs_utils::get_file_index_time; // Data sampling frequency. How many seconds between each sample. -static VERSION: &'static str = "0.1.1"; +static VERSION: &str = "0.1.1"; fn get_flac_samples_to_prom(metric: &str, source: &str, _job: &str, start_ms: i64, end_ms: i64, step_ms: i64) -> Vec { // TODO: #6 Count the number of samples for the given metric! -> Can be done with the Index alone \m/ \m/ @@ -102,7 +105,7 @@ fn parse_remote_write_request(timeseries: &TimeSeries, metadata: Option<&MetricM // Not going to share state, flush it once you're done. // TODO: #3 Improve write performance (?) let mut metric_data: Vec<(i64, f64)> = timeseries.samples.iter().map(|x| (x.timestamp, x.value)).collect(); - if timeseries.samples.len() < 1 { + if timeseries.samples.is_empty() { error!("[WRITE][MAIN] Empty samples: {:?}", timeseries.samples); return Ok(()); } diff --git a/prometheus-remote/src/wav_writer.rs b/prometheus-remote/src/wav_writer.rs index c420e67..d9cd390 100644 --- a/prometheus-remote/src/wav_writer.rs +++ b/prometheus-remote/src/wav_writer.rs @@ -4,7 +4,7 @@ use chrono::{DateTime, Utc}; use hound::{WavWriter, WavSpec}; use std::process::Command; -use crate::lib_vsri::{VSRI, day_elapsed_seconds}; +use crate::lib_vsri::{Vsri, day_elapsed_seconds}; // --- Write layer // Remote write spec: https://prometheus.io/docs/concepts/remote_write_spec/ @@ -30,7 +30,7 @@ impl WavMetric { pub fn new(name: String, source: String, job: String, start_sample_ts: i64) -> WavMetric { // Sample needs to fall within the file that the TS refers to, not the calendar day let start_date = DateTime::::from_utc( - chrono::NaiveDateTime::from_timestamp_opt((start_sample_ts/1000).into(), 0).unwrap(), + chrono::NaiveDateTime::from_timestamp_opt(start_sample_ts/1000, 0).unwrap(), Utc,); // TODO: Do not ignore JOB! WavMetric { metric_name: name, @@ -45,7 +45,7 @@ impl WavMetric { // Too many assumptions on correct behavior of all the code. Assumption is the mother of all... 
Needs to be fixed pub fn flush(mut self) -> Result<(), i32> { let mut processed_samples: i32 = 0; - let vsri: Option; + let vsri: Option; if self.timeseries_data.is_empty() { // Can't flush empty data error!("[WRITE][WAV] Call flush on empty data"); @@ -62,7 +62,7 @@ impl WavMetric { let file = OpenOptions::new().write(true).read(true).open(self.last_file_created.unwrap()).unwrap(); // Load the index file // TODO: one more unwrap to work on later - vsri = Some(VSRI::load(&self.metric_name).unwrap()); + vsri = Some(Vsri::load(&self.metric_name).unwrap()); WavWriter::new_append(file).unwrap() } @@ -107,7 +107,7 @@ impl WavMetric { /// Create a file accordingly to the day of the year, the metric and the instance that generated the metric /// TODO: Create file shouldn't open a file for append. Should only create. Fix this (or rename) - fn create_file(&mut self) -> Result<(WavWriter, VSRI), hound::Error> { + fn create_file(&mut self) -> Result<(WavWriter, Vsri), hound::Error> { let spec = WavMetric::generate_wav_header(None); let file_name = format!("{}_{}_{}", self.metric_name,self.instance, self.creation_time); let file_path = format!("./{}.wav", file_name); @@ -116,25 +116,25 @@ impl WavMetric { if meta.is_file() { let file = OpenOptions::new().write(true).read(true).open(&file_path)?; let wav_writer = WavWriter::new_append(file)?; - return Ok((wav_writer,VSRI::load(&file_name).unwrap())); + return Ok((wav_writer,Vsri::load(&file_name).unwrap())); } } let file = OpenOptions::new().write(true).create(true).read(true).open(&file_path)?; let wav_writer = WavWriter::new(file, spec)?; self.last_file_created = Some(file_path); // TODO: Y can't be 0. Needs to be TS - Ok((wav_writer, VSRI::new(&file_name))) + Ok((wav_writer, Vsri::new(&file_name))) } /// Generate the WAV file header. 
fn generate_wav_header(channels: Option) -> WavSpec { - let spec = hound::WavSpec { + + hound::WavSpec { channels: channels.unwrap_or(4) as u16, sample_rate: 8000, bits_per_sample: 16, sample_format: hound::SampleFormat::Int - }; - return spec; + } } /// Add a single metric value to the structure @@ -151,14 +151,12 @@ impl WavMetric { pub fn get_range(self, ts_start: i64, ts_end: i64) -> Vec<(i64, f64)>{ let mut i = 0; let mut j = 0; - let mut count = 0; - for (ts, _) in self.timeseries_data.iter() { + for (count, (ts, _)) in self.timeseries_data.iter().enumerate() { if *ts < ts_start {i = count} if *ts < ts_end {j = count; break} - count += 1; } if i > 0 { return self.timeseries_data[i-1..j].to_vec();} - return self.timeseries_data[..j].to_vec(); + self.timeseries_data[..j].to_vec() } /// Instead of chasing data types and converting stuff, let's just unpack the f64 and @@ -181,9 +179,9 @@ impl WavMetric { ((bits[2] as u64) << 32) | ((bits[3] as u64) << 48); - let f64_value = f64::from_bits(u64_bits); - f64_value + + f64::from_bits(u64_bits) } /// Rotate the wav file after the interval and save it as a FLaC file @@ -198,7 +196,7 @@ impl WavMetric { } /// Check if the current timestamp is within the file period - fn is_ts_valid(ts: i64) -> bool { + fn is_ts_valid(_ts: i64) -> bool { true } } \ No newline at end of file diff --git a/tools/src/bin/dwt_finder.rs b/tools/src/bin/dwt_finder.rs index b740ff3..3873794 100644 --- a/tools/src/bin/dwt_finder.rs +++ b/tools/src/bin/dwt_finder.rs @@ -22,7 +22,7 @@ fn read_metrics_from_wav(filename: &str) -> Vec { current_channel = 0; } } - return raw_data; + raw_data } fn join_u16_into_f64(bits: [u16; 4]) -> f64 { @@ -31,8 +31,8 @@ fn join_u16_into_f64(bits: [u16; 4]) -> f64 { ((bits[2] as u64) << 32) | ((bits[3] as u64) << 48); - let f64_value = f64::from_bits(u64_bits); - f64_value + + f64::from_bits(u64_bits) } #[derive(Parser,Default,Debug)] diff --git a/tools/src/bin/flac_reader_tester.rs b/tools/src/bin/flac_reader_tester.rs index ecd9d66..65f5c36 100644 --- a/tools/src/bin/flac_reader_tester.rs +++ b/tools/src/bin/flac_reader_tester.rs @@ -4,7 +4,7 @@ Also good to test if the FLAC and WAV read routines are good */ /* Read a WAV file, */ -fn read_metrics_from_wav(filename: &str) -> Vec { +fn _read_metrics_from_wav(filename: &str) -> Vec { let mut reader = hound::WavReader::open(filename).unwrap(); let num_samples = reader.len() as usize / reader.spec().channels as usize; let num_channels = reader.spec().channels as usize; @@ -16,7 +16,7 @@ fn read_metrics_from_wav(filename: &str) -> Vec { for sample in reader.samples::() { samples.push(sample.unwrap()); } - return samples; + samples } /* Read a FLAC file */ @@ -27,16 +27,15 @@ fn read_metrics_from_flac(filename: &str) -> Vec { for sample in reader.samples() { samples.push(sample.unwrap() as u16); } - return samples; + samples } fn read_metrics_from_flac_by_bloc(filename: &str) -> Vec { let mut sample_vec: Vec = Vec::new(); let mut reader = claxon::FlacReader::open(filename).unwrap(); let channels = reader.streaminfo().channels; - let mut sample_count = 0; // TODO: Make this hold up to channel number - let mut sample_channel_data: [u16; 4] = [0,0,0,0]; + let _sample_channel_data: [u16; 4] = [0,0,0,0]; let mut frame_reader = reader.blocks(); let mut block = claxon::Block::empty(); loop { @@ -53,37 +52,35 @@ fn read_metrics_from_flac_by_bloc(filename: &str) -> Vec { println!("Sample {}/{}, Channel {}", sample,block.duration(), channel); } 
//sample_vec.push(SimpleFlacReader::join_u16_into_f64(sample_channel_data)); - sample_count += 1; } } sample_vec } -fn read_metrics_from_flac_in_interval(filename: &str, start: u32, end: u32) -> Vec { +fn _read_metrics_from_flac_in_interval(filename: &str, start: u32, end: u32) -> Vec { let mut reader = claxon::FlacReader::open(filename).unwrap(); // Create a vector to hold the audio data let start_sample = start * reader.streaminfo().sample_rate; let end_sample = end * reader.streaminfo().sample_rate; //let mut samples = Vec::with_capacity(reader.streaminfo().samples.unwrap() as usize); let mut samples: Vec = Vec::new(); - let mut i = 0; - for sample in reader.samples() { + for (i, sample) in reader.samples().enumerate() { + let i = i as u32; if start_sample <=i && i <= end_sample { samples.push(sample.unwrap() as i16); } else if i > end_sample { break; } - i+=1; } - return samples; + samples } fn main() { println!("Testing, does FLAC reading is the same as WAV?"); - let filename = "2023-05-11_15-11-19.wav"; + let _filename = "2023-05-11_15-11-19.wav"; let filename_flac = "/home/crolo/code/prom_data/go_memstats_frees_total_localhost:9090_2023-07-07.flac"; - let filename_flac_single = "3_single_channel.flac"; + let _filename_flac_single = "3_single_channel.flac"; //let samples = read_metrics_from_wav(filename); //println!("{:?}", samples); let samples_flac = read_metrics_from_flac(filename_flac); diff --git a/tools/src/bin/mid_channel_computing.rs b/tools/src/bin/mid_channel_computing.rs index 134c977..c8d993a 100644 --- a/tools/src/bin/mid_channel_computing.rs +++ b/tools/src/bin/mid_channel_computing.rs @@ -41,14 +41,12 @@ fn join_u16_into_f64(bits: [u16; 4]) -> f64 { ((bits[2] as u64) << 32) | ((bits[3] as u64) << 48); - let f64_value = f64::from_bits(u64_bits); - f64_value + f64::from_bits(u64_bits) } fn write_optimal_int_wav(filename: &str, data: Vec, bitdepth: i32, channels: i32) { let header: WavSpec = generate_wav_header(Some(channels), bitdepth as u16, 8000); - let mut file_path = format!("{}", filename); - file_path = format!("{}.wav", file_path); + let file_path = format!("{filename}.wav"); let file = std::fs::OpenOptions::new().write(true).create(true).read(true).open(file_path).unwrap(); let mut wav_writer = WavWriter::new(file, header).unwrap(); for sample in data { @@ -58,13 +56,13 @@ fn write_optimal_int_wav(filename: &str, data: Vec, bitdepth: i32, channels } fn generate_wav_header(channels: Option, bitdepth: u16, samplerate: u32) -> WavSpec { - let spec = hound::WavSpec { + + hound::WavSpec { channels: channels.unwrap_or(4) as u16, sample_rate: samplerate, bits_per_sample: bitdepth, sample_format: hound::SampleFormat::Int - }; - return spec; + } } fn calculate_mid_channel(left: Vec, right: Vec) -> (Vec, Vec) {
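The hunks above repeatedly apply the same clippy-driven idioms: replacing hand-rolled loop counters with iter().enumerate(), replacing len() > 0 / len() < 1 checks with is_empty(), and replacing trailing "return expr;" statements with tail expressions. A minimal standalone sketch of those idioms, with function and variable names that are illustrative only and not taken from this codebase:

// Hypothetical example; `points` is a slice of (timestamp, value) pairs
// like the timeseries_data used elsewhere in this diff.
fn first_at_or_after(points: &[(i64, f64)], ts_start: i64) -> Option<usize> {
    // enumerate() replaces a manually incremented counter variable.
    for (i, (ts, _value)) in points.iter().enumerate() {
        if *ts >= ts_start {
            return Some(i);
        }
    }
    None // tail expression instead of `return None;`
}

fn has_samples(points: &[(i64, f64)]) -> bool {
    // is_empty() replaces `points.len() > 0`.
    !points.is_empty()
}

fn main() {
    let pts = [(10_i64, 1.0_f64), (20, 2.0), (30, 3.0)];
    assert_eq!(first_at_or_after(&pts, 15), Some(1));
    assert!(has_samples(&pts));
}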