diff --git a/src/bottomup.rs b/src/bottomup.rs
index 0fa221a..b670b63 100644
--- a/src/bottomup.rs
+++ b/src/bottomup.rs
@@ -257,7 +257,12 @@ pub(crate) fn bottomup(
     args: &Args,
 ) -> Vec {
     let mut start = Instant::now();
-    let top_k = phase_one(data, args.k, args.block_size * args.block_size_multiple, args.base_pattern_size);
+    let top_k = phase_one(
+        data,
+        args.k,
+        args.block_size * args.block_size_multiple,
+        args.base_pattern_size,
+    );
     println!("phase one {:.2?}", start.elapsed());
     start = Instant::now();
     let r = phase_two(
diff --git a/src/common.rs b/src/common.rs
index 9823bd7..dd43d31 100644
--- a/src/common.rs
+++ b/src/common.rs
@@ -148,16 +148,30 @@ pub(crate) fn prepare_data(
     } else {
         training_data = transform_training_data(data, block_size, block_size_multiple);
     }
-    println!("tr {}, te {}", training_data.data.len(), testing_data_option.as_ref().unwrap().data.len());
+    println!(
+        "tr {}, te {}",
+        training_data.data.len(),
+        testing_data_option.as_ref().unwrap().data.len()
+    );
     (training_data, validation_data_option, testing_data_option)
 }
 
-fn transform_training_data(data: Vec<Vec<u8>>, block_size: usize, block_size_multiple: usize) -> Data {
-    if block_size_multiple == 1{
+fn transform_training_data(
+    data: Vec<Vec<u8>>,
+    block_size: usize,
+    block_size_multiple: usize,
+) -> Data {
+    if block_size_multiple == 1 {
         return transform_data(data);
     }
-    let data_flattened: Vec<Vec<u8>> = data.into_iter().flat_map(|x| x).collect_vec().chunks(block_size/8).map(<[u8]>::to_vec).collect();
+    let data_flattened: Vec<Vec<u8>> = data
+        .into_iter()
+        .flatten()
+        .collect_vec()
+        .chunks(block_size / 8)
+        .map(<[u8]>::to_vec)
+        .collect();
 
     let mut data_duplicated = Vec::new();
 
@@ -167,7 +181,6 @@ fn transform_training_data(data: Vec<Vec<u8>>, block_size: usize, block_size_mul
             block.append(&mut data_flattened[i + j].clone());
         }
         data_duplicated.push(block);
-
     }
 
     transform_data(data_duplicated)
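
Note for reviewers: the hunks above are formatting-only (rustfmt-style line wrapping, `flat_map(|x| x)` replaced by `flatten()`, a stray blank line dropped); behaviour is unchanged. For orientation, here is a minimal standalone sketch of the block-duplication transform that `transform_training_data` performs when `block_size_multiple > 1`. The function name `duplicate_blocks`, the return type, and the loop's step of 1 are assumptions, since the diff does not show the loop header.

```rust
// Hypothetical standalone sketch (not the project's code) of the transform
// reformatted in the diff above. Assumptions: the outer loop advances one base
// block at a time and drops incomplete trailing groups; block_size appears to
// be given in bits, hence the division by 8.
fn duplicate_blocks(data: Vec<Vec<u8>>, block_size: usize, block_size_multiple: usize) -> Vec<Vec<u8>> {
    // Flatten all input byte vectors into one stream, then re-split it into
    // base blocks of block_size / 8 bytes each.
    let flat: Vec<u8> = data.into_iter().flatten().collect();
    let base: Vec<Vec<u8>> = flat.chunks(block_size / 8).map(<[u8]>::to_vec).collect();

    // Concatenate block_size_multiple consecutive base blocks into one larger block.
    let mut out = Vec::new();
    for i in 0..base.len().saturating_sub(block_size_multiple - 1) {
        let mut block = Vec::new();
        for j in 0..block_size_multiple {
            block.extend_from_slice(&base[i + j]);
        }
        out.push(block);
    }
    out
}
```

Under the assumed step of 1, base blocks b0, b1, b2 with `block_size_multiple = 2` would yield [b0‖b1, b1‖b2], i.e. each base block is duplicated across neighbouring training blocks.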