
re-enable most unit tests again
rfuzzo committed Mar 10, 2024
1 parent eaf03db commit 0477618
Showing 13 changed files with 191,970 additions and 238 deletions.
92,671 changes: 92,671 additions & 0 deletions base_rules.json

Large diffs are not rendered by default.

24,614 changes: 24,614 additions & 0 deletions base_rules_order.json

Large diffs are not rendered by default.

44 changes: 31 additions & 13 deletions src/parser.rs
@@ -194,28 +194,46 @@ impl Parser {
// lowercase all
let mut line = line.to_lowercase();

// trim inline comments
line = if let Some(index) = line.find(';') {
line[..index].trim_end().to_owned()
} else {
line.trim_end().to_owned()
};

if chunk.is_some() && line.trim().is_empty() {
// end chunk
// we skip empty lines
if line.trim().is_empty() {
continue;
}

fn new_rule(line: &str) -> bool {
// check if a new rule has started by matching the first chars against the rule names
line.starts_with("[order")
|| line.starts_with("[nearstart")
|| line.starts_with("[nearend")
|| line.starts_with("[note")
|| line.starts_with("[conflict")
|| line.starts_with("[requires")
|| line.starts_with("[patch")
}

// we are inside a chunk
if chunk.is_some() && new_rule(&line) {
// end current chunk
if let Some(chunk) = chunk.take() {
chunks.push(chunk);
}
} else if !line.trim().is_empty() {
// read to chunk, preserving newline delimiters
let delimited_line = line + "\n";
if let Some(chunk) = &mut chunk {
chunk.data.extend(delimited_line.as_bytes());
} else {
chunk = Some(ChunkWrapper::new(
delimited_line.as_bytes().to_vec(),
(idx + 1).to_string(),
));
}
}

// read to current chunk, preserving newline delimiters
let delimited_line = line + "\n";
if let Some(chunk) = &mut chunk {
chunk.data.extend(delimited_line.as_bytes());
} else {
chunk = Some(ChunkWrapper::new(
delimited_line.as_bytes().to_vec(),
(idx + 1).to_string(),
));
}
}
// parse last chunk
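
For context, the parser now splits a rules file into chunks by rule header instead of by blank line: empty lines are skipped outright, and the current chunk only ends when the next rule header ([order], [nearstart], [nearend], [note], [conflict], [requires], [patch]) begins. Below is a minimal, self-contained sketch of that behavior, assuming a hypothetical `split_into_chunks` helper that collects chunks as plain strings rather than `ChunkWrapper` values:

```rust
/// Mirrors `new_rule` from src/parser.rs: a new rule starts when a line
/// begins with one of the known rule headers.
fn new_rule(line: &str) -> bool {
    line.starts_with("[order")
        || line.starts_with("[nearstart")
        || line.starts_with("[nearend")
        || line.starts_with("[note")
        || line.starts_with("[conflict")
        || line.starts_with("[requires")
        || line.starts_with("[patch")
}

/// Hypothetical helper: split rule text into one string per rule,
/// following the chunking behavior shown in the diff.
fn split_into_chunks(input: &str) -> Vec<String> {
    let mut chunks: Vec<String> = Vec::new();
    let mut current: Option<String> = None;

    for line in input.lines() {
        // lowercase, then strip inline comments starting with ';'
        let line = line.to_lowercase();
        let line = match line.find(';') {
            Some(index) => line[..index].trim_end().to_owned(),
            None => line.trim_end().to_owned(),
        };

        // empty lines are skipped and no longer terminate a chunk
        if line.trim().is_empty() {
            continue;
        }

        // a new rule header closes the current chunk
        if current.is_some() && new_rule(&line) {
            chunks.extend(current.take());
        }

        // append the line to the current chunk, preserving newline delimiters
        let chunk = current.get_or_insert_with(String::new);
        chunk.push_str(&line);
        chunk.push('\n');
    }

    // don't forget the trailing chunk
    chunks.extend(current);
    chunks
}

fn main() {
    let text = "[Order]\nA.esp\nB.esp\n\n[Note] something ; a comment\nC.esp\n";
    for (i, chunk) in split_into_chunks(text).iter().enumerate() {
        println!("chunk {i}:\n{chunk}");
    }
}
```
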
9 changes: 8 additions & 1 deletion src/sorter.rs
@@ -192,7 +192,14 @@ impl Sorter {
}
return Ok(result);
}
log::debug!("{}, index {}", i, index);

if let Some(edge) = edges.get(index) {
let resolved_0 = &index_dict_rev[&edge.0];
let resolved_1 = &index_dict_rev[&edge.1];
log::debug!("{}, index {} ({}, {})", i, index, resolved_0, resolved_1);
} else {
log::debug!("{}, index {}", i, index);
}
}

log::error!("Out of iterations");
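
The sorter change is purely diagnostic: before giving up with "Out of iterations", the debug log now resolves the offending edge's indices back to plugin names via the reverse index map. A small sketch of that lookup, assuming a plain `HashMap<usize, &str>` for `index_dict_rev` and `println!` in place of `log::debug!` so the snippet has no dependencies:

```rust
use std::collections::HashMap;

fn main() {
    // hypothetical plugin list and ordering edges between their indices
    let mods = ["a.esp", "b.esp", "c.esp"];
    let edges: Vec<(usize, usize)> = vec![(0, 1), (1, 2)];

    // reverse map from index back to name, analogous to `index_dict_rev` in src/sorter.rs
    let index_dict_rev: HashMap<usize, &str> =
        mods.iter().enumerate().map(|(i, &name)| (i, name)).collect();

    let i = 42; // iteration counter, purely illustrative
    let index = 1_usize; // index of the edge currently being applied

    // resolve the edge endpoints to plugin names before logging, as the diff now does
    if let Some(edge) = edges.get(index) {
        let resolved_0 = index_dict_rev[&edge.0];
        let resolved_1 = index_dict_rev[&edge.1];
        println!("{}, index {} ({}, {})", i, index, resolved_0, resolved_1);
    } else {
        println!("{}, index {}", i, index);
    }
}
```
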
67 changes: 39 additions & 28 deletions tests/integration_tests.rs
@@ -2,6 +2,7 @@
mod integration_tests {
use std::{fs::create_dir_all, io::Write};

use log::warn;
use plox::{parser::*, rules::EOrderRule, sorter::*, *};
use rand::seq::SliceRandom;
use rand::thread_rng;
Expand Down Expand Up @@ -39,7 +40,7 @@ mod integration_tests {
.init_from_file("./tests/plox/rules_order.txt")
.expect("failed rule parsing");

assert_eq!(5, parser.order_rules.len());
assert_eq!(8, parser.order_rules.len());

let mods = debug_get_mods_from_order_rules(&parser.order_rules);

Expand Down Expand Up @@ -183,33 +184,31 @@ mod integration_tests {
}
}

#[allow(dead_code)]
//TODO disabled for now #[test]
#[test]
fn test_mlox_user_rules() -> std::io::Result<()> {
init();

let mut parser = new_tes3_parser();
parser.init_from_file("./tests/mlox/mlox_user.txt")?;

let mods = debug_get_mods_from_order_rules(&parser.order_rules);

// let mut rng = thread_rng();
// mods.shuffle(&mut rng);
let mut mods = debug_get_mods_from_order_rules(&parser.order_rules);

// let file = std::fs::File::create("tmp/mods.json").expect("file create failed");
// serde_json::to_writer_pretty(file, &mods).expect("serialize failed");
let mut rng = thread_rng();
mods.shuffle(&mut rng);

match new_unstable_sorter().topo_sort(&mods, &parser.order_rules) {
match new_stable_sorter().topo_sort(&mods, &parser.order_rules) {
Ok(result) => {
assert!(
checkresult(&result, &parser.order_rules),
"stable(true) order is wrong"
);
}
Err(e) => panic!("Error: {}", e),
Err(e) => {
panic!("Error: {}", e)
}
}

match new_stable_sorter().topo_sort(&mods, &parser.order_rules) {
match new_unstable_sorter().topo_sort(&mods, &parser.order_rules) {
Ok(result) => {
assert!(
checkresult(&result, &parser.order_rules),
@@ -222,30 +221,31 @@
Ok(())
}

#[allow(dead_code)]
//TODO disabled for now #[test]
#[test]
fn test_mlox_base_rules() -> std::io::Result<()> {
init();

let mut parser = new_tes3_parser();
parser.init_from_file("./tests/mlox/mlox_base.txt")?;

let mods = debug_get_mods_from_order_rules(&parser.order_rules);
let mut mods = debug_get_mods_from_order_rules(&parser.order_rules);

// let mut rng = thread_rng();
// mods.shuffle(&mut rng);
let mut rng = thread_rng();
mods.shuffle(&mut rng);

match new_unstable_sorter().topo_sort(&mods, &parser.order_rules) {
match new_stable_sorter().topo_sort(&mods, &parser.order_rules) {
Ok(result) => {
assert!(
checkresult(&result, &parser.order_rules),
"stable(true) order is wrong"
);
}
Err(e) => panic!("Error: {}", e),
Err(e) => {
panic!("Error: {}", e)
}
}

match new_stable_sorter().topo_sort(&mods, &parser.order_rules) {
match new_unstable_sorter().topo_sort(&mods, &parser.order_rules) {
Ok(result) => {
assert!(
checkresult(&result, &parser.order_rules),
@@ -259,7 +259,7 @@
}

#[allow(dead_code)]
//TODO disabled for now #[test]
//#[test]
fn test_mlox_rules() -> std::io::Result<()> {
init();

@@ -271,17 +271,30 @@
// let mut rng = thread_rng();
// mods.shuffle(&mut rng);

match new_unstable_sorter().topo_sort(&mods, &parser.order_rules) {
warn!("MODS: {}", mods.len());

match new_stable_sorter().topo_sort(&mods, &parser.order_rules) {
Ok(result) => {
assert!(
checkresult(&result, &parser.order_rules),
"stable(true) order is wrong"
);
}
Err(e) => panic!("Error: {}", e),
Err(e) => {
match new_unstable_sorter().topo_sort(&mods, &parser.order_rules) {
Ok(result) => {
assert!(
checkresult(&result, &parser.order_rules),
"stable(true) order is wrong"
);
}
Err(e) => panic!("Error: {}", e),
}
panic!("Error: {}", e)
}
}

match new_stable_sorter().topo_sort(&mods, &parser.order_rules) {
match new_unstable_sorter().topo_sort(&mods, &parser.order_rules) {
Ok(result) => {
assert!(
checkresult(&result, &parser.order_rules),
@@ -298,8 +311,7 @@
Sorter::new(sorter::ESortType::StableFull, 1000)
}

#[allow(dead_code)]
//#[test]
#[test]
fn test_optimized_sort() -> std::io::Result<()> {
init();

@@ -323,8 +335,7 @@
Ok(())
}

#[allow(dead_code)]
//#[test]
#[test]
fn test_optimized_sort_time() -> std::io::Result<()> {
init();

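
All of the re-enabled tests follow the same shape: parse an mlox rules file, gather the plugins referenced by its order rules, shuffle them, then sort with both the stable and the unstable sorter and assert that every order rule holds in the result. A condensed sketch of that pattern, reusing the helper names visible in the diff (`new_tes3_parser`, `debug_get_mods_from_order_rules`, `new_stable_sorter`, `new_unstable_sorter`, `checkresult`) with their exact signatures assumed:

```rust
use plox::{parser::*, sorter::*, *};
use rand::seq::SliceRandom;
use rand::thread_rng;

// Condensed sketch of the re-enabled test pattern; helper signatures are assumed.
fn sort_and_check(rules_path: &str) -> std::io::Result<()> {
    // parse the rules file
    let mut parser = new_tes3_parser();
    parser.init_from_file(rules_path)?;

    // gather every plugin referenced by an [Order] rule and shuffle the list
    let mut mods = debug_get_mods_from_order_rules(&parser.order_rules);
    mods.shuffle(&mut thread_rng());

    // both sorters must yield an order that satisfies every rule
    for mut sorter in [new_stable_sorter(), new_unstable_sorter()] {
        match sorter.topo_sort(&mods, &parser.order_rules) {
            Ok(result) => assert!(checkresult(&result, &parser.order_rules), "order is wrong"),
            Err(e) => panic!("Error: {}", e),
        }
    }
    Ok(())
}
```
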
1 change: 1 addition & 0 deletions tests/mlox/mlox_base.txt
@@ -34397,6 +34397,7 @@ Rose_Cottage_Bathing_Patch.esp
[Order] ; ( Ref: "Stinkers_ReadMe_v1.71.txt" )
Stinkers.esp
BE_Bathing_Patch.esp

[Order]
Balmora Expansion v1.4.esp
Balmora Expansion v1.4+(1.4).esp
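
For reference, an [Order] rule in the mlox rule files lists plugins that must load in the given top-to-bottom sequence; the blank line added above simply separates the Stinkers patch rule from the Balmora Expansion rule that follows it. A hypothetical example of the format (plugin names invented for illustration):

```
[Order]
Example Base Mod.esp
Example Patch For It.esp
```
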