Commit

convlog: add regression to mitigate real-naki-of-two-identical-discard problem

Equim-chan committed Apr 19, 2021
1 parent 6218429 commit 70939e8
Showing 9 changed files with 163 additions and 34 deletions.
79 changes: 66 additions & 13 deletions convlog/src/conv.rs
@@ -2,10 +2,13 @@ use crate::mjai
use crate::tenhou;
use crate::Pai;

use std::cmp::Ordering;
use std::convert::TryFrom;

use thiserror::Error;

const REGRESSION_LIMIT: u8 = 10;

#[derive(Debug, Error)]
pub enum ConvertError {
#[error("invalid naki string: {0:?}")]
@@ -37,24 +40,43 @@ pub type Result<T> = std::result::Result<T, ConvertError>;

/// Transform a tenhou.net/6 format log into mjai format.
pub fn tenhou_to_mjai(log: &tenhou::Log) -> Result<Vec<mjai::Event>> {
let mut events = vec![];

events.push(mjai::Event::StartGame {
let mut events = vec![mjai::Event::StartGame {
kyoku_first: log.game_length as u8,
aka_flag: log.has_aka,
names: log.names.clone(),
});
}];

for kyoku in &log.kyokus {
tenhou_kyoku_to_mjai_events(&mut events, kyoku)?;
let mut regressions = vec![];
let mut first_error = None;

let result = (0..REGRESSION_LIMIT).find_map(|_| {
match tenhou_kyoku_to_mjai_events(kyoku, &mut regressions) {
Ok(kyoku_events) => Some(kyoku_events),
Err(err) => {
first_error.get_or_insert(err);
None
}
}
});

if let Some(kyoku_events) = result {
events.extend(kyoku_events);
} else if let Some(err) = first_error {
return Err(err);
}
}

events.push(mjai::Event::EndGame);

Ok(events)
}

fn tenhou_kyoku_to_mjai_events(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Kyoku) -> Result<()> {
fn tenhou_kyoku_to_mjai_events(
kyoku: &tenhou::Kyoku,
regressions: &mut Vec<usize>,
) -> Result<Vec<mjai::Event>> {
let mut events = vec![];

// First of all, transform all takes and discards to events.
let mut take_events = (0..4)
.map(|i| {
@@ -105,7 +127,7 @@ fn tenhou_kyoku_to_mjai_events(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Ky
});

let mut actor = oya as usize;
loop {
for idx in 0.. {
// Start to process a take event.
let take = take_events[actor]
.next()
@@ -140,7 +162,7 @@ fn tenhou_kyoku_to_mjai_events(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Ky
// Check if the kyoku ends here; it can be ryukyoku (九種九牌) or tsumo.
// Here it simply checks whether the current actor has no more discards.
if discard_events[actor].peek().is_none() {
end_kyoku(events, kyoku);
end_kyoku(&mut events, kyoku);
break;
}

@@ -163,7 +185,7 @@ fn tenhou_kyoku_to_mjai_events(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Ky
events.push(discard.clone());

// Process previous minkan.
if need_new_dora {
if matches!(discard, mjai::Event::Dahai { .. }) && need_new_dora {
events.push(mjai::Event::Dora {
dora_marker: dora_feed
.next()
@@ -201,7 +223,7 @@ fn tenhou_kyoku_to_mjai_events(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Ky
// Here it simply checks whether there are no more takes for any
// actor.
if (0..4).all(|i| take_events[i].peek().is_none()) {
end_kyoku(events, kyoku);
end_kyoku(&mut events, kyoku);
break;
}

@@ -260,15 +282,46 @@ fn tenhou_kyoku_to_mjai_events(events: &mut Vec<mjai::Event>, kyoku: &tenhou::Ky
// will be impossible to take as he will have no chance to Pon from
// the same actor without Tsumo first.
//
// There is one exception to make the Chi legal though, if the actor
// There is one exception to make the Chi legal though - the actor
// takes another naki (Pon) before him, which is rarely seen and
// seems impossible to describe properly on tenhou.net/6.
.max_by_key(|&(_, naki_ord)| naki_ord)
.map(|(i, _)| i)
.and_then(|i| {
// I really can't think of a better way.
//
// The regression here is to mitigate the
// real-naki-of-two-identical-discard problem. If you are
// wondering, check `confusing_nakis` in testdata and load them
// into tenhou.net/6 to see what the problem is.
match regressions.last() {
Some(last) => match idx.cmp(last) {
Ordering::Greater => {
// new branch, most likely
regressions.push(idx);
Some(i)
}
Ordering::Equal => {
// where the regression happens
regressions.pop();
None
}
Ordering::Less => {
// not the time to perform regression
Some(i)
}
},
None => {
// the first regression point
regressions.push(idx);
Some(i)
}
}
})
.unwrap_or((actor + 1) % 4);
}

Ok(())
Ok(events)
}

fn take_action_to_events(actor: u8, takes: &[tenhou::ActionItem]) -> Result<Vec<mjai::Event>> {
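The core of the change is the pairing of the retry loop in `tenhou_to_mjai` with the `regressions` stack threaded through `tenhou_kyoku_to_mjai_events`: each kyoku is converted up to `REGRESSION_LIMIT` times, every ambiguous naki decision pushes its turn index onto the stack, and a failed pass leaves the stack populated so the next pass flips the deepest recorded decision (the `Ordering::Equal` arm) while earlier decisions stay on their preferred branch. The sketch below is a self-contained toy of that mechanism only; `attempt`, `needs_alt`, and the string events are invented for illustration and are not convlog APIs.

```rust
use std::cmp::Ordering;

const REGRESSION_LIMIT: u8 = 10;

/// Toy stand-in for a single kyoku conversion pass. `needs_alt[idx]` marks a
/// decision point that only works if the non-preferred branch is taken
/// (made-up input, purely to exercise the bookkeeping).
fn attempt(
    needs_alt: &[bool],
    regressions: &mut Vec<usize>,
) -> Result<Vec<&'static str>, &'static str> {
    let mut events = vec![];
    for (idx, &needs_alt_here) in needs_alt.iter().enumerate() {
        // Same Ordering-based discipline as conv.rs: push new decision
        // points, pop the deepest one left over from a failed pass to flip
        // it this time, and leave earlier points on the preferred branch.
        let take_preferred = match regressions.last() {
            Some(&last) => match idx.cmp(&last) {
                Ordering::Greater => {
                    regressions.push(idx);
                    true
                }
                Ordering::Equal => {
                    regressions.pop();
                    false
                }
                Ordering::Less => true,
            },
            None => {
                regressions.push(idx);
                true
            }
        };
        if take_preferred && needs_alt_here {
            return Err("preferred branch produced an inconsistent event stream");
        }
        events.push(if take_preferred { "preferred" } else { "alternative" });
    }
    Ok(events)
}

fn main() {
    // Only the deepest decision point is ambiguous in this toy input.
    let needs_alt = [false, false, true];
    let mut regressions = vec![];
    let mut first_error = None;

    // Mirrors the retry loop in tenhou_to_mjai: stop at the first pass that
    // succeeds, remember the first error in case every pass fails.
    let result = (0..REGRESSION_LIMIT).find_map(|_| match attempt(&needs_alt, &mut regressions) {
        Ok(events) => Some(events),
        Err(err) => {
            first_error.get_or_insert(err);
            None
        }
    });

    // The second pass flips the deepest recorded choice and succeeds.
    assert_eq!(result, Some(vec!["preferred", "preferred", "alternative"]));
}
```

Because the stack survives across passes, the search degrades gracefully: if no combination of choices works within `REGRESSION_LIMIT` passes, the first error is reported, exactly as `tenhou_to_mjai` does above.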
5 changes: 5 additions & 0 deletions convlog/src/tenhou.rs
@@ -207,6 +207,11 @@ impl RawLog

ret
}

#[inline]
pub fn len(&self) -> usize {
self.logs.len()
}
}

impl From<RawPartialLog<'_>> for RawLog {
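A side note on the new accessor: exposing `len` without a matching `is_empty` normally trips clippy's `len_without_is_empty` lint, and the only caller added in this commit (in `src/main.rs` below) compares the result against zero. A hypothetical companion method, shown here with a stub type so the snippet stands alone, is not part of the commit:

```rust
use serde_json as json;

// Stand-alone stub so the sketch compiles by itself; the real `RawLog` in
// convlog/src/tenhou.rs has more fields and a different element type.
pub struct RawLog {
    logs: Vec<json::Value>,
}

impl RawLog {
    #[inline]
    pub fn len(&self) -> usize {
        self.logs.len()
    }

    /// Hypothetical companion to the new `len`, not part of the commit;
    /// it would let callers write `raw_log.is_empty()` instead of
    /// comparing `len()` against zero.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.logs.is_empty()
    }
}
```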
14 changes: 10 additions & 4 deletions convlog/tests/parse_and_convert.rs
@@ -1,13 +1,19 @@
mod testdata;

use convlog::*;
use testdata::TESTDATA;
use testdata::{TestCase, TESTDATA};

#[test]
fn test_parse_and_convert() {
TESTDATA.iter().for_each(|data| {
let tenhou_log = tenhou::Log::from_json_str(data).expect("failed to parse tenhou log");
let mjai_log = tenhou_to_mjai(&tenhou_log).expect("failed to transform tenhou log");
TESTDATA.iter().for_each(|TestCase { description, data }| {
let tenhou_log = tenhou::Log::from_json_str(data).expect(&*format!(
"failed to parse tenhou log (case: {})",
description
));
let mjai_log = tenhou_to_mjai(&tenhou_log).expect(&*format!(
"failed to transform tenhou log (case: {})",
description
));

assert!(!mjai_log.is_empty());
});
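One note on the new assertions: `expect(&*format!(...))` evaluates the message even when the call succeeds. A hypothetical variant that formats only on failure, reusing the same `TestCase` fields, could use `unwrap_or_else` with `panic!`; this is a sketch, not what the commit does:

```rust
mod testdata;

use convlog::*;
use testdata::{TestCase, TESTDATA};

#[test]
fn test_parse_and_convert_lazy_messages() {
    TESTDATA.iter().for_each(|TestCase { description, data }| {
        // The format string is only rendered if parsing or conversion fails.
        let tenhou_log = tenhou::Log::from_json_str(data)
            .unwrap_or_else(|e| panic!("failed to parse tenhou log (case: {}): {:?}", description, e));
        let mjai_log = tenhou_to_mjai(&tenhou_log)
            .unwrap_or_else(|e| panic!("failed to transform tenhou log (case: {}): {:?}", description, e));

        assert!(!mjai_log.is_empty());
    });
}
```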
19 changes: 14 additions & 5 deletions convlog/tests/split_raw_log.rs
@@ -1,14 +1,17 @@
mod testdata;

use convlog::*;
use testdata::TESTDATA;
use testdata::{TestCase, TESTDATA};

use serde_json as json;

#[test]
fn test_split_by_kyoku() {
TESTDATA.iter().for_each(|data| {
let raw_log: tenhou::RawLog = json::from_str(data).expect("failed to parse tenhou log");
TESTDATA.iter().for_each(|TestCase { description, data }| {
let raw_log: tenhou::RawLog = json::from_str(data).expect(&*format!(
"failed to parse tenhou log (case: {})",
description
));
let splited_raw_logs = raw_log.split_by_kyoku();

let log = tenhou::Log::from(raw_log.clone());
@@ -23,8 +26,14 @@ fn test_split_by_kyoku() {
..log.clone()
};

let mjai_log = tenhou_to_mjai(&log).expect("failed to transform tenhou log");
let mjai_log_joined = tenhou_to_mjai(&joined_logs).expect("failed to transform tenhou log");
let mjai_log = tenhou_to_mjai(&log).expect(&*format!(
"failed to transform tenhou (case: {})",
description
));
let mjai_log_joined = tenhou_to_mjai(&joined_logs).expect(&*format!(
"failed to transform tenhou (case: {})",
description
));

assert_eq!(mjai_log, mjai_log_joined);
});
1 change: 1 addition & 0 deletions convlog/tests/testdata/confusing_nakis_0.json
@@ -0,0 +1 @@
{"title":["",""],"name":["Aさん","Bさん","Cさん","Dさん"],"rule":{"disp":"鳳南喰赤","aka":1},"log":[[[3,0,0],[40800,40200,8500,10500],[41,12],[],[12,13,13,17,19,19,27,33,35,36,38,44,44],[17,25,43,33,21,39,29,38,32,12,28,42,29,37,"1717p17",27,16,32],[38,33,27,60,60,60,43,19,12,19,60,38,44,12,42,44,32,16],[11,12,17,22,23,24,25,26,31,39,42,42,47],[23,18,36,15,14,21,11,27,14,34,26,44,21,17,"4242p42",26,19,16,31],[39,31,47,60,11,36,60,12,60,14,34,18,44,60,17,21,60,60,27],[13,13,15,15,18,28,32,53,37,39,43,47,47],[46,41,"4747p47",38,"p151515",11,"c365337",51,43,26,24,46,19,24,33,42,21,23,11,52],[60,60,43,32,18,60,28,"k51151515",60,60,60,60,60,60,60,60,60,60,60,60],[14,22,28,29,31,31,32,37,41,45,46,46,47],[36,"p464646",37,16,29,22,35,43,35,36,39,28,38,44,24,45,14,18,25],[22,41,47,45,28,60,16,14,43,32,60,60,60,60,60,60,60,60,60],["流局",[-3000,1000,1000,1000]]]]}
1 change: 1 addition & 0 deletions convlog/tests/testdata/confusing_nakis_1.json
@@ -0,0 +1 @@
{"title":["",""],"name":["Aさん","Bさん","Cさん","Dさん"],"rule":{"disp":"鳳南喰赤","aka":1},"log":[[[1,2,0],[22700,23700,27900,25700],[28],[],[16,17,17,19,19,23,25,35,38,42,43,44,47],[47,"4747p47",45,26,32,16,35,"35p3535","17p1717",39,18],[42,43,60,44,38,23,32,16,16,60,60],[23,29,31,32,36,38,39,41,41,42,46,46,47],[13,41,21,22,33,15,18,11,36,44,33],[42,47,13,36,29,60,60,60,60,60,33],[11,11,13,14,24,27,28,32,33,33,34,35,36],[39,31,22,11,45,44,53,17,12,27],[60,24,60,33,60,60,35,60,"r11",60],[51,16,16,19,21,24,25,34,39,44,45,46,47],[42,14,26,12,15,34,"1616p16",31],[19,39,42,47,21,46,44,45],["和了",[1600,0,-1600,0],[0,2,0,"30符1飜1000点","役牌 中(1飜)"]]]]}
1 change: 1 addition & 0 deletions convlog/tests/testdata/double_kakan_then_chankan.json
@@ -0,0 +1 @@
{"title":["",""],"name":["Aさん","Bさん","Cさん","Dさん"],"rule":{"disp":"鳳南喰赤","aka":1},"log":[[[1,0,0],[24000,25000,24000,27000],[27],[],[11,12,14,14,22,26,29,36,39,41,44,46,47],[26,12,28,38,52,42,33,45,"1212p12",12,25,"2626p26",23,44,26,16],[29,39,44,46,11,41,47,60,33,22,42,28,60,60,"2626k2626","1212k1212"],[11,16,16,18,28,29,31,32,34,37,41,45,47],[15,17,24,17,11,21,24,34,12,"c333234",36,26,19,23,27],[11,31,45,41,60,60,47,37,60,29,28,60,60,60,60],[11,13,14,15,51,22,25,25,28,33,39,43,43],[45,36,32,27,45,44,22,18,46,41,37,13,27],[39,45,22,36,60,60,60,60,60,60,11,37,28],[14,16,18,19,19,34,35,37,38,39,41,42,46],[17,28,13,21,38,23,21,35,33,21,39,34,47],[42,46,41,60,60,60,60,60,28,60,60,60,60],["和了",[-2000,0,0,2000],[3,0,3,"30符2飜2000点","槍槓(1飜)","平和(1飜)"]]]]}
74 changes: 62 additions & 12 deletions convlog/tests/testdata/mod.rs
@@ -1,13 +1,63 @@
pub const TESTDATA: &[&str] = &[
include_str!("chankan.json"),
include_str!("complex_nakis_0.json"),
include_str!("complex_nakis_1.json"),
include_str!("four_reach.json"),
include_str!("kyushukyuhai.json"),
include_str!("double_ron.json"),
include_str!("ranked_game.json"),
include_str!("rinshan.json"),
include_str!("ryukyoku.json"),
include_str!("suukantsu_0.json"),
include_str!("suukantsu_1.json"),
pub struct TestCase {
pub description: &'static str,
pub data: &'static str,
}

pub const TESTDATA: &[TestCase] = &[
TestCase {
description: "chankan",
data: include_str!("chankan.json"),
},
TestCase {
description: "complex_nakis_0",
data: include_str!("complex_nakis_0.json"),
},
TestCase {
description: "complex_nakis_1",
data: include_str!("complex_nakis_1.json"),
},
TestCase {
description: "confusing_nakis_0",
data: include_str!("confusing_nakis_0.json"),
},
TestCase {
description: "confusing_nakis_1",
data: include_str!("confusing_nakis_1.json"),
},
TestCase {
description: "double_kakan_then_chankan",
data: include_str!("double_kakan_then_chankan.json"),
},
TestCase {
description: "four_reach",
data: include_str!("four_reach.json"),
},
TestCase {
description: "kyushukyuhai",
data: include_str!("kyushukyuhai.json"),
},
TestCase {
description: "double_ron",
data: include_str!("double_ron.json"),
},
TestCase {
description: "ranked_game",
data: include_str!("ranked_game.json"),
},
TestCase {
description: "rinshan",
data: include_str!("rinshan.json"),
},
TestCase {
description: "ryukyoku",
data: include_str!("ryukyoku.json"),
},
TestCase {
description: "suukantsu_0",
data: include_str!("suukantsu_0.json"),
},
TestCase {
description: "suukantsu_1",
data: include_str!("suukantsu_1.json"),
},
];
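Each entry repeats its name twice, once as the description and once in the `include_str!` path. A hypothetical `macro_rules!` helper, not part of this commit, could derive the path from the description literal and keep the table to one line per case:

```rust
pub struct TestCase {
    pub description: &'static str,
    pub data: &'static str,
}

// Hypothetical helper: `include_str!` accepts the output of `concat!`,
// so the file name can be derived from the description literal.
macro_rules! test_cases {
    ($($name:literal),* $(,)?) => {
        pub const TESTDATA: &[TestCase] = &[
            $(TestCase {
                description: $name,
                data: include_str!(concat!($name, ".json")),
            },)*
        ];
    };
}

// One line per case instead of five.
test_cases!(
    "chankan",
    "complex_nakis_0",
    "complex_nakis_1",
    "confusing_nakis_0",
    "confusing_nakis_1",
    "double_kakan_then_chankan",
    "four_reach",
    "kyushukyuhai",
    "double_ron",
    "ranked_game",
    "rinshan",
    "ryukyoku",
    "suukantsu_0",
    "suukantsu_1",
);
```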
3 changes: 3 additions & 0 deletions src/main.rs
@@ -481,6 +481,9 @@ fn main() -> Result<()> {
if let Some(expr) = arg_kyokus {
let filter = expr.parse().context("failed to parse kyoku filter")?;
raw_log.filter_kyokus(&filter);
if raw_log.len() == 0 {
return Err(anyhow!("no kyoku to review (invalid filter?)"));
}
}

// clone the parsed raw log for possible reuse (split)
