Commit

Merge pull request #640 from ajgilbert/ch-june-15
CombineHarvester June updates
ajgilbert committed Sep 1, 2015
2 parents 6e6cb33 + aea666d commit 52ac317
Showing 15 changed files with 1,032 additions and 183 deletions.
22 changes: 14 additions & 8 deletions CombineHarvester/CombinePdfs/test/MorphingMSSM.cpp
@@ -227,6 +227,10 @@ int main() {
signal_types["ggH"], {mt_cats[0]}, true);
cb.AddProcesses(masses, {"htt"}, {"8TeV"}, {"mt"},
signal_types["bbH"], {mt_cats[1]}, true);
cb.AddProcesses(masses, {"htt"}, {"8TeV"}, {"mt"},
signal_types["bbH"], {mt_cats[0]}, true);
cb.AddProcesses(masses, {"htt"}, {"8TeV"}, {"mt"},
signal_types["ggH"], {mt_cats[1]}, true);
cout << " done\n";

cout << "Adding systematic uncertainties...";
@@ -251,18 +255,15 @@ int main() {
cout << " done\n";

cout << "Scaling signal process rates for acceptance...\n";
map<string, TGraph> xs;
for (string e : {"8TeV"}) {
for (string p : {"ggH", "bbH"}) {
ch::ParseTable(&xs, "input/xsecs_brs/mssm_" + p + "_" + e + "_accept.txt",
{p + "_" + e});
}
}
for (string const& e : {"8TeV"}) {
for (string const& p : {"ggH", "bbH"}) {
cout << "Scaling for process " << p << " and era " << e << "\n";
auto gr = ch::TGraphFromTable(
"input/xsecs_brs/mssm_" + p + "_" + e + "_accept.txt", "mPhi",
"accept");
cb.cp().process(signal_types[p]).era({e}).ForEachProc([&](ch::Process *proc) {
ch::ScaleProcessRate(proc, &xs, p+"_"+e, "");
double m = boost::lexical_cast<double>(proc->mass());
proc->set_rate(proc->rate() * gr.Eval(m));
});
}
}
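The hunks above replace the ch::ParseTable / ch::ScaleProcessRate pair with ch::TGraphFromTable, which returns a TGraph that is evaluated directly at each process mass (the same change is applied to ParametricMSSM.cpp below). A minimal standalone sketch of the new pattern, limited to the calls visible in this diff; the helper name and the include paths are assumptions, not part of the commit:

    // Sketch only: header paths follow the include style used elsewhere in this PR.
    #include <string>
    #include "boost/lexical_cast.hpp"
    #include "TGraph.h"
    #include "CombineTools/interface/CombineHarvester.h"
    #include "CombineTools/interface/Utilities.h"  // assumed home of ch::TGraphFromTable

    // Hypothetical helper: scale every {process p, era e} entry in `cb` by the
    // acceptance read from a two-column text table.
    void ScaleByAcceptance(ch::CombineHarvester& cb, std::string const& p,
                           std::string const& e) {
      TGraph gr = ch::TGraphFromTable(
          "input/xsecs_brs/mssm_" + p + "_" + e + "_accept.txt", "mPhi", "accept");
      cb.cp().process({p}).era({e}).ForEachProc([&](ch::Process* proc) {
        double m = boost::lexical_cast<double>(proc->mass());
        proc->set_rate(proc->rate() * gr.Eval(m));
      });
    }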
@@ -336,6 +337,11 @@ int main() {
// bbA.Write("bbA");
// bbX.Write("bbX");
cb.cp().mass({"*"}).WriteDatacard(folder + "/htt_mt_mssm.txt", output);
auto bins = cb.bin_set();
for (auto b : bins) {
cb.cp().bin({b}).mass({"*"}).WriteDatacard(
folder + "/" + b + ".txt", output);
}
output.Close();
}

17 changes: 7 additions & 10 deletions CombineHarvester/CombinePdfs/test/ParametricMSSM.cpp
@@ -82,18 +82,15 @@ int main() {
std::cout << " done\n";

std::cout << "Scaling signal process rates for acceptance...\n";
map<string, TGraph> xs;
for (std::string const& e : {"7TeV", "8TeV"}) {
for (std::string const& p : {"ggH", "bbH"}) {
ch::ParseTable(&xs, "data/xsecs_brs/mssm_" + p + "_" + e + "_accept.txt",
{p + "_" + e});
}
}
for (std::string const& e : {"7TeV", "8TeV"}) {
for (std::string const& p : {"ggH", "bbH"}) {
for (string e : {"8TeV"}) {
for (string p : {"ggH", "bbH"}) {
std::cout << "Scaling for process " << p << " and era " << e << "\n";
auto gr = ch::TGraphFromTable(
"input/xsecs_brs/mssm_" + p + "_" + e + "_accept.txt", "mPhi",
"accept");
cb.cp().process({p}).era({e}).ForEachProc([&](ch::Process *proc) {
ch::ScaleProcessRate(proc, &xs, p+"_"+e, "");
double m = boost::lexical_cast<double>(proc->mass());
proc->set_rate(proc->rate() * gr.Eval(m));
});
}
}
@@ -10,12 +10,12 @@
#include "CombineTools/interface/Utilities.h"
#include "CombineTools/interface/HttSystematics.h"
#include "CombinePdfs/interface/MorphFunctions.h"
#include "CombineTools/interface/BinByBin.h"

using namespace std;

int main() {
ch::CombineHarvester cb;

// cb.SetVerbosity(1);

typedef vector<pair<int, string>> Categories;
@@ -103,7 +103,7 @@ int main() {
{0, "tauTau_1jet_high_mediumhiggs"}, {1, "tauTau_1jet_high_highhiggs"},
{2, "tauTau_vbf"}};

vector<string> masses = ch::MassesFromRange("110-145:5");
vector<string> masses = ch::ValsFromRange("110:145|5");

cout << ">> Creating processes and observations...\n";
for (string era : {"7TeV", "8TeV"}) {
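The mass grid is now built with ch::ValsFromRange, whose range string uses a "start:stop|step" form instead of the "start-stop:step" form taken by the old MassesFromRange. A small illustrative fragment, as it would appear inside main() here; the expansion shown is an assumption inferred from the two range strings in this hunk:

    // Sketch only: assumes "110:145|5" expands to the same grid as the old
    // MassesFromRange("110-145:5") call, i.e. "110", "115", ..., "145".
    std::vector<std::string> masses = ch::ValsFromRange("110:145|5");
    for (auto const& m : masses) std::cout << m << " ";
    std::cout << "\n";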
@@ -145,117 +145,67 @@ int main() {
cout << ">> Scaling signal process rates...\n";
map<string, TGraph> xs;
// Get the table of H->tau tau BRs vs mass
ch::ParseTable(&xs, "input/xsecs_brs/htt_YR3.txt", {"htt"});
xs["htt"] = ch::TGraphFromTable("input/xsecs_brs/htt_YR3.txt", "mH", "br");
for (string const& e : {"7TeV", "8TeV"}) {
for (string const& p : sig_procs) {
// Get the table of xsecs vs mass for process "p" and era "e":
ch::ParseTable(&xs, "data/xsecs_brs/"+p+"_"+e+"_YR3.txt", {p+"_"+e});
xs[p+"_"+e] = ch::TGraphFromTable("input/xsecs_brs/"+p+"_"+e+"_YR3.txt", "mH", "xsec");
cout << ">>>> Scaling for process " << p << " and era " << e << "\n";
cb.cp().process({p}).era({e}).ForEachProc([&](ch::Process *proc) {
ch::ScaleProcessRate(proc, &xs, p+"_"+e, "htt");
double m = boost::lexical_cast<double>(proc->mass());
proc->set_rate(proc->rate() * xs[p+"_"+e].Eval(m) * xs["htt"].Eval(m));
});
}
}
ch::ParseTable(&xs, "input/xsecs_brs/hww_over_htt.txt", {"hww_over_htt"});
xs["hww_over_htt"] = ch::TGraphFromTable("input/xsecs_brs/hww_over_htt.txt", "mH", "ratio");
for (string const& e : {"7TeV", "8TeV"}) {
for (string const& p : {"ggH", "qqH"}) {
cb.cp().channel({"em"}).process({p+"_hww125"}).era({e})
.ForEachProc([&](ch::Process *proc) {
ch::ScaleProcessRate(proc, &xs, p+"_"+e, "htt", "125");
ch::ScaleProcessRate(proc, &xs, "hww_over_htt", "", "125");
proc->set_rate(proc->rate() * xs[p+"_"+e].Eval(125.) * xs["htt"].Eval(125.));
proc->set_rate(proc->rate() * xs["hww_over_htt"].Eval(125.));
});
}
}

cout << ">> Merging bin errors...\n";
ch::CombineHarvester cb_et = move(cb.cp().channel({"et"}));
for (string era : {"7TeV", "8TeV"}) {
cb_et.cp().era({era}).bin_id({1, 2}).process({"ZL", "ZJ", "QCD", "W"})
.MergeBinErrors(0.1, 0.5);
cb_et.cp().era({era}).bin_id({3, 5}).process({"W"})
.MergeBinErrors(0.1, 0.5);
}
cb_et.cp().era({"7TeV"}).bin_id({6}).process({"ZL", "ZJ", "W", "ZTT"})
.MergeBinErrors(0.1, 0.5);
cb_et.cp().era({"8TeV"}).bin_id({7}).process({"ZL", "ZJ", "W", "ZTT"})
.MergeBinErrors(0.1, 0.5);
cb_et.cp().era({"8TeV"}).bin_id({6}).process({"ZL", "ZJ", "W"})
.MergeBinErrors(0.1, 0.5);

ch::CombineHarvester cb_mt = move(cb.cp().channel({"mt"}));
for (string era : {"7TeV", "8TeV"}) {
cb_mt.cp().era({era}).bin_id({1, 2, 3, 4}).process({"W", "QCD"})
.MergeBinErrors(0.1, 0.5);
}
cb_mt.cp().era({"7TeV"}).bin_id({5}).process({"W"})
.MergeBinErrors(0.1, 0.5);
cb_mt.cp().era({"7TeV"}).bin_id({6}).process({"W", "ZTT"})
.MergeBinErrors(0.1, 0.5);
cb_mt.cp().era({"8TeV"}).bin_id({5, 6}).process({"W"})
.MergeBinErrors(0.1, 0.5);
cb_mt.cp().era({"8TeV"}).bin_id({7}).process({"W", "ZTT"})
.MergeBinErrors(0.1, 0.5);

ch::CombineHarvester cb_em = move(cb.cp().channel({"em"}));
for (string era : {"7TeV", "8TeV"}) {
cb_em.cp().era({era}).bin_id({1, 3}).process({"Fakes"})
.MergeBinErrors(0.1, 0.5);
}
cb_em.cp().era({"7TeV"}).bin_id({4}).process({"Fakes", "EWK", "Ztt"})
.MergeBinErrors(0.1, 0.5);
cb_em.cp().era({"8TeV"}).bin_id({5}).process({"Fakes", "EWK", "Ztt"})
.MergeBinErrors(0.1, 0.5);
cb_em.cp().era({"8TeV"}).bin_id({4}).process({"Fakes", "EWK"})
.MergeBinErrors(0.1, 0.5);

ch::CombineHarvester cb_ee_mm = move(cb.cp().channel({"ee", "mm"}));
for (string era : {"7TeV", "8TeV"}) {
cb_ee_mm.cp().era({era}).bin_id({1, 3, 4})
.process({"ZTT", "ZEE", "ZMM", "TTJ"})
.MergeBinErrors(0.0, 0.5);
}

ch::CombineHarvester cb_tt = move(cb.cp().channel({"tt"}));
cb_tt.cp().bin_id({0, 1, 2}).era({"8TeV"}).process({"ZTT", "QCD"})
.MergeBinErrors(0.1, 0.5);

cout << ">> Generating bbb uncertainties...\n";
cb_mt.cp().bin_id({0, 1, 2, 3, 4}).process({"W", "QCD"})
.AddBinByBin(0.1, true, &cb);
cb_mt.cp().era({"7TeV"}).bin_id({5}).process({"W"})
.AddBinByBin(0.1, true, &cb);
cb_mt.cp().era({"7TeV"}).bin_id({6}).process({"W", "ZTT"})
.AddBinByBin(0.1, true, &cb);
cb_mt.cp().era({"8TeV"}).bin_id({5, 6}).process({"W"})
.AddBinByBin(0.1, true, &cb);
cb_mt.cp().era({"8TeV"}).bin_id({7}).process({"W", "ZTT"})
.AddBinByBin(0.1, true, &cb);

cb_et.cp().bin_id({1, 2}).process({"ZL", "ZJ", "QCD", "W"})
.AddBinByBin(0.1, true, &cb);
cb_et.cp().bin_id({3, 5}).process({"W"})
.AddBinByBin(0.1, true, &cb);
cb_et.cp().era({"7TeV"}).bin_id({6}).process({"ZL", "ZJ", "W", "ZTT"})
.AddBinByBin(0.1, true, &cb);
cb_et.cp().era({"8TeV"}).bin_id({7}).process({"ZL", "ZJ", "W", "ZTT"})
.AddBinByBin(0.1, true, &cb);
cb_et.cp().era({"8TeV"}).bin_id({6}).process({"ZL", "ZJ", "W"})
.AddBinByBin(0.1, true, &cb);

cb_em.cp().bin_id({1, 3}).process({"Fakes"})
.AddBinByBin(0.1, true, &cb);
cb_em.cp().era({"7TeV"}).bin_id({4}).process({"Fakes", "EWK", "Ztt"})
.AddBinByBin(0.1, true, &cb);
cb_em.cp().era({"8TeV"}).bin_id({5}).process({"Fakes", "EWK", "Ztt"})
.AddBinByBin(0.1, true, &cb);
cb_em.cp().era({"8TeV"}).bin_id({4}).process({"Fakes", "EWK"})
.AddBinByBin(0.1, true, &cb);

cb_ee_mm.cp().bin_id({1, 3, 4}).process({"ZTT", "ZEE", "ZMM", "TTJ"})
.AddBinByBin(0.0, true, &cb);

cb_tt.cp().bin_id({0, 1, 2}).era({"8TeV"}).process({"QCD", "ZTT"})
.AddBinByBin(0.1, true, &cb);
cout << ">> Merging bin errors and generating bbb uncertainties...\n";

auto bbb = ch::BinByBinFactory()
.SetAddThreshold(0.1)
.SetMergeThreshold(0.5)
.SetFixNorm(true);

ch::CombineHarvester cb_et = cb.cp().channel({"et"});
bbb.MergeAndAdd(cb_et.cp().era({"7TeV"}).bin_id({1, 2}).process({"ZL", "ZJ", "QCD", "W"}), cb);
bbb.MergeAndAdd(cb_et.cp().era({"7TeV"}).bin_id({3, 5}).process({"W"}), cb);
bbb.MergeAndAdd(cb_et.cp().era({"8TeV"}).bin_id({1, 2}).process({"ZL", "ZJ", "QCD", "W"}), cb);
bbb.MergeAndAdd(cb_et.cp().era({"8TeV"}).bin_id({3, 5}).process({"W"}), cb);
bbb.MergeAndAdd(cb_et.cp().era({"7TeV"}).bin_id({6}).process({"ZL", "ZJ", "W", "ZTT"}), cb);
bbb.MergeAndAdd(cb_et.cp().era({"8TeV"}).bin_id({6}).process({"ZL", "ZJ", "W"}), cb);
bbb.MergeAndAdd(cb_et.cp().era({"8TeV"}).bin_id({7}).process({"ZL", "ZJ", "W", "ZTT"}), cb);

ch::CombineHarvester cb_mt = cb.cp().channel({"mt"});
bbb.MergeAndAdd(cb_mt.cp().era({"7TeV"}).bin_id({1, 2, 3, 4}).process({"W", "QCD"}), cb);
bbb.MergeAndAdd(cb_mt.cp().era({"8TeV"}).bin_id({1, 2, 3, 4}).process({"W", "QCD"}), cb);
bbb.MergeAndAdd(cb_mt.cp().era({"7TeV"}).bin_id({5}).process({"W"}), cb);
bbb.MergeAndAdd(cb_mt.cp().era({"7TeV"}).bin_id({6}).process({"W", "ZTT"}), cb);
bbb.MergeAndAdd(cb_mt.cp().era({"8TeV"}).bin_id({5, 6}).process({"W"}), cb);
bbb.MergeAndAdd(cb_mt.cp().era({"8TeV"}).bin_id({7}).process({"W", "ZTT"}), cb);

ch::CombineHarvester cb_em = cb.cp().channel({"em"});
bbb.MergeAndAdd(cb_em.cp().era({"7TeV"}).bin_id({1, 3}).process({"Fakes"}), cb);
bbb.MergeAndAdd(cb_em.cp().era({"8TeV"}).bin_id({1, 3}).process({"Fakes"}), cb);
bbb.MergeAndAdd(cb_em.cp().era({"7TeV"}).bin_id({4}).process({"Fakes", "EWK", "Ztt"}), cb);
bbb.MergeAndAdd(cb_em.cp().era({"8TeV"}).bin_id({5}).process({"Fakes", "EWK", "Ztt"}), cb);
bbb.MergeAndAdd(cb_em.cp().era({"8TeV"}).bin_id({4}).process({"Fakes", "EWK"}), cb);

ch::CombineHarvester cb_tt = cb.cp().channel({"tt"});
bbb.MergeAndAdd(cb_tt.cp().era({"8TeV"}).bin_id({0, 1, 2}).process({"ZTT", "QCD"}), cb);

bbb.SetAddThreshold(0.); // ee and mm use a different threshold
ch::CombineHarvester cb_ll = cb.cp().channel({"ee", "mm"});
bbb.MergeAndAdd(cb_ll.cp().era({"7TeV"}).bin_id({1, 3, 4}).process({"ZTT", "ZEE", "ZMM", "TTJ"}), cb);
bbb.MergeAndAdd(cb_ll.cp().era({"8TeV"}).bin_id({1, 3, 4}).process({"ZTT", "ZEE", "ZMM", "TTJ"}), cb);

cout << ">> Setting standardised bin names...\n";
ch::SetStandardBinNames(cb);
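The long per-channel MergeBinErrors / AddBinByBin sequence above is collapsed into a single ch::BinByBinFactory that is configured once and then applied with MergeAndAdd, which runs both steps on the selected subset and writes the resulting bin-by-bin nuisances back into the full CombineHarvester instance. A minimal sketch of the new pattern, restricted to the calls that appear in this hunk; the wrapper function name and the particular selection are illustrative only:

    // Sketch only: same factory configuration as in the hunk above.
    #include "CombineTools/interface/CombineHarvester.h"
    #include "CombineTools/interface/BinByBin.h"  // ch::BinByBinFactory (include added by this commit)

    void ExampleBbb(ch::CombineHarvester& cb) {  // hypothetical wrapper
      auto bbb = ch::BinByBinFactory()
          .SetAddThreshold(0.1)
          .SetMergeThreshold(0.5)
          .SetFixNorm(true);
      // Replaces the old MergeBinErrors(0.1, 0.5) + AddBinByBin(0.1, true, &cb)
      // pair on the same selection, as shown in the diff above.
      ch::CombineHarvester cb_mt = cb.cp().channel({"mt"});
      bbb.MergeAndAdd(cb_mt.cp().era({"8TeV"}).bin_id({1, 2}).process({"W", "QCD"}), cb);
    }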
@@ -303,30 +253,30 @@ int main() {
string folder = "output/sm_cards_morphed";
boost::filesystem::create_directories(folder);

TFile output((folder + "/htt.input.root").c_str(),
"RECREATE");
for (string chn : chns) {
TFile output((folder + "/htt_" + chn + ".input.root").c_str(),
"RECREATE");
// auto bins = cb.cp().channel({chn}).bin_set();
// for (auto b : bins) {
// for (auto m : masses) {
// cout << ">> Writing datacard for bin: " << b << " and mass: " << m
// << "\r" << flush;
// cb.cp().channel({chn}).bin({b}).mass({m, "*"}).WriteDatacard(
// folder+"/"+b + "_" + m + ".txt", output);
// }
// }
cb.cp().channel({chn}).mass({"125", "*"}).WriteDatacard(
folder+"/htt_" + chn + "_125.txt", output);
output.Close();
boost::filesystem::create_directories(folder+"/"+chn);
//Use CH to create combined card for each channel
cb.cp().channel({chn}).mass({"*"}).WriteDatacard(
folder + "/" + chn + "/combinedCard.txt", output);
auto bins = cb.cp().channel({chn}).bin_set();
for (auto b : bins) {
cout << ">> Writing datacard for bin: " << b << "\r" << flush;
//Also print individual datacards for each category of each channel
boost::filesystem::create_directories(folder+"/"+chn);
cb.cp().channel({chn}).bin({b}).mass({"*"}).WriteDatacard(
folder + "/" + chn + "/" + b + ".txt", output);
//Also print individual datacards for each category of each channel in the combined directory
boost::filesystem::create_directories(folder+"/cmb");
cb.cp().channel({chn}).bin({b}).mass({"*"}).WriteDatacard(
folder + "/cmb/"+ b + ".txt", output);
}
}
TFile output((folder + "/htt_combined.input.root").c_str(),
"RECREATE");
cb.cp().mass({"125", "*"}).WriteDatacard(
folder+"/htt_combined_125.txt", output);
//Use CH to create combined card for full combination
cb.cp().mass({"*"}).WriteDatacard(
folder + "/cmb/combinedCard.txt", output);
output.Close();
cout << "\n>> Done!\n";
}



}
2 changes: 2 additions & 0 deletions CombineHarvester/CombineTools/Rules.mk
@@ -8,6 +8,8 @@ $(d)/interface/GitVersion.h: $(TOP)/../.git/logs/HEAD
@echo -e "Updating $@"
@echo -e "namespace ch { inline std::string GitVersion() { return \""$(shell git describe --dirty)"\"; } }\n" > $@

$(d)/src/CombineHarvester_Datacards.cc : $(d)/interface/GitVersion.h

clean_$(d)/interface/GitVersion.h :
rm -f $(subst clean_,,$@)

2 changes: 1 addition & 1 deletion CombineHarvester/CombineTools/interface/TFileIO.h
@@ -42,7 +42,7 @@ void ch::WriteToTFile(T const* ptr, TFile* file, std::string const& path) {
}
gDirectory->cd(as_vec[i].c_str());
}
if (!gDirectory->Get(as_vec.back().c_str())) {
if (!gDirectory->FindKey(as_vec.back().c_str())) {
gDirectory->WriteTObject(ptr, as_vec.back().c_str());
}
gDirectory->cd("/");
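The guard in ch::WriteToTFile now calls FindKey instead of Get before writing: it only checks whether a key with that name already exists in the target directory, rather than retrieving (and reading back) the object to make the same decision. The same check-before-write pattern in a standalone form; the function name, file handling, and histogram type are illustrative:

    // Sketch only: write an object unless a key of that name is already present.
    #include "TDirectory.h"
    #include "TFile.h"
    #include "TH1F.h"

    void WriteIfAbsent(TDirectory* dir, TH1F const& h, char const* name) {
      if (!dir->FindKey(name)) {       // key lookup only, no object read
        dir->WriteTObject(&h, name);
      }
    }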