pre-commit fixes
kratsg committed Aug 8, 2023
1 parent 346b2bb commit 9cafdec
Showing 1 changed file with 49 additions and 31 deletions.
80 changes: 49 additions & 31 deletions book/UpperLimitsTable.ipynb
@@ -7,21 +7,22 @@
"metadata": {},
"outputs": [],
"source": [
"import pyhf\n",
"import pyhf.contrib.utils\n",
"from pyhf.contrib.viz import brazil\n",
"import tarfile\n",
"import json\n",
"import tarfile\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import pyhf\n",
"import pyhf.contrib.utils\n",
"import scipy\n",
"from pyhf.contrib.viz import brazil\n",
"\n",
"pyhf.set_backend(\"numpy\", \"minuit\")\n",
"\n",
"from rich import box\n",
"# for making tables\n",
"from rich.console import Console\n",
"from rich.table import Table\n",
"from rich import box"
"from rich.table import Table"
]
},
{
@@ -61,7 +62,9 @@
"metadata": {},
"outputs": [],
"source": [
"pyhf.contrib.utils.download(\"https://doi.org/10.17182/hepdata.116034.v1/r34\", \"workspace_2L2J\")"
"pyhf.contrib.utils.download(\n",
" \"https://doi.org/10.17182/hepdata.116034.v1/r34\", \"workspace_2L2J\"\n",
")"
]
},
{
@@ -81,7 +84,7 @@
"source": [
"with tarfile.open(\"workspace_2L2J/ewk_discovery_bkgonly.json.tgz\") as fp:\n",
" bkgonly = pyhf.Workspace(json.load(fp.extractfile(\"ewk_discovery_bkgonly.json\")))\n",
" \n",
"\n",
"with tarfile.open(\"workspace_2L2J/ewk_discovery_patchset.json.tgz\") as fp:\n",
" patchset = pyhf.PatchSet(json.load(fp.extractfile(\"ewk_discovery_patchset.json\")))"
]
@@ -200,16 +203,18 @@
}
],
"source": [
"pars_uncrt_bonly, corr_bonly = pyhf.infer.mle.fixed_poi_fit(0.0, data, model, return_uncertainties=True, return_correlations=True)\n",
"pars_uncrt_bonly, corr_bonly = pyhf.infer.mle.fixed_poi_fit(\n",
" 0.0, data, model, return_uncertainties=True, return_correlations=True\n",
")\n",
"pars_bonly, uncrt_bonly = pars_uncrt_bonly.T\n",
"\n",
"table = Table(title=\"Per-region yields from background-only fit\", box=box.HORIZONTALS)\n",
"table.add_column(\"Region\", justify=\"left\", style=\"cyan\", no_wrap=True)\n",
"table.add_column(\"Total Bkg.\", justify=\"center\")\n",
"\n",
"for (region, count) in zip(model.config.channels, model.expected_actualdata(pars_bonly)):\n",
"for region, count in zip(model.config.channels, model.expected_actualdata(pars_bonly)):\n",
" table.add_row(region, f\"{count:0.2f}\")\n",
" \n",
"\n",
"console = Console()\n",
"console.print(table)"
]
@@ -267,7 +272,7 @@
"source": [
"npars = len(model.config.parameters)\n",
"model_batch = ws.model(batch_size=npars)\n",
"pars_batch = np.tile(pars_bonly, (npars,1))"
"pars_batch = np.tile(pars_bonly, (npars, 1))"
]
},
{
@@ -287,7 +292,7 @@
"source": [
"up_yields = model_batch.expected_actualdata(pars_batch + np.diag(uncrt_bonly))\n",
"dn_yields = model_batch.expected_actualdata(pars_batch - np.diag(uncrt_bonly))\n",
"variations = (up_yields - dn_yields)/2"
"variations = (up_yields - dn_yields) / 2"
]
},
{
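Note: the cell above builds the ±1σ yield variations for all fit parameters in one batched call. `pars_batch` tiles the post-fit parameter vector `npars` times, and adding `np.diag(uncrt_bonly)` shifts exactly one parameter per row by its post-fit uncertainty. A minimal unbatched sketch of the same computation, assuming the `model`, `pars_bonly`, and `uncrt_bonly` objects defined in the earlier cells:

import numpy as np

# One parameter at a time: shift parameter i up/down by its post-fit
# uncertainty and record the symmetric change in the expected yields.
variations_loop = []
for i, sigma in enumerate(uncrt_bonly):
    shift = np.zeros_like(pars_bonly)
    shift[i] = sigma
    up = model.expected_actualdata(pars_bonly + shift)
    dn = model.expected_actualdata(pars_bonly - shift)
    variations_loop.append((up - dn) / 2)

# Should match the batched `variations` array row for row.
variations_loop = np.asarray(variations_loop)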
@@ -317,7 +322,7 @@
}
],
"source": [
"error_sq = np.diag(np.einsum('ij,ik,kl->jl', variations, corr_bonly, variations))\n",
"error_sq = np.diag(np.einsum(\"ij,ik,kl->jl\", variations, corr_bonly, variations))\n",
"error_sq"
]
},
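Note: the `einsum` above is plain linear error propagation. With V the (parameter x bin) matrix of ±1σ yield variations and ρ the post-fit correlation matrix, the yield covariance is Vᵀ ρ V, and `error_sq` keeps only its diagonal. An equivalent spelling with matrix products, using the same arrays as the cell above:

import numpy as np

# "ij,ik,kl->jl" contracts over the two parameter indices i and k,
# leaving a (bin x bin) covariance matrix of the expected yields.
yield_cov = variations.T @ corr_bonly @ variations
error_sq_check = np.diag(yield_cov)

# np.allclose(error_sq_check, error_sq) should hold.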
@@ -374,9 +379,11 @@
"table.add_column(\"Region\", justify=\"left\", style=\"cyan\", no_wrap=True)\n",
"table.add_column(\"Total Bkg.\", justify=\"center\")\n",
"\n",
"for (region, count, uncrt) in zip(model.config.channels, model.expected_actualdata(pars_bonly), np.sqrt(error_sq)):\n",
"for region, count, uncrt in zip(\n",
" model.config.channels, model.expected_actualdata(pars_bonly), np.sqrt(error_sq)\n",
"):\n",
" table.add_row(region, f\"{count:0.2f} ± {uncrt:0.2f}\")\n",
" \n",
"\n",
"console = Console()\n",
"console.print(table)"
]
@@ -411,7 +418,11 @@
"pyhf.set_backend(\"numpy\", \"scipy\")\n",
"\n",
"mu_tests = np.linspace(15, 25, 10)\n",
"obs_limit, exp_limit_and_bands, (poi_tests, tests) = pyhf.infer.intervals.upper_limits.upper_limit(\n",
"(\n",
" obs_limit,\n",
" exp_limit_and_bands,\n",
" (poi_tests, tests),\n",
") = pyhf.infer.intervals.upper_limits.upper_limit(\n",
" data, model, scan=mu_tests, return_results=True\n",
")"
]
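Note: `upper_limits.upper_limit` scans the signal strength over `mu_tests`, runs an asymptotic CLs hypothesis test at each point, and interpolates to find where CLs crosses 0.05. A rough, hypothetical sketch of that idea for the observed limit only (not the library's actual implementation, and it skips the expected bands), assuming the same `data`, `model`, and `mu_tests`:

import numpy as np
import pyhf

# Observed CLs at each tested value of the POI.
cls_obs = np.asarray(
    [float(pyhf.infer.hypotest(mu, data, model, test_stat="qtilde")) for mu in mu_tests]
)

# CLs falls as mu grows, so reverse the arrays to interpolate at CLs = 0.05.
obs_limit_approx = np.interp(0.05, cls_obs[::-1], mu_tests[::-1])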
@@ -432,8 +443,10 @@
}
],
"source": [
"print(f'Observed UL: {obs_limit:0.2f}')\n",
"print(f'Expected UL: {exp_limit_and_bands[2]:0.2f} [{exp_limit_and_bands[1]-exp_limit_and_bands[2]:0.2f}, +{exp_limit_and_bands[3]-exp_limit_and_bands[2]:0.2f}]')"
"print(f\"Observed UL: {obs_limit:0.2f}\")\n",
"print(\n",
" f\"Expected UL: {exp_limit_and_bands[2]:0.2f} [{exp_limit_and_bands[1]-exp_limit_and_bands[2]:0.2f}, +{exp_limit_and_bands[3]-exp_limit_and_bands[2]:0.2f}]\"\n",
")"
]
},
{
@@ -461,7 +474,7 @@
}
],
"source": [
"print(f'Limit on visible cross-section: {obs_limit / 140:0.2f}') # 140 ifb"
"print(f\"Limit on visible cross-section: {obs_limit / 140:0.2f}\") # 140 ifb"
]
},
{
@@ -566,9 +579,11 @@
}
],
"source": [
"p0 = pyhf.infer.hypotest(0.0, data, model, test_stat='q0')#, calctype='toybased', ntoys=10000)\n",
"p0 = pyhf.infer.hypotest(\n",
" 0.0, data, model, test_stat=\"q0\"\n",
") # , calctype='toybased', ntoys=10000)\n",
"p0_Z = scipy.stats.norm.isf(p0)\n",
"print(f'p(s)=0 (Z): {p0:0.2f} ({p0_Z:0.2f})')"
"print(f\"p(s)=0 (Z): {p0:0.2f} ({p0_Z:0.2f})\")"
]
},
{
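Note: `test_stat="q0"` selects the discovery test statistic, so `p0` is the p-value of the background-only hypothesis, and the cell converts it to a one-sided Gaussian significance Z = Φ⁻¹(1 - p0). A minimal check of that conversion, assuming the `p0` value from the cell above:

import scipy.stats

# isf is the inverse survival function, i.e. Z = Phi^{-1}(1 - p0).
Z_isf = scipy.stats.norm.isf(p0)
Z_ppf = scipy.stats.norm.ppf(1 - p0)

# The two agree up to floating-point precision.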
@@ -595,7 +610,7 @@
],
"source": [
"_, (_, CLb) = pyhf.infer.hypotest(obs_limit, data, model, return_tail_probs=True)\n",
"print(f'CLb: {CLb:0.2f}')"
"print(f\"CLb: {CLb:0.2f}\")"
]
},
{
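Note: at the observed limit, the cell above retrieves CLb via `return_tail_probs=True`. In pyhf's convention the call then returns the observed CLs together with the pair of tail probabilities (CLs+b, CLb), related by CLs = CLs+b / CLb. A small sketch of that unpacking, assuming the same `obs_limit`, `data`, and `model` objects:

import pyhf

# hypotest with return_tail_probs=True yields CLs plus [CLs+b, CLb].
CLs, (CLsb, CLb) = pyhf.infer.hypotest(
    obs_limit, data, model, return_tail_probs=True
)

# By definition CLs = CLs+b / CLb, so the ratio reproduces the CLs value.
print(CLs, CLsb / CLb)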
@@ -615,7 +630,7 @@
"metadata": {},
"outputs": [],
"source": [
"region_mask = model.config.channels.index('DRInt_cuts')\n",
"region_mask = model.config.channels.index(\"DRInt_cuts\")\n",
"bkg_count = model.expected_actualdata(pars_bonly)[region_mask]\n",
"bkg_uncrt = np.sqrt(error_sq)[region_mask]"
]
@@ -653,7 +668,10 @@
}
],
"source": [
"table = Table(title=\"Model-independent upper limits on the observed visible cross-section in the five electroweak search discovery regions; derived using asymptotics, pyhf, and published likelihoods.\", box=box.HORIZONTALS)\n",
"table = Table(\n",
" title=\"Model-independent upper limits on the observed visible cross-section in the five electroweak search discovery regions; derived using asymptotics, pyhf, and published likelihoods.\",\n",
" box=box.HORIZONTALS,\n",
")\n",
"table.add_column(\"Signal Region\", justify=\"left\", style=\"cyan\", no_wrap=True)\n",
"table.add_column(\"Total Bkg.\", justify=\"center\")\n",
"table.add_column(\"Data\")\n",
@@ -664,14 +682,14 @@
"table.add_column(\"p(s=0) (Z)\")\n",
"\n",
"table.add_row(\n",
" model.config.channels[region_mask], \n",
" f\"{bkg_count:0.0f} ± {bkg_uncrt:0.0f}\", \n",
" f\"{data[region_mask]:0.0f}\", \n",
" f'{obs_limit / 140:0.2f}', \n",
" f\"{obs_limit:0.1f}\", \n",
" model.config.channels[region_mask],\n",
" f\"{bkg_count:0.0f} ± {bkg_uncrt:0.0f}\",\n",
" f\"{data[region_mask]:0.0f}\",\n",
" f\"{obs_limit / 140:0.2f}\",\n",
" f\"{obs_limit:0.1f}\",\n",
" f\"{exp_limit_and_bands[2]:0.1f} + {exp_limit_and_bands[3] - exp_limit_and_bands[2]:0.1f} - {exp_limit_and_bands[2] - exp_limit_and_bands[1]:0.1f}\",\n",
" f\"{CLb:0.2f}\",\n",
" f\"{p0:0.2f} ({p0_Z:0.1f})\"\n",
" f\"{p0:0.2f} ({p0_Z:0.1f})\",\n",
")\n",
"\n",
"console = Console()\n",
