merge
rkansal47 committed Mar 5, 2024
1 parent d04b04d commit f58fde3
Showing 2 changed files with 50 additions and 58 deletions.
104 changes: 48 additions & 56 deletions src/HHbbVV/postprocessing/PostProcessRes.ipynb
@@ -12,7 +12,7 @@
"import corrections\n",
"\n",
"from utils import CUT_MAX_VAL, ShapeVar\n",
"from hh_vars import (\n",
"from HHbbVV.hh_vars import (\n",
" years,\n",
" data_key,\n",
" qcd_key,\n",
@@ -92,6 +92,7 @@
"source": [
"MAIN_DIR = Path(\"../../../\")\n",
"samples_dir = MAIN_DIR / \"../data/skimmer/24Feb25_update_skimmer\"\n",
"sig_samples_dir = MAIN_DIR / \"../data/skimmer/24Mar5_update_lp\"\n",
"# samples_dir = f\"{MAIN_DIR}/../data/skimmer/Feb24\"\n",
"# nonres_signal_samples_dir = f\"{MAIN_DIR}/../data/skimmer/Jun10\"\n",
"# res_signal_samples_dir = f\"{MAIN_DIR}/../data/skimmer/Apr11\"\n",
@@ -100,7 +101,7 @@
"# res_signal_samples_dir = \"/eos/uscms/store/user/rkansal/bbVV/skimmer/Apr11/\"\n",
"year = \"2017\"\n",
"\n",
"date = \"24Mar2\"\n",
"date = \"24Mar5\"\n",
"plot_dir = MAIN_DIR / f\"plots/PostProcessing/{date}/\"\n",
"templates_dir = Path(f\"templates/{date}/\")\n",
"\n",
@@ -133,22 +134,34 @@
" index=list(samples.keys()) + list(nonres_samples.keys()) + list(res_samples.keys())\n",
")\n",
"\n",
"# hem cleaning in load_samples not implemented yet for res samples\n",
"hem_cleaning = True\n",
"\n",
"# utils.remove_empty_parquets(samples_dir, year)\n",
"events_dict = postprocessing.load_samples(\n",
" sig_samples_dir,\n",
" {**nonres_samples, **res_samples},\n",
" year,\n",
" new_filters,\n",
")\n",
"\n",
"events_dict |= postprocessing.load_samples(\n",
" samples_dir,\n",
" {**nonres_samples, **res_samples, **samples},\n",
" samples,\n",
" year,\n",
" new_filters,\n",
" hem_cleaning=hem_cleaning,\n",
")\n",
"\n",
"utils.add_to_cutflow(events_dict, \"Preselection\", \"finalWeight\", cutflow)\n",
"cutflow"
]
},
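The reworked cell above loads the resonant and non-resonant signal samples from the newer 24Mar5_update_lp skims, loads data and backgrounds from the 24Feb25 directory, and merges the two dictionaries with the in-place union operator. A minimal sketch of the merge semantics this relies on (plain strings stand in for the loaded DataFrames; directory names are just those from the cell above):

# Python 3.9+ in-place dict union: entries from the right-hand dict are added,
# and any keys already present on the left would be overwritten by the right.
events_dict = {"HHbbVV": "signal skims (24Mar5_update_lp)"}
events_dict |= {"QCD": "background skims (24Feb25)", "Data": "data skims (24Feb25)"}
print(events_dict)
# {'HHbbVV': 'signal skims (24Mar5_update_lp)', 'QCD': ..., 'Data': ...}

Because the signal samples are loaded first and the background/data call is merged in second, any overlapping sample keys would come from the second load_samples call.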
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"events = pd.read_parquet(f\"{sig_samples_dir}/{year}/GluGluToHHTobbVV_node_cHHH1/parquet\")\n",
"events"
]
},
{
"attachments": {},
"cell_type": "markdown",
@@ -226,7 +239,7 @@
" bb_masks,\n",
" nonres_sig_keys + res_sig_keys,\n",
" control_plot_vars,\n",
" f\"{plot_dir}/ControlPlots/{year}/\",\n",
" plot_dir / f\"ControlPlots/{year}\",\n",
" year,\n",
" bg_keys=bg_keys,\n",
" sig_scale_dict={\"HHbbVV\": 1e5, \"VBFHHbbVV\": 2e6} | {key: 2e4 for key in res_sig_keys},\n",
@@ -243,6 +256,32 @@
"Overall LP SF"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from postprocessing import Region, nonres_shape_vars\n",
"\n",
"# temp region to check systematics\n",
"selection_regions = {\n",
" \"pass\": Region(\n",
" cuts={\n",
" \"bbFatJetParticleNetMD_Txbb\": [0.97, CUT_MAX_VAL],\n",
" \"VVFatJetParTMD_THWWvsT\": [0.8, CUT_MAX_VAL],\n",
" },\n",
" label=\"Pass\",\n",
" ),\n",
" \"lpsf\": Region(\n",
" cuts={\n",
" \"VVFatJetParTMD_THWWvsT\": [0.8, CUT_MAX_VAL],\n",
" },\n",
" label=\"LP SF\",\n",
" ),\n",
"}"
]
},
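The Region objects defined above pair a dictionary of [low, high] cut windows with a plot label, with CUT_MAX_VAL acting as an effectively unbounded upper edge. The actual Region class lives in postprocessing.py and is not shown in this diff, so the following is only a hedged sketch of the shape these definitions assume; the dataclass fields and the cut-application helper are illustrative, not the project's implementation:

from dataclasses import dataclass, field
from typing import Dict, List

import numpy as np
import pandas as pd

CUT_MAX_VAL = 9999.0  # stand-in for the sentinel imported from utils


@dataclass
class Region:
    cuts: Dict[str, List[float]] = field(default_factory=dict)
    label: str = ""


def apply_region(events: pd.DataFrame, region: Region) -> pd.Series:
    """Boolean mask selecting events inside every [low, high) cut window."""
    mask = pd.Series(True, index=events.index)
    for var, (low, high) in region.cuts.items():
        mask &= (events[var] >= low) & (events[var] < high)
    return mask


# toy usage with the "lpsf" region from the cell above
toy = pd.DataFrame({"VVFatJetParTMD_THWWvsT": np.array([0.5, 0.85, 0.95])})
lpsf = Region(cuts={"VVFatJetParTMD_THWWvsT": [0.8, CUT_MAX_VAL]}, label="LP SF")
print(apply_region(toy, lpsf).values)  # [False  True  True]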
{
"cell_type": "code",
"execution_count": null,
@@ -305,26 +344,6 @@
"# del selection_regions[\"fail\"], selection_regions[\"failBlinded\"]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from postprocessing import Region, nonres_shape_vars\n",
"\n",
"# temp region to check systematics\n",
"selection_regions = {\n",
" \"pass\": Region(\n",
" cuts={\n",
" \"bbFatJetParticleNetMD_Txbb\": [0.97, CUT_MAX_VAL],\n",
" \"VVFatJetParTMD_THWWvsT\": [0.8, CUT_MAX_VAL],\n",
" },\n",
" label=\"Pass\",\n",
" )\n",
"}"
]
},
{
"cell_type": "code",
"execution_count": null,
@@ -395,33 +414,6 @@
" templates = {**templates, **ttemps}"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ttemps"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"np.linalg.norm((ttemps[1].values() - ttemps[0])[:, 10]) / "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"np.linalg.norm([1, 1])"
]
},
{
"cell_type": "code",
"execution_count": null,
4 changes: 2 additions & 2 deletions src/HHbbVV/postprocessing/postprocessing.py
@@ -1399,7 +1399,7 @@ def control_plots(
bb_masks: Dict[str, pd.DataFrame],
sig_keys: List[str],
control_plot_vars: List[ShapeVar],
plot_dir: str,
plot_dir: Path,
year: str,
weight_key: str = "finalWeight",
hists: Dict = {},
@@ -1446,7 +1446,7 @@
if HEM2d and year == "2018":
hists["HEM2d"] = hists_HEM2d(events_dict, bb_masks, weight_key, selection)

with open(f"{plot_dir}/hists.pkl", "wb") as f:
with (plot_dir / "hists.pkl").open("wb") as f:
pickle.dump(hists, f)

if sig_splits is None:
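The postprocessing.py change moves control_plots from string paths to pathlib.Path, so the pickle output path is built with the / operator and Path.open() instead of an f-string and the open() builtin. A minimal sketch of the equivalence (the temporary directory is only for illustration; in the code it is the plot_dir passed to control_plots):

import pickle
import tempfile
from pathlib import Path

plot_dir = Path(tempfile.mkdtemp())  # stands in for plots/PostProcessing/<date>/
hists = {"example": [1, 2, 3]}

# old pattern: string formatting plus the open() builtin
with open(f"{plot_dir}/hists.pkl", "wb") as f:
    pickle.dump(hists, f)

# new pattern: Path joining with "/" and Path.open()
with (plot_dir / "hists.pkl").open("wb") as f:
    pickle.dump(hists, f)

Both write the same file; the pathlib form simply keeps the path manipulation type-safe once plot_dir is annotated as Path.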
