Multi-region datacards for nonresonant #58

Merged: 3 commits, Apr 11, 2024
1 change: 1 addition & 0 deletions pyproject.toml
@@ -136,6 +136,7 @@ ignore = [
"ISC001", # Conflicts with formatter
"PLR", # Design related pylint codes
"T201", # print statements
"T203", # pprint statements
"EM101", # Error message
"EM102",
"G002", # Logging statement format
51 changes: 42 additions & 9 deletions src/HHbbVV/combine/run_blinded.sh
@@ -19,6 +19,9 @@
# Specify resonant with --resonant / -r, otherwise does nonresonant
# Specify seed with --seed (default 42) and number of toys with --numtoys (default 100)
#
# For nonresonant, all regions are loaded automatically based on which datacard text files exist
# Use --noggf / --novbf to exclude the ggF and VBF regions respectively
#
# Usage ./run_blinded.sh [-wblsdgt] [--numtoys 100] [--seed 42]
#
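# Example invocations (hypothetical flag combinations, shown for illustration only):
#   ./run_blinded.sh -wbl                 # workspace + background-only fit + limits, using all regions found
#   ./run_blinded.sh -wbl --noggf         # same, but exclude the ggF pass region
#   ./run_blinded.sh -wbl --novbf         # same, but exclude the VBF pass region
#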
# Author: Raghav Kansal
@@ -46,8 +49,10 @@ numtoys=100
bias=-1
mintol=0.1 # --cminDefaultMinimizerTolerance
# maxcalls=1000000000 # --X-rtd MINIMIZER_MaxCalls
nonresggf=1
nonresvbf=1

options=$(getopt -o "wblsdrgti" --long "workspace,bfit,limits,significance,dfit,dfitasimov,resonant,gofdata,goftoys,impactsi,impactsf:,impactsc:,bias:,seed:,numtoys:,mintol:" -- "$@")
options=$(getopt -o "wblsdrgti" --long "workspace,bfit,limits,significance,dfit,dfitasimov,resonant,noggf,novbf,gofdata,goftoys,impactsi,impactsf:,impactsc:,bias:,seed:,numtoys:,mintol:" -- "$@")
eval set -- "$options"

while true; do
@@ -73,6 +78,12 @@ while true; do
-r|--resonant)
resonant=1
;;
--noggf)
nonresggf=0
;;
--novbf)
nonresvbf=0
;;
-g|--gofdata)
gofdata=1
;;
@@ -143,22 +154,45 @@ outsdir=${cards_dir}/outs
mkdir -p $outsdir

if [ $resonant = 0 ]; then
# nonresonant args

if [ -f "mXbin0pass.txt" ]; then
echo -e "\nWARNING: This is doing nonresonant fits - did you mean to pass -r|--resonant?\n"
fi

CMS_PARAMS_LABEL="CMS_bbWW_hadronic"

# nonresonant args
ccargs="fail=${cards_dir}/fail.txt failBlinded=${cards_dir}/failBlinded.txt pass=${cards_dir}/pass.txt passBlinded=${cards_dir}/passBlinded.txt"
maskunblindedargs="mask_pass=1,mask_fail=1,mask_passBlinded=0,mask_failBlinded=0"
maskblindedargs="mask_pass=0,mask_fail=0,mask_passBlinded=1,mask_failBlinded=1"
if [ -f "pass.txt" ]; then
echo "Single pass region"
ccargs="fail=${cards_dir}/fail.txt failBlinded=${cards_dir}/failBlinded.txt pass=${cards_dir}/pass.txt passBlinded=${cards_dir}/passBlinded.txt"
maskunblindedargs="mask_pass=1,mask_fail=1,mask_passBlinded=0,mask_failBlinded=0"
maskblindedargs="mask_pass=0,mask_fail=0,mask_passBlinded=1,mask_failBlinded=1"
else
ccargs="fail=${cards_dir}/fail.txt failBlinded=${cards_dir}/failBlinded.txt"
maskunblindedargs="mask_fail=1,mask_failBlinded=0"
maskblindedargs="mask_fail=0,mask_failBlinded=1"

if [ -f "passggf.txt" ] && [ $nonresggf = 1 ]; then
echo "passggf region"
ccargs+=" passggf=${cards_dir}/passggf.txt passggfBlinded=${cards_dir}/passggfBlinded.txt"
maskunblindedargs+=",mask_passggf=1,mask_passggfBlinded=0"
maskblindedargs+=",mask_passggf=0,mask_passggfBlinded=1"
fi

if [ -f "passvbf.txt" ] && [ $nonresvbf = 1 ]; then
echo "passvbf region"
ccargs+=" passvbf=${cards_dir}/passvbf.txt passvbfBlinded=${cards_dir}/passvbfBlinded.txt"
maskunblindedargs+=",mask_passvbf=1,mask_passvbfBlinded=0"
maskblindedargs+=",mask_passvbf=0,mask_passvbfBlinded=1"
fi
fi
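
# For illustration (assuming passggf.txt and passvbf.txt both exist and neither --noggf nor --novbf is passed),
# ccargs ends up as:
#   fail=${cards_dir}/fail.txt failBlinded=${cards_dir}/failBlinded.txt passggf=${cards_dir}/passggf.txt passggfBlinded=${cards_dir}/passggfBlinded.txt passvbf=${cards_dir}/passvbf.txt passvbfBlinded=${cards_dir}/passvbfBlinded.txt
# and maskblindedargs as:
#   mask_fail=0,mask_failBlinded=1,mask_passggf=0,mask_passggfBlinded=1,mask_passvbf=0,mask_passvbfBlinded=1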

# freeze qcd params in blinded bins
# freeze fail region qcd params in blinded bins
setparamsblinded=""
freezeparamsblinded=""
for bin in {5..9}
do
# would need to use regex here for multiple fail regions
setparamsblinded+="${CMS_PARAMS_LABEL}_tf_dataResidual_Bin${bin}=0,"
freezeparamsblinded+="${CMS_PARAMS_LABEL}_tf_dataResidual_Bin${bin},"
done
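# For illustration, with CMS_PARAMS_LABEL=CMS_bbWW_hadronic the loop above builds (before the trailing comma is trimmed):
#   setparamsblinded="CMS_bbWW_hadronic_tf_dataResidual_Bin5=0,...,CMS_bbWW_hadronic_tf_dataResidual_Bin9=0,"
# For multiple fail regions, a regex form like rgx{${CMS_PARAMS_LABEL}_tf_dataResidual.*_Bin${bin}} (as used in the submit template below) would be needed instead.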
@@ -167,7 +201,6 @@ if [ $resonant = 0 ]; then
setparamsblinded=${setparamsblinded%,}
freezeparamsblinded=${freezeparamsblinded%,}


# floating parameters using var{} floats a bunch of parameters which shouldn't be floated,
# so countering this inside --freezeParameters which takes priority.
# Although, practically even if those are set to "float", I didn't see them ever being fitted,
@@ -239,7 +272,7 @@ echo "$unblindedparams"
ulimit -s unlimited

if [ $workspace = 1 ]; then
echo "Combining cards"
echo "Combining cards $ccargs"
combineCards.py $ccargs > $ws.txt

echo "Running text2workspace"
@@ -263,7 +296,7 @@ if [ $bfit = 1 ]; then
-n Snapshot 2>&1 | tee $outsdir/MultiDimFit.txt
else
if [ ! -f "higgsCombineSnapshot.MultiDimFit.mH125.root" ]; then
echo "Background-only fit snapshot doesn't exist! Use the -b|--bfit option to run fit first"
echo "Background-only fit snapshot doesn't exist! Use the -b|--bfit option to run fit first. (Ignore this if you're only creating the workspace.)"
exit 1
fi
fi
4 changes: 2 additions & 2 deletions src/HHbbVV/combine/run_ftest_nonres.sh
@@ -103,8 +103,8 @@ setparamsblinded=""
freezeparamsblinded=""
for bin in {5..9}
do
setparamsblinded+="CMS_bbWW_boosted_ggf_qcdparam_msdbin${bin}=0,"
freezeparamsblinded+="CMS_bbWW_boosted_ggf_qcdparam_msdbin${bin},"
setparamsblinded+="${CMS_PARAMS_LABEL}_tf_dataResidual_Bin${bin}=0,"
freezeparamsblinded+="${CMS_PARAMS_LABEL}_tf_dataResidual_Bin${bin},"
done
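# Note: ${CMS_PARAMS_LABEL} is assumed here to be defined earlier in this script (e.g. CMS_bbWW_hadronic, as set in run_blinded.sh above).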

# remove last comma
4 changes: 2 additions & 2 deletions src/HHbbVV/combine/submit/submit_ftest_nonres.templ.sh
@@ -61,8 +61,8 @@ setparamsblinded=""
freezeparamsblinded=""
for bin in {5..9}
do
setparamsblinded+="${CMS_PARAMS_LABEL}_tf_dataResidual_Bin${bin}=0,"
freezeparamsblinded+="${CMS_PARAMS_LABEL}_tf_dataResidual_Bin${bin},"
setparamsblinded+="rgx{${CMS_PARAMS_LABEL}_tf_dataResidual.*_Bin${bin}}=0,"
freezeparamsblinded+="rgx{${CMS_PARAMS_LABEL}_tf_dataResidual.*_Bin${bin}},"
done
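# rgx{} makes combine match parameter names by regular expression, so a single pattern per bin can cover
# transfer-factor parameters from several regions (illustrative; the exact parameter names depend on the generated workspace).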

# remove last comma
14 changes: 7 additions & 7 deletions src/HHbbVV/postprocessing/CombineTemplates.ipynb
@@ -60,14 +60,14 @@
" for region, h in ggf_templates.items():\n",
" if region.startswith(\"pass\"):\n",
" # rename pass regions\n",
" combined_templates[region.replace(\"pass\", \"pass_ggf\")] = h\n",
" combined_templates[region.replace(\"pass\", \"passggf\")] = h\n",
" else:\n",
" combined_templates[region] = h\n",
"\n",
" for region, h in vbf_templates.items():\n",
" if region.startswith(\"pass\"):\n",
" # rename pass regions\n",
" combined_templates[region.replace(\"pass\", \"pass_vbf\")] = h\n",
" combined_templates[region.replace(\"pass\", \"passvbf\")] = h\n",
" else:\n",
" # checking that fail regions are identical\n",
" assert combined_templates[region] == h\n",
@@ -90,7 +90,7 @@
"metadata": {},
"outputs": [],
"source": [
"combined_systematics = {\"pass_ggf\": {}, \"pass_vbf\": {}}\n",
"combined_systematics = {\"passggf\": {}, \"passvbf\": {}}\n",
"\n",
"with (ggf_templates_path / \"systematics.json\").open(\"r\") as f:\n",
" ggf_systematics = json.load(f)\n",
@@ -104,25 +104,25 @@
" for region, val in sval.items():\n",
" if region.startswith(\"pass\"):\n",
" # rename pass regions\n",
" combined_systematics[skey][region.replace(\"pass\", \"pass_ggf\")] = val\n",
" combined_systematics[skey][region.replace(\"pass\", \"passggf\")] = val\n",
" else:\n",
" combined_systematics[skey][region] = val\n",
" else:\n",
" # LP SFs\n",
" combined_systematics[\"pass_ggf\"][skey] = sval\n",
" combined_systematics[\"passggf\"][skey] = sval\n",
"\n",
"for skey, sval in vbf_systematics.items():\n",
" if skey in years:\n",
" for region, val in sval.items():\n",
" if region.startswith(\"pass\"):\n",
" # rename pass regions\n",
" combined_systematics[skey][region.replace(\"pass\", \"pass_vbf\")] = val\n",
" combined_systematics[skey][region.replace(\"pass\", \"passvbf\")] = val\n",
" else:\n",
" # checking that fail regions are identical\n",
" assert combined_systematics[skey][region] == val\n",
" else:\n",
" # LP SFs\n",
" combined_systematics[\"pass_vbf\"][skey] = sval\n",
" combined_systematics[\"passvbf\"][skey] = sval\n",
"\n",
"with open(templates_path / \"systematics.json\", \"w\") as f:\n",
" json.dump(combined_systematics, f, indent=4)"