From eb8daf81f9ddd2c72d0eccd6a6611765f754b9ac Mon Sep 17 00:00:00 2001
From: Documentation Builder Zb?ItPEVT
zOR0P&)ot|$ JqEv7%B_Taca(auYmhP1-tLVN5T9j+&x`@puu^@qaH-XZ3
zwzTfdR6=zT`TQhOmCUg=BryUFh)dE=Han-5y?KvQCgcDpujI*-GVuR?(jVuYn@fqv
z=lbYSIB^LoDzc(#R7okNieykh6|73p^=9@+p@Q=zH38j|W+>HgU&q|zCZ$AFLdnIO
zlEm|A+tEDBN|Tq;*s-k~@!GgiN{tuf8}!19h4c~1a7VhbGW{s%8N#U1VRvp?PPT}L
zk`G76vYBRtNjj8>M3z1=XOqDI6gxqxn1ZX4HTcc@ML$w~_H!Nn-i#Vm6@BnrjqV*m
z(Cgtt_AimsyXxc3+46$RGXmugYc2Oq^EyS1_1WmWeixs+LYyksD$SW&
z^{v}>JP&tF_eWebh^uECdFwOpS2yoIoW!0qXk=FXc58O#bXGVLSlwN8xlLPdK$#D!
z)^EXd&A&1m9b0ZHTE|-7Tk&
kT7cqJsk|kiyAfksKE{(YNZAujp!reME%yARX+2i2
zy^VmLL=03>Ky+YSD8=hfzwY;90F_F|93BJX+mHE+QCE@wca7nd`#Z8WCju*Hq^7GC
zSqRi15=M~&O*S7A`fG=A|5ixymS;YY;;_^zv=C6(IeB|cs`Bz7pyc-$+VOeE0uJFu
z^@3^yI5iT3DSrnGMK*;|R9P9*^Y-}ASE1S%IGjW;sV){)w7D-#7Tyr*jY+e}zGw>S28pp2m3=I_p%k!^+wMaq
z&EWB0#KPjATI(dyWe#=L62S$hKQo2IBg>Q@fK-v|w354dj |pTrxQPp2;$cY(wtvDi6yhiInGt*gHg4ev*_wB2>Hqih$3X
z_EAZdx;*TljO7p!SETFSRIhn_KPT3TfXgWPl*?Ag^^!!ws)KywlveO~IY)A}uc3Z}
zuSn9hU3L>PVs+j2rc{I^q!rbfj@;DuZ-_NaACwYBu-od&3BOLttn?X)d;m>h(ZmDXSOwY3|%W@F(6{V$k
z_31}(|Krb~CXj}+F1!+7zV>pYg<7bjw`J*V-i;zqTna2^hxz!^DE1xz7Eray&vh`k
z4?kx?`;_`eIXlFe
zxedbx_eFG6I5N}EVBe86R9pQ}T2@J%F$o}{fMRrBehI?6$1y>tdmioIJybi0%tcBx
z!NEaTMV<0LnV9tKnKam}J_rg4LwuMQ&RxhsL5V{(QBaVN+`JORM0KRm#yyYxZxeq_
z5-FWXHT*OMt{$fn0k0oD25&B#hh^`t#_gjA;BW6NL{&f}M$-)39ziFW2$S$6D52G5
z_l|8ic`lEl%1;p);)g@LsO!x%x&E}A_P_QTyuExQ)^Gk4JtO?EX7y@>#l~a6H3>%f
z_Vz
@@ -64,10 +61,10 @@
-
+
@@ -83,7 +80,7 @@
@@ -64,10 +61,10 @@
diff --git a/_algorithm_8h.html b/_algorithm_8h.html index 3bf6360fed5..adcedb42c0a 100644 --- a/_algorithm_8h.html +++ b/_algorithm_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
+
+
+
+
+
+ + diff --git a/_auto_rebin_8cc.html b/_auto_rebin_8cc.html index fe9d35b9f1f..ec833e42146 100644 --- a/_auto_rebin_8cc.html +++ b/_auto_rebin_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
+
+
+
+
+
-
-
+
-
-
+
+ + diff --git a/_auto_rebin_8h.html b/_auto_rebin_8h.html index cfd4382ea39..3973ff7448e 100644 --- a/_auto_rebin_8h.html +++ b/_auto_rebin_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
+
-
-
+
+
+
-
+
+
-
+
-
-
-
+
+ + diff --git a/_b_s_m-_model_independent-_limits-_hhh_a_zh_8md.html b/_b_s_m-_model_independent-_limits-_hhh_a_zh_8md.html index 2deeedcdb97..47f98c045f8 100644 --- a/_b_s_m-_model_independent-_limits-_hhh_a_zh_8md.html +++ b/_b_s_m-_model_independent-_limits-_hhh_a_zh_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
-
+
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+ + diff --git a/_bin_by_bin_8h.html b/_bin_by_bin_8h.html index e9e263707e9..d3bfaeea466 100644 --- a/_bin_by_bin_8h.html +++ b/_bin_by_bin_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
+
+
-
+
+
-
-
+
+
+
+
+
+
-
-
-
-
-
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+ + diff --git a/_c_m_s_hist_func_factory_8cc.html b/_c_m_s_hist_func_factory_8cc.html index a8804263fb4..464cbf97850 100644 --- a/_c_m_s_hist_func_factory_8cc.html +++ b/_c_m_s_hist_func_factory_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
+
+
+
+
-
-
-
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+ + diff --git a/_c_m_s_hist_func_factory_8h.html b/_c_m_s_hist_func_factory_8h.html index b9d75435daa..13c64e158f9 100644 --- a/_c_m_s_hist_func_factory_8h.html +++ b/_c_m_s_hist_func_factory_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
+
+
+
+
+
+
+ + diff --git a/_card_writer_8cc.html b/_card_writer_8cc.html index 1899e4263d9..c64aa8b18cb 100644 --- a/_card_writer_8cc.html +++ b/_card_writer_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
+
-
-
-
-
-
+
-
-
-
-
+
+
+
+
-
-
-
+
+
+
+
+
+
-
-
-
+
+
+
+
+
+
+
+
+ + diff --git a/_card_writer_8h.html b/_card_writer_8h.html index 1d2e7cc83a2..52b1d95d286 100644 --- a/_card_writer_8h.html +++ b/_card_writer_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
+
+
-
+
+
-
+
-
-
-
+
+
+ + diff --git a/_charged_higgs_8md.html b/_charged_higgs_8md.html index 46b1944241f..82ccfc62902 100644 --- a/_charged_higgs_8md.html +++ b/_charged_higgs_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
-
-
+
+
+
+
-
-
-
-
-
+
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+ + diff --git a/_combine_harvester_8h.html b/_combine_harvester_8h.html index 132af1c8b00..e8b45a7c8e5 100644 --- a/_combine_harvester_8h.html +++ b/_combine_harvester_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
-
-
-
-
+
+
-
-
-
-
+
+
+
-
+
+
+
+
+
-
-
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
-
-
-
-
-
-
-
-
-
-
-
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_combine_harvester___creation_8cc.html b/_combine_harvester___creation_8cc.html index 98f6d2f03e7..2b0a69a4092 100644 --- a/_combine_harvester___creation_8cc.html +++ b/_combine_harvester___creation_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
-
+
-
-
+
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_combine_harvester___datacards_8cc.html b/_combine_harvester___datacards_8cc.html index 65bae36b565..c9bfbf56e5a 100644 --- a/_combine_harvester___datacards_8cc.html +++ b/_combine_harvester___datacards_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
#include "RooRealVar.h"
#include "RooFormulaVar.h"
#include "RooCategory.h"
#include "RooConstVar.h"
#include "CombineHarvester/CombineTools/interface/Observation.h"
#include "CombineHarvester/CombineTools/interface/Process.h"
#include "CombineHarvester/CombineTools/interface/Systematic.h"
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
+
+
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
-
-
+
+
-
-
-
-
-
-
-
+
+
+
+
+ + diff --git a/_combine_harvester___evaluate_8cc.html b/_combine_harvester___evaluate_8cc.html index 43ab104c892..e8a12729562 100644 --- a/_combine_harvester___evaluate_8cc.html +++ b/_combine_harvester___evaluate_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
-
-
+
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_combine_harvester___filters_8cc.html b/_combine_harvester___filters_8cc.html index 0476efb04ef..9c3450a91d7 100644 --- a/_combine_harvester___filters_8cc.html +++ b/_combine_harvester___filters_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
+
+
+
-
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
-
-
-
-
-
-
-
-
-
-
+
+
+
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_combine_pdfs_2src_2classes_8h.html b/_combine_pdfs_2src_2classes_8h.html index 82fe75d77f0..88576d82960 100644 --- a/_combine_pdfs_2src_2classes_8h.html +++ b/_combine_pdfs_2src_2classes_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
-
-
diff --git a/_combine_tools_2src_2classes_8h.html b/_combine_tools_2src_2classes_8h.html index a7acdf6ecbc..d25595c49e9 100644 --- a/_combine_tools_2src_2classes_8h.html +++ b/_combine_tools_2src_2classes_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
+
+
+
+
+
+ + diff --git a/_copy_tools_8cc.html b/_copy_tools_8cc.html index 07477437141..b6afbd7a46c 100644 --- a/_copy_tools_8cc.html +++ b/_copy_tools_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
+
-
-
-
-
-
-
-
+
+
+
+
+
+ + diff --git a/_copy_tools_8h.html b/_copy_tools_8h.html index 59db6e41874..4e5247eae00 100644 --- a/_copy_tools_8h.html +++ b/_copy_tools_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_example1_8md.html b/_example1_8md.html index bc79c355f81..93feb8e6de6 100644 --- a/_example1_8md.html +++ b/_example1_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
+
-
-
-
-
-
-
-
+
+
-
-
+
+
+
-
-
+
+
+
+
+
+
-
+
+
+
+
+
+ + diff --git a/_hist_mapping_8cc.html b/_hist_mapping_8cc.html index 5f759a3d1a0..22c9b33b38b 100644 --- a/_hist_mapping_8cc.html +++ b/_hist_mapping_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
+
+
-
-
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_hist_mapping_8h.html b/_hist_mapping_8h.html index 7541bb5ece8..d81990e78b6 100644 --- a/_hist_mapping_8h.html +++ b/_hist_mapping_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
-
+
+
-
-
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_htt_systematics_8h.html b/_htt_systematics_8h.html index 07364e5559b..33efbf7f99a 100644 --- a/_htt_systematics_8h.html +++ b/_htt_systematics_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
+
-
-
-
-
-
-
-
+
-
+
+
+
+
+
+ + diff --git a/_htt_systematics___m_s_s_m_legacy_8cc.html b/_htt_systematics___m_s_s_m_legacy_8cc.html index 1d83d2fe262..b366ba67c91 100644 --- a/_htt_systematics___m_s_s_m_legacy_8cc.html +++ b/_htt_systematics___m_s_s_m_legacy_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
-
-
-
-
+
+
-
-
+
+
+
-
-
-
-
+
+
+
+
-
+
+
+
+
+
+ + diff --git a/_htt_systematics___m_s_s_m_update_8cc.html b/_htt_systematics___m_s_s_m_update_8cc.html index fa47d262b44..55cb02112f6 100644 --- a/_htt_systematics___m_s_s_m_update_8cc.html +++ b/_htt_systematics___m_s_s_m_update_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
+
-
-
-
-
-
-
+
+
-
-
-
+
+
+
-
-
-
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+ + diff --git a/_htt_systematics___s_m_legacy_8cc.html b/_htt_systematics___s_m_legacy_8cc.html index 0331c36dda7..19ec59df929 100644 --- a/_htt_systematics___s_m_legacy_8cc.html +++ b/_htt_systematics___s_m_legacy_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
+
-
-
-
-
+
+
+
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
-
+
+
-
-
+
+
+
+ + diff --git a/_hybrid_new_grid_8md.html b/_hybrid_new_grid_8md.html index bb489b8ccb1..519385bc144 100644 --- a/_hybrid_new_grid_8md.html +++ b/_hybrid_new_grid_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
-
+
+
-
-
+ + diff --git a/_json_tools_8h.html b/_json_tools_8h.html index b8368fdbc96..77bfad25d08 100644 --- a/_json_tools_8h.html +++ b/_json_tools_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
+
-
-
+ + diff --git a/_limits_8md.html b/_limits_8md.html index de4209075cc..a604015a576 100644 --- a/_limits_8md.html +++ b/_limits_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
+
+
+
+
-
+
+
-
-
+
+
+
+
+ + diff --git a/_logging_8h.html b/_logging_8h.html index fb82d8aab68..2f300d0c908 100644 --- a/_logging_8h.html +++ b/_logging_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
Conveniently initialise a ch::FnTimer instance.
This macro should be placed at the start of a function, e.g.:
void MyFunction() { LAUNCH_FUNCTION_TIMER(__timer__, __token__) } -
The arguments are the names of two objects (a ch::FnTimer and a ch::FnTimer::Token) that will be created by this macro. Note that the ch::FnTimer will be assigned the current function name automatically.
+The arguments are the names of two objects (a ch::FnTimer and a ch::FnTimer::Token) that will be created by this macro. Note that the ch::FnTimer will be assigned the current function name automatically.
Definition at line 67 of file Logging.h.
@@ -297,7 +295,6 @@- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
+
+
+
-
+
+
-
+
+
+
+
+ + diff --git a/_m_s_s_m_update_model_dep.html b/_m_s_s_m_update_model_dep.html index 3b088ab56ee..64ab5a6ea2b 100644 --- a/_m_s_s_m_update_model_dep.html +++ b/_m_s_s_m_update_model_dep.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
These instruction shall elaborate how to produce MSSM model dependent limits using the 8TeV part of the MSSM (CMS-PAS-HIG-14-029) analysis.
+These instruction shall elaborate how to produce MSSM model dependent limits using the 8TeV part of the MSSM (CMS-PAS-HIG-14-029) analysis.
First we create the datacards.
cd CombineHarvester/Run1BSMComb/ MorphingMSSMUpdate -
The created datacards contain six signals: Three for each considered production process, ggH and bbH and each of them separated in the three neutral MSSM Higgs bosons h, H and A. The particular interesting feature of the "-NoModel" creation of the datacards is that all considered signal masses are stored in the workspace instead of having a single datacard for each mass. This has the advantage that high-level morphing tools of combine could be directly used instead of having to perform manual morphing using fixed mass datacards.
+The created datacards contain six signals: Three for each considered production process, ggH and bbH and each of them separated in the three neutral MSSM Higgs bosons h, H and A. The particular interesting feature of the "-NoModel" creation of the datacards is that all considered signal masses are stored in the workspace instead of having a single datacard for each mass. This has the advantage that high-level morphing tools of combine could be directly used instead of having to perform manual morphing using fixed mass datacards.
The combined datacard "htt_mssm.txt" is now transfered to a MSSM model dependent workspace.
text2workspace.py -b output/mssm_nomodel/htt_mssm.txt -o output/mssm_nomodel/htt_cmb_mhmodp.root -P CombineHarvester.CombinePdfs.MSSMv2:MSSM --PO filePrefix=$CMSSW_BASE/src/auxiliaries/models/ --PO modelFiles=8TeV,out.mhmodp-8TeV-tanbHigh-nnlo.root,0 -
Therefore, a physic model "MSSMv2.py" is used. It will try to read model files from the directory auxiliaries/models/, so we need to add this path as well as the model files themselves as additional parameters. The syntax for setting the path to the models is as above. The general syntax for the model files themselves is "--PO modelFiles=ERA,FILE,VERSION". In this example, we choose the mhmod+ model for high values of tanb at 8TeV. For a list of all available options for text2workspace, run text2workspace.py -h. For upcoming 13/14TeV runs VERSION=1 should be set. After the creation the workspace "htt_cmb_mhmodp.root" contains the model information like BR/xs/masses for all considered mA/tanb which are set as parameters of interest. The six signal processes dependent on the parameters of interest.
+Therefore, a physic model "MSSMv2.py" is used. It will try to read model files from the directory auxiliaries/models/, so we need to add this path as well as the model files themselves as additional parameters. The syntax for setting the path to the models is as above. The general syntax for the model files themselves is "--PO modelFiles=ERA,FILE,VERSION". In this example, we choose the mhmod+ model for high values of tanb at 8TeV. For a list of all available options for text2workspace, run text2workspace.py -h. For upcoming 13/14TeV runs VERSION=1 should be set. After the creation the workspace "htt_cmb_mhmodp.root" contains the model information like BR/xs/masses for all considered mA/tanb which are set as parameters of interest. The six signal processes dependent on the parameters of interest.
In the next step we calculate the limits in the considered MSSM model phase space.
python ../CombineTools/scripts/combineTool.py -M AsymptoticGrid scripts/mssm_asymptotic_grid.json -d output/mssm_nomodel/htt_cmb_mssm.root --job-mode 'lxbatch' --task-name 'mssm_mhodp' --sub-opts '-q 1nh' --merge=8 -
The scanned grid (=considered mA/tanb points) in the MSSM model is defined in the json file "mssm_asymptotic_grid.json". As example:
+The scanned grid (=considered mA/tanb points) in the MSSM model is defined in the json file "mssm_asymptotic_grid.json". As example:
"opts" : "--singlePoint 1.0", "POIs" : ["mA", "tanb"], "grids" : [ ["130:150|10", "1:3|1", "0"], ["130:150|10", "4:60|20", ""] ], "hist_binning" : [87, 130, 1000, 60, 1, 60]
The "opts", "POIS" are mandatory. The "hist_binning" has no influence yet. The list of grids can be set like above. The first row, ["130:150|10", "1:3|1", "0"], defines a grid of mA=130, 140 and 150 GeV scanning tanb=1,2 and 3. The third command, here "0", sets the CLs limit to the given value instead of calculating it. This could be used to exclude some regions of the phase space which for example are known to be excluded by other theoretical constraints. If the third option is empty "" the CLs limit for the given region will be computed. Like in the second row, ["130:150|10", "4:60|20", ""], where a grid of mA=130, 140 and 150 GeV and tanb=4, 24 and 44 is scanned. Here, the lxbatch computing system is used. Eight grid points are merged in one job.
After all jobs have finished, the results can be collected by simple rerunning the calculating command.
python ../CombineTools/scripts/combineTool.py -M AsymptoticGrid scripts/mssm_asymptotic_grid.json -d output/mssm_nomodel/htt_cmb_mhmodp.root --task-name 'mssm_mhodp' -
The limits for the median expected, expected error bands and observed are stored in TGraph2D. The resulting file "asymptotic_grid.root" is needed for the plotting.
+The limits for the median expected, expected error bands and observed are stored in TGraph2D. The resulting file "asymptotic_grid.root" is needed for the plotting.
The plotting is done be the "MSSMtanbPlot.py" script.
python ../CombineTools/scripts/MSSMtanbPlot.py --file=asymptotic_grid.root --scenario="mhmodp" -
Again, the filename after "--file" has to be the root file which was created in the previous step. MSSMtanbPlot.py also requires a name for the scenario, which will be written in the plot. MSSMtanbPlot.py will only produce the plots as a png- and pdf file. The plotting is still work in progress.
+Again, the filename after "--file" has to be the root file which was created in the previous step. MSSMtanbPlot.py also requires a name for the scenario, which will be written in the plot. MSSMtanbPlot.py will only produce the plots as a png- and pdf file. The plotting is still work in progress.
diff --git a/_m_s_s_m_update_no_model.html b/_m_s_s_m_update_no_model.html index 913b222204f..c92468f13a2 100644 --- a/_m_s_s_m_update_no_model.html +++ b/_m_s_s_m_update_no_model.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
These instruction shall elaborate how to produce model independent limits using the 8TeV part of the MSSM (CMS-HIG-14-039) analysis. Below there will be given instruction how to set limits on one signal process (ggH) while another is left floating (bbH).
+These instruction shall elaborate how to produce model independent limits using the 8TeV part of the MSSM (CMS-HIG-14-039) analysis. Below there will be given instruction how to set limits on one signal process (ggH) while another is left floating (bbH).
The first step is to create the datacards, which will be used to produce the limit later on. To do this, go into the Folder CombineHarvester/Run1BSMComb/ and then execute MorphingMSSMUpdate. All of the programs in the following steps also need to be executed from this folder. Also make sure that all the files have been computed beforehand:
cd CombineHarvester/Run1BSMComb/ MorphingMSSMUpdate -m MH -
MorphingMSSMUpdate.cpp is set up similarly like Example2.cpp. More information about the datacard steps could be found in the respective example. Adding the option "-m MH" when executing MorphingMSSMUpdate is necessary to ensure that the signal types are set to only assume one single narrow resonance produced via ggH and bbH instead of distinguishing between the three neutral MSSM bosons h, A and H. It should be mentioned that CombineHarvester needs to use specific shape-rootfiles, which have been edited to reproduce the original result, since tail-fitting is not yet included in Combine Harvester. The output will be a set of datacards. The special point is that not for each mass a datacard is created. In contrast a workspace is given for the signals which contain all centrally produced mass hypothesis. In the calculating process the signal will be morphed to the chosen mass. If, for example, MC signal templates exist for m=100GeV and 200GeV one could still calculate limits for m=150GeV ("combine -m 150 ..."). The created root file, named "htt_mssm_demo.root", will be in the CombinePdfs folder. It contains the centrally available MC signals for each channel, category and mass. Per default a combined datacard "htt_mssm.txt" is created, which contains all the information of the other datacards together.
+MorphingMSSMUpdate.cpp is set up similarly like Example2.cpp. More information about the datacard steps could be found in the respective example. Adding the option "-m MH" when executing MorphingMSSMUpdate is necessary to ensure that the signal types are set to only assume one single narrow resonance produced via ggH and bbH instead of distinguishing between the three neutral MSSM bosons h, A and H. It should be mentioned that CombineHarvester needs to use specific shape-rootfiles, which have been edited to reproduce the original result, since tail-fitting is not yet included in Combine Harvester. The output will be a set of datacards. The special point is that not for each mass a datacard is created. In contrast a workspace is given for the signals which contain all centrally produced mass hypothesis. In the calculating process the signal will be morphed to the chosen mass. If, for example, MC signal templates exist for m=100GeV and 200GeV one could still calculate limits for m=150GeV ("combine -m 150 ..."). The created root file, named "htt_mssm_demo.root", will be in the CombinePdfs folder. It contains the centrally available MC signals for each channel, category and mass. Per default a combined datacard "htt_mssm.txt" is created, which contains all the information of the other datacards together.
Now that the datacards have been created, we can add which model we would like to study. We need to have a workspace, where the signals for each process are scaled correctly according to whatever model is selected.
text2workspace.py -b output/mssm_nomodel/htt_mssm.txt -o output/mssm_nomodel/htt_ggPhi_mssm.root -P CombineHarvester.CombinePdfs.ModelIndependent:floatingMSSMXSHiggs --PO 'modes=ggH' --PO 'ggHRange=0:20' -
This creates a workspace "output/mssm_nomodel/htt_ggPhi_mssm.root" based on the combined datacard. The physic model "floatingMSSMXSHiggs" is built to split the signal into the processes ggH and bbH. A signal scaling parameter is assigned to one process while the other is left floating and therefore will be treated as an nuisance parameter in the fit later on. In this example we like to set a limit on the xs*BR of the ggH (to tau tau) process. The fit range of 0 to 20 is set for this parameter (be sure that the fitting range contains the minimum and has sensible ranges). Here the bbH process is left floating. By changing "ggH" to "bbH" in the text2workspace.py step it is easily possible to switch this. More advanced users might extend the physic model to be able to scale different processes (like ttH ...).
+This creates a workspace "output/mssm_nomodel/htt_ggPhi_mssm.root" based on the combined datacard. The physic model "floatingMSSMXSHiggs" is built to split the signal into the processes ggH and bbH. A signal scaling parameter is assigned to one process while the other is left floating and therefore will be treated as an nuisance parameter in the fit later on. In this example we like to set a limit on the xs*BR of the ggH (to tau tau) process. The fit range of 0 to 20 is set for this parameter (be sure that the fitting range contains the minimum and has sensible ranges). Here the bbH process is left floating. By changing "ggH" to "bbH" in the text2workspace.py step it is easily possible to switch this. More advanced users might extend the physic model to be able to scale different processes (like ttH ...).
Now that we have created the workspace htt_ggPhi_mssm.root, which can again be found in the output-folder, we now run combineTool.py, which will lead to the calculation all the values, which will later be used to create a plot.
python ../CombineTools/scripts/combineTool.py -m 100:200:20,90,100,250:500:50,600:1000:100 -M Asymptotic --boundlist $CMSSW_BASE/src/CombineHarvester/CombinePdfs/scripts/mssm_ggh_boundaries.json output/mssm_nomodel/htt_ggPhi_mssm.root --freezeNuisances MH --setPhysicsModelParameters r_ggH=0 -
This program by itself only creates lists of jobs for the program "combine". These jobs can be run interactively on your own computer, or they can be sent somewhere else by using the option "--job-mode" (for example –job-mode 'lxbatch' when using CERN's computing power). To avoid sending too many jobs, we may use "--merge" to include a certain number of combine calls into one job (for example –merge=8 will include up to 8 combine calls into one single job). Other than that, we need to specify which method to use, as well as the workspace. The specification of what method to use is done with the option "-M". In this example, the asymptotic limits are being calculated. The mass-range can be include with the option "-m". In this example, 19 different combine calls will be produced for the Higgs masses as seen at the bottom of this example. Since sensible boundaries on the parameter of interest (here ggH) can be mass dependent an important option can be to add a list of boundaries of the POI for each mass via a extern json fil, here "scripts/mssm_ggh_boundaries.json". This will set the range of relevant physics model parameters, depending on the production process and the mass. The option "--freezeNuisances MH" is important to fix the hypothetical Higgs mass of the considered process to the one selected via the option "-m" instead of letting it freely floating in the fitting process.
+This program by itself only creates lists of jobs for the program "combine". These jobs can be run interactively on your own computer, or they can be sent somewhere else by using the option "--job-mode" (for example –job-mode 'lxbatch' when using CERN's computing power). To avoid sending too many jobs, we may use "--merge" to include a certain number of combine calls into one job (for example –merge=8 will include up to 8 combine calls into one single job). Other than that, we need to specify which method to use, as well as the workspace. The specification of what method to use is done with the option "-M". In this example, the asymptotic limits are being calculated. The mass-range can be include with the option "-m". In this example, 19 different combine calls will be produced for the Higgs masses as seen at the bottom of this example. Since sensible boundaries on the parameter of interest (here ggH) can be mass dependent an important option can be to add a list of boundaries of the POI for each mass via a extern json fil, here "scripts/mssm_ggh_boundaries.json". This will set the range of relevant physics model parameters, depending on the production process and the mass. The option "--freezeNuisances MH" is important to fix the hypothetical Higgs mass of the considered process to the one selected via the option "-m" instead of letting it freely floating in the fitting process.
Once all the jobs sent in the previous step and done, we will then collect all relevant data from the created root files into a single file. To do this, we will run combineTool.py again, but time with the method "CollectLimits", as well as different options.
python ../CombineTools/scripts/combineTool.py -M CollectLimits -i higgsCo* -o mssm.json -
The option "-i" is used to include all root files which have been previously created. These filenames usually all begin with "higgsCombine*". The filename specified after "-o" is the name of the json file which will be created. This json file contains all necessary, computated values from the root files, which will be needed to plot the limit. These include the values for the observation, expectation and the -2, -1, +1, +2 sigma uncertainties.
+The option "-i" is used to include all root files which have been previously created. These filenames usually all begin with "higgsCombine*". The filename specified after "-o" is the name of the json file which will be created. This json file contains all necessary, computated values from the root files, which will be needed to plot the limit. These include the values for the observation, expectation and the -2, -1, +1, +2 sigma uncertainties.
Finally we can produce the plot. To do this, we use the program "plotBSMxsBRLimit.py".
python ../CombineTools/scripts/plotBSMxsBRLimit.py --file=mssm.json -
The filename specified as "--file" is the json file produced in the previous step. We may also add more options regarding the aesthetics of the plot, such as changing the range of the x- and y-axis, or enabling logarithmic scaling. Executing this program along with the additional parameters will create the desired plot as a png- and pdf file. It will also create a textfile "mssm_limit_table.txt", which contains a list of the exact values for all mass points. The limits of the described example should agree with the following numbers:
+The filename specified as "--file" is the json file produced in the previous step. We may also add more options regarding the aesthetics of the plot, such as changing the range of the x- and y-axis, or enabling logarithmic scaling. Executing this program along with the additional parameters will create the desired plot as a png- and pdf file. It will also create a textfile "mssm_limit_table.txt", which contains a list of the exact values for all mass points. The limits of the described example should agree with the following numbers:
mass | minus2sigma | minus1sigma | expected | plus1sigma | plus2sigma | observed | mass | minus2sigma | minus1sigma | expected | plus1sigma | plus2sigma | observed |
---|---|---|---|---|---|---|
90.0 | 7.44689941406 | 10.4886741638 | 15.3125 | 22.3316726685 | 31.1344871521 | 21.4247200433 | 90.0 | 7.44689941406 | 10.4886741638 | 15.3125 | 22.3316726685 | 31.1344871521 | 21.4247200433 |
100.0 | 6.0858001709 | 8.5330581665 | 12.4140625 | 18.1046066284 | 25.0876998901 | 17.6605401733 | 100.0 | 6.0858001709 | 8.5330581665 | 12.4140625 | 18.1046066284 | 25.0876998901 | 17.6605401733 |
120.0 | 1.18392789364 | 1.57787442207 | 2.17265629768 | 3.03002619743 | 4.02200269699 | 2.97035384799 | 120.0 | 1.18392789364 | 1.57787442207 | 2.17265629768 | 3.03002619743 | 4.02200269699 | 2.97035384799 |
130.0 | 0.639678955078 | 0.856723308563 | 1.1953125 | 1.68129837513 | 2.2536046505 | 1.59457176432 | 130.0 | 0.639678955078 | 0.856723308563 | 1.1953125 | 1.68129837513 | 2.2536046505 | 1.59457176432 |
140.0 | 0.438079833984 | 0.584083616734 | 0.815625011921 | 1.14398777485 | 1.52501606941 | 0.94770346896 | 140.0 | 0.438079833984 | 0.584083616734 | 0.815625011921 | 1.14398777485 | 1.52501606941 | 0.94770346896 |
160.0 | 0.253028869629 | 0.337358653545 | 0.47109374404 | 0.660751521587 | 0.886857688427 | 0.460536925886 | 160.0 | 0.253028869629 | 0.337358653545 | 0.47109374404 | 0.660751521587 | 0.886857688427 | 0.460536925886 |
180.0 | 0.180056750774 | 0.240920364857 | 0.332812488079 | 0.466799587011 | 0.626536250114 | 0.302229212372 | 180.0 | 0.180056750774 | 0.240920364857 | 0.332812488079 | 0.466799587011 | 0.626536250114 | 0.302229212372 |
200.0 | 0.143707275391 | 0.19228386879 | 0.265625 | 0.370445460081 | 0.49172270298 | 0.227834272278 | 200.0 | 0.143707275391 | 0.19228386879 | 0.265625 | 0.370445460081 | 0.49172270298 | 0.227834272278 |
250.0 | 0.0768411234021 | 0.102051369846 | 0.142031252384 | 0.198079377413 | 0.264753729105 | 0.117494322717 | 250.0 | 0.0768411234021 | 0.102051369846 | 0.142031252384 | 0.198079377413 | 0.264753729105 | 0.117494322717 |
300.0 | 0.0484149158001 | 0.0645136460662 | 0.090468749404 | 0.126169368625 | 0.168638512492 | 0.06854323815 | 300.0 | 0.0484149158001 | 0.0645136460662 | 0.090468749404 | 0.126169368625 | 0.168638512492 | 0.06854323815 |
350.0 | 0.0495315566659 | 0.0657380968332 | 0.0886718779802 | 0.118715122342 | 0.152319088578 | 0.0842439601436 | 350.0 | 0.0495315566659 | 0.0657380968332 | 0.0886718779802 | 0.118715122342 | 0.152319088578 | 0.0842439601436 |
400.0 | 0.0397595204413 | 0.052680041641 | 0.0721874982119 | 0.0972210913897 | 0.12731602788 | 0.0725939537161 | 400.0 | 0.0397595204413 | 0.052680041641 | 0.0721874982119 | 0.0972210913897 | 0.12731602788 | 0.0725939537161 |
450.0 | 0.0276245102286 | 0.035754583776 | 0.0484374985099 | 0.06581415236 | 0.086501725018 | 0.0503201500801 | 450.0 | 0.0276245102286 | 0.035754583776 | 0.0484374985099 | 0.06581415236 | 0.086501725018 | 0.0503201500801 |
500.0 | 0.0173510741442 | 0.0233783721924 | 0.0321874991059 | 0.0452741757035 | 0.0613017454743 | 0.0379442905513 | 500.0 | 0.0173510741442 | 0.0233783721924 | 0.0321874991059 | 0.0452741757035 | 0.0613017454743 | 0.0379442905513 |
600.0 | 0.0101513667032 | 0.0140254208818 | 0.019687499851 | 0.02816282399 | 0.0385656952858 | 0.0255840519774 | 600.0 | 0.0101513667032 | 0.0140254208818 | 0.019687499851 | 0.02816282399 | 0.0385656952858 | 0.0255840519774 |
700.0 | 0.00771972676739 | 0.00984832737595 | 0.0145312501118 | 0.0205551572144 | 0.0290479976684 | 0.0209467474855 | 700.0 | 0.00771972676739 | 0.00984832737595 | 0.0145312501118 | 0.0205551572144 | 0.0290479976684 | 0.0209467474855 |
800.0 | 0.0062255859375 | 0.00794219970703 | 0.01171875 | 0.0165767390281 | 0.0222346615046 | 0.0163398869015 | 800.0 | 0.0062255859375 | 0.00794219970703 | 0.01171875 | 0.0165767390281 | 0.0222346615046 | 0.0163398869015 |
900.0 | 0.00439453125 | 0.00567626953125 | 0.0078125 | 0.0113002872095 | 0.0163606926799 | 0.0107711296097 | 900.0 | 0.00439453125 | 0.00567626953125 | 0.0078125 | 0.0113002872095 | 0.0163606926799 | 0.0107711296097 |
1000.0 | 0.00259765610099 | 0.00385009753518 | 0.00593749992549 | 0.00858821813017 | 0.0124341268092 | 0.008082145785341 | 1000.0 | 0.00259765610099 | 0.00385009753518 | 0.00593749992549 | 0.00858821813017 | 0.0124341268092 | 0.008082145785341 |
diff --git a/_m_s_s_m_yield_table_8cpp.html b/_m_s_s_m_yield_table_8cpp.html index f9b7ac945ca..c12eeba23c2 100644 --- a/_m_s_s_m_yield_table_8cpp.html +++ b/_m_s_s_m_yield_table_8cpp.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
+
+
-
-
-
-
+
+
+
+
+
+
+
+
-
+
+
+
+
+
-
-
-
-
+
+
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+ + diff --git a/_main_8md.html b/_main_8md.html index 383e3f1de7f..25b0876711a 100644 --- a/_main_8md.html +++ b/_main_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
-
-
diff --git a/_model_indep_hhh_a_zh.html b/_model_indep_hhh_a_zh.html index 82ac3e8bdab..674fede9a76 100644 --- a/_model_indep_hhh_a_zh.html +++ b/_model_indep_hhh_a_zh.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
The model independent limits for the H->hh and A->Zh analyses (HIG-14-034) are slightly simpler than for the MSSM H->tautau analysis, since only one signal process is considered and hence no profiling is required. This removes the need for any kind of physics model at the text2workspace step.
+The model independent limits for the H->hh and A->Zh analyses (HIG-14-034) are slightly simpler than for the MSSM H->tautau analysis, since only one signal process is considered and hence no profiling is required. This removes the need for any kind of physics model at the text2workspace step.
The first step is to create the datacards, which will be used to produce the limit later on. To do this, go into the Folder CombineHarvester/Run1BSMComb/ and then execute MorphingHhh or MorphingAZh. All of the programs in the following steps also need to be executed from this folder. Also make sure that all the files have been compiled beforehand:
cd CombineHarvester/Run1BSMComb/ MorphingAZh MorphingHhh -
MorphingHhh.cpp and MorphingAZh.cpp are setup similarly to Example2 and MorphingMSSMUpdate as documented previously. No additional option -m MH is needed as in the case of the MSSMUpdate cards because there is only one Higgs boson considered anyway for these analyses.
+MorphingHhh.cpp and MorphingAZh.cpp are setup similarly to Example2 and MorphingMSSMUpdate as documented previously. No additional option -m MH is needed as in the case of the MSSMUpdate cards because there is only one Higgs boson considered anyway for these analyses.
Now that the datacards have been created, we can create a workspace as follows:
diff --git a/_morph_functions_8cc.html b/_morph_functions_8cc.html index c4de602c71f..f1d0b8e818d 100644 --- a/_morph_functions_8cc.html +++ b/_morph_functions_8cc.html @@ -4,7 +4,7 @@ - +- + @@ -83,7 +80,7 @@
- -
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+ + diff --git a/_morph_functions_8h.html b/_morph_functions_8h.html index 59ce27e2768..c56dc192726 100644 --- a/_morph_functions_8h.html +++ b/_morph_functions_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
+
+
+ + diff --git a/_object_8cc.html b/_object_8cc.html index a36b1e32fc9..2fd78366763 100644 --- a/_object_8cc.html +++ b/_object_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
+
-
+
+
+
-
-
+
-
-
-
+ + diff --git a/_object_8h.html b/_object_8h.html index cd8c300a9c7..831ca1ca243 100644 --- a/_object_8h.html +++ b/_object_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
-
-
-
-
-
-
+
+
+
-
+
+
+
+
+
+
+
+ + diff --git a/_observation_8cc.html b/_observation_8cc.html index 3055d87e5cb..b535dc39719 100644 --- a/_observation_8cc.html +++ b/_observation_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
+
+
+
+
-
-
-
-
-
-
-
+
+
-
-
-
-
-
+
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
+
+ + diff --git a/_observation_8h.html b/_observation_8h.html index 9eb20924e5f..6f53d3ca17f 100644 --- a/_observation_8h.html +++ b/_observation_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
+
+
+
+
+
+
-
-
-
+
+
+
-
-
-
+
+
+
+
+
+
+ + diff --git a/_parameter_8cc.html b/_parameter_8cc.html index 58012eb7729..5a634641003 100644 --- a/_parameter_8cc.html +++ b/_parameter_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
+
+
-
+
+
-
-
-
+
-
-
-
-
-
+
+
+
+ + diff --git a/_parameter_8h.html b/_parameter_8h.html index 37da622221c..3f5a28be9ab 100644 --- a/_parameter_8h.html +++ b/_parameter_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
+
+
+
+
+
+
+
+
+
+
+
+
+
-
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+ + diff --git a/_parse_combine_workspace_8cc.html b/_parse_combine_workspace_8cc.html index 13a04684972..273ef03a5c6 100644 --- a/_parse_combine_workspace_8cc.html +++ b/_parse_combine_workspace_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
+
-
-
-
-
-
-
-
-
-
-
+
+
+
+ + diff --git a/_parse_combine_workspace_8h.html b/_parse_combine_workspace_8h.html index 5046a7267c8..c958c4f45d8 100644 --- a/_parse_combine_workspace_8h.html +++ b/_parse_combine_workspace_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
+
-
-
-
+
+ + diff --git a/_plotting_8h.html b/_plotting_8h.html index 37f6eaa777c..f5d44c1c4d6 100644 --- a/_plotting_8h.html +++ b/_plotting_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
+
-
+
-
+
+
+
-
-
-
+
-
-
+
+
+
-
-
+
+
+
+
+
-
-
-
+ + diff --git a/_plotting___contours_8h.html b/_plotting___contours_8h.html index 9726ced2ca9..478d79e4a2e 100644 --- a/_plotting___contours_8h.html +++ b/_plotting___contours_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
+
+
+ + diff --git a/_plotting___style_8h.html b/_plotting___style_8h.html index 0c01fc0259c..2aa600f6d24 100644 --- a/_plotting___style_8h.html +++ b/_plotting___style_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
-
+ + diff --git a/_post_fit_shapes_from_workspace_8cpp.html b/_post_fit_shapes_from_workspace_8cpp.html index 8ffdabc0f9a..f8e0bb78caa 100644 --- a/_post_fit_shapes_from_workspace_8cpp.html +++ b/_post_fit_shapes_from_workspace_8cpp.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
-
+
+
-
-
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
-
+
-
-
-
+
+
+
+
+ + diff --git a/_post_fit_shapes_from_workspace_8md.html b/_post_fit_shapes_from_workspace_8md.html index 0508a2cb6c3..c9fddae604b 100644 --- a/_post_fit_shapes_from_workspace_8md.html +++ b/_post_fit_shapes_from_workspace_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
+
+
-
+
+
+
+
+
-
-
+
+
+
-
+
+
+
+
-
-
-
-
-
-
-
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+ + diff --git a/_process_8h.html b/_process_8h.html index bff0704c769..ed239b03ddd 100644 --- a/_process_8h.html +++ b/_process_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
+
-
+
+
+
+
+
+
+
+
+
-
-
-
-
+
-
-
-
-
-
-
-
-
+
-
+
+
+
-
-
-
+
+
+
+
+ + diff --git a/_python_interface_8md.html b/_python_interface_8md.html index 5e4f9d740c5..24ea642400a 100644 --- a/_python_interface_8md.html +++ b/_python_interface_8md.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
+
+
+
+
+
-
-
-
+
+
+
+
-
-
-
-
-
-
-
+
+
+
-
-
-
+
-
-
+
+
+
+
+
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
-
+ + diff --git a/_s_over_b_tools_8cc.html b/_s_over_b_tools_8cc.html index a123365576f..f776b430074 100644 --- a/_s_over_b_tools_8cc.html +++ b/_s_over_b_tools_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
+
+
+
-
-
-
-
-
+
+ + diff --git a/_s_over_b_tools_8h.html b/_s_over_b_tools_8h.html index d0fd776c4b3..34981829c77 100644 --- a/_s_over_b_tools_8h.html +++ b/_s_over_b_tools_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
+
+
+
+
-
-
-
-
+
+ + diff --git a/_systematic_8cc.html b/_systematic_8cc.html index 90d137ec0eb..e90ee1e1ed5 100644 --- a/_systematic_8cc.html +++ b/_systematic_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_systematic_8h.html b/_systematic_8h.html index 5184c578634..6655599c042 100644 --- a/_systematic_8h.html +++ b/_systematic_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
#include <memory>
#include <string>
#include "TH1.h"
#include "RooAbsReal.h"
#include "RooDataHist.h"
#include "CombineHarvester/CombineTools/interface/MakeUnique.h"
#include "CombineHarvester/CombineTools/interface/Object.h"
@@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
-
+
-
-
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_systematics_8h.html b/_systematics_8h.html index ec424b21330..e85da3cdb14 100644 --- a/_systematics_8h.html +++ b/_systematics_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
-
+
+
+
+
+
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
+
-
-
-
-
-
-
-
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+ + diff --git a/_t_file_i_o_8cc.html b/_t_file_i_o_8cc.html index 71e1849c38f..664da68c8e3 100644 --- a/_t_file_i_o_8cc.html +++ b/_t_file_i_o_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
+
+
-
+ + diff --git a/_t_file_i_o_8h.html b/_t_file_i_o_8h.html index 4dd69c26026..ab68345f67c 100644 --- a/_t_file_i_o_8h.html +++ b/_t_file_i_o_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- +
-
-
+
-
+
+ + diff --git a/_utilities_8cc.html b/_utilities_8cc.html index b1ecd067175..01edce3dab3 100644 --- a/_utilities_8cc.html +++ b/_utilities_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
-
-
-
-
+
-
+
+
+
+
+
+
-
+
+
+
+
-
-
-
+
-
-
+
+
+
+
-
+
+
+
+
+
+
-
-
-
-
-
-
-
-
-
-
-
-
+
-
-
+
+
+
+
+
+
+
+
+
+ + diff --git a/_utilities_8h.html b/_utilities_8h.html index f9120eee334..0ba2b462b27 100644 --- a/_utilities_8h.html +++ b/_utilities_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
+
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
-
-
-
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ + diff --git a/_validation_tools_8cc.html b/_validation_tools_8cc.html index bae687cf980..9c2fc09dd59 100644 --- a/_validation_tools_8cc.html +++ b/_validation_tools_8cc.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
-
-
-
+
+
+
+
+
+
+
+
+
+
+
-
-
-
-
-
+
+
-
-
-
-
-
+
+
+
+
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
-
-
-
-
-
-
+
+
+ + diff --git a/_validation_tools_8h.html b/_validation_tools_8h.html index da0267777a6..23028cbc305 100644 --- a/_validation_tools_8h.html +++ b/_validation_tools_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
+
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
-
+
+
+ + diff --git a/_validation_tools_no_j_s_o_n_8h.html b/_validation_tools_no_j_s_o_n_8h.html index 137b4a63508..0b38d3b426f 100644 --- a/_validation_tools_no_j_s_o_n_8h.html +++ b/_validation_tools_no_j_s_o_n_8h.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
- + @@ -83,7 +80,7 @@
- -
-
-
+
+
+
+
+
+
+
-
-
-
-
-
+
+ + diff --git a/annotated.html b/annotated.html index ba3e459670d..9c3fa7c61eb 100644 --- a/annotated.html +++ b/annotated.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
▼Nch | |
▼Nsyst | |
Canalysis | |
Cbin | |
Cbin_id | |
Cbin | |
Canalysis | |
Cera | |
Cchannel | |
Cera | |
Cmass | |
Cprocess | |
Cmass | |
Cprocess | |
Cbin_id | |
CSystMap | |
CSystMapAsymm | |
CSystMapFunc | |
▼Ntupleprint | |
CTuplePrinter | |
CTuplePrinter< Tuple, 0 > | |
CTuplePrinter< Tuple, 1 > | |
CTuplePrinter< Tuple, 1 > | |
CTuplePrinter< Tuple, 0 > | |
CAutoRebin | Tests for any bins below a certain threshold and if they exist merges them with neighborouring bins |
CBinByBinFactory | Merges bin uncertainties and creates bin-by-bin statistical uncertainties |
CCardWriter | Automates the writing of datacards into directory structures |
CCMSHistFuncFactory | |
CCombineHarvester | |
CCombineHarvester | |
CHistMapping | |
▼CFnTimer | Determine the total amount of time spent in a function |
CToken | |
CHistMapping | |
CObject | |
CObservation | |
CParameter | |
CProcess | |
CSOverBInfo | |
CSystematic | |
▼Npython | |
CBkgInfo | |
CColInfo | |
CObject | |
CObservation | |
CParameter | |
CProcess | |
CSOverBInfo | |
CSystematic | |
CCMSHistFuncFactory | |
CBkgInfo | |
CColInfo |
diff --git a/annotated_dup.js b/annotated_dup.js index 7df67e78be0..56989822175 100644 --- a/annotated_dup.js +++ b/annotated_dup.js @@ -1,7 +1,37 @@ var annotated_dup = [ - [ "ch", "namespacech.html", "namespacech" ], - [ "python", "namespacepython.html", null ], + [ "ch", "namespacech.html", [ + [ "syst", "namespacech_1_1syst.html", [ + [ "bin", "structch_1_1syst_1_1bin.html", "structch_1_1syst_1_1bin" ], + [ "analysis", "structch_1_1syst_1_1analysis.html", "structch_1_1syst_1_1analysis" ], + [ "era", "structch_1_1syst_1_1era.html", "structch_1_1syst_1_1era" ], + [ "channel", "structch_1_1syst_1_1channel.html", "structch_1_1syst_1_1channel" ], + [ "mass", "structch_1_1syst_1_1mass.html", "structch_1_1syst_1_1mass" ], + [ "process", "structch_1_1syst_1_1process.html", "structch_1_1syst_1_1process" ], + [ "bin_id", "classch_1_1syst_1_1bin__id.html", "classch_1_1syst_1_1bin__id" ], + [ "SystMap", "classch_1_1syst_1_1_syst_map.html", "classch_1_1syst_1_1_syst_map" ], + [ "SystMapAsymm", "classch_1_1syst_1_1_syst_map_asymm.html", "classch_1_1syst_1_1_syst_map_asymm" ], + [ "SystMapFunc", "classch_1_1syst_1_1_syst_map_func.html", "classch_1_1syst_1_1_syst_map_func" ] + ] ], + [ "tupleprint", "namespacech_1_1tupleprint.html", [ + [ "TuplePrinter", "structch_1_1tupleprint_1_1_tuple_printer.html", null ], + [ "TuplePrinter< Tuple, 1 >", "structch_1_1tupleprint_1_1_tuple_printer_3_01_tuple_00_011_01_4.html", null ], + [ "TuplePrinter< Tuple, 0 >", "structch_1_1tupleprint_1_1_tuple_printer_3_01_tuple_00_010_01_4.html", null ] + ] ], + [ "AutoRebin", "classch_1_1_auto_rebin.html", "classch_1_1_auto_rebin" ], + [ "BinByBinFactory", "classch_1_1_bin_by_bin_factory.html", "classch_1_1_bin_by_bin_factory" ], + [ "CardWriter", "classch_1_1_card_writer.html", "classch_1_1_card_writer" ], + [ "CombineHarvester", "classch_1_1_combine_harvester.html", "classch_1_1_combine_harvester" ], + [ "HistMapping", "structch_1_1_hist_mapping.html", "structch_1_1_hist_mapping" ], + [ "FnTimer", 
"classch_1_1_fn_timer.html", "classch_1_1_fn_timer" ], + [ "Object", "classch_1_1_object.html", "classch_1_1_object" ], + [ "Observation", "classch_1_1_observation.html", "classch_1_1_observation" ], + [ "Parameter", "classch_1_1_parameter.html", "classch_1_1_parameter" ], + [ "Process", "classch_1_1_process.html", "classch_1_1_process" ], + [ "SOverBInfo", "structch_1_1_s_over_b_info.html", "structch_1_1_s_over_b_info" ], + [ "Systematic", "classch_1_1_systematic.html", "classch_1_1_systematic" ], + [ "CMSHistFuncFactory", "classch_1_1_c_m_s_hist_func_factory.html", "classch_1_1_c_m_s_hist_func_factory" ] + ] ], [ "BkgInfo", "struct_bkg_info.html", "struct_bkg_info" ], [ "ColInfo", "struct_col_info.html", "struct_col_info" ] ]; \ No newline at end of file diff --git a/classch_1_1_auto_rebin.html b/classch_1_1_auto_rebin.html index f3f8313a074..ba8501fff2e 100644 --- a/classch_1_1_auto_rebin.html +++ b/classch_1_1_auto_rebin.html @@ -4,7 +4,7 @@
- +
@@ -13,10 +13,6 @@ - @@ -30,7 +26,8 @@ extensions: ["tex2jax.js"], jax: ["input/TeX","output/HTML-CSS"], }); - + + @@ -46,7 +43,7 @@
@@ -64,10 +61,10 @@ - + @@ -83,7 +80,7 @@
Tests for any bins below a certain threshold and if they exist merges them with neighborouring bins. More...
-#include "CombineHarvester/CombineTools/interface/AutoRebin.h"
Public Member Functions | @@ -64,10 +61,10 @@ |
- + @@ -83,7 +80,7 @@
Merges bin uncertainties and creates bin-by-bin statistical uncertainties. More...
-#include "CombineHarvester/CombineTools/interface/BinByBin.h"
Public Member Functions | @@ -64,10 +61,10 @@ |
- + @@ -83,7 +80,7 @@
#include "CombineHarvester/CombinePdfs/interface/CMSHistFuncFactory.h"
- + @@ -83,7 +80,7 @@
Automates the writing of datacards into directory structures. More...
-#include "CombineHarvester/CombineTools/interface/CardWriter.h"
Public Member Functions | @@ -64,10 +61,10 @@ |
- + @@ -83,7 +80,7 @@
#include "CombineHarvester/CombineTools/interface/CombineHarvester.h"
Friends |
virtual const std::string ch::Object::attribute | +virtual std::string const ch::Object::attribute | ( | std::string const & | attr_label | ) | diff --git a/classch_1_1_object.js b/classch_1_1_object.js index 4dec3c43a34..68205905f30 100644 --- a/classch_1_1_object.js +++ b/classch_1_1_object.js @@ -25,6 +25,6 @@ var classch_1_1_object = [ "delete_attribute", "classch_1_1_object.html#afe324e1fe370f7804a30e84da0a67556", null ], [ "set_all_attributes", "classch_1_1_object.html#a8c779be0d578124a56a17031abd86c9f", null ], [ "all_attributes", "classch_1_1_object.html#af6956eda45da72f747aeee28fadfe4fe", null ], - [ "attribute", "classch_1_1_object.html#a92bcd99a8c7a5e565d713387bbd954d2", null ], + [ "attribute", "classch_1_1_object.html#a3f131c3c63a86f91cab1d2fb56c86bb9", null ], [ "swap", "classch_1_1_object.html#a9ea9cb8427d79a8fccee9409eae3f603", null ] ]; \ No newline at end of file diff --git a/classch_1_1_observation.html b/classch_1_1_observation.html index 28664c112f2..55505464ccf 100644 --- a/classch_1_1_observation.html +++ b/classch_1_1_observation.html @@ -4,7 +4,7 @@ - +@@ -64,10 +61,10 @@ |
#include "CombineHarvester/CombineTools/interface/Observation.h"
Static Public Member Functions | @@ -64,10 +61,10 @@ |
- + @@ -83,7 +80,7 @@
#include "CombineHarvester/CombineTools/interface/Parameter.h"
Public Member Functions | @@ -64,10 +61,10 @@ |
- + @@ -83,7 +80,7 @@
#include "CombineHarvester/CombineTools/interface/Process.h"
#include "/afs/cern.ch/user/a/agilbert/CMSSW_14_1_0_pre4/src/CombineHarvester/CombineTools/interface/Process.h"
Static Public Member Functions | @@ -64,10 +61,10 @@ |
@@ -83,7 +80,7 @@
#include "CombineHarvester/CombineTools/interface/Systematic.h"
Static Public Member Functions |
Definition at line 12 of file Systematic.h.
+Definition at line 13 of file Systematic.h.
Definition at line 21 of file Systematic.h.
+Definition at line 22 of file Systematic.h.
@@ -432,7 +433,7 @@
Definition at line 23 of file Systematic.h.
+Definition at line 24 of file Systematic.h.
@@ -459,7 +460,7 @@Definition at line 24 of file Systematic.h.
+Definition at line 25 of file Systematic.h.
@@ -487,7 +488,7 @@Definition at line 26 of file Systematic.h.
+Definition at line 27 of file Systematic.h.
@@ -514,7 +515,7 @@Definition at line 27 of file Systematic.h.
+Definition at line 28 of file Systematic.h.
@@ -542,7 +543,7 @@Definition at line 29 of file Systematic.h.
+Definition at line 30 of file Systematic.h.
@@ -569,7 +570,7 @@Definition at line 30 of file Systematic.h.
+Definition at line 31 of file Systematic.h.
@@ -597,7 +598,7 @@Definition at line 32 of file Systematic.h.
+Definition at line 33 of file Systematic.h.
@@ -624,7 +625,7 @@Definition at line 33 of file Systematic.h.
+Definition at line 34 of file Systematic.h.
@@ -652,7 +653,7 @@Definition at line 35 of file Systematic.h.
+Definition at line 36 of file Systematic.h.
@@ -679,7 +680,7 @@Definition at line 36 of file Systematic.h.
+Definition at line 37 of file Systematic.h.
@@ -706,7 +707,7 @@Definition at line 38 of file Systematic.h.
+Definition at line 39 of file Systematic.h.
@@ -809,7 +810,7 @@Definition at line 46 of file Systematic.h.
+Definition at line 47 of file Systematic.h.
@@ -836,7 +837,7 @@Definition at line 48 of file Systematic.h.
+Definition at line 49 of file Systematic.h.
@@ -863,7 +864,7 @@Definition at line 50 of file Systematic.h.
+Definition at line 51 of file Systematic.h.
@@ -890,7 +891,7 @@Definition at line 52 of file Systematic.h.
+Definition at line 53 of file Systematic.h.
@@ -917,7 +918,7 @@Definition at line 54 of file Systematic.h.
+Definition at line 55 of file Systematic.h.
@@ -1063,6 +1064,61 @@
+
|
+ +inline | +
Definition at line 69 of file Systematic.h.
+ +
+
|
+ +inline | +
Definition at line 70 of file Systematic.h.
+#include "CombineHarvester/CombineTools/interface/Systematics.h"
Public Member Functions | @@ -64,10 +61,10 @@ |
#include "CombineHarvester/CombineTools/interface/Systematics.h"
Public Member Functions | @@ -64,10 +61,10 @@ |
#include "CombineHarvester/CombineTools/interface/Systematics.h"
Public Member Functions | @@ -64,10 +61,10 @@ |
#include "CombineHarvester/CombineTools/interface/Systematics.h"
Public Types | @@ -64,10 +61,10 @@ |
|
-channel (ch::syst) | -
|
-Process (ch) | -TuplePrinter (ch::tupleprint) | -||
CMSHistFuncFactory (ch) | -
|
-TuplePrinter< Tuple, 0 > (ch::tupleprint) | -||||
analysis (ch::syst) | -ColInfo | -mass (ch::syst) | -TuplePrinter< Tuple, 1 > (ch::tupleprint) | -|||
AutoRebin (ch) | -CombineHarvester (ch) | -
|
-SOverBInfo (ch) | -|||
|
-
|
-Systematic (ch) | -||||
Object (ch) | -SystMap (ch::syst) | -|||||
bin (ch::syst) | -era (ch::syst) | -Observation (ch) | -SystMapAsymm (ch::syst) | -|||
bin_id (ch::syst) | -
|
-
|
-SystMapFunc (ch::syst) | -|||
BinByBinFactory (ch) | -
|
-|||||
BkgInfo | -FnTimer (ch) | -Parameter (ch) | -||||
|
-
|
-process (ch::syst) | -FnTimer::Token (ch) | -|||
CardWriter (ch) | -HistMapping (ch) | -|||||
This page documents the CombineHarvester framework for the production and analysis of datacards for use with the CMS combine tool. The central part of this framework is the CombineHarvester class, which provides a representation of the text-format datacards and the associated shape input.
+This page documents the CombineHarvester framework for the production and analysis of datacards for use with the CMS combine tool. The central part of this framework is the CombineHarvester class, which provides a representation of the text-format datacards and the associated shape input.
The production of new datacards typically requires several steps, for example:
The CMSSW version that should be used with CombineHarvester is driven by the recommendation for the HiggsAnalysis/CombinedLimit package, which is also required. The latest instructions can be found here. The CombineHarvester framework is compatible with the CMSSW 10_2_X and 11_3_X series releases. A new release area can be set up and compiled in the following steps:
export SCRAM_ARCH=slc7_amd64_gcc900 -scram project CMSSW CMSSW_11_3_4 -cd CMSSW_11_3_4/src +The CMSSW version that should be used with CombineHarvester is driven by the recommendation for the HiggsAnalysis/CombinedLimit package, which is also required. The latest instructions can be found here. The CombineHarvester framework is compatible with the CMSSW 14_1_X and 11_3_X series releases. A new release area can be set up and compiled in the following steps:
cmsrel CMSSW_14_1_0_pre4 +cd CMSSW_14_1_0_pre4/src cmsenv git clone https://github.com/cms-analysis/HiggsAnalysis-CombinedLimit.git HiggsAnalysis/CombinedLimit # IMPORTANT: Checkout the recommended tag on the link above git clone https://github.com/cms-analysis/CombineHarvester.git CombineHarvester -git checkout v2.0.0 +git checkout v3.0.0-pre1 scram b -If you are using this framework for the first time we recommend taking a look through some of the examples below which demonstrate the main features:
+
If you are using this framework for the first time we recommend taking a look through some of the examples below which demonstrate the main features:
If the cause of such an error message is unclear, or if you believe the error message should not have been produced, please raise an issue here with full details on reproducing the problem: https://github.com/cms-analysis/CombineHarvester/issues/new
+If the cause of such an error message is unclear, or if you believe the error message should not have been produced, please raise an issue here with full details on reproducing the problem: https://github.com/cms-analysis/CombineHarvester/issues/new
Please also raise an issue if you encounter any bugs, unintended behaviour, abrupt errors or segmentation faults - these will be addressed promptly by the developers.
File: CombineTools/bin/Example1.cpp
+File: CombineTools/bin/Example1.cpp
In this example we use CombineHarvester to parse an existing datacard and then extract information from it. Open the file above and take a look at the source code. To run the example, first make sure the code has been compiled with scram
:
cd $CMSSW_BASE/src scram b -j4 Example1 -
In the first part we locate and open a single text datacard file:
When parsing a datacard, CombineHarvester breaks down the information it contains into sets of objects, each represented by a C++ class. A ch::Observation object stores the information about the observed data in a single category, and likewise ch::Process stores the information for one expected signal or background process in a category. A ch::Systematic object records the uncertainty value assigned to a particular process from a particular source.
shape
). Once the mapped ROOT file has been located and opened, the relevant histograms are copied into their corresponding CombineHarvester objects.Each object class stores a standard set of metadata, designed to aid in the filtering and selection of particular objects within a CombineHarvester, and which in the example above is specified explicitly. The possible metadata is listed in the following table.
name | type | example value | name | type | example value |
---|---|---|
bin | string | automatic | bin | string | automatic |
process | string | automatic | process | string | automatic |
analysis | string | "htt" | analysis | string | "htt" |
era | string | "8TeV" | era | string | "8TeV" |
channel | string | "mt" | channel | string | "mt" |
bin_id | int | 6 | bin_id | int | 6 |
mass | string | "125" | mass | string | "125" |
Of these only bin
, process
and mass
are tracked and used by combine, the others are optional and can be left empty if unneeded. The bin
property is used to uniquely label an event category. Along with the process names, this is written directly into the datacard and is extracted automatically. The mass
property is an exception: although we typically create a datacard for a particular signal mass hypothesis this information is not recorded in the datacard, but rather is passed to combine as a command line option, e.g. combine -M Asymptotic -m 125 my_datacard.txt
.
mass
is not specified, ParseDatacard is likely to fail as this property is often needed to map signal processes to histograms in the input ROOT file. You can tell if this property is needed by looking for the term $MASS
in the shapes
rules at the top of the text datacard.Alternatively,there are a number of fixed-property filters, in which you need only supply a vector of the object properties you want to keep:
An optional boolean can be supplied as a second argument. When set to false this reverses the logic - objects with a property in the list will be dropped. In the second line we use this to remove all information about the QCD process. The full list of filter methods is found here
Note that these functions are greedy - they will sum the contribution from every available Observation or Process entry. This means in the first line we get the total number of observed events in the three remaining categories. To get the yield for a single category we can prefix the function with a filter method. But here we must be careful, because we don't want to actually remove the information on the other categories permanently, which is what would happen if we just do:
cmb2.bin({"muTau_vbf_loose"}).GetObservedRate(); // cmb2 only contains objects for the "muTau_vbf_loose" category now! -
To get around this we first call the cp method on our CombineHarvester instance. This makes a shallow copy of the instance - in this it is only pointers to the contained objects, not the objects themselves, which are copied into a new instance. Such a copy is computationally fast to make, and we are free to filter objects from it without affecting the original instance at all.
+To get around this we first call the cp method on our CombineHarvester instance. This makes a shallow copy of the instance - in this it is only pointers to the contained objects, not the objects themselves, which are copied into a new instance. Such a copy is computationally fast to make, and we are free to filter objects from it without affecting the original instance at all.
The last part of the example code uses the CombineHarvester set-generating methods to conveniently loop through all defined (bin, process) combinations and print out the expected yield. The full list of available set-generating methods can be found here.
-File: CombineTools/bin/Example2.cpp
+File: CombineTools/bin/Example2.cpp
In this example we will set up a simplified version of the Higgs to tau tau datacards, while exploring the main features of datacard creation with the CombineHarvester tool. To run the example, first make sure the code has been compiled:
cd $CMSSW_BASE/src scram b -j4 Example2 -
We start by defining two analysis categories (or 'bins'). It's a good idea for each bin to have a unique name: this is required by combine, and while not required by CombineHarvester explicitly, a number of functions rely on this being true. CombineHarvester also allows for each object to be assigned an integer value, called a "bin_id", that does not need to be unique. This can be useful to label a "type-of-category" that might appear more than once. For example, VBF event categories for both the 7TeV and 8TeV datasets might have a common bin_id, but different names: "vbf_7TeV" and "vbf_8TeV".
Now we define the signal mass points we will build datacards for, but note these are specified as strings, not floats. The function ch::MassesFromRange
is used to quickly generate mass values from 120 to 135 GeV in 5 GeV steps.
The next step is to add some new objects to the CombineHarvester instance. First we will specifiy the observations (i.e. the actual data). The AddObservations method takes a series of vectors as arguments. Each vector specifies some property, such as the analysis name, the dataset era or the bin information. Every possible combination of elements from these vectors will be used to add a new Observation entry.
The arguments are: AddObservations(masses, analyses, eras, channels, categories)
Below we specify one mass entry ("*"), which implies we only need one entry that will cover all signal mass hypotheses. Then we specify the higgs-tau-tau analysis ("htt"), the 8TeV dataset ("8TeV"), the mu+tau analysis channel ("mt") and the categories we defined above. If the analysis, era and channel properties aren't relevant for your analysis, you can always leave them as a single emtpy string.
Next we add the signal and background processes. The arguments are similar to the AddObservations method. An extra argument is added after the channels for the list of processes, and a final boolean option specifies whether these are signal or background processes. Note that each process name here should correspond to the histogram name in your input file. In the signal case we pass the list of Higgs mass hypotheses generated above instead of the generic "*".
The next step is to add details of the systematic uncertainties. The details of an uncertainty on a single process in a single bin is called a ch::Systematic. With CombineHarvester we create the ch::Systematic entries for each uncertainty source in turn. In doing so we must specify: the name of the nuisance parameter we want to use, the type (i.e. normalisation or shape), which processes it should be applied to and the magnitude of the uncertainty on each process. All this information can be expressed in a single line of code (though for clarity we will usually split it over multiple lines), for example the luminosity uncertainty:
@@ -159,13 +162,16 @@We can break this line down into several parts. cb.cp() returns a shallow copy of the CombineHarvester instance - i.e. the Observation and Process entries are shared with the original object (see the documentation of Example 1). However, removing entries from this shallow copy leaves the entries in the original instance intact. The next method, signals(), acts on this copy. This is one of several filter methods. It removes any non-signal process from the internal entries. We do this because we only want to create Systematic entries for the signal processes. Like all filter methods this returns a reference to itself. Then we can apply the actual AddSyst method. The first argument is a reference to the CombineHarvester instance where the new Systematic entries should be created. In this case we just give it our original instance (remember we are calling the AddSyst method on a copy of this instance). The next argument is the Systematic name. Before the Systematic entry for each Process is created a number of string substitutions will be made, based on the properties of the process in question. These are:
$BIN --> proc.bin() $PROCESS --> proc.process() (the process name) $MASS --> proc.mass() $ERA --> proc.era() $CHANNEL --> proc.channel() $ANALYSIS --> proc.analysis() -
So in this example we will expect names like "lumi_8TeV". This substitution provides a quick way of renaming systematics to be correlated/uncorrelated between different channels/analyses/bins etc. Next we specifiy the nuisance type, which must be either "lnN" or "shape". The final argument is special map (SystMap) that contains the set of values that should be added. The SystMap is a templated class, which can take an arbitrary number of template parameters. Each parameter specifies a Process property that will be used as part of the key to map to the values. In this case we will just use the process era as a key. We initialse a new map with init
, then provide a series of entries. Each entry should consist of a series of vectors, one for each key value, and end in the lnN value that should be assigned. Processes matching any combination of key properties in this map will be assigned the given value. In this map, we assign any Process with era "7TeV" a value of 1.022, and any "8TeV" Process a value of 1.026. More examples are given below:
So in this example we will expect names like "lumi_8TeV". This substitution provides a quick way of renaming systematics to be correlated/uncorrelated between different channels/analyses/bins etc. Next we specifiy the nuisance type, which must be either "lnN" or "shape". The final argument is special map (SystMap) that contains the set of values that should be added. The SystMap is a templated class, which can take an arbitrary number of template parameters. Each parameter specifies a Process property that will be used as part of the key to map to the values. In this case we will just use the process era as a key. We initialse a new map with init
, then provide a series of entries. Each entry should consist of a series of vectors, one for each key value, and end in the lnN value that should be assigned. Processes matching any combination of key properties in this map will be assigned the given value. In this map, we assign any Process with era "7TeV" a value of 1.022, and any "8TeV" Process a value of 1.026. More examples are given below:
Creation of asymmetric "lnN" uncertainties is supported through the SystMapAsymm class, whose interface is very similar to SystMap. Instead of a single uncertainty value, simply provide the "down" and "up" relative uncertainties as two separate arguments.
Also note that data histogram must be named data_obs to be extracted by this command.
+Also note that data histogram must be named data_obs to be extracted by this command.
The next step is optional. This will generate additional shape uncertainties to account for limited template statistics, so-called "bin-by-bin" uncertainties.
@@ -212,6 +222,11 @@We first create a ch::BinByBinFactory instance, and specify the bin error threshold over which an uncertainty should be created, expressed as a percentage of the bin content. We also set the flag "FixedNorm", which controls the normalisation of the Up and Down shapes that are created. If set to true, the normalisation is fixed to nominal rate. If false, the normalisation is allowed to vary. We then call the AddBinByBin method specifying that only the background processes should be considered.
While we are required to write a separate datacard for each mass point, there is no obligation to generate one for each bin. For example,
cb.cp().mass({"125", "*"}).WriteDatacard("combined_125.txt", output); -
will produce a datacard containing all categories.
+will produce a datacard containing all categories.
-File: CombineTools/bin/Example3.cpp
+File: CombineTools/bin/Example3.cpp
This examples demonstrates how to create a simple four-bin counting experiment datacard. It is based on the example given on the combine twiki here. Special rateParam
directives are added to the datacard to allow the normalisations in three of these four bins to float freely and the fourth will be expressed as a function of the these three parameters. Here we follow the C++ interface example, a similar python version can be found in CombineTools/scripts/Example3.py
. Make sure all the code is compiled and run the example:
cd $CMSSW_BASE/src scram b -j4 Example3 -
We start by defining four categories: A, B, C and D in the normal way. Contrary to the previous shape-based examples, with a counting experiment we have to specify all of the observed and expected yields directly. To start with we'll define a map containing the observed yields in each category.
+We start by defining four categories: A, B, C and D in the normal way. Contrary to the previous shape-based examples, with a counting experiment we have to specify all of the observed and expected yields directly. To start with we'll define a map containing the observed yields in each category.
Next we create the CombineHarvester instance and populate it with Observation and Process entries. Then using the ForEachProc
and ForEachObs
methods we supply small lambda functions that set the observed and expected yields, for the former using our map defined above. For the latter we will set each yield to one and then create some rateParam
entries that will allow these yields to float.
First we add a regular lnN
systematic uncertainty with the AddSyst
method. Then, using the same technique, we add a rateParam
systematic to each of the bins B, C and D. The name we give will be turned into a floating parameter which will be multiplied by the process yield that we set above to determine the overall process normalisation in the model. The name we specify here supports pattern substitution like any other systematic. In this case we have created a unique parameter per bin. In the SystMap we set the initial value of each parameter to the observed yield in the respective bin.
Then we create the second type of rateParam
term, one which is a function of the other parameters we have specified. This sets the expected yield in bin A as a function of the three free parameters we just created, like in a standard ABCD method to set a background normalisation via control regions. As for the floating parameters this expression will be multiplied by the nominal process yield. To define the expression we use different mapping object, a SystMapFunc
, which accepts two string arguments instead of a float. The first is a RooFit formula in terms of generic parameters, e.g. @0
, and the second is a comma-separated list of the corresponding parameter names.
AddSyst
a check is made to see if the named parameter already exists. If so, and if it is a floating parameter, its initial value will be updated. If it is a formula, then the existing formula will be used to create the Systematic and the new value will be ignored, i.e. once a formula has been entered into the CombineHarvester instance it cannot be modified.Finally we print the CombineHarvester contents. Note that the while the Systematic entries for the rateParam
terms are shown in the list the "value" column currently always shows zero. For floating terms the actual values are given in the final list of parameters. We end by writing the text datacard, where the initial parameter values and formulae will be written at the end.
rateParam
terms. This will be implemented in a later release. File: CombinePdfs/src/MorphFunctions.cc
The PDF which is used for the signal is a custom RooFit PDF. Its application is made via the datacard production code, with a call to a function named BuildRooMorphing. Here we highlight some of the important features of this function. For a greater level of detail we recommend the reader look at the code itself, which is well commented.
@@ -115,6 +113,7 @@The options to this function are as follows:
Then it is necessary to track all the information on the systematics affecting the signal process. This is done separately for the shape and normalisation systematics. After some manipulation of this information (including special treatment for the case where a shape systematic also alters the normalisation, i.e. has a value different from 1.00), it is possible to build a RooArgList of the parameters controlling the vertical template morphing which is internally used by combine to apply the effect of the shape systematics to the signal.
Note that the above code takes care over the possibility that the shape systematics are not the same for all masspoints - this is not currently supported in this function. The created "ss_list" is used later to create the vertical template morphing PDFs.
For the normalisation systematics we must consider separately the two cases: 1) in the first case the uncertainty is the same for each masspoint, so we can leave this in the datacard in the usual way, but in case 2) where the uncertainty is not the same for each masspoint, we have to include this information in the signal PDF by creating a RooFit object that makes the uncertainty a function off mass. Finally a list is built of all objects required for the interpolation.
A 1D spline is built directly from the array of rates and masses:
@@ -293,10 +295,6 @@Using all of the techniques described previously, both in terms of datacard production and usage of the RooMorphingPdf object for signal processes, code to reproduce many of the Run 1 H->tautau results is included in the package. For some analyses, development was first performed for the datacard production without morphing applied, and fully validated, before moving to using morphing for the signal process. Note that in the non-morphing version many of the analyses have an equivalent python version. Once the usage of RooMorphingPdf for signal was validated in several use cases some additional analyses were added making use of this method only. This section describes how to find the code for each of the legacy analyses and details any specifics beyond the previous examples. More detail on the validation which was made can be found in the analysis note AN-15-235. Note that to run all of the examples below, the shape files exist in /auxiliaries and are linked to the script correctly. For more information on running the statistical results with the produced datacards, see later sections.
+Using all of the techniques described previously, both in terms of datacard production and usage of the RooMorphingPdf object for signal processes, code to reproduce many of the Run 1 H->tautau results is included in the package. For some analyses, development was first performed for the datacard production without morphing applied, and fully validated, before moving to using morphing for the signal process. Note that in the non-morphing version many of the analyses have an equivalent python version. Once the usage of RooMorphingPdf for signal was validated in several use cases some additional analyses were added making use of this method only. This section describes how to find the code for each of the legacy analyses and details any specifics beyond the previous examples. More detail on the validation which was made can be found in the analysis note AN-15-235. Note that to run all of the examples below, you should ensure the shape files exist in /auxiliaries and are linked to the script correctly. For more information on running the statistical results with the produced datacards, see later sections.
Files CombineTools/interface/HttSystematics.h, CombineTools/python/systematics
@@ -134,6 +132,11 @@ This reads the values for cross section times branching ratio from a text file and then uses them to scale each of the signal processes by the appropriate value.
2) Merging bin by bin uncertainties
@@ -142,6 +145,10 @@In this example the merge threshold is set to 0.5. This controls the proportion of the total error that is allowed to be merged. The add threshold controls the value below which the stat uncertainty should be for the bin by bin uncertainty to be added. The uncertainties for different processes can then be added and merged simultaneously using calls like:
The filters for eta, bin id and channel can be used in this way to add the specific requirements for each channel and category, of which there were many for the legacy SM analysis.
@@ -159,6 +166,7 @@The validation of the produced SM cards as compared to the official cards can be found in the Analysis Note.
In these examples we will work through several different limit calculations using datacards from the Run 1 HTT analyses.
+In these examples we will work through several different limit calculations using datacards from the Run 1 HTT analyses.
First we will compute limits vs mH in the legacy SM Higgs analysis (HIG-13-004). This task will be broken down into several steps:
@@ -137,18 +135,18 @@Go to the CombineTools
directory and create the datacards using the SMLegacyExample.py
script:
cd $CMSSW_BASE/src/CombineHarvester/CombineTools python scripts/SMLegacyExample.py -
This will create a familiar structure of datacards in output/sm_cards
, with one subdirectory containing the datacards for all channels and categories (cmb
) and subdirectories containing just the cards for specific channels (e.g. tt
). Within each of these directories the cards are organised into further subdirectories corresponding to the mass of the signal.
This will create a familiar structure of datacards in output/sm_cards
, with one subdirectory containing the datacards for all channels and categories (cmb
) and subdirectories containing just the cards for specific channels (e.g. tt
). Within each of these directories the cards are organised into further subdirectories corresponding to the mass of the signal.
We will focus on computing the limits for each channel separately, but before we get to this we need to turn the datacards into binary RooFit workspaces. This means combining the cards from each category into a single text datacard with combineCards.py
, then running text2workspace.py
to convert this single card into a workspace.
combine
directly with a text datacard as input. Internally combine
will just run text2workspace.py
on it first with some default options and save the resulting workspace in a temporary location. For anything other than very simple models it is preferable to create the workspaces manually which can then be reused for later calculations.The combineTool.py
script has a mode called T2W that passes options through to text2workspace.py
and supports multiple datacard or directory options. With a directory argument, all the cards within that directory are combined first. If the enclosing directory is a number then this will automatically be used as the -m [mass]
option in text2workspace.py
, so there is no need to specify this explicitly. We can also take advantage of the --parallel
option to build multiple workspaces simultaneously.
cd output/sm_cards combineTool.py -M T2W -i {ee,mm,em,et,mt,tt}/* -o workspace.root --parallel 4 -
Here we have specified the name of the output workspace explicitly with the -o
option. The T2W mode also has a --cc
option that can be used to specify the name of the combined text datacard that is created (default is combined.txt
). When individual datacards are given as input, this option causes the cards to be combined first and produces a single workspace, as opposed to the default behaviour which would create a workspace per datacard. Run combineTool.py -M T2W -h
for more information.
Here we have specified the name of the output workspace explicitly with the -o
option. The T2W mode also has a --cc
option that can be used to specify the name of the combined text datacard that is created (default is combined.txt
). When individual datacards are given as input, this option causes the cards to be combined first and produces a single workspace, as opposed to the default behaviour which would create a workspace per datacard. Run combineTool.py -M T2W -h
for more information.
We now have a set of workspaces within a {channel}/{mass}/workspace.root
directory structure. The next step is simply to run the Asymptotic mode of combine on each workspace, which again can be done with combineTool.py
. The normal -d/--datacard
option is enhanced to support multiple workspaces:
combineTool.py -M Asymptotic -d */*/workspace.root --there -n .limit --parallel 4 -
One minor complication is that combine
produces its output in the directory we run it from. In this case the outputs for a mass point in the different channels will overwrite each other as there is nothing in the name option (-n
) to distinguish them. The solution is to add the --there
option which will run each combine
command in the directory where the workspace is located.
One minor complication is that combine
produces its output in the directory we run it from. In this case the outputs for a mass point in the different channels will overwrite each other as there is nothing in the name option (-n
) to distinguish them. The solution is to add the --there
option which will run each combine
command in the directory where the workspace is located.
Each output file contains a "limits" TTree with one entry for each of the observed and expected limits, for example:
********************************************************************************* @@ -163,7 +161,7 @@*********************************************************************************
This could be used as a direct input for plotting but here we will use another combineTool.py
method, CollectLimits to convert the information in these files to a more easily readable json file. This is a useful intermediate format for producing different figures and tables as it is simple to parse as a dictionary object in python.
We can use the directory structure to collect all the outputs in one go, creating one json file per channel:
combineTool.py -M CollectLimits */*/*.limit.* --use-dirs -o limits.json -
By default the CollectLimits method will assume all the limit results should be merged into a single json file, but with the --use-dirs
option it will group the output files according to the parent directory name (skipping directories that correspond to mass values). The output files will have these directory names appended, e.g. limits_mt.json
. The files are structured like a dictionary with one entry per mass value, e.g.
By default the CollectLimits method will assume all the limit results should be merged into a single json file, but with the --use-dirs
option it will group the output files according to the parent directory name (skipping directories that correspond to mass values). The output files will have these directory names appended, e.g. limits_mt.json
. The files are structured like a dictionary with one entry per mass value, e.g.
This will produce a standard limit plot:
-A more fully-featured example can be found in CombineTools/scripts/plotLimits.py
. This script supports multiple json file arguments for drawing combined observed/expected bands as above, or specifying single limits to draw. For the former it is enough to just give the json file as the argument. It is possible to restrict the output to just the observed or expected limits with the --show obs
or --show exp
option. With the latter it's possible to overlay limits from different json files:
plotLimits.py limits_{ee,em,et,mm,mt,tt}.json:obs --auto-style -
The limit to draw from each file is specified as [file.json]:[limit]
. The --auto-style
option will draw each TGraph with a different colour. Each TGraph can be styled further by extending the input argument: [file.json]:[limit]:[style options]
. The last part is a comma-separated list of style settings that will be applied to the TGraph. All single-argument SetXYZ(...)
methods are supported, e.g. 'limits_mt.json:exp:Title="#mu#tau_{h}",LineStyle=4
is equivalent to doing:
A more fully-featured example can be found in CombineTools/scripts/plotLimits.py
. This script supports multiple json file arguments for drawing combined observed/expected bands as above, or specifying single limits to draw. For the former it is enough to just give the json file as the argument. It is possible to restrict the output to just the observed or expected limits with the --show obs
or --show exp
option. With the latter it's possible to overlay limits from different json files:
plotLimits.py limits_{ee,em,et,mm,mt,tt}.json:obs --auto-style +
The limit to draw from each file is specified as [file.json]:[limit]
. The --auto-style
option will draw each TGraph with a different colour. Each TGraph can be styled further by extending the input argument: [file.json]:[limit]:[style options]
. The last part is a comma-separated list of style settings that will be applied to the TGraph. All single-argument SetXYZ(...)
methods are supported, e.g. 'limits_mt.json:exp:Title="#mu#tau_{h}",LineStyle=4
is equivalent to doing:
The following command will draw the observed and expected limits for three of the channels:
plotLimits.py --auto-style obs,exp \ @@ -243,23 +245,19 @@'limits_et.json:exp0:Title="e#tau_{h} Expected"' \ 'limits_tt.json:obs:Title="#tau_{h}#tau_{h} Observed"' \ 'limits_tt.json:exp0:Title="#tau_{h}#tau_{h} Expected"' -
Note the use of single quotes to prevent bash removing the double quotes surrounding the graph titles. Here the --auto-style
argument is given a list of groups for assigning style options. Graphs in each group are given a common line style and the same pool of colours are assigned in order for graphs within that group. The output of this command is given below.
Note the use of single quotes to prevent bash removing the double quotes surrounding the graph titles. Here the --auto-style
argument is given a list of groups for assigning style options. Graphs in each group are given a common line style and the same pool of colours are assigned in order for graphs within that group. The output of this command is given below.
For datacards using RooMorphingPdfs for the signal, the steps to produce limits are similar to the per-mass case above, with the main difference being the need to specify the set of mass values explicitly rather than picking this up from the directory structure. The SMLegacyMorphing
program will create similar directory structure with the RooMorphingPdf version of the HIG-13-004 cards:
cd $CMSSW_BASE/src/CombineHarvester/CombineTools SMLegacyMorphing -
This program creates a directory per channel which already contain a combined datacard. We can specify these cards directly in the text2workspace step.
cd output/sm_cards_morphed/ +
This program creates a directory per channel which already contain a combined datacard. We can specify these cards directly in the text2workspace step.
cd output/sm_cards_morphed/ combineTool.py -M T2W -i {ee,mm,em,et,mt,tt}/combinedCard.txt -o workspace.root --parallel 4 -
For the Asymptotic calculation we take advantage of the enhanced -m
option to compute the limit in 2 GeV steps:
combineTool.py -M Asymptotic -d */workspace.root -m 110:144:2 --freezeNuisances MH --there -n .limit --parallel 4 -
After this, the output collection and the plotting is the same as above:
combineTool.py -M CollectLimits */*.limit.* --use-dirs -o limits.json +
For the Asymptotic calculation we take advantage of the enhanced -m
option to compute the limit in 2 GeV steps:
combineTool.py -M Asymptotic -d */workspace.root -m 110:144:2 --freezeNuisances MH --there -n .limit --parallel 4 +
After this, the output collection and the plotting is the same as above:
combineTool.py -M CollectLimits */*.limit.* --use-dirs -o limits.json plotLimits.py limits_mt.json -
coming soon
With the Asymptotic method of combine it is only possible to determine a limit for a single POI at a time. This POI will be chosen as the first one in the list of POIs embedded within the workspace. Therefore it is best to always specify the POI list explicitly, putting the one you want the limit for first in the list, e.g.
combine -M Asymptotic -d workspace.root -m 125 --redefineSignalPOIs r_ggH,r_qqH
to set a limit on r_ggH
. You should also decide how to treat the other POIs in the fits for the limit extraction. By default combine will allow all other POIs to float freely, so in this example r_qqH
will be profiled. If instead you wish to fix it to a particular value then it should not be included in the list of POIs, e.g.
combine -M Asymptotic -d workspace.root -m 125 --redefineSignalPOIs r_ggH --setPhysicsModelParameters r_qqH=0.0 --freezeNuisances r_qqH -
There is a further issue to consider when computing expected limits without a fit to the data (combine option --run blind
or when generating toy datasets with -t
). For the profiled case combine must generate a background-only pre-fit asimov dataset for use in the asymptotic calculation. In doing this only the first POI in the list will be fixed to zero. So if the other POI values are non-zero then the asimov dataset will contain a (probably) unwanted signal contribution. To avoid this it is safest to explicitly set the values of the other POIs to zero, e.g.
combine -M Asymptotic -d workspace.root -m 125 --redefineSignalPOIs r_ggH,r_qqH --setPhysicsModelParameters r_qqH=0.0 -
There is a further issue to consider when computing expected limits without a fit to the data (combine option --run blind
or when generating toy datasets with -t
). For the profiled case combine must generate a background-only pre-fit asimov dataset for use in the asymptotic calculation. In doing this only the first POI in the list will be fixed to zero. So if the other POI values are non-zero then the asimov dataset will contain a (probably) unwanted signal contribution. To avoid this it is safest to explicitly set the values of the other POIs to zero, e.g.
combine -M Asymptotic -d workspace.root -m 125 --redefineSignalPOIs r_ggH,r_qqH --setPhysicsModelParameters r_qqH=0.0 +
coming soon
The cards from the CMS HCG svn need to be copied into the auxiliaries directory:
cp -r /afs/cern.ch/work/a/agilbert/public/CombineTools/data/HIG-14-020.r6636 $CMSSW_BASE/src/auxiliaries/datacards/ -
We will only be using the "low-mass" cards (m_H+ < m_top), which are named like combine_datacard_hplushadronic_m[MASS].txt
, where [MASS]
ranges from 80 to 160 GeV.
We will only be using the "low-mass" cards (m_H+ < m_top), which are named like combine_datacard_hplushadronic_m[MASS].txt
, where [MASS]
ranges from 80 to 160 GeV.
The program AdaptChargedHiggs
will:
To reproduce the model-independent limits from the charged Higgs PAS we will need to use the same physics model that scales the tt->H+H-bb
and tt->H+W-bb
processes by the appropriate function of the t->H+b
branching ratio POI BR
. This has been adapted from the original version in the combine repository to account for our change of signal process naming:
text2workspace.py hplus_tauhad_mssm.txt -o hplus_tauhad_mssm.root -P CombineHarvester.CombinePdfs.MSSM:brChargedHiggs -
Then we can run the asymptotic limits:
combineTool.py -M Asymptotic -d hplus_tauhad_mssm.root -m 80:160:10 --freezeNuisances MH --setPhysicsModelParameterRanges BR=0,0.2 --cminDefaultMinimizerType Minuit2 --rAbsAcc 0.00001 -n .ChargedHiggs -
A few non-default combine options are used:
+Then we can run the asymptotic limits:
combineTool.py -M Asymptotic -d hplus_tauhad_mssm.root -m 80:160:10 --freezeNuisances MH --setPhysicsModelParameterRanges BR=0,0.2 --cminDefaultMinimizerType Minuit2 --rAbsAcc 0.00001 -n .ChargedHiggs +
A few non-default combine options are used:
Option | Reason | Option | Reason |
---|---|
--setPhysicsModelParameterRanges BR=0,0.2 | Don't want the range to be too large compared to the typical uncertainty on the limit (can reduce Minuit precision) | --setPhysicsModelParameterRanges BR=0,0.2 | Don't want the range to be too large compared to the typical uncertainty on the limit (can reduce Minuit precision) |
--cminDefaultMinimizerType Minuit2 | When doing the fit to get the global observables for the asimov dataset combine uses Minuit not Minuit2 by default - better to always use Minuit2 | --cminDefaultMinimizerType Minuit2 | When doing the fit to get the global observables for the asimov dataset combine uses Minuit not Minuit2 by default - better to always use Minuit2 |
--rAbsAcc 0.00001 | The default absolute uncertainty on the POI value when searching for the limit is too large compared to the small values of BR we are probing here - we would get inaccurate limits. | --rAbsAcc 0.00001 | The default absolute uncertainty on the POI value when searching for the limit is too large compared to the small values of BR we are probing here - we would get inaccurate limits. |
The output limits are found to be in excellent agreement with the published numbers (dash indicates only available via morphing):
Mass (GeV) | Median exp. (HIG-14-020) | Median exp. (CH) | Obs. (HIG-14-020) | Obs. (CH) | Mass (GeV) | Median exp. (HIG-14-020) | Median exp. (CH) | Obs. (HIG-14-020) | Obs. (CH) |
---|---|---|---|---|
80 | 0.0111 | 0.0111 | 0.0121 | 0.0121 | 80 | 0.0111 | 0.0111 | 0.0121 | 0.0121 |
90 | 0.0080 | 0.0079 | 0.0094 | 0.0095 | 90 | 0.0080 | 0.0079 | 0.0094 | 0.0095 |
100 | 0.0061 | 0.0061 | 0.0063 | 0.0063 | 100 | 0.0061 | 0.0061 | 0.0063 | 0.0063 |
110 | - | 0.0046 | - | 0.0043 | 110 | - | 0.0046 | - | 0.0043 |
120 | 0.0034 | 0.0034 | 0.0029 | 0.0029 | 120 | 0.0034 | 0.0034 | 0.0029 | 0.0029 |
130 | - | 0.0028 | - | 0.0023 | 130 | - | 0.0028 | - | 0.0023 |
140 | 0.0023 | 0.0023 | 0.0018 | 0.0018 | 140 | 0.0023 | 0.0023 | 0.0018 | 0.0018 |
150 | 0.0021 | 0.0021 | 0.0015 | 0.0015 | 150 | 0.0021 | 0.0021 | 0.0015 | 0.0015 |
160 | 0.0022 | 0.0022 | 0.0016 | 0.0016 | 160 | 0.0022 | 0.0022 | 0.0016 | 0.0016 |
Typedefs |
@@ -64,10 +61,10 @@ |
Data Structures | |
struct | analysis |
struct | bin |
class | bin_id |
struct | channel |
struct | analysis |
struct | era |
struct | channel |
struct | mass |
struct | process |
class | bin_id |
class | SystMap |
class | SystMapAsymm | @@ -64,10 +61,10 @@ |