HiggsAnalysis-KITHiggsToTauTau
makePlots_datacardsLFV Namespace Reference

Functions

def harry_do_your_job
 

Variables

tuple log = logging.getLogger(__name__)
 
tuple parser
 
string help = "Input directory."
 
list default = ["ZeroJet_LFV", "OneJet_LFV"]
 
tuple args = parser.parse_args()
 
tuple systematics_factory = systematics.SystematicsFactory()
 Delete old datacards.
 
list plot_configs = []
 
list output_files = []
 Hadd command for merging the tmp output files.
 
list merged_output_files = []
 
list hadd_commands = []
 
string tmp_input_root_filename_template = "input/${ANALYSIS}_${CHANNEL}_${BIN}_${SYSTEMATIC}_${ERA}"
 
string input_root_filename_template = "input/${ANALYSIS}_${CHANNEL}_${BIN}_${ERA}.root"
 
string bkg_histogram_name_template = "${BIN}/${PROCESS}"
 
string sig_histogram_name_template = "${BIN}/${PROCESS}"
 
string bkg_syst_histogram_name_template = "${BIN}/${PROCESS}_${SYSTEMATIC}"
 
string sig_syst_histogram_name_template = "${BIN}/${PROCESS}_${SYSTEMATIC}"
 
list datacard_filename_templates
 
string output_root_filename_template = "datacards/common/${ANALYSIS}.input_${ERA}.root"
 
dictionary categories
 Dictionary of categories, each with a list of [key for the optimized cuts, weight for the number of jets].
 
dictionary control_regions
 Dictionary of control regions, each with a list of [process, weight].
 
tuple datacards = lfvdatacards.LFVDatacards(args.channel, args.signal, args.categories, control_regions, lnN_syst_enable = args.lnN_uncs, shape_syst_enable = args.shape_uncs, rate_param_enable = False)
 
tuple cut_info = yaml.load(open(os.path.abspath(os.path.expandvars("$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/python/lfv/cuts.yaml")), "r"))
 Information about parameters and cuts for the harry.py config.
 
tuple parameter_info = yaml.load(open(os.path.abspath(os.path.expandvars("$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/python/lfv/parameter.yaml")), "r"))
 
tuple list_of_samples_for_syst = datacards.get_samples_per_shape_systematic()
 
list tmp_output_files = []
 
list cut_strings = [parameter_info[param][4] for param in cut_info[categories[category][0]][channel].keys()]
 
string weight = "*"
 Weight if the BDT score is used for the statistical analysis.
 
string category = channel+"_"
 
tuple nominal = (shape_systematic == "nominal")
 
list samples = ["data"]
 Samples for control region.
 
string systematic = "nominal"
 
 histogram_name_template = bkg_histogram_name_template if nominal else bkg_syst_histogram_name_template
 
list base_values
 Define values to fill Harry Plotter config.
 
list sample_values
 
list datacard_values
 
tuple config = configmaster.ConfigMaster(base_values, sample_values)
 Fill config with ConfigMaster and SystematicsFactory.
 
tuple systematics_settings = systematics_factory.get(shape_systematic)
 
tuple tmp_output_file = os.path.join(args.output_dir, tmp_input_root_filename_template.replace("$", "").format(ANALYSIS="LFV", CHANNEL=channel, BIN=category, SYSTEMATIC=systematic, ERA="13TeV") + ".root")
 Apply specific config changes.
 
tuple output_file = os.path.join(args.output_dir, input_root_filename_template.replace("$", "").format(ANALYSIS="LFV", CHANNEL=channel, BIN=category, ERA="13TeV"))
 File list with merged outputs.
 
tuple pool = Pool(cpu_count())
 
dictionary datacards_cbs = {}
 
dictionary datacards_poi_ranges = {}
 
tuple channels = cb.channel_set()
 
tuple datacards_workspaces = datacards.text2workspace(datacards_cbs, n_processes=1)
 

Function Documentation

def makePlots_datacardsLFV.harry_do_your_job(config)

Variable Documentation

tuple makePlots_datacardsLFV.args = parser.parse_args()
list makePlots_datacardsLFV.base_values
Initial value:
= [
    [args.input_dir],
    args.output_dir,
    ["png"],
    True,
    "",
    [args.quantity],
    ["30,40,130"] if args.quantity == "m_vis" else ["20,0,0.2"],  # binning for m_vis or BDT score
    tmp_input_root_filename_template.replace("$", "").format(ANALYSIS="LFV", CHANNEL=channel, BIN=category, SYSTEMATIC=systematic, ERA="13TeV"),
    "",
    False,
    ""
]

Define values to fill Harry Plotter config.

string makePlots_datacardsLFV.bkg_histogram_name_template = "${BIN}/${PROCESS}"
string makePlots_datacardsLFV.bkg_syst_histogram_name_template = "${BIN}/${PROCESS}_${SYSTEMATIC}"
dictionary makePlots_datacardsLFV.categories
Initial value:
= {
    "ZeroJet_LFV": [0, "(njetspt30==0)"],
    "OneJet_LFV": [1, "(njetspt30==1)"],
}

Dictionary of categories, each with a list of [key for the optimized cuts, weight for the number of jets].
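
For illustration, a minimal, self-contained sketch of how such an entry might be consumed, mirroring the cut_strings and weight expressions documented in this namespace. All parameter names, cut values, and the "em" channel below are dummies; the real content comes from cuts.yaml and parameter.yaml, and how the script actually combines the cut strings may differ:

    # Dummy stand-ins: cuts.yaml maps the integer key to per-channel parameters,
    # parameter.yaml keeps the cut string at index 4 (as in the cut_strings expression).
    categories = {"ZeroJet_LFV": [0, "(njetspt30==0)"], "OneJet_LFV": [1, "(njetspt30==1)"]}
    cut_info = {0: {"em": {"pt_1": None}}, 1: {"em": {"pt_1": None}}}
    parameter_info = {"pt_1": [None, None, None, None, "(pt_1>20.0)"]}

    channel, category_name = "em", "ZeroJet_LFV"
    cut_key, jet_weight = categories[category_name]
    cut_strings = [parameter_info[param][4] for param in cut_info[cut_key][channel].keys()]
    weight = "*".join(cut_strings + [jet_weight])
    print(weight)  # (pt_1>20.0)*(njetspt30==0)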

string makePlots_datacardsLFV.category = channel+"_"
tuple makePlots_datacardsLFV.channels = cb.channel_set()
tuple makePlots_datacardsLFV.config = configmaster.ConfigMaster(base_values, sample_values)

Fill config with ConfigMaster and SystematicsFactory.

dictionary makePlots_datacardsLFV.control_regions
Initial value:
= {
    # "TT_CR_LFV": ["TT", "nbtag==0"]
}

Dictionary of control regions, each with a list of [process, weight].

tuple makePlots_datacardsLFV.cut_info = yaml.load(open(os.path.abspath(os.path.expandvars("$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/python/lfv/cuts.yaml")), "r"))

Information about parameters and cuts for the harry.py config.
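
A self-contained sketch of this loading pattern, shown with yaml.safe_load instead of the plain yaml.load call above:

    import os
    import yaml

    # Expand $CMSSW_BASE and normalize the path before opening the YAML file.
    cuts_path = os.path.abspath(os.path.expandvars(
        "$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/python/lfv/cuts.yaml"))
    with open(cuts_path, "r") as yaml_file:
        cut_info = yaml.safe_load(yaml_file)  # safe_load does not construct arbitrary Python objects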

list makePlots_datacardsLFV.cut_strings = [parameter_info[param][4] for param in cut_info[categories[category][0]][channel].keys()]
list makePlots_datacardsLFV.datacard_filename_templates
Initial value:
= [
    "datacards/individual/${BIN}/${ANALYSIS}_${CHANNEL}_${BINID}_${ERA}.txt",
    "datacards/channel/${CHANNEL}/${ANALYSIS}_${CHANNEL}_${ERA}.txt",
    "datacards/category/${BINID}/${ANALYSIS}_${BINID}_${ERA}.txt",
    "datacards/combined/${ANALYSIS}_${ERA}.txt",
]
list makePlots_datacardsLFV.datacard_values
Initial value:
= [
    [histogram_name_template.replace("$", "").format(PROCESS=datacards.configs.sample2process(sample), CHANNEL=channel, BIN=category, SYSTEMATIC=systematic) for sample in samples],
    ["ExportRoot"],
    "UPDATE"
]
tuple makePlots_datacardsLFV.datacards = lfvdatacards.LFVDatacards(args.channel, args.signal, args.categories, control_regions, lnN_syst_enable = args.lnN_uncs, shape_syst_enable = args.shape_uncs, rate_param_enable = False)
dictionary makePlots_datacardsLFV.datacards_cbs = {}
dictionary makePlots_datacardsLFV.datacards_poi_ranges = {}
tuple makePlots_datacardsLFV.datacards_workspaces = datacards.text2workspace(datacards_cbs, n_processes=1)
list makePlots_datacardsLFV.default = ["ZeroJet_LFV", "OneJet_LFV"]
list makePlots_datacardsLFV.hadd_commands = []
string makePlots_datacardsLFV.help = "Input directory."
makePlots_datacardsLFV.histogram_name_template = bkg_histogram_name_template if nominal else bkg_syst_histogram_name_template
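
For illustration, a self-contained sketch of how the nominal/systematic template choice and the replace("$", "").format(...) substitution combine; the bin, process, and systematic names are made up:

    bkg_histogram_name_template = "${BIN}/${PROCESS}"
    bkg_syst_histogram_name_template = "${BIN}/${PROCESS}_${SYSTEMATIC}"

    shape_systematic = "some_shift"  # hypothetical; "nominal" would select the plain template
    nominal = (shape_systematic == "nominal")
    histogram_name_template = bkg_histogram_name_template if nominal else bkg_syst_histogram_name_template

    # Same substitution idiom as in datacard_values / tmp_output_file:
    histogram_name = histogram_name_template.replace("$", "").format(
        PROCESS="ZTT", BIN="em_ZeroJet_LFV", SYSTEMATIC="some_shiftUp")
    print(histogram_name)  # em_ZeroJet_LFV/ZTT_some_shiftUp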
string makePlots_datacardsLFV.input_root_filename_template = "input/${ANALYSIS}_${CHANNEL}_${BIN}_${ERA}.root"
tuple makePlots_datacardsLFV.list_of_samples_for_syst = datacards.get_samples_per_shape_systematic()
tuple makePlots_datacardsLFV.log = logging.getLogger(__name__)
list makePlots_datacardsLFV.merged_output_files = []
tuple makePlots_datacardsLFV.nominal = (shape_systematic == "nominal")
tuple makePlots_datacardsLFV.output_file = os.path.join(args.output_dir, input_root_filename_template.replace("$", "").format(ANALYSIS="LFV", CHANNEL=channel, BIN=category, ERA="13TeV"))

File list with merged outputs.

list makePlots_datacardsLFV.output_files = []

Hadd command for merging the tmp output files.
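
A minimal sketch of the kind of hadd command collected here, run via subprocess and assuming ROOT's hadd executable is on the PATH; the file names are placeholders following the input filename templates documented in this namespace:

    import subprocess

    tmp_output_files = [
        "input/LFV_em_em_ZeroJet_LFV_nominal_13TeV.root",      # placeholder per-systematic tmp files
        "input/LFV_em_em_ZeroJet_LFV_some_shiftUp_13TeV.root",
    ]
    output_file = "input/LFV_em_em_ZeroJet_LFV_13TeV.root"     # placeholder merged file

    # "hadd -f <target> <sources...>" overwrites the target and merges all source histograms.
    subprocess.check_call(["hadd", "-f", output_file] + tmp_output_files)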

string makePlots_datacardsLFV.output_root_filename_template = "datacards/common/${ANALYSIS}.input_${ERA}.root"
tuple makePlots_datacardsLFV.parameter_info = yaml.load(open(os.path.abspath(os.path.expandvars("$CMSSW_BASE/src/HiggsAnalysis/KITHiggsToTauTau/python/lfv/parameter.yaml")), "r"))
tuple makePlots_datacardsLFV.parser
Initial value:
= argparse.ArgumentParser(description="Create ROOT inputs and datacards for LFV analysis.",
                          parents=[logger.loggingParser])
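
A self-contained sketch of this argument setup; the option names are illustrative and the repo-internal parents=[logger.loggingParser] part is omitted, while the description, help text, and default are the values documented on this page:

    import argparse

    parser = argparse.ArgumentParser(
        description="Create ROOT inputs and datacards for LFV analysis.")
    parser.add_argument("-i", "--input-dir", help="Input directory.")
    parser.add_argument("-c", "--categories", nargs="+",
                        default=["ZeroJet_LFV", "OneJet_LFV"])
    args = parser.parse_args()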
list makePlots_datacardsLFV.plot_configs = []
tuple makePlots_datacardsLFV.pool = Pool(cpu_count())
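
For illustration, a minimal sketch of how such a pool is typically used to run harry_do_your_job over the collected plot_configs in parallel; the callable and the configs here are stand-ins:

    from multiprocessing import Pool, cpu_count

    def harry_do_your_job(config):
        # Stand-in for the module's function, which processes one harry.py plot config.
        print("processing", config)

    if __name__ == "__main__":
        plot_configs = [{"weights": "dummy_%d" % index} for index in range(4)]  # placeholder configs
        pool = Pool(cpu_count())
        pool.map(harry_do_your_job, plot_configs)
        pool.close()
        pool.join()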
list makePlots_datacardsLFV.sample_values
Initial value:
= [
    samples,
    channel,
    None,
    "new",
    "lfv",
    "",
    False,
    weight,
]
list makePlots_datacardsLFV.samples = ["data"]

Samples for control region.

Samples for signal region.

string makePlots_datacardsLFV.sig_histogram_name_template = "${BIN}/${PROCESS}"
string makePlots_datacardsLFV.sig_syst_histogram_name_template = "${BIN}/${PROCESS}_${SYSTEMATIC}"
string makePlots_datacardsLFV.systematic = "nominal"
tuple makePlots_datacardsLFV.systematics_factory = systematics.SystematicsFactory()

Delete old datacards.

tuple makePlots_datacardsLFV.systematics_settings = systematics_factory.get(shape_systematic)
string makePlots_datacardsLFV.tmp_input_root_filename_template = "input/${ANALYSIS}_${CHANNEL}_${BIN}_${SYSTEMATIC}_${ERA}"
tuple makePlots_datacardsLFV.tmp_output_file = os.path.join(args.output_dir, tmp_input_root_filename_template.replace("$", "").format(ANALYSIS="LFV", CHANNEL=channel, BIN=category, SYSTEMATIC=systematic, ERA="13TeV") + ".root")

Apply specific config changes.

File list with the tmp output files.

list makePlots_datacardsLFV.tmp_output_files = []
string makePlots_datacardsLFV.weight = "*"

Weight if the BDT score is used for the statistical analysis.

Weight for control region.