diff --git a/PhysicsTools/Heppy/python/analyzers/core/TreeAnalyzerNumpy.py b/PhysicsTools/Heppy/python/analyzers/core/TreeAnalyzerNumpy.py index b75d700434caf..5de4ae7c608a9 100644 --- a/PhysicsTools/Heppy/python/analyzers/core/TreeAnalyzerNumpy.py +++ b/PhysicsTools/Heppy/python/analyzers/core/TreeAnalyzerNumpy.py @@ -11,7 +11,7 @@ class TreeAnalyzerNumpy( Analyzer ): def __init__(self, cfg_ana, cfg_comp, looperName): super(TreeAnalyzerNumpy,self).__init__(cfg_ana, cfg_comp, looperName) - self.outservicename = getattr(cfg_ana,"outservicename","outputfile") + self.outservicename = getattr(cfg_ana,"outservicename","PhysicsTools.HeppyCore.framework.services.tfile.TFileService_outputfile") self.treename = getattr(cfg_ana,"treename","tree") diff --git a/PhysicsTools/Heppy/python/analyzers/core/autovars.py b/PhysicsTools/Heppy/python/analyzers/core/autovars.py index d70ab689084d2..1fe9162c99893 100644 --- a/PhysicsTools/Heppy/python/analyzers/core/autovars.py +++ b/PhysicsTools/Heppy/python/analyzers/core/autovars.py @@ -11,10 +11,10 @@ class NTupleVariable: - name, type, help, default: obvious - function: a function that taken an object computes the value to fill (e.g. 
lambda event : len(event.goodVertices)) """ - def __init__(self, name, function, type=float, help="", default=-99, mcOnly=False, filler=None): + def __init__(self, name, function, the_type=float, help="", default=-99, mcOnly=False, filler=None): self.name = name self.function = function - self.type = type + self.the_type = the_type self.help = help self.default = default self.mcOnly = mcOnly @@ -24,7 +24,7 @@ def __call__(self,object): return ret def makeBranch(self,treeNumpy,isMC): if self.mcOnly and not isMC: return - treeNumpy.var(self.name, type=self.type, default=self.default, title=self.help, filler=self.filler) + treeNumpy.var(self.name, the_type=self.the_type, default=self.default, title=self.help, filler=self.filler) def fillBranch(self,treeNumpy,object,isMC): if self.mcOnly and not isMC: return treeNumpy.fill(self.name, self(object)) @@ -60,7 +60,7 @@ def ownVars(self,isMC): lambda object, subvar=subvar, so=so : subvar(so(object)), # ^-- lambda object : subvar(so(object)) doesn't work due to scoping, see # http://stackoverflow.com/questions/2295290/what-do-lambda-function-closures-capture-in-python/2295372#2295372 - type = subvar.type, help = subvar.help, default = subvar.default, mcOnly = subvar.mcOnly, + the_type = subvar.the_type, help = subvar.help, default = subvar.default, mcOnly = subvar.mcOnly, filler = subvar.filler)) self._subObjectVars[isMC] = subvars vars += self._subObjectVars[isMC] @@ -144,7 +144,7 @@ def makeBranches(self,treeNumpy,isMC): for v in allvars: h = v.help if self.help: h = "%s for %s" % ( h if h else v.name, self.help ) - treeNumpy.var("%s_%s" % (self.name, v.name), type=v.type, default=v.default, title=h, filler=v.filler) + treeNumpy.var("%s_%s" % (self.name, v.name), the_type=v.the_type, default=v.default, title=h, filler=v.filler) def fillBranches(self,treeNumpy,object,isMC): if self.mcOnly and not isMC: return if object is None: @@ -209,7 +209,7 @@ def makeBranchesScalar(self,treeNumpy,isMC): for i in 
xrange(1,self.maxlen+1): h = v.help if self.help: h = "%s for %s [%d]" % ( h if h else v.name, self.help, i-1 ) - treeNumpy.var("%s%d_%s" % (self.name, i, v.name), type=v.type, default=v.default, title=h, filler=v.filler) + treeNumpy.var("%s%d_%s" % (self.name, i, v.name), the_type=v.the_type, default=v.default, title=h, filler=v.filler) def makeBranchesVector(self,treeNumpy,isMC): if not isMC and self.objectType.mcOnly: return treeNumpy.var("n"+self.name, int) @@ -218,7 +218,7 @@ def makeBranchesVector(self,treeNumpy,isMC): h = v.help if self.help: h = "%s for %s" % ( h if h else v.name, self.help ) name="%s_%s" % (self.name, v.name) if v.name != "" else self.name - treeNumpy.vector(name, "n"+self.name, self.maxlen, type=v.type, default=v.default, title=h, filler=v.filler) + treeNumpy.vector(name, "n"+self.name, self.maxlen, the_type=v.the_type, default=v.default, title=h, filler=v.filler) def fillBranchesScalar(self,treeNumpy,collection,isMC): if not isMC and self.objectType.mcOnly: return if self.filter != None: collection = [ o for o in collection if self.filter(o) ] @@ -248,14 +248,14 @@ def __repr__(self): def get_cpp_declaration(self, isMC): s = [] for v in self.objectType.allVars(isMC): - s += ["{0} {1}__{2}[{3}];".format(v.type.__name__, self.name, v.name, self.maxlen)] + s += ["{0} {1}__{2}[{3}];".format(v.the_type.__name__, self.name, v.name, self.maxlen)] return "\n".join(s) def get_cpp_wrapper_class(self, isMC): s = "class %s {\n" % self.name s += "public:\n" for v in self.objectType.allVars(isMC): - s += " {0} {1};\n".format(v.type.__name__, v.name) + s += " {0} {1};\n".format(v.the_type.__name__, v.name) s += "};\n" return s diff --git a/PhysicsTools/Heppy/python/analyzers/objects/LeptonAnalyzer.py b/PhysicsTools/Heppy/python/analyzers/objects/LeptonAnalyzer.py index 6bcddd0d7c498..34435695f10b9 100644 --- a/PhysicsTools/Heppy/python/analyzers/objects/LeptonAnalyzer.py +++ b/PhysicsTools/Heppy/python/analyzers/objects/LeptonAnalyzer.py @@ -629,7 
+629,7 @@ def plausible(rec,gen): leps = event.inclusiveLeptons if self.cfg_ana.match_inclusiveLeptons else event.selectedLeptons match = matchObjectCollection3(leps, event.genleps + event.gentauleps, - deltaRMax = 1.2, filter = plausible) + deltaRMax = 1.2, filter_func = plausible) for lep in leps: gen = match[lep] lep.mcMatchId = (gen.sourceId if gen != None and hasattr(gen, "sourceId") else 0) @@ -649,7 +649,7 @@ def isFromB(self,particle,bid=5, done={}): def matchAnyLeptons(self, event): event.anyLeptons = [ x for x in event.genParticles if x.status() == 1 and abs(x.pdgId()) in [11,13] ] leps = event.inclusiveLeptons if hasattr(event, 'inclusiveLeptons') else event.selectedLeptons - match = matchObjectCollection3(leps, event.anyLeptons, deltaRMax = 0.3, filter = lambda x,y : abs(x.pdgId()) == abs(y.pdgId())) + match = matchObjectCollection3(leps, event.anyLeptons, deltaRMax = 0.3, filter_func = lambda x,y : abs(x.pdgId()) == abs(y.pdgId())) for lep in leps: gen = match[lep] lep.mcMatchAny_gp = gen @@ -675,7 +675,7 @@ def matchToPhotons(self, event): leps = event.inclusiveLeptons if hasattr(event, 'inclusiveLeptons') else event.selectedLeptons leps = [ l for l in leps if abs(l.pdgId()) == 11 ] plausible = lambda rec, gen : 0.3*gen.pt() < rec.pt() and rec.pt() < 1.5*gen.pt() - match = matchObjectCollection3(leps, event.anyPho, deltaRMax = 0.3, filter = plausible) + match = matchObjectCollection3(leps, event.anyPho, deltaRMax = 0.3, filter_func = plausible) for lep in leps: gen = match[lep] lep.mcPho = gen diff --git a/PhysicsTools/Heppy/python/analyzers/objects/METAnalyzer.py b/PhysicsTools/Heppy/python/analyzers/objects/METAnalyzer.py index 2f29c3e3f03cd..e82cbd22c6af0 100644 --- a/PhysicsTools/Heppy/python/analyzers/objects/METAnalyzer.py +++ b/PhysicsTools/Heppy/python/analyzers/objects/METAnalyzer.py @@ -179,7 +179,8 @@ def makeMETs(self, event): if self.cfg_ana.doMetNoPU: self.metNoPU = ROOT.pat.MET(self.handles['nopumet'].product()[0]) else: self.met = 
self.handles['met'].product()[0] - if self.cfg_ana.doMetNoPU: self.metNoPU = self.handles['nopumet'].product()[0] + if self.cfg_ana.doMetNoPU: + self.metNoPU = self.handles['nopumet'].product()[0] if self.recalibrateMET == "type1": type1METCorr = getattr(event, 'type1METCorr'+self.jetAnalyzerPostFix) diff --git a/PhysicsTools/Heppy/python/utils/miniAodFiles.py b/PhysicsTools/Heppy/python/utils/miniAodFiles.py index 0cfc42f923d20..e875459922603 100644 --- a/PhysicsTools/Heppy/python/utils/miniAodFiles.py +++ b/PhysicsTools/Heppy/python/utils/miniAodFiles.py @@ -40,8 +40,8 @@ def miniAodFiles(): ] elif (big,medium)==(8,0): files=[ - '/store/relval/CMSSW_8_0_19/RelValZMM_13/MINIAODSIM/80X_mcRun2_asymptotic_v17_gs7120p2-v1/00000/4E733BCE-656E-E611-AE16-0CC47A78A3F4.root', - '/store/relval/CMSSW_8_0_19/RelValZMM_13/MINIAODSIM/80X_mcRun2_asymptotic_v17_gs7120p2-v1/00000/B82838D0-656E-E611-BAFD-0CC47A7C35A4.root' + '/store/relval/CMSSW_8_0_21/RelValZMM_13/MINIAODSIM/80X_mcRun2_asymptotic_2016_TrancheIV_v6_Tr4GT_v6-v1/10000/50C4AE3D-C498-E611-B0DF-0025905B859E.root', + '/store/relval/CMSSW_8_0_21/RelValZMM_13/MINIAODSIM/80X_mcRun2_asymptotic_2016_TrancheIV_v6_Tr4GT_v6-v1/10000/DCC7483C-C498-E611-9270-0025905A48EC.root' ] else: raise ValueError('no mini AOD file defined for release '+cmsswRelease()) diff --git a/PhysicsTools/Heppy/test/simple_example_cfg.py b/PhysicsTools/Heppy/test/simple_example_cfg.py index 6a429f7f5031b..1cb3415a675d1 100644 --- a/PhysicsTools/Heppy/test/simple_example_cfg.py +++ b/PhysicsTools/Heppy/test/simple_example_cfg.py @@ -58,6 +58,7 @@ sel_jets = cfg.Analyzer( Filter, 'jets', + output = 'jets', input_objects = 'all_jets', filter_func = lambda x : x.pt()>30 ) diff --git a/PhysicsTools/HeppyCore/python/analyzers/CMSTestAnalyzer.py b/PhysicsTools/HeppyCore/python/analyzers/CMSTestAnalyzer.py deleted file mode 100644 index 81e12077b5c8b..0000000000000 --- a/PhysicsTools/HeppyCore/python/analyzers/CMSTestAnalyzer.py +++ /dev/null @@ -1,7 +0,0 @@ 
-from PhysicsTools.HeppyCore.framework.analyzer import Analyzer - -class CMSTestAnalyzer(Analyzer): - - def process(self, event): - evid = event.input.eventAuxiliary().id() - print 'run/lumi/event:', evid.run(), evid.luminosityBlock(), evid.event() diff --git a/PhysicsTools/HeppyCore/python/analyzers/ChargedHadronsFromB.py b/PhysicsTools/HeppyCore/python/analyzers/ChargedHadronsFromB.py new file mode 100644 index 0000000000000..2210baa10fdfa --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/ChargedHadronsFromB.py @@ -0,0 +1,34 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.particles.genbrowser import GenBrowser +from PhysicsTools.HeppyCore.particles.pdgcodes import hasBottom + +class ChargedHadronsFromB(Analyzer): + + def process(self, event): + genptcs = event.gen_particles + bquarks = [] + charged_hadrons = [] + event.hadrons_from_b = [] + for ptc in genptcs: + if abs(ptc.pdgid())==5: + bquarks.append(ptc) + elif ptc.q() and ptc.status()==1: + charged_hadrons.append(ptc) + if len(bquarks) == 0 or len(charged_hadrons) == 0: + return + event.genbrowser = GenBrowser(event.gen_particles, + event.gen_vertices) + event.hadrons_from_b = [] + event.hadrons_not_from_b = [] + for hadron in charged_hadrons: + ancestors = event.genbrowser.ancestors(hadron) + is_from_b = False + for ancestor in ancestors: + if hasBottom(ancestor.pdgid() ): + is_from_b = True + if is_from_b: + event.hadrons_from_b.append(hadron) + else: + event.hadrons_not_from_b.append(hadron) + + diff --git a/PhysicsTools/HeppyCore/python/analyzers/Counter.py b/PhysicsTools/HeppyCore/python/analyzers/Counter.py new file mode 100644 index 0000000000000..13e7016ad33f0 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/Counter.py @@ -0,0 +1,23 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer + +class Counter(Analyzer): + '''Counts the number of objects in the input_objects collection + and skip the event if this number is 
strictly less than min_number + + Example: + + from PhysicsTools.HeppyCore.analyzers.Counter import Counter + gen_counter = cfg.Analyzer( + Counter, + input_objects = 'gen_particles_stable', + min_number = 1 + ) + + * input_objects : the input collection + + * min_number : the minimum number of objects in input_objects required to keep the event + ''' + + def process(self, event): + input_collection = getattr(event, self.cfg_ana.input_objects) + return len(input_collection) >= self.cfg_ana.min_number diff --git a/PhysicsTools/HeppyCore/python/analyzers/EventFilter.py b/PhysicsTools/HeppyCore/python/analyzers/EventFilter.py new file mode 100644 index 0000000000000..0b8b40b4de03e --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/EventFilter.py @@ -0,0 +1,39 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +import collections + +class EventFilter (Analyzer): + '''Filters events based on the contents of an input collection. + + When an event is rejected by the EventFilter, the analyzers + placed after the filter in the sequence will not run. + + Example: + + To reject events with 1 lepton or more: + + from PhysicsTools.HeppyCore.analyzers.EventFilter import EventFilter + lepton_filter = cfg.Analyzer( + EventFilter , + 'lepton_filter', + input_objects = 'leptons', + min_number = 1, + veto = True + ) + + * input_objects : the input collection. + + * min_number : minimum number of objects in input_objects to trigger the filtering + + * veto : + - if False: events are selected if there are >= min_number objects in input_objects + - if True: events are rejected if there are >= min_number objects in input_objects. 
+ ''' + + def process(self, event): + input_collection = getattr(event, self.cfg_ana.input_objects) + if self.cfg_ana.veto: + return not len(input_collection) >= self.cfg_ana.min_number + else: + return len(input_collection) >= self.cfg_ana.min_number + + diff --git a/PhysicsTools/HeppyCore/python/analyzers/EventTextOutput.py b/PhysicsTools/HeppyCore/python/analyzers/EventTextOutput.py new file mode 100644 index 0000000000000..2edecadd4e831 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/EventTextOutput.py @@ -0,0 +1,69 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer + +import shelve + +outfilename = 'particles.shv' + +class SimpleParticle(object): + def __init__(self, ptc): + self.theta = ptc.theta() + self.phi = ptc.phi() + self.energy = ptc.e() + self.pdgid = ptc.pdgid() + + def get_data(self): + return self.pdgid, self.theta, self.phi, self.energy + + def __str__(self): + return 'particle: {id} : theta={theta}, phi={phi}, energy={energy}'.format( + id = self.pdgid, + theta = self.theta, + phi = self.phi, + energy = self.energy + ) + +class SimpleEvent(object): + def __init__(self, ievent, ptcs): + self.ievent = ievent + self.ptcs = map(SimpleParticle, ptcs) + self.data = dict( + ievent = ievent, + particles = [ptc.get_data() for ptc in self.ptcs] + ) + + def get_data(self): + return self.data + + def __str__(self): + lines = ['event {iev}'.format(iev=self.ievent)] + lines.extend( map(str, self.ptcs) ) + return '\n'.join(lines) + + + +class EventTextOutput(Analyzer): + + def beginLoop(self, setup): + super(EventTextOutput, self).beginLoop(setup) + self.events = [] + + def process(self, event): + ptcs = getattr(event, self.cfg_ana.particles ) + self.events.append(SimpleEvent(event.iEv, ptcs).get_data()) + + def endLoop(self, setup): + super(EventTextOutput, self).endLoop(setup) + out = shelve.open('/'.join([self.dirName, outfilename])) + out['events'] = self.events + out.close() + + +if __name__ == '__main__': + + import pprint + sh 
= shelve.open(outfilename) + events = sh['events'] + for event in events: + print 'event', event['ievent'] + for pdgid, theta, phi, energy in event['particles']: + print '\t', pdgid, theta, phi, energy diff --git a/PhysicsTools/HeppyCore/python/analyzers/Filter.py b/PhysicsTools/HeppyCore/python/analyzers/Filter.py index 3d950f5d6cd3d..982a2656691d2 100644 --- a/PhysicsTools/HeppyCore/python/analyzers/Filter.py +++ b/PhysicsTools/HeppyCore/python/analyzers/Filter.py @@ -1,9 +1,45 @@ from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +import collections class Filter(Analyzer): + '''Filter objects from the input_objects collection + and store them in the output collection + + Example: + from PhysicsTools.HeppyCore.analyzers.Filter import Filter + def is_lepton(ptc): + """Returns true if the particle energy is larger than 5 GeV + and if its pdgid is +-11 (electrons) or +-13 (muons)""" + return ptc.e()> 5. and abs(ptc.pdgid()) in [11, 13] + + leptons = cfg.Analyzer( + Filter, + 'sel_leptons', + output = 'leptons', + input_objects = 'rec_particles', + filter_func = is_lepton + ) + + * input_objects : the input collection. + if a dictionary, the filtering function is applied to the dictionary values, + and not to the keys. + + * output : the output collection + + * filter_func : a function object. + IMPORTANT NOTE: lambda statements should not be used, as they + do not work in multiprocessing mode. looking for a solution... 
+ + ''' + def process(self, event): input_collection = getattr(event, self.cfg_ana.input_objects) - output_collection = [obj for obj in input_collection \ - if self.cfg_ana.filter_func(obj)] - setattr(event, self.instance_label, output_collection) + output_collection = None + if isinstance(input_collection, collections.Mapping): + output_collection = dict( [(key, val) for key, val in input_collection.iteritems() + if self.cfg_ana.filter_func(val)] ) + else: + output_collection = [obj for obj in input_collection \ + if self.cfg_ana.filter_func(obj)] + setattr(event, self.cfg_ana.output, output_collection) diff --git a/PhysicsTools/HeppyCore/python/analyzers/GaussianSmearer.py b/PhysicsTools/HeppyCore/python/analyzers/GaussianSmearer.py new file mode 100644 index 0000000000000..afebefd546805 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/GaussianSmearer.py @@ -0,0 +1,45 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer + +import copy +import PhysicsTools.HeppyCore.statistics.rrandom as random + +class GaussianSmearer(Analyzer): + '''Smears the 4-momentum of a collection of particles according to a gaussian model, + and then applies an acceptance model + + Example: + + from PhysicsTools.HeppyCore.analyzers.GaussianSmearer import Smearer + def accept_electron(ele): + return abs(ele.eta()) < 2.5 and ele.e() > 5. 
+ electrons = cfg.Analyzer( + Smearer, + 'electrons', + output = 'electrons', + input_objects = 'sim_leptons', + accept=accept_electron, + mu_sigma=(1, 0.1) + ) + + output: name of the collection created in the event to hold the smeared particles + input_objects: the collection of particles to be smeared + accept: function object returning True if a particle is accepted and + False otherwise + mu_sigma: mean and width of the gaussian model (response and resolution) + ''' + + def process(self, event): + input_objects = getattr(event, self.cfg_ana.input_objects) + output = [] + for obj in input_objects: + smeared = self.smear(obj) + if self.cfg_ana.accept(smeared): + output.append(smeared) + setattr(event, self.cfg_ana.output, output) + + def smear(self, obj): + mu, sigma = self.cfg_ana.mu_sigma + smear_factor = random.gauss(mu, sigma) + smeared = copy.deepcopy(obj) + smeared._tlv *= smear_factor + return smeared diff --git a/PhysicsTools/HeppyCore/python/analyzers/GenAnalyzer.py b/PhysicsTools/HeppyCore/python/analyzers/GenAnalyzer.py new file mode 100644 index 0000000000000..fa111a6c6192a --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/GenAnalyzer.py @@ -0,0 +1,19 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.particles.genbrowser import GenBrowser +from PhysicsTools.HeppyCore.particles.pdgcodes import hasBottom + +class GenAnalyzer(Analyzer): + + def process(self, event): + genptcs = event.gen_particles + charged_hadrons = [ptc for ptc in genptcs if ptc.q() and ptc.status()==1] + event.genbrowser = GenBrowser(event.gen_particles, + event.gen_vertices) + event.hadrons_from_b = [] + for hadron in charged_hadrons: + ancestors = event.genbrowser.ancestors(hadron) + for ancestor in ancestors: + if hasBottom(ancestor.pdgid() ): + event.hadrons_from_b.append(hadron) + break + diff --git a/PhysicsTools/HeppyCore/python/analyzers/GlobalEventTreeProducer.py 
b/PhysicsTools/HeppyCore/python/analyzers/GlobalEventTreeProducer.py new file mode 100644 index 0000000000000..18313f5da47db --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/GlobalEventTreeProducer.py @@ -0,0 +1,29 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.statistics.tree import Tree +from PhysicsTools.HeppyCore.analyzers.ntuple import * + +from ROOT import TFile + +class GlobalEventTreeProducer(Analyzer): + + def beginLoop(self, setup): + super(GlobalEventTreeProducer, self).beginLoop(setup) + self.rootfile = TFile('/'.join([self.dirName, + 'tree.root']), + 'recreate') + self.tree = Tree( 'events', '') + bookJet(self.tree, 'sum_all') + bookJet(self.tree, 'sum_all_gen') + + def process(self, event): + self.tree.reset() + sum_all = getattr(event, self.cfg_ana.sum_all) + sum_all_gen = getattr(event, self.cfg_ana.sum_all_gen) + fillJet(self.tree, 'sum_all', sum_all) + fillJet(self.tree, 'sum_all_gen', sum_all_gen) + self.tree.tree.Fill() + + def write(self, setup): + self.rootfile.Write() + self.rootfile.Close() + diff --git a/PhysicsTools/HeppyCore/python/analyzers/Gun.py b/PhysicsTools/HeppyCore/python/analyzers/Gun.py new file mode 100644 index 0000000000000..58cd463b40004 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/Gun.py @@ -0,0 +1,43 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.papas.pdt import particle_data +from PhysicsTools.HeppyCore.particles.tlv.particle import Particle + +import math +import PhysicsTools.HeppyCore.statistics.rrandom as random + +from ROOT import TLorentzVector + +def particle(pdgid, thetamin, thetamax, ptmin, ptmax, flat_pt=False): + mass, charge = particle_data[pdgid] + theta = random.uniform(thetamin, thetamax) + phi = random.uniform(-math.pi, math.pi) + energy = random.uniform(ptmin, ptmax) + costheta = math.cos(math.pi/2. - theta) + sintheta = math.sin(math.pi/2. 
- theta) + tantheta = sintheta / costheta + cosphi = math.cos(phi) + sinphi = math.sin(phi) + if flat_pt: + pt = energy + momentum = pt / sintheta + energy = math.sqrt(momentum**2 + mass**2) + else: + energy = max([energy, mass]) + momentum = math.sqrt(energy**2 - mass**2) + tlv = TLorentzVector(momentum*sintheta*cosphi, + momentum*sintheta*sinphi, + momentum*costheta, + energy) + return Particle(pdgid, charge, tlv) + + +class Gun(Analyzer): + + def process(self, event): + event.gen_particles = [particle(self.cfg_ana.pdgid, + self.cfg_ana.thetamin, + self.cfg_ana.thetamax, + self.cfg_ana.ptmin, + self.cfg_ana.ptmax, + flat_pt=self.cfg_ana.flat_pt)] + event.gen_particles_stable = event.gen_particles diff --git a/PhysicsTools/HeppyCore/python/analyzers/ImpactParameter.py b/PhysicsTools/HeppyCore/python/analyzers/ImpactParameter.py new file mode 100644 index 0000000000000..dfacebdc7b0c3 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/ImpactParameter.py @@ -0,0 +1,240 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from ROOT import TFile, TH1F +from ROOT import TVector3, TLorentzVector +from PhysicsTools.HeppyCore.papas.path import Helix +import math +from PhysicsTools.HeppyCore.utils.deltar import deltaR + +class ImpactParameter(Analyzer): + '''This analyzer puts an impact parameter for every charged particle + as an attribute of its path. + The significance is calculated, the calculus are a first order approximation, + thus this may not be correct for large impact parameters (more than 3 mm). + The Impact parameter significance is stored in the particle's path. + + New attributes for PhysicsTools.HeppyCore.papas.pfobjects.Particle.path (from compute_IP) : + * closest_t = time of closest approach to the primary vertex. 
+ * IP = signed impact parameter + * IPcoord = TVector3 of the point of closest approach to the + primary vertex + + New attributes for particles.path (from compute_theta_0) : + * theta_0 = 1/sqrt(2) * gaussian width of the scattering angle + due to the beam pipe. --> See pdg booklet, Passage of particles through matter, + multiple scattering through small angles. + * xX_0 = distance in number of X_0 the radiation length the particles goes through + + New attributes for particles.path (from compute_IP_signif) : + * IP_signif = the impact parameter significance. To get the uncertainty, just compute IP/IP_signif + * IP_sigma = the uncertainty on the impact parameter + + Then, several b-tagging methods are applied to each jet, with selected tracks : + * a log-likelihood ratio based on Impact Parameter (IP_b_LL), + if a numerator and a denominator for IP are provided (see example) + * a log-likelihood ratio based on Impact Parameter Significance (IPs_b_LL), + if a numerator and a denominator for IPs are provided (see example) + * TCHE and TCHP taggers : using the second and third highest Impact Parameter for each jet + + New attributes for jets : + * TCHE = the value of the TCHE for this jet + * TCHP = the value of the TCHP for this jet + * TCHE_IP = the value of the IP of the track used to compute the TCHE + * TCHP_IP = the value of the IP of the track used to compute the TCHP + * TCHE_x = the x position of the vertex for the particle used for the TCHE + * TCHE_y = the y position of the vertex for the particle used for the TCHE + * TCHE_z = the z position of the vertex for the particle used for the TCHE + * TCHE_pt = the transverse momentum of the particle used for the TCHE + * TCHE_dr = the cone containing the track used for the TCHE with respect to the jet axis + + Example of configuration : the root files contain the distributions of IP or IPs histograms (h_u, h_b ...) + that can be divided (num/denom) to get the ratio. 
+ + from PhysicsTools.HeppyCore.papas.detectors.CMS import CMS + from PhysicsTools.HeppyCore.analyzers.ImpactParameter import ImpactParameter + btag = cfg.Analyzer( + ImpactParameter, + jets = 'jets', + + # needed only for the IP_b_LL tagger + # file and histogram key for b jet charged hadrons IP + # num_IP = ("histo_stat_IP_ratio_bems.root","h_b"), + # file and histogram key for u jet charged hadrons IP + # denom_IP = ("histo_stat_IP_ratio_bems.root","h_u"), + + # needed only for the IPs_b_LL tagger + # file and histogram key for b jet charged hadrons IPs + # num_IPs = ("histo_stat_IPs_ratio_bems.root","h_b"), + # file and histogram key for u jet charged hadrons IPs + # denom_IPs = ("histo_stat_IPs_ratio_bems.root","h_u"), + + # selection of charged hadrons for b tagging + pt_min = 1, # pt threshold + dxy_max = 2e-3, # max impact parameter in transverse plane, in m, w/r origin + dz_max = 17e-2, # max longitudinal impact parameter in transverse plane, in m, w/r origin + detector = CMS() + ) + + ''' + def beginLoop(self, setup): + super(ImpactParameter, self).beginLoop(setup) + if hasattr(self.cfg_ana, 'num_IP') == False : + self.tag_IP_b_LL = False + else : + if hasattr(self.cfg_ana, 'denom_IP') == False : + self.tag_IP_b_LL = False + raise AttributeError('You specified a numerator without a denominator for the log likelihood based on IP') + else : + self.tag_IP_b_LL = True + self.num_IP_file = TFile.Open(self.cfg_ana.num_IP[0]) + self.num_IP_hist = self.num_IP_file.Get(self.cfg_ana.num_IP[1]) + self.denom_IP_file = TFile.Open(self.cfg_ana.denom_IP[0]) + self.denom_IP_hist = self.denom_IP_file.Get(self.cfg_ana.denom_IP[1]) + self.ratio_IP = TH1F("ratio_IP","num_IP over denom_IP", self.num_IP_hist.GetXaxis().GetNbins(), self.num_IP_hist.GetXaxis().GetXmin(), self.num_IP_hist.GetXaxis().GetXmax()) + self.ratio_IP.Divide(self.num_IP_hist,self.denom_IP_hist) + #import pdb; pdb.set_trace() + + if hasattr(self.cfg_ana, 'num_IPs') == False : + self.tag_IPs_b_LL = False + 
else : + if hasattr(self.cfg_ana, 'denom_IPs') == False : + self.tag_IPs_b_LL = False + raise AttributeError('You specified a numerator without a denominator for the log likelihood based on IP significance') + else : + self.tag_IPs_b_LL = True + self.num_IPs_file = TFile.Open(self.cfg_ana.num_IPs[0]) + self.num_IPs_hist = self.num_IPs_file.Get(self.cfg_ana.num_IPs[1]) + self.denom_IPs_file = TFile.Open(self.cfg_ana.denom_IPs[0]) + self.denom_IPs_hist = self.denom_IPs_file.Get(self.cfg_ana.denom_IPs[1]) + self.ratio_IPs = TH1F("ratio_IPs","num_IPs over denom_IPs", self.num_IPs_hist.GetXaxis().GetNbins(), self.num_IPs_hist.GetXaxis().GetXmin(), self.num_IPs_hist.GetXaxis().GetXmax()) + self.ratio_IPs.Divide(self.num_IPs_hist,self.denom_IPs_hist) + + + def ll_tag(self, ratio_histo, ptc_var, jet_tag ): + ibin = ratio_histo.FindBin(ptc_var) + lhratio = ratio_histo.GetBinContent(ibin) + if not lhratio == 0: + LLratio = math.log(lhratio) + jet_tag += LLratio + if lhratio == 0: + LLratio = 0 + return LLratio + + + def process(self, event): + assumed_vertex = TVector3(0, 0, 0) + jets = getattr(event, self.cfg_ana.jets) + detector = self.cfg_ana.detector + pt_min = self.cfg_ana.pt_min + dxy_max = self.cfg_ana.dxy_max + dz_max = self.cfg_ana.dz_max + for jet in jets: + IP_b_LL = 0 # value of the log likelihood ratio based on IP initiated at 0 + IPs_b_LL = 0 # value of the log likelihood ratio based on IP significance initiated at 0 + ipsig_ptcs = [] # list of IP signif and associated ptcs + for id, ptcs in jet.constituents.iteritems(): + if abs(id) in [22,130,11]: + continue + for ptc in ptcs : + if ptc.q() == 0 : + continue + ptc.path.compute_IP(assumed_vertex,jet) + + ptc_IP_signif = 0 + if hasattr(ptc.path, 'points') == True and 'beampipe_in' in ptc.path.points: + phi_in = ptc.path.phi(ptc.path.points['beampipe_in'].X(),\ + ptc.path.points['beampipe_in'].Y()) + phi_out= ptc.path.phi(ptc.path.points['beampipe_out'].X(),\ + ptc.path.points['beampipe_out'].Y()) + deltat = 
ptc.path.time_at_phi(phi_out)-ptc.path.time_at_phi(phi_in) + x = ptc.path.path_length(deltat) + X_0 = detector.elements['beampipe'].material.x0 + ptc.path.compute_theta_0(x, X_0) + ptc.path.compute_IP_signif(ptc.path.IP, + ptc.path.theta_0, + ptc.path.points['beampipe_in']) + else : + ptc.path.compute_IP_signif(ptc.path.IP, None, None) + + dx = ptc.path.IPcoord.x() - assumed_vertex.x() + dy = ptc.path.IPcoord.y() - assumed_vertex.y() + dz = ptc.path.IPcoord.z() - assumed_vertex.z() + if ptc.path.p4.Perp() > pt_min and (dx**2 + dy**2)**0.5 < dxy_max and dz**2 < dz_max**2 : + ipsig_ptcs.append([ptc.path.IP_signif, ptc]) + + if self.tag_IP_b_LL: + ptc.path.IP_b_LL = self.ll_tag(self.ratio_IP, ptc.path.IP,IP_b_LL ) + if self.tag_IPs_b_LL: + ptc.path.IPs_b_LL = self.ll_tag(self.ratio_IPs, ptc.path.IP_signif, IPs_b_LL ) + + ipsig_ptcs.sort(reverse=True) + + if len(ipsig_ptcs) < 2 : + TCHE = -99 + TCHP = -99 + TCHE_IP = -99 + TCHP_IP = -99 + TCHE_x = -99 + TCHE_y = -99 + TCHE_z = -99 + TCHE_pt = -99 + TCHE_dr = -99 + + if len(ipsig_ptcs) > 1 : + TCHE = ipsig_ptcs[1][0] + ptc = ipsig_ptcs[1][1] + TCHE_IP = ptc.path.IP + TCHE_x, TCHE_y, TCHE_z = ptc.path.coord_at_time(0) + TCHE_pt = ptc.path.p4.Perp() + TCHE_dr = deltaR(jet.eta(), jet.phi(), ptc.eta(), ptc.phi()) + TCHP = -99 + TCHP_IP = -99 + + if len(ipsig_ptcs) > 2 : + TCHP = ipsig_ptcs[2][0] + ptc = ipsig_ptcs[2][1] + TCHP_IP = ptc.path.IP + + jet.tags['IP_b_LL'] = IP_b_LL if self.tag_IP_b_LL else None + jet.tags['IPs_b_LL']= IPs_b_LL if self.tag_IPs_b_LL else None + #TODO COLIN : create a BTagInfo class. 
from PhysicsTools.HeppyCore.framework.analyzer import Analyzer
from PhysicsTools.HeppyCore.particles.isolation import IsolationComputer, IsolationInfo

import pprint

# Summary pdg ids used to classify particles for the isolation sums:
# charged hadrons, photons, neutral hadrons, electrons, muons.
pdgids = [211, 22, 130, 11, 13]

class IsolationAnalyzer(Analyzer):
    '''Compute lepton isolation.

    Example:

    from PhysicsTools.HeppyCore.analyzers.IsolationAnalyzer import IsolationAnalyzer
    from PhysicsTools.HeppyCore.particles.isolation import EtaPhiCircle
    iso_leptons = cfg.Analyzer(
        IsolationAnalyzer,
        leptons = 'leptons',
        particles = 'particles',
        iso_area = EtaPhiCircle(0.4)
    )

    * leptons : collection of leptons for which the isolation should be computed

    * particles : collection of particles w/r to which the leptons should be isolated.

    The particles are assumed to have a pdgid equal to
    +- 11 (electrons)
    +- 13 (muons)
    +- 211 (all other charged particles)
    22 (photons)
    130 (all other neutral particles)

    If one of the particles considered in the isolation calculation is
    the lepton (is the same python object), it is discarded.

    For each pdgid, the isolation result is attached to the lepton.
    For example, to keep track of isolation w/r to charged hadrons, an
    attribute lepton.iso_211 is attached to each lepton. It contains:
    - lepton.iso_211.sumpt: sum pT of all charged hadrons in a cone around the lepton
    - lepton.iso_211.sume: sum E for these charged hadrons
    - lepton.iso_211.num: number of such charged hadrons

    Additionally, the attribute lepton.iso is attached to the lepton. It contains
    sumpt, sume, and num for charged hadrons, photons, and neutral hadrons together.

    See IsolationComputer and IsolationInfo for more information.
    '''

    def beginLoop(self, setup):
        '''Create one IsolationComputer per summary pdgid.'''
        super(IsolationAnalyzer, self).beginLoop(setup)
        # now using same isolation definition for all pdgids
        self.iso_computers = dict()
        for pdgid in pdgids:
            self.iso_computers[pdgid] = IsolationComputer(
                [self.cfg_ana.iso_area],
                label='iso{pdgid}'.format(pdgid=str(pdgid))
            )

    def process(self, event):
        '''For each lepton, compute per-pdgid isolation and the combined
        lepton.iso summary (see class docstring).'''
        particles = getattr(event, self.cfg_ana.particles)
        leptons = getattr(event, self.cfg_ana.leptons)
        for lepton in leptons:
            isosum = IsolationInfo('all', lepton)
            self.logger.info(str(lepton))
            for pdgid in pdgids:
                sel_ptcs = [ptc for ptc in particles
                            if abs(self.pdgid(ptc)) == pdgid]
                iso = self.iso_computers[pdgid].compute(lepton, sel_ptcs)
                isosum += iso
                setattr(lepton, 'iso_{pdgid}'.format(pdgid=pdgid), iso)
                self.logger.info(str(iso))
                if iso.num:
                    self.logger.info(pprint.pformat(iso.particles))
            lepton.iso = isosum
            self.logger.info(str(isosum))

    def pdgid(self, ptc):
        '''returns summary pdg id.
        - e or mu -> +- 11 or +- 13
        - charged -> +- 211
        - photon -> 22
        - other -> neutral hadron
        '''
        # Bug fix: use abs() so that anti-leptons (pdgid -11 / -13) are
        # recognized as leptons. Previously they fell through to the
        # ptc.q() branch and were misclassified as charged hadrons (+-211),
        # contradicting the class docstring ("+- 11 (electrons)").
        if abs(ptc.pdgid()) in [11, 13]:
            return ptc.pdgid()
        elif ptc.q():
            return ptc.q() * 211
        elif ptc.pdgid() == 22:
            return 22
        else:
            return 130
from PhysicsTools.HeppyCore.framework.analyzer import Analyzer
from PhysicsTools.HeppyCore.particles.tlv.resonance import Resonance

import pprint
import itertools

class M3Builder(Analyzer):
    '''Builds the M3 observable: the resonance formed from the 3 jets
    whose summed p4 has the largest pT.

    Reads the jet collection named by cfg_ana.jets and attaches the
    resulting Resonance (or None if fewer than 3 jets) to the event
    under self.instance_label.
    '''

    def process(self, event):
        jets = getattr(event, self.cfg_ana.jets)

        m3 = None
        pt3max = 0
        seljets = None

        if len(jets) >= 3:
            # NOTE: permutations yields each 3-jet set 6 times with identical
            # summed pT; combinations would suffice, kept as-is to preserve
            # the original leg ordering of the selected triplet.
            for l in itertools.permutations(jets, 3):
                pt3 = (l[0].p4() + l[1].p4() + l[2].p4()).Pt()
                if pt3 > pt3max:
                    # Bug fix: the original assigned 'ptmax' (a typo), so
                    # pt3max never changed and the *last* permutation was
                    # always selected instead of the max-pT one.
                    pt3max = pt3
                    seljets = l
            top_pdgid = 6
            # Guard the degenerate case where every triplet has pT == 0
            # and no selection was made.
            if seljets is not None:
                m3 = Resonance(seljets, top_pdgid)
        setattr(event, self.instance_label, m3)
from PhysicsTools.HeppyCore.framework.analyzer import Analyzer

from PhysicsTools.HeppyCore.particles.tlv.met import MET
from ROOT import TLorentzVector

class METBuilder(Analyzer):
    '''Builds the missing transverse energy from a particle collection.

    Sums the p4 of all particles named by cfg_ana.particles, flips the
    sign, and attaches a MET(missing p4, sum pT) object to the event
    under self.instance_label.
    '''

    def process(self, event):
        particles = getattr(event, self.cfg_ana.particles)
        missingp4 = TLorentzVector()
        sumpt = 0
        for ptc in particles:
            missingp4 += ptc.p4()
            sumpt += ptc.pt()
        missingp4 *= -1
        met = MET(missingp4, sumpt)
        setattr(event, self.instance_label, met)
from PhysicsTools.HeppyCore.framework.analyzer import Analyzer
import math

class MTW(Analyzer):
    '''Computes the W transverse mass from the selected lepton and the MET,
    and attaches it to the event under self.instance_label.
    '''

    def process(self, event):
        ele = getattr(event, self.cfg_ana.electron)
        mu = getattr(event, self.cfg_ana.muon)

        # assumes exactly one lepton in the event: either one electron or
        # one muon. NOTE(review): raises IndexError if both collections
        # are empty (or ele has >1 entries and mu is empty) -- confirm
        # upstream selection guarantees a single lepton.
        lepton = ele[0] if len(ele) == 1 else mu[0]

        met = getattr(event, self.cfg_ana.met)
        # standard transverse-mass formula: sqrt(2 pT^l pT^miss (1 - cos dphi))
        mtw = math.sqrt(
            2. * lepton.pt() * met.pt() * (1 - math.cos(lepton.phi() - met.phi()))
        )

        setattr(event, self.instance_label, mtw)
+ + Example: + + from PhysicsTools.HeppyCore.analyzers.Masker import Masker + particles_not_zed = cfg.Analyzer( + Masker, + output = 'particles_not_zed', + input = 'gen_particles_stable', + mask = 'zeds', + ) + + ''' + def process(self, event): + inputs = getattr(event, self.cfg_ana.input) + masks = getattr(event, self.cfg_ana.mask) + output = [obj for obj in inputs if obj not in masks] + setattr(event, self.cfg_ana.output, output) diff --git a/PhysicsTools/HeppyCore/python/analyzers/Matcher.py b/PhysicsTools/HeppyCore/python/analyzers/Matcher.py index 5636852911bc1..899481de5a785 100644 --- a/PhysicsTools/HeppyCore/python/analyzers/Matcher.py +++ b/PhysicsTools/HeppyCore/python/analyzers/Matcher.py @@ -1,5 +1,5 @@ -from heppy.framework.analyzer import Analyzer -from heppy.utils.deltar import matchObjectCollection, deltaR +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.utils.deltar import matchObjectCollection, deltaR import collections @@ -10,10 +10,11 @@ class Matcher(Analyzer): Simple example configuration: - from heppy_fcc.analyzers.Matcher import Matcher + from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher papas_jet_match = cfg.Analyzer( Matcher, instance_label = 'papas', + delta_r = 0.3, match_particles = 'gen_jets', particles = 'papas_jets' ) @@ -32,6 +33,7 @@ class Matcher(Analyzer): papas_particle_match_g2r = cfg.Analyzer( Matcher, instance_label = 'papas_g2r', + delta_r = 0.3, particles = 'gen_particles_stable', match_particles = [ ('papas_rec_particles', None), @@ -48,9 +50,24 @@ class Matcher(Analyzer): if any. - etc. + TODO: Colin: was well adapted, but probably better to do something more modular. 
from PhysicsTools.HeppyCore.framework.analyzer import Analyzer
import copy
import itertools


class Merger(Analyzer):
    '''Merges collections of particle-like objects into a single collection


    Example:

    from PhysicsTools.HeppyCore.analyzers.Merger import Merger
    merge_particles = cfg.Analyzer(
        Merger,
        instance_label = 'leptons',
        inputs = ['electrons','muons'],
        output = 'leptons',
    )

    inputs: names of the collections of input
    output: collection of all particle-like objects in the input collections

    If the optional cfg_ana.sort_key is provided, the merged collection is
    sorted by it in decreasing order.
    '''
    def process(self, event):
        inputs = [getattr(event, name) for name in self.cfg_ana.inputs]
        output = list(ptc for ptc in itertools.chain(*inputs))
        if hasattr(self.cfg_ana, 'sort_key'):
            output.sort(key=self.cfg_ana.sort_key,
                        reverse=True)
        setattr(event, self.cfg_ana.output, output)
from PhysicsTools.HeppyCore.framework.analyzer import Analyzer

from PhysicsTools.HeppyCore.particles.tlv.particle import Particle
from ROOT import TVector3, TLorentzVector

class MissingEnergyBuilder(Analyzer):
    '''Builds a massless "missing" particle recoiling against the input
    collection.

    Sums the 3-momenta and charges of the particles named by
    cfg_ana.particles, builds a massless particle with the opposite
    3-momentum, and attaches it to the event under self.instance_label.
    '''

    def process(self, event):
        ptcs = getattr(event, self.cfg_ana.particles)
        sump3 = TVector3()
        charge = 0
        # (removed dead local 'sume = 0': it was never updated or read)
        for ptc in ptcs:
            sump3 += ptc.p3()
            charge += ptc.q()
        p4 = TLorentzVector()
        # massless 4-vector opposite to the summed 3-momentum
        p4.SetVectM(-sump3, 0)
        missing = Particle(0, charge, p4)
        setattr(event, self.instance_label, missing)
+ ''' + + def process(self, event): + p4 = TLorentzVector() + charge = 0 + pdgid = 0 + ptcs = getattr(event, self.cfg_ana.particles) + jet = Jet(p4) + constituents = JetConstituents() + for ptc in ptcs: + p4 += ptc.p4() + charge += ptc.q() + constituents.append(ptc) + sumptc = Particle(pdgid, charge, p4) + jet = Jet(p4) + jet.constituents = constituents + jet.constituents.sort() + setattr(event, self.cfg_ana.output, jet) + diff --git a/PhysicsTools/HeppyCore/python/analyzers/PDebugger.py b/PhysicsTools/HeppyCore/python/analyzers/PDebugger.py new file mode 100644 index 0000000000000..d201930b14b6a --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/PDebugger.py @@ -0,0 +1,48 @@ +import logging +import sys +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +import PhysicsTools.HeppyCore.utils.pdebug as pdebug + + +class PDebugger(Analyzer): + '''Analyzer which turns on the physics debug output which + (1) sets up the pdebugging tool, a separate logger for physics + (2) logs for each event the number of event + + The pdebugging module should be used wherever the user wants a physics debug output. + The physics debug output documents creation of clusters, tracks + particles and aspects of simulation and reconstruction and can + be used to trace errors or to follow the simulation and reconstruction code + It was built in order to allow verification of C++ code vs python code + + This analyszer can be used to decide whether pysics output goes to either/both + log file, + stdout + + Example: + from PhysicsTools.HeppyCore.analyzers.PDebugger import PDebugger + pdebug = cfg.Analyzer( + PDebugger, + output_to_stdout = False, #optional + debug_filename = os.getcwd()+'/python_physics_debug.log' #optional argument + ) + ''' + def __init__(self, *args, **kwargs): + super(PDebugger, self).__init__(*args, **kwargs) + + #no output will occur unless one or both of the following is requested. 
+ + #turn on output to stdout if requested + #note that both the main log leve and the stdout log level must be set in order to + # obtain output at the info level + if hasattr(self.cfg_ana, 'output_to_stdout') and self.cfg_ana.output_to_stdout: + pdebug.set_stream(sys.stdout,level=logging.INFO) + pdebug.pdebugger.setLevel(logging.INFO) + + #turn on output to file if requested + if hasattr(self.cfg_ana, 'debug_filename'): + pdebug.set_file(self.cfg_ana.debug_filename) + pdebug.pdebugger.setLevel(logging.INFO) + + def process(self, event): + pdebug.pdebugger.info(str('Event: {}'.format(event.iEv))) diff --git a/PhysicsTools/HeppyCore/python/analyzers/PapasPF.py b/PhysicsTools/HeppyCore/python/analyzers/PapasPF.py new file mode 100644 index 0000000000000..0d6d3e44a614d --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/PapasPF.py @@ -0,0 +1,52 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.particles.fcc.particle import Particle + +import math +from PhysicsTools.HeppyCore.papas.simulator import Simulator +from PhysicsTools.HeppyCore.papas.vectors import Point +from PhysicsTools.HeppyCore.papas.pfobjects import Particle as PFSimParticle +from PhysicsTools.HeppyCore.papas.toyevents import particles +from PhysicsTools.HeppyCore.display.core import Display +from PhysicsTools.HeppyCore.display.geometry import GDetector +from PhysicsTools.HeppyCore.display.pfobjects import GTrajectories + +from ROOT import TLorentzVector, TVector3 + + +class PapasPF(Analyzer): + '''Runs PAPAS, the PArametrized Particle Simulation. 
from PhysicsTools.HeppyCore.framework.analyzer import Analyzer
from PhysicsTools.HeppyCore.papas.pfalgo.pfblockbuilder import PFBlockBuilder
from PhysicsTools.HeppyCore.papas.data.pfevent import PFEvent
from PhysicsTools.HeppyCore.papas.pfalgo.distance import Distance


class PapasPFBlockBuilder(Analyzer):
    ''' Module to construct blocks of connected clusters and tracks
        particles will eventually be reconstructed from elements of a block


        Usage:
        from PhysicsTools.HeppyCore.analyzers.PapasPFBlockBuilder import PapasPFBlockBuilder
        pfblocks = cfg.Analyzer(
            PapasPFBlockBuilder,
            tracks = 'tracks',
            ecals = 'ecal_clusters',
            hcals = 'hcal_clusters',
            history = 'history_nodes',
            output_blocks = 'reconstruction_blocks'
        )

        tracks: Name of dict in Event where tracks are stored
        ecals: Name of dict in Event where ecals are stored
        hcals: Name of dict in Event where hcals are stored
        history: Name of history_nodes, can be set to None.
        output_blocks: Name to be used for the blocks dict

    '''
    def __init__(self, *args, **kwargs):
        super(PapasPFBlockBuilder, self).__init__(*args, **kwargs)
        # cache the event attribute names configured for this instance
        # (removed non-idiomatic trailing semicolons)
        self.tracksname = self.cfg_ana.tracks
        self.ecalsname = self.cfg_ana.ecals
        self.hcalsname = self.cfg_ana.hcals
        self.blocksname = self.cfg_ana.output_blocks
        self.historyname = self.cfg_ana.history

    def process(self, event):
        '''Builds blocks from the event's tracks and clusters and attaches
        the resulting blocks dict to the event under self.blocksname.'''
        pfevent = PFEvent(event, self.tracksname, self.ecalsname,
                          self.hcalsname, self.blocksname)

        distance = Distance()

        # history nodes are optional; reuse them if present on the event
        history_nodes = None
        if hasattr(event, self.historyname):
            history_nodes = getattr(event, self.historyname)
        blockbuilder = PFBlockBuilder(pfevent, distance, history_nodes)

        setattr(event, self.blocksname, blockbuilder.blocks)
        #setattr(event, self.outhistoryname, blockbuilder.history_nodes)
''' Module to reconstruct particles from blocks of events + + Usage: + pfreconstruct = cfg.Analyzer( + PapasPFReconstructor, + instance_label = 'papas_PFreconstruction', + detector = CMS(), + input_blocks = 'reconstruction_blocks', + input_history = 'history_nodes', + output_history = 'history_nodes', + output_particles_dict = 'particles_dict', + output_particles_list = 'particles_list' + ) + + input_blocks: Name of the the blocks dict in the event + history: Name of history_nodes + output_particles_dict = Name for recosntructed particles (as dict), + output_particles_list = Name for recosntructed particles (as list) + ''' + + def __init__(self, *args, **kwargs): + super(PapasPFReconstructor, self).__init__(*args, **kwargs) + self.detector = self.cfg_ana.detector + self.reconstructed = PFReconstructor(self.detector, self.logger) + self.blocksname = self.cfg_ana.input_blocks + self.historyname = self.cfg_ana.history + self.output_particlesdictname = '_'.join([self.instance_label, + self.cfg_ana.output_particles_dict]) + self.output_particleslistname = '_'.join([self.instance_label, + self.cfg_ana.output_particles_list]) + + def process(self, event): + ''' Calls the particle reconstruction algorithm and returns the + reconstructed paricles and updated history_nodes to the event object + arguments: + event must contain blocks made using BlockBuilder''' + + self.reconstructed.reconstruct(event, self.blocksname, self.historyname) + + #setattr(event, self.historyname, self.reconstructed.history_nodes) + setattr(event, self.output_particlesdictname, self.reconstructed.particles) + + #hist = History(event.history_nodes,PFEvent(event)) + #for block in event.blocks: + # hist.summary_of_links(block) + + #for particle comparison we want a list of particles (instead of a dict) so that we can sort and compare + reconstructed_particle_list = sorted( self.reconstructed.particles.values(), + key = lambda ptc: ptc.e(), + reverse=True) + + setattr(event, 
self.output_particleslistname, reconstructed_particle_list) diff --git a/PhysicsTools/HeppyCore/python/analyzers/PapasParticlesComparer.py b/PhysicsTools/HeppyCore/python/analyzers/PapasParticlesComparer.py new file mode 100644 index 0000000000000..6fe6fc341d4e0 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/PapasParticlesComparer.py @@ -0,0 +1,31 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.papas.data.comparer import ParticlesComparer +from PhysicsTools.HeppyCore.papas.data.history import History +from PhysicsTools.HeppyCore.papas.data.pfevent import PFEvent + +class PapasParticlesComparer(Analyzer): + ''' Unsophisticated testing Module that checks that two lists of sorted particles match + + Usage: + from PhysicsTools.HeppyCore.analyzers.PapasParticlesComparer import PapasParticlesComparer + particlescomparer = cfg.Analyzer( + PapasParticlesComparer , + particlesA = 'papas_PFreconstruction_particles_list', + particlesB = 'papas_rec_particles_no_leptons' + ) + + ''' + def __init__(self, *args, **kwargs): + super(PapasParticlesComparer, self).__init__(*args, **kwargs) + self.particlesA_name = self.cfg_ana.particlesA + self.particlesB_name = self.cfg_ana.particlesB + + def process(self, event): #think about if argument is correct + ''' calls a particle comparer to compare two lists of pre-sorted particles + arguments + event: must contain baseline_particles (the original reconstruction from simulation) + and reconstructed_particles made from the new BlockBuilder approach + ''' + ParticlesComparer(getattr(event, self.particlesA_name), getattr(event, self.particlesB_name)) + + \ No newline at end of file diff --git a/PhysicsTools/HeppyCore/python/analyzers/PapasSim.py b/PhysicsTools/HeppyCore/python/analyzers/PapasSim.py new file mode 100644 index 0000000000000..d7c4c76d4f407 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/PapasSim.py @@ -0,0 +1,149 @@ +from 
PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.papas.simulator import Simulator +from PhysicsTools.HeppyCore.papas.papas_exceptions import PropagationError, SimulationError +from PhysicsTools.HeppyCore.display.core import Display +from PhysicsTools.HeppyCore.display.geometry import GDetector +from PhysicsTools.HeppyCore.display.pfobjects import GTrajectories +from PhysicsTools.HeppyCore.papas.pfalgo.distance import Distance +from PhysicsTools.HeppyCore.papas.mergedclusterbuilder import MergedClusterBuilder +from PhysicsTools.HeppyCore.papas.data.pfevent import PFEvent +from PhysicsTools.HeppyCore.papas.graphtools.DAG import Node + +#todo following Alices merge and reconstruction work +# - add muons and electrons back into the particles, these +# particles are not yet handled by alices reconstruction +# they are (for the time being) excluded from the simulation rec particles in order that particle +# comparisons can be made (eg # no of particles) + +class PapasSim(Analyzer): + '''Runs PAPAS, the PArametrized Particle Simulation. + + #This will need to redocumented once new papasdata structure arrives + + Example configuration: + + from PhysicsTools.HeppyCore.analyzers.PapasSim import PapasSim + from PhysicsTools.HeppyCore.papas.detectors.CMS import CMS + papas = cfg.Analyzer( + PapasSim, + instance_label = 'papas', + detector = CMS(), + gen_particles = 'gen_particles_stable', + sim_particles = 'sim_particles', + merged_ecals = 'ecal_clusters', + merged_hcals = 'hcal_clusters', + tracks = 'tracks', + #rec_particles = 'sim_rec_particles', # optional - will only do a simulation reconstruction if a name is provided + output_history = 'history_nodes', + display_filter_func = lambda ptc: ptc.e()>1., + display = False, + verbose = True + ) + detector: Detector model to be used. + gen_particles: Name of the input gen particle collection + sim_particles: Name extension for the output sim particle collection. 
+ Note that the instance label is prepended to this name. + Therefore, in this particular case, the name of the output + sim particle collection is "papas_sim_particles". + merged_ecals: Name for the merged clusters created by simulator + merged_hcals: Name for the merged clusters created by simulator + tracks: Name for smeared tracks created by simulator + rec_particles: Optional. Name extension for the reconstructed particles created by simulator + This is retained for the time being to allow two reconstructions to be compared + Reconstruction will occur if this parameter or rec_particles_no_leptons is provided + Same comments as for the sim_particles parameter above. + rec_particles_no_leptons: Optional. Name extension for the reconstructed particles created by simulator + without electrons and muons + Reconstruction will occur if this parameter or rec_particles is provided + This is retained for the time being to allow two reconstructions to be compared + Same comments as for the sim_particles parameter above. + smeared: Name for smeared leptons + history: Optional name for the history nodes, set to None if not needed + display : Enable the event display + verbose : Enable the detailed printout. 
+ + event must contain + todo once history is implemented + event will gain + ecal_clusters:- smeared merged clusters from simulation + hcal_clusters:- smeared merged clusters from simulation + tracks: - tracks from simulation + baseline_particles:- simulated particles (excluding electrons and muons) + sim_particles - simulated particles including electrons and muons + + ''' + + def __init__(self, *args, **kwargs): + super(PapasSim, self).__init__(*args, **kwargs) + self.detector = self.cfg_ana.detector + self.simulator = Simulator(self.detector, self.mainLogger) + self.simname = '_'.join([self.instance_label, self.cfg_ana.sim_particles]) + self.tracksname = self.cfg_ana.tracks + self.mergedecalsname = self.cfg_ana.merged_ecals + self.mergedhcalsname = self.cfg_ana.merged_hcals + self.historyname = self.cfg_ana.output_history + self.is_display = self.cfg_ana.display + if self.is_display: + self.init_display() + + def init_display(self): + self.display = Display(['xy', 'yz']) + self.gdetector = GDetector(self.detector) + self.display.register(self.gdetector, layer=0, clearable=False) + self.is_display = True + + def process(self, event): + + event.simulator = self + if self.is_display: + self.display.clear() + pfsim_particles = [] + gen_particles = getattr(event, self.cfg_ana.gen_particles) + try: + self.simulator.simulate(gen_particles) + except (PropagationError, SimulationError) as err: + self.mainLogger.error(str(err) + ' -> Event discarded') + return False + pfsim_particles = self.simulator.ptcs + if self.is_display : + self.display.register(GTrajectories(pfsim_particles), + layer=1) + #these are the particles before simulation + simparticles = sorted(pfsim_particles, + key=lambda ptc: ptc.e(), reverse=True) + setattr(event, self.simname, simparticles) + + #extract the tracks and clusters (extraction is prior to Colins merging step) + event.tracks = dict() + event.ecal_clusters = dict() + event.hcal_clusters = dict() + if "tracker" in 
self.simulator.pfinput.elements : + for element in self.simulator.pfinput.elements["tracker"]: + event.tracks[element.uniqueid] = element + + if "ecal_in" in self.simulator.pfinput.elements : + for element in self.simulator.pfinput.elements["ecal_in"]: + event.ecal_clusters[element.uniqueid] = element + + if "hcal_in" in self.simulator.pfinput.elements : + for element in self.simulator.pfinput.elements["hcal_in"]: + event.hcal_clusters[element.uniqueid] = element + + ruler = Distance() + + #create history node + #note eventually history will be created by the simulator and passed in + # as an argument and this will no longer be needed + uniqueids = list(event.tracks.keys()) + list(event.ecal_clusters.keys()) + list(event.hcal_clusters.keys()) + history = dict((idt, Node(idt)) for idt in uniqueids) + + #Now merge the simulated clusters and tracks as a separate pre-stage (prior to new reconstruction) + # and set the event to point to the merged cluster + pfevent = PFEvent(event, 'tracks', 'ecal_clusters', 'hcal_clusters') + merged_ecals = MergedClusterBuilder(pfevent.ecal_clusters, ruler, history) + setattr(event, self.mergedecalsname, merged_ecals.merged) + merged_hcals = MergedClusterBuilder(pfevent.hcal_clusters, ruler, merged_ecals.history_nodes) + setattr(event, self.mergedhcalsname, merged_hcals.merged) + setattr(event, self.historyname, merged_hcals.history_nodes) + + diff --git a/PhysicsTools/HeppyCore/python/analyzers/ParametrizedBTagger.py b/PhysicsTools/HeppyCore/python/analyzers/ParametrizedBTagger.py new file mode 100644 index 0000000000000..95967a74666a8 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/ParametrizedBTagger.py @@ -0,0 +1,14 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer + +class ParametrizedBTagger(Analyzer): + + def process(self, event): + jets = getattr(event, self.cfg_ana.input_jets) + for jet in jets: + is_bjet = False + if jet.match and \ + jet.match.match and \ + abs(jet.match.match.pdgid())== 5: + 
class ParticleTreeProducer(Analyzer):
    '''Writes a flat ntuple with one entry per input particle.

    Each entry contains the particle itself, its overall match (if any)
    and its per-pdgid matches (charged hadrons 211, K0L 130, photons 22),
    together with the corresponding delta-R values.

    Configuration:
    * particles: name of the event attribute holding the input particles.

    Changes w.r.t. the original: removed the dead local flags m211/m22
    (assigned but never read once the commented-out debug code is gone)
    and folded the three copy-pasted per-pdgid branches into a loop.
    '''

    def beginLoop(self, setup):
        super(ParticleTreeProducer, self).beginLoop(setup)
        self.rootfile = TFile('/'.join([self.dirName, 'tree.root']),
                              'recreate')
        self.tree = Tree('particles', '')
        bookParticle(self.tree, 'ptc')
        # booked but currently never filled; kept so the tree schema
        # stays unchanged for downstream readers
        bookCluster(self.tree, 'ptc_ecal')
        bookParticle(self.tree, 'ptc_match')
        var(self.tree, 'dr')
        for pdgid in (211, 130, 22):
            bookParticle(self.tree, 'ptc_match_%d' % pdgid)
            var(self.tree, 'dr_%d' % pdgid)

    def process(self, event):
        particles = getattr(event, self.cfg_ana.particles)
        for ptc in particles:
            self.tree.reset()
            fillParticle(self.tree, 'ptc', ptc)
            # overall closest match, attached upstream by a Matcher
            if getattr(ptc, 'match', None):
                fillParticle(self.tree, 'ptc_match', ptc.match)
                fill(self.tree, 'dr', ptc.dr)
            # per-pdgid matches: charged hadron, K0L, photon
            for pdgid in (211, 130, 22):
                match = getattr(ptc, 'match_%d' % pdgid, None)
                if match:
                    fillParticle(self.tree, 'ptc_match_%d' % pdgid, match)
                    fill(self.tree, 'dr_%d' % pdgid,
                         getattr(ptc, 'dr_%d' % pdgid))
            self.tree.tree.Fill()

    def write(self, setup):
        self.rootfile.Write()
        self.rootfile.Close()
class RecoilBuilder(Analyzer):
    '''Computes the four-momentum recoiling against a selection of particles.

    Example:

    from PhysicsTools.HeppyCore.analyzers.RecoilBuilder import RecoilBuilder
    recoil = cfg.Analyzer(
      RecoilBuilder,
      output = 'recoil',
      sqrts = 240.,
      to_remove = 'zeds_legs'
    )

    * output: the recoil "particle" is stored under this event attribute.
    * sqrts: center-of-mass energy of the collision.
    * to_remove: collection of particles subtracted from the initial p4.
      If it contains all reconstructed particles of the event, the result
      is the missing four-momentum.
    '''

    def process(self, event):
        to_remove = getattr(event, self.cfg_ana.to_remove)
        # initial state: at rest, with total energy sqrt(s)
        total = TLorentzVector(0, 0, 0, self.cfg_ana.sqrts)
        for ptc in to_remove:
            total -= ptc.p4()
        # pdgid 0, charge 0, status 1
        setattr(event, self.cfg_ana.output, Recoil(0, 0, total, 1))
class ResonanceBuilder(Analyzer):
    '''Builds two-body resonances from a collection of legs.

    Example:

    from PhysicsTools.HeppyCore.analyzers.ResonanceBuilder import ResonanceBuilder
    zeds = cfg.Analyzer(
      ResonanceBuilder,
      output = 'zeds',
      leg_collection = 'sel_iso_leptons',
      pdgid = 23
    )

    * output: resonances are stored here, sorted by distance to the
      nominal mass of the requested pdgid (best candidate first).
      An additional collection <output>_legs holds the legs of the
      best resonance (empty list when no resonance could be built).
    * leg_collection: particles combined pairwise into resonances.
    * pdgid: pythia code of the target resonance.

    See Resonance2 and PhysicsTools.HeppyCore.particles.tlv.Resonance
    for more information.
    '''

    def process(self, event):
        legs = getattr(event, self.cfg_ana.leg_collection)
        target = self.cfg_ana.pdgid
        resonances = [Resonance(leg_a, leg_b, target)
                      for leg_a, leg_b in itertools.combinations(legs, 2)]
        # best candidate first: smallest distance to the nominal mass
        nominal = mass[target]
        resonances.sort(key=lambda reso: abs(reso.m() - nominal))
        setattr(event, self.cfg_ana.output, resonances)
        best_legs = resonances[0].legs if resonances else []
        setattr(event, '_'.join([self.cfg_ana.output, 'legs']), best_legs)
class CMSReader(Analyzer):
    '''Reads CMS gen particles (and optionally PF particles) from the
    event input and converts them into heppy Particle wrappers.

    Produces:
    * event.gen_particles: all gen particles, sorted by decreasing energy.
    * event.gen_particles_stable: the stable subset with finite, non-zero
      kinematics, excluding neutrinos.
    * event.pf_particles: only when cfg_ana.pf_particles is not None.
    '''

    def declareHandles(self):
        super(CMSReader, self).declareHandles()
        self.handles['gen_particles'] = AutoHandle(
            self.cfg_ana.gen_particles,
            'std::vector'
        )
        # PF reading is optional: enabled only when a collection is given
        self.read_pf = self.cfg_ana.pf_particles is not None
        if self.read_pf:
            self.handles['pf_particles'] = AutoHandle(
                self.cfg_ana.pf_particles,
                'std::vector'
            )

    def process(self, event):
        self.readCollections(event.input)
        store = event.input

        def is_good(ptc):
            # stable, with sane non-zero kinematics, and not a neutrino
            return (ptc.status() == 1
                    and not math.isnan(ptc.e())
                    and ptc.e() > 1e-5
                    and ptc.pt() > 1e-5
                    and abs(ptc.pdgid()) not in (12, 14, 16))

        gen = [Particle(p) for p in self.handles['gen_particles'].product()]
        gen.sort(key=lambda ptc: ptc.e(), reverse=True)
        event.gen_particles = gen
        event.gen_particles_stable = [ptc for ptc in gen if is_good(ptc)]
        if self.read_pf:
            event.pf_particles = [Particle(p)
                                  for p in self.handles['pf_particles'].product()]
class Matcher(Analyzer):
    '''Particle matcher.

    Works with any kind of object with a p4 function.

    Simple example configuration:

    from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher
    papas_jet_match = cfg.Analyzer(
      Matcher,
      instance_label = 'papas',
      delta_r = 0.3,
      match_particles = 'gen_jets',
      particles = 'papas_jets'
    )

    * particles: collection containing the particles to be matched.
    * match_particles: collection in which a match is searched for; may
      also be a list of (collection_name, pdgid) pairs, in which case a
      separate match is computed against each (pdgid-filtered) collection.

    Each particle gets a "match" attribute (closest object within
    delta_r, or None) and, when matched, a "dr" attribute. For
    pdgid-specific entries the attributes are "match_<pdgid>" and
    "dr_<pdgid>".

    TODO: Colin: was well adapted, but probably better to do something
    more modular (one Matcher per filtered collection, with a
    configurable attribute name); note that the distance cannot be
    attached to the matched particle, as that particle can itself be
    matched to another object.
    '''

    def beginLoop(self, setup):
        super(Matcher, self).beginLoop(setup)
        # normalize the configuration to a list of (collection, pdgid)
        self.match_collections = []
        if isinstance(self.cfg_ana.match_particles, basestring):
            self.match_collections.append(
                (self.cfg_ana.match_particles, None))
        else:
            self.match_collections = self.cfg_ana.match_particles

    def process(self, event):
        # BUGFIX: deltaR was used below but never imported at module
        # level (only matchObjectCollection is), so every successful
        # match raised a NameError. Imported locally here.
        from PhysicsTools.HeppyCore.utils.deltar import deltaR
        particles = getattr(event, self.cfg_ana.particles)
        for collname, pdgid in self.match_collections:
            match_ptcs = getattr(event, collname)
            match_ptcs_filtered = match_ptcs
            if pdgid is not None:
                match_ptcs_filtered = [ptc for ptc in match_ptcs
                                       if ptc.pdgid() == pdgid]
            pairs = matchObjectCollection(particles, match_ptcs_filtered,
                                          self.cfg_ana.delta_r)
            for ptc in particles:
                matchname = 'match'
                if pdgid:
                    matchname = 'match_{pdgid}'.format(pdgid=pdgid)
                match = pairs[ptc]
                setattr(ptc, matchname, match)
                if match:
                    drname = 'dr'
                    if pdgid:
                        drname = 'dr_{pdgid}'.format(pdgid=pdgid)
                    # NOTE(review): deltaR is fed theta rather than the
                    # usual eta — presumably intentional for papas; confirm.
                    dr = deltaR(ptc.theta(), ptc.phi(),
                                match.theta(), match.phi())
                    setattr(ptc, drname, dr)
class Recoil(Analyzer):
    '''Deprecated recoil computation.

    Sums the p4 of the input particles (all required to be final-state)
    and stores both the visible p4 and the recoil against the initial
    state, under instance-labelled event attributes.
    '''

    def process(self, event):
        particles = getattr(event, self.cfg_ana.particles)
        visible_p4 = TLorentzVector()
        for ptc in particles:
            # PF candidates have status 0 in CMS; anything above 1 means
            # the input collection is not final-state particles
            if ptc.status() > 1:
                raise ValueError('are you sure? status=' + str(ptc.status()))
            visible_p4 += ptc.p4()
        initial = TLorentzVector()
        initial.SetXYZM(0, 0, 0, self.cfg_ana.sqrts)
        recoil_p4 = initial - visible_p4
        label = self.cfg_ana.instance_label
        setattr(event, '_'.join(['recoil', label]),
                Particle(0, 0, recoil_p4))
        setattr(event, '_'.join(['recoil_visible', label]),
                Particle(0, 0, visible_p4))
class JetTreeProducer(Analyzer):
    '''Writes an ntuple with the two leading jets and their gen-level
    matches, plus the event identifiers when available.

    Configuration:
    * jets: name of the event attribute holding the (pt-sorted) jets.
    * tree_name / tree_title: passed to the output Tree.

    Changes w.r.t. the original: removed a leftover debugger trap
    (import pdb; pdb.set_trace() triggered whenever the leading jet had
    exactly two charged-hadron constituents), which would hang any
    non-interactive run, together with the surrounding dead code.
    '''

    def beginLoop(self, setup):
        super(JetTreeProducer, self).beginLoop(setup)
        self.rootfile = TFile('/'.join([self.dirName, 'jet_tree.root']),
                              'recreate')
        self.tree = Tree(self.cfg_ana.tree_name,
                         self.cfg_ana.tree_title)
        bookJet(self.tree, 'jet1')
        bookJet(self.tree, 'jet1_gen')
        bookJet(self.tree, 'jet2')
        bookJet(self.tree, 'jet2_gen')
        var(self.tree, 'event')
        var(self.tree, 'lumi')
        var(self.tree, 'run')

    def process(self, event):
        self.tree.reset()
        # event identifiers only exist for real datasets
        if hasattr(event, 'eventId'):
            fill(self.tree, 'event', event.eventId)
            fill(self.tree, 'lumi', event.lumi)
            fill(self.tree, 'run', event.run)
        jets = getattr(event, self.cfg_ana.jets)
        if len(jets) > 0:
            jet = jets[0]
            fillJet(self.tree, 'jet1', jet)
            if jet.match:
                fillJet(self.tree, 'jet1_gen', jet.match)
        if len(jets) > 1:
            jet = jets[1]
            fillJet(self.tree, 'jet2', jet)
            if jet.match:
                fillJet(self.tree, 'jet2_gen', jet.match)
        self.tree.tree.Fill()

    def write(self, setup):
        self.rootfile.Write()
        self.rootfile.Close()
class LeptonTreeProducer(Analyzer):
    '''Writes a tree with up to the two leading leptons of the input
    collection (one entry per event).

    Configuration:
    * leptons: name of the event attribute holding the leptons.
    * tree_name / tree_title: passed to the output Tree.
    '''

    def beginLoop(self, setup):
        super(LeptonTreeProducer, self).beginLoop(setup)
        self.rootfile = TFile('/'.join([self.dirName, 'tree.root']),
                              'recreate')
        self.tree = Tree(self.cfg_ana.tree_name,
                         self.cfg_ana.tree_title)
        bookLepton(self.tree, 'lep1')
        bookLepton(self.tree, 'lep2')

    def process(self, event):
        self.tree.reset()
        leptons = getattr(event, self.cfg_ana.leptons)
        # zip stops at the shorter sequence, so 0, 1 or 2 leptons are
        # all handled without explicit length checks
        for label, lepton in zip(('lep1', 'lep2'), leptons):
            fillLepton(self.tree, label, lepton)
        self.tree.tree.Fill()

    def write(self, setup):
        self.rootfile.Write()
        self.rootfile.Close()
class RandomAnalyzer(Analyzer):
    '''Attaches a uniform random number between 0 and 1 to the event,
    as event.var_random.

    Uses the framework's rrandom wrapper rather than the stdlib random
    module, so that runs are reproducible under the framework's seeding.
    '''

    def process(self, event):
        event.var_random = random.uniform(0, 1)
class BTagging(Analyzer):
    '''Tags jets as b jets with the configured predicate.

    Every input jet gets jet.tags['b'] set to True or False according to
    cfg_ana.filter_func; the passing jets are additionally collected
    into the cfg_ana.output event attribute.
    '''

    def process(self, event):
        jets = getattr(event, self.cfg_ana.input_objects)
        tagged = []
        for jet in jets:
            passes = self.cfg_ana.filter_func(jet)
            # store a plain boolean whatever the predicate returns
            jet.tags['b'] = True if passes else False
            if passes:
                tagged.append(jet)
        setattr(event, self.cfg_ana.output, tagged)
class TTbarTreeProducer(Analyzer):
    '''Writes one tree entry per event passing a single-lepton ttbar
    preselection: exactly one lepton (muon or electron), at least three
    jets, plus MET, m3 and mtw.
    '''

    def beginLoop(self, setup):
        super(TTbarTreeProducer, self).beginLoop(setup)
        self.rootfile = TFile('/'.join([self.dirName,
                                        'tree.root']),
                              'recreate')
        self.tree = Tree( 'events', '')
        bookParticle(self.tree, 'jet1')
        bookParticle(self.tree, 'jet2')
        bookParticle(self.tree, 'jet3')
        bookParticle(self.tree, 'jet4')
        bookParticle(self.tree, 'm3')
        var(self.tree, 'mtw')

        bookMet(self.tree, 'met')
        bookLepton(self.tree, 'muon', pflow=False)
        bookLepton(self.tree, 'electron', pflow=False)

    def process(self, event):
        self.tree.reset()
        muons = getattr(event, self.cfg_ana.muons)
        electrons = getattr(event, self.cfg_ana.electrons)

        if len(muons)==0 and len(electrons)==0:
            return # not filling the tree if no lepton at all

        if len(muons)==1 and len(electrons)==0:
            fillLepton(self.tree, 'muon', muons[0])
            # NOTE(review): 'muon_iso'/'electron_iso' branches are not
            # booked in beginLoop — presumably bookLepton books them;
            # confirm, otherwise these fills target missing branches.
            fillIso(self.tree, 'muon_iso', muons[0].iso)

        elif len(electrons)==1 and len(muons)==0:
            fillLepton(self.tree, 'electron', electrons[0])
            fillIso(self.tree, 'electron_iso', electrons[0].iso)

        else:
            return # not filling the tree if more than 1 lepton

        jets = getattr(event, self.cfg_ana.jets_30)
        if len(jets)<3:
            # not filling the tree with fewer than 3 jets
            # (the original comment said "4 jets", but the cut is 3 —
            # 3 jets are enough for m3; up to 4 are stored below)
            return
        for ijet, jet in enumerate(jets):
            if ijet==4:
                break
            fillParticle(self.tree, 'jet{ijet}'.format(ijet=ijet+1), jet)
        m3 = getattr(event, self.cfg_ana.m3)
        if m3:
            fillParticle(self.tree, 'm3', m3)

        mtw = getattr(event, self.cfg_ana.mtw)
        if mtw:
            fill(self.tree, 'mtw', mtw)

        met = getattr(event, self.cfg_ana.met)
        fillMet(self.tree, 'met', met)
        self.tree.tree.Fill()

    def write(self, setup):
        self.rootfile.Write()
        self.rootfile.Close()
class Selection(Analyzer):
    '''Cut-flow selection for the single-lepton ttbar analysis.

    Cuts, in order: at least 4 jets, at least 1 b jet, exactly 1
    isolated lepton, MET above 20 GeV. Returns False (stopping the
    event) as soon as a cut fails.
    '''

    CUTS = ('All events',
            'At least 4 jets',
            'At least 1 b-jet',
            'Exactly 1 lepton',
            'MET > 20GeV')

    def beginLoop(self, setup):
        super(Selection, self).beginLoop(setup)
        self.counters.addCounter('cut_flow')
        for cut in self.CUTS:
            self.counters['cut_flow'].register(cut)

    def process(self, event):
        counter = self.counters['cut_flow']
        counter.inc('All events')

        if len(event.sel_jets_noelectronnomuon_30) < 4:
            return False
        counter.inc('At least 4 jets')

        if len(event.b_jets_30) < 1:
            return False
        counter.inc('At least 1 b-jet')

        n_leptons = len(event.sel_iso_electrons) + len(event.sel_iso_muons)
        if n_leptons != 1:
            return False
        counter.inc('Exactly 1 lepton')

        if event.met.pt() < 20.:
            return False
        counter.inc('MET > 20GeV')
        return True
class Selection(Analyzer):
    '''Counts events in a simple two-lepton cut flow for the ZH
    analysis. Always returns True: the counters are informational and
    no event is rejected.
    '''

    def beginLoop(self, setup):
        super(Selection, self).beginLoop(setup)
        self.counters.addCounter('cut_flow')
        for cut in ('All events',
                    'At least 2 leptons',
                    'Both leptons e>30'):
            self.counters['cut_flow'].register(cut)

    def process(self, event):
        counter = self.counters['cut_flow']
        counter.inc('All events')
        if len(event.sel_iso_leptons) < 2:
            return True  # could return False to stop processing
        counter.inc('At least 2 leptons')
        # only the second lepton is tested — presumably the collection
        # is sorted by decreasing energy upstream, so this covers both;
        # TODO confirm the sorting
        if event.sel_iso_leptons[1].e() > 30.:
            counter.inc('Both leptons e>30')
        return True
class JetEnergyComputer(Analyzer):
    '''Use the initial p4 to constrain the energy of the 4 jets,
    in ee -> 4 jet final states.

    from PhysicsTools.HeppyCore.analyzers.examples.zh_had.JetEnergyComputer import JetEnergyComputer
    compute_jet_energy = cfg.Analyzer(
      JetEnergyComputer,
      output_jets='rescaled_jets',
      input_jets='jets',
      sqrts=Collider.SQRTS
    )

    * output_jets: output jets with a rescaled energy.
      note that only the jet p4 is copied when creating a rescaled jet
    * input_jets: collection of jets to be rescaled (must be exactly 4)
    * sqrts: center-of-mass energy of the collision
    '''

    def process(self, event):
        # read (and thereby validate) the sqrts parameter; it will be
        # used once the energy-constraint equation below is implemented
        sqrts = self.cfg_ana.sqrts
        jets = getattr(event, self.cfg_ana.input_jets)
        assert(len(jets) == 4)
        # here solve the equation to get the energy scale factor for
        # each jet; placeholder: unit factors for now
        scale_factors = [1] * 4
        rescaled = []
        for jet, factor in zip(jets, scale_factors):
            # the jets must not be deep-copied: they are heavy objects
            # holding in particular a list of constituent particles.
            # Only the TLorentzVector is duplicated before scaling.
            jet_copy = copy.copy(jet)
            jet_copy._tlv = copy.deepcopy(jet._tlv)
            jet_copy._tlv *= factor
            rescaled.append(jet_copy)
        setattr(event, self.cfg_ana.output_jets, rescaled)
class TreeProducer(Analyzer):
    '''Writes one entry per event for the hadronic ZH analysis: up to
    four (b-tagged) jets, the missing energy, the Higgs and Z
    candidates, and up to two leptons.
    '''

    def beginLoop(self, setup):
        super(TreeProducer, self).beginLoop(setup)
        self.rootfile = TFile('/'.join([self.dirName, 'tree.root']),
                              'recreate')
        self.tree = Tree('events', '')
        self.taggers = 'b'
        for label in ('jet1', 'jet2', 'jet3', 'jet4'):
            bookJet(self.tree, label, self.taggers)
        bookParticle(self.tree, 'misenergy')
        bookParticle(self.tree, 'higgs')
        bookParticle(self.tree, 'zed')
        bookLepton(self.tree, 'lepton1')
        bookLepton(self.tree, 'lepton2')

    def process(self, event):
        self.tree.reset()
        misenergy = getattr(event, self.cfg_ana.misenergy)
        fillParticle(self.tree, 'misenergy', misenergy)
        jets = getattr(event, self.cfg_ana.jets)
        for ijet, jet in enumerate(jets[:4]):
            fillJet(self.tree, 'jet{ijet}'.format(ijet=ijet + 1),
                    jet, self.taggers)
        higgs = getattr(event, self.cfg_ana.higgs)
        if higgs:
            fillParticle(self.tree, 'higgs', higgs)
        zed = getattr(event, self.cfg_ana.zed)
        if zed:
            fillParticle(self.tree, 'zed', zed)
        leptons = getattr(event, self.cfg_ana.leptons)
        # NOTE(review): the leptons are taken from the *end* of the
        # collection (reversed order) — presumably the collection is
        # sorted so that the interesting leptons come last; confirm.
        for ilep, lepton in enumerate(list(reversed(leptons))[:2]):
            fillLepton(self.tree,
                       'lepton{ilep}'.format(ilep=ilep + 1),
                       lepton)
        self.tree.tree.Fill()

    def write(self, setup):
        self.rootfile.Write()
        self.rootfile.Close()
class ZHReconstruction(Analyzer):
    '''Reconstructs the Higgs from the pair of b jets closest to the
    nominal Higgs mass, and the Z from the two remaining jets.
    Stores the candidates (or None) under cfg_ana.output_higgs and
    cfg_ana.output_zed.
    '''

    def process(self, event):
        jets = getattr(event, self.cfg_ana.input_jets)
        bjets = [jet for jet in jets if jet.tags['b']]
        # all b-jet pairs are Higgs candidates
        higgses = []
        for leg1, leg2 in itertools.combinations(bjets,2):
            higgses.append( Resonance(leg1, leg2, 25) )
        higgs = None
        zed = None
        if len(higgses):
            # sorting according to distance to nominal mass
            nominal_mass = mass[25]
            higgses.sort(key=lambda x: abs(x.m()-nominal_mass))
            higgs = higgses[0]
            # the Z is built from the two jets not used by the Higgs;
            # the input is expected to contain exactly 4 jets
            remaining_jets = copy.copy(jets)
            remaining_jets.remove(higgs.leg1())
            remaining_jets.remove(higgs.leg2())
            assert(len(remaining_jets) == 2)
            # NOTE(review): pdgid 21 is the gluon code; a Z candidate
            # would normally be 23 (as in the module-level mass dict) —
            # confirm whether 21 is intentional here.
            zed = Resonance(remaining_jets[0], remaining_jets[1], 21)
        setattr(event, self.cfg_ana.output_higgs, higgs)
        setattr(event, self.cfg_ana.output_zed, zed)
math + +class JetClusterizer(Analyzer): + '''Jet clusterizer. + + Makes use of the JetClusterizer class compiled in the analysis-cpp package + (this external package is the only dependence to the FCC software). + + Example configuration: + + from PhysicsTools.HeppyCore.analyzers.fcc.JetClusterizer import JetClusterizer + jets = cfg.Analyzer( + JetClusterizer, + output = 'jets', + particles = 'particles_not_zed', + fastjet_args = dict( njets = 2) + ) + + * output: name of the output collection of Jets. + Each jet is attached a JetConstituents object as jet.constituents. + See the Jet and JetConstituents classes. + + * particles: name of the input collection of particle-like objects. + These objects should have a p4(). + + you should provide either one or the other of the following arguments: + - ptmin : pt threshold for exclusive jet reconstruction + - njets : number of jets for inclusive jet reconstruction + + A more flexible interface can easily be provided if needed, + contact Colin. + ''' + + def __init__(self, *args, **kwargs): + super(JetClusterizer, self).__init__(*args, **kwargs) + args = self.cfg_ana.fastjet_args + self.clusterize = None + if 'ptmin' in args and 'njets' in args: + raise ValueError('cannot specify both ptmin and njets arguments') + if 'ptmin' in args: + self.clusterizer = CCJetClusterizer(0) + def clusterize(): + return self.clusterizer.make_inclusive_jets(args['ptmin']) + self.clusterize = clusterize + elif 'njets' in args: + self.clusterizer = CCJetClusterizer(1) + def clusterize(): + return self.clusterizer.make_exclusive_jets(args['njets']) + self.clusterize = clusterize + else: + raise ValueError('specify either ptmin or njets') + + def validate(self, jet): + constits = jet.constituents + keys = set(jet.constituents.keys()) + all_possible = set([211, 22, 130, 11, 13, 1, 2]) + if not keys.issubset(all_possible): + print constits + assert(False) + sume = 0. 
+ for component in jet.constituents.values(): + if component.e() - jet.e() > 1e-5: + import pdb; pdb.set_trace() + sume += component.e() + if jet.e() - sume > 1e-5: + import pdb; pdb.set_trace() + + + def process(self, event): + particles = getattr(event, self.cfg_ana.particles) + # removing neutrinos + particles = [ptc for ptc in particles if abs(ptc.pdgid()) not in [12,14,16]] + self.clusterizer.clear(); + for ptc in particles: + self.clusterizer.add_p4( ptc.p4() ) + self.clusterize() + jets = [] + for jeti in range(self.clusterizer.n_jets()): + jet = Jet( self.clusterizer.jet(jeti) ) + jet.constituents = JetConstituents() + jets.append( jet ) + for consti in range(self.clusterizer.n_constituents(jeti)): + constituent_index = self.clusterizer.constituent_index(jeti, consti) + constituent = particles[constituent_index] + jet.constituents.append(constituent) + jet.constituents.sort() + self.validate(jet) + setattr(event, self.cfg_ana.output, jets) diff --git a/PhysicsTools/HeppyCore/python/analyzers/fcc/Reader.py b/PhysicsTools/HeppyCore/python/analyzers/fcc/Reader.py new file mode 100644 index 0000000000000..3c31925715071 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/fcc/Reader.py @@ -0,0 +1,122 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.particles.fcc.particle import Particle +from PhysicsTools.HeppyCore.particles.fcc.jet import Jet +from PhysicsTools.HeppyCore.particles.fcc.vertex import Vertex +from PhysicsTools.HeppyCore.particles.fcc.met import Met +import PhysicsTools.HeppyCore.configuration + +import math + +class MissingCollection(Exception): + pass + +class Reader(Analyzer): + '''Reads events in FCC EDM format, and creates lists of objects adapted to an + analysis in python. 
+ + Configuration: + ---------------------- + + Example: + + from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader + source = cfg.Analyzer( + Reader, + # all the parameters below are optional: + gen_particles = 'GenParticle', + # gen_vertices = '', + # gen_jets = '', + # jets = '', + ) + + * gen_particles: name of the collection of gen particles + in the input FCC-EDM file + * gen_vertices: name of the collection of gen vertices + * gen_jets: name of the collection of gen jets. + * jets: name of the collection of reconstructed jets + + + You can find out about the names of the collections by opening + the root file with root, and by printing the events TTree. + + Creates: + -------- + + if self.cfg_ana.gen_particles is set: + - event.gen_particles: gen particles + - event.gen_particles_stable: stable gen_particles except neutrinos + + if the respective parameter is set (see above): + - event.gen_vertices: gen vertices (needed for gen particle history) + - event.gen_jets: gen jets + - event.jets: reconstructed jets + ''' + + def process(self, event): + store = event.input + + def get_collection(class_object, coll_label, sort=True): + pycoll = None + if hasattr(self.cfg_ana, coll_label): + coll_name = getattr( self.cfg_ana, coll_label) + coll = store.get( coll_name ) + if coll == None: + raise MissingCollection( + 'collection {} is missing'.format(coll_name) + ) + pycoll = map(class_object, coll) + if sort: + # pycoll.sort(key = self.sort_key, reverse=True) + pycoll.sort(reverse=True) + setattr(event, coll_label, pycoll ) + return pycoll + + get_collection(Particle, 'gen_particles') + get_collection(Vertex, 'gen_vertices', False) + get_collection(Jet, 'gen_jets') + jetcoll = get_collection(Jet, 'jets') + if jetcoll: + jets = dict() + for jet in jetcoll: + jets[jet] = jet + if hasattr(self.cfg_ana, 'bTags') and \ + hasattr(self.cfg_ana, 'jetsToBTags'): + for tt in store.get(self.cfg_ana.jetsToBTags): + jets[Jet(tt.Jet())].tags['bf'] = tt.Tag().Value() + + 
class Iso(object): + def __init__(self): + self.sumpt=-9999 + self.sume=-9999 + self.num=-9999 + + electrons = dict() + if hasattr(self.cfg_ana, 'electrons'): + event.electrons = map(Particle, store.get(self.cfg_ana.electrons)) + event.electrons.sort(reverse=True) + for ele in event.electrons: + ele.iso = Iso() + electrons[ele]=ele + if hasattr(self.cfg_ana, 'electronsToITags') and hasattr(self.cfg_ana, 'electronITags'): + for ele in store.get(self.cfg_ana.electronsToITags): + electrons[Particle(ele.Particle())].iso = Iso() + electrons[Particle(ele.Particle())].iso.sumpt = electrons[Particle(ele.Particle())].pt()*ele.Tag().Value() + + muons = dict() + if hasattr(self.cfg_ana, 'muons'): + event.muons = map(Particle, store.get(self.cfg_ana.muons)) + event.muons.sort(reverse=True) + for mu in event.muons: + mu.iso = Iso() + muons[mu]=mu + if hasattr(self.cfg_ana, 'muonsToITags') and hasattr(self.cfg_ana, 'muonITags'): + for mu in store.get(self.cfg_ana.muonsToITags): + #import pdb; pdb.set_trace() + muons[Particle(mu.Particle())].iso = Iso() + muons[Particle(mu.Particle())].iso.sumpt = muons[Particle(mu.Particle())].pt()*mu.Tag().Value() + + + get_collection(Particle, 'photons') + met = get_collection(Met, 'met', False) + if met: + event.met = event.met[0] diff --git a/PhysicsTools/HeppyCore/python/analyzers/lcio/MCParticlePrinter.py b/PhysicsTools/HeppyCore/python/analyzers/lcio/MCParticlePrinter.py new file mode 100644 index 0000000000000..366260bc02858 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/lcio/MCParticlePrinter.py @@ -0,0 +1,15 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer + +class MCParticlePrinter(Analyzer): + + def process(self, event): + mcparticles = event.input.getCollection('MCParticle') + for ptc in mcparticles: + p4 = ptc.getLorentzVec() + self.mainLogger.info( + "ptc E={energy}, m={mass}".format( + energy = p4.E(), + mass = p4.M() + )) + + diff --git a/PhysicsTools/HeppyCore/python/analyzers/ntuple.py 
b/PhysicsTools/HeppyCore/python/analyzers/ntuple.py new file mode 100644 index 0000000000000..658f2a5c3b04d --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/ntuple.py @@ -0,0 +1,170 @@ +#!/bin/env python + +def var( tree, varName, type=float ): + tree.var(varName, type) + +def fill( tree, varName, value ): + tree.fill( varName, value ) + +# simple p4 + +def bookP4( tree, pName ): + var(tree, '{pName}_e'.format(pName=pName)) + var(tree, '{pName}_pt'.format(pName=pName)) + var(tree, '{pName}_theta'.format(pName=pName)) + var(tree, '{pName}_eta'.format(pName=pName)) + var(tree, '{pName}_phi'.format(pName=pName)) + var(tree, '{pName}_m'.format(pName=pName)) + +def fillP4( tree, pName, p4 ): + fill(tree, '{pName}_e'.format(pName=pName), p4.e() ) + fill(tree, '{pName}_pt'.format(pName=pName), p4.pt() ) + fill(tree, '{pName}_theta'.format(pName=pName), p4.theta() ) + fill(tree, '{pName}_eta'.format(pName=pName), p4.eta() ) + fill(tree, '{pName}_phi'.format(pName=pName), p4.phi() ) + fill(tree, '{pName}_m'.format(pName=pName), p4.m() ) + +# simple particle + +def bookParticle( tree, pName ): + var(tree, '{pName}_pdgid'.format(pName=pName)) + var(tree, '{pName}_ip'.format(pName=pName)) #TODO Colin clean up hierarchy + var(tree, '{pName}_ip_signif'.format(pName=pName)) + bookP4(tree, pName) + +def fillParticle( tree, pName, particle ): + fill(tree, '{pName}_pdgid'.format(pName=pName), particle.pdgid() ) + ip = -99 + ip_signif = -1e9 + if hasattr(particle, 'path'): + path = particle.path + if hasattr(path, 'IP'): + ip = path.IP + if hasattr(path, 'IP_signif'): + ip_signif = path.IP_signif + fill(tree, '{pName}_ip'.format(pName=pName), ip ) + fill(tree, '{pName}_ip_signif'.format(pName=pName), ip_signif ) + fillP4(tree, pName, particle ) + + +def bookCluster( tree, name ): + var(tree, '{name}_e'.format(name=name)) + var(tree, '{name}_layer'.format(name=name)) + +layers = dict( + ecal_in = 0, + hcal_in = 1 +) + +def fillCluster( tree, name, cluster ): + fill(tree, 
'{name}_e'.format(name=name), cluster.energy ) + fill(tree, '{name}_layer'.format(name=name), layers[cluster.layer] ) + +# jet + +def bookComponent( tree, pName ): + var(tree, '{pName}_e'.format(pName=pName)) + var(tree, '{pName}_pt'.format(pName=pName)) + var(tree, '{pName}_num'.format(pName=pName)) + +def fillComponent(tree, pName, component): + fill(tree, '{pName}_e'.format(pName=pName), component.e() ) + fill(tree, '{pName}_pt'.format(pName=pName), component.pt() ) + fill(tree, '{pName}_num'.format(pName=pName), component.num() ) + + +pdgids = [211, 22, 130, 11, 13] + +def bookJet( tree, pName, taggers=None): + bookP4(tree, pName ) + for pdgid in pdgids: + bookComponent(tree, '{pName}_{pdgid:d}'.format(pName=pName, pdgid=pdgid)) + if taggers: + for tagger in taggers: + var(tree, '{pName}_{tagger}'.format(pName=pName, tagger=tagger)) + + +def fillJet( tree, pName, jet, taggers=None): + fillP4(tree, pName, jet ) + if taggers: + for tagger in taggers: + if tagger in jet.tags: + fill(tree, + '{pName}_{tagger}'.format(pName=pName, tagger=tagger), + jet.tags.get(tagger, None)) + else: + fill(tree, '{pName}_{tagger}'.format(pName=pName, tagger=tagger), -99) + + for pdgid in pdgids: + component = jet.constituents.get(pdgid, None) + if component is not None: + fillComponent(tree, + '{pName}_{pdgid:d}'.format(pName=pName, pdgid=pdgid), + component ) + else: + import pdb; pdb.set_trace() + print jet + + +# isolation +from IsolationAnalyzer import pdgids as iso_pdgids +# iso_pdgids = [211, 22, 130] + +def bookIso(tree, pName): + var(tree, '{pName}_e'.format(pName=pName)) + var(tree, '{pName}_pt'.format(pName=pName)) + var(tree, '{pName}_num'.format(pName=pName)) + +def fillIso(tree, pName, iso): + fill(tree, '{pName}_e'.format(pName=pName), iso.sume ) + fill(tree, '{pName}_pt'.format(pName=pName), iso.sumpt ) + fill(tree, '{pName}_num'.format(pName=pName), iso.num ) + +def bookLepton( tree, pName, pflow=True ): + bookParticle(tree, pName ) + if pflow: + for pdgid in 
iso_pdgids: + bookIso(tree, '{pName}_iso{pdgid:d}'.format(pName=pName, pdgid=pdgid)) + bookIso(tree, '{pName}_iso'.format(pName=pName)) + + +def fillLepton( tree, pName, lepton ): + fillParticle(tree, pName, lepton ) + for pdgid in iso_pdgids: + #import pdb; pdb.set_trace() + isoname='iso_{pdgid:d}'.format(pdgid=pdgid) + if hasattr(lepton, isoname): + iso = getattr(lepton, isoname) + fillIso(tree, '{pName}_iso{pdgid:d}'.format(pName=pName, pdgid=pdgid), iso) + #fillIso(tree, '{pName}_iso'.format(pName=pName), lepton.iso) + + +def bookIsoParticle(tree, pName): + bookParticle(tree, pName ) + bookLepton(tree, '{pName}_lep'.format(pName=pName) ) + +def fillIsoParticle(tree, pName, ptc, lepton): + fillParticle(tree, pName, ptc) + fillLepton(tree, '{pName}_lep'.format(pName=pName), lepton) + +def bookZed(tree, pName): + bookParticle(tree, pName ) + bookParticle(tree, '{pName}_leg1'.format(pName=pName) ) + bookParticle(tree, '{pName}_leg2'.format(pName=pName) ) + +def fillZed(tree, pName, zed): + fillParticle(tree, pName, zed) + fillParticle(tree, '{pName}_leg1'.format(pName=pName), zed.leg1 ) + fillParticle(tree, '{pName}_leg2'.format(pName=pName), zed.leg2 ) + +def bookMet(tree, pName): + var(tree, '{pName}_pt'.format(pName=pName) ) + var(tree, '{pName}_sumet'.format(pName=pName) ) + var(tree, '{pName}_phi'.format(pName=pName) ) + +def fillMet(tree, pName, met): + fill(tree, '{pName}_pt'.format(pName=pName), met.pt() ) + fill(tree, '{pName}_sumet'.format(pName=pName), met.sum_et() ) + fill(tree, '{pName}_phi'.format(pName=pName), met.phi() ) + + diff --git a/PhysicsTools/HeppyCore/python/analyzers/roc.py b/PhysicsTools/HeppyCore/python/analyzers/roc.py new file mode 100644 index 0000000000000..e7160c2cc7f6c --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/roc.py @@ -0,0 +1,46 @@ +import numpy as np +import scipy as sp +import scipy.interpolate +import PhysicsTools.HeppyCore.statistics.rrandom as random + + +class ROC(object): + '''background rate vs signal 
efficiency''' + + def __init__(self, sig_bgd_points): + '''Provide a few points on the ROC curve ''' + self.sig_bgd_points = sig_bgd_points + lin_interp = scipy.interpolate.interp1d(sig_bgd_points[:, 0], + np.log10(sig_bgd_points[:, 1]), + 'linear') + self.roc = lambda zz: np.power(10.0, lin_interp(zz)) + + def plot(self): + xx = np.linspace(min(self.sig_bgd_points[:, 0]), max(self.sig_bgd_points[:, 0])) + plt.plot(xx, self.roc(xx)) + plt.show() + + def set_working_point(self, b_eff): + self.eff = b_eff + self.fake_rate = self.roc(b_eff) + + def is_b_tagged(self, is_b): + eff = self.eff if is_b else self.fake_rate + return random.uniform(0, 1) < eff + + + +cms_roc = ROC( + np.array( + [ + [0.4, 2.e-4], + [0.5, 7.e-4], + [0.6, 3.e-3], + [0.7, 1.5e-2], + [0.8, 7.e-2], + [0.9, 3.e-1], + [1., 1.]] + ) +) + + diff --git a/PhysicsTools/HeppyCore/python/analyzers/test_Filter.py b/PhysicsTools/HeppyCore/python/analyzers/test_Filter.py new file mode 100644 index 0000000000000..5db3ba4bd37a8 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/test_Filter.py @@ -0,0 +1,52 @@ +import unittest +import os +import shutil +import tempfile +from Filter import Filter +from PhysicsTools.HeppyCore.framework.event import Event +import PhysicsTools.HeppyCore.framework.config as cfg + +class FilterTestCase(unittest.TestCase): + + def setUp(self): + self.outdir = tempfile.mkdtemp() + + def tearDown(self): + shutil.rmtree(self.outdir) + + def test_list(self): + event = Event(0) + event.the_list = range(10) + cfg_ana = cfg.Analyzer( + Filter, + output = 'filtered', + input_objects = 'the_list', + filter_func = lambda x : x%2 == 0 + ) + cfg_comp = cfg.Component( + 'test', + files = [] + ) + filter = Filter(cfg_ana, cfg_comp, self.outdir) + filter.process(event) + self.assertItemsEqual(event.filtered, [0,2,4,6,8]) + + def test_dict(self): + event = Event(0) + event.the_dict = dict( [ (x, x**2) for x in range(10) ] ) + cfg_ana = cfg.Analyzer( + Filter, + output = 'filtered', + 
input_objects = 'the_dict', + filter_func = lambda x : x == 9 + ) + cfg_comp = cfg.Component( + 'test', + files = [] + ) + filter = Filter(cfg_ana, cfg_comp, self.outdir) + filter.process(event) + self.assertDictEqual(event.filtered, {3:9}) + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/analyzers/test_Gun.py b/PhysicsTools/HeppyCore/python/analyzers/test_Gun.py new file mode 100644 index 0000000000000..2b872d38e91f8 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/test_Gun.py @@ -0,0 +1,18 @@ +import unittest + +from Gun import * + +class GunTestCase(unittest.TestCase): + + def test_particle(self): + for i in range(1000): + ptc = particle(211, -0.5, 0.5, 10, 10, flat_pt=True) + self.assertAlmostEqual(ptc.pt(), 10.) + + def test_e_pt_not_same(self): + for i in range(1000): + ptc = particle(211, -0.5, 0.5, 10, 10, flat_pt=True) + self.assertNotEqual(ptc.pt(), ptc.e()) + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/analyzers/test_roc.py b/PhysicsTools/HeppyCore/python/analyzers/test_roc.py new file mode 100644 index 0000000000000..7a77c5ee88336 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/analyzers/test_roc.py @@ -0,0 +1,22 @@ +import unittest +import numpy as np + +from roc import cms_roc + +class TestROC(unittest.TestCase): + + def test_all_wps(self): + for b_eff, fake_eff in cms_roc.sig_bgd_points: + cms_roc.set_working_point(b_eff) + found = [] + for i in range(50000): + found.append(cms_roc.is_b_tagged(True)) + self.assertAlmostEqual(np.average(found), b_eff, 2) + fake = [] + for i in range(50000): + fake.append(cms_roc.is_b_tagged(False)) + self.assertAlmostEqual(np.average(fake), fake_eff, 2) + + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/analyzers/tmp.py b/PhysicsTools/HeppyCore/python/analyzers/tmp.py new file mode 100644 index 0000000000000..2c082f600bbb5 --- /dev/null +++ 
b/PhysicsTools/HeppyCore/python/analyzers/tmp.py @@ -0,0 +1,25 @@ +from PhysicsTools.HeppyCore.framework.analyzer import Analyzer +from PhysicsTools.HeppyCore.particles.tlv.resonance import Resonance2 as Resonance + +import pprint +import itertools + +mass = {23: 91, 25: 125} + +class ResonanceBuilder(Analyzer): + + def process(self, event): + legs = getattr(event, self.cfg_ana.leg_collection) + resonances = [] + for leg1, leg2 in itertools.combinations(legs,2): + resonances.append( Resonance(leg1, leg2, self.cfg_ana.pdgid) ) + # sorting according to distance to nominal mass + nominal_mass = mass[self.cfg_ana.pdgid] + resonances.sort(key=lambda x: abs(x.m()-nominal_mass)) + setattr(event, self.cfg_ana.output, resonances) + # getting legs of best resonance + legs = [] + if len(resonances): + legs = resonances[0].legs + setattr(event, '_'.join([self.cfg_ana.output, 'legs']), legs) + diff --git a/PhysicsTools/HeppyCore/python/configuration.py b/PhysicsTools/HeppyCore/python/configuration.py new file mode 100644 index 0000000000000..d245332f2a06a --- /dev/null +++ b/PhysicsTools/HeppyCore/python/configuration.py @@ -0,0 +1,3 @@ +class Collider(object): + BEAMS = 'pp' + SQRTS = 13000. 
diff --git a/PhysicsTools/HeppyCore/python/display/core.py b/PhysicsTools/HeppyCore/python/display/core.py new file mode 100644 index 0000000000000..a25dd44a41470 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/display/core.py @@ -0,0 +1,102 @@ +from ROOT import TCanvas, TH1, TH2F +import operator +import math +import os +from PhysicsTools.HeppyCore.papas.pfobjects import Cluster + +class Display(object): + + def __init__(self, views=None): + ViewPane.nviews = 0 + if not views: + views = ['xy', 'yz', 'xz'] + self.views = dict() + for view in views: + if view in ['xy', 'yz', 'xz']: + self.views[view] = ViewPane(view, view, + 100, -4, 4, 100, -4, 4) + elif 'thetaphi' in view: + self.views[view] = ViewPane(view, view, + 100, -math.pi/2, math.pi/2, + 100, -math.pi, math.pi, + 500, 1000) + + def register(self, obj, layer, clearable=True): + elems = [obj] + if hasattr(obj, '__iter__'): + elems = obj + for elem in elems: + for view in self.views.values(): + view.register(elem, layer, clearable) + + def clear(self): + for view in self.views.values(): + view.clear() + + def zoom(self, xmin, xmax, ymin, ymax): + for view in self.views.values(): + view.zoom(xmin, xmax, ymin, ymax) + + def unzoom(self): + for view in self.views.values(): + view.unzoom() + + def draw(self): + for view in self.views.values(): + view.draw() + + def save(self, outdir, filetype='png'): + os.mkdir(outdir) + for view in self.views.values(): + view.save(outdir, filetype) + + +class ViewPane(object): + nviews = 0 + def __init__(self, name, projection, nx, xmin, xmax, ny, ymin, ymax, + dx=600, dy=600): + self.projection = projection + tx = 50 + self.__class__.nviews * (dx+10) + ty = 50 + self.canvas = TCanvas(name, name, tx, ty, dx, dy) + TH1.AddDirectory(False) + self.hist = TH2F(name, name, nx, xmin, xmax, ny, ymin, ymax) + TH1.AddDirectory(True) + self.hist.Draw() + self.hist.SetStats(False) + self.registered = dict() + self.locked = dict() + self.__class__.nviews += 1 + + def register(self, obj, 
layer, clearable=True): + self.registered[obj] = layer + if not clearable: + self.locked[obj] = layer + #TODO might need to keep track of views in objects + + def clear(self): + self.registered = dict(self.locked.items()) + + def draw(self): + self.canvas.cd() + for obj, layer in sorted(self.registered.items(), + key = operator.itemgetter(1)): + obj.draw(self.projection) + self.canvas.Update() + + def zoom(self, xmin, xmax, ymin, ymax): + self.hist.GetXaxis().SetRangeUser(xmin, xmax) + self.hist.GetYaxis().SetRangeUser(ymin, ymax) + self.canvas.Update() + + def unzoom(self): + self.hist.GetXaxis().UnZoom() + self.hist.GetYaxis().UnZoom() + self.canvas.Modified() + self.canvas.Update() + + def save(self, outdir, filetype): + fname = '{outdir}/{name}.{filetype}'.format(outdir=outdir, + name=self.canvas.GetName(), + filetype=filetype) + self.canvas.SaveAs(fname) diff --git a/PhysicsTools/HeppyCore/python/display/geometry.py b/PhysicsTools/HeppyCore/python/display/geometry.py new file mode 100644 index 0000000000000..57b146f3737d9 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/display/geometry.py @@ -0,0 +1,98 @@ + +from ROOT import TEllipse, TBox +from ROOT import TColor, kRed, kBlue, kCyan + +#TODO display the field +#TODO display trajectories (tracks, particles, charged or not) +#TODO display deposits + + +COLORS = dict( + ECAL = kRed-10, + HCAL = kBlue-10, + void = None, + BeamPipe = kCyan-10 +) + +class GDetectorElement(object): + '''TODO improve design? + there could be one detector element per view, + and they would all be linked together. 
+ ''' + def __init__(self, description): + self.desc = description + self.circles = [] + self.boxes = [] + self.circles.append( TEllipse(0., 0., + self.desc.volume.outer.rad, + self.desc.volume.outer.rad) ) + dz = self.desc.volume.outer.z + radius = self.desc.volume.outer.rad + self.boxes.append( TBox(-dz, -radius, dz, radius) ) + + if self.desc.volume.inner: + self.circles.append( TEllipse(0., 0., + self.desc.volume.inner.rad, + self.desc.volume.inner.rad)) + dz = self.desc.volume.inner.z + radius = self.desc.volume.inner.rad + self.boxes.append( TBox(-dz, -radius, dz, radius) ) + color = COLORS[self.desc.material.name] + oc = self.circles[0] + ob = self.boxes[0] + for shape in [oc, ob]: + if color: + shape.SetFillColor(color) + shape.SetFillStyle(1001) + else: + shape.SetFillStyle(0) + shape.SetLineColor(1) + shape.SetLineStyle(1) + if len(self.circles)==2: + ic = self.circles[1] + ib = self.boxes[1] + for shape in [ic, ib]: + if color: + shape.SetFillColor(0) + shape.SetFillStyle(1001) + else: + shape.SetFillStyle(0) + + def draw(self, projection): + if projection == 'xy': + for circle in self.circles: + circle.Draw('same') + elif projection in ['xz', 'yz']: + for box in self.boxes: + box.Draw('samel') + elif 'thetaphi' in projection: + pass + else: + raise ValueError('implement drawing for projection ' + projection ) + + +class GDetector(object): + def __init__(self, description): + self.desc = description + elems = sorted(self.desc.elements.values(), key= lambda x : x.volume.outer.rad, reverse = True) + self.elements = [GDetectorElement(elem) for elem in elems] + #self.elements = [GDetectorElement(elem) for elem in self.desc.elements.values()] + + def draw(self, projection): + for elem in self.elements: + elem.draw(projection) + + + +if __name__ == '__main__': + + from ROOT import TCanvas, TH2F + from PhysicsTools.HeppyCore.papas.detectors.CMS import CMS + from PhysicsTools.HeppyCore.display.core import Display + + cms = CMS() + gcms = GDetector(cms) + + 
display = Display() + display.register(gcms, 0) + display.draw() diff --git a/PhysicsTools/HeppyCore/python/display/helix.py b/PhysicsTools/HeppyCore/python/display/helix.py new file mode 100644 index 0000000000000..4f4fa734adce2 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/display/helix.py @@ -0,0 +1,12 @@ +from ROOT import THelix, TH3F, gPad + +helix = THelix(0, 0, 0, 2, 0, 1, 4) + +hframe = TH3F("hframe","", 10, -2, 2, 10, -2, 2, 10, -2, 2) +hframe.Draw() +helix.SetRange(0, 0.1, 0) + +helix.Draw("same") + +gPad.Update() + diff --git a/PhysicsTools/HeppyCore/python/display/pfobjects.py b/PhysicsTools/HeppyCore/python/display/pfobjects.py new file mode 100644 index 0000000000000..01a04e35f6c8a --- /dev/null +++ b/PhysicsTools/HeppyCore/python/display/pfobjects.py @@ -0,0 +1,187 @@ +from ROOT import TPolyLine, TGraph, TArc, TEllipse, kGray +import numpy as np +import operator +import math + +class Blob(object): + def __init__(self, cluster): + self.cluster = cluster + pos = cluster.position + radius = cluster.size() + thetaphiradius = cluster.angular_size() + # print radius + color = 1 + if cluster.particle: + if cluster.particle.pdgid() == 22 or cluster.particle.pdgid() == 11: + color = 2 + else: + color = 4 + max_energy = cluster.__class__.max_energy + self.contour_xy = TEllipse(pos.X(), pos.Y(), radius) + self.contour_yz = TEllipse(pos.Z(), pos.Y(), radius) + self.contour_xz = TEllipse(pos.Z(), pos.X(), radius) + self.contour_thetaphi = TEllipse(math.pi/2. - pos.Theta(), pos.Phi(), + thetaphiradius) + contours = [self.contour_xy, self.contour_yz, self.contour_xz, + self.contour_thetaphi] + iradius = radius * cluster.energy / max_energy + ithetaphiradius = thetaphiradius * cluster.energy / max_energy + self.inner_xy = TEllipse(pos.X(), pos.Y(), iradius) + self.inner_yz = TEllipse(pos.Z(), pos.Y(), iradius) + self.inner_xz = TEllipse(pos.Z(), pos.X(), iradius) + self.inner_thetaphi = TEllipse(math.pi/2. 
- pos.Theta(), pos.Phi(), + ithetaphiradius) + inners = [self.inner_xy, self.inner_yz, self.inner_xz, + self.inner_thetaphi] + for contour in contours: + contour.SetLineColor(color) + contour.SetFillStyle(0) + for inner in inners: + inner.SetFillColor(color) + inner.SetFillStyle(3002) + + def draw(self, projection, opt=''): + if projection == 'xy': + self.contour_xy.Draw(opt+"psame") + self.inner_xy.Draw(opt+"psame") + elif projection == 'yz': + self.contour_yz.Draw(opt+"psame") + self.inner_yz.Draw(opt+"psame") + elif projection == 'xz': + self.contour_xz.Draw(opt+"psame") + self.inner_xz.Draw(opt+"psame") + elif projection == 'ECAL_thetaphi': + if self.cluster.layer == 'ecal_in': + self.contour_thetaphi.Draw(opt+"psame") + self.inner_thetaphi.Draw(opt+"psame") + elif projection == 'HCAL_thetaphi': + if self.cluster.layer == 'hcal_in': + self.contour_thetaphi.Draw(opt+"psame") + self.inner_thetaphi.Draw(opt+"psame") + else: + raise ValueError('implement drawing for projection ' + projection ) + + +class GTrajectory(object): + + draw_smeared_clusters = True + + def __init__(self, description, linestyle=1, linecolor=1): + self.desc = description + npoints = len(self.desc.points) + self.graph_xy = TGraph(npoints) + self.graph_yz = TGraph(npoints) + self.graph_xz = TGraph(npoints) + self.graph_thetaphi = TGraph(npoints) + self.graphs = [self.graph_xy, self.graph_yz, self.graph_xz, self.graph_thetaphi] + def set_graph_style(graph): + graph.SetMarkerStyle(2) + graph.SetMarkerSize(0.7) + graph.SetLineStyle(linestyle) + graph.SetLineColor(linecolor) + set_graph_style(self.graph_xy) + set_graph_style(self.graph_yz) + set_graph_style(self.graph_xz) + set_graph_style(self.graph_thetaphi) + for i, point in enumerate(self.desc.points.values()): + self.graph_xy.SetPoint( i, point.X(), point.Y() ) + self.graph_yz.SetPoint(i, point.Z(), point.Y() ) + self.graph_xz.SetPoint(i, point.Z(), point.X() ) + tppoint = point + if i == 0: + tppoint = description.p4().Vect() + 
self.graph_thetaphi.SetPoint(i, math.pi/2. - tppoint.Theta(), tppoint.Phi() ) + clusters = self.desc.clusters_smeared \ + if self.__class__.draw_smeared_clusters \ + else self.desc.clusters + self.blobs = map(Blob, clusters.values()) + + def set_color(self, color): + for graph in self.graphs: + graph.SetMarkerColor(color) + + def draw(self, projection, opt=''): + for blob in self.blobs: + blob.draw(projection, opt) + if projection == 'xy': + self.graph_xy.Draw(opt+"psame") + elif projection == 'yz': + self.graph_yz.Draw(opt+"psame") + elif projection == 'xz': + self.graph_xz.Draw(opt+"psame") + elif 'thetaphi' in projection: + self.graph_thetaphi.Draw(opt+"psame") + else: + raise ValueError('implement drawing for projection ' + projection ) + +class GStraightTrajectory(GTrajectory): + def __init__(self, description): + super(GStraightTrajectory, self).__init__(description, + linestyle=2, linecolor=1) + + def draw(self, projection): + super(GStraightTrajectory, self).draw(projection, 'l') + + +class GHelixTrajectory(GTrajectory): + def __init__(self, description): + super(GHelixTrajectory, self).__init__(description) + helix = description.path + self.helix_xy = TArc(helix.center_xy.X(), + helix.center_xy.Y(), + helix.rho, helix.phi_min, helix.phi_max) + self.helix_xy.SetFillStyle(0) + #TODO this is patchy,need to access the last point, whatever its name + max_time = helix.time_at_z(description.points.values()[-1].Z()) + npoints = 100 + self.graphline_xy = TGraph(npoints) + self.graphline_yz = TGraph(npoints) + self.graphline_xz = TGraph(npoints) + self.graphline_thetaphi = TGraph(npoints) + for i, time in enumerate(np.linspace(0, max_time, npoints)): + point = helix.point_at_time(time) + self.graphline_xy.SetPoint(i, point.X(), point.Y()) + self.graphline_yz.SetPoint(i, point.Z(), point.Y()) + self.graphline_xz.SetPoint(i, point.Z(), point.X()) + tppoint = point + if i == 0: + tppoint = description.p4().Vect() + self.graphline_thetaphi.SetPoint(i, 
math.pi/2.-tppoint.Theta(), tppoint.Phi()) + if abs(self.desc.pdgid()) in [11,13]: + def set_graph_style(graph): + graph.SetLineWidth(3) + graph.SetLineColor(5) + set_graph_style(self.graphline_xy) + set_graph_style(self.graphline_xz) + set_graph_style(self.graphline_yz) + set_graph_style(self.graphline_thetaphi) + + + def draw(self, projection): + if projection == 'xy': + # self.helix_xy.Draw("onlysame") + self.graphline_xy.Draw("lsame") + elif projection == 'yz': + self.graphline_yz.Draw("lsame") + elif projection == 'xz': + self.graphline_xz.Draw("lsame") + elif 'thetaphi' in projection: + self.graphline_thetaphi.Draw("lsame") + else: + raise ValueError('implement drawing for projection ' + projection ) + super(GHelixTrajectory, self).draw(projection) + + +class GTrajectories(list): + + def __init__(self, particles): + for ptc in particles: + is_neutral = abs(ptc.q())<0.5 + TrajClass = GStraightTrajectory if is_neutral else GHelixTrajectory + gtraj = TrajClass(ptc) + self.append(gtraj) + # display.register(gtraj,1) + + def draw(self, projection): + for traj in self: + traj.draw(projection) diff --git a/PhysicsTools/HeppyCore/python/framework/analyzer.py b/PhysicsTools/HeppyCore/python/framework/analyzer.py index 11ad061d30dec..93d693b84eaaf 100644 --- a/PhysicsTools/HeppyCore/python/framework/analyzer.py +++ b/PhysicsTools/HeppyCore/python/framework/analyzer.py @@ -2,6 +2,7 @@ # https://github.com/cbernet/heppy/blob/master/LICENSE import os +import sys import logging from PhysicsTools.HeppyCore.statistics.counter import Counters @@ -31,19 +32,30 @@ def __init__(self, cfg_ana, cfg_comp, looperName ): self.cfg_ana = cfg_ana self.cfg_comp = cfg_comp self.looperName = looperName - if hasattr(cfg_ana,"nosubdir") and cfg_ana.nosubdir: - self.dirName = self.looperName - else: + if hasattr(cfg_ana,"nosubdir") and cfg_ana.nosubdir: + self.dirName = self.looperName + else: self.dirName = '/'.join( [self.looperName, self.name] ) os.mkdir( self.dirName ) # this is 
the main logger corresponding to the looper. - # each analyzer could also declare its own logger self.mainLogger = logging.getLogger( looperName ) - # print self.mainLogger.handlers + + # this logger is specific to the analyzer + self.logger = logging.getLogger(self.name) + self.logger.addHandler(logging.FileHandler('/'.join([self.dirName, + 'log.txt']))) + self.logger.propagate = False + self.logger.addHandler( logging.StreamHandler(sys.stdout) ) + log_level = logging.CRITICAL + if hasattr(self.cfg_ana, 'log_level'): + log_level = self.cfg_ana.log_level + self.logger.setLevel(log_level) + self.beginLoopCalled = False + def beginLoop(self, setup): """Automatically called by Looper, for all analyzers.""" self.counters = Counters() @@ -51,6 +63,7 @@ def beginLoop(self, setup): self.mainLogger.info( 'beginLoop ' + self.cfg_ana.name ) self.beginLoopCalled = True + def endLoop(self, setup): """Automatically called by Looper, for all analyzers.""" #print self.cfg_ana @@ -70,6 +83,11 @@ def write(self, setup): Just overload it if you have histograms to write.""" self.counters.write( self.dirName ) self.averages.write( self.dirName ) + if len(self.counters): + self.logger.info(str(self.counters)) + if len(self.averages): + self.logger.info(str(self.averages)) + def __str__(self): """A multipurpose printout. 
Should do the job for most analyzers.""" diff --git a/PhysicsTools/HeppyCore/python/framework/chain.py b/PhysicsTools/HeppyCore/python/framework/chain.py index 43117bbcba2c2..f13ad61bb192c 100644 --- a/PhysicsTools/HeppyCore/python/framework/chain.py +++ b/PhysicsTools/HeppyCore/python/framework/chain.py @@ -6,6 +6,7 @@ import pprint from ROOT import TChain, TFile, TTree, gSystem +#TODO should use eostools def is_pfn(fn): return not (is_lfn(fn) or is_rootfn(fn)) diff --git a/PhysicsTools/HeppyCore/python/framework/chain_noindexing.py b/PhysicsTools/HeppyCore/python/framework/chain_noindexing.py new file mode 100644 index 0000000000000..b7f1dc92599cf --- /dev/null +++ b/PhysicsTools/HeppyCore/python/framework/chain_noindexing.py @@ -0,0 +1,98 @@ +# Copyright (C) 2014 Colin Bernet +# https://github.com/cbernet/heppy/blob/master/LICENSE + +import glob +import os +import pprint +from ROOT import TChain, TFile, TTree, gSystem + +class ChainNoIndexing( object ): + """Wrapper to TChain, with a python iterable interface. + + Example of use: #TODO make that a doctest / nose? + from chain import Chain + the_chain = Chain('../test/test_*.root', 'test_tree') + event3 = the_chain[2] + print event3.var1 + + for event in the_chain: + print event.var1 + """ + + def __init__(self, input, tree_name=None): + """ + Create a chain. + + Parameters: + input = either a list of files or a wildcard (e.g. 'subdir/*.root'). + In the latter case all files matching the pattern will be used + to build the chain. + tree_name = key of the tree in each file. + if None and if each file contains only one TTree, + this TTree is used. 
+ """ + self.files = input + if isinstance(input, basestring): # input is a pattern + self.files = glob.glob(input) + if len(self.files)==0: + raise ValueError('no matching file name: '+input) + else: # case of a list of files + if False in [ os.path.isfile(fnam) for fnam in self.files ]: + err = 'at least one input file does not exist\n' + err += pprint.pformat(self.files) + raise ValueError(err) + if tree_name is None: + tree_name = self._guessTreeName(input) + self.chain = TChain(tree_name) + for file in self.files: + self.chain.Add(file) + + def _guessTreeName(self, pattern): + """ + Find the set of keys of all TTrees in all files matching pattern. + If the set contains only one key + Returns: the TTree key + else raises ValueError. + """ + names = [] + for fnam in self.files: + rfile = TFile(fnam) + for key in rfile.GetListOfKeys(): + obj = rfile.Get(key.GetName()) + if type(obj) is TTree: + names.append( key.GetName() ) + thename = set(names) + if len(thename)==1: + return list(thename)[0] + else: + err = [ + 'several TTree keys in {pattern}:'.format( + pattern=pattern + ), + ','.join(thename) + ] + raise ValueError('\n'.join(err)) + + def __getattr__(self, attr): + """ + All functions of the wrapped TChain are made available + """ + return getattr(self.chain, attr) + + def __iter__(self): + return iter(self.chain) + + def __len__(self): + return int(self.chain.GetEntries()) + + +if __name__ == '__main__': + + import sys + + if len(sys.argv)!=3: + print 'usage: Chain.py ' + sys.exit(1) + tree_name = sys.argv[1] + pattern = sys.argv[2] + chain = Chain( tree_name, pattern ) diff --git a/PhysicsTools/HeppyCore/python/framework/config.py b/PhysicsTools/HeppyCore/python/framework/config.py index cf6fd79b472c0..9f9698a5251d0 100644 --- a/PhysicsTools/HeppyCore/python/framework/config.py +++ b/PhysicsTools/HeppyCore/python/framework/config.py @@ -2,8 +2,14 @@ # https://github.com/cbernet/heppy/blob/master/LICENSE from weight import Weight -import copy import 
glob +import analyzer +import copy + +# Forbidding PyROOT to hijack help system, +# in case the configuration module is used as a script. +import ROOT +ROOT.PyConfig.IgnoreCommandLineOptions = True def printComps(comps, details=False): ''' @@ -29,12 +35,47 @@ def printComps(comps, details=False): print '# components with files = ', nCompsWithFiles print '# jobs = ', nJobs +def split(comps): + '''takes a list of components, split the ones that need to be splitted, + and return a new (bigger) list''' + + def chunks(l, n): + '''split list l in n chunks. The last one can be smaller.''' + return [l[i:i+n] for i in range(0, len(l), n)] + + splitComps = [] + for comp in comps: + if hasattr( comp, 'fineSplitFactor') and comp.fineSplitFactor>1: + subchunks = range(comp.fineSplitFactor) + for ichunk, chunk in enumerate([(f,i) for f in comp.files for i in subchunks]): + newComp = copy.deepcopy(comp) + newComp.files = [chunk[0]] + newComp.fineSplit = ( chunk[1], comp.fineSplitFactor ) + newComp.name = '{name}_Chunk{index}'.format(name=newComp.name, + index=ichunk) + splitComps.append( newComp ) + elif hasattr( comp, 'splitFactor') and comp.splitFactor>1: + chunkSize = len(comp.files) / comp.splitFactor + if len(comp.files) % comp.splitFactor: + chunkSize += 1 + # print 'chunk size',chunkSize, len(comp.files), comp.splitFactor + for ichunk, chunk in enumerate(chunks(comp.files, chunkSize)): + newComp = copy.deepcopy(comp) + newComp.files = chunk + newComp.name = '{name}_Chunk{index}'.format(name=newComp.name, + index=ichunk) + splitComps.append( newComp ) + else: + splitComps.append( comp ) + return splitComps + class CFG(object): '''Base configuration class. 
The attributes are used to store parameters of any type''' def __init__(self, **kwargs): '''All keyword arguments are added as attributes.''' self.__dict__.update( **kwargs ) + self.name = None def __str__(self): '''A useful printout''' @@ -75,24 +116,18 @@ def clone(self, **kwargs): return other class Analyzer( CFG ): - '''Base configuration class for analyzers such as framework.analyzer.Analyzer . - - The attributes are used to store parameters of any type, - which can then be used in the corresponding framework.analyzer.Analyzer. - - See constructor for more information.''' - - num_instance = 0 + '''Base analyzer configuration, see constructor''' + names = set() - def __init__(self, class_object, instance_label=None, + def __init__(self, class_object, instance_label='1', verbose=False, **kwargs): ''' One could for example define the analyzer configuration for a di-muon framework.Analyzer.Analyzer in the following way: - dimuons_ana = cfg.Analyzer( + ZMuMuAna = cfg.Analyzer( ZMuMuAnalyzer, - instance_label='dimuons', + 'zmumu', # optional! pt1 = 20, pt2 = 20, iso1 = 0.1, @@ -103,60 +138,100 @@ def __init__(self, class_object, instance_label=None, m_max = 200 ) - Any kind of keyword arguments can be added. - - The first argument is the class object of the corresponding framework.analyzer.Analyzer. - It will be used by framework.looper.Looper to create an object of this class to process - your events. - - The second argument, instance_label, allows you to specify a label in case you have - several analyzers of the same class. This argument is optional and will be set automatically - by heppy if omitted. - ''' + The first argument is your analyzer class. + It should inherit from heppy.framework.analyzer.Analyser (standalone) + or from PhysicsTools.HeppyCore.framework.analyzer (in CMS) + + The second argument is optional. + If you have several analyzers of the same class, + e.g. 
ZEleEleAna and ZMuMuAna, + you may choose to provide it to keep track of the output + of these analyzers. + If you don't so so, the instance labels of the analyzers will + automatically be set to 1, 2, etc. + + Finally, any kinds of keyword arguments can be added. + + This analyzer configuration object will become available + as self.cfg_ana in your ZMuMuAnalyzer. + ''' + super(Analyzer, self).__init__(**kwargs) + errmsg = None + if type(class_object) is not type: + errmsg = 'The first argument should be a class' + elif not analyzer.Analyzer in class_object.__mro__: + try: + #TODO: we also should be able to use analyzers + #TODO: in PhysicsTools.HeppyCore... + #TODO: a bit of a hack anyway, can we do something cleaner? + from PhysicsTools.Heppy.analyzers.core.Analyzer import Analyzer as CMSBaseAnalyzer + if CMSBaseAnalyzer in class_object.__mro__: + errmsg = None + except: + errmsg = 'The first argument should be a class inheriting from {anaclass}'.format(anaclass=analyzer.Analyzer) + if errmsg: + msg = 'Error creating {selfclass} object. {errmsg}. Instead, you gave {classobjectclass}'.format( + selfclass=self.__class__, + errmsg=errmsg, + classobjectclass=class_object ) + raise ValueError(msg) self.class_object = class_object - self.__class__.num_instance += 1 - if instance_label is None: - instance_label = str(self.__class__.num_instance) - self.instance_label = instance_label + self.instance_label = instance_label # calls _build_name self.verbose = verbose - super(Analyzer, self).__init__(**kwargs) def __setattr__(self, name, value): '''You may decide to copy an existing analyzer and change its instance_label. 
In that case, one must stay consistent.''' self.__dict__[name] = value if name == 'instance_label': - self.name = self.build_name() + self.name = self._build_name() - def build_name(self): + def _build_name(self): class_name = '.'.join([self.class_object.__module__, self.class_object.__name__]) - name = '_'.join([class_name, self.instance_label]) + while 1: + # if class_name == 'heppy.analyzers.ResonanceBuilder.ResonanceBuilder': + # import pdb; pdb.set_trace() + name = '_'.join([class_name, self.instance_label]) + if name not in self.__class__.names: + self.__class__.names.add(name) + break + else: + # cannot set attr directly or infinite recursion, + # see setattr + iinst = None + try: + iinst = int(self.instance_label) + self.__dict__['instance_label'] = str(iinst+1) + except ValueError: + # here, reloading module in ipython + self.__class__.names = set() + self.__dict__['instance_label'] = self.instance_label return name def clone(self, **kwargs): other = super(Analyzer, self).clone(**kwargs) if 'class_object' in kwargs and 'name' not in kwargs: - other.name = other.build_name() + other.name = other._build_name() return other + def __repr__(self): + baserepr = super(Analyzer, self).__repr__() + return ':'.join([baserepr, self.name]) + class Service( CFG ): - num_instance = 0 - - def __init__(self, class_object, instance_label=None, + def __init__(self, class_object, instance_label='1', verbose=False, **kwargs): + super(Service, self).__init__(**kwargs) self.class_object = class_object - self.__class__.num_instance += 1 - if instance_label is None: - instance_label = str(self.__class__.num_instance) self.instance_label = instance_label - self.__class__.num_instance += 1 - self.name = self.build_name() + self.name = self._build_name() self.verbose = verbose - super(Service, self).__init__(**kwargs) - def build_name(self): + def _build_name(self): class_name = '.'.join([self.class_object.__module__, self.class_object.__name__]) name = '_'.join([class_name, 
self.instance_label]) @@ -167,12 +242,12 @@ def __setattr__(self, name, value): its instance_label. In that case, one must stay consistent.''' self.__dict__[name] = value if name == 'instance_label': - self.name = self.build_name() + self.name = self._build_name() def clone(self, **kwargs): other = super(Service, self).clone(**kwargs) if 'class_object' in kwargs and 'name' not in kwargs: - other.name = other.build_name() + other.name = other._build_name() return other @@ -180,6 +255,20 @@ class Sequence( list ): '''A list with print functionalities. Used to define a sequence of analyzers.''' + def __init__(self, *args): + for arg in args: + if isinstance(arg, list): + self.extend(arg) + elif not hasattr(arg, '__iter__'): + self.append(arg) + else: + raise ValueError( +''' +Sequence only accepts lists or non iterable objects. +You provided an object of type {} +'''.format(arg.__class__) + ) + def __str__(self): tmp = [] for index, ana in enumerate( self ): @@ -204,10 +293,13 @@ def __init__(self, name, files, tree_name=None, triggers=None, **kwargs): files = files, tree_name = tree_name, triggers = triggers, **kwargs) + self.name = name self.dataset_entries = 0 self.isData = False self.isMC = False self.isEmbed = False + + class DataComponent( Component ): diff --git a/PhysicsTools/HeppyCore/python/framework/context.py b/PhysicsTools/HeppyCore/python/framework/context.py new file mode 100644 index 0000000000000..2632652082345 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/framework/context.py @@ -0,0 +1,40 @@ +import os + +def get_name(env=None): + '''Returns the name of the context is which heppy is used. + + If several contexts are defined, throws a ValueError. + + @return: 'cms', 'fcc' or None if no context is defined. 
+ ''' + if env is None: + env = os.environ + contexts = dict( (key, False) for key in ['cms','fcc']) + reldir = env.get('CMSSW_BASE', None) + if reldir and os.path.isdir(reldir): + contexts['cms'] = True + fcc_envs = set(['PODIO', 'FCCPHYSICS', 'FCCEDM', 'HEPPY']) + if fcc_envs.issubset( env ): + contexts['fcc'] = True + defined = [key for key,defined in contexts.iteritems() + if defined is True] + if len(defined)>1: + raise ValueError('several contexts defined: ' + str(defined) ) + elif len(defined)==0: + return None + else: + return defined.pop() + +def heppy_path(): + context = get_name() + if context == 'cms': + return '/'.join([os.environ['CMSSW_BASE'], + 'src/PhysicsTools/HeppyCore/python']) + elif context == 'fcc': + return os.environ['HEPPY'] + + + +name = get_name() + +heppy_path = heppy_path() diff --git a/PhysicsTools/HeppyCore/python/framework/event.py b/PhysicsTools/HeppyCore/python/framework/event.py index 16dfb22da06eb..dbf6fa976f3e9 100644 --- a/PhysicsTools/HeppyCore/python/framework/event.py +++ b/PhysicsTools/HeppyCore/python/framework/event.py @@ -1,47 +1,78 @@ -import collections +import pprint +import copy +import collections +import fnmatch + from ROOT import TChain class Event(object): '''Event class. - The Looper passes the Event object to each of its Analyzers, + The Looper passes an Event object to each of its Analyzers, which in turn can: - read some information - add more information - modify existing information. - Attributes: + A printout can be obtained by doing e.g.: + + event = Event() + print event + + The printout can be controlled by the following class attributes: + print_nstrip : number of items in sequence to be printed before stripping the following items + print_patterns : list of patterns. 
By default, this list is set to ['*'] so that all attributes are + printed + + Example: + event = Event() + Event.print_nstrip = 5 # print only the 5 first items of sequences + Event.print_patterns = ['*particles*', 'jet*'] # only print the attributes that + # contain "particles" in their name or + # have a name starting by "jet" + + Object attributes: iEv = event processing index, starting at 0 eventWeight = a weight, set to 1 at the beginning of the processing input = input, as determined by the looper #TODO: provide a clear interface for access control (put, get, del products) - we should keep track of the name and id of the analyzer. ''' + print_nstrip = 10 + print_patterns = ['*'] + def __init__(self, iEv, input_data=None, setup=None, eventWeight=1 ): self.iEv = iEv self.input = input_data self.setup = setup self.eventWeight = eventWeight + def __str__(self): header = '{type}: {iEv}'.format( type=self.__class__.__name__, iEv = self.iEv) - varlines = [] - for var,value in sorted(vars(self).iteritems()): - tmp = value - # check for recursivity - recursive = False - if hasattr(value, '__getitem__') and \ - not isinstance(value, collections.Mapping) and \ - (len(value)>0 and value[0].__class__ == value.__class__): - recursive = True - if hasattr(value, '__contains__') and \ - not isinstance(value, (str,unicode)) and \ - not isinstance(value, TChain) and \ - not recursive : - tmp = map(str, value) - - varlines.append( '\t{var:<15}: {value}'.format(var=var, value=tmp) ) - all = [ header ] - all.extend(varlines) - return '\n'.join( all ) + selected_attrs = copy.copy( self.__dict__ ) + selected_attrs.pop('setup') + selected_attrs.pop('input') + stripped_attrs = dict() + for name, value in selected_attrs.iteritems(): + if any([fnmatch.fnmatch(name, pattern) for pattern in self.__class__.print_patterns]): + stripped_attrs[name] = value + for name, value in stripped_attrs.iteritems(): + if hasattr(value, '__len__') and \ + hasattr(value.__len__, '__call__') and \ + 
len(value)>self.__class__.print_nstrip+1: + # taking the first 10 elements and converting to a python list + # note that value could be a wrapped C++ vector + if isinstance(value, collections.Mapping): + entries = [entry for entry in value.iteritems()] + entries = entries[:self.__class__.print_nstrip] + entries + stripped_attrs[name] = dict(entries) + else: + stripped_attrs[name] = [ val for val in value[:self.__class__.print_nstrip] ] + stripped_attrs[name].append('...') + stripped_attrs[name].append(value[-1]) + + contents = pprint.pformat(stripped_attrs, indent=4) + return '\n'.join([header, contents]) diff --git a/PhysicsTools/HeppyCore/python/framework/eventsfwlite.py b/PhysicsTools/HeppyCore/python/framework/eventsfwlite.py index 911e1ca4d7892..8cbd27163ea6b 100644 --- a/PhysicsTools/HeppyCore/python/framework/eventsfwlite.py +++ b/PhysicsTools/HeppyCore/python/framework/eventsfwlite.py @@ -1,20 +1,20 @@ -from DataFormats.FWLite import Events as FWLiteEvents -from ROOT import gROOT, gSystem, AutoLibraryLoader - -print "Loading FW Lite" -gSystem.Load("libFWCoreFWLite"); -gROOT.ProcessLine('FWLiteEnabler::enable();') +class Events(object): + def __init__(self, files, tree_name, options=None): + from DataFormats.FWLite import Events as FWLiteEvents + #TODO not sure we still need the stuff below + from ROOT import gROOT, gSystem, AutoLibraryLoader -gSystem.Load("libFWCoreFWLite"); -gSystem.Load("libDataFormatsPatCandidates"); + print "Loading FW Lite" + gSystem.Load("libFWCoreFWLite"); + gROOT.ProcessLine('FWLiteEnabler::enable();') -from ROOT import gInterpreter -gInterpreter.ProcessLine("using namespace reco;") -gInterpreter.ProcessLine("using edm::refhelper::FindUsingAdvance;") + gSystem.Load("libFWCoreFWLite"); + gSystem.Load("libDataFormatsPatCandidates"); -class Events(object): - def __init__(self, files, tree_name, options=None): + from ROOT import gInterpreter + gInterpreter.ProcessLine("using namespace reco;") + gInterpreter.ProcessLine("using 
edm::refhelper::FindUsingAdvance;") if options is not None : if not hasattr(options,"inputFiles"): options.inputFiles=files @@ -35,4 +35,3 @@ def __getattr__(self, key): def __getitem__(self, iEv): self.events.to(iEv) return self - diff --git a/PhysicsTools/HeppyCore/python/framework/eventslcio.py b/PhysicsTools/HeppyCore/python/framework/eventslcio.py new file mode 100644 index 0000000000000..e39546589e612 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/framework/eventslcio.py @@ -0,0 +1,16 @@ +from pyLCIO import IOIMPL + +class Events(object): + + def __init__(self, filename, dummy): + self.reader = IOIMPL.LCFactory.getInstance().createLCReader() + self.reader.open(filename) + + def __len__(self): + return self.reader.getNumberOfEvents() + + def __getattr__(self, key): + return getattr(self.events, key) + + def __iter__(self): + return iter(self.reader) diff --git a/PhysicsTools/HeppyCore/python/framework/eventstfile.py b/PhysicsTools/HeppyCore/python/framework/eventstfile.py index 54fbe9fcfac56..fa265ebff369f 100644 --- a/PhysicsTools/HeppyCore/python/framework/eventstfile.py +++ b/PhysicsTools/HeppyCore/python/framework/eventstfile.py @@ -7,6 +7,8 @@ class Events(object): '''Event list from a tree in a root file. 
''' def __init__(self, filename, treename, options=None): + self.filename = filename + self.treename = treename self.file = TFile(filename) if self.file.IsZombie(): raise ValueError('file {fnam} does not exist'.format(fnam=filename)) @@ -22,7 +24,11 @@ def size(self): def to(self, iEv): '''navigate to event iEv.''' - self.tree.GetEntry(iEv) + nbytes = self.tree.GetEntry(iEv) + if nbytes < 0: + raise IOError("Could not read event {0} in tree {1}:{2}".format( + iEv, self.filename, self.treename + )) return self.tree def __iter__(self): diff --git a/PhysicsTools/HeppyCore/python/framework/exceptions.py b/PhysicsTools/HeppyCore/python/framework/exceptions.py new file mode 100644 index 0000000000000..9c2c7f9ce869f --- /dev/null +++ b/PhysicsTools/HeppyCore/python/framework/exceptions.py @@ -0,0 +1,2 @@ +class UserStop(Exception): + pass diff --git a/PhysicsTools/HeppyCore/python/framework/heppy_loop.py b/PhysicsTools/HeppyCore/python/framework/heppy_loop.py index 50c958be71732..303a0ed00d9bb 100755 --- a/PhysicsTools/HeppyCore/python/framework/heppy_loop.py +++ b/PhysicsTools/HeppyCore/python/framework/heppy_loop.py @@ -21,13 +21,13 @@ from PhysicsTools.HeppyCore.framework.looper import Looper +from PhysicsTools.HeppyCore.framework.config import split # global, to be used interactively when only one component is processed. 
loop = None def callBack( result ): pass - print 'production done:', str(result) def runLoopAsync(comp, outDir, configName, options): try: @@ -67,64 +67,36 @@ def runLoop( comp, outDir, config, options): return loop -def createOutputDir(dir, components, force): +def createOutputDir(dirname, components, force): '''Creates the output dir, dealing with the case where dir exists.''' answer = None try: - os.mkdir(dir) + os.mkdir(dirname) return True except OSError: - print 'directory %s already exists' % dir - print 'contents: ' - dirlist = [path for path in os.listdir(dir) if os.path.isdir( '/'.join([dir, path]) )] - pprint( dirlist ) - print 'component list: ' - print [comp.name for comp in components] - if force is True: - print 'force mode, continue.' - return True - else: - while answer not in ['Y','y','yes','N','n','no']: - answer = raw_input('Continue? [y/n]') - if answer.lower().startswith('n'): - return False - elif answer.lower().startswith('y'): + if not os.listdir(dirname): + return True + else: + if force is True: return True - else: - raise ValueError( ' '.join(['answer can not have this value!', - answer]) ) - -def chunks(l, n): - return [l[i:i+n] for i in range(0, len(l), n)] - -def split(comps): - # import pdb; pdb.set_trace() - splitComps = [] - for comp in comps: - if hasattr( comp, 'fineSplitFactor') and comp.fineSplitFactor>1: - subchunks = range(comp.fineSplitFactor) - for ichunk, chunk in enumerate([(f,i) for f in comp.files for i in subchunks]): - newComp = copy.deepcopy(comp) - newComp.files = [chunk[0]] - newComp.fineSplit = ( chunk[1], comp.fineSplitFactor ) - newComp.name = '{name}_Chunk{index}'.format(name=newComp.name, - index=ichunk) - splitComps.append( newComp ) - elif hasattr( comp, 'splitFactor') and comp.splitFactor>1: - chunkSize = len(comp.files) / comp.splitFactor - if len(comp.files) % comp.splitFactor: - chunkSize += 1 - # print 'chunk size',chunkSize, len(comp.files), comp.splitFactor - for ichunk, chunk in enumerate( chunks( 
comp.files, chunkSize)): - newComp = copy.deepcopy(comp) - newComp.files = chunk - newComp.name = '{name}_Chunk{index}'.format(name=newComp.name, - index=ichunk) - splitComps.append( newComp ) - else: - splitComps.append( comp ) - return splitComps - + else: + print 'directory %s already exists' % dirname + print 'contents: ' + dirlist = [path for path in os.listdir(dirname) \ + if os.path.isdir( '/'.join([dirname, path]) )] + pprint( dirlist ) + print 'component list: ' + print [comp.name for comp in components] + while answer not in ['Y','y','yes','N','n','no']: + answer = raw_input('Continue? [y/n]') + if answer.lower().startswith('n'): + return False + elif answer.lower().startswith('y'): + return True + else: + raise ValueError( ' '.join(['answer can not have this value!', + answer]) ) + _heppyGlobalOptions = {} @@ -169,7 +141,9 @@ def main( options, args, parser ): _heppyGlobalOptions[opt] = True file = open( cfgFileName, 'r' ) - cfg = imp.load_source( 'PhysicsTools.HeppyCore.__cfg_to_run__', cfgFileName, file) + sys.path.append( os.path.dirname(cfgFileName) ) + cfg = imp.load_source( 'PhysicsTools.HeppyCore.__cfg_to_run__', + cfgFileName, file) selComps = [comp for comp in cfg.config.components if len(comp.files)>0] selComps = split(selComps) @@ -186,7 +160,6 @@ def main( options, args, parser ): ## workaround for a scoping problem in ipython+multiprocessing import PhysicsTools.HeppyCore.framework.heppy_loop as ML for comp in selComps: - print 'submitting', comp.name pool.apply_async( ML.runLoopAsync, [comp, outDir, 'PhysicsTools.HeppyCore.__cfg_to_run__', options], callback=ML.callBack) pool.close() @@ -197,3 +170,69 @@ def main( options, args, parser ): global loop loop = runLoop( comp, outDir, cfg.config, options ) return loop + + +def create_parser(): + from optparse import OptionParser + + parser = OptionParser() + parser.usage = """ + %prog + Start the processing of the jobs defined in your configuration file. 
+ """ + parser.add_option("-N", "--nevents", + dest="nevents", + type="int", + help="number of events to process", + default=None) + parser.add_option("-p", "--nprint", + dest="nprint", + help="number of events to print at the beginning", + default=5) + parser.add_option("-e", "--iEvent", + dest="iEvent", + help="jump to a given event. ignored in multiprocessing.", + default=None) + parser.add_option("-f", "--force", + dest="force", + action='store_true', + help="don't ask questions in case output directory already exists.", + default=False) + parser.add_option("-i", "--interactive", + dest="interactive", + action='store_true', + help="stay in the command line prompt instead of exiting", + default=False) + parser.add_option("-t", "--timereport", + dest="timeReport", + action='store_true', + help="Make a report of the time used by each analyzer", + default=False) + parser.add_option("-v", "--verbose", + dest="verbose", + action='store_true', + help="increase the verbosity of the output (from 'warning' to 'info' level)", + default=False) + parser.add_option("-q", "--quiet", + dest="quiet", + action='store_true', + help="do not print log messages to screen.", + default=False) + parser.add_option("-o", "--option", + dest="extraOptions", + type="string", + action="append", + default=[], + help="Save one extra option (either a flag, or a key=value pair) that can be then accessed from the job config file") + parser.add_option("-j", "--ntasks", + dest="ntasks", + type="int", + help="number of parallel tasks to span", + default=10) + parser.add_option("--memcheck", + dest="memCheck", + action='store_true', + help="Activate memory checks per event", + default=False) + + return parser diff --git a/PhysicsTools/HeppyCore/python/framework/looper.py b/PhysicsTools/HeppyCore/python/framework/looper.py index 32bb6d64f78ac..98fb55ca09f2d 100644 --- a/PhysicsTools/HeppyCore/python/framework/looper.py +++ b/PhysicsTools/HeppyCore/python/framework/looper.py @@ -1,6 +1,8 @@ # Copyright 
(C) 2014 Colin Bernet # https://github.com/cbernet/heppy/blob/master/LICENSE +import ROOT +ROOT.PyConfig.IgnoreCommandLineOptions = True import os import sys import imp @@ -9,6 +11,7 @@ from math import ceil from event import Event import timeit +from PhysicsTools.HeppyCore.framework.exceptions import UserStop import resource import json @@ -116,7 +119,9 @@ def doSigUsr2(sig,frame): self.nEvents = None if hasattr(self.cfg_comp,"options"): print self.cfg_comp.files,self.cfg_comp.options - self.events = config.events_class(self.cfg_comp.files, tree_name,options=self.cfg_comp.options) + self.events = config.events_class(self.cfg_comp.files, + tree_name, + options=self.cfg_comp.options) else : self.events = config.events_class(self.cfg_comp.files, tree_name) if hasattr(self.cfg_comp, 'fineSplit'): @@ -142,7 +147,22 @@ def doSigUsr2(sig,frame): self.setup = Setup(config, services) def _build(self, cfg): - theClass = cfg.class_object + try: + theClass = cfg.class_object + except AttributeError: + errfgmt = 'an object of class {cfg_class}'.format( + cfg_class=cfg.__class__ + ) + if type(cfg) is type: + errfgmt = 'a class named {class_name}'.format( + class_name=cfg.__name__ + ) + err=''' +The looper is trying to build an analyzer configured by {errfgmt}. + +Make sure that the configuration object is of class cfg.Analyzer. + '''.format(errfgmt=errfgmt) + raise ValueError(err) obj = theClass( cfg, self.cfg_comp, self.outDir ) return obj @@ -155,8 +175,14 @@ def _prepareOutput(self, name): os.mkdir( tmpname ) break except OSError: - index += 1 - tmpname = '%s_%d' % (name, index) + # failed to create the directory + # is it empty? 
+ if not os.listdir(tmpname): + break # it is, so use it + else: + # if not we append a number to the directory name + index += 1 + tmpname = '%s_%d' % (name, index) if index == 2000: raise ValueError( "More than 2000 output folder with same name or 2000 attempts failed, please clean-up, change name or check permissions") return tmpname @@ -173,47 +199,74 @@ def loop(self): nEvents = self.nEvents firstEvent = self.firstEvent iEv = firstEvent - if nEvents is None or int(nEvents) > len(self.events) : - nEvents = len(self.events) + self.nEvProcessed = 0 + if nEvents is None or int(nEvents)-firstEvent > len(self.events) : + nEvents = len(self.events) - firstEvent else: nEvents = int(nEvents) - eventSize = nEvents self.logger.info( 'starting loop at event {firstEvent} '\ - 'to process {eventSize} events.'.format(firstEvent=firstEvent, - eventSize=eventSize)) + 'to process {nEvents} events.'.format(firstEvent=firstEvent, + nEvents=nEvents)) self.logger.info( str( self.cfg_comp ) ) for analyzer in self.analyzers: analyzer.beginLoop(self.setup) - try: - for iEv in range(firstEvent, firstEvent+eventSize): - # if iEv == nEvents: - # break - if iEv%100 ==0: - # print 'event', iEv + + if hasattr(self.events, '__getitem__'): + # events backend supports indexing, e.g. 
CMS, FCC, bare root + for iEv in range(firstEvent, firstEvent+nEvents): + if iEv%100 == 0: if not hasattr(self,'start_time'): - print 'event', iEv + self.logger.info( 'event {iEv}'.format(iEv=iEv)) self.start_time = timeit.default_timer() self.start_time_event = iEv else: - print 'event %d (%.1f ev/s)' % (iEv, (iEv-self.start_time_event)/float(timeit.default_timer() - self.start_time)) - - self.process( iEv ) - if iEv> 32 + + @staticmethod + def is_ecal ( ident): + return Identifier.get_type(ident) == Identifier.PFOBJECTTYPE.ECALCLUSTER + + @staticmethod + def is_hcal ( ident): + return Identifier.get_type(ident) == Identifier.PFOBJECTTYPE.HCALCLUSTER + + @staticmethod + def is_track ( ident): + return Identifier.get_type(ident) == Identifier.PFOBJECTTYPE.TRACK + + @staticmethod + def is_block ( ident): + return Identifier.get_type(ident) == Identifier.PFOBJECTTYPE.BLOCK + + @staticmethod + def is_rec_particle ( ident): + return Identifier.get_type(ident) == Identifier.PFOBJECTTYPE.RECPARTICLE + + @staticmethod + def is_particle ( ident): + return Identifier.get_type(ident) == Identifier.PFOBJECTTYPE.PARTICLE + + @staticmethod + def type_short_code(ident): + ''' Returns code + e = ecal + h = hcal + t = track + p = particle + r = reconstructed particle (this will soon go) + b = block + ''' + typelist=".ehtprb..." 
#the enum value (0 to 8) will index into this and return E is it is ECAL etc + return typelist[Identifier.get_type(ident)] + + @staticmethod + def pretty(ident): + return Identifier.type_short_code(ident) + str(Identifier.get_unique_id(ident)) + + @classmethod + def reset(cls): + cls._id=count(1) + pdebugger.info("reset ID") + return diff --git a/PhysicsTools/HeppyCore/python/papas/data/pfevent.py b/PhysicsTools/HeppyCore/python/papas/data/pfevent.py new file mode 100644 index 0000000000000..ffb76279c4d05 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/data/pfevent.py @@ -0,0 +1,62 @@ +from PhysicsTools.HeppyCore.papas.data.identifier import Identifier + + + +class PFEvent(object): + '''PFEvent is used to allow addition of a function get_object to an Event class + get_object() allows a cluster or track to be found from its id + May want to merge this with the history class + + attributes: + tracks is a dictionary : {id1:track1, id2:track2, ...} + ecal is a dictionary : {id1:ecal1, id2:ecal2, ...} + hcal is a dictionary : {id1:hcal1, id2:hcal2, ...} + blocks = optional dictionary of blocks : {id1:block1, id2:block2, ...} + + usage: + pfevent=PFEvent(event, self.tracksname, self.ecalsname, self.hcalsname, self.blocksname) + obj1 = pfevent.get_object(id1) + ''' + def __init__(self, event, tracksname = 'tracks', ecalsname = 'ecal_clusters', hcalsname = 'hcal_clusters', blocksname = 'blocks', + sim_particlesname = "None", rec_particlesname = "reconstructed_particles"): + '''arguments + event: must contain + tracks dictionary : {id1:track1, id2:track2, ...} + ecal dictionary : {id1:ecal1, id2:ecal2, ...} + hcal dictionary : {id1:hcal1, id2:hcal2, ...} + + and these must be names according to ecalsname etc + blocks, sim_particles and rec_particles are optional + ''' + self.tracks = getattr(event, tracksname) + self.ecal_clusters = getattr(event, ecalsname) + self.hcal_clusters = getattr(event, hcalsname) + + self.blocks = [] + if hasattr(event, blocksname): + 
self.blocks = getattr(event, blocksname) + if hasattr(event,sim_particlesname): + self.sim_particles= getattr(event, sim_particlesname) + if hasattr(event,rec_particlesname): #todo think about naming + self.reconstructed_particles= getattr(event, rec_particlesname) + + def get_object(self, uniqueid): + ''' given a uniqueid return the underlying obejct + ''' + type = Identifier.get_type(uniqueid) + if type == Identifier.PFOBJECTTYPE.TRACK: + return self.tracks[uniqueid] + elif type == Identifier.PFOBJECTTYPE.ECALCLUSTER: + return self.ecal_clusters[uniqueid] + elif type == Identifier.PFOBJECTTYPE.HCALCLUSTER: + return self.hcal_clusters[uniqueid] + elif type == Identifier.PFOBJECTTYPE.PARTICLE: + return self.sim_particles[uniqueid] + elif type == Identifier.PFOBJECTTYPE.RECPARTICLE: + return self.reconstructed_particles[uniqueid] + elif type == Identifier.PFOBJECTTYPE.BLOCK: + return self.blocks[uniqueid] + else: + assert(False) + + diff --git a/PhysicsTools/HeppyCore/python/papas/detector.py b/PhysicsTools/HeppyCore/python/papas/detector.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/PhysicsTools/HeppyCore/python/papas/detectors/CMS.py b/PhysicsTools/HeppyCore/python/papas/detectors/CMS.py new file mode 100644 index 0000000000000..88543f9925907 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/detectors/CMS.py @@ -0,0 +1,163 @@ +from detector import Detector, DetectorElement +import material as material +from geometry import VolumeCylinder +import math +import PhysicsTools.HeppyCore.statistics.rrandom as random + +class ECAL(DetectorElement): + + def __init__(self): + volume = VolumeCylinder('ecal', 1.55, 2.1, 1.30, 2. 
) + mat = material.Material('ECAL', 8.9e-3, 0.275) + self.eta_crack = 1.479 + self.emin = {'barrel':0.3, 'endcap':1.} + self.eres = {'barrel':[4.22163e-02, 1.55903e-01, 7.14166e-03], 'endcap':[-2.08048e-01, 3.25097e-01, 7.34244e-03]} + self.eresp = {'barrel':[1.00071, -9.04973, -2.48554], 'endcap':[9.95665e-01, -3.31774, -2.11123]} + super(ECAL, self).__init__('ecal', volume, mat) + + def energy_resolution(self, energy, eta=0.): + part = 'barrel' + if abs(eta)>1.479 and abs(eta)<3.0: + part = 'endcap' + stoch = self.eres[part][0] / math.sqrt(energy) + noise = self.eres[part][1] / energy + constant = self.eres[part][2] + return math.sqrt( stoch**2 + noise**2 + constant**2) + + def energy_response(self, energy, eta=0): + part = 'barrel' + if abs(eta)>self.eta_crack: + part = 'endcap' + return self.eresp[part][0]/(1+math.exp((energy-self.eresp[part][1])/self.eresp[part][2])) #using fermi-dirac function : [0]/(1 + exp( (energy-[1]) /[2] )) + + def cluster_size(self, ptc): + pdgid = abs(ptc.pdgid()) + if pdgid==22 or pdgid==11: + return 0.04 + else: + return 0.07 + + def acceptance(self, cluster): + energy = cluster.energy + eta = abs(cluster.position.Eta()) + if eta < self.eta_crack: + return energy>self.emin['barrel'] + elif eta < 2.93: + return energy>self.emin['endcap'] and cluster.pt>0.2 + else: + return False + + def space_resolution(self, ptc): + pass + +class HCAL(DetectorElement): + + def __init__(self): + volume = VolumeCylinder('hcal', 2.9, 3.6, 1.9, 2.6 ) + mat = material.Material('HCAL', None, 0.17) + self.eta_crack = 1.3 + self.eres = {'barrel':[0.8062, 2.753, 0.1501], 'endcap':[6.803e-06, 6.676, 0.1716]} + self.eresp = {'barrel':[1.036, 4.452, -2.458], 'endcap':[1.071, 9.471, -2.823]} + super(HCAL, self).__init__('ecal', volume, mat) + + def energy_resolution(self, energy, eta=0.): + part = 'barrel' + if abs(eta)>self.eta_crack: + part = 'endcap' + stoch = self.eres[part][0] / math.sqrt(energy) + noise = self.eres[part][1] / energy + constant = 
self.eres[part][2] + return math.sqrt( stoch**2 + noise**2 + constant**2) + + def energy_response(self, energy, eta=0): + part = 'barrel' + if abs(eta)>self.eta_crack: + part = 'endcap' + return self.eresp[part][0]/(1+math.exp((energy-self.eresp[part][1])/self.eresp[part][2])) #using fermi-dirac function : [0]/(1 + exp( (energy-[1]) /[2] )) + + def cluster_size(self, ptc): + return 0.2 + + def acceptance(self, cluster): + energy = cluster.energy + eta = abs(cluster.position.Eta()) + if eta < self.eta_crack : + if energy>1.: + return random.uniform(0,1)<(1/(1+math.exp((energy-1.93816)/(-1.75330)))) + else: + return False + elif eta < 3. : + if energy>1.1: + if energy<10.: + return random.uniform(0,1)<(1.05634-1.66943e-01*energy+1.05997e-02*(energy**2)) + else: + return random.uniform(0,1)<(8.09522e-01/(1+math.exp((energy-9.90855)/-5.30366))) + else: + return False + elif eta < 5.: + return energy>7. + else: + return False + + def space_resolution(self, ptc): + pass + + + +class Tracker(DetectorElement): + #TODO acceptance and resolution depend on the particle type + + def __init__(self): + volume = VolumeCylinder('tracker', 1.29, 1.99) + # care : there is the beam pipe ! Shouldn't be an inner radius specified ? 
+ mat = material.void + super(Tracker, self).__init__('tracker', volume, mat) + + def acceptance(self, track): + # return False + pt = track.pt + eta = abs(track.p3.Eta()) + if eta < 1.35 and pt>0.5: + return random.uniform(0,1)<0.95 + elif eta < 2.5 and pt>0.5: + return random.uniform(0,1)<0.9 + else: + return False + + def pt_resolution(self, track): + # TODO: depends on the field + pt = track.pt + return 1.1e-2 + + + +class Field(DetectorElement): + + def __init__(self, magnitude): + self.magnitude = magnitude + volume = VolumeCylinder('field', 2.9, 3.6) + mat = material.void + super(Field, self).__init__('tracker', volume, mat) + +class BeamPipe(DetectorElement): + + def __init__(self): + #Material Seamless AISI 316 LN, External diameter 53 mm, Wall thickness 1.5 mm (hors cms) X0 1.72 cm + #in CMS, radius 25 mm (?), tchikness 8mm, X0 35.28 cm : berylluim + factor = 1.0 + volume = VolumeCylinder('beampipe', 2.5e-2*factor+0.8e-3, 1.98, 2.5e-2*factor, 1.9785 ) + mat = material.Material('BeamPipe', 35.28e-2, 0) + super(BeamPipe, self).__init__('beampipe', volume, mat) + + +class CMS(Detector): + + def __init__(self): + super(CMS, self).__init__() + self.elements['tracker'] = Tracker() + self.elements['ecal'] = ECAL() + self.elements['hcal'] = HCAL() + self.elements['field'] = Field(3.8) + self.elements['beampipe'] = BeamPipe() + +cms = CMS() diff --git a/PhysicsTools/HeppyCore/python/papas/detectors/base.py b/PhysicsTools/HeppyCore/python/papas/detectors/base.py new file mode 100644 index 0000000000000..05423f40ed6de --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/detectors/base.py @@ -0,0 +1,38 @@ +import operator + +class Material(object): + def __init__(self, name, x0, lambdaI): + self.name = name + self.x0 = x0 + self.lambdaI = lambdaI + + +material_CMS_ECAL = Material('CMS_ECAL', 8.9e-3, 0.25) +material_CMS_HCAL = Material('CMS_HCAL', None, 0.17) +material_void = Material('void', 0., 0.) 
+ + +class DetectorElement(object): + def __init__(self, name, volume, material, field, cluster_size=0.1): + self.name = name + self.volume = volume + self.material = material + self.field = field + self.cluster_size = cluster_size + + +class Detector(object): + #TODO validate geometry consistency (no hole, no overlapping volumes) + def __init__(self): + self.elements = dict() + self._cylinders = [] + + def cylinders(self): + if len(self._cylinders): + return self._cylinders + for element in self.elements.values(): + if element.volume.inner is not None: + self._cylinders.append(element.volume.inner) + self._cylinders.append(element.volume.outer) + self._cylinders.sort(key=operator.attrgetter("rad")) + return self._cylinders diff --git a/PhysicsTools/HeppyCore/python/papas/detectors/detector.py b/PhysicsTools/HeppyCore/python/papas/detectors/detector.py new file mode 100644 index 0000000000000..66b5b55ed3f98 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/detectors/detector.py @@ -0,0 +1,25 @@ +import operator + +class DetectorElement(object): + + def __init__(self, name, volume, material): + self.name = name + self.volume = volume + self.material = material + +class Detector(object): + #TODO validate geometry consistency (no hole, no overlapping volumes) + def __init__(self): + self.elements = dict() + self._cylinders = [] + + def cylinders(self): + '''Return list of surface cylinders sorted by increasing radius.''' + if len(self._cylinders): + return self._cylinders + for element in self.elements.values(): + if element.volume.inner is not None: + self._cylinders.append(element.volume.inner) + self._cylinders.append(element.volume.outer) + self._cylinders.sort(key=operator.attrgetter("rad")) + return self._cylinders diff --git a/PhysicsTools/HeppyCore/python/papas/detectors/geometry.py b/PhysicsTools/HeppyCore/python/papas/detectors/geometry.py new file mode 100644 index 0000000000000..97c4594562c5a --- /dev/null +++ 
b/PhysicsTools/HeppyCore/python/papas/detectors/geometry.py @@ -0,0 +1,43 @@ + +class SurfaceCylinder(object): + + def __init__(self, name, rad, z): + self.name = name + self.rad = rad + self.z = z + + def __str__(self): + return '{} : {}, R={:5.2f}, z={:5.2f}'.format( + self.__class__.__name__, + self.name, + self.rad, + self.z + ) + + +class VolumeCylinder(object): + '''Implement sub even for pipes, and consistency test: all space must be filled.''' + + def __init__(self, name, orad, oz, irad=None, iz=None): + if not isinstance(name, basestring): + raise ValueError('first parameter must be a string') + self.name = name + self.outer = SurfaceCylinder('_'.join([self.name, 'out']), orad, oz) + self.inner = None + if irad and iz: + if irad > orad: + raise ValueError('outer radius of subtracted cylinder must be smaller') + if iz > oz : + raise ValueError('outer z of subtracted cylinder must be smaller') + if irad is None or iz is None: + raise ValueError('must specify both irad and iz.') + self.inner = SurfaceCylinder('_'.join([self.name, 'in']), irad, iz) + + def contains(self, point): + perp = point.Perp() + if abs(point.Z())= self.inner.rad and perp < self.outer.rad + elif abs(point.Z()) 1e-9: + xp = -xp + xm = math.sqrt(r2**2 - ym**2) + if abs((xm-x1)**2 + (ym-y1)**2 - r1**2) > 1e-9: + xm = -xm + if switchxy: + xm, ym = ym, xm + xp, yp = yp, xp + return xm, ym, xp, yp + + +if __name__ == '__main__': + + from ROOT import TEllipse, TH2F, TCanvas, TMarker + + can = TCanvas("can","", 600, 600) + suph = TH2F("suph", "", 10, -5, 5, 10, -5, 5) + suph.Draw() + x1, y1, r1, r2 = 0., 1.8, 1., 2. 
+ results = circle_intersection(x1, y1, r1, r2) + c1 = TEllipse(x1, y1, r1) + c1.Draw('same') + c2 = TEllipse(0., 0., r2) + c2.Draw('same') + c1.SetFillStyle(0) + c2.SetFillStyle(0) + mm = TMarker(results[0], results[1], 8) + mp = TMarker(results[2], results[3], 21) + mm.Draw('same') + mp.Draw('same') + can.Update() diff --git a/PhysicsTools/HeppyCore/python/papas/graphtools/DAG.py b/PhysicsTools/HeppyCore/python/papas/graphtools/DAG.py new file mode 100644 index 0000000000000..5775c84bb1e4f --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/graphtools/DAG.py @@ -0,0 +1,199 @@ +import pprint + +from collections import deque + +'''Directed Acyclic Graph (DAG) with floodfill and breadth first traversal algorithms + +Each node may have several children. +Each node may have several parents. The DAG may have multiple roots ( a node without a parent) +It has no loops when directed, but may have loops when traversed in an undirected way + +Traversals +1: deal with all nodes at the same level and then with all children of these nodes etc (breadth-first search or BFS) +2: deal with all children and finally with the node (depth-first search of DFS) + +A "visitor pattern" is used to allow the algorithms to be separated from the object +on which it operates and without modifying the objects structures (eg a visited flag can be +owned by the algorithm) + +The visitor pattern also allows the visit method to dynamically depend on both the object and the visitor + +example of setting up Nodes: + self.nodes = dict( (i, Node(i) ) for i in range(10) + self.nodes[0].add_child(self.nodes[1]) + self.nodes[0].add_child(self.nodes[2]) +traversing nodes: + BFS = BreadthFirstSearchIterative(self.nodes[0],"undirected") + see alos test_DAG.py +''' + + +class Node(object): + ''' + Implements a Directed Acyclic Graph: + each node has an arbitrary number of children and parents + There are no loops in the directed DAG + But there may be loops in the undirected version of the DAG + + 
attributes: + value = the item of interest (around which the node is wrapped) + children = list of child nodes + parents = list of parent node + undirected_links = combined list of parents and children + ''' + + def __init__(self, value): + '''constructor. + value can be anything, even a complex object. + example: + newnode=Node(uniqueid) + ''' + self.value = value # wrapped object + self.children = [] + self.parents = [] + self.undirected_links = [] #the union of the parents and children (other implementations possible) + + def get_value(self): + return self.value + + def accept(self, visitor): + visitor.visit(self) + + def add_child(self, child): + '''set the children''' + self.children.append(child) + child.add_parent(self) + self.undirected_links.append(child) + + def add_parent(self, parent): + '''set the parents''' + self.parents.append(parent) + self.undirected_links.append(parent) + + def remove_all_links_to(self,toremove): + '''checks for element toremove in the list of children and parents and + removes any links from both this and from the toremove node + ''' + if (toremove in self.parents): + self.parents.remove(toremove) + toremove.children.remove(self) + if (toremove in self.children): + self.children.remove(toremove) + toremove.parents.remove(self) + + def get_linked_nodes(self, type): #ask colin, I imagine there is a more elegant Python way to do this + #alice todo make type a enumeration and not a string? 
+ '''return a list of the linked children/parents/undirected links''' + if (type is "children"): + return self.children + if(type is "parents"): + return self.parents + if(type is "undirected"): + return self.undirected_links + + def __repr__(self): + '''unique string representation''' + return self.__str__() + + def __str__(self): + '''unique string representation''' + return str('node: {val} {children}'.format( + val = self.value, + children = self.children + ) ) + + +class BreadthFirstSearch(object): + + def __init__(self,root, link_type): + '''Perform the breadth first recursive search of the nodes''' + self.result = [] + self.root = root + self.visited = dict() + self.bfs_recursive([root],link_type) + + def visit(self, node): + if self.visited.get(node, False): + return + self.result.append( node ) + self.visited[node] = True + + def bfs_recursive(self,nodes, link_type ): + '''Breadth first recursive implementation + each recursion is one level down the tree + link_type can be "children", "parents","undirected" ''' + link_nodes = [] + if len(nodes) is 0: + return + + for node in nodes: # collect a list of all the next level of nodes + if (self.visited.get(node, False)): + continue + link_nodes.extend(node.get_linked_nodes(link_type)) + for node in nodes: #add these nodes onto list and mark as visited + if (self.visited.get(node, False)): + continue + node.accept(self) + + self.bfs_recursive(link_nodes, link_type) + + +class BreadthFirstSearchIterative(object): + + def __init__(self,root, link_type): + '''Perform the breadth first iterative search of the nodes''' + self.visited = {} + self.result = [] + self.bfs_iterative(root,link_type) + + def visit(self, node): + if self.visited.get(node, False): + return + self.result.append( node ) + self.visited[node] = True + + def bfs_iterative(self,node, link_type ): + '''Breadth first iterative implementation + using a deque to order the nodes + link_type can be "children", "parents","undirected" ''' + + # Create a 
deque for the Breadth First Search + todo = deque() + todo.append( node) + + while len(todo): + node = todo.popleft() + if self.visited.get(node,False): #check if already processed + continue + node.accept(self) + for linknode in node.get_linked_nodes(link_type): + if self.visited.get(linknode,False): #check if already processed + continue + todo.append( linknode) + + +class DAGFloodFill(object): + + def __init__(self, elements, first_label = 1): + '''Iterate through all nodes and + use Breadth first search to find connected groups''' + self.visited = {} + self.label = first_label + self.visited = dict() + self.blocks = [] + for uid, node in elements.iteritems(): + if self.visited.get(node, False): #already done so skip the rest + continue + + #find connected nodes + bfs = BreadthFirstSearchIterative(node,"undirected") + + # set all connected elements to have a visited flag =true + for n in bfs.result : + self.visited.update({n: True}) + #add into the set of blocks + self.blocks.append( bfs.result) + self.label += 1 + + + diff --git a/PhysicsTools/HeppyCore/python/papas/graphtools/edge.py b/PhysicsTools/HeppyCore/python/papas/graphtools/edge.py new file mode 100644 index 0000000000000..8273e17d1fe47 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/graphtools/edge.py @@ -0,0 +1,77 @@ +from PhysicsTools.HeppyCore.papas.data.identifier import Identifier +class Edge(object): + '''An Edge stores end node ids, distance between the nodes, and whether they are linked + + attributes: + + id1 : element1 uniqueid generated from Identifier class + id2 : element2 uniqueid generated from Identifier class + key : unique key value created from id1 and id2 (order of id1 and id2 is not important) + distance: distance between two elements + is_linked : boolean T/F + edge_type : "hcal_track" "ecal_track" etc + ''' + + def __init__(self, id1, id2, is_linked, distance): + ''' The Edge knows the ids of its ends, the distance between the two ends and whether or not they are linked 
+ id1 : element1 uniqueid generated from Identifier class + id2 : element2 uniqueid generated from Identifier class + is_linked : boolean T/F + distance: distance between two elements + ''' + self.id1 = id1 + self.id2 = id2 + self.distance = distance + self.linked = is_linked + self.edge_type = self._edge_type() + + #for reconstruction we do not use ecal-hcal links (may need to be moved if we use these edges for merging) + if self.edge_type == "ecal_hcal": + self.is_linked = False + self.key = Edge.make_key(id1,id2) + + def _edge_type(self): + ''' produces an edge_type string eg "ecal_track" + the order of id1 an id2 does not matter, + eg for one track and one ecal the type will always be "ecal_track" (and never be a "track_ecal") + ''' + #consider creating an ENUM instead for the edge_type + shortid1=Identifier.type_short_code(self.id1); + shortid2=Identifier.type_short_code(self.id2); + if shortid1 == shortid2: + if shortid1 == "h": + return "hcal_hcal" + elif shortid1 == "e": + return "ecal_ecal" + elif shortid1 == "t": + return "track_track" + elif (shortid1=="h" and shortid2=="t" or shortid1=="t" and shortid2=="h"): + return "hcal_track" + elif (shortid1=="e" and shortid2=="t" or shortid1=="t" and shortid2=="e"): + return "ecal_track" + elif (shortid1=="e" and shortid2=="h" or shortid1=="h" and shortid2=="e"): + return "ecal_hcal" + + return "unknown" + + def __str__(self): + ''' String descriptor of the edge + for example: + Edge: 3303164520272<->3303164436240 = No distance (link = False) + ''' + if self.distance==None: + descrip = 'Edge: {id1:d}<->{id2:d} = No distance (link = {linked}) '.format(id1=self.id1,id2=self.id2,linked=self.linked) + else : + descrip = 'Edge: {id1}<->{id2} = {dist:8.4f} (link = {linked}) '.format(id1=self.id1,id2=self.id2,dist=self.distance,linked=self.linked) + return descrip + + def __repr__(self): + return self.__str__() + + @staticmethod + def make_key(id1,id2): + '''method to create a key based on two ids that can then be used 
to retrieve a specific edge + ''' + return hash(tuple(sorted([id1,id2]))) + + \ No newline at end of file diff --git a/PhysicsTools/HeppyCore/python/papas/graphtools/graphbuilder.py b/PhysicsTools/HeppyCore/python/papas/graphtools/graphbuilder.py new file mode 100644 index 0000000000000..6bcaf7dea860e --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/graphtools/graphbuilder.py @@ -0,0 +1,65 @@ +#todo remove pfevent from this class once we have written a helper class to print the block and its elements +from DAG import Node, DAGFloodFill +from PhysicsTools.HeppyCore.utils.pdebug import pdebugger +from PhysicsTools.HeppyCore.papas.data.identifier import Identifier +import collections +#todo remove pfevent from this class once we have written a helper class to print the block and its elements + + +class GraphBuilder(object): + ''' GraphBuilder takes a set of identifiers and a dict of associated edges which have distance and link info + It uses the distances between elements to construct a set of subgraphs + Each element will end up in one (and only one) subgraph + + attributes: + + ids : list of unique identifiers eg of tracks, clusters etc + edges : dict of edges which contains all edges between the ids (and maybe more) + an edge records the distance between two ids + nodes : a set of nodes corresponding to the unique ids which is used to construct a graph + and thus find distinct blocks + subgraphs : a list of subgraphs, each subgraph is a list of connected ids + + Usage example: + graph = GraphBuilder(ids, edges) + + ''' + def __init__(self, ids, edges): + ''' + ids : list of unique identifiers eg of tracks, clusters etc + edges : dict of edges which contains all edges between the ids (and maybe more) + an edge records the distance/link between two ids + ''' + self.ids = ids + self.edges = edges + + # build the block nodes (separate graph which will use distances between items to determine links) + self.nodes = dict((idt, Node(idt)) for idt in ids) + for edge 
in edges.itervalues(): + #add linkage info into the nodes dictionary + if edge.linked: #this is actually an undirected link - OK for undirected searches + self.nodes[edge.id1].add_child(self.nodes[edge.id2]) + + # build the subgraphs of connected nodes + self.subgraphs = [] + for subgraphlist in DAGFloodFill(self.nodes).blocks: # change to subgraphs + element_ids = [] + # NB the nodes that are found by FloodFill are the Nodes describing links between items + # we want the ids of these nodes + for node in subgraphlist: + element_ids.append(node.get_value()) + self.subgraphs.append(sorted(element_ids)) #newsort + + def __str__(self): + descrip = "{ " + + for subgraph in self.subgraphs: + descrip = descrip + " (" + for elemid in subgraph: + descrip = descrip + str(elem) + " " + descrip = descrip + " )" + descrip = descrip + "}\n" + return descrip + + def __repr__(self): + return self.__str__() \ No newline at end of file diff --git a/PhysicsTools/HeppyCore/python/papas/graphtools/test_DAG.py b/PhysicsTools/HeppyCore/python/papas/graphtools/test_DAG.py new file mode 100644 index 0000000000000..ac082dfdb04fe --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/graphtools/test_DAG.py @@ -0,0 +1,73 @@ +from DAG import Node, BreadthFirstSearchIterative, BreadthFirstSearch +import unittest + +class TreeTestCase( unittest.TestCase ): + + def setUp(self): + ''' + called before every test. 
+ 0 and 8 are root/head nodes + + + 8 + \ + \ + 9 + \ + \ + 4 + / + / + 1--5--7 + / \ + / \ + 0--2 6 + \ / + \ / + 3 + + ''' + # building all nodes + self.nodes = dict( (i, Node(i) ) for i in range(10) ) + + self.nodes[0].add_child(self.nodes[1]) + self.nodes[0].add_child(self.nodes[2]) + self.nodes[0].add_child(self.nodes[3]) + self.nodes[1].add_child(self.nodes[4]) + self.nodes[1].add_child(self.nodes[5]) + self.nodes[1].add_child(self.nodes[6]) + self.nodes[5].add_child(self.nodes[7]) + self.nodes[8].add_child(self.nodes[9]) + self.nodes[9].add_child(self.nodes[4]) + self.nodes[3].add_child(self.nodes[6]) + + + + def test_BFS_visitor_pattern_iterative_undirected(self): + BFS = BreadthFirstSearchIterative(self.nodes[0],"undirected") + # the result is equal to [0, 1, 2, 3, 4, 5, 6, 9, 7, 8] + values=[] + for x in BFS.result: + values.append(x.value) + self.assertEqual(values, [0, 1, 2, 3, 4, 5, 6, 9, 7, 8] ) + + def test_BFS_visitor_pattern_children(self): + BFS = BreadthFirstSearch(self.nodes[0],"children") + # the result is equal to [0, 1, 2, 3, 4, 5, 6, 7] + values=[] + for x in BFS.result: + values.append(x.value) + self.assertEqual(values, range(8) ) + + def test_BFS_visitor_pattern_undirected(self): + + BFS = BreadthFirstSearch(self.nodes[0],"undirected") + # the result is equal to [0, 1, 2, 3, 4, 5, 6, 9, 7, 8] + values=[] + for x in BFS.result: + values.append(x.value) + self.assertEqual(values, [0, 1, 2, 3, 4, 5, 6, 9, 7, 8] ) + + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/papas/mergedclusterbuilder.py b/PhysicsTools/HeppyCore/python/papas/mergedclusterbuilder.py new file mode 100644 index 0000000000000..b9e81b31df7c0 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/mergedclusterbuilder.py @@ -0,0 +1,88 @@ +import itertools +from PhysicsTools.HeppyCore.papas.graphtools.graphbuilder import GraphBuilder +from PhysicsTools.HeppyCore.papas.graphtools.edge import Edge +from 
PhysicsTools.HeppyCore.papas.graphtools.DAG import Node +from PhysicsTools.HeppyCore.papas.pfobjects import MergedCluster +from PhysicsTools.HeppyCore.papas.data.identifier import Identifier +from PhysicsTools.HeppyCore.utils.pdebug import pdebugger + +class MergedClusterBuilder(GraphBuilder): + ''' MergingBlockBuilder takes particle flow elements of one cluster type eg ecal_in + and uses the distances between elements to construct a set of blocks ( of connected clusters) + The blocks will contain overlapping clusters and then be used to merge the clusters + + attributes: + merged - the dictionary of merged clusters + + Usage example: + (will return the merged clusters to the event) + event.ecal_clusters = MergingBlockBuilder(event.ecal_clusters, ruler).merged + + ''' + def __init__(self, clusters, ruler, history_nodes = None): + ''' + clusters a dictionary : {id1:ecal1, id2:ecal2, ...} + ruler is something that measures distance between two objects eg track and hcal + (see Distance class for example) + it should take the two objects as arguments and return a tuple + of the form + link_type = 'ecal_ecal', 'ecal_track' etc + is_link = true/false + distance = float + hist_nodes is an optional dictionary of Nodes : { id:Node1, id: Node2 etc} + it could for example contain the simulation history nodes + A Node contains the id of an item (cluster, track, particle etc) + and says what it is linked to (its parents and children) + if hist_nodes is provided it will be added to with the new block information + If hist_nodes is not provided one will be created, it will contain nodes + corresponding to each of the tracks, ecal etc and also for the blocks that + are created by the event block builder. 
+ ''' + self.clusters = clusters + + # the merged clusters will be stored here + self.merged = dict() + + # collate ids of clusters + uniqueids = list(clusters.keys()) + + #make the edges match cpp by using the same approach as cpp + edges = dict() + for obj1 in clusters.values(): + for obj2 in clusters.values(): + if obj1.uniqueid < obj2.uniqueid : + link_type, is_linked, distance = ruler(obj1, obj2) + edge = Edge(obj1.uniqueid, obj2.uniqueid, is_linked, distance) + #the edge object is added into the edges dictionary + edges[edge.key] = edge + + #make the subgraphs of clusters + super(MergedClusterBuilder, self).__init__(uniqueids, edges) + + #make sure we use the original history and update it as needed + self.history_nodes = history_nodes + + self._make_merged_clusters() + + def _make_merged_clusters(self): + #carry out the merging of linked clusters + for subgraphids in self.subgraphs: + subgraphids.sort() + first = None + supercluster =None + snode = None + for elemid in subgraphids : + if not first: + first = elemid + supercluster = MergedCluster(self.clusters[elemid]) + self.merged[supercluster.uniqueid] = supercluster; + if (self.history_nodes) : + snode = Node(supercluster.uniqueid) + self.history_nodes[supercluster.uniqueid] = snode + else: + thing = self.clusters[elemid] + supercluster += thing + if (self.history_nodes) : + self.history_nodes[elemid].add_child(snode) + pdebugger.info('Merged Cluster from {}\n'.format(self.clusters[elemid])) + pdebugger.info(str('Made {}\n'.format(supercluster))) diff --git a/PhysicsTools/HeppyCore/python/papas/multiple_scattering.py b/PhysicsTools/HeppyCore/python/papas/multiple_scattering.py new file mode 100644 index 0000000000000..ee3bc61b9e21d --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/multiple_scattering.py @@ -0,0 +1,99 @@ +import math +from scipy import constants +from numpy import sign +from ROOT import TLorentzVector, TVector3 +import PhysicsTools.HeppyCore.statistics.rrandom as random + +from 
PhysicsTools.HeppyCore.papas.path import Helix +from PhysicsTools.HeppyCore.papas.pfobjects import Particle + +# propagate untill surface +#_______________________________________________________________________________ +# find t_scat, time when scattering : + +def multiple_scattering( particle, detector_element, field ): + '''This function computes the scattering of a particle while propagating through the detector. + + As described in the pdg booklet, Passage of particles through matter, multiple scattering through small angles. + the direction of a charged particle is modified. + + This function takes a particle (that has been propagated until the detector element + where it will be scattered) and the detector element responsible for the scattering. + The magnetic field has to be specified in order to create the new trajectory. + + Then this function computes the new direction, randomly choosen according to + Moliere's theory of multiple scattering (see pdg booklet) and replaces the + initial path of the particle by this new scattered path. + + The particle can now be propagated in the next part of the detector. 
+ ''' + + if not particle.q(): + return + # reject particles that could not be extrapolated to detector element + # (particle created too late, out of the detector element) + surface_in = '{}_in'.format(detector_element.name) + surface_out = '{}_out'.format(detector_element.name) + if not surface_in in particle.path.points or \ + not surface_out in particle.path.points: + return + + #TODOCOLIN : check usage of private attributes + in_point = particle.path.points[surface_in] + out_point = particle.path.points[surface_out] + phi_in = particle.path.phi( in_point.X(), in_point.Y()) + phi_out = particle.path.phi( out_point.X(), out_point.Y()) + t_scat = particle.path.time_at_phi((phi_in+phi_out)*0.5) + # compute p4_t = p4 at t_scat : + p4_0 = particle.path.p4.Clone() + p4tx = p4_0.X()*math.cos(particle.path.omega*t_scat)\ + + p4_0.Y()*math.sin(particle.path.omega*t_scat) + p4ty =-p4_0.X()*math.sin(particle.path.omega*t_scat)\ + + p4_0.Y()*math.cos(particle.path.omega*t_scat) + p4tz = p4_0.Z() + p4tt = p4_0.T() + p4_t = TLorentzVector(p4tx, p4ty, p4tz, p4tt) + + # now, p4t will be modified with respect to the multiple scattering + # first one has to determine theta_0 the width of the gaussian : + P = p4_t.Vect().Dot(p4_t.Vect().Unit()) + deltat = particle.path.time_at_phi(phi_out)-particle.path.time_at_phi(phi_in) + x = abs(particle.path.path_length(deltat)) + X_0 = detector_element.material.x0 + + theta_0 = 1.0*13.6e-3/(1.0*particle.path.speed/constants.c*P)*abs(particle.path.charge) + theta_0 *= (1.0*x/X_0)**(1.0/2)*(1+0.038*math.log(1.0*x/X_0)) + + # now, make p4_t change due to scattering : + theta_space = random.gauss(0, theta_0*2.0**(1.0/2)) + psi = constants.pi*random.uniform(0,1) #double checked + p3i = p4_t.Vect().Clone() + e_z = TVector3(0,0,1) + #first rotation : theta, in the xy plane + a = p3i.Cross(e_z) + #this may change the sign, but randomly, as the sign of theta already is + p4_t.Rotate(theta_space,a) + #second rotation : psi (isotropic around initial 
direction) + p4_t.Rotate(psi,p3i.Unit()) + + # creating new helix, ref at scattering point : + helix_new_t = Helix(field, particle.path.charge, p4_t, + particle.path.point_at_time(t_scat)) + + # now, back to t=0 + p4sx = p4_t.X()*math.cos(-particle.path.omega*t_scat)\ + + p4_t.Y()*math.sin(-particle.path.omega*t_scat) + p4sy =-p4_t.X()*math.sin(-particle.path.omega*t_scat)\ + + p4_t.Y()*math.cos(-particle.path.omega*t_scat) + p4sz = p4_t.Z() + p4st = p4_t.T() + p4_scat = TLorentzVector(p4sx, p4sy, p4sz, p4st) + + # creating new helix, ref at new t0 point : + helix_new_0 = Helix(field, particle.path.charge, p4_scat, + helix_new_t.point_at_time(-t_scat)) + + # replacing the particle's path with the scattered one : + particle.set_path(helix_new_0, option = 'w') + + diff --git a/PhysicsTools/HeppyCore/python/papas/papas_exceptions.py b/PhysicsTools/HeppyCore/python/papas/papas_exceptions.py new file mode 100644 index 0000000000000..382d097bf4594 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/papas_exceptions.py @@ -0,0 +1,15 @@ + +class PropagationError(Exception): + def __init__(self, particle, addmsg=''): + msg = '''particle starts out of the detector, cannot propagate it.
+{addmsg} +\t{ptc} +\t\tvertex: {x:5.2f}, {y:5.2f}, {z:5.2f}\n'''.format(addmsg=addmsg, ptc=str(particle), + x=particle.vertex.X(), + y=particle.vertex.Y(), + z=particle.vertex.Z()) + super(PropagationError, self).__init__(msg) + + +class SimulationError(Exception): + pass diff --git a/PhysicsTools/HeppyCore/python/papas/path.py b/PhysicsTools/HeppyCore/python/papas/path.py new file mode 100644 index 0000000000000..8eca481c92be6 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/path.py @@ -0,0 +1,188 @@ +import math +from scipy import constants +from ROOT import TVector3, TLorentzVector +from PhysicsTools.HeppyCore.utils.deltar import deltaPhi +from collections import OrderedDict +import scipy.optimize as opti # need to compute impact parameters +from numpy import sign +import PhysicsTools.HeppyCore.statistics.rrandom as random + +class Path(object): + '''Path followed by a particle in 3D space. + Assumes constant speed magnitude both along the z axis and in the transverse plane. + ''' + + def __init__(self, p4, origin): + self.p4 = p4 + self.udir = p4.Vect().Unit() + self.origin = origin + self.speed = self.p4.Beta() * constants.c + self.points = OrderedDict() + self.points['vertex'] = origin + + def time_at_z(self, z): + dest_time = (z - self.origin.Z())/self.vz() + return dest_time + + def deltat(self, path_length): + '''Time needed to follow a given path length''' + return path_length / self.speed + + def point_at_time(self, time): + '''Returns the 3D point on the path at a given time''' + return self.origin + self.udir * self.speed * time + + def vz(self): + '''Speed magnitude along z axis''' + return self.p4.Beta() * constants.c * self.udir.Z() + + def vperp(self): + '''Speed magnitude in the transverse plane''' + return self.p4.Beta() * constants.c * self.udir.Perp() + + +class StraightLine(Path): + pass + + +class Helix(Path): + def __init__(self, field, charge, p4, origin): + super(Helix, self).__init__(p4, origin) + self.charge = charge + self.rho = 
p4.Perp() / (abs(charge)*field) * 1e9/constants.c + self.v_over_omega = p4.Vect() + self.v_over_omega *= 1./(charge*field)*1e9/constants.c + self.omega = charge*field*constants.c**2 / (p4.M()*p4.Gamma()*1e9) + momperp_xy = TVector3(-p4.Y(), p4.X(), 0.).Unit() + origin_xy = TVector3(origin.X(), origin.Y(), 0.) + self.center_xy = origin_xy - charge * momperp_xy * self.rho + self.extreme_point_xy = TVector3(self.rho, 0, 0) + if self.center_xy.X()!=0 or self.center_xy.Y()!=0: + self.extreme_point_xy = self.center_xy + self.center_xy.Unit() * self.rho + # calculate phi range with the origin at the center, + # for display purposes + center_to_origin = origin_xy - self.center_xy + self.phi0 = center_to_origin.Phi() + self.phi_min = self.phi0 * 180 / math.pi + self.phi_max = self.phi_min + 360. + + def polar_at_time(self, time): + z = self.vz() * time + self.origin.Z() + rho = self.rho + phi = - self.omega * time + self.phi0 + return rho, z, phi + + def time_at_phi(self, phi): + time = deltaPhi(self.phi0, phi) / self.omega + return time + + def phi(self, x, y): + xy = TVector3(x,y,0) + xy -= self.center_xy + return xy.Phi() + + def point_from_polar(self, polar): + rho,z,phi = polar + xy = self.center_xy + self.rho * TVector3(math.cos(phi), math.sin(phi), 0) + return TVector3(xy.X(), xy.Y(), z) + + def point_at_time(self, time): + '''return a TVector3 with cartesian coordinates at time t''' + x,y,z = self.coord_at_time(time) + return TVector3(x, y, z) + + def path_length(self, deltat): + '''ds2 = dx2+dy2+dz2 = [w2rho2 + vz2] dt2''' + return math.sqrt(self.omega**2 * self.rho**2 + self.vz()**2)*deltat + + #______________________________________________________________________________ + def coord_at_time(self, time): + '''returns x,y,z at time t''' + x = self.origin.X() + \ + self.v_over_omega.Y() * (1-math.cos(self.omega*time)) \ + + self.v_over_omega.X() * math.sin(self.omega*time) + y = self.origin.Y() - \ + self.v_over_omega.X() * (1-math.cos(self.omega*time)) \ + + 
self.v_over_omega.Y() * math.sin(self.omega*time) + z = self.vz() * time + self.origin.Z() + return x,y,z + + def compute_IP(self, vertex,jet): + '''find the impact parameter of the trajectory with respect to a given + point (vertex). The impact parameter has the same sign as the scalar product of + the vector pointing from the given vertex to the point of closest + approach with the given jet direction. + + new attributes : + * closest_t = time of closest approach to the primary vertex. + * IP = signed impact parameter + * IPcoord = TVector3 of the point of closest approach to the + primary vertex + ''' + self.vertex_IP = vertex + def distquad (time): + x,y,z = self.coord_at_time(time) + dist2 = (x-vertex.x())**2 + (y-vertex.y())**2\ + + (z-vertex.z())**2 + return dist2 + minim_answer = opti.bracket(distquad, xa = -0.5e-14, xb = 0.5e-14) + self.closest_t = minim_answer[1] + vector_IP = self.point_at_time(minim_answer[1]) - vertex + Pj = jet.p4().Vect().Unit() + signIP = vector_IP.Dot(Pj) + self.IP = minim_answer[4]**(1.0/2)*sign(signIP) + + x,y,z = self.coord_at_time(minim_answer[1]) + self.IPcoord = TVector3(x, y, z) + + def compute_theta_0(self, x, X_0): + '''Computes the square root of the variance, sigma, of the multiple + scattering angle due to matter interactions, using the formula in PDG + booklet, Passage of particles through matter, multiple scattering through small angles + equation 10.''' + P = self.p4.Vect().Dot(self.udir) + self.theta_0 = 1.0*13.6e-3/(1.0*self.speed/constants.c*P) + self.theta_0 *= abs(self.charge)*(1.0*abs(x/X_0))**(1.0/2)*(1+0.038*math.log(1.0*abs(x/X_0))) + self.xX_0 = 1.0*x/X_0 + + def compute_IP_signif(self, IP, theta_0, scat_point): + # ! are we sure sigma_IP_due_IP_algo_precise isnt overestimated ? + # it is an approximation : we stay here in a plan containing the primary + # vertex, the IP_point and the deviated one. 
But geometrically the new + IP_point isn't in that plan (cos(theta) factor ~ 1) + delta_t = 1e-15 + delta_s = delta_t * self.speed *1.0 + sigma_s = delta_s + sigma_IP_due_IP_algo_precise = IP*1.0/(math.cos(math.atan(sigma_s/IP)))-IP + sigma_IP_due_other = 1e-5 + + if theta_0 == None or scat_point == None: + self.IP_signif = IP*1.0/(sigma_IP_due_IP_algo_precise**2+sigma_IP_due_other**2)**0.5 + else : + phi_t_scat = self.phi( scat_point.X(), scat_point.Y()) + t_scat = self.time_at_phi(phi_t_scat) + fly_distance = self.speed * 1.0 * t_scat + # for the IP significance : estimation + sigma_IP_due_scattering = fly_distance*math.tan((2)**0.5*theta_0) + sigma_IP_tot = ( sigma_IP_due_IP_algo_precise**2 + sigma_IP_due_scattering**2 + sigma_IP_due_other**2 )**0.5 + self.IP_signif = IP*1.0/sigma_IP_tot + self.IP_sigma = sigma_IP_tot + + + #______________________________________________________________________________ + + # def deltat(self, path_length): + # #TODO: shouldn't this just use beta???? + # d1 = path_length / (self.p4.Beta()*constants.c) + # # d2 = path_length / math.sqrt(self.omega**2 * self.rho**2 + self.vz()**2) + # return d1 + + +if __name__ == '__main__': + + from ROOT import TLorentzVector, TVector3 + p4 = TLorentzVector() + p4.SetPtEtaPhiM(1, 0, 0, 5.11e-4) + helix = Helix(3.8, 1, p4, TVector3(0,0,0)) + length = helix.path_length(1e-9) + helix.deltat(length) diff --git a/PhysicsTools/HeppyCore/python/papas/pdt.py b/PhysicsTools/HeppyCore/python/papas/pdt.py new file mode 100644 index 0000000000000..501627185d153 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pdt.py @@ -0,0 +1,17 @@ +m_e = 0.000511 +m_mu = 0.105 +m_pi = 0.139 +m_K0 = 0.498 +m_n = 1. +m_p = 1. 
+ +particle_data = { + 11 : (m_e, 1), + -11 : (m_e, -1), + 13 : (m_mu, 1), + -13 : (m_mu, -1), + 22 : (0., 0), + 130 : (m_K0, 0), + 211 : (m_pi, 1), + -211 : (m_pi, -1) + } diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/blockbuilder.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/blockbuilder.py new file mode 100644 index 0000000000000..ff64cd775167a --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/blockbuilder.py @@ -0,0 +1,95 @@ +from PhysicsTools.HeppyCore.papas.graphtools.DAG import Node, DAGFloodFill +from PhysicsTools.HeppyCore.papas.pfalgo.pfblock import PFBlock +from PhysicsTools.HeppyCore.papas.graphtools.graphbuilder import GraphBuilder +from PhysicsTools.HeppyCore.utils.pdebug import pdebugger + +#todo remove pfevent from this class once we have written a helper class to print the block and its elements + + +class BlockBuilder(GraphBuilder): + ''' BlockBuilder takes a set of identifiers and a dict of associated edges which have distance and link info + It uses the distances between elements to construct a set of blocks + Each element will end up in one (and only one block) + Blocks retain information of the elements and the distances between elements + The blocks can then be used for future particle reconstruction + The ids must be unique and are expected to come from the Identifier class + + attributes: + + ids : list of unique identifiers eg of tracks, clusters etc + edges : dict of edges which contains all edges between the ids (and maybe more) + an edge records the distance between two ids + history_nodes : dictionary of nodes that describe which elements are parents of which blocks + if a history_nodes tree is passed in then + the additional history will be added into the exisiting history + pfevent : the particle flow event object which is needed so that the underlying object can + be retrieved + nodes : a set of nodes corresponding to the unique ids which is used to construct a graph + and thus find distinct blocks + blocks: 
the resulting blocks + + + Usage example: + + builder = BlockBuilder(ids, edges, history_nodes, pfevent) + for b in builder.blocks.itervalues() : + print b + ''' + def __init__(self, ids, edges, history_nodes = None, pfevent = None): + ''' + ids : list of unique identifiers eg of tracks, clusters etc + edges : dict of edges which contains all edges between the ids (and maybe more) + an edge records the distance/link between two ids + history_nodes : optional dictionary of nodes that describe which elements are parents of which blocks + if a history_nodes tree is passed in then + the additional history will be added into the existing history + pfevent : particle flow event object needed so that the underlying object can + be retrieved + + ''' + + #given a unique id this can return the underlying object + self.pfevent = pfevent + self.history_nodes = history_nodes + + super(BlockBuilder, self).__init__(ids, edges) + + # build the blocks of connected nodes + self.blocks = dict() + self._make_blocks() + + + + + def _make_blocks (self) : + ''' uses the DAGfloodfill algorithm in connection with the BlockBuilder nodes + to work out which elements are connected + Each set of connected elements will be used to make a new PFBlock + ''' + for subgraph in self.subgraphs: + #make the block + block = PFBlock(subgraph, self.edges, self.pfevent) + pdebugger.info("Made {}".format(block)) + #put the block in the dict of blocks + self.blocks[block.uniqueid] = block + + #make a node for the block and add into the history Nodes + if (self.history_nodes != None): + blocknode = Node(block.uniqueid) + self.history_nodes[block.uniqueid] = blocknode + #now add in the links between the block elements and the block into the history_nodes + for elemid in block.element_uniqueids: + self.history_nodes[elemid].add_child(blocknode) + + + def __str__(self): + descrip = "{ " + #for block in self.blocks.iteritems(): + for block in sorted(self.blocks, key = lambda k: 
(len(self.blocks[k].element_uniqueids), self.blocks[k].short_name()),reverse =True): + descrip = descrip + self.blocks[block].__str__() + + descrip = descrip + "}\n" + return descrip + + def __repr__(self): + return self.__str__() \ No newline at end of file diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/distance.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/distance.py new file mode 100644 index 0000000000000..49906e1ed9d59 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/distance.py @@ -0,0 +1,64 @@ + +class Distance(object): + '''Concrete distance calculator. + ''' + def __call__(self, ele1, ele2): + '''returns a tuple: + True/False depending on the validity of the link + float the link distance + ''' + layer1, layer2 = ele1.layer, ele2.layer + if layer2 < layer1: + layer1, layer2 = layer2, layer1 + ele1, ele2 = ele2, ele1 + layers = layer1, layer2 + func = None + if layers == ('ecal_in', 'tracker'): + func = self.ecal_track + elif layers == ('hcal_in', 'tracker'): + func = self.hcal_track + elif layers == ('ecal_in', 'hcal_in'): + func = self.no_link #Alice needed to make align with cpp ecal_hcal + elif layers == ('ecal_in', 'ecal_in'): + func = self.ecal_ecal + elif layers == ('hcal_in', 'hcal_in'): + func = self.hcal_hcal + elif layers == ('tracker', 'tracker'): + func = self.no_link + else: + raise ValueError('no such link layer:', layers) + return func(ele1, ele2) + + def no_link(self, ele1, ele2): + return None, False, None + + def ecal_ecal(self, ele1, ele2): + #modified this to also deal with clusters that are merged clusters + link_ok, dist = ele1.is_inside_clusters(ele2) + return ('ecal_in', 'ecal_in'), link_ok, dist + + def hcal_hcal(self, ele1, ele2): + link_ok, dist = ele1.is_inside_clusters(ele2) + return ('hcal_in', 'hcal_in'), link_ok, dist + + def ecal_track(self, ecal, track): + tp = track.path.points.get('ecal_in', None) + if tp is None: + # probably a looper + return ('ecal_in', 'tracker'), False, None + link_ok, dist 
= ecal.is_inside(tp) + return ('ecal_in', 'tracker'), link_ok, dist + + def hcal_track(self, hcal, track): + tp = track.path.points.get('hcal_in', None) + if tp is None: + # probably a looper + return ('hcal_in', 'tracker'), False, None + link_ok, dist = hcal.is_inside(tp) + return ('hcal_in', 'tracker'), link_ok, dist + + def ecal_hcal(self, ele1, ele2): + link_ok, dist = ele1.is_inside_clusters(ele2) + return ('ecal_in', 'hcal_in'), link_ok, dist + +distance = Distance() diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/floodfill.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/floodfill.py new file mode 100644 index 0000000000000..798022f9527c3 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/floodfill.py @@ -0,0 +1,64 @@ + + +class Node(object): + '''Basic interface for nodes traversed by the floodfill algo. + Floodill will give a label to each node corresponding to the + disconnected subgraph it corresponds to. + + The linked attribute is a list that should contain the elements y-linkes. + ''' + def __init__(self): + self.linked = [] + self.block_label = None + + def accept(self, visitor): + '''Called by visitors, such as FloodFill.''' + notseen = visitor.visit(self) + if notseen: + for elem in self.linked: + elem.accept(visitor) + + +class FloodFill(object): + '''The flood fill algorithm finds all disconnected subgraphs in + a list of nodes. + + The block_label of each node is set to an integer corresponding to the + disconnected subgraph it corresponds to. 
+ + The results can be accessed through the nodes themselves, + or through the groups attribute, which has the following form: + {0: [list of elements in subgraph0], 1: [list of elements in subgraph 1], ...} + ''' + + def __init__(self, elements, first_label=0): + '''Perform the search for disconnected subgraphs on a list of elements + matching the interface given in this module.''' + self.label = first_label + self.visited = dict() + self.groups = dict() + for elem in elements: + if self.visited.get(elem, False): + continue + elem.accept(self) + # print 'incrementing', elem, self.label + self.label += 1 + + def visit(self, element): + '''visit one element.''' + if self.visited.get(element, False): + return False + else: + # print 'visiting', element, self.label + element.block_label = self.label + self.groups.setdefault(element.block_label, []).append(element) + self.visited[element] = True + return True + + def __str__(self): + lines = [] + for gid, group in self.groups.iteritems(): + groupinfo = ', '.join(map(str, group)) + lines.append('{gid:5} {ginfo}'.format(gid=gid, ginfo=groupinfo)) + return '\n'.join(lines) + diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/links.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/links.py new file mode 100644 index 0000000000000..a811b8a6f0267 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/links.py @@ -0,0 +1,125 @@ +import itertools +import pprint +from floodfill import FloodFill + +class Element(object): + '''Basic interface for a particle flow element. + Your class should expose the same attributes + ''' + def __init__(self): + self.layer = None + self.linked = [] + self.locked = False + self.block_label = None + + def accept(self, visitor): + '''Called by visitors, such as FloodFill.''' + notseen = visitor.visit(self) + if notseen: + for elem in self.linked: + elem.accept(visitor) + + +class Distance(object): + '''Basic distance functor interface. 
+ You should provide such a functor (or a function), able to deal + with any pair of elements you have. + ''' + def __call__(self, ele1, ele2): + '''Should return True if the link is valid, + together with a link property object (maybe only the link distance). + ''' + link_type = 'dummy' + dist12 = 0. + return link_type, True, dist12 + + + +class Links(dict): + + def __init__(self, elements, distance): + self.elements = elements + for ele in elements: + ele.linked = [] + for ele1, ele2 in itertools.combinations(elements, 2): + link_type, link_ok, dist = distance(ele1, ele2) + if link_ok: + self.add(ele1, ele2, dist) + floodfill = FloodFill(elements) + #print floodfill + self.groups = floodfill.groups + self.group_label = floodfill.label + for elem in elements: + self.sort_links(elem) + + def subgroups(self, groupid): + floodfill = FloodFill(self.groups[groupid], self.group_label) + self.group_label = floodfill.label + return floodfill.groups + # if len(floodfill.groups)>1: + # del self.groups[groupid] + # self.groups.extend(floodfill.groups) + + def dist_linked(self, elem): + '''returns [(dist, linked_elem1), ...] + for all elements linked to elem.''' + dist_linked = [] + for linked_elem in elem.linked: + dist = self.info(elem, linked_elem) + dist_linked.append( (dist, linked_elem) ) + return dist_linked + + def sort_links(self, elem): + '''sort links in elem according to link distance. + TODO unittest + ''' + dist_linked = [] + for linked_elem in elem.linked: + dist = self.info(elem, linked_elem) + dist_linked.append( (dist, linked_elem) ) + sorted_links = [linked_elem for dist, linked_elem in sorted(dist_linked)] + elem.linked = sorted_links + + def key(self, elem1, elem2): + '''Build the dictionary key for the pair elem1 and elem2.''' + return tuple(sorted([elem1, elem2])) + + def add(self, elem1, elem2, link_info): + '''Link two elements. + TODO: call that link. 
+ ''' + key = self.key(elem1, elem2) + elem1.linked.append(elem2) + elem2.linked.append(elem1) + self[key] = link_info + + def unlink(self, elem1, elem2): + '''Unlink two elements.''' + key = self.key(elem1, elem2) + elem1.linked.remove(elem2) + elem2.linked.remove(elem1) + del self[key] + + def info(self, elem1, elem2): + '''Return link information between two elements. + None if the link does not exist.''' + key = self.key(elem1, elem2) + return self.get(key, None) + + def __str__(self): + lines = [] + for key, val in self.iteritems(): + ele1, ele2 = key + lines.append("{ele1:50} {ele2:50} dist = {val:5.4f}".format(ele1=ele1, + ele2=ele2, + val=val)) + '\n Groups:\n'.join(lines) + for gid, group in self.groups.iteritems(): + groupinfo = ', '.join(map(str, group)) + lines.append('group {gid:5} {ginfo}'.format(gid=gid, ginfo=groupinfo)) + + return '\n'.join(lines) + + + + diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/merger.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/merger.py new file mode 100644 index 0000000000000..30584748f459d --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/merger.py @@ -0,0 +1,30 @@ +import copy +from links import Links +from distance import distance +from ROOT import TVector3 + + +def merge_clusters(elements, layer): + merged = [] + elem_in_layer = [] + elem_other = [] + for elem in elements: + if elem.layer == layer: + elem_in_layer.append(elem) + else: + elem_other.append(elem) + links = Links(elem_in_layer, distance) + for group in links.groups.values(): + if len(group) == 1: + merged.append(group[0]) + continue + supercluster = None + for cluster in group: + if supercluster is None: + supercluster = copy.copy(cluster) + merged.append(supercluster) + continue + else: + supercluster += cluster + merged.extend(elem_other) + return merged diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblock.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblock.py new file mode 100644 index 
0000000000000..609197c6c6d97 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblock.py @@ -0,0 +1,285 @@ +import itertools +from PhysicsTools.HeppyCore.papas.graphtools.edge import Edge +from PhysicsTools.HeppyCore.papas.data.identifier import Identifier + +#todo remove pfevent from this class once we have written a helper class to print the block and its elements + +class PFBlock(object): + + ''' A Particle Flow Block stores a set of element ids that are connected to each other + together with the edge data (distances) for each possible edge combination + + attributes: + + uniqueid : the block's unique id generated from Identifier class + element_uniqueids : list of uniqueids of its elements + pfevent : contains the tracks and clusters and a get_object method to allow access to the + underlying objects given their uniqueid + edges : Dictionary of all the edge cominations in the block dict{edgekey : Edge} + use get_edge(id1,id2) to find an edge + is_active : bool true/false, set to false if the block is subsequently subdivided + + Usage: + block = PFBlock(element_ids, edges, pfevent) + for uid in block.element_uniqueids: + print pfevent.get_object(uid).__str__() + "\n" + ''' + + temp_block_count = 0 #sequential numbering of blocks, not essential but helpful for debugging + + def __init__(self, element_ids, edges, pfevent): + ''' + element_ids: list of the uniqueids of the elements to go in this block [id1,id2,...] + edges: is a dictionary of edges, it must contain at least all needed edges. 
+ It is not a problem if it contains + additional edges as only the ones needed will be extracted + pfevent: allows access to the underlying elements given a uniqueid + must provide a get_object function + ''' + #make a uniqueid for this block + self.uniqueid = Identifier.make_id(Identifier.PFOBJECTTYPE.BLOCK) + self.is_active = True # if a block is subsequently split it will be deactivated + + #allow access to the underlying objects + self.pfevent = pfevent + + #comment out energy sorting for now as not available C++ + sortby = lambda x: Identifier.type_short_code(x) + self.element_uniqueids = sorted(element_ids, key=sortby) + #sequential numbering of blocks, not essential but helpful for debugging + self.block_count = PFBlock.temp_block_count + PFBlock.temp_block_count += 1 + + #extract the relevant parts of the complete set of edges and store this within the block + self.edges = dict() + for id1, id2 in itertools.combinations(self.element_uniqueids, 2): + key = Edge.make_key(id1, id2) + self.edges[key] = edges[key] + + + def count_ecal(self): + ''' Counts how many ecal cluster ids are in the block ''' + count = 0 + for elem in self.element_uniqueids: + count += Identifier.is_ecal(elem) + return count + + def count_tracks(self): + ''' Counts how many track ids are in the block ''' + count = 0 + for elem in self.element_uniqueids: + count += Identifier.is_track(elem) + return count + + def count_hcal(self): + ''' Counts how many hcal cluster ids are in the block ''' + count = 0 + for elem in self.element_uniqueids: + count += Identifier.is_hcal(elem) + return count + + def __len__(self) : + return len(self.element_uniqueids) + + def linked_edges(self, uniqueid, edgetype=None) : + ''' + Returns list of all edges of a given edge type that are connected to a given id. + The list is sorted in order of increasing distance + + Arguments: + uniqueid : is the id of item of interest + edgetype : is an optional type of edge. 
If specified only links of the given edgetype will be returned + ''' + linked_edges = [] + for edge in self.edges.itervalues(): + if edge.linked and (edge.id1 == uniqueid or edge.id2 == uniqueid): + if edgetype is None or ((edgetype != None) and (edge.edge_type == edgetype)): + linked_edges.append(edge) + + + #this is a bit yucky and temporary solution as need to make sure the order returned is consistent + # maybe should live outside of this class + linked_edges.sort(key=lambda x: (x.distance is None, x.distance)) + return linked_edges + + def linked_ids(self, uniqueid, edgetype=None) : + ''' Returns list of all linked ids of a given edge type that are connected to a given id - + sorted in order of increasing distance''' + linked_ids = [] + linked_edges = [] + linked_edges = self.linked_edges(uniqueid, edgetype) + if len(linked_edges): + for edge in linked_edges: + if edge.id1 == uniqueid: + linked_ids.append(edge.id2) + else: + linked_ids.append(edge.id1) + return sorted(linked_ids) + + def sort_distance_energy(self, uniqueid, otherids): + ''' returns a list of the otherids sorted by distance to uniqueid and by decreasing energies + + eg if uniqueid is an hcal + and other ids are track1 energy = 18, dist to hcal = 0.1 + track2 energy = 9, dist to hcal = 0 + track3 energy = 4, dist to hcal = 0 + this will return {track2, track3, track1} + ''' + #this is ""needed"" for particle reconstruction + #this is a bit yucky and may only be a temporary work around + # maybe should live outside of this class + return sorted(otherids, key=lambda x: (self.get_edge(x, uniqueid).distance is None, + self.get_edge(x, uniqueid).distance, + -self.pfevent.get_object(x).energy)) + + def elements_string(self): + ''' Construct a string description of each of the elements in a block:- + The elements are given a short name E/H/T according to ecal/hcal/track + and then sequential numbering starting from 0, this naming is also used to index the + matrix of distances. 
The full unique id is also given. + For example:- + elements: { + E0:1104134446736:SmearedCluster : ecal_in 0.57 0.33 -2.78 + H1:2203643940048:SmearedCluster : hcal_in 6.78 0.35 -2.86 + T2:3303155568016:SmearedTrack : 5.23 4.92 0.34 -2.63 + } + ''' + count = 0 + elemdetails = "\n elements: {\n" + for uid in self.element_uniqueids: + elemdetails += " {shortname}{count}:{strdescrip}\n".format( + shortname=Identifier.type_short_code(uid), + count=count, + strdescrip=self.pfevent.get_object(uid).__str__()) + count = count + 1 + return elemdetails + " }\n" + + def short_elements_string(self): + ''' Construct a string description of each of the elements in a block. + + The elements are given a short name E/H/T according to ecal/hcal/track + and then sequential numbering starting from 0, this naming is also used to index the + matrix of distances. The full unique id is also given. + For example:- + elements: { + E0:1104134446736:SmearedCluster : ecal_in 0.57 0.33 -2.78 + H1:2203643940048:SmearedCluster : hcal_in 6.78 0.35 -2.86 + T2:3303155568016:SmearedTrack : 5.23 4.92 0.34 -2.63 + } + ''' + + count = 0 + elemdetails = " elements:\n" + for uid in self.element_uniqueids: + elemdetails += "{shortname:>7}{count} = {strdescrip:9} ({id})\n".format( + shortname=Identifier.type_short_code(uid), + count=count, + strdescrip=Identifier.pretty(uid), + id=uid) + count = count + 1 + return elemdetails + + def short_name(self): + ''' constructs a short summary name for blocks allowing sorting based on contents + eg 'E1H1T2' for a block with 1 ecal, 1 hcal, 2 tracks + ''' + shortname = "" + if self.count_ecal(): + shortname = shortname + "E" + str(self.count_ecal()) + if self.count_hcal(): + shortname = shortname + "H" + str(self.count_hcal()) + if self.count_tracks(): + shortname = shortname + "T" + str(self.count_tracks()) + return shortname + + def edge_matrix_string(self): + ''' produces a string containing the the lower part of the matrix of distances between elements + elements 
are ordered as ECAL(E), HCAL(H), Track(T) + for example:- + + distances: + E0 H1 T2 T3 + E0 . + H1 0.0267 . + T2 0.0000 0.0000 . + T3 0.0287 0.0825 --- . + ''' + + # make the header line for the matrix + count = 0 + matrixstr = "\n" + if len(self.element_uniqueids) > 1: + matrixstr = " distances:\n " + for e1 in self.element_uniqueids : + # will produce short id of form E2 H3, T4 etc in tidy format + elemstr = Identifier.type_short_code(e1) + str(count) + matrixstr += "{:>8}".format(elemstr) + count += 1 + matrixstr += "\n" + + #for each element find distances to all other items that are in the lower part of the matrix + countrow = 0 + for e1 in self.element_uniqueids : # this will be the rows + countcol = 0 + rowstr = "" + #make short name for the row element eg E3, H5 etc + rowname = Identifier.type_short_code(e1) +str(countrow) + for e2 in self.element_uniqueids: # these will be the columns + countcol += 1 + if e1 == e2: + rowstr += " ." + break + elif self.get_edge(e1, e2).distance is None: + rowstr += " ---" + elif not self.get_edge(e1, e2).linked: + rowstr += " xxx" + else : + rowstr += "{:8.4f}".format(self.get_edge(e1, e2).distance) + matrixstr += "{:>8}".format(rowname) + rowstr + "\n" + countrow += 1 + return matrixstr + + def get_edge(self, id1, id2): + ''' Find the edge corresponding to e1 e2 + Note that make_key deals with whether it is get_edge(e1, e2) or + get_edge(e2, e1) (either order gives same result) + ''' + return self.edges[Edge.make_key(id1, id2)] + + def __str__(self): + ''' Block description which includes list of elements and a matrix of distances + Example: + block: E1H1T1 id= 39 :uid= 6601693505424: ecals = 1 hcals = 1 tracks = 1 + elements: { + E0:1104134446736:SmearedCluster : ecal_in 0.57 0.33 -2.78 + H1:2203643940048:SmearedCluster : hcal_in 6.78 0.35 -2.86 + T2:3303155568016:SmearedTrack : 5.23 4.92 0.34 -2.63 + } + distances: + E0 H1 T2 + E0 . + H1 0.0796 . + T2 0.0210 0.0000 . 
+ } + ''' + description = self.__repr__() + "\n" + description += self.short_elements_string() + description += self.edge_matrix_string() + return description + + def __repr__(self): + ''' Short Block description + ''' + if self.is_active: + description = "block:" + else: + description = "deactivated block:" + description += str('{shortname:8} :{prettyid:6}: ecals = {count_ecal} hcals = {count_hcal} tracks = {count_tracks}'.format( + shortname=self.short_name(), + prettyid=Identifier.pretty(self.uniqueid), + count_ecal=self.count_ecal(), + count_hcal=self.count_hcal(), + count_tracks=self.count_tracks()) + ) + return description + diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblockbuilder.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblockbuilder.py new file mode 100644 index 0000000000000..86974aac0509e --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblockbuilder.py @@ -0,0 +1,105 @@ +import itertools +from blockbuilder import BlockBuilder +from PhysicsTools.HeppyCore.papas.graphtools.edge import Edge +from PhysicsTools.HeppyCore.papas.graphtools.DAG import Node + +class PFBlockBuilder(BlockBuilder): + ''' PFBlockBuilder takes particle flow elements from an event (clusters,tracks etc) + and uses the distances between elements to construct a set of blocks + Each element will end up in one (and only one block) + Blocks retain information of the elements and the distances between elements + The blocks can then be used for future particle reconstruction + The ids must be unique and are expected to come from the Identifier class + + attributes: + + blocks : dictionary of blocks {id1:block1, id2:block2, ...} + history_nodes : dictionary of nodes that describe which elements are parents of which blocks + if an existing history_nodes tree eg one created during simulation + is passed to the BlockBuilder then + the additional history will be added into the exisiting history + nodes : dictionary of nodes which describes the distances/links 
between elements + the nodes dictionary will be used to create the blocks + + + Usage example: + + builder = PFBlockBuilder(pfevent, ruler) + for b in builder.blocks.itervalues() : + print b + ''' + def __init__(self, pfevent, ruler, history_nodes = None): + ''' + pfevent is event structure inside which we find + tracks is a dictionary : {id1:track1, id2:track2, ...} + ecal is a dictionary : {id1:ecal1, id2:ecal2, ...} + hcal is a dictionary : {id1:hcal1, id2:hcal2, ...} + get_object() which allows a cluster or track to be found from its id + ruler is something that measures distance between two objects eg track and hcal + (see Distance class for example) + it should take the two objects as arguments and return a tuple + of the form + link_type = 'ecal_ecal', 'ecal_track' etc + is_link = true/false + distance = float + history_nodes is an optional dictionary of Nodes : { id:Node1, id: Node2 etc} + it could for example contain the simulation history nodes + A Node contains the id of an item (cluster, track, particle etc) + and says what it is linked to (its parents and children) + if hist_nodes is provided it will be added to with the new block information + If hist_nodes is not provided one will be created, it will contain nodes + corresponding to each of the tracks, ecal etc and also for the blocks that + are created by the event block builder. 
+ ''' + + #given a unique id this can return the underying object + self.pfevent = pfevent + + # collate all the ids of tracks and clusters and, if needed, make history nodes + uniqueids = [] + uniqueids = list(pfevent.tracks.keys()) + list(pfevent.ecal_clusters.keys()) + list(pfevent.hcal_clusters.keys()) + uniqueids = sorted(uniqueids) + + self.history_nodes = history_nodes + if history_nodes is None: + self.history_nodes = dict( (idt, Node(idt)) for idt in uniqueids ) + + # compute edges between each pair of nodes + edges = dict() + + for id1 in uniqueids: + for id2 in uniqueids: + if id1 < id2 : + edge=self._make_edge(id1,id2, ruler) + #the edge object is added into the edges dictionary + edges[edge.key] = edge + + #use the underlying BlockBuilder to construct the blocks + super(PFBlockBuilder, self).__init__(uniqueids, edges, self.history_nodes, pfevent) + + def _make_edge(self,id1,id2, ruler): + ''' id1, id2 are the unique ids of the two items + ruler is something that measures distance between two objects eg track and hcal + (see Distance class for example) + it should take the two objects as arguments and return a tuple + of the form + link_type = 'ecal_ecal', 'ecal_track' etc + is_link = true/false + distance = float + an edge object is returned which contains the link_type, is_link (bool) and distance between the + objects. 
+ ''' + #find the original items and pass to the ruler to get the distance info + obj1 = self.pfevent.get_object(id1) + obj2 = self.pfevent.get_object(id2) + link_type, is_linked, distance = ruler(obj1,obj2) #some redundancy in link_type as both distance and Edge make link_type + #not sure which to get rid of + + #for the event we do not want ecal_hcal links + if link_type == "ecal_hcal": + is_linked = False + + #make the edge + return Edge(id1,id2, is_linked, distance) + + diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblocksplitter.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblocksplitter.py new file mode 100644 index 0000000000000..b159c507a13c2 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfblocksplitter.py @@ -0,0 +1,45 @@ +from blockbuilder import BlockBuilder + +#remove pfevent once we have helper classes to produce good printouts + +class BlockSplitter(BlockBuilder): + ''' BlockSplitter takes an existing block of particle flow element ids (clusters,tracks etc) + and unlinks any specified edges. It then recalculates a new set of sub-blocks + using the underlying BlockBuilder class + + Usage example: + + splitter = BlockSplitter(block, unlink_edges) + for b in splitter.blocks.itervalues(): + print b + + Notes: (to be extended) + if a block is split it will be marked as block.is_active=false + as it will effectively be replaced by the new blocks after the split. 
+ ** explain how old block fits into history node ** + ''' + def __init__(self, block, unlink_edges, history_nodes = None): + '''arguments: + + blocks : dictionary of blocks {id1:block1, id2:block2, ...} + unlink_edges : list of edges where a link is to be removed + history_nodes : an optional dictionary of history nodes which describes the parent child links between elements + + ''' + for edge in unlink_edges: + edge.linked = False + + super(BlockSplitter, self).__init__(block.element_uniqueids, block.edges, history_nodes, block.pfevent) + assert( isinstance(self.blocks,dict)) + + #set the original block to be inactive + block.is_active = False + + + + + + + + + \ No newline at end of file diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/pfinput.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfinput.py new file mode 100644 index 0000000000000..1b605fc60c737 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfinput.py @@ -0,0 +1,52 @@ +import operator + +def merge_clusters(clusters): + pass + +class PFInput(object): + '''Builds the inputs to particle flow from a collection of simulated particles: + - collects all smeared tracks and clusters + - merges overlapping clusters + ''' + + def __init__(self, ptcs): + ''' + attributes: + - elements: dictionary of elements: + tracker : [track0, track1, ...] + ecal: [cluster0, cluster1, ...] + hcal: [... 
] + ''' + self.elements = dict() + self.build(ptcs) + + def build(self, ptcs): + for ptc in ptcs: + for key, cluster in ptc.clusters_smeared.iteritems(): + self.elements.setdefault(key, []).append(cluster) + if ptc.track_smeared: + self.elements.setdefault('tracker', []).append(ptc.track_smeared) + + #Alice disabled sort + #for elems in self.elements.values(): + # elems.sort(key=operator.attrgetter('energy'), reverse=True) + + def element_list(self): + thelist = [] + for layer, elements in sorted(self.elements.iteritems()): + thelist.extend( elements ) + return thelist + + def __str__(self): + lines = ['PFInput:'] + # lines.append('\tTracks:') + def tab(astr, ntabs=2): + return ''.join(['\t'*ntabs, str(astr)]) + # for track in self.tracks: + # lines.append(tab(str(track))) + # lines.append('\tClusters:') + for layer, elements in sorted(self.elements.iteritems()): + lines.append(tab(layer)) + for element in elements: + lines.append(tab(str(element), 3)) + return '\n'.join(lines) diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/pfreconstructor.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfreconstructor.py new file mode 100644 index 0000000000000..3135af9dfe425 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/pfreconstructor.py @@ -0,0 +1,440 @@ +from PhysicsTools.HeppyCore.papas.data.identifier import Identifier +from PhysicsTools.HeppyCore.papas.graphtools.DAG import Node +from PhysicsTools.HeppyCore.papas.pfalgo.pfblocksplitter import BlockSplitter +from PhysicsTools.HeppyCore.papas.pdt import particle_data +from PhysicsTools.HeppyCore.papas.path import StraightLine, Helix +from PhysicsTools.HeppyCore.utils.pdebug import pdebugger +from PhysicsTools.HeppyCore.papas.pfobjects import Particle +from PhysicsTools.HeppyCore.utils.pdebug import pdebugger + +from ROOT import TVector3, TLorentzVector +import math +import pprint + +#Discuss with colin self.locked vs ecal.locked +#209 in reconstruct_block extra ecals to be added in +#remove sort + + 
+class PFReconstructor(object): + ''' The reconstructor takes an event containing blocks of elements + and attempts to reconstruct particles + The following strategy is used (to be checked with Colin) + single elements: + track -> charged hadron + hcal -> neutral hadron + ecal -> photon + connected elements: + has more than one hcal + -> each hcal is treated using rules below + has an hcal with one or more connected tracks + -> add up all connected track energies, turn each track into a charged hadron + -> add up all ecal energies connected to the above tracks + -> if excess = hcal energy + ecal energies - track energies > 0 + and excess < ecal energies + then turn the excess into an photon + -> if excess > 0 and excess > ecal energies + make a neutral hadron with excess- ecal energies + make photon with ecal energies + has hcal but no track (nb by design there will be no attached ecals because hcal ecal links have been removed + so this will equate to single hcal:- that two hcals should not occur as a single block + because if they are close enough to be linked then they should already have been merged) + -> make a neutral hadron + + has track(s) + -> each track is turned into a charged hadron + has track(s) and ecal(s) + -> the tracks are turned into charged hadrons, the ecals are marked as locked but energy is not checked + and no photons are made + TODO handle case where there is more energy in ecals than in the track and make some photons + has only ecals + -> this should not occur because ecals that are close enough to be linked should already have been merged + + + If history_nodes are provided then the particles are linked into the exisiting history + + Contains: + blocks: the dictionary of blocks to be reconstructed { blockid; block } + unused: list of unused elements + particles: list of constructed particles + history_nodes: optional, desribes links between elements, blocks, particles + Example usage: + + reconstructed = PFReconstructor(event) + 
event.reconstructed_particles= sorted( reconstructed.particles, + key = lambda ptc: ptc.e(), reverse=True) + event.history_nodes=reconstructed.history_nodes + ''' + + def __init__(self, detector, logger): + self.detector = detector + self.log = logger + #self.reconstruct(links) + + + def reconstruct(self, event, blocksname, historyname): + '''arguments event: should contain blocks and optionally history_nodes''' + self.blocks = getattr(event, blocksname) + self.unused = [] + self.particles = dict() + + + # history nodes will be used to connect reconstructed particles into the history + # its optional at the moment + if hasattr(event, historyname): + self.history_nodes = event.history_nodes + else : + self.history_nodes = None + + # simplify the blocks by editing the links so that each track will end up linked to at most one hcal + # then recalculate the blocks + splitblocks=dict() + + for block in self._sorted_block_keys(): #put big interesting blocks first + #print "block: ", len(self.blocks[block]), self.blocks[block].short_name(); + newblocks=self.simplify_blocks(self.blocks[block], self.history_nodes) + if newblocks != None: + splitblocks.update( newblocks) + if len(splitblocks): + self.blocks.update(splitblocks) + + + #reconstruct each of the resulting blocks + for b in self._sorted_block_keys(): #put big interesting blocks first + block=self.blocks[b] + if block.is_active: # when blocks are split the original gets deactivated + #ALICE debugging + #if len(block.element_uniqueids)<6: + # continue + pdebugger.info('Processing {}'.format(block)) + self.reconstruct_block(block) + self.unused.extend( [id for id in block.element_uniqueids if not self.locked[id]]) + + #check if anything is unused + if len(self.unused): + self.log.warning(str(self.unused)) + self.log.info("Particles:") + self.log.info(str(self)) + + + def _sorted_block_keys(self) : + #sort blocks (1) by number of elements (2) by mix of ecal, hcal , tracks (the shortname will look like "H1T2" for a 
block + #Alice temporary to match cpp + #return sorted(self.blocks.keys(), key=lambda k: (len(self.blocks[k].element_uniqueids), self.blocks[k].short_name()),reverse =True) + #newsort + return sorted(self.blocks.keys()); + + def simplify_blocks(self, block, history_nodes=None): + + ''' Block: a block which contains list of element ids and set of edges that connect them + history_nodes: optional dictionary of Nodes with element identifiers in each node + + returns None or a dictionary of new split blocks + + The goal is to remove, if needed, some links from the block so that each track links to + at most one hcal within a block. In some cases this may separate a block into smaller + blocks (splitblocks). The BlockSplitter is used to return the new smaller blocks. + If history_nodes are provided then the history will be updated. Split blocks will + have the tracks and cluster elements as parents, and also the original block as a parent + ''' + + ids=block.element_uniqueids + + + if len(ids)<=1 : #no links to remove + return None + + # work out any links that need to be removed + # - for tracks unink all hcals except the closest hcal + # - for ecals unlink hcals + to_unlink = [] + for id in ids : + if Identifier.is_track(id): + linked = block.linked_edges(id,"hcal_track") # NB already sorted from small to large distance + if linked!=None and len(linked)>1 : + first_hcal = True + for elem in linked: + if first_hcal: + first_dist=elem.distance + first_hcal = False + else: + if (elem.distance==first_dist): + pass + to_unlink.append(elem) + + #if there is something to unlink then use the BlockSplitter + splitblocks=None + if len(to_unlink): + splitblocks= BlockSplitter(block, to_unlink, history_nodes).blocks + + return splitblocks + + def reconstruct_block(self, block): + ''' see class description for summary of reconstruction approach + ''' + particles = dict() + ids = block.element_uniqueids + #ids = sorted( ids, key = lambda id: Identifier.type_short_code ) + 
self.locked = dict() + for id in ids: + self.locked[id] = False + + self.debugprint = False + if (self.debugprint and len(block.element_uniqueids)> 4): + print block + + + if len(ids) == 1: #TODO WARNING!!! LOTS OF MISSING CASES + id = ids[0] + + if Identifier.is_ecal(id): + self.insert_particle(block, self.reconstruct_cluster(block.pfevent.ecal_clusters[id],"ecal_in")) + + elif Identifier.is_hcal(id): + self.insert_particle(block, self.reconstruct_cluster(block.pfevent.hcal_clusters[id],"hcal_in")) + + elif Identifier.is_track(id): + self.insert_particle(block, self.reconstruct_track(block.pfevent.tracks[id])) + # ask Colin about energy balance - what happened to the associated clusters that one would expect? + else: #TODO + for id in sorted(ids) : #newsort + if Identifier.is_hcal(id): + self.reconstruct_hcal(block,id) + for id in sorted(ids) : #newsort + if Identifier.is_track(id) and not self.locked[id]: + # unused tracks, so not linked to HCAL + # reconstructing charged hadrons. + # ELECTRONS TO BE DEALT WITH. + self.insert_particle(block, self.reconstruct_track(block.pfevent.tracks[id])) + + # tracks possibly linked to ecal->locking cluster + for idlink in block.linked_ids(id,"ecal_track"): + #ask colin what happened to possible photons here: + self.locked[idlink] = True + #TODO add in extra photonsbut decide where they should go? + + + # #TODO deal with ecal-ecal + # ecals = [elem for elem in group if elem.layer=='ecal_in' + # and not elem.locked] + # for ecal in ecals: + # linked_layers = [linked.layer for linked in ecal.linked] + # # assert('tracker' not in linked_layers) #TODO electrons + # self.log.warning( 'DEAL WITH ELECTRONS!' ) + # particles.append(self.reconstruct_cluster(ecal, 'ecal_in')) + #TODO deal with track-ecal + + + + def insert_particle(self, block, newparticle): + ''' The new particle will be inserted into the history_nodes (if present). + A new node for the particle will be created if needed. 
+ It will have as its parents the block and all the elements of the block. + ''' + #Note that although it may be possible to specify more closely that the particle comes from + #some parts of the block, there are frequently ambiguities and so for now the particle is + #linked to everything in the block + if (newparticle) : + newid = newparticle.uniqueid + self.particles[newid] = newparticle + + #check if history nodes exists + if (self.history_nodes == None): + return + + #find the node for the block + blocknode = self.history_nodes[block.uniqueid] + + #find or make a node for the particle + if newid in self.history_nodes : + pnode = self.history_nodes[newid] + else : + pnode = Node(newid) + self.history_nodes[newid] = pnode + + #link particle to the block + blocknode.add_child(pnode) + #link particle to block elements + for element_id in block.element_uniqueids: + self.history_nodes[element_id].add_child(pnode) + + + def neutral_hadron_energy_resolution(self, energy, eta): + '''Currently returns the hcal resolution of the detector in use. + That's a generic solution, but CMS is doing the following + (implementation in commented code) + http://cmslxr.fnal.gov/source/RecoParticleFlow/PFProducer/src/PFAlgo.cc#3350 + ''' + resolution = self.detector.elements['hcal'].energy_resolution(energy, eta) + return resolution +## energy = max(hcal.energy, 1.) +## eta = hcal.position.Eta() +## stoch, const = 1.02, 0.065 +## if abs(hcal.position.Eta())>1.48: +## stoch, const = 1.2, 0.028 +## resol = math.sqrt(stoch**2/energy + const**2) +## return resol + + def nsigma_hcal(self, cluster): + '''Currently returns 2. + CMS is doing the following (implementation in commented code) + http://cmslxr.fnal.gov/source/RecoParticleFlow/PFProducer/src/PFAlgo.cc#3365 + ''' + return 2 +## return 1. + math.exp(-cluster.energy/100.) 
+ + + + def reconstruct_hcal(self, block, hcalid): + ''' + block: element ids and edges + hcalid: id of the hcal being processed her + + has hcal and has a track + -> add up all connected tracks, turn each track into a charged hadron + -> add up all ecal energies + -> if track energies is greater than hcal energy then turn the missing energies into an ecal (photon) + NB this links the photon to the hcal rather than the ecals + -> if track energies are less than hcal then make a neutral hadron with rest of hcal energy and turn all ecals into photons + has hcal but no track (nb by design there will be no attached ecals because hcal ecal links have been removed) + -> make a neutral hadron + has hcals + -> each hcal is treated using rules above + ''' + + # hcal used to make ecal_in has a couple of possible issues + tracks = [] + ecals = [] + hcal =block.pfevent.hcal_clusters[hcalid] + + assert(len(block.linked_ids(hcalid, "hcal_hcal"))==0 ) + + #trackids = block.linked_ids(hcalid, "hcal_track") + #alice temporarily disabled + #trackids = block.sort_distance_energy(hcalid, trackids ) +#newsort + trackids = block.linked_ids(hcalid, "hcal_track") #sorted within block + for trackid in trackids: + tracks.append(block.pfevent.tracks[trackid]) + for ecalid in block.linked_ids(trackid, "ecal_track"): #new sort + # the ecals get all grouped together for all tracks in the block + # Maybe we want to link ecals to their closest track etc? + # this might help with history work + # ask colin. 
+ if not self.locked[ecalid]: + ecals.append(block.pfevent.ecal_clusters[ecalid]) + self.locked[ecalid] = True + # hcal should be the only remaining linked hcal cluster (closest one) + #thcals = [th for th in elem.linked if th.layer=='hcal_in'] + #assert(thcals[0]==hcal) + self.log.info( hcal ) + self.log.info( '\tT {tracks}'.format(tracks=tracks) ) + self.log.info( '\tE {ecals}'.format(ecals=ecals) ) + hcal_energy = hcal.energy + if len(tracks): + ecal_energy = sum(ecal.energy for ecal in ecals) + track_energy = sum(track.energy for track in tracks) + for track in tracks: + #make a charged hadron + self.insert_particle(block, self.reconstruct_track( track)) + + delta_e_rel = (hcal_energy + ecal_energy) / track_energy - 1. + # WARNING + # calo_eres = self.detector.elements['hcal'].energy_resolution(track_energy) + # calo_eres = self.neutral_hadron_energy_resolution(hcal) + calo_eres = self.neutral_hadron_energy_resolution(track_energy, + hcal.position.Eta()) + self.log.info( 'dE/p, res = {derel}, {res} '.format( + derel = delta_e_rel, + res = calo_eres )) + # if False: + if delta_e_rel > self.nsigma_hcal(hcal) * calo_eres: # approx means hcal energy + ecal energies > track energies + + excess = delta_e_rel * track_energy # energy in excess of track energies + #print( 'excess = {excess:5.2f}, ecal_E = {ecal_e:5.2f}, diff = {diff:5.2f}'.format( + # excess=excess, ecal_e = ecal_energy, diff=excess-ecal_energy)) + if excess <= ecal_energy: # approx means hcal energy > track energies + # Make a photon from the ecal energy + # We make only one photon using only the combined ecal energies + self.insert_particle(block, self.reconstruct_cluster(hcal, 'ecal_in',excess)) + + else: # approx means that hcal energy>track energies so we must have a neutral hadron + #excess-ecal_energy is approximately hcal energy - track energies + self.insert_particle(block, self.reconstruct_cluster(hcal, 'hcal_in', + excess-ecal_energy)) + if ecal_energy: + #make a photon from the remaining 
ecal energies + #again history is confusingbecause hcal is used to provide direction + #be better to make several smaller photons one per ecal? + self.insert_particle(block, self.reconstruct_cluster(hcal, 'ecal_in', + ecal_energy)) + + else: # case where there are no tracks make a neutral hadron for each hcal + # note that hcal-ecal links have been removed so hcal should only be linked to + # other hcals + + self.insert_particle(block, self.reconstruct_cluster(hcal, 'hcal_in')) + + self.locked[hcalid] = True + + + def reconstruct_cluster(self, cluster, layer, energy = None, vertex = None): + '''construct a photon if it is an ecal + construct a neutral hadron if it is an hcal + ''' + if vertex is None: + vertex = TVector3() + pdg_id = None + if layer=='ecal_in': + pdg_id = 22 #photon + elif layer=='hcal_in': + pdg_id = 130 #K0 + else: + raise ValueError('layer must be equal to ecal_in or hcal_in') + assert(pdg_id) + mass, charge = particle_data[pdg_id] + if energy is None: + energy = cluster.energy + if energy < mass: + return None + if (mass==0): + momentum= energy #avoid sqrt for zero mass + else: + momentum = math.sqrt(energy**2 - mass**2) + p3 = cluster.position.Unit() * momentum + p4 = TLorentzVector(p3.Px(), p3.Py(), p3.Pz(), energy) #mass is not accurate here + particle = Particle(p4, vertex, charge, pdg_id, Identifier.PFOBJECTTYPE.RECPARTICLE) + path = StraightLine(p4, vertex) + path.points[layer] = cluster.position #alice: this may be a bit strange because we can make a photon with a path where the point is actually that of the hcal? + # nb this only is problem if the cluster and the assigned layer are different + particle.set_path(path) + particle.clusters[layer] = cluster # not sure about this either when hcal is used to make an ecal cluster? + self.locked[cluster.uniqueid] = True #just OK but not nice if hcal used to make ecal. 
+ pdebugger.info(str('Made {} from {}'.format(particle, cluster))) + return particle + + def reconstruct_track(self, track, clusters = None): # cluster argument does not ever seem to be used at present + '''construct a charged hadron from the track + ''' + vertex = track.path.points['vertex'] + pdg_id = 211 * track.charge + mass, charge = particle_data[pdg_id] + p4 = TLorentzVector() + p4.SetVectM(track.p3, mass) + particle = Particle(p4, vertex, charge, pdg_id, Identifier.PFOBJECTTYPE.RECPARTICLE) + particle.set_path(track.path) + particle.clusters = clusters + self.locked[track.uniqueid] = True + pdebugger.info(str('Made {} from {}'.format(particle, track))) + return particle + + + def __str__(self): + theStr = ['New Rec Particles:'] + theStr.extend( map(str, self.particles.itervalues())) + theStr.append('Unused:') + if len(self.unused)==0: + theStr.append('None') + else: + theStr.extend( map(str, self.unused)) + return '\n'.join( theStr ) diff --git a/PhysicsTools/HeppyCore/python/papas/pfalgo/test_block_reconstruction.py b/PhysicsTools/HeppyCore/python/papas/pfalgo/test_block_reconstruction.py new file mode 100644 index 0000000000000..7b54052e3e8ad --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/pfalgo/test_block_reconstruction.py @@ -0,0 +1,391 @@ +import unittest +from PhysicsTools.HeppyCore.papas.graphtools.DAG import Node, BreadthFirstSearchIterative,DAGFloodFill +from PhysicsTools.HeppyCore.papas.data.identifier import Identifier +from PhysicsTools.HeppyCore.papas.graphtools.edge import Edge +from PhysicsTools.HeppyCore.papas.pfalgo.pfblockbuilder import PFBlockBuilder +#from PhysicsTools.HeppyCore.papas.pfalgo.pfblockbuilder import BlockSplitter +from PhysicsTools.HeppyCore.papas.pfalgo.pfblock import PFBlock as realPFBlock + + +class Cluster(object): + ''' Simple Cluster class for test case + Contains a long uniqueid (created via Identifier class), a short id (used for distance) and a layer (ecal/hcal) + ''' + def __init__(self, id, layer): + ''' 
id is unique integer from 101-199 for ecal cluster + unique integer from 201-299 for hcal cluster + layer is ecal/hcal + ''' + if (layer == 'ecal_in'): + self.uniqueid = Identifier.make_id(Identifier.PFOBJECTTYPE.ECALCLUSTER) + elif (layer == 'hcal_in'): + self.uniqueid = Identifier.make_id(Identifier.PFOBJECTTYPE.HCALCLUSTER) + else: + assert false + self.layer = layer + self.id = id + self.energy=0 + + def __repr__(self): + return "cluster:" + str(self.id) + " :" + str(self.uniqueid) + +class Track(object): + ''' Simple Track class for test case + Contains a long uniqueid (created via Identifier class), a short id (used for distance) and a layer (tracker) + ''' + def __init__(self, id): + ''' id is unique integer from 1-99 + ''' + self.uniqueid = Identifier.make_id(Identifier.PFOBJECTTYPE.TRACK) + self.id = id + self.layer = 'tracker' + self.energy=0 + + def __repr__(self): + return "track:"+ str(self.id) + " :"+ str(self.uniqueid) + +class Particle(object): + ''' Simple Particle class for test case + Contains a long uniqueid (created via Identifier class), a short id (used for distance) and a ppdgid + ''' + def __init__(self, id, pdgid): + ''' id is unique integer from 301-399 + pdgid is particle id eg 22 for photon + ''' + self.uniqueid = Identifier.make_id(Identifier.PFOBJECTTYPE.PARTICLE) + #print "particle: ",self.uniqueid," ",id + self.pdgid = pdgid + self.id = id + #self.type = PFType.PARTICLE + + def __repr__(self): + return "particle:"+ str(self.id) + " :"+ str(self.uniqueid) +class ReconstructedParticle(Particle): + ''' Simple Particle class for test case + Contains a long uniqueid (created via Identifier class), a short id (used for distance) and a ppdgid + ''' + def __init__(self, id,pdgid): + ''' id is unique integer from 601-699 + pdgid is particle id eg 22 for photon + ''' + self.uniqueid = Identifier.make_id(Identifier.PFOBJECTTYPE.RECPARTICLE) + self.pdgid = pdgid + self.id = id + + def __repr__(self): + return "reconstructed particle:"+ 
str(self.id) + " :"+ str(self.uniqueid) + + + +class Event(object): + ''' Simple Event class for test case + Used to contains the tracks, clusters, particles + and also nodes describing history (which particle gave rise to which track) + and nodes describing links/distances between elements + ''' + def __init__(self, distance): + self.sim_particles = dict() #simulated particles + self.reconstructed_particles = dict() #reconstructed particles + self.ecal_clusters = dict() + self.hcal_clusters = dict() + self.tracks = dict() #tracks + self.history_nodes = dict() #Nodes used in simulation/reconstruction (contain uniqueid) + self.nodes = dict() #Contains links/ distances between nodes + self.blocks = dict() #Blocks to be made for use in reconstuction + self.ruler = distance + #self.get_object=GetObject(self) + + def get_object(self, uniqueid): + ''' given a uniqueid return the underlying obejct + ''' + type = Identifier.get_type(uniqueid) + if type == Identifier.PFOBJECTTYPE.TRACK: + return self.tracks[uniqueid] + elif type == Identifier.PFOBJECTTYPE.ECALCLUSTER: + return self.ecal_clusters[uniqueid] + elif type == Identifier.PFOBJECTTYPE.HCALCLUSTER: + return self.hcal_clusters[uniqueid] + elif type == Identifier.PFOBJECTTYPE.PARTICLE: + return self.sim_particles[uniqueid] + elif type == Identifier.PFOBJECTTYPE.RECPARTICLE: + return self.reconstructed_particles[uniqueid] + elif type == Identifier.PFOBJECTTYPE.BLOCK: + return self.blocks[uniqueid] + else: + assert(False) + +class Simulator(object): + ''' Simplified simulator for testing + The simulator sets up two charged hadrons and a photon + the clusters/tracks/particles contain a short id where + # 1- 99 is a track + # 101-199 is an ecal cluster + # 201-299 is an hcal cluster + # 301-399 is a simulated particle + # 401-499 is a block + # 601-699 is a reconstructed particle + the short ids are used to construct the distances between elements + The elements also have a long unique id which is created via + an 
Identifier class + ''' + def __init__(self,event): + self.event = event + + #add some clusters /tracks/ particles + self.add_ecal_cluster(101) + self.add_hcal_cluster(202) + self.add_ecal_cluster(102) + self.add_ecal_cluster(103) + self.add_track(1) + self.add_track(2) + self.add_particle(301,211) #charged hadron + self.add_particle(302,211) #charged hadron + self.add_particle(303,22) #photon + #define links between clusters/tracks and particles + self.add_link(self.UID(301),self.UID(101)) + self.add_link(self.UID(301),self.UID(1)) + self.add_link(self.UID(302),self.UID(2)) + self.add_link(self.UID(302),self.UID(102)) + self.add_link(self.UID(302),self.UID(202)) + self.add_link(self.UID(303),self.UID(103)) + + def add_ecal_cluster(self, id): + clust = Cluster(id,'ecal_in')# make a cluster + uniqueid = clust.uniqueid + self.event.ecal_clusters[uniqueid] = clust # add into the collection of clusters + self.event.history_nodes[uniqueid] = Node(uniqueid) #add into the collection of History Nodes + + def add_hcal_cluster(self, id): + clust = Cluster(id,'hcal_in') + uniqueid = clust.uniqueid + self.event.hcal_clusters[uniqueid] = clust + self.event.history_nodes[uniqueid] = Node(uniqueid) + + def add_track(self, id): + track = Track(id) + uniqueid = track.uniqueid + self.event.tracks[uniqueid] = track + self.event.history_nodes[uniqueid] = Node(uniqueid) + + def add_particle(self, id, pdgid): + particle = Particle(id,pdgid) + uniqueid = particle.uniqueid + self.event.sim_particles[uniqueid] = particle + self.event.history_nodes[uniqueid] = Node(uniqueid) + + def UID(self, id): #Takes the test case short id and find the unique id + ''' id is the short id of the element + this returns the corresponding long unique id + ''' + for h in self.event.history_nodes : + obj = self.event.get_object(h) + if hasattr(obj, "id"): + if obj.id == id : + return obj.uniqueid + return 0 + + def short_id(self, uniqueid): #Takes the unique id and finds corresponding short id + ''' uniqueid is 
the long unique id of the element + this returns the corresponding short integer id + ''' + for h in self.event.history_nodes : + obj = self.event.get_object(h) + if hasattr(obj, "id"): + if obj.uniqueid == uniqueid : + return obj.id + return 0 + + def add_link(self, uniqueid1, uniqueid2): + ''' create a parent child link in the history nodes between two elements + uniqueid1, uniqueid2 are the elements unique ids + ''' + self.event.history_nodes[uniqueid1].add_child(self.event.history_nodes[uniqueid2]) + + + +class Reconstructor(object): + ''' Simplified reconstructor class for testing + ''' + def __init__(self, event): + self.event = event + self.particlecounter = 600 #used to create reconstructed particle short ids + self.reconstruct_particles() + + def add_nodes(self, nodedict,values): + for e1 in values : + nodedict[e1.uniqueid] = Node(e1.uniqueid) + + def reconstruct_particles (self): + for block in self.event.blocks.itervalues(): + self.make_particles_from_block (block) + + def make_particles_from_block(self, block): + ''' Take a block and use simple rules to construct particles + ''' + #take a block and find its parents (clusters and tracks) + parents = block.element_uniqueids + + if (len(parents) == 1) & (Identifier.is_ecal(parents[0])): + #print "make photon" + self.make_photon(parents) + + elif ( (len(parents) == 2) & (block.count_ecal() == 1 ) & (block.count_tracks() == 1)): + #print "make hadron" + self.make_hadron(parents) + + elif ((len(parents) == 3) & (block.count_ecal() == 1) & (block.count_tracks() == 1) & (block.count_hcal() == 1)): + #print "make hadron and photon" + #probably not right but illustrates splitting of parents for more than one particle + hparents = [] # will contain parents for the Hadron which gets everything except the + #hcal which is used for the photom + for elem in parents: + if (Identifier.is_hcal(elem)): + self.make_photon({elem}) + else : + hparents.append(elem) + self.make_hadron(hparents) + + else : + pass + #print 
"particle TODO" + + def make_photon(self, parents): + return self.add_particle(self.new_id(), 22,parents) + + def make_hadron(self, parents): + return self.add_particle(self.new_id(), 211,parents) + + def add_particle(self, id, pdgid, parents): + ''' creates a new particle and then updates the + event to include the new node and its parental links + pdgid = is the particle type id eg 22 for photon + parents = list of the unique ids (from Identifier class) for the elements from + which the particle has been reconstructed + ''' + particle = ReconstructedParticle(id,pdgid) + self.event.reconstructed_particles[particle.uniqueid] = particle + #Now create the history node and links + particle_node = Node(particle.uniqueid) + self.event.history_nodes[particle.uniqueid] = particle_node + for parent in parents : + self.event.history_nodes[parent].add_child(particle_node) + + def new_id(self): + #new short id for the next reconstucted particle + id = self.particlecounter + self.particlecounter += 1 + return id + + +#class PFType(Enum): + #NONE = 0 + #TRACK = 1 + #ECAL = 2 + #HCAL = 4 + #PARTICLE = 8 + #REConstructedPARTICLE = 16 + +#class PFDISTANCEtype(Enum): + #NONE = 0 + #TRACK_TRACK = 2 + #TRACK_ECAL = 3 + #ECAL_ECAL = 4 + #TRACK_HCAL = 5 + #ECAL_HCAL = 6 + #HCAL_HCAL = 8 + +class DistanceItem(object): + '''Concrete distance calculator using an integer id to determine distance + ''' + def __call__(self, ele1, ele2): + '''ele1, ele2 two elements to find distance between + returns a tuple: + Link_type set to None for this test + True/False depending on the validity of the link + float the link distance + ''' + distance = abs(ele1.id%100 -ele2.id%100) + return None, distance == 0, distance + +#will be the ruler in the event class +distance = DistanceItem() + + +class TestBlockReconstruction(unittest.TestCase): + + + def test_1(self): + + event = Event(distance) + sim = Simulator(event) + event=sim.event + + pfblocker = PFBlockBuilder( event, distance, event.history_nodes) + 
+ event.blocks = pfblocker.blocks + #event.history_nodes = pfblocker.history_nodes + + + ##test block splitting + #blockids = [] + #unlink=[] + #for b in event.blocks.itervalues(): + #ids=b.element_uniqueids + #if len(ids)==3 : + #print ids[0], ids[2] + #unlink.append(b.edges[Edge.make_key(ids[0], ids[2])]) + #unlink.append(b.edges[Edge.make_key(ids[0], ids[1])]) + #print unlink + #splitter=BlockSplitter(b,unlink,event.history_nodes) + #print splitter.blocks + + #blocksplitter=BlockSplitter() + + rec = Reconstructor(event) + + + # What is connected to HCAL 202 node? + # (1) via history_nodes + # (2) via reconstructed node links + # (3) Give me all blocks with one track: + # (4) Give me all simulation particles attached to each reconstructed particle + nodeid = 202 + nodeuid = sim.UID(nodeid) + + #(1) what is connected to the the HCAL CLUSTER + ids = [] + BFS = BreadthFirstSearchIterative(event.history_nodes[nodeuid],"undirected") + for n in BFS.result : + ids.append(n.get_value()) + + #1b WHAT BLOCK Does it belong to + x = None + for id in ids: + if Identifier.is_block(id) and event.blocks[id].short_name()== "E1H1T1": + x = event.blocks[id] + + #1c #check that the block contains the expected list of suspects + pids = [] + for n in x.element_uniqueids: + pids.append(n) + ids = sorted(pids) + expected_ids = sorted([sim.UID(2), sim.UID(102),sim.UID(202)]) + self.assertEqual(ids,expected_ids ) + + #(2) use edge nodes to see what is connected + ids = [] + BFS = BreadthFirstSearchIterative(pfblocker.nodes[nodeuid],"undirected") + for n in BFS.result : + ids.append(n.get_value()) + expected_ids = sorted([sim.UID(2), sim.UID(102),sim.UID(202)]) + self.assertEqual(sorted(ids), expected_ids) + + #(3) Give me all simulation particles attached to each reconstructed particle + for rp in event.reconstructed_particles : + ids=[] + BFS = BreadthFirstSearchIterative(event.history_nodes[rp],"parents") + for n in BFS.result : + z=n.get_value() + +if __name__ == '__main__': + 
import unittest
import itertools
from distance import Distance
from links import Element
from PhysicsTools.HeppyCore.papas.pfobjects import Cluster, Track
from PhysicsTools.HeppyCore.papas.path import StraightLine

from ROOT import TVector3, TLorentzVector

ruler = Distance()


class TestDistance(unittest.TestCase):

    def test_layerfan(self):
        '''A track whose extrapolation points sit on the ecal and hcal
        clusters links to both clusters, but an element never links to
        itself when it is a track.'''
        c1 = Cluster(10, TVector3(1, 0, 0), 1., 'ecal_in')
        c2 = Cluster(20, TVector3(1, 0, 0), 1., 'hcal_in')
        p3 = c1.position.Unit() * 100.
        p4 = TLorentzVector()
        p4.SetVectM(p3, 1.)
        path = StraightLine(p4, TVector3(0, 0, 0))
        charge = 1.
        tr = Track(p3, charge, path)
        tr.path.points['ecal_in'] = c1.position
        tr.path.points['hcal_in'] = c2.position
        elems = [c1, c2, tr]
        for ele in elems:
            link_type, link_ok, distance = ruler(ele, ele)
            if ele != tr:
                self.assertTrue(link_ok)
            elif ele == tr:
                self.assertFalse(link_ok)
        # ecal-hcal no longer linked, to match c++; test adjusted accordingly
        link_type, link_ok, distance = ruler(c2, c1)
        self.assertFalse(link_ok)
        self.assertEqual(link_type, None)
        link_type, link_ok, distance = ruler(tr, c1)
        self.assertTrue(link_ok)
        link_type, link_ok, distance = ruler(tr, c2)
        self.assertTrue(link_ok)

    def test_ecal_hcal(self):
        '''Aligned ecal/hcal clusters do not link; a nearby rotated hcal
        cluster links to the hcal cluster with the rotation angle as
        distance.'''
        c1 = Cluster(10, TVector3(1, 0, 0), 4., 'ecal_in')
        c2 = Cluster(20, TVector3(1, 0, 0), 4., 'hcal_in')
        link_type, link_ok, distance = ruler(c1, c2)
        self.assertFalse(link_ok)  # adjusted to match cpp
        self.assertEqual(distance, None)
        pos3 = TVector3(c1.position)
        pos3.RotateZ(0.059)
        c3 = Cluster(30, pos3, 5, 'hcal_in')
        link_type, link_ok, distance = ruler(c2, c3)
        self.assertEqual(distance, 0.059)


if __name__ == '__main__':
    unittest.main()
import unittest
from floodfill import FloodFill


class Node(int):
    '''An int that carries links to other nodes.
    NOTE(review): calling super().__init__(*args) on an int subclass relies
    on Python 2 semantics; it raises TypeError under Python 3.'''

    def __init__(self, *args):
        self.linked = []
        self.block_label = None
        super(Node, self).__init__(*args)

    def accept(self, visitor):
        '''Visitor entry point (used by FloodFill): recurse into linked
        nodes that the visitor has not seen yet.'''
        notseen = visitor.visit(self)
        if notseen:
            for elem in self.linked:
                elem.accept(visitor)

    def __str__(self):
        return super(Node, self).__str__() + str(self.linked)


class Graph(object):
    '''Build an undirected graph of Nodes from a list of (e1, e2) edges;
    a pair with a falsy member registers a lone node.'''

    def __init__(self, edges):
        self.nodes = dict()
        for e1, e2 in edges:
            node1, node2 = None, None
            if e1:
                node1 = self.nodes.get(e1, False)
                if not node1:
                    node1 = Node(e1)
                    self.nodes[e1] = node1
            if e2:
                node2 = self.nodes.get(e2, False)
                if not node2:
                    node2 = Node(e2)
                    self.nodes[e2] = node2
            if node1 and node2:
                node1.linked.append(node2)
                node2.linked.append(node1)


class TestFloodFill(unittest.TestCase):

    def test_1(self):
        graph = Graph([(1, 2), (1, 3), (4, None)])
        floodfill = FloodFill(graph.nodes.values())
        self.assertEqual(floodfill.groups.keys(), [0, 1])
        self.assertEqual(floodfill.groups.values()[0], [1, 2, 3])
        self.assertEqual(floodfill.groups.values()[1], [4])

    def test_2(self):
        graph = Graph([(1, 2), (2, 3), (3, 4), (5, 6)])
        floodfill = FloodFill(graph.nodes.values())
        self.assertEqual(floodfill.groups.keys(), [0, 1])
        self.assertEqual(floodfill.groups.values()[0], [1, 2, 3, 4])
        self.assertEqual(floodfill.groups.values()[1], [5, 6])

    def test_regroup(self):
        # cut the 1-2 edge and refill: group 0 must split in two
        graph = Graph([(1, 2), (2, 3), (3, 4), (5, 6)])
        floodfill = FloodFill(graph.nodes.values())
        self.assertEqual(floodfill.groups.keys(), [0, 1])
        graph.nodes[1].linked.remove(graph.nodes[2])
        graph.nodes[2].linked.remove(graph.nodes[1])
        floodfill = FloodFill(floodfill.groups[0],
                              first_label=floodfill.label)
        self.assertEqual(floodfill.groups.keys(), [2, 3])
        self.assertEqual(floodfill.groups[2], [1])
        self.assertEqual(floodfill.groups[3], [2, 3, 4])


if __name__ == '__main__':
    unittest.main()
import unittest
from links import Links, Element


def distance(ele1, ele2):
    '''Toy ruler: elements link when they differ by less than 3.'''
    dist = abs(ele1 - ele2)
    return 'link_type', dist < 3., dist


class TestElement(Element):
    def __init__(self, val):
        self.val = val
        super(TestElement, self).__init__()

    def __repr__(self):
        return str(self.val)

    def __sub__(self, other):
        return self.val - other.val


class TestLinks(unittest.TestCase):

    def test_link_1(self):
        elements = map(TestElement, range(10))
        links = Links(elements, distance)
        distances = links.values()
        self.assertTrue(max(distances) == 2)
        self.assertEqual(elements[0].linked, [elements[1], elements[2]])
        self.assertEqual(links.info(elements[2], elements[4]), 2)
        self.assertIsNone(links.info(elements[2], elements[5]), None)


if __name__ == '__main__':
    unittest.main()
import unittest
import copy
from merger import merge_clusters
from PhysicsTools.HeppyCore.papas.pfobjects import Cluster
from ROOT import TVector3
class TestMerger(unittest.TestCase):

    def test_merge_pair(self):
        '''Two overlapping hcal clusters merge into one; the energy is the
        sum and the position the energy-weighted barycentre (equal energies
        here, hence the simple average).'''
        clusters = [Cluster(20, TVector3(1, 0, 0), 0.1, 'hcal_in'),
                    Cluster(20, TVector3(1., 0.05, 0.), 0.1, 'hcal_in')]
        merged_clusters = merge_clusters(clusters, 'hcal_in')
        self.assertEqual(len(merged_clusters), 1)
        self.assertEqual(merged_clusters[0].energy,
                         clusters[0].energy + clusters[1].energy)
        self.assertEqual(merged_clusters[0].position.X(),
                         (clusters[0].position.X() +
                          clusters[1].position.X()) / 2.)
        self.assertEqual(len(merged_clusters[0].subclusters), 2)
        self.assertEqual(merged_clusters[0].subclusters[0], clusters[0])
        self.assertEqual(merged_clusters[0].subclusters[1], clusters[1])

    def test_merge_pair_away(self):
        '''Clusters that are far apart are left untouched.'''
        clusters = [Cluster(20, TVector3(1, 0, 0), 0.04, 'hcal_in'),
                    Cluster(20, TVector3(1, 1.1, 0.0), 0.04, 'hcal_in')]
        merge_clusters(clusters, 'hcal_in')
        self.assertEqual(len(clusters), 2)
        self.assertEqual(len(clusters[0].subclusters), 1)
        self.assertEqual(len(clusters[1].subclusters), 1)

    def test_merge_different_layers(self):
        '''Clusters in different layers never merge.'''
        clusters = [Cluster(20, TVector3(1, 0, 0), 0.04, 'ecal_in'),
                    Cluster(20, TVector3(1, 0, 0), 0.04, 'hcal_in')]
        merge_clusters(clusters, 'hcal_in')
        self.assertEqual(len(clusters), 2)

    def test_inside(self):
        '''is_inside on a merged cluster tests the point against every
        subcluster circle.'''
        clusters = [Cluster(20, TVector3(1, 0, 0), 0.055, 'hcal_in'),
                    Cluster(20, TVector3(1., 0.1, 0.0), 0.055, 'hcal_in')]
        merged_clusters = merge_clusters(clusters, 'hcal_in')
        self.assertEqual(len(merged_clusters), 1)
        cluster = merged_clusters[0]
        self.assertEqual((True, 0.), cluster.is_inside(TVector3(1, 0, 0)))
        self.assertEqual((True, 0.), cluster.is_inside(TVector3(1, 0.1, 0)))
        in_the_middle = cluster.is_inside(TVector3(1, 0.06, 0))
        self.assertTrue(in_the_middle[0])
        self.assertAlmostEqual(in_the_middle[1], 0.04000)
        self.assertFalse(cluster.is_inside(TVector3(1, 0.156, 0))[0])


if __name__ == '__main__':
    unittest.main()
import unittest
from ROOT import TVector3
from PhysicsTools.HeppyCore.papas.pfobjects import Cluster, Particle
from pfinput import merge_clusters


class TestPFInput(unittest.TestCase):

    def test_merge_1(self):
        # layer argument needed for Identifier
        cluster1 = Cluster(10., TVector3(0., 1., 0.), 0.04, "ecal_in")
        cluster2 = Cluster(20., TVector3(0., 1., 0.99), 0.06, "ecal_in")
        merge_clusters([cluster1, cluster2])


if __name__ == '__main__':
    unittest.main()
import math
from PhysicsTools.HeppyCore.particles.tlv.particle import Particle as BaseParticle
from PhysicsTools.HeppyCore.utils.deltar import deltaR
from PhysicsTools.HeppyCore.papas.data.identifier import Identifier

# The angular size of a merged cluster is currently set by its first
# element; merging in a different order yields a different angular size.
# This needs to be fixed.


class PFObject(object):
    '''Base class for all particle flow objects (tracks, clusters, etc).
    Particle flow objects of different types can be linked together,
    forming graphs called "blocks".

    attributes:
    linked      : list of PFObjects linked to this one
    locked      : already used in the particle flow algorithm
    block_label : unique identifier of the block this PFObject belongs to
    '''

    def __init__(self, pfobjecttype=Identifier.PFOBJECTTYPE.NONE):
        super(PFObject, self).__init__()
        self.linked = []
        self.locked = False
        self.block_label = None
        self.uniqueid = Identifier.make_id(pfobjecttype)

    def accept(self, visitor):
        '''Called by visitors, such as FloodFill (see pfalgo.floodfill):
        recurse into linked objects not yet seen by the visitor.'''
        notseen = visitor.visit(self)
        if notseen:
            for elem in self.linked:
                elem.accept(visitor)

    def __repr__(self):
        return str(self)

    def info(self):
        # subclasses override with a one-line summary
        return ""

    def __str__(self):
        return '{classname}: {pretty:6}:{id}: {info}'.format(
            classname=self.__class__.__name__,
            pretty=Identifier.pretty(self.uniqueid),
            id=self.uniqueid,
            info=self.info())


class Cluster(PFObject):
    '''Calorimeter energy deposit (ecal or hcal).

    TODO:
    - not sure max_energy plays well with SmearedClusters
    - investigate the possibility to have only one class:
      put mother in Cluster, define the identifier outside; or stay as is,
      but do no work in the child SmearedCluster/MergedCluster classes
    '''

    # running maximum over all clusters (TODO: see note above)
    max_energy = 0.
+ if layer == 'ecal_in': + super(Cluster, self).__init__(Identifier.PFOBJECTTYPE.ECALCLUSTER) + elif layer == 'hcal_in': + super(Cluster, self).__init__(Identifier.PFOBJECTTYPE.HCALCLUSTER) + else : + assert False + self.position = position + self.set_energy(energy) + self.set_size(float(size_m)) + self.layer = layer + self.particle = particle + self.subclusters = [self] + # self.absorbed = [] + + def set_size(self, value): + self._size = value + try: + self._angularsize = math.atan(self._size / self.position.Mag()) + except: + import pdb; pdb.set_trace() + + def size(self): + return self._size + + def angular_size(self): + #angular_size is only properly specified for single (unmerged) clusters + return self._angularsize + + def is_inside_clusters(self, other): + '''TODO: no need for two versions of this method, see below. + one should have a single overlap method that always works, whether or not there are any + subclusters. + ''' + #see if two clusters overlap (allowing for merged clusters which contain subclusters) + #we have a link if any of the subclusters overlap + #the distance is the distance betewen the weighted centres of each (merged) cluster + + dist = deltaR(self.position.Theta(), + self.position.Phi(), + other.position.Theta(), + other.position.Phi()) + + for c in self.subclusters: + for o in other.subclusters: + is_link, innerdist = c.is_inside_cluster(o) + if is_link: + return True, dist + return False, dist + + + def is_inside_cluster(self, other): + '''TODO change name to "overlaps" ? 
''' + #now we have original unmerged clusters so we can compare directly to see if they overlap + dR = deltaR(self.position.Theta(), + self.position.Phi(), + other.position.Theta(), + other.position.Phi()) + link_ok = dR < self.angular_size() + other.angular_size() + return link_ok, dR + + + def is_inside(self, point): + """check if the point lies within the "size" circle of each of the subclusters""" + subdist = [] + for subc in self.subclusters: + dist = (subc.position - point).Mag() + if dist < subc.size(): + subdist.append(dist) + if len(subdist): + return True, min(subdist) + + subdists = [(subc.position - point).Mag() for subc in self.subclusters] + dist = min(subdists) + return False, dist + + #subdists = [ (subc.position - point).Mag() for subc in self.subclusters ] + #dist = min(subdists) + #if dist < self.size(): + #return True, dist + #else: + #return False, dist + + def __iadd__(self, other): + if other.layer != self.layer: + raise ValueError('can only add a cluster from the same layer') + position = self.position * self.energy + other.position * other.energy + energy = self.energy + other.energy + denom = 1/energy + position *= denom + self.position = position + self.energy = energy + assert len(other.subclusters) == 1 + self.subclusters.extend(other.subclusters) + + #todo recalculate the angular size + return self + + def set_energy(self, energy): + energy = float(energy) + self.energy = energy + if energy > self.__class__.max_energy: + self.__class__.max_energy = energy + self.pt = energy * self.position.Unit().Perp() + + # fancy but I prefer the other solution + # def __setattr__(self, name, value): + # if name == 'energy': + # self.pt = value * self.position.Unit().Perp() + # self.__dict__[name] = value + def info(self): + subclusterstr = str('sub(') + for s in self.subclusters: + subclusterstr += str('{:}, '.format(Identifier.pretty(s.uniqueid))) + subclusterstr += ")" + return '{energy:7.2f} {theta:5.2f} {phi:5.2f} {sub}'.format( + 
class SmearedCluster(Cluster):
    '''Cluster with smeared energy; `mother` is the original cluster.'''

    def __init__(self, mother, *args, **kwargs):
        self.mother = mother
        super(SmearedCluster, self).__init__(*args, **kwargs)


class MergedCluster(Cluster):
    '''Cluster built by merging other clusters; `mother` is the seed.'''

    def __init__(self, mother):
        self.mother = mother
        super(MergedCluster, self).__init__(mother.energy, mother.position,
                                            mother._size, mother.layer,
                                            mother.particle)
        self.subclusters = [mother]

    def __iadd__(self, other):
        '''TODO: why not using iadd from base class.
        Note it differs: `other` is appended as a single subcluster,
        whatever its own subcluster content, and there is no assertion.'''
        if other.layer != self.layer:
            raise ValueError('can only add a cluster from the same layer')
        position = self.position * self.energy + other.position * other.energy
        energy = self.energy + other.energy
        denom = 1 / energy
        position *= denom
        self.position = position
        self.energy = energy
        self.subclusters.extend([other])
        return self


class Track(PFObject):
    '''Determines the trajectory in space and time of a particle
    (charged or neutral).

    attributes:
    - p3     : momentum in 3D space (px, py, pz)
    - charge : particle charge
    - path   : trajectory parameters and points
    '''

    def __init__(self, p3, charge, path, particle=None):
        super(Track, self).__init__(Identifier.PFOBJECTTYPE.TRACK)
        self.p3 = p3
        self.pt = p3.Perp()
        self.energy = p3.Mag()  # TODO clarify energy and momentum
        self.charge = charge
        self.path = path
        self.particle = particle
        self.layer = 'tracker'

    def info(self):
        return '{e:7.2f} {pt:7.2f} {theta:5.2f} {phi:5.2f}'.format(
            pt=self.pt,
            e=self.energy,
            theta=math.pi/2. - self.p3.Theta(),
            phi=self.p3.Phi()
        )
class SmearedTrack(Track):
    '''Track with smeared momentum; `mother` is the original track and its
    path is reused.'''

    def __init__(self, mother, *args, **kwargs):
        self.mother = mother
        self.path = mother.path
        super(SmearedTrack, self).__init__(*args, **kwargs)


class Particle(BaseParticle):
    def __init__(self, tlv, vertex, charge,
                 pdgid=None,
                 ParticleType=Identifier.PFOBJECTTYPE.PARTICLE):
        super(Particle, self).__init__(pdgid, charge, tlv)
        self.uniqueid = Identifier.make_id(ParticleType)
        self.vertex = vertex
        self.path = None
        self.clusters = dict()
        # experiment to match cpp debug: Track(self.p3(), self.q(), self.path)
        self.track = None
        self.clusters_smeared = dict()
        self.track_smeared = None

    def __getattr__(self, name):
        # delegate `points` to the path; any other missing attribute is a
        # genuine error
        if name == 'points':
            return self.path.points
        else:
            raise AttributeError

    def is_em(self):
        '''True for electrons and photons.'''
        kind = abs(self.pdgid())
        return kind == 11 or kind == 22

    def set_path(self, path, option=None):
        '''Attach a path; 'w' overwrites an existing one. Charged particles
        also get a Track built from the path.'''
        if option == 'w' or self.path is None:
            self.path = path
            if self.q():
                self.track = Track(self.p3(), self.q(), self.path)

    def __str__(self):
        # insert the pretty unique id as the second ':'-separated field
        mainstr = super(Particle, self).__str__()
        idstr = '{pretty:6}:{id}'.format(
            pretty=Identifier.pretty(self.uniqueid),
            id=self.uniqueid)
        fields = mainstr.split(':')
        fields.insert(1, idstr)
        return ':'.join(fields)


if __name__ == '__main__':
    from ROOT import TVector3
    cluster = Cluster(10., TVector3(1, 0, 0), 1)  # default layer
    print(cluster.pt)
    cluster.set_energy(5.)
+ print cluster.pt diff --git a/PhysicsTools/HeppyCore/python/papas/propagator.py b/PhysicsTools/HeppyCore/python/papas/propagator.py new file mode 100644 index 0000000000000..b7005b6739bec --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/propagator.py @@ -0,0 +1,108 @@ +from vectors import Point +import math +import copy +from ROOT import TVector3 +from geotools import circle_intersection +from papas_exceptions import PropagationError +from path import Helix, StraightLine + +class Info(object): + pass + +class Propagator(object): + + def propagate(self, particles, cylinders, *args, **kwargs): + for ptc in particles: + for cyl in cylinders: + self.propagate_one(ptc, cyl, *args, **kwargs) + + +class StraightLinePropagator(Propagator): + + def propagate_one(self, particle, cylinder, dummy=None): + line = StraightLine(particle.p4(), particle.vertex) + particle.set_path( line ) # TODO + theta = line.udir.Theta() + if abs(line.origin.Z()) > cylinder.z or \ + line.origin.Perp() > cylinder.rad: + return # particle created outside the cylinder + if line.udir.Z(): + destz = cylinder.z if line.udir.Z() > 0. else -cylinder.z + length = (destz - line.origin.Z())/math.cos(theta) + if length < 0: + print 'HERE!!' + import pdb; pdb.set_trace() + raise PropagationError(particle) + destination = line.origin + line.udir * length + rdest = destination.Perp() + if rdest > cylinder.rad: + udirxy = TVector3(line.udir.X(), line.udir.Y(), 0.) + originxy = TVector3(line.origin.X(), line.origin.Y(), 0.) + # solve 2nd degree equation for intersection + # between the straight line and the cylinder + # in the xy plane to get k, + # the propagation length + a = udirxy.Mag2() + b= 2*udirxy.Dot(originxy) + c= originxy.Mag2()-cylinder.rad**2 + delta = b**2 - 4*a*c + if delta<0: + return + # raise PropagationError(particle) + km = (-b - math.sqrt(delta))/(2*a) + # positive propagation -> correct solution. 
+ kp = (-b + math.sqrt(delta))/(2*a) + # print delta, km, kp + destination = line.origin + line.udir * kp + #TODO deal with Z == 0 + #TODO deal with overlapping cylinders + particle.points[cylinder.name] = destination + + +class HelixPropagator(Propagator): + + def propagate_one(self, particle, cylinder, field, debug_info=None): + helix = Helix(field, particle.q(), particle.p4(), + particle.vertex) + particle.set_path(helix) + is_looper = helix.extreme_point_xy.Mag() < cylinder.rad + is_positive = particle.p4().Z() > 0. + if not is_looper: + try: + xm, ym, xp, yp = circle_intersection(helix.center_xy.X(), + helix.center_xy.Y(), + helix.rho, + cylinder.rad ) + except ValueError: + return + # raise PropagationError(particle) + # particle.points[cylinder.name+'_m'] = Point(xm,ym,0) + # particle.points[cylinder.name+'_p'] = Point(xp,yp,0) + phi_m = helix.phi(xm, ym) + phi_p = helix.phi(xp, yp) + dest_time = helix.time_at_phi(phi_p) + destination = helix.point_at_time(dest_time) + if destination.Z()*helix.udir.Z()<0.: + dest_time = helix.time_at_phi(phi_m) + destination = helix.point_at_time(dest_time) + if abs(destination.Z()) 0. 
else -cylinder.z + dest_time = helix.time_at_z(destz) + destination = helix.point_at_time(dest_time) + # destz = cylinder.z if positive else -cylinder.z + particle.points[cylinder.name] = destination + + + info = Info() + info.is_positive = is_positive + info.is_looper = is_looper + return info + +straight_line = StraightLinePropagator() + +helix = HelixPropagator() diff --git a/PhysicsTools/HeppyCore/python/papas/simulator.py b/PhysicsTools/HeppyCore/python/papas/simulator.py new file mode 100644 index 0000000000000..8e9d87e47c641 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/simulator.py @@ -0,0 +1,340 @@ +import sys +import copy +import shelve +from PhysicsTools.HeppyCore.papas.propagator import StraightLinePropagator, HelixPropagator +from PhysicsTools.HeppyCore.papas.pfobjects import Cluster, SmearedCluster, SmearedTrack +from PhysicsTools.HeppyCore.papas.pfobjects import Particle as PFSimParticle +from PhysicsTools.HeppyCore.papas.pfalgo.pfinput import PFInput + +import PhysicsTools.HeppyCore.papas.multiple_scattering as mscat +from PhysicsTools.HeppyCore.papas.papas_exceptions import SimulationError +from PhysicsTools.HeppyCore.utils.pdebug import pdebugger +import PhysicsTools.HeppyCore.statistics.rrandom as random + + + +def pfsimparticle(ptc): + '''Create a PFSimParticle from a particle. + The PFSimParticle will have the same p4, vertex, charge, pdg ID. 
class Simulator(object):
    '''Fast simulation driver: propagates generated particles through the
    detector and produces (smeared) tracks and clusters.'''

    def __init__(self, detector, logger=None):
        self.verbose = True
        self.detector = detector
        if logger is None:
            import logging
            logging.basicConfig(level='ERROR')
            logger = logging.getLogger('Simulator')
        self.logger = logger
        self.prop_helix = HelixPropagator()
        self.prop_straight = StraightLinePropagator()

    def write_ptcs(self, dbname):
        '''Persist the simulated particles to a shelve database.'''
        db = shelve.open(dbname)
        db['ptcs'] = self.ptcs
        db.close()

    def reset(self):
        '''Clear particles and the class-wide cluster energy maxima.'''
        self.particles = None
        self.ptcs = None
        Cluster.max_energy = 0.
        SmearedCluster.max_energy = 0.

    def propagator(self, ptc):
        '''Straight line for neutrals, helix for charged particles.'''
        is_neutral = abs(ptc.q()) < 0.5
        return self.prop_straight if is_neutral else self.prop_helix

    def propagate(self, ptc):
        '''propagate the particle to all detector cylinders'''
        self.propagator(ptc).propagate([ptc], self.detector.cylinders(),
                                       self.detector.elements['field'].magnitude)

    def make_cluster(self, ptc, detname, fraction=1., size=None):
        '''Add a cluster in the given detector, carrying the given fraction
        of the particle energy.'''
        detector = self.detector.elements[detname]
        self.propagator(ptc).propagate_one(ptc,
                                           detector.volume.inner,
                                           self.detector.elements['field'].magnitude)
        if size is None:
            size = detector.cluster_size(ptc)
        cylname = detector.volume.inner.name
        if not cylname in ptc.points:
            # TODO Colin particle was not extrapolated here...
            # issue must be solved!
            errormsg = '''
SimulationError : cannot make cluster for particle:
particle: {ptc}
with vertex rho={rho:5.2f}, z={zed:5.2f}
cannot be extrapolated to : {det}\n'''.format(ptc=ptc,
                                              rho=ptc.vertex.Perp(),
                                              zed=ptc.vertex.Z(),
                                              det=detector.volume.inner)
            self.logger.warning(errormsg)
            raise SimulationError('Particle not extrapolated to the detector, so cannot make a cluster there. No worries for now, problem will be solved :-)')
        cluster = Cluster(ptc.p4().E()*fraction, ptc.points[cylname], size, cylname, ptc)
        ptc.clusters[cylname] = cluster
        pdebugger.info(" ".join(("Made", cluster.__str__())))
        return cluster

    def smear_cluster(self, cluster, detector, accept=False, acceptance=None):
        '''Return a copy of the cluster with smeared energy.
        If accept is False (default), return None when the smeared cluster
        fails the acceptance (of `acceptance` when given, else of
        `detector`).'''
        eres = detector.energy_resolution(cluster.energy,
                                          cluster.position.Eta())
        response = detector.energy_response(cluster.energy,
                                            cluster.position.Eta())
        energy = cluster.energy * random.gauss(response, eres)
        smeared_cluster = SmearedCluster(cluster,
                                         energy,
                                         cluster.position,
                                         cluster.size(),
                                         cluster.layer,
                                         cluster.particle)
        pdebugger.info(str('Made {}'.format(smeared_cluster)))
        det = acceptance if acceptance else detector
        if det.acceptance(smeared_cluster) or accept:
            return smeared_cluster
        else:
            pdebugger.info(str('Rejected {}'.format(smeared_cluster)))
            return None
+ ptres = detector.pt_resolution(track) + scale_factor = random.gauss(1, ptres) + smeared_track = SmearedTrack(track, + track.p3 * scale_factor, + track.charge, + track.path) + pdebugger.info(" ".join(("Made", smeared_track.__str__()))) + if detector.acceptance(smeared_track) or accept: + return smeared_track + else: + pdebugger.info(str('Rejected {}'.format(smeared_track))) + return None + + def simulate_photon(self, ptc): + pdebugger.info("Simulating Photon") + detname = 'ecal' + ecal = self.detector.elements[detname] + self.prop_straight.propagate_one(ptc, + ecal.volume.inner) + + cluster = self.make_cluster(ptc, detname) + smeared = self.smear_cluster(cluster, ecal) + if smeared: + ptc.clusters_smeared[smeared.layer] = smeared + + + def simulate_electron(self, ptc): + pdebugger.info("Simulating Electron") + ecal = self.detector.elements['ecal'] + self.prop_helix.propagate_one(ptc, + ecal.volume.inner, + self.detector.elements['field'].magnitude) + cluster = self.make_cluster(ptc, 'ecal') + smeared_cluster = self.smear_cluster(cluster, ecal) + if smeared_cluster: + ptc.clusters_smeared[smeared_cluster.layer] = smeared_cluster + smeared_track = self.smear_track(ptc.track, + self.detector.elements['tracker']) + if smeared_track: + ptc.track_smeared = smeared_track + + + def simulate_neutrino(self, ptc): + self.propagate(ptc) + + def simulate_hadron(self, ptc): + '''Simulate a hadron, neutral or charged. + ptc should behave as pfobjects.Particle. + ''' + pdebugger.info("Simulating Hadron") + #implement beam pipe scattering + + ecal = self.detector.elements['ecal'] + hcal = self.detector.elements['hcal'] + beampipe = self.detector.elements['beampipe'] + frac_ecal = 0. 
+ + self.propagator(ptc).propagate_one(ptc, + beampipe.volume.inner, + self.detector.elements['field'].magnitude) + + self.propagator(ptc).propagate_one(ptc, + beampipe.volume.outer, + self.detector.elements['field'].magnitude) + + mscat.multiple_scattering(ptc, beampipe, self.detector.elements['field'].magnitude) + + #re-propagate after multiple scattering in the beam pipe + #indeed, multiple scattering is applied within the beam pipe, + #so the extrapolation points to the beam pipe entrance and exit + #change after multiple scattering. + self.propagator(ptc).propagate_one(ptc, + beampipe.volume.inner, + self.detector.elements['field'].magnitude) + self.propagator(ptc).propagate_one(ptc, + beampipe.volume.outer, + self.detector.elements['field'].magnitude) + self.propagator(ptc).propagate_one(ptc, + ecal.volume.inner, + self.detector.elements['field'].magnitude) + + # these lines moved earlier in order to match cpp logic + if ptc.q() != 0: + pdebugger.info(" ".join(("Made", ptc.track.__str__()))) + smeared_track = self.smear_track(ptc.track, + self.detector.elements['tracker']) + if smeared_track: + ptc.track_smeared = smeared_track + + if 'ecal_in' in ptc.path.points: + # doesn't have to be the case (long-lived particles) + path_length = ecal.material.path_length(ptc) + if path_length < sys.float_info.max: + # ecal path length can be infinite in case the ecal + # has lambda_I = 0 (fully transparent to hadrons) + time_ecal_inner = ptc.path.time_at_z(ptc.points['ecal_in'].Z()) + deltat = ptc.path.deltat(path_length) + time_decay = time_ecal_inner + deltat + point_decay = ptc.path.point_at_time(time_decay) + ptc.points['ecal_decay'] = point_decay + if ecal.volume.contains(point_decay): + frac_ecal = random.uniform(0., 0.7) + cluster = self.make_cluster(ptc, 'ecal', frac_ecal) + # For now, using the hcal resolution and acceptance + # for hadronic cluster + # in the ECAL. That's not a bug! 
+ smeared = self.smear_cluster(cluster, hcal, acceptance=ecal) + if smeared: + ptc.clusters_smeared[smeared.layer] = smeared + + cluster = self.make_cluster(ptc, 'hcal', 1-frac_ecal) + smeared = self.smear_cluster(cluster, hcal) + if smeared: + ptc.clusters_smeared[smeared.layer] = smeared + + def simulate_muon(self, ptc): + pdebugger.info("Simulating Muon") + self.propagate(ptc) + smeared_track = self.smear_track(ptc.track, + self.detector.elements['tracker']) + if smeared_track: + ptc.track_smeared = smeared_track + + def smear_muon(self, ptc): + pdebugger.info("Smearing Muon") + self.propagate(ptc) + if ptc.q() != 0: + pdebugger.info(" ".join(("Made", ptc.track.__str__()))) + smeared = copy.deepcopy(ptc) + return smeared + + def smear_electron(self, ptc): + pdebugger.info("Smearing Electron") + ecal = self.detector.elements['ecal'] + self.prop_helix.propagate_one(ptc, + ecal.volume.inner, + self.detector.elements['field'].magnitude) + if ptc.q() != 0: + pdebugger.info(" ".join(("Made", ptc.track.__str__()))) + smeared = copy.deepcopy(ptc) + return smeared + + def propagate_muon(self, ptc): + pdebugger.info("Propogate Muon") + self.propagate(ptc) + return + + def propagate_electron(self, ptc): + pdebugger.info("Propogate Electron") + ecal = self.detector.elements['ecal'] + self.prop_helix.propagate_one(ptc, + ecal.volume.inner, + self.detector.elements['field'].magnitude) + return + + def simulate(self, ptcs): + self.reset() + self.ptcs = [] + + #newsort + for gen_ptc in sorted(ptcs, key=lambda ptc: ptc.uniqueid): + pdebugger.info(str('{}'.format(gen_ptc))) + for gen_ptc in ptcs: + ptc = pfsimparticle(gen_ptc) + if ptc.pdgid() == 22: + self.simulate_photon(ptc) + elif abs(ptc.pdgid()) == 11: #check with colin + self.propagate_electron(ptc) + #smeared_ptc = self.smear_electron(ptc) + #smeared.append(smeared_ptc) + # self.simulate_electron(ptc) + elif abs(ptc.pdgid()) == 13: #check with colin + self.propagate_muon(ptc) + #smeared_ptc = self.smear_muon(ptc) + 
#smeared.append(smeared_ptc) + # self.simulate_muon(ptc) + elif abs(ptc.pdgid()) in [12, 14, 16]: + self.simulate_neutrino(ptc) + elif abs(ptc.pdgid()) > 100: #TODO make sure this is ok + if ptc.q() and ptc.pt() < 0.2: + # to avoid numerical problems in propagation + continue + self.simulate_hadron(ptc) + self.ptcs.append(ptc) + self.pfinput = PFInput(self.ptcs) #collect up tracks, clusters etc ready for merging/reconstruction_muon(otc) + +if __name__ == '__main__': + + import math + import logging + from detectors.CMS import cms + from toyevents import particle + from PhysicsTools.HeppyCore.display.core import Display + from PhysicsTools.HeppyCore.display.geometry import GDetector + from PhysicsTools.HeppyCore.display.pfobjects import GTrajectories + + display_on = True + detector = cms + + logging.basicConfig(level='WARNING') + logger = logging.getLogger('Simulator') + logger.addHandler(logging.StreamHandler(sys.stdout)) + + for i in range(1): + if not i%100: + print i + simulator = Simulator(detector, logger) + # particles = monojet([211, -211, 130, 22, 22, 22], math.pi/2., math.pi/2., 2, 50) + particles = [ + # particle(211, math.pi/2., math.pi/2., 100), + particle(211, math.pi/2 + 0.5, 0., 40.), + # particle(130, math.pi/2., math.pi/2.+0., 100.), + # particle(22, math.pi/2., math.pi/2.+0.0, 10.) 
+ ] + simulator.simulate(particles) + + if display_on: + display = Display(['xy', 'yz', + 'ECAL_thetaphi', + 'HCAL_thetaphi' + ]) + gdetector = GDetector(detector) + display.register(gdetector, 0) + gtrajectories = GTrajectories(simulator.ptcs) + display.register(gtrajectories, 1) + display.draw() + diff --git a/PhysicsTools/HeppyCore/python/papas/test_cluster.py b/PhysicsTools/HeppyCore/python/papas/test_cluster.py new file mode 100644 index 0000000000000..9c3bd71950bdb --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/test_cluster.py @@ -0,0 +1,138 @@ +import unittest +from pfobjects import Cluster, SmearedCluster +from detectors.CMS import cms +from simulator import Simulator +from ROOT import TVector3 +import math +import numpy as np +from ROOT import TFile, TH1F, TH2F + +simulator = Simulator(cms) + +class TestCluster(unittest.TestCase): + + def test_pt(self): + '''Test that pT is correctly set.''' + cluster = Cluster(10., TVector3(1,0,0), 1) #alice made this use default layer + self.assertAlmostEqual(cluster.pt, 10.) + cluster.set_energy(5.) + self.assertAlmostEqual(cluster.pt, 5.) + + def test_smear(self): + rootfile = TFile('test_cluster_smear.root', 'recreate') + h_e = TH1F('h_e','cluster energy', 200, 5, 15.) + energy = 10. 
+ cluster = Cluster(energy, TVector3(1,0,0), 1) #alice made this use default layer + ecal = cms.elements['ecal'] + energies = [] + for i in range(10000): + smeared = simulator.smear_cluster(cluster, ecal, accept=True) + h_e.Fill(smeared.energy) + energies.append(smeared.energy) + npe = np.array(energies) + mean = np.mean(npe) + rms = np.std(npe) + eres = ecal.energy_resolution(cluster.energy) + self.assertAlmostEqual(mean, energy, places=1) + self.assertAlmostEqual(rms, eres*energy, places=1) + rootfile.Write() + rootfile.Close() + + def test_acceptance(self): + rootfile = TFile('test_cluster_acceptance.root', 'recreate') + h_evseta = TH2F('h_evseta','cluster energy vs eta', + 100, -5, 5, 100, 0, 15) + h_ptvseta = TH2F('h_ptvseta','cluster pt vs eta', + 100, -5, 5, 100, 0, 15) + nclust = 1000. + # making 1000 deposits between 0 and 10 GeV + energies = np.random.uniform(0., 10., nclust) + # theta between 0 and pi + thetas = np.random.uniform(0, math.pi, nclust) + costhetas = np.cos(thetas) + sinthetas = np.sin(thetas) + clusters = [] + for energy, cos, sin in zip(energies, costhetas, sinthetas): + clusters.append(Cluster(energy, TVector3(sin,0,cos), 1)) #alice made this use default layer + ecal = cms.elements['ecal'] + smeared_clusters = [] + min_energy = -999. 
+ for cluster in clusters: + smeared_cluster = simulator.smear_cluster(cluster, ecal) + if smeared_cluster: + h_evseta.Fill(smeared_cluster.position.Eta(), + smeared_cluster.energy) + h_ptvseta.Fill(smeared_cluster.position.Eta(), + smeared_cluster.pt) + smeared_clusters.append(smeared_cluster) + if smeared_cluster.energy > min_energy: + min_energy = smeared_cluster.energy + # test that some clusters have been rejected + # (not passing the acceptance) + self.assertGreater(len(clusters), len(smeared_clusters)) + # test that the minimum cluster energy is larger than the + # minimum ecal threshold + ecal_min_thresh = min(ecal.emin.values()) + self.assertGreater(min_energy, ecal_min_thresh) + rootfile.Write() + rootfile.Close() + + # def test_absorption(self): + # energies = [10, 20, 30, 40] + # e1, e2, e3, e4 = energies + # dists = [-0.01, 0.089, 0.11, 0.16] + # sizes = [0.04, 0.06, 0.06, 0.06] + # def make_clusters(proj='z'): + # clusters = [] + # for i, energy in enumerate(energies): + # # moving along z, at phi=0. + # x, y, z = 1, 0, dists[i] + # if proj=='x': + # #moving along x, around phi = pi/2. + # x, y, z = dists[i], 1, 0. + # elif proj=='y': + # #moving along y, around phi=0. testing periodic condition. 
+ # x, y, z = 1, dists[i], 0 + # position = TVector3(x, y, z) + # clusters.append( Cluster( energy, + # position, + # sizes[i], + # 0 )) + # return clusters + # def test(proj): + # # test simple absorption between two single clusters + # c1, c2, c3, c4 = make_clusters(proj) + # print c1.position.X(), c1.position.Y(), c1.position.Z() + # print c2.position.X(), c2.position.Y(), c2.position.Z() + # c1.absorb(c2) + # self.assertEqual(len(c1.absorbed), 1) + # self.assertEqual(len(c2.absorbed), 0) + # self.assertEqual(c1.absorbed[0], c2) + # self.assertEqual(c1.energy, e1+e2) + # # testing absorption of an additional cluster by a compound cluster + # c1.absorb(c3) + # self.assertEqual(len(c1.absorbed), 2) + # self.assertEqual(c1.energy, e1+e2+e3) + # c1, c2, c3, c4 = make_clusters(proj) + # # testing impossible absorption, cause the 2 clusters are too far + # code = c1.absorb(c3) + # self.assertFalse(code) + # self.assertEqual(len(c1.absorbed), 0) + # self.assertEqual(len(c2.absorbed), 0) + # self.assertEqual(c1.energy, e1) + # c1, c2, c3, c4 = make_clusters(proj) + # # testing absorption between two compound clusters + # c1.absorb(c2) + # self.assertEqual(c1.energy, e1+e2) + # c3.absorb(c4) + # self.assertEqual(c3.energy, e3+e4) + # c1.absorb(c3) + # self.assertEqual(len(c1.absorbed), 3) + # self.assertEqual(c1.energy, e1+e2+e3+e4) + # test('z') + # test('y') + # test('x') + +if __name__ == '__main__': + unittest.main() + diff --git a/PhysicsTools/HeppyCore/python/papas/test_propagator.py b/PhysicsTools/HeppyCore/python/papas/test_propagator.py new file mode 100644 index 0000000000000..9eba52365122c --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/test_propagator.py @@ -0,0 +1,85 @@ +import unittest +from detectors.geometry import SurfaceCylinder +from pfobjects import Particle +from propagator import straight_line, helix +from vectors import LorentzVector, Point + +class TestPropagator(unittest.TestCase): + + def test_straightline(self): + origin = Point(0,0,0) 
+ cyl1 = SurfaceCylinder('cyl1', 1, 2) + cyl2 = SurfaceCylinder('cyl2', 2, 1) + + particle = Particle( LorentzVector(1, 0, 1, 2.), origin, 0) + straight_line.propagate_one( particle, cyl1 ) + straight_line.propagate_one( particle, cyl2 ) + self.assertEqual( len(particle.points), 3) + # test extrapolation to barrel + self.assertAlmostEqual( particle.points['cyl1'].Perp(), 1. ) + self.assertAlmostEqual( particle.points['cyl1'].Z(), 1. ) + # test extrapolation to endcap + self.assertAlmostEqual( particle.points['cyl2'].Z(), 1. ) + + # testing extrapolation to -z + particle = Particle( LorentzVector(1, 0, -1, 2.), origin, 0) + # import pdb; pdb.set_trace() + straight_line.propagate_one( particle, cyl1 ) + straight_line.propagate_one( particle, cyl2 ) + self.assertEqual( len(particle.points), 3) + self.assertAlmostEqual( particle.points['cyl1'].Perp(), 1. ) + # test extrapolation to endcap + self.assertAlmostEqual( particle.points['cyl1'].Z(), -1. ) + self.assertAlmostEqual( particle.points['cyl2'].Z(), -1. ) + + # extrapolating from a vertex close to +endcap + particle = Particle( LorentzVector(1, 0, 1, 2.), + Point(0,0,1.5), 0) + straight_line.propagate_one( particle, cyl1 ) + self.assertAlmostEqual( particle.points['cyl1'].Perp(), 0.5 ) + + # extrapolating from a vertex close to -endcap + particle = Particle( LorentzVector(1, 0, -1, 2.), + Point(0,0,-1.5), 0) + straight_line.propagate_one( particle, cyl1 ) + self.assertAlmostEqual( particle.points['cyl1'].Perp(), 0.5 ) + + # extrapolating from a non-zero radius + particle = Particle( LorentzVector(0, 0.5, 1, 2.), + Point(0,0.5,0), 0) + straight_line.propagate_one( particle, cyl1 ) + self.assertAlmostEqual( particle.points['cyl1'].Perp(), 1. ) + self.assertAlmostEqual( particle.points['cyl1'].Z(), 1. 
) + + # extrapolating from a z outside the cylinder + particle = Particle( LorentzVector(0, 0, -1, 2.), + Point(0,0,2.5), 0) + straight_line.propagate_one( particle, cyl1 ) + self.assertFalse( 'cyl1' in particle.points ) + + # extrapolating from a z outside the cylinder, negative + particle = Particle( LorentzVector(0, 0, -1, 2.), + Point(0,0,-2.5), 0) + straight_line.propagate_one( particle, cyl1 ) + self.assertFalse( 'cyl1' in particle.points ) + + # extrapolating from a rho outside the cylinder + particle = Particle( LorentzVector(0, 0, -1, 2.), + Point(0,1.1,0), 0) + straight_line.propagate_one( particle, cyl1 ) + self.assertFalse( 'cyl1' in particle.points ) + + def test_helix(self): + cyl1 = SurfaceCylinder('cyl1', 1., 2.) + cyl2 = SurfaceCylinder('cyl2', 2., 1.) + field = 3.8 + particle = Particle( LorentzVector(2., 0, 1, 5), + Point(0., 0., 0.), -1) + debug_info = helix.propagate_one(particle, cyl1, field) + particle = Particle( LorentzVector(0., 2, 1, 5), + Point(0., 0., 0.), -1) + debug_info = helix.propagate_one(particle, cyl1, field) + + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/papas/toyevents.py b/PhysicsTools/HeppyCore/python/papas/toyevents.py new file mode 100644 index 0000000000000..bbe8f25690901 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/papas/toyevents.py @@ -0,0 +1,99 @@ +import PhysicsTools.HeppyCore.statistics.rrandom as random +#TODO get rid of vectors +from vectors import * +from ROOT import TLorentzVector +import math + +from pfobjects import Particle +from pdt import particle_data + + +def particles(nptcs, pdgid, thetamin, thetamax, emin, emax, vertex=None ): + ngenerated = 0 + mass, charge = particle_data[pdgid] + while ngenerated papas : m + self._ctau = fccobj.Ctau() + diff --git a/PhysicsTools/HeppyCore/python/particles/genbrowser.py b/PhysicsTools/HeppyCore/python/particles/genbrowser.py new file mode 100644 index 0000000000000..a80b947ec000e --- /dev/null +++ 
b/PhysicsTools/HeppyCore/python/particles/genbrowser.py @@ -0,0 +1,75 @@ +import copy + +class GenBrowser(object): + """Browser for gen particle history.""" + + def __init__(self, particles, vertices): + """ + parameters: + - particles: a list of gen particles + + the particles must have a start_vertex and an end_vertex + attribute, set to None if the vertex doesn't exist. + + After calling this constructor, two lists are added to each + particle: + - daughters: list of direct daugthers + - mothers: list of direct mothers + """ + self.vertices = dict() + for v in vertices: + self.vertices[v] = v + self.particles = particles + for ptc in particles: + ptc.daughters = [] + ptc.mothers = [] + start = ptc.start_vertex() + if start: + vertex = self.vertices.get(start, None) + if vertex: + vertex.outgoing.append(ptc) + else: + raise ValueError('vertex not found!') + end = ptc.end_vertex() + if end: + vertex = self.vertices.get(end, None) + if vertex: + vertex.incoming.append(ptc) + else: + raise ValueError('vertex not found!') + + # now the lists of incoming and outgoing particles is + # complete for each vertex + # setting the list of daughters and mothers for each particle + for vtx in self.vertices: + # print vtx, id(vtx),'-'*50 + # print 'incoming' + for ptc in vtx.incoming: + # print ptc + ptc.daughters = vtx.outgoing + # print 'outgoing' + for ptc in vtx.outgoing: + # print ptc + ptc.mothers = vtx.incoming + + def ancestors(self, particle): + """Returns the list of ancestors for a given particle, + that is mothers, grandmothers, etc.""" + result = [] + for mother in particle.mothers: + result.append(mother) + result.extend(self.ancestors(mother)) + return result + + def descendants(self, particle): + """Returns the list of descendants for a given particle, + that is daughters, granddaughters, etc.""" + result = [] + for daughter in particle.daughters: + result.append(daughter) + result.extend(self.descendants(daughter)) + return result + + + + diff --git 
a/PhysicsTools/HeppyCore/python/particles/handle.py b/PhysicsTools/HeppyCore/python/particles/handle.py new file mode 100644 index 0000000000000..7c98c63c96ad9 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/handle.py @@ -0,0 +1,34 @@ +import copy + +class Handle(object): + '''Extends the Handle functionalities. + + This class wraps a C++ Handle. + The user can call all functions of the C++ Handle, + and can also attach new attributes to objects from this class. + ''' + + def __init__(self, handle): + self.handle = handle + super(Handle, self).__init__() + + def __copy__(self): + '''Very dirty trick, the handle is deepcopied...''' + handle = copy.deepcopy( self.handle ) + newone = type(self)(handle) + newone.__dict__.update(self.__dict__) + newone.handle = handle + return newone + + def __getattr__(self,name): + '''all accessors from cmg::DiTau are transferred to this class.''' + return getattr(self.handle, name) + + def __eq__(self,other): + if( hasattr(other, 'handle') ): + # the two python Handles have the same C++ Handle + return self.handle == other.handle + else: + # can compare a python Handle with a cpp Handle directly + return self.handle == other + diff --git a/PhysicsTools/HeppyCore/python/particles/isolation.py b/PhysicsTools/HeppyCore/python/particles/isolation.py new file mode 100644 index 0000000000000..e7ce888332272 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/isolation.py @@ -0,0 +1,129 @@ +from PhysicsTools.HeppyCore.utils.deltar import deltaR2 + +class Area(object): + '''Base Area interface.''' + def is_inside(self, *args): + '''returns True if *args describes a particle inside the EtaPhiCircle. + + *args may be the particle itself, assuming it has eta() and phi() methods, + or eta, phi. + ''' + pass + +class EtaPhiCircle(Area): + '''Circle in (eta, phi) space. + When running on a lepton collider, eta is replaced by theta. 
+ ''' + def __init__(self, R): + '''Create a circle of radius R''' + self.R = R + self._R2 = R**2 + + def is_inside(self, *args): + dR2 = deltaR2(*args) + return dR2 < self._R2 + + +class IsolationInfo(object): + '''Holds the results of an isolation calculation.''' + def __init__(self, label, lepton): + '''Create an IsolationInfo. + + Attributes: + lepton = the lepton + particles = list of particles around the lepton used in the calculation. + the following quantities are computed for these particles + sumpt = total pT for the particles + sume = total energy for the particles + num = total number of particles + ''' + self.particles = [] + self.label = label + self.lepton = lepton + self.sumpt = 0 + self.sume = 0 + self.num = 0 + + def add_particle(self, ptc): + '''Add a new particle and update counters.''' + self.particles.append(ptc) + self.sumpt += ptc.pt() + self.sume += ptc.e() + self.num += 1 + + def __iadd__(self, other): + self.particles.extend(other.particles) + self.sumpt += other.sumpt + self.sume += other.sume + self.num += other.num + return self + + def __str__(self): + return 'iso {label:>3}: sumpt = {sumpt:5.2f}, sume = {sume:5.2f}, num = {num}'.format( + label = self.label, + sumpt = self.sumpt, + sume = self.sume, + num = self.num + ) + + + +class IsolationComputer(object): + '''Computes isolation for a given lepton.''' + + def __init__(self, on_areas, off_areas=None, + pt_thresh=0, e_thresh=0, label=''): + '''Creates the isolation computer. + + Particles around the lepton are considered in the isolation if: + - they pass both thresholds: + pt_thresh : pt threshold + e_thresh : energy threshold + + - they are in an active area around the lepton + areas should + + on_areas and off_areas are lists of areas in which particles + around the should be considered + or ignored, respectively. 
+ for a given particle + + ''' + + self.on_areas = on_areas + if off_areas is None: + off_areas = [] + self.off_areas = off_areas + self.pt_thresh = pt_thresh + self.e_thresh = e_thresh + self.label = label + + + def compute(self, lepton, particles): + '''Compute the isolation for lepton, using particles. + returns an IsolationInfo. + ''' + isolation = IsolationInfo(self.label, lepton) + for ptc in particles: + if ptc is lepton: + continue + if ptc.e()100: + if ptc.q(): + return 211 + else: + return 130 + else: + return pdgid + +class JetComponent(list): + + def __init__(self, pdgid): + super(JetComponent, self).__init__() + self._e = 0 + self._pt = 0 + self._num = 0 + self._pdgid = pdgid + + def pdgid(self): + return self._pdgid + + def e(self): + return self._e + + def pt(self): + return self._pt + + def num(self): + return self._num + + def append(self, ptc): + pdgid = group_pdgid(ptc) + if self._pdgid is None: + self._pdgid = pdgid + elif pdgid!=self._pdgid: + raise ValueError('cannot add particles of different type to a component') + super(JetComponent, self).append(ptc) + self._e += ptc.e() + self._pt += ptc.pt() + self._num += 1 + + def __str__(self): + header = '\t\tpdgid={pdgid}, n={num:d}, e={e:3.1f}, pt={pt:3.1f}'.format( + pdgid = self.pdgid(), + num = self.num(), + e = self.e(), + pt = self.pt() + ) + ptcs = [] + for ptc in self: + ptcs.append('\t\t\t{particle}'.format(particle=str(ptc))) + result = [header] + result.extend(ptcs) + return '\n'.join(result) + + +class JetConstituents(dict): + + def __init__(self): + super(JetConstituents, self).__init__() + all_pdgids = [211, 22, 130, 11, 13, + 1, 2 #HF had and em + ] + for pdgid in all_pdgids: + self[pdgid] = JetComponent(pdgid) + + def validate(self, jet_energy, tolerance = 1e-2): + '''Calls pdb if total component energy != jet energy''' + tote = sum([comp.e() for comp in self.values()]) + if abs(jet_energy-tote)>tolerance: + import pdb; pdb.set_trace() + + def append(self, ptc): + pdgid = 
group_pdgid(ptc) + try: + self[pdgid].append(ptc) + except KeyError: + import pdb; pdb.set_trace() + + def sort(self): + for ptcs in self.values(): + ptcs.sort(key = lambda ptc: ptc.e(), reverse=True) + + def __str__(self): + return '\n'.join(map(str, self.values())) + + +class JetTags(dict): + + def summary(self): + tagstrs = [] + for name, val in sorted(self.iteritems()): + valstr = '..' + if hasattr(val, 'summary'): + valstr = val.summary() + elif isinstance(val, int): + valstr = '{val:d}'.format(val=val) + else: + try: + valstr = '{val:2.1f}'.format(val=val) + except: + pass + tagstr = '{name}:{val}'.format(name=name, val=valstr) + tagstrs.append(tagstr) + return ', '.join(tagstrs) + + +class Jet(P4): + + def __init__(self, *args, **kwargs): + super(Jet, self).__init__(*args, **kwargs) + self.constituents = None + self.tags = JetTags() + + def pdgid(self): + return 0 + + def q(self): + return 0 + + def __str__(self): + tmp = '{className} : {p4}, tags={tags}' + return tmp.format( + className = self.__class__.__name__, + p4 = super(Jet, self).__str__(), + tags = self.tags.summary() + ) + + def __repr__(self): + return str(self) diff --git a/PhysicsTools/HeppyCore/python/particles/met.py b/PhysicsTools/HeppyCore/python/particles/met.py new file mode 100644 index 0000000000000..ee314b366b543 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/met.py @@ -0,0 +1,27 @@ +from p4 import P4 + +class MET(P4): + '''Interface for MET. + Make sure your code satisfies this interface. 
+ Specializations in cms, fcc, and tlv packages + ''' + def __init__(self, *args, **kwargs): + super(MET, self).__init__(*args, **kwargs) + + def sum_et(self): + '''scalar sum of transverse energy''' + return self._sum_et + + def q(self): + '''particle charge''' + return self._charge + + def __str__(self): + tmp = '{className} : met = {met:5.1f}, phi = {phi:2.1f}, sum_et = {sum_et:5.1f}' + return tmp.format( + className = self.__class__.__name__, + met = self.pt(), + phi = self.phi(), + sum_et = self.sum_et() + ) + diff --git a/PhysicsTools/HeppyCore/python/particles/p4.py b/PhysicsTools/HeppyCore/python/particles/p4.py new file mode 100644 index 0000000000000..ab57e0cea969c --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/p4.py @@ -0,0 +1,84 @@ +import math +from PhysicsTools.HeppyCore.configuration import Collider + +from functools import total_ordering + +class P4(object): + + def __init__(self, *args, **kwargs): + super(P4, self).__init__(*args, **kwargs) + + def p4(self): + '''4-momentum, px, py, pz, E''' + return self._tlv + + def p3(self): + '''3-momentum px, py, pz''' + return self._tlv.Vect() + + def e(self): + '''energy''' + return self._tlv.E() + + def pt(self): + '''transverse momentum (magnitude of p3 in transverse plane)''' + return self._tlv.Pt() + + def theta(self): + '''angle w/r to transverse plane''' + return math.pi/2 - self._tlv.Theta() + + def eta(self): + '''pseudo-rapidity (-ln(tan self._tlv.Theta()/2)). 
+ theta = 0 -> eta = +inf + theta = pi/2 -> 0 + theta = pi -> eta = -inf + ''' + if self._tlv.Pt()<1e-9: + if self._tlv.Pz()>0.: + return float('inf') + else: + return -float('inf') + else: + return self._tlv.Eta() + + def phi(self): + '''azymuthal angle (from x axis, in the transverse plane)''' + return self._tlv.Phi() + + def m(self): + '''mass''' + return self._tlv.M() + + + def sort_key(self): + if Collider.BEAMS == 'ee': + return self.e() + else: + return self.pt() + + def __gt__(self, other): + '''sorting by pT or energy depending on Collider.BEAMS''' + return self.sort_key() > other.sort_key() + + def __lt__(self, other): + '''sorting by pT or energy depending on Collider.BEAMS''' + return self.sort_key() < other.sort_key() + + def __str__(self): + if Collider.BEAMS == 'pp': + return 'pt = {pt:5.1f}, e = {e:5.1f}, eta = {eta:5.2f}, phi = {phi:5.2f}, mass = {m:5.2f}'.format( + pt = self.pt(), + e = self.e(), + eta = self.eta(), + phi = self.phi(), + m = self.m() + ) + elif Collider.BEAMS == 'ee': + return 'e = {e:5.1f}, theta = {theta:5.2f}, phi = {phi:5.2f}, mass = {m:5.2f}'.format( + e = self.e(), + eta = self.eta(), + theta = self.theta(), + phi = self.phi(), + m = self.m() + ) diff --git a/PhysicsTools/HeppyCore/python/particles/particle.py b/PhysicsTools/HeppyCore/python/particles/particle.py new file mode 100644 index 0000000000000..52cdfca356e9a --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/particle.py @@ -0,0 +1,48 @@ +import copy + +from p4 import P4 + +class Particle(P4): + '''Interface for particles. + Make sure your code satisfies this interface. + Specializations in cms, fcc, and tlv packages + ''' + def __init__(self, *args, **kwargs): + super(Particle, self).__init__(*args, **kwargs) + + def pdgid(self): + '''particle type''' + return self._pid + + def q(self): + '''particle charge''' + return self._charge + + def status(self): + '''status code, e.g. from generator. 
1:stable.''' + return self._status + + def start_vertex(self): + '''start vertex (3d point)''' + return self._start_vertex + + def end_vertex(self): + '''end vertex (3d point)''' + return self._end_vertex + + def __repr__(self): + return str(self) + + def __str__(self): + tmp = '{className} : pdgid = {pdgid:5}, status = {status:3}, q = {q:2} {p4}' + return tmp.format( + className = self.__class__.__name__, + pdgid = self.pdgid(), + status = self.status(), + q = self.q(), + p4 = super(Particle, self).__str__() + ) + + def __repr__(self): + return str(self) + diff --git a/PhysicsTools/HeppyCore/python/particles/pdgcodes.py b/PhysicsTools/HeppyCore/python/particles/pdgcodes.py new file mode 100644 index 0000000000000..4abcf19fa3e36 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/pdgcodes.py @@ -0,0 +1,78 @@ +'''Most tools taken from Rivet + +Enumerator: + nj + nq3 + nq2 + nq1 + nl + nr + n + n8 + n9 + n10 +''' + +nj, nq3, nq2, nq1, nl, nr, n, n8, n9, n10 = range(1, 11) + +def extraBits(pid): + return abs(pid)/10000000 + +def digit(loc, pid): + '''returns the digit at the given location in the pid + // PID digits (base 10) are: n nr nl nq1 nq2 nq3 nj + // the location enum provides a convenient index into the PID + int numerator = (int) std::pow(10.0,(loc-1)); + return (abspid(pid)/numerator)%10; + ''' + # if loc==0 or loc > len(str(pid)): + # raise ValueError('wrong location for pid') + numerator = int( pow(10, loc-1) ) + return (abs(pid)/numerator)%10 + +def fundamentalId(pid): + '''extract fundamental id if this is a fundamental particle + + In Rivet: + + { + if( extraBits(pid) > 0 ) return 0; + if( digit(nq2,pid) == 0 && digit(nq1,pid) == 0) { + return abspid(pid)%10000; + } else if( abspid(pid) <= 100 ) { + return abspid(pid); + } else { + return 0; + } + } + ''' + if extraBits(pid) > 0: + return 0 + if digit(nq2, pid) == 0 and digit(nq1, pid) == 0: + return abs(pid)%10000 + elif abs(pid) <= 100: + return abs(pid) + else: + return 0 + + +def 
hasBottom(pid): + '''returns True if it's a composite particle containing a bottom quark + { + if( extraBits(pid) > 0 ) { return false; } + if( fundamentalID(pid) > 0 ) { return false; } + if( digit(nq3,pid) == 5 || digit(nq2,pid) == 5 || digit(nq1,pid) == 5 ) { return true; } + return false; + } + ''' + if extraBits(pid) > 0: + return False + elif fundamentalId(pid) > 0: + return False + elif digit(nq3,pid) == 5 or \ + digit(nq2,pid) == 5 or \ + digit(nq1,pid) == 5 : + return True + else: + return False + diff --git a/PhysicsTools/HeppyCore/python/particles/physicsobjects.py b/PhysicsTools/HeppyCore/python/particles/physicsobjects.py new file mode 100644 index 0000000000000..2b1cb37514211 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/physicsobjects.py @@ -0,0 +1,20 @@ +from PhysicsTools.HeppyCore.particles.handle import Handle +from PhysicsTools.HeppyCore.particles.p4 import P4 + +class Jet(Handle, P4): + pass + +class Particle(Handle, P4): + + def __str__(self): + tmp = '{className} : id = {id:3} pt = {pt:5.1f}, eta = {eta:5.2f}, phi = {phi:5.2f}, mass = {mass:5.2f}' + return tmp.format( + className = self.__class__.__name__, + id = self.read().Core.Type, + pt = self.read().Core.P4.Pt, + eta = self.read().Core.P4.Eta, + phi = self.read().Core.P4.Phi, + mass = self.read().Core.P4.Mass + ) + + diff --git a/PhysicsTools/HeppyCore/python/particles/test_genbrowser.py b/PhysicsTools/HeppyCore/python/particles/test_genbrowser.py new file mode 100644 index 0000000000000..f75d3e9cb323d --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/test_genbrowser.py @@ -0,0 +1,79 @@ +import unittest +from genbrowser import GenBrowser + +class Particle(object): + + def __init__(self, id, start, end): + self.id = id + self.start = start + self.end = end + # self.mothers = [] + # self.daughters = [] + + def start_vertex(self): + return self.start + + def end_vertex(self): + return self.end + + def __str__(self): + return 'particle {i}: \tstart {s}, \tend 
{e}'.format( + i=self.id, + s=self.start.id if self.start else None, + e=self.end.id if self.end else None + ) + + +class Vertex(object): + + def __init__(self, id): + self.id = id + self.outgoing = [] + self.incoming = [] + + def __str__(self): + outg = map(str, self.outgoing) + inc = map(str, self.incoming) + result = ['vertex {i}'.format(i=self.id), 'incoming'] + result += inc + result.append('outgoing') + result += outg + return '\n'.join(result) + + + +class TestGenBrowser(unittest.TestCase): + + def test_1(self): + vs = map(Vertex, range(2)) + ps = [ + Particle(0, None, vs[0]), + Particle(1, vs[0], None), + Particle(2, vs[0],vs[1]), + Particle(3, vs[1],None), + Particle(4, vs[1],None), + Particle(5, None, vs[0]) + ] + browser = GenBrowser(ps, vs) + self.assertItemsEqual( vs[0].incoming, [ps[0], ps[5]] ) + self.assertItemsEqual( vs[0].outgoing, ps[1:3] ) + self.assertItemsEqual( vs[1].incoming, [ps[2]] ) + self.assertItemsEqual( vs[1].outgoing, ps[3:5] ) + self.assertItemsEqual( ps[0].daughters, ps[1:3] ) + self.assertItemsEqual( ps[0].mothers, [] ) + self.assertItemsEqual( ps[1].daughters, [] ) + self.assertItemsEqual( ps[1].mothers, [ps[0], ps[5]] ) + self.assertItemsEqual( ps[2].daughters, ps[3:5] ) + self.assertItemsEqual( ps[2].mothers, [ps[0], ps[5]] ) + self.assertItemsEqual( ps[3].daughters, [] ) + self.assertItemsEqual( ps[3].mothers, [ps[2]] ) + self.assertItemsEqual( ps[4].daughters, [] ) + self.assertItemsEqual( ps[4].mothers, [ps[2]] ) + self.assertItemsEqual( browser.ancestors(ps[4]), [ps[2], ps[0], ps[5]]) + self.assertItemsEqual( browser.descendants(ps[0]), ps[1:5]) + + + +if __name__ == '__main__': + unittest.main() + diff --git a/PhysicsTools/HeppyCore/python/particles/test_isolation.py b/PhysicsTools/HeppyCore/python/particles/test_isolation.py new file mode 100644 index 0000000000000..93448992e6733 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/test_isolation.py @@ -0,0 +1,50 @@ +import unittest +import math +import copy +from 
PhysicsTools.HeppyCore.particles.isolation import * +from PhysicsTools.HeppyCore.particles.tlv.particle import Particle +from ROOT import TLorentzVector + +class TestIsolation(unittest.TestCase): + + def test_circle(self): + circle = EtaPhiCircle(2) + self.assertTrue( circle.is_inside(0, 0, 1.9, 0) ) + self.assertTrue( circle.is_inside(0, 0, 0., 1.9) ) + self.assertFalse( circle.is_inside(0, 0, 2.1, 0) ) + xory = math.sqrt(1.9**2/2.) + self.assertTrue( circle.is_inside(0, 0, xory, xory) ) + xory = math.sqrt(2.1**2/2.) + self.assertFalse( circle.is_inside(0, 0, xory, xory) ) + + def test_iso1(self): + p4 = TLorentzVector() + p4.SetPtEtaPhiM(10, 0, 0, 0.105) + lepton = Particle(13, 1, p4) + + p4 = TLorentzVector() + p4.SetPtEtaPhiM(1, 0, 0, 0.105) + ptc = Particle(211, 1, p4) + + # test iso calc + computer = IsolationComputer([EtaPhiCircle(0.4)]) + iso = computer.compute(lepton, [ptc,ptc]) + self.assertEqual(iso.sumpt, 2*ptc.pt()) + self.assertEqual(iso.sume, 2*ptc.e()) + self.assertEqual(iso.num, 2) + + # test IsolationInfo addition + iso2 = copy.copy(iso) + iso2 += iso + self.assertEqual(iso2.sumpt, 4*ptc.pt()) + self.assertEqual(iso2.sume, 4*ptc.e()) + self.assertEqual(iso2.num, 4) + + # test veto + computer = IsolationComputer([EtaPhiCircle(0.4)], [EtaPhiCircle(0.1)]) + iso = computer.compute(lepton, [ptc]) + self.assertEqual(iso.sumpt, 0.) 
+ + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/particles/test_jet.py b/PhysicsTools/HeppyCore/python/particles/test_jet.py new file mode 100644 index 0000000000000..8237bfd43af33 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/test_jet.py @@ -0,0 +1,47 @@ +import unittest +import pprint +from tlv.jet import Jet +from jet import JetConstituents, JetTags +from tlv.particle import Particle +from ROOT import TLorentzVector + +class TestJet(unittest.TestCase): + + def test_jet(self): + ptcs = [ Particle(211, 1, TLorentzVector(1, 0, 0, 1)), + Particle(211, 1, TLorentzVector(2, 0, 0, 2)), + Particle(22, 0, TLorentzVector(5, 0, 0, 5)) ] + jetp4 = TLorentzVector() + jet_const = JetConstituents() + for ptc in ptcs: + jetp4 += ptc.p4() + jet_const.append(ptc) + jet_const.sort() + jet = Jet(jetp4) + self.assertEqual( jet.e(), 8) + keys = sorted(list(jet_const.keys())) + self.assertEqual( keys, [1, 2, 11, 13, 22, 130, 211]) + self.assertEqual(jet_const[211], [ptcs[1], ptcs[0]]) + self.assertEqual(jet_const[22], [ptcs[2]]) + self.assertEqual(jet_const[211].e(), 3) + self.assertEqual(jet_const[130].num(), 0) + self.assertEqual(jet_const[11].num(), 0) + self.assertEqual(jet_const[130].num(), 0) + self.assertEqual(jet_const[22].e(), 5) + self.assertEqual(jet_const[22].pdgid(), 22) + self.assertEqual(jet_const[211].pdgid(), 211) + self.assertRaises(ValueError, jet_const[211].append, ptcs[2]) + + def test_jet_tags(self): + tags = JetTags() + tags['btag'] = 0.32341234 + tags['longfloat'] = 32341234.1 + tags['b'] = tags['btag']>0. 
+ tags['flavour'] = Particle(5, 0, TLorentzVector(5,0,0,5)) + # print tags.summary() + self.assertTrue(True) + + +if __name__ == '__main__': + unittest.main() + diff --git a/PhysicsTools/HeppyCore/python/particles/test_particle.py b/PhysicsTools/HeppyCore/python/particles/test_particle.py new file mode 100644 index 0000000000000..81014f3ca334c --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/test_particle.py @@ -0,0 +1,64 @@ +import unittest +import os +import copy +from PhysicsTools.HeppyCore.particles.tlv.particle import Particle as TlvParticle +from PhysicsTools.HeppyCore.particles.fcc.particle import Particle as FccParticle +from PhysicsTools.HeppyCore.configuration import Collider +from ROOT import TLorentzVector, gSystem + +import PhysicsTools.HeppyCore.framework.context as context + +class TestParticle(unittest.TestCase): + + def tearDown(self): + Collider.BEAMS = 'pp' + + def test_root_particle_copy(self): + '''Test that root-based particles can be created, deepcopied, + and compared.''' + ptc = TlvParticle(1, 1, TLorentzVector()) + ptc2 = copy.deepcopy(ptc) + self.assertEqual(ptc, ptc2) + + def test_printout(self): + '''Test that the particle printout is adapted to the collider + beams.''' + ptc = TlvParticle(1, 1, TLorentzVector()) + Collider.BEAMS = 'pp' + self.assertIn('pt', ptc.__repr__()) + Collider.BEAMS = 'ee' + self.assertIn('theta', ptc.__repr__()) + Collider.BEAMS = 'pp' + + #---------------------------------------------------------------------- + def test_sort(self): + """Test that particles are sorted by energy or by pT depending + on the collider beams""" + ptcs = [TlvParticle(1, 1, TLorentzVector(10, 0, 0, 11)), + TlvParticle(1, 1, TLorentzVector(0, 0, 11, 12))] + Collider.BEAMS = 'ee' + self.assertEqual(sorted(ptcs, reverse=True), + [ptcs[1], ptcs[0]]) + Collider.BEAMS = 'pp' + self.assertEqual(sorted(ptcs, reverse=True), + [ptcs[0], ptcs[1]]) + + + #---------------------------------------------------------------------- + def 
test_fcc_particle(self): + """Test that FCC particles can be copied and compared""" + if context.name != 'fcc': + return + from EventStore import EventStore as Events + test_fcc_file = '/'.join([os.environ['HEPPY'], + 'test/data/ee_ZH_Zmumu_Hbb.root']) + events = Events([test_fcc_file]) + event = next(iter(events)) + fccptc = event.get('GenParticle') + ptcs = map(FccParticle, fccptc) + ptc0_2 = copy.deepcopy(ptcs[0]) + self.assertEqual(ptc0_2, ptcs[0]) + + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/particles/test_resonance.py b/PhysicsTools/HeppyCore/python/particles/test_resonance.py new file mode 100644 index 0000000000000..ae5026ea11495 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/test_resonance.py @@ -0,0 +1,20 @@ +import unittest +from PhysicsTools.HeppyCore.particles.tlv.resonance import Resonance2 as Resonance +from PhysicsTools.HeppyCore.particles.tlv.particle import Particle +from ROOT import TLorentzVector + +class TestResonance(unittest.TestCase): + + def test_resonance(self): + ptc1 = Particle(11, -1, TLorentzVector(1, 0, 0, 1)) + ptc2 = Particle(-11, 1, TLorentzVector(2, 0, 0, 2)) + reso = Resonance( ptc1, ptc2, 23 ) + self.assertEqual( reso._pid, 23 ) + self.assertEqual( reso.e(), 3 ) + self.assertEqual( reso.leg1(), ptc1 ) + self.assertEqual( reso.leg2(), ptc2 ) + self.assertEqual( reso.q(), 0 ) + self.assertEqual( reso.p4(), TLorentzVector(3,0,0,3) ) + +if __name__ == '__main__': + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/particles/tlv/jet.py b/PhysicsTools/HeppyCore/python/particles/tlv/jet.py new file mode 100644 index 0000000000000..720bc01967e14 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/tlv/jet.py @@ -0,0 +1,9 @@ +from PhysicsTools.HeppyCore.particles.jet import Jet as BaseJet +from rootobj import RootObj + +class Jet(BaseJet, RootObj): + def __init__(self, tlv): + super(Jet, self).__init__() + self._tlv = tlv + + diff --git 
a/PhysicsTools/HeppyCore/python/particles/tlv/met.py b/PhysicsTools/HeppyCore/python/particles/tlv/met.py new file mode 100644 index 0000000000000..8547c7788ca88 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/tlv/met.py @@ -0,0 +1,6 @@ +from PhysicsTools.HeppyCore.particles.met import MET as BaseMET + +class MET(BaseMET): + def __init__(self, tlv, sum_et): + self._tlv = tlv + self._sum_et = sum_et diff --git a/PhysicsTools/HeppyCore/python/particles/tlv/particle.py b/PhysicsTools/HeppyCore/python/particles/tlv/particle.py new file mode 100644 index 0000000000000..5882e793fde71 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/tlv/particle.py @@ -0,0 +1,16 @@ +from PhysicsTools.HeppyCore.particles.particle import Particle as BaseParticle +from rootobj import RootObj +from ROOT import TVector3 +from vertex import Vertex + +import math + +class Particle(BaseParticle, RootObj): + def __init__(self, pdgid, charge, tlv, status=1): + super(Particle, self).__init__() + self._pid = pdgid + self._charge = charge + self._tlv = tlv + self._status = status + self._start_vertex = Vertex(TVector3(),0) + self._end_vertex = None diff --git a/PhysicsTools/HeppyCore/python/particles/tlv/resonance.py b/PhysicsTools/HeppyCore/python/particles/tlv/resonance.py new file mode 100644 index 0000000000000..c0a91c09ca59e --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/tlv/resonance.py @@ -0,0 +1,40 @@ +from PhysicsTools.HeppyCore.particles.tlv.particle import Particle +from ROOT import TLorentzVector +from rootobj import RootObj + +class Resonance(Particle, RootObj): + """Resonance decaying to two or more particles (legs). 
+ + A leg is a particle-like object with the following methods: + - q(): returns charge + - p4(): returns 4-momentum TLorentzVector + - e(): returns energy + """ + + def __init__(self, legs, pid): + self.legs = legs + tlv = TLorentzVector() + charge = 0 + for leg in legs: + charge += leg.q() + tlv += leg.p4() + super(Resonance, self).__init__(pid, charge, tlv, status=3) + + +class Resonance2(Resonance): + '''Resonance decaying to two legs.''' + + def __init__(self, leg1, leg2, pid): + '''leg1 and leg2 are the first and second legs, respectively. + no sorting is done internally. + pid is the pdg id of the resonance. + ''' + super(Resonance2, self).__init__([leg1, leg2], pid) + + def leg1(self): + '''return first leg''' + return self.legs[0] + + def leg2(self): + '''return second leg''' + return self.legs[1] diff --git a/PhysicsTools/HeppyCore/python/particles/tlv/rootobj.py b/PhysicsTools/HeppyCore/python/particles/tlv/rootobj.py new file mode 100644 index 0000000000000..9b684a2f4cb1e --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/tlv/rootobj.py @@ -0,0 +1,27 @@ +from itertools import count +import copy + +class RootObj(object): + '''Base class for all objects based on ROOT, + typically created on the fly in analysis code instead + of being read from an EDM file.''' + + _ids = count(0) + + def __init__(self, *args, **kwargs): + super(RootObj, self).__init__(*args, **kwargs) + self._objid = self._ids.next() + + def __eq__(self, other): + '''compares two objects for equality. + True if object id is the same. + So if an object is copied, the two copies are equal. + ''' + return self._objid == other._objid + + def __hash__(self): + '''returns a hash built on the object id. 
+ ''' + return hash( self._objid ) + + diff --git a/PhysicsTools/HeppyCore/python/particles/tlv/test_rootobj.py b/PhysicsTools/HeppyCore/python/particles/tlv/test_rootobj.py new file mode 100644 index 0000000000000..a09e265c0b9e7 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/tlv/test_rootobj.py @@ -0,0 +1,36 @@ +import unittest +import copy +from itertools import count + +from rootobj import RootObj + +class RootObjTestCase(unittest.TestCase): + + def test_instance_id(self): + RootObj._ids = count(0) + class T1(RootObj): + pass + class T2(RootObj): + pass + t1 = T1() + t1_2 = T1() + self.assertEqual(t1._objid, 0) + self.assertEqual(t1_2._objid, 1) + t2 = T2() + self.assertEqual(t2._objid, 2) + + def test_equality(self): + RootObj._ids = count(0) + class T1(RootObj): + pass + t1 = T1() + self.assertEqual(t1._objid, 0) + t1_2 = copy.deepcopy(t1) + self.assertEqual(t1_2._objid, 0) + self.assertEqual(t1_2, t1) + self.assertTrue(t1_2 in [t1]) + + +if __name__ == '__main__': + unittest.main() + diff --git a/PhysicsTools/HeppyCore/python/particles/tlv/vertex.py b/PhysicsTools/HeppyCore/python/particles/tlv/vertex.py new file mode 100644 index 0000000000000..23b4a6f23c90b --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/tlv/vertex.py @@ -0,0 +1,12 @@ +from PhysicsTools.HeppyCore.particles.vertex import Vertex as BaseVertex +from rootobj import RootObj + +import math + +class Vertex(BaseVertex, RootObj): + def __init__(self, vector3, ctau=0): + super(Vertex, self).__init__() + self.incoming = [] + self.outgoing = [] + self._point = vector3 + self._ctau = ctau diff --git a/PhysicsTools/HeppyCore/python/particles/vertex.py b/PhysicsTools/HeppyCore/python/particles/vertex.py new file mode 100644 index 0000000000000..c7f15573c5bd9 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/particles/vertex.py @@ -0,0 +1,40 @@ + +class Vertex(object): + '''Interface for vertices. + Make sure your code satisfies this interface. 
+ Specializations in cms, fcc, and tlv packages + ''' + def __init__(self, *args, **kwargs): + super(Vertex, self).__init__(*args, **kwargs) + + def x(self): + return self._point.X() + + def y(self): + return self._point.Y() + + def z(self): + return self._point.Z() + + def position(self): + return self._point + + def ctau(self): + return self._ctau + + def __repr__(self): + return str(self) + + def __str__(self): + tmp = '{className} : pos(mm) x = {x:5.3f}, y = {y:5.3f}, z = {z:5.3f}, ctau = {ctau:5.2f}' + return tmp.format( + className = self.__class__.__name__, + x = self.x()*1000., + y = self.y()*1000., + z = self.z()*1000., + ctau = self.ctau(), + ) + + def __repr__(self): + return str(self) + diff --git a/PhysicsTools/HeppyCore/python/statistics/average.py b/PhysicsTools/HeppyCore/python/statistics/average.py index 861840ce2d5a2..854d10018f594 100644 --- a/PhysicsTools/HeppyCore/python/statistics/average.py +++ b/PhysicsTools/HeppyCore/python/statistics/average.py @@ -91,4 +91,6 @@ def __str__(self): class Averages(diclist): def write(self, dirname): - map( lambda x: x.write(dirname), self) + for item in self: + item.write(dirname) + diff --git a/PhysicsTools/HeppyCore/python/statistics/counter.py b/PhysicsTools/HeppyCore/python/statistics/counter.py index b461f5b3c0b44..bb67ef9313e9c 100644 --- a/PhysicsTools/HeppyCore/python/statistics/counter.py +++ b/PhysicsTools/HeppyCore/python/statistics/counter.py @@ -1,4 +1,4 @@ - # Copyright (C) 2014 Colin Bernet +# Copyright (C) 2014 Colin Bernet # https://github.com/cbernet/heppy/blob/master/LICENSE import pickle @@ -14,16 +14,19 @@ def register(self, level): self.add( level, [level, 0] ) def inc(self, level, nentries=1): - '''increment an existing level + '''Call this function to create a level for this counter, + or to increment an existing level. 
''' if level not in self.dico: raise ValueError('level', level, 'has not been registered') + # self.add( level, [level, nentries]) else: self[level][1] += nentries def __add__(self, other): '''Add two counters (+).''' size = max( len(self), len(other)) + # import pdb; pdb.set_trace() for i in range(0, size): if i>=len(other): # this line exists only in this counter, leave it as is @@ -32,6 +35,7 @@ def __add__(self, other): self.register( other[i][0]) self.inc( other[i][0], other[i][1] ) else: + # exists in both if self[i][0] != other[i][0]: err = ['cannot add these counters:', str(self), str(other)] raise ValueError('\n'.join(err)) @@ -98,7 +102,8 @@ def counter(self, name): return self.counters[ self.ranks[name] ] def write(self, dirname): - map( lambda x: x.write(dirname), self.counters) + for item in self.counters: + item.write(dirname) def __str__(self): prints = map( str, self.counters ) @@ -107,3 +112,5 @@ def __str__(self): def __getitem__(self, name): return self.counter(name) + def __len__(self): + return len(self.counters) diff --git a/PhysicsTools/HeppyCore/python/statistics/histograms.py b/PhysicsTools/HeppyCore/python/statistics/histograms.py index cfeff5c47fb93..2148f93d7909d 100644 --- a/PhysicsTools/HeppyCore/python/statistics/histograms.py +++ b/PhysicsTools/HeppyCore/python/statistics/histograms.py @@ -12,6 +12,7 @@ def __init__(self, name): self.name = name self.hists = [] self.named = [] + self.dirname = None # attributes inheriting from TH1 and TNamed # are kept track of automagically, even if they are in # child classes @@ -23,7 +24,7 @@ def __init__(self, name): if var.InheritsFrom('TH1'): var.StatOverflows(True) self.hists.append(var) - except: + except AttributeError: pass # print 'TH1 list:', self.hists # print 'TNamed list:', self.named @@ -33,12 +34,12 @@ def FormatHistos(self, style ): for hist in self.hists: style.FormatHisto( hist ) - def Write(self, dir ): + def Write(self, dirname ): '''Writes all histograms to a subdirectory of 
dir called self.name.''' - self.dir = dir.mkdir( self.name ) - self.dir.cd() + self.dirname = dirname.mkdir( self.name ) + self.dirname.cd() for hist in self.hists: hist.Write() - dir.cd() + dirname.cd() diff --git a/PhysicsTools/HeppyCore/python/statistics/random_cpplib.py b/PhysicsTools/HeppyCore/python/statistics/random_cpplib.py new file mode 100644 index 0000000000000..8bdd64dcecead --- /dev/null +++ b/PhysicsTools/HeppyCore/python/statistics/random_cpplib.py @@ -0,0 +1,16 @@ +#will remove this once ROOT random is set up and working in cpp +from ROOT import gSystem +gSystem.Load("libpapascpp") #check with Colin if this is OK or if should be made to execute just once +from ROOT import randomgen + +def expovariate (a): + return randomgen.RandExponential(a).next() + +def uniform (a, b): + return randomgen.RandUniform(a, b).next() + +def gauss (a, b): + return randomgen.RandNormal(a, b).next() + +def seed (s): + randomgen.RandUniform(0, 1).setSeed(s) \ No newline at end of file diff --git a/PhysicsTools/HeppyCore/python/statistics/random_root.py b/PhysicsTools/HeppyCore/python/statistics/random_root.py new file mode 100644 index 0000000000000..0cb57ebc71b92 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/statistics/random_root.py @@ -0,0 +1,26 @@ + +from ROOT import TRandom + +raise ValueError('does not behave as python random! the seed is always the same...') + +rootrandom = TRandom() + +def expovariate (a): + x=rootrandom.Exp(1./a) + #pdebugger.info( x) + return x + +def uniform (a, b): + x=rootrandom.Uniform(a, b) + #pdebugger.info( x) + return x + +def gauss (a, b): + x= rootrandom.Gaus(a,b) + #pdebugger.info( x) + return x + +def seed (s): + global rootrandom + raise ValueError("s should be used! 
") + rootrandom = TRandom(0xdeadbeef) diff --git a/PhysicsTools/HeppyCore/python/statistics/rrandom.py b/PhysicsTools/HeppyCore/python/statistics/rrandom.py new file mode 100644 index 0000000000000..c16e67eaa6e4d --- /dev/null +++ b/PhysicsTools/HeppyCore/python/statistics/rrandom.py @@ -0,0 +1,5 @@ +#todo make depend on Heppy Configuration + +# from random_root import * +from random import * +#from random_cpplib import * diff --git a/PhysicsTools/HeppyCore/python/statistics/average_test.py b/PhysicsTools/HeppyCore/python/statistics/test_average.py similarity index 83% rename from PhysicsTools/HeppyCore/python/statistics/average_test.py rename to PhysicsTools/HeppyCore/python/statistics/test_average.py index ac3f6782dc002..7b779dd2dff11 100644 --- a/PhysicsTools/HeppyCore/python/statistics/average_test.py +++ b/PhysicsTools/HeppyCore/python/statistics/test_average.py @@ -10,6 +10,7 @@ def test_ave_unw(self): c.add( 2, 1 ) ave, unc = c.average() self.assertEqual(ave, 1.5) + self.assertAlmostEqual(unc, 0.35, 1) def test_ave_wei(self): c = Average('TestAve') @@ -17,6 +18,7 @@ def test_ave_wei(self): c.add( 1, 3 ) ave, unc = c.average() self.assertEqual(ave, 0.75) + self.assertAlmostEqual(unc, 0.22, 1) def test_ave_add(self): c1 = Average('c1') @@ -26,6 +28,7 @@ def test_ave_add(self): c3 = c1 + c2 ave, unc = c3.average() self.assertEqual(ave, 0.75) + self.assertAlmostEqual(unc, 0.22, 1) if __name__ == '__main__': unittest.main() diff --git a/PhysicsTools/HeppyCore/python/statistics/counter_test.py b/PhysicsTools/HeppyCore/python/statistics/test_counter.py similarity index 100% rename from PhysicsTools/HeppyCore/python/statistics/counter_test.py rename to PhysicsTools/HeppyCore/python/statistics/test_counter.py diff --git a/PhysicsTools/HeppyCore/python/statistics/test_random.py b/PhysicsTools/HeppyCore/python/statistics/test_random.py new file mode 100644 index 0000000000000..7ef91405f347b --- /dev/null +++ b/PhysicsTools/HeppyCore/python/statistics/test_random.py @@ 
-0,0 +1,41 @@ +import unittest +import logging +logging.getLogger().setLevel(logging.ERROR) + +import rrandom as random + +class TestRandom(unittest.TestCase): + + def test_seed(self): + + #unseeded + r0 = random.uniform(0, 1) + r1 = random.expovariate(3000) + r2 = random.gauss(1,3) + + #seed + random.seed(0xdeadbeef) + a0 = random.uniform(0, 1) + a1 = random.expovariate(3) + a2 = random.gauss(1,3) + + #reseed + random.seed(0xdeadbeef) + b0 = random.uniform(0, 1) + b1 = random.expovariate(3) + b2 = random.gauss(1,3) + + #unseeded should be different to seeded + self.assertFalse(a0==r0) + self.assertFalse(a1==r1) + self.assertFalse(a2==r2) + + #reseeded should be same as seeded + self.assertEqual(a0,b0) + self.assertEqual(a1,b1) + self.assertEqual(a2,b2) + + +if __name__ == '__main__': + + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/statistics/tree_test.py b/PhysicsTools/HeppyCore/python/statistics/test_tree.py similarity index 65% rename from PhysicsTools/HeppyCore/python/statistics/tree_test.py rename to PhysicsTools/HeppyCore/python/statistics/test_tree.py index aecf333262dea..f0326a2fe7861 100644 --- a/PhysicsTools/HeppyCore/python/statistics/tree_test.py +++ b/PhysicsTools/HeppyCore/python/statistics/test_tree.py @@ -30,6 +30,21 @@ def test_iterate(self): self.assertEqual(ev.a, 4) break + def test_cwn(self): + fi = TFile('tree2.root','RECREATE') + tr = Tree('test_tree', 'A test tree') + tr.var('nvals', the_type=int) + tr.vector('x', 'nvals', 20) + tr.fill('nvals', 10) + tr.vfill('x', range(10)) + tr.tree.Fill() + tr.reset() + tr.fill('nvals', 5) + tr.vfill('x', range(5)) + tr.tree.Fill() + fi.Write() + fi.Close() + if __name__ == '__main__': unittest.main() diff --git a/PhysicsTools/HeppyCore/python/statistics/value_test.py b/PhysicsTools/HeppyCore/python/statistics/test_value.py similarity index 100% rename from PhysicsTools/HeppyCore/python/statistics/value_test.py rename to PhysicsTools/HeppyCore/python/statistics/test_value.py diff --git 
a/PhysicsTools/HeppyCore/python/statistics/tree.py b/PhysicsTools/HeppyCore/python/statistics/tree.py index 18e7af5699c57..997d4e1fe76d5 100644 --- a/PhysicsTools/HeppyCore/python/statistics/tree.py +++ b/PhysicsTools/HeppyCore/python/statistics/tree.py @@ -24,26 +24,27 @@ def copyStructure(self, tree): for branch in tree.GetListOfBranches(): name = branch.GetName() typeName = branch.GetListOfLeaves()[0].GetTypeName() - type = float + the_type = float if typeName == 'Int_t': - type = int - self.var(name, type) + the_type = int + self.var(name, the_type) - def branch_(self, selfmap, varName, type, len, postfix="", storageType="default", title=None): + def branch_(self, selfmap, varName, the_type, length, + postfix="", storageType="default", title=None): """Backend function used to create scalar and vector branches. Users should call "var" and "vector", not this function directly.""" if storageType == "default": - storageType = self.defaultIntType if type is int else self.defaultFloatType - if type is float : + storageType = self.defaultIntType if the_type is int else self.defaultFloatType + if the_type is float : if storageType == "F": - selfmap[varName]=numpy.zeros(len,numpy.float32) + selfmap[varName]=numpy.zeros(length,numpy.float32) self.tree.Branch(varName,selfmap[varName],varName+postfix+'/F') elif storageType == "D": - selfmap[varName]=numpy.zeros(len,numpy.float64) + selfmap[varName]=numpy.zeros(length,numpy.float64) self.tree.Branch(varName,selfmap[varName],varName+postfix+'/D') else: raise RuntimeError('Unknown storage type %s for branch %s' % (storageType, varName)) - elif type is int: + elif the_type is int: dtypes = { "i" : numpy.uint32, "s" : numpy.uint16, @@ -56,42 +57,43 @@ def branch_(self, selfmap, varName, type, len, postfix="", storageType="default" } if storageType not in dtypes: raise RuntimeError('Unknown storage type %s for branch %s' % (storageType, varName)) - selfmap[varName]=numpy.zeros(len,dtypes[storageType]) + 
selfmap[varName]=numpy.zeros(length,dtypes[storageType]) self.tree.Branch(varName,selfmap[varName],varName+postfix+'/'+storageType) else: - raise RuntimeError('Unknown type %s for branch %s' % (type, varName)) + raise RuntimeError('Unknown type %s for branch %s' % (the_type, varName)) if title: self.tree.GetBranch(varName).SetTitle(title) - def var(self, varName,type=float, default=-99, title=None, storageType="default", filler=None ): - if type in [int, float]: - self.branch_(self.vars, varName, type, 1, title=title, storageType=storageType) + def var(self, varName, the_type=float, default=-99, title=None, storageType="default", filler=None ): + if the_type in [int, float]: + self.branch_(self.vars, varName, the_type, 1, title=title, storageType=storageType) self.defaults[varName] = default - elif __builtins__['type'](type) == str: + elif __builtins__['type'](the_type) == str: # create a value, looking up the type from ROOT and calling the default constructor - self.vars[varName] = getattr(ROOT,type)() - if type in [ "TLorentzVector" ]: # custom streamer classes - self.tree.Branch(varName+".", type, self.vars[varName], 8000,-1) + self.vars[varName] = getattr(ROOT,the_type)() + if the_type in [ "TLorentzVector" ]: # custom streamer classes + self.tree.Branch(varName+".", the_type, self.vars[varName], 8000,-1) else: - self.tree.Branch(varName+".", type, self.vars[varName]) + self.tree.Branch(varName+".", the_type, self.vars[varName]) if filler is None: raise RuntimeError("Error: when brancing with an object, filler should be set to a function that takes as argument an object instance and a value, and set the instance to the value (as otherwise python assignment of objects changes the address as well)") self.fillers[varName] = filler else: - raise RuntimeError('Unknown type %s for branch %s: it is not int, float or a string' % (type, varName)) + raise RuntimeError('Unknown type %s for branch %s: it is not int, float or a string' % (the_type, varName)) 
self.defaults[varName] = default - def vector(self, varName, lenvar, maxlen=None, type=float, default=-99, title=None, storageType="default", filler=None ): + def vector(self, varName, lenvar, maxlen=None, the_type=float, default=-99, title=None, storageType="default", filler=None ): """either lenvar is a string, and maxlen an int (variable size array), or lenvar is an int and maxlen is not specified (fixed array)""" - if type in [int, float]: + if the_type in [int, float]: if __builtins__['type'](lenvar) == int: # need the __builtins__ since 'type' is a variable here :-/ - self.branch_(self.vecvars, varName, type, lenvar, postfix="[%d]" % lenvar, title=title, storageType=storageType) + self.branch_(self.vecvars, varName, the_type, lenvar, postfix="[%d]" % lenvar, title=title, storageType=storageType) else: - if maxlen == None: RuntimeError, 'You must specify a maxlen if making a dynamic array'; - self.branch_(self.vecvars, varName, type, maxlen, postfix="[%s]" % lenvar, title=title, storageType=storageType) - elif __builtins__['type'](type) == str: - self.vecvars[varName] = ROOT.TClonesArray(type,(lenvar if __builtins__['type'](lenvar) == int else maxlen)) - if type in [ "TLorentzVector" ]: # custom streamer classes + if maxlen == None: + raise RuntimeError('You must specify a maxlen if making a dynamic array') + self.branch_(self.vecvars, varName, the_type, maxlen, postfix="[%s]" % lenvar, title=title, storageType=storageType) + elif __builtins__['type'](the_type) == str: + self.vecvars[varName] = ROOT.TClonesArray(the_type,(lenvar if __builtins__['type'](lenvar) == int else maxlen)) + if the_type in [ "TLorentzVector" ]: # custom streamer classes self.tree.Branch(varName+".", self.vecvars[varName], 32000, -1) else: self.tree.Branch(varName+".", self.vecvars[varName]) diff --git a/PhysicsTools/HeppyCore/python/statistics/value.py b/PhysicsTools/HeppyCore/python/statistics/value.py index d42a55595da11..b33569987f33a 100644 --- 
a/PhysicsTools/HeppyCore/python/statistics/value.py +++ b/PhysicsTools/HeppyCore/python/statistics/value.py @@ -10,7 +10,13 @@ def __init__(self, val, err): self.err = err def relerr(self): - return abs(self.err / self.val) + '''relative uncertainty. + + returns None if value == 0 for __str__ to work.''' + try: + return abs(self.err / self.val) + except ZeroDivisionError: + return None def __eq__(self, other): return self.val == other.val and self.err == other.err @@ -47,6 +53,15 @@ def __div__(self, other): return new def __str__(self): - return '{val:10.3f} +- {err:8.3f} ({relerr:5.2f}%)'.format(val=self.val, - err=self.err, - relerr=self.relerr()*100) + relerr = self.relerr() + relerr_format = '{relerr}' + if relerr: + relerr *= 100 + relerr_format = '{relerr:5.2f}%' + format_template = '{{val:10.3f}} +- {{err:8.3f}} ({relerr_format})'.format( + relerr_format = relerr_format + ) + + return format_template.format(val=self.val, + err=self.err, + relerr=relerr) diff --git a/PhysicsTools/HeppyCore/python/test/analysis_ee_ZH_cfg.py b/PhysicsTools/HeppyCore/python/test/analysis_ee_ZH_cfg.py new file mode 100644 index 0000000000000..c97b2e8a4a6eb --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/analysis_ee_ZH_cfg.py @@ -0,0 +1,296 @@ +'''Example configuration file for an ee->ZH->mumubb analysis in heppy, with the FCC-ee + +While studying this file, open it in ipython as well as in your editor to +get more information: + +ipython +from analysis_ee_ZH_cfg import * + +''' + +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg +import PhysicsTools.HeppyCore.utils.pdebug + +import logging +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +# setting the random seed for reproducible results +import PhysicsTools.HeppyCore.statistics.rrandom as random +random.seed(0xdeadbeef) + +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore 
import EventStore as Events +import PhysicsTools.HeppyCore.utils.pdebug + +# definition of the collider +from PhysicsTools.HeppyCore.configuration import Collider +Collider.BEAMS = 'ee' +Collider.SQRTS = 240. + +# input definition +comp = cfg.Component( + 'ee_ZH_Zmumu_Hbb', + files = [ + os.path.abspath('ee_ZH_Zmumu_Hbb.root') + ] +) +selectedComponents = [comp] + +# Pdebugger +from PhysicsTools.HeppyCore.analyzers.PDebugger import PDebugger +pdebug = cfg.Analyzer( + PDebugger, + output_to_stdout = False, + debug_filename = os.getcwd()+'/python_physics_debug.log' #optional argument +) +# read FCC EDM events from the input root file(s) +# do help(Reader) for more information +from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader +source = cfg.Analyzer( + Reader, + gen_particles = 'GenParticle', + gen_vertices = 'GenVertex' +) + +from PhysicsTools.HeppyCore.test.papas_cfg import papas_sequence, detector, papas + +# Use a Filter to select leptons from the output of papas simulation. +# Currently, we're treating electrons and muons transparently. +# we could use two different instances for the Filter module +# to get separate collections of electrons and muons +# help(Filter) for more information +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +leptons_true = cfg.Analyzer( + Filter, + 'sel_leptons', + output = 'leptons_true', + input_objects = 'rec_particles', + filter_func = lambda ptc: ptc.e()>10. and abs(ptc.pdgid()) in [11, 13] +) + +# Compute lepton isolation w/r other particles in the event. 
+# help(IsolationAnalyzer) for more information +from PhysicsTools.HeppyCore.analyzers.IsolationAnalyzer import IsolationAnalyzer +from PhysicsTools.HeppyCore.particles.isolation import EtaPhiCircle +iso_leptons = cfg.Analyzer( + IsolationAnalyzer, + leptons = 'leptons_true', + particles = 'rec_particles', + iso_area = EtaPhiCircle(0.4) +) + +# Select isolated leptons with a Filter +# one can pass a function like this one to the filter: +def relative_isolation(lepton): + sumpt = lepton.iso_211.sumpt + lepton.iso_22.sumpt + lepton.iso_130.sumpt + sumpt /= lepton.pt() + return sumpt +# ... or use a lambda statement as done below. +sel_iso_leptons = cfg.Analyzer( + Filter, + 'sel_iso_leptons', + output = 'sel_iso_leptons', + input_objects = 'leptons_true', + # filter_func = relative_isolation + filter_func = lambda lep : lep.iso.sumpt/lep.pt()<0.3 # fairly loose +) + +# Building Zeds +# help(ResonanceBuilder) for more information +from PhysicsTools.HeppyCore.analyzers.ResonanceBuilder import ResonanceBuilder +zeds = cfg.Analyzer( + ResonanceBuilder, + output = 'zeds', + leg_collection = 'sel_iso_leptons', + pdgid = 23 +) + +# Computing the recoil p4 (here, p_initial - p_zed) +# help(RecoilBuilder) for more information +sqrts = Collider.SQRTS + +from PhysicsTools.HeppyCore.analyzers.RecoilBuilder import RecoilBuilder +recoil = cfg.Analyzer( + RecoilBuilder, + instance_label = 'recoil', + output = 'recoil', + sqrts = sqrts, + to_remove = 'zeds_legs' +) + +missing_energy = cfg.Analyzer( + RecoilBuilder, + instance_label = 'missing_energy', + output = 'missing_energy', + sqrts = sqrts, + to_remove = 'rec_particles' +) + +# Creating a list of particles excluding the decay products of the best zed. 
+# help(Masker) for more information +from PhysicsTools.HeppyCore.analyzers.Masker import Masker +particles_not_zed = cfg.Analyzer( + Masker, + output = 'particles_not_zed', + input = 'rec_particles', + mask = 'zeds_legs', +) + +# Make jets from the particles not used to build the best zed. +# Here the event is forced into 2 jets to target ZH, H->b bbar) +# help(JetClusterizer) for more information +from PhysicsTools.HeppyCore.analyzers.fcc.JetClusterizer import JetClusterizer +jets = cfg.Analyzer( + JetClusterizer, + output = 'jets', + particles = 'particles_not_zed', + fastjet_args = dict( njets = 2) +) + +from PhysicsTools.HeppyCore.analyzers.ImpactParameter import ImpactParameter +btag = cfg.Analyzer( + ImpactParameter, + jets = 'jets', + # num_IP = ("histo_stat_IP_ratio_bems.root","h_b"), + # denom_IP = ("histo_stat_IP_ratio_bems.root","h_u"), + # num_IPs = ("histo_stat_IPs_ratio_bems.root","h_b"), + # denom_IPs = ("histo_stat_IPs_ratio_bems.root","h_u"), + pt_min = 1, # pt threshold for charged hadrons in b tagging + dxy_max = 2e-3, # 2mm + dz_max = 17e-2, # 17cm + detector = detector + ) + +# Build Higgs candidates from pairs of jets. +higgses = cfg.Analyzer( + ResonanceBuilder, + output = 'higgses', + leg_collection = 'jets', + pdgid = 25 +) + + +# Just a basic analysis-specific event Selection module. 
+# this module implements a cut-flow counter +# After running the example as +# heppy_loop.py Trash/ analysis_ee_ZH_cfg.py -f -N 100 +# this counter can be found in: +# Trash/example/PhysicsTools.HeppyCore.analyzers.examples.zh.selection.Selection_cuts/cut_flow.txt +# Counter cut_flow : +# All events 100 1.00 1.0000 +# At least 2 leptons 87 0.87 0.8700 +# Both leptons e>30 79 0.91 0.7900 +# For more information, check the code of the Selection class, +from PhysicsTools.HeppyCore.analyzers.examples.zh.selection import Selection +selection = cfg.Analyzer( + Selection, + instance_label='cuts' +) + +# Analysis-specific ntuple producer +# please have a look at the ZHTreeProducer class +from PhysicsTools.HeppyCore.analyzers.examples.zh.ZHTreeProducer import ZHTreeProducer +tree = cfg.Analyzer( + ZHTreeProducer, + zeds = 'zeds', + jets = 'jets', + higgses = 'higgses', + recoil = 'recoil', + misenergy = 'missing_energy' +) + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( + pdebug, + source, + papas_sequence, + leptons_true, + iso_leptons, + sel_iso_leptons, + zeds, + recoil, + missing_energy, + particles_not_zed, + jets, + btag, + higgses, + selection, + tree +) + +# Specifics to read FCC events +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + +if __name__ == '__main__': + import sys + from PhysicsTools.HeppyCore.framework.looper import Looper + import PhysicsTools.HeppyCore.statistics.rrandom as random + random.seed(0xdeadbeef) + + def process(iev=None): + if iev is None: + iev = loop.iEvent + loop.process(iev) + if display: + display.draw() + + def next(): + loop.process(loop.iEvent+1) + if display: + display.draw() + + iev = None + usage = '''usage: python analysis_ee_ZH_cfg.py [ievent] + + Provide ievent as an 
integer, or loop on the first events. + You can also use this configuration file in this way: + + heppy_loop.py OutDir/ analysis_ee_ZH_cfg.py -f -N 100 + ''' + if len(sys.argv)==2: + papas.display = True + try: + iev = int(sys.argv[1]) + except ValueError: + print usage + sys.exit(1) + elif len(sys.argv)>2: + print usage + sys.exit(1) + + + loop = Looper( 'looper', config, + nEvents=10, + nPrint=1, + timeReport=True) + + simulation = None + for ana in loop.analyzers: + if hasattr(ana, 'display'): + simulation = ana + display = getattr(simulation, 'display', None) + simulator = getattr(simulation, 'simulator', None) + if simulator: + detector = simulator.detector + if iev is not None: + process(iev) + pass + else: + loop.loop() + loop.write() diff --git a/PhysicsTools/HeppyCore/python/test/analysis_ee_ZH_had_cfg.py b/PhysicsTools/HeppyCore/python/test/analysis_ee_ZH_had_cfg.py new file mode 100644 index 0000000000000..ea9342b8745db --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/analysis_ee_ZH_had_cfg.py @@ -0,0 +1,319 @@ +'''Example configuration file for an ee->ZH analysis in the 4 jet channel, +with the FCC-ee + +While studying this file, open it in ipython as well as in your editor to +get more information: + +ipython +from analysis_ee_ZH_had_cfg import * + +''' + +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +from PhysicsTools.HeppyCore.framework.event import Event +Event.print_patterns=['*jet*', 'bquarks', '*higgs*', + '*zed*', '*lep*'] + +import logging +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +# setting the random seed for reproducible results +import PhysicsTools.HeppyCore.statistics.rrandom as random +random.seed(0xdeadbeef) + +# definition of the collider +from PhysicsTools.HeppyCore.configuration import Collider +Collider.BEAMS = 'ee' +Collider.SQRTS = 240. 
+ +# input definition +comp = cfg.Component( + 'ee_ZH_Z_Hbb', + files = [ + 'ee_ZH_Z_Hbb.root' + ] +) +selectedComponents = [comp] + +# read FCC EDM events from the input root file(s) +# do help(Reader) for more information +from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader +source = cfg.Analyzer( + Reader, + gen_particles = 'GenParticle', + gen_vertices = 'GenVertex' +) + +# the papas simulation and reconstruction sequence +from PhysicsTools.HeppyCore.test.papas_cfg import papas_sequence, detector, papas + +# Use a Filter to select leptons from the output of papas simulation. +# Currently, we're treating electrons and muons transparently. +# we could use two different instances for the Filter module +# to get separate collections of electrons and muons +# help(Filter) for more information +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +def is_lepton(ptc): + return ptc.e()> 5. and abs(ptc.pdgid()) in [11, 13] + +leptons = cfg.Analyzer( + Filter, + 'sel_leptons', + output = 'leptons', + input_objects = 'rec_particles', + filter_func = is_lepton +) + +# Compute lepton isolation w/r other particles in the event. 
+# help(IsolationAnalyzer) +# help(isolation) +# for more information +from PhysicsTools.HeppyCore.analyzers.IsolationAnalyzer import IsolationAnalyzer +from PhysicsTools.HeppyCore.particles.isolation import EtaPhiCircle +iso_leptons = cfg.Analyzer( + IsolationAnalyzer, + leptons = 'leptons', + particles = 'rec_particles', + iso_area = EtaPhiCircle(0.4) +) + +# Select isolated leptons with a Filter +def is_isolated(lep): + '''returns true if the particles around the lepton + in the EtaPhiCircle defined above carry less than 30% + of the lepton energy.''' + return lep.iso.sume/lep.e()<0.3 # fairly loose + +sel_iso_leptons = cfg.Analyzer( + Filter, + 'sel_iso_leptons', + output = 'sel_iso_leptons', + input_objects = 'leptons', + filter_func = is_isolated +) + + +##Rejecting events that contain a loosely isolated lepton +## +##Instead of using an event filter at this stage, we store in the tree +##the lepton with lowest energy (with the name lepton1) +## +##from PhysicsTools.HeppyCore.analyzers.EventFilter import EventFilter +##lepton_veto = cfg.Analyzer( +## EventFilter, +## 'lepton_veto', +## input_objects='sel_iso_leptons', +## min_number=1, +## veto=True +##) + +# compute the missing 4-momentum +from PhysicsTools.HeppyCore.analyzers.RecoilBuilder import RecoilBuilder +missing_energy = cfg.Analyzer( + RecoilBuilder, + instance_label = 'missing_energy', + output = 'missing_energy', + sqrts = Collider.SQRTS, + to_remove = 'rec_particles' +) + + +# make 4 exclusive jets +from PhysicsTools.HeppyCore.analyzers.fcc.JetClusterizer import JetClusterizer +jets = cfg.Analyzer( + JetClusterizer, + output = 'jets', + particles = 'rec_particles', + fastjet_args = dict( njets = 4) +) + +# make 4 gen jets with stable gen particles +genjets = cfg.Analyzer( + JetClusterizer, + output = 'genjets', + particles = 'gen_particles_stable', + fastjet_args = dict( njets = 4) +) + +# select b quarks for jet to parton matching +def is_bquark(ptc): + '''returns True if the particle is an 
outgoing b quark, + see + http://home.thep.lu.se/~torbjorn/pythia81html/ParticleProperties.html + ''' + return abs(ptc.pdgid()) == 5 and ptc.status() == 23 + +bquarks = cfg.Analyzer( + Filter, + 'bquarks', + output = 'bquarks', + input_objects = 'gen_particles', + filter_func =is_bquark +) + +# match genjets to b quarks +from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher +genjet_to_b_match = cfg.Analyzer( + Matcher, + match_particles = 'bquarks', + particles = 'genjets', + delta_r = 0.4 + ) + +# match jets to genjets (so jets are matched to b quarks through gen jets) +jet_to_genjet_match = cfg.Analyzer( + Matcher, + match_particles='genjets', + particles='rescaled_jets', + delta_r=0.5 +) + +# rescale the jet energy according to the initial p4 +from PhysicsTools.HeppyCore.analyzers.examples.zh_had.JetEnergyComputer import JetEnergyComputer +compute_jet_energy = cfg.Analyzer( + JetEnergyComputer, + output_jets='rescaled_jets', + input_jets='jets', + sqrts=Collider.SQRTS + ) + +# parametrized b tagging with CMS performance. +# the performance of other detectors can be supplied +# in the roc module +# cms_roc is a numpy array, so one can easily scale +# the cms performance, help(numpy.array) for more info. +from PhysicsTools.HeppyCore.analyzers.ParametrizedBTagger import ParametrizedBTagger +from PhysicsTools.HeppyCore.analyzers.roc import cms_roc +cms_roc.set_working_point(0.7) +btag = cfg.Analyzer( + ParametrizedBTagger, + input_jets='rescaled_jets', + roc=cms_roc +) + +# reconstruction of the H and Z resonances. +# for now, use for the Higgs the two b jets with the mass closest to mH +# the other 2 jets are used for the Z. +# implement a chi2? 
+from PhysicsTools.HeppyCore.analyzers.examples.zh_had.ZHReconstruction import ZHReconstruction +zhreco = cfg.Analyzer( + ZHReconstruction, + output_higgs='higgs', + output_zed='zed', + input_jets='rescaled_jets' +) + +# simple cut flow printout +from PhysicsTools.HeppyCore.analyzers.examples.zh_had.Selection import Selection +selection = cfg.Analyzer( + Selection, + input_jets='rescaled_jets', + log_level=logging.INFO +) + +# Analysis-specific ntuple producer +# please have a look at the ZHTreeProducer class +from PhysicsTools.HeppyCore.analyzers.examples.zh_had.TreeProducer import TreeProducer +tree = cfg.Analyzer( + TreeProducer, + misenergy = 'missing_energy', + jets='rescaled_jets', + higgs='higgs', + zed='zed', + leptons='sel_iso_leptons' +) + +# definition of the sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( + source, + papas_sequence, + leptons, + iso_leptons, + sel_iso_leptons, +# lepton_veto, + jets, + compute_jet_energy, + bquarks, + genjets, + genjet_to_b_match, + jet_to_genjet_match, + btag, + missing_energy, + selection, + zhreco, + tree +) + +# Specifics to read FCC events +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + +if __name__ == '__main__': + import sys + from PhysicsTools.HeppyCore.framework.looper import Looper + + def process(iev=None): + if iev is None: + iev = loop.iEvent + loop.process(iev) + if display: + display.draw() + + def next(): + loop.process(loop.iEvent+1) + if display: + display.draw() + + iev = None + usage = '''usage: python analysis_ee_ZH_had_cfg.py [ievent] + + Provide ievent as an integer, or loop on the first events. 
+ You can also use this configuration file in this way: + + PhysicsTools.HeppyCore.loop.py OutDir/ analysis_ee_ZH_had_cfg.py -f -N 100 + ''' + if len(sys.argv)==2: + papas.display = True + try: + iev = int(sys.argv[1]) + except ValueError: + print usage + sys.exit(1) + elif len(sys.argv)>2: + print usage + sys.exit(1) + + + loop = Looper( 'looper', config, + nEvents=10, + nPrint=10, + timeReport=True) + + simulation = None + for ana in loop.analyzers: + if hasattr(ana, 'display'): + simulation = ana + display = getattr(simulation, 'display', None) + simulator = getattr(simulation, 'simulator', None) + if simulator: + detector = simulator.detector + if iev is not None: + process(iev) + else: + loop.loop() + loop.write() diff --git a/PhysicsTools/HeppyCore/python/test/analysis_ee_Z_cfg.py b/PhysicsTools/HeppyCore/python/test/analysis_ee_Z_cfg.py new file mode 100644 index 0000000000000..424ce1e9dfd5d --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/analysis_ee_Z_cfg.py @@ -0,0 +1,169 @@ +'''Example configuration file for an ee->ZH->mumubb analysis in heppy, with the FCC-ee + +While studying this file, open it in ipython as well as in your editor to +get more information: + +ipython +from analysis_ee_ZH_cfg import * +''' + +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +from PhysicsTools.HeppyCore.framework.event import Event +Event.print_patterns=['*jet*', 'sum*'] + +import logging +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +# setting the random seed for reproducible results +import PhysicsTools.HeppyCore.statistics.rrandom as random +random.seed(0xdeadbeef) + +# definition of the collider +from PhysicsTools.HeppyCore.configuration import Collider +Collider.BEAMS = 'ee' +Collider.SQRTS = 91. 
+ +# input definition +import glob +files = glob.glob(os.environ['HEPPY']+'/test/ee_Z_ddbar_*.root') +ee_Z_ddbar = cfg.Component( + 'ee_Z_ddbar', + files = files + ) +ee_Z_ddbar.splitFactor = len(ee_Z_ddbar.files) + +ee_Z_bbbar = cfg.Component( + 'ee_Z_bbbar', + files = [ + 'ee_Z_bbbar.root' + ] +) + + +selectedComponents = [ee_Z_ddbar] + +# read FCC EDM events from the input root file(s) +# do help(Reader) for more information +from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader +source = cfg.Analyzer( + Reader, + gen_particles = 'GenParticle', + gen_vertices = 'GenVertex' +) + +from PhysicsTools.HeppyCore.analyzers.P4SumBuilder import P4SumBuilder +sum_particles = cfg.Analyzer( + P4SumBuilder, + output='sum_all_ptcs', + # particles='gen_particles_stable' + particles='rec_particles' +) + +sum_gen = cfg.Analyzer( + P4SumBuilder, + output='sum_all_gen', + particles='gen_particles_stable' +) + + +from PhysicsTools.HeppyCore.analyzers.GlobalEventTreeProducer import GlobalEventTreeProducer +zed_tree = cfg.Analyzer( + GlobalEventTreeProducer, + sum_all='sum_all_ptcs', + sum_all_gen='sum_all_gen' +) + + +from PhysicsTools.HeppyCore.test.papas_cfg import gen_particles_stable, papas_sequence, detector, papas +from PhysicsTools.HeppyCore.test.jet_tree_cff import jet_tree_sequence + + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( + source, + # gen_particles_stable, + papas_sequence, + jet_tree_sequence('gen_particles_stable', + 'rec_particles', + 2, None), + sum_particles, + sum_gen, + zed_tree + ) + +# Specifics to read FCC events +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + +if __name__ == '__main__': + import sys + from PhysicsTools.HeppyCore.framework.looper import Looper + + import 
PhysicsTools.HeppyCore.statistics.rrandom as random + random.seed(0xdeadbeef) + + def process(iev=None): + if iev is None: + iev = loop.iEvent + loop.process(iev) + if display: + display.draw() + + def next(): + loop.process(loop.iEvent+1) + if display: + display.draw() + + iev = None + usage = '''usage: python analysis_ee_ZH_cfg.py [ievent] + + Provide ievent as an integer, or loop on the first events. + You can also use this configuration file in this way: + + heppy_loop.py OutDir/ analysis_ee_ZH_cfg.py -f -N 100 + ''' + if len(sys.argv)==2: + papas.display = True + try: + iev = int(sys.argv[1]) + except ValueError: + print usage + sys.exit(1) + elif len(sys.argv)>2: + print usage + sys.exit(1) + + + loop = Looper( 'looper', config, + nEvents=10, + nPrint=5, + timeReport=True) + + simulation = None + for ana in loop.analyzers: + if hasattr(ana, 'display'): + simulation = ana + display = getattr(simulation, 'display', None) + simulator = getattr(simulation, 'simulator', None) + if simulator: + detector = simulator.detector + if iev is not None: + process(iev) + else: + loop.loop() + loop.write() diff --git a/PhysicsTools/HeppyCore/python/test/analysis_hh_ttbar_cfg.py b/PhysicsTools/HeppyCore/python/test/analysis_hh_ttbar_cfg.py new file mode 100644 index 0000000000000..462d1e4000753 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/analysis_hh_ttbar_cfg.py @@ -0,0 +1,223 @@ +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +import logging +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +comp = cfg.Component( + 'example', + #files = ['example.root'] + files = ['root://eospublic.cern.ch//eos/fcc/users/h/helsens/DelphesOutputs/ttbar_13TeV/FCCDelphesOutput_ttbar13TeV_1.root', + 'root://eospublic.cern.ch//eos/fcc/users/h/helsens/DelphesOutputs/ttbar_13TeV/FCCDelphesOutput_ttbar13TeV_2.root', + 
'root://eospublic.cern.ch//eos/fcc/users/h/helsens/DelphesOutputs/ttbar_13TeV/FCCDelphesOutput_ttbar13TeV_3.root', + 'root://eospublic.cern.ch//eos/fcc/users/h/helsens/DelphesOutputs/ttbar_13TeV/FCCDelphesOutput_ttbar13TeV_4.root', + + #'/afs/cern.ch/user/h/helsens/FCCsoft/FCCSOFT/FCC/FCCSW/FCCDelphesOutput.root' + ] + #files = ['FCCDelphes_ClementOutput1.root'] +) +selectedComponents = [comp] + +from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader +source = cfg.Analyzer( + Reader, + #gen_particles = 'genParticles', + gen_jets = 'genJets', + + jets = 'jets', + bTags = 'bTags', + jetsToBTags = 'jetsToBTags', + + electrons = 'electrons', + electronITags = 'electronITags', + electronsToITags = 'electronsToITags', + + muons = 'muons', + muonITags = 'muonITags', + muonsToITags = 'muonsToITags', + + photons = 'photons', + met = 'met', +) + +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events + + + +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +muons = cfg.Analyzer( + Filter, + 'sel_muons', + output = 'muons', + input_objects = 'muons', + filter_func = lambda ptc: ptc.pt()>30 +) + +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +iso_muons = cfg.Analyzer( + Filter, + 'sel_iso_muons', + output = 'sel_iso_muons', + input_objects = 'muons', + filter_func = lambda ptc: ptc.iso.sumpt/ptc.pt()<0.2 +) + +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +electrons = cfg.Analyzer( + Filter, + 'sel_electrons', + output = 'electrons', + input_objects = 'electrons', + filter_func = lambda ptc: ptc.pt()>30 +) + +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +iso_electrons = cfg.Analyzer( + Filter, + 'sel_iso_electrons', + output = 'sel_iso_electrons', + input_objects = 'electrons', + filter_func = lambda ptc: ptc.iso.sumpt/ptc.pt()<0.1 +) + + +jets_30 = cfg.Analyzer( + Filter, + 'jets_30', + output = 'jets_30', + input_objects = 'jets', + filter_func = lambda jet: jet.pt()>30. 
+) + +from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher +match_jet_electrons = cfg.Analyzer( + Matcher, + 'electron_jets', + delta_r = 0.2, + match_particles = 'sel_iso_electrons', + particles = 'jets_30' +) + +sel_jets_electron = cfg.Analyzer( + Filter, + 'sel_jets_noelecetron_30', + output = 'sel_jets_noelectron_30', + input_objects = 'jets_30', + filter_func = lambda jet: jet.match is None +) + + +from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher +match_muon_jets = cfg.Analyzer( + Matcher, + 'muon_jets', + delta_r = 0.2, + match_particles = 'sel_iso_muons', + particles = 'sel_jets_noelectron_30' +) + +sel_jets_muon = cfg.Analyzer( + Filter, + 'sel_jets_nomuon_30', + output = 'sel_jets_noelectronnomuon_30', + input_objects = 'sel_jets_noelectron_30', + filter_func = lambda jet: jet.match is None +) + + +from PhysicsTools.HeppyCore.analyzers.examples.ttbar.BTagging import BTagging +btagging = cfg.Analyzer( + BTagging, + 'b_jets_30', + output = 'b_jets_30', + input_objects = 'sel_jets_noelectronnomuon_30', + filter_func = lambda jet : jet.tags['bf']>0. +) + + +from PhysicsTools.HeppyCore.analyzers.M3Builder import M3Builder +m3 = cfg.Analyzer( + M3Builder, + instance_label = 'm3', + jets = 'sel_jets_noelectronnomuon_30', + filter_func = lambda x : x.pt()>30. 
+) + +from PhysicsTools.HeppyCore.analyzers.MTW import MTW +mtw = cfg.Analyzer( + MTW, + instance_label = 'mtw', + met = 'met', + electron = 'sel_iso_electrons', + muon = 'sel_iso_muons' +) + + + +from PhysicsTools.HeppyCore.analyzers.examples.ttbar.selection import Selection +selection = cfg.Analyzer( + Selection, + instance_label='cuts' +) + +from PhysicsTools.HeppyCore.analyzers.examples.ttbar.TTbarTreeProducer import TTbarTreeProducer +gen_tree = cfg.Analyzer( + TTbarTreeProducer, + jets_30 = 'sel_jets_noelectronnomuon_30', + m3 = 'm3', + met = 'met', + mtw= 'mtw', + muons = 'sel_iso_muons', + electrons = 'sel_iso_electrons' +) + + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( [ + source, + jets_30, + muons, + electrons, + iso_muons, + iso_electrons, + match_jet_electrons, + sel_jets_electron, + match_muon_jets, + sel_jets_muon, + btagging, + selection, + m3, + mtw, + gen_tree + ] ) + +# comp.files.append('example_2.root') +#comp.splitFactor = len(comp.files) # splitting the component in 2 chunks + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + +if __name__ == '__main__': + import sys + from PhysicsTools.HeppyCore.framework.looper import Looper + + def next(): + loop.process(loop.iEvent+1) + + loop = Looper( 'looper', config, + nEvents=100, + nPrint=0, + timeReport=True) + loop.process(6) + print loop.event diff --git a/PhysicsTools/HeppyCore/python/test/cms_jets.py b/PhysicsTools/HeppyCore/python/test/cms_jets.py new file mode 100644 index 0000000000000..53fdbb814f6a7 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/cms_jets.py @@ -0,0 +1,70 @@ +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +debug = False + +if debug: + print 'DEBUG MODE IS ON!' 
+ +comp = cfg.Component( + 'singlepi', + files = ['/gridgroup/cms/cbernet/data/singlePi_50k.root'] +) + +selectedComponents = [comp] + +from PhysicsTools.HeppyCore.analyzers.cms.JetReader import JetReader +source = cfg.Analyzer( + JetReader, + gen_jets = 'ak4GenJetsNoNu', + gen_jet_pt = 20, + jets = 'ak4PFJets', + jet_pt = 20, + nlead = 2 +) + +from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher +jet_match = cfg.Analyzer( + Matcher, + match_particles = 'cms_jets', + particles = 'gen_jets', + delta_r = 0.3 + ) + +from PhysicsTools.HeppyCore.analyzers.JetTreeProducer import JetTreeProducer +jet_tree = cfg.Analyzer( + JetTreeProducer, + tree_name = 'events', + tree_title = 'jets', + jets = 'gen_jets' + ) + + +from PhysicsTools.HeppyCore.framework.eventsfwlite import Events + +if debug: + comp = selectedComponents[0] + comp.splitFactor =1 + selectedComponents = [comp] + + + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( [ + source, + jet_match, + jet_tree + ] ) + + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + +if __name__ == '__main__': + print config diff --git a/PhysicsTools/HeppyCore/python/test/compare_reconstruction_cfg.py b/PhysicsTools/HeppyCore/python/test/compare_reconstruction_cfg.py new file mode 100644 index 0000000000000..7ec82cdf2eea9 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/compare_reconstruction_cfg.py @@ -0,0 +1,162 @@ +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +import logging + +#import sys +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +comp = cfg.Component( + 'example', + files = [ + 'ee_ZH_Zmumu_Hbb.root' + ] +) +selectedComponents = [comp] + +from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader +source = cfg.Analyzer( + Reader, 
+ gen_particles = 'GenParticle', +) + +# Use a Filter to select stable gen particles for simulation +# from the output of "source" +# help(Filter) for more information +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +gen_particles_stable = cfg.Analyzer( + Filter, + output = 'gen_particles_stable', + # output = 'particles', + input_objects = 'gen_particles', + filter_func = lambda x : x.status()==1 and x.pdgid() not in [12,14,16] and x.pt()>0.1 +) + +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events +#from PhysicsTools.HeppyCore.framework.eventsgen import Events + + +#Run simulation (and include the original reconstruction of particles) +from PhysicsTools.HeppyCore.analyzers.PapasSim import PapasSim +from PhysicsTools.HeppyCore.papas.detectors.CMS import CMS +papas = cfg.Analyzer( + PapasSim, + instance_label = 'papas', + detector = CMS(), + gen_particles = 'gen_particles_stable', + sim_particles = 'sim_particles', + merged_ecals = 'ecal_clusters', + merged_hcals = 'hcal_clusters', + tracks = 'tracks', + rec_particles = 'sim_rec_particles', # optional - will only do a simulation reconstruction if a name is provided + output_history = 'history_nodes', + display_filter_func = lambda ptc: ptc.e()>1., + display = False, + verbose = True +) + +#make connected blocks of tracks/clusters +from PhysicsTools.HeppyCore.analyzers.PapasPFBlockBuilder import PapasPFBlockBuilder +pfblocks = cfg.Analyzer( + PapasPFBlockBuilder, + tracks = 'tracks', + ecals = 'ecal_clusters', + hcals = 'hcal_clusters', + history = 'history_nodes', + output_blocks = 'reconstruction_blocks' +) + +#reconstruct particles +from PhysicsTools.HeppyCore.analyzers.PapasPFReconstructor import PapasPFReconstructor +pfreconstruct = cfg.Analyzer( + PapasPFReconstructor, + instance_label = 'papas_PFreconstruction', + detector = CMS(), + input_blocks = 'reconstruction_blocks', + history = 'history_nodes', + output_particles_dict = 'particles_dict', 
+ output_particles_list = 'particles_list' +) + +#compare original and new reconstructions +from PhysicsTools.HeppyCore.analyzers.PapasParticlesComparer import PapasParticlesComparer +particlescomparer = cfg.Analyzer( + PapasParticlesComparer , + particlesA = 'papas_PFreconstruction_particles_list', + particlesB = 'papas_sim_rec_particles' +) + +# and then particle reconstruction from blocks + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( [ + source, + gen_particles_stable, + papas, + pfblocks, + pfreconstruct, + particlescomparer + ] ) + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + + + + +if __name__ == '__main__': + import sys + from PhysicsTools.HeppyCore.framework.looper import Looper + + import PhysicsTools.HeppyCore.statistics.rrandom as random + random.seed(0xdeadbeef) + + def process(iev=None): + if iev is None: + iev = loop.iEvent + loop.process(iev) + if display: + display.draw() + + def next(): + loop.process(loop.iEvent+1) + if display: + display.draw() + + iev = None + if len(sys.argv)==2: + papas.display = True + iev = int(sys.argv[1]) + + loop = Looper( 'looper', config, + nEvents=1000, + nPrint=0, + firstEvent=0, + timeReport=True) + simulation = None + for ana in loop.analyzers: + if hasattr(ana, 'display'): + simulation = ana + display = getattr(simulation, 'display', None) + simulator = getattr(simulation, 'simulator', None) + + if simulator: + detector = simulator.detector + if iev is not None: + for j in range(10000) : + process(iev) + pass + else: + loop.loop() + loop.write() diff --git a/PhysicsTools/HeppyCore/python/test/create_tree.py b/PhysicsTools/HeppyCore/python/test/create_tree.py new file mode 100644 index 0000000000000..6df68fbe2718f --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/create_tree.py @@ -0,0 +1,11 @@ +from PhysicsTools.HeppyCore.utils.testtree import 
create_tree + +if __name__ == "__main__": + + import sys + import pdb; pdb.set_trace() + if len(sys.argv) == 2: + nentries = sys.argv[1] + create_tree(nentries=nentries) + else: + create_tree() diff --git a/PhysicsTools/HeppyCore/python/test/delphes_cfg.py b/PhysicsTools/HeppyCore/python/test/delphes_cfg.py new file mode 100644 index 0000000000000..b2fc0d4a5f720 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/delphes_cfg.py @@ -0,0 +1,144 @@ +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +import logging +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +comp = cfg.Component( + 'example', + files = ['FCCDelphesOutput.root'] +) +selectedComponents = [comp] + +from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader +source = cfg.Analyzer( + Reader, + gen_particles = 'genParticles', + gen_vertices = 'genVertices', + # gen_jets = 'GenJet', + jets = 'recJets' +) + +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events + +# currently treating electrons and muons transparently. 
+# could use the same modules to have a collection of electrons +# and a collection of muons +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +leptons = cfg.Analyzer( + Filter, + 'sel_leptons', + output = 'leptons', + input_objects = 'gen_particles_stable', + filter_func = lambda ptc: ptc.pt()>30 and abs(ptc.pdgid()) in [11, 13] +) + +from PhysicsTools.HeppyCore.analyzers.IsolationAnalyzer import IsolationAnalyzer +from PhysicsTools.HeppyCore.particles.isolation import EtaPhiCircle +iso_leptons = cfg.Analyzer( + IsolationAnalyzer, + leptons = 'leptons', + particles = 'gen_particles_stable', + iso_area = EtaPhiCircle(0.4) +) + +#TODO: Colin: would be better to have a lepton class +def relative_isolation(lepton): + sumpt = lepton.iso_211.sumpt + lepton.iso_22.sumpt + lepton.iso_130.sumpt + sumpt /= lepton.pt() + return sumpt + +sel_iso_leptons = cfg.Analyzer( + Filter, + 'sel_iso_leptons', + output = 'sel_iso_leptons', + input_objects = 'leptons', + filter_func = lambda lep : relative_isolation(lep)<0.25 +) + +gen_jets_30 = cfg.Analyzer( + Filter, + 'gen_jets_30', + output = 'gen_jets_30', + input_objects = 'gen_jets', + filter_func = lambda jet: jet.pt()>30. +) + +from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher +match_jet_leptons = cfg.Analyzer( + Matcher, + delta_r = 0.4, + match_particles = 'sel_iso_leptons', + particles = 'gen_jets_30' +) + +sel_jets_nolepton = cfg.Analyzer( + Filter, + 'sel_jets_nolepton', + output = 'sel_jets_nolepton', + input_objects = 'gen_jets_30', + filter_func = lambda jet: not hasattr(jet, 'sel_iso_leptons') +) + +from PhysicsTools.HeppyCore.analyzers.M3Builder import M3Builder +m3 = cfg.Analyzer( + M3Builder, + instance_label = 'gen_m3', + jets = 'sel_jets_nolepton', + filter_func = lambda x : x.pt()>30. 
+) + +from PhysicsTools.HeppyCore.analyzers.examples.ttbar.TTbarTreeProducer import TTbarTreeProducer +gen_tree = cfg.Analyzer( + TTbarTreeProducer, + jets = 'gen_jets', + m3 = 'gen_m3', + met = 'gen_met', + leptons = 'leptons' +) + + + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( [ + source, + # leptons, + # iso_leptons, + # gen_jets_30, + # sel_iso_leptons, + # match_jet_leptons, + # sel_jets_nolepton, + # m3, + # gen_tree + ] ) + +# comp.files.append('example_2.root') +# comp.splitFactor = 2 # splitting the component in 2 chunks + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + +if __name__ == '__main__': + import sys + from PhysicsTools.HeppyCore.framework.looper import Looper + + def next(): + loop.process(loop.iEvent+1) + + loop = Looper( 'looper', config, + nEvents=100, + nPrint=0, + timeReport=True) + loop.process(6) + print loop.event diff --git a/PhysicsTools/HeppyCore/python/test/gun_papas_cfg.py b/PhysicsTools/HeppyCore/python/test/gun_papas_cfg.py new file mode 100644 index 0000000000000..32d4589d7028c --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/gun_papas_cfg.py @@ -0,0 +1,152 @@ +'''Example configuration file for an ee->ZH->mumubb analysis in heppy, with the FCC-ee + +While studying this file, open it in ipython as well as in your editor to +get more information: + +ipython +from analysis_ee_ZH_cfg import * +''' + +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +from PhysicsTools.HeppyCore.framework.event import Event +Event.print_patterns=['*'] + +import logging +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +# setting the random seed for reproducible results +import PhysicsTools.HeppyCore.statistics.rrandom as random +random.seed(0xdeadbeef) + +from 
PhysicsTools.HeppyCore.analyzers.Gun import Gun +source = cfg.Analyzer( + Gun, + pdgid = 211, + thetamin = -1.5, + thetamax = 1.5, + ptmin = 0, + ptmax = 100, + flat_pt = False, +) + + +comp = cfg.Component( + 'gun_{}'.format(source.pdgid), + files = [None] +) +selectedComponents = [comp] + +from PhysicsTools.HeppyCore.test.papas_cfg import papas_sequence, detector, papas + +from jet_tree_cff import jet_tree_sequence + +from PhysicsTools.HeppyCore.analyzers.P4SumBuilder import P4SumBuilder +sum_particles = cfg.Analyzer( + P4SumBuilder, + output='sum_all_ptcs', + # particles='gen_particles_stable' + particles='rec_particles' +) + +sum_gen = cfg.Analyzer( + P4SumBuilder, + output='sum_all_gen', + particles='gen_particles_stable' +) + + +from PhysicsTools.HeppyCore.analyzers.GlobalEventTreeProducer import GlobalEventTreeProducer +zed_tree = cfg.Analyzer( + GlobalEventTreeProducer, + sum_all='sum_all_ptcs', + sum_all_gen='sum_all_gen' +) + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( + source, + papas_sequence, + jet_tree_sequence('gen_particles_stable','rec_particles', + njets=None, ptmin=0.5), + sum_particles, + sum_gen, + zed_tree + ) + +# Specifics to read FCC events +from ROOT import gSystem +from PhysicsTools.HeppyCore.framework.eventsgen import Events + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + +if __name__ == '__main__': + import sys + from PhysicsTools.HeppyCore.framework.looper import Looper + + import PhysicsTools.HeppyCore.statistics.rrandom as random + random.seed(0xdeadbeef) + + def process(iev=None): + if iev is None: + iev = loop.iEvent + loop.process(iev) + if display: + display.draw() + + def next(): + loop.process(loop.iEvent+1) + if display: + display.draw() + + iev = None + usage = '''usage: python analysis_ee_ZH_cfg.py [ievent] + + Provide ievent as an integer, or loop on the first 
events. + You can also use this configuration file in this way: + + heppy_loop.py OutDir/ analysis_ee_ZH_cfg.py -f -N 100 + ''' + if len(sys.argv)==2: + papas.display = True + try: + iev = int(sys.argv[1]) + except ValueError: + print usage + sys.exit(1) + elif len(sys.argv)>2: + print usage + sys.exit(1) + + + loop = Looper( 'looper', config, + nEvents=1000, + nPrint=1, + timeReport=True) + + simulation = None + for ana in loop.analyzers: + if hasattr(ana, 'display'): + simulation = ana + display = getattr(simulation, 'display', None) + simulator = getattr(simulation, 'simulator', None) + if simulator: + detector = simulator.detector + if iev is not None: + process(iev) + process(iev) + process(iev) + else: + loop.loop() + loop.write() diff --git a/PhysicsTools/HeppyCore/python/test/jet_tree_cff.py b/PhysicsTools/HeppyCore/python/test/jet_tree_cff.py new file mode 100644 index 0000000000000..754eabbe3a7c7 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/jet_tree_cff.py @@ -0,0 +1,43 @@ +import PhysicsTools.HeppyCore.framework.config as cfg + +def jet_tree_sequence(gen_ptcs, rec_ptcs, njets, ptmin): + + fastjet_args = None + if njets: + fastjet_args = dict(njets=njets) + else: + fastjet_args = dict(ptmin=ptmin) + + + from PhysicsTools.HeppyCore.analyzers.fcc.JetClusterizer import JetClusterizer + gen_jets = cfg.Analyzer( + JetClusterizer, + output = 'gen_jets', + particles = gen_ptcs, + fastjet_args = fastjet_args, + ) + + jets = cfg.Analyzer( + JetClusterizer, + output = 'jets', + particles = rec_ptcs, + fastjet_args = fastjet_args, + ) + + from PhysicsTools.HeppyCore.analyzers.Matcher import Matcher + jet_match = cfg.Analyzer( + Matcher, + match_particles = 'jets', + particles = 'gen_jets', + delta_r = 0.3 + ) + + from PhysicsTools.HeppyCore.analyzers.JetTreeProducer import JetTreeProducer + jet_tree = cfg.Analyzer( + JetTreeProducer, + tree_name = 'events', + tree_title = 'jets', + jets = 'gen_jets' + ) + + return [gen_jets, jets, jet_match, jet_tree] diff 
--git a/PhysicsTools/HeppyCore/python/test/lcio_cfg.py b/PhysicsTools/HeppyCore/python/test/lcio_cfg.py new file mode 100644 index 0000000000000..99f5bb84beae8 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/lcio_cfg.py @@ -0,0 +1,43 @@ +'''Example configuration file showing how to read LCIO events. +Use it in Lyon: + source /gridsoft/ipnls/ilc/v01-17-09/init_ilcsoft.sh + + heppy_loop.py Output lcio_cfg.py +''' + + +import os +import PhysicsTools.HeppyCore.framework.config as cfg +from PhysicsTools.HeppyCore.framework.eventslcio import Events +import logging +logging.basicConfig(level=logging.INFO) + + +inputSample = cfg.Component( + 'test_component', + files = '/gridgroup/ilc/kurca/simple_lcio.slcio', + ) + +from PhysicsTools.HeppyCore.analyzers.lcio.MCParticlePrinter import MCParticlePrinter +mc_ptc_printer = cfg.Analyzer( + MCParticlePrinter +) + +selectedComponents = [inputSample] + + + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( [ + mc_ptc_printer +] ) + + +# finalization of the configuration object. 
+config = cfg.Config( components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events ) + + diff --git a/PhysicsTools/HeppyCore/python/test/lepton_iso_cfg.py b/PhysicsTools/HeppyCore/python/test/lepton_iso_cfg.py new file mode 100644 index 0000000000000..0b9f4f9b0f7a3 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/lepton_iso_cfg.py @@ -0,0 +1,102 @@ +'''Example configuration file for an ee->ZH->mumubb analysis in heppy, with the FCC-ee + +While studying this file, open it in ipython as well as in your editor to +get more information: + +ipython +from analysis_ee_ZH_cfg import * + +''' + +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg +import PhysicsTools.HeppyCore.utils.pdebug + +import logging +# next 2 lines necessary to deal with reimports from ipython +logging.shutdown() +reload(logging) +logging.basicConfig(level=logging.WARNING) + +# setting the random seed for reproducible results +import PhysicsTools.HeppyCore.statistics.rrandom as random +random.seed(0xdeadbeef) + +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events +import PhysicsTools.HeppyCore.utils.pdebug + +# definition of the collider +from PhysicsTools.HeppyCore.configuration import Collider +Collider.BEAMS = 'ee' +Collider.SQRTS = 240. + +# input definition +comp = cfg.Component( + 'ee_ZH_Zmumu_Hbb', + files = [ + 'ee_ZH_Zmumu_Hbb.root' + ] +) +selectedComponents = [comp] + +# read FCC EDM events from the input root file(s) +# do help(Reader) for more information +from PhysicsTools.HeppyCore.analyzers.fcc.Reader import Reader +source = cfg.Analyzer( + Reader, + gen_particles = 'GenParticle', + gen_vertices = 'GenVertex' +) + +from PhysicsTools.HeppyCore.test.papas_cfg import gen_particles_stable + +# Use a Filter to select leptons from the output of papas simulation. +# Currently, we're treating electrons and muons transparently. 
+# we could use two different instances for the Filter module +# to get separate collections of electrons and muons +# help(Filter) for more information +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +leptons_true = cfg.Analyzer( + Filter, + 'sel_leptons', + output = 'leptons_true', + input_objects = 'gen_particles_stable', + filter_func = lambda ptc: ptc.e()>10. and abs(ptc.pdgid()) in [11, 13] +) + +# Compute lepton isolation w/r other particles in the event. +# help(IsolationAnalyzer) for more information +from PhysicsTools.HeppyCore.analyzers.IsolationAnalyzer import IsolationAnalyzer +from PhysicsTools.HeppyCore.particles.isolation import EtaPhiCircle +iso_leptons = cfg.Analyzer( + IsolationAnalyzer, + leptons = 'leptons_true', + particles = 'gen_particles_stable', + iso_area = EtaPhiCircle(0.4), + log_level = logging.INFO +) + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence( + source, + gen_particles_stable, + leptons_true, + iso_leptons, +) + +# Specifics to read FCC events +from ROOT import gSystem +gSystem.Load("libdatamodelDict") +from EventStore import EventStore as Events + +config = cfg.Config( + components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events +) + diff --git a/PhysicsTools/HeppyCore/python/test/papas_cfg.py b/PhysicsTools/HeppyCore/python/test/papas_cfg.py new file mode 100644 index 0000000000000..b066c57610019 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/papas_cfg.py @@ -0,0 +1,143 @@ +import PhysicsTools.HeppyCore.framework.config as cfg +from PhysicsTools.HeppyCore.configuration import Collider + +# Use a Filter to select stable gen particles for simulation +# from the output of "source" +# help(Filter) for more information +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +gen_particles_stable = cfg.Analyzer( + Filter, + output = 'gen_particles_stable', + # output = 'particles', + input_objects 
= 'gen_particles', + filter_func = lambda x : x.status()==1 and abs(x.pdgid()) not in [12,14,16] and x.pt()>1e-5 +) + +# configure the papas fast simulation with the CMS detector +# help(Papas) for more information +# history nodes keeps track of which particles produced which tracks, clusters +from PhysicsTools.HeppyCore.analyzers.PapasSim import PapasSim +# from PhysicsTools.HeppyCore.analyzers.Papas import Papas +from PhysicsTools.HeppyCore.papas.detectors.CMS import CMS +detector = CMS() + +papas = cfg.Analyzer( + PapasSim, + instance_label = 'papas', + detector = detector, + gen_particles = 'gen_particles_stable', + sim_particles = 'sim_particles', + merged_ecals = 'ecal_clusters', + merged_hcals = 'hcal_clusters', + tracks = 'tracks', + output_history = 'history_nodes', + display_filter_func = lambda ptc: ptc.e()>1., + display = False, + verbose = True +) + + +# group the clusters, tracks from simulation into connected blocks ready for reconstruction +from PhysicsTools.HeppyCore.analyzers.PapasPFBlockBuilder import PapasPFBlockBuilder +pfblocks = cfg.Analyzer( + PapasPFBlockBuilder, + tracks = 'tracks', + ecals = 'ecal_clusters', + hcals = 'hcal_clusters', + history = 'history_nodes', + output_blocks = 'reconstruction_blocks' +) + + +#reconstruct particles from blocks +from PhysicsTools.HeppyCore.analyzers.PapasPFReconstructor import PapasPFReconstructor +pfreconstruct = cfg.Analyzer( + PapasPFReconstructor, + instance_label = 'papas_PFreconstruction', + detector = detector, + input_blocks = 'reconstruction_blocks', + history = 'history_nodes', + output_particles_dict = 'particles_dict', + output_particles_list = 'particles_list' +) + + + +# Use a Filter to select leptons from the output of papas simulation. +# Currently, we're treating electrons and muons transparently. 
+# we could use two different instances for the Filter module +# to get separate collections of electrons and muons +# help(Filter) for more information +from PhysicsTools.HeppyCore.analyzers.Filter import Filter +sim_electrons = cfg.Analyzer( + Filter, + 'sim_electrons', + output = 'sim_electrons', + input_objects = 'papas_sim_particles', + filter_func = lambda ptc: abs(ptc.pdgid()) in [11] +) + +sim_muons = cfg.Analyzer( + Filter, + 'sim_muons', + output = 'sim_muons', + input_objects = 'papas_sim_particles', + filter_func = lambda ptc: abs(ptc.pdgid()) in [13] +) + + +# Applying a simple resolution and efficiency model to electrons and muons. +# Indeed, papas simply copies generated electrons and muons +# from its input gen particle collection to its output reconstructed +# particle collection. +# Setting up the electron and muon models is left to the user, +# and the LeptonSmearer is just an example +# help(LeptonSmearer) for more information +from PhysicsTools.HeppyCore.analyzers.GaussianSmearer import GaussianSmearer +def accept_electron(ele): + return abs(ele.eta()) < 2.5 and ele.e() > 5. +electrons = cfg.Analyzer( + GaussianSmearer, + 'electrons', + output = 'electrons', + input_objects = 'sim_electrons', + accept=accept_electron, + mu_sigma=(1, 0.1) + ) + +def accept_muon(mu): + return abs(mu.eta()) < 2.5 and mu.pt() > 5. 
+muons = cfg.Analyzer( + GaussianSmearer, + 'muons', + output = 'muons', + input_objects = 'sim_muons', + accept=accept_muon, + mu_sigma=(1, 0.02) + ) + + +#merge smeared leptons with the reconstructed particles +from PhysicsTools.HeppyCore.analyzers.Merger import Merger +from PhysicsTools.HeppyCore.particles.p4 import P4 +merge_particles = cfg.Analyzer( + Merger, + instance_label = 'merge_particles', + inputs=['papas_PFreconstruction_particles_list', 'electrons', 'muons'], + output = 'rec_particles', + sort_key = P4.sort_key +) + +papas_sequence = [ + gen_particles_stable, + papas, + pfblocks, + pfreconstruct, + sim_electrons, + sim_muons, + electrons, + muons, +# select_leptons, +# smear_leptons, + merge_particles, +] diff --git a/PhysicsTools/HeppyCore/python/test/plot_ee_Z.py b/PhysicsTools/HeppyCore/python/test/plot_ee_Z.py new file mode 100644 index 0000000000000..775b40a4773bd --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/plot_ee_Z.py @@ -0,0 +1,35 @@ +from ROOT import TFile, TCanvas, TH1F, gPad +import time + +holder = list() + +def plot(fname): + root_file = TFile(fname) + tree = root_file.Get('events') + + canvas = TCanvas("canvas", "canvas", 600,600) + + hist = TH1F("hist", ";mass of all particles (GeV)", 50, 0, 200) + tree.Draw('sum_all_m>>hist', '', '') + hist.Fit("gaus") + gPad.Update() + gPad.SaveAs('sum_all_m.png') + time.sleep(1) + func = hist.GetFunction("gaus") + + holder.extend([root_file, tree, canvas, hist, func]) + + return func.GetParameter(1), func.GetParameter(2) + +if __name__ == '__main__': + + import sys + + if len(sys.argv)!=2: + print 'usage ' + sys.exit(1) + + mean, sigma = plot(sys.argv[1]) + print mean, sigma + + diff --git a/PhysicsTools/HeppyCore/python/test/plot_ee_ZH.py b/PhysicsTools/HeppyCore/python/test/plot_ee_ZH.py new file mode 100644 index 0000000000000..d6bc6af1259f0 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/plot_ee_ZH.py @@ -0,0 +1,32 @@ +from ROOT import TFile, TCanvas, TH1F, gPad +import time + 
+def plot(fname): + root_file = TFile(fname) + tree = root_file.Get('events') + + canvas = TCanvas("canvas", "canvas", 600,600) + + h = TH1F("h", "higgs di-jet mass;m_{jj} (GeV)", 50, 0, 200) + tree.Draw('higgs_m>>h', 'zed_m>50') + # h.GetYaxis().SetRangeUser(0, 120) + h.Fit("gaus") + gPad.Update() + gPad.SaveAs('ee_ZH_mjj.png') + time.sleep(1) + func = h.GetFunction("gaus") + return func.GetParameter(1), func.GetParameter(2) + + +if __name__ == '__main__': + + import sys + + if len(sys.argv)!=2: + print 'usage ' + sys.exit(1) + + mean, sigma = plot(sys.argv[1]) + print mean, sigma + + diff --git a/PhysicsTools/HeppyCore/python/test/simple_example_cfg.py b/PhysicsTools/HeppyCore/python/test/simple_example_cfg.py new file mode 100644 index 0000000000000..4d27e4347d92e --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/simple_example_cfg.py @@ -0,0 +1,75 @@ + +import os +import PhysicsTools.HeppyCore.framework.config as cfg +from PhysicsTools.HeppyCore.framework.chain import Chain as Events +import logging +logging.basicConfig(level=logging.INFO) + +# input component +# several input components can be declared, +# and added to the list of selected components +inputSample = cfg.Component( + 'test_component', + # create the test file by running + # python create_tree.py + files = [os.path.abspath('test_tree.root')], + ) + +selectedComponents = [inputSample] + +# add a random variable to the event +from PhysicsTools.HeppyCore.analyzers.examples.simple.RandomAnalyzer import RandomAnalyzer +random = cfg.Analyzer( + RandomAnalyzer + ) + + +# just prints a variable in the input test tree +from PhysicsTools.HeppyCore.analyzers.examples.simple.Printer import Printer +printer = cfg.Analyzer( + Printer + ) + +# illustrates how to use an exception to stop processing at event 10 +# for debugging purposes. 
+from PhysicsTools.HeppyCore.analyzers.examples.simple.Stopper import Stopper +stopper = cfg.Analyzer( + Stopper, + iEv = 10 + ) + +# creating a simple output tree +from PhysicsTools.HeppyCore.analyzers.examples.simple.SimpleTreeProducer import SimpleTreeProducer +tree = cfg.Analyzer( + SimpleTreeProducer, + tree_name = 'tree', + tree_title = 'A test tree' + ) + + +# definition of a sequence of analyzers, +# the analyzers will process each event in this order +sequence = cfg.Sequence([ + random, + # printer, + # stopper, + tree, +] ) + +from PhysicsTools.HeppyCore.framework.services.tfile import TFileService +output_rootfile = cfg.Service( + TFileService, + 'myhists', + fname='histograms.root', + option='recreate' +) + +services = [output_rootfile] + +# finalization of the configuration object. +config = cfg.Config( components = selectedComponents, + sequence = sequence, + services = services, + events_class = Events ) + +# print config diff --git a/PhysicsTools/HeppyCore/python/test/simple_example_noindexing_cfg.py b/PhysicsTools/HeppyCore/python/test/simple_example_noindexing_cfg.py new file mode 100644 index 0000000000000..4adb9720ee87e --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/simple_example_noindexing_cfg.py @@ -0,0 +1,41 @@ +import os +import copy +import PhysicsTools.HeppyCore.framework.config as cfg + +from PhysicsTools.HeppyCore.framework.chain_noindexing import ChainNoIndexing as Events + +import logging +logging.basicConfig(level=logging.INFO) + + +# input component +# several input components can be declared, +# and added to the list of selected components +inputSample = cfg.Component( + 'test_component', + # create the test file by running + # python create_tree.py + files = [os.path.abspath('test_tree.root')], + ) + +selectedComponents = [inputSample] + +from PhysicsTools.HeppyCore.analyzers.examples.simple.Printer import Printer +printer = cfg.Analyzer( + Printer + ) + +# definition of a sequence of analyzers, +# the analyzers will 
process each event in this order +sequence = cfg.Sequence( [ + printer, +] ) + + +# finalization of the configuration object. +config = cfg.Config( components = selectedComponents, + sequence = sequence, + services = [], + events_class = Events ) + +# print config diff --git a/PhysicsTools/HeppyCore/test/simple_example_cfg.py b/PhysicsTools/HeppyCore/python/test/simple_multi_example_cfg.py similarity index 63% rename from PhysicsTools/HeppyCore/test/simple_example_cfg.py rename to PhysicsTools/HeppyCore/python/test/simple_multi_example_cfg.py index 55551c67883ac..5f3e14cbf202e 100644 --- a/PhysicsTools/HeppyCore/test/simple_example_cfg.py +++ b/PhysicsTools/HeppyCore/python/test/simple_multi_example_cfg.py @@ -1,4 +1,7 @@ + import os +import shutil + import PhysicsTools.HeppyCore.framework.config as cfg from PhysicsTools.HeppyCore.framework.chain import Chain as Events import logging @@ -7,40 +10,42 @@ # input component # several input components can be declared, # and added to the list of selected components + +# os.system('python create_tree.py') +# shutil.copy('test_tree.root', 'test_tree_2.root') + inputSample = cfg.Component( 'test_component', - # create the test file by running - # python create_tree.py - files = [os.path.abspath('test_tree.root')], + files = [os.path.abspath('test_tree.root'), + os.path.abspath('test_tree_2.root')], + splitFactor = 2 ) selectedComponents = [inputSample] -from PhysicsTools.HeppyCore.analyzers.Printer import Printer -printer = cfg.Analyzer( - Printer + +# add a random variable to the event +from PhysicsTools.HeppyCore.analyzers.examples.simple.RandomAnalyzer import RandomAnalyzer +random = cfg.Analyzer( + RandomAnalyzer ) -from PhysicsTools.HeppyCore.analyzers.SimpleTreeProducer import SimpleTreeProducer +# creating a simple output tree +from PhysicsTools.HeppyCore.analyzers.examples.simple.SimpleTreeProducer import SimpleTreeProducer tree = cfg.Analyzer( SimpleTreeProducer, + instance_label = 'tree', tree_name = 'tree', 
tree_title = 'A test tree' ) -from PhysicsTools.HeppyCore.analyzers.Histogrammer import Histogrammer -histos = cfg.Analyzer( - Histogrammer, - file_label = 'myhists' -) # definition of a sequence of analyzers, # the analyzers will process each event in this order -sequence = cfg.Sequence( [ - printer, - tree, - histos - ] ) +sequence = cfg.Sequence([ + random, + tree, +] ) from PhysicsTools.HeppyCore.framework.services.tfile import TFileService output_rootfile = cfg.Service( @@ -58,4 +63,4 @@ services = services, events_class = Events ) -# print config + diff --git a/PhysicsTools/HeppyCore/python/test/suite.py b/PhysicsTools/HeppyCore/python/test/suite.py new file mode 100644 index 0000000000000..f53142e66c110 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/suite.py @@ -0,0 +1,34 @@ + +if __name__ == '__main__': + import unittest + import sys + import os + + import PhysicsTools.HeppyCore.framework.context as context + + os.chdir(context.heppy_path) + + suites = [] + + pcks = [ + 'analyzers', + 'display', + 'framework', + 'test', # if particles is before test, test fails! 
+ 'papas', + 'particles', + 'statistics', + 'utils' + ] + + for pck in pcks: + suites.append(unittest.TestLoader().discover(pck)) + + suite = unittest.TestSuite(suites) + # result = unittest.TextTestResult(sys.stdout, True, 1) + # suite.run(result) + runner = unittest.TextTestRunner() + runner.run(suite) + + + diff --git a/PhysicsTools/HeppyCore/python/test/test_analysis_ee_Z.py b/PhysicsTools/HeppyCore/python/test/test_analysis_ee_Z.py new file mode 100644 index 0000000000000..60db6b2ae8376 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/test_analysis_ee_Z.py @@ -0,0 +1,58 @@ +import unittest +import tempfile +import copy +import os +import shutil + +import PhysicsTools.HeppyCore.framework.context as context + +if context.name == 'fcc': + + from analysis_ee_Z_cfg import config + from PhysicsTools.HeppyCore.test.plot_ee_Z import plot + from PhysicsTools.HeppyCore.framework.looper import Looper + from ROOT import TFile + + import logging + logging.getLogger().setLevel(logging.ERROR) + + import PhysicsTools.HeppyCore.statistics.rrandom as random + + class TestAnalysis_ee_Z(unittest.TestCase): + + def setUp(self): + random.seed(0xdeadbeef) + self.outdir = tempfile.mkdtemp() + fname = '/'.join([os.environ['HEPPY'], + 'test/data/ee_Z_ddbar.root']) + config.components[0].files = [fname] + self.looper = Looper( self.outdir, config, + nEvents=100, + nPrint=0, + timeReport=True) + import logging + logging.disable(logging.CRITICAL) + + def tearDown(self): + shutil.rmtree(self.outdir) + logging.disable(logging.NOTSET) + + def test_analysis(self): + '''Check for an almost perfect match with reference. + Will fail if physics algorithms are modified, + so should probably be removed from test suite, + or better: be made optional. 
+ ''' + self.looper.loop() + self.looper.write() + rootfile = '/'.join([self.outdir, + 'PhysicsTools.HeppyCore.analyzers.GlobalEventTreeProducer.GlobalEventTreeProducer_1/tree.root']) + mean, sigma = plot(rootfile) + self.assertAlmostEqual(mean, 93.6, 1) + self.assertAlmostEqual(sigma, 11.2, 1) + + + +if __name__ == '__main__': + + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/test/test_analysis_ee_ZH.py b/PhysicsTools/HeppyCore/python/test/test_analysis_ee_ZH.py new file mode 100644 index 0000000000000..274f3406b5ef1 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/test_analysis_ee_ZH.py @@ -0,0 +1,73 @@ +import unittest +import tempfile +import copy +import os +import shutil + +import PhysicsTools.HeppyCore.framework.context as context + +if context.name == 'fcc': + + from analysis_ee_ZH_cfg import config + from PhysicsTools.HeppyCore.test.plot_ee_ZH import plot + from PhysicsTools.HeppyCore.framework.looper import Looper + from ROOT import TFile + + import logging + logging.getLogger().setLevel(logging.ERROR) + + import PhysicsTools.HeppyCore.statistics.rrandom as random + + def test_sorted(ptcs): + from PhysicsTools.HeppyCore.configuration import Collider + keyname = 'pt' + if Collider.BEAMS == 'ee': + keyname = 'e' + pt_or_e = getattr(ptcs[0].__class__, keyname) + values = [pt_or_e(ptc) for ptc in ptcs] + return values == sorted(values, reverse=True) + + + class TestAnalysis_ee_ZH(unittest.TestCase): + + def setUp(self): + random.seed(0xdeadbeef) + self.outdir = tempfile.mkdtemp() + fname = '/'.join([os.environ['HEPPY'], + 'test/data/ee_ZH_Zmumu_Hbb.root']) + config.components[0].files = [fname] + self.looper = Looper( self.outdir, config, + nEvents=50, + nPrint=0, + timeReport=True) + import logging + logging.disable(logging.CRITICAL) + + def tearDown(self): + shutil.rmtree(self.outdir) + logging.disable(logging.NOTSET) + + def test_analysis(self): + '''Check for an almost perfect match with reference. 
+ Will fail if physics algorithms are modified, + so should probably be removed from test suite, + or better: be made optional. + ''' + self.looper.loop() + self.looper.write() + rootfile = '/'.join([self.outdir, + 'PhysicsTools.HeppyCore.analyzers.examples.zh.ZHTreeProducer.ZHTreeProducer_1/tree.root']) + mean, sigma = plot(rootfile) + self.assertAlmostEqual(mean, 120.7, 1) + self.assertAlmostEqual(sigma, 20.3, 1) + + def test_analysis_sorting(self): + self.looper.process(0) + self.assertTrue(test_sorted(self.looper.event.rec_particles)) + + + + +if __name__ == '__main__': + + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/test/test_multiprocessing.py b/PhysicsTools/HeppyCore/python/test/test_multiprocessing.py new file mode 100644 index 0000000000000..43d10704920ed --- /dev/null +++ b/PhysicsTools/HeppyCore/python/test/test_multiprocessing.py @@ -0,0 +1,85 @@ +import unittest +import shutil +import tempfile +import os +import subprocess +import copy +import glob +from simple_example_cfg import config, stopper +from PhysicsTools.HeppyCore.utils.testtree import create_tree, remove_tree +from PhysicsTools.HeppyCore.framework.looper import Looper +from PhysicsTools.HeppyCore.framework.exceptions import UserStop +import PhysicsTools.HeppyCore.framework.context as context +from ROOT import TFile + +import logging +logging.getLogger().setLevel(logging.ERROR) + +class Options(object): + pass + +class TestMultiProcessing(unittest.TestCase): + + def setUp(self): + self.fname = create_tree() + self.fname2 = self.fname.replace('.root','_2.root') + shutil.copy(self.fname, self.fname2) + rootfile = TFile(self.fname) + self.nevents = rootfile.Get('test_tree').GetEntries() + self.outdir = tempfile.mkdtemp() + logging.disable(logging.CRITICAL) + + def tearDown(self): + shutil.rmtree(self.outdir) + logging.disable(logging.NOTSET) + os.remove(self.fname2) + + def test_multiprocessing(self): + from PhysicsTools.HeppyCore.framework.heppy_loop import create_parser, main + 
parser = create_parser() + options, args = parser.parse_args() + options.iEvent = None + options.nprint = 0 + cfg = '/'.join( [ context.heppy_path, + 'test/simple_multi_example_cfg.py' ] ) + main(options, [self.outdir, cfg], parser) + wcard = '/'.join([self.outdir, + 'test_component_Chunk*', + 'PhysicsTools.HeppyCore.analyzers.examples.simple.SimpleTreeProducer.SimpleTreeProducer_tree/simple_tree.root' + ]) + output_root_files = glob.glob(wcard) + self.assertEqual(len(output_root_files),2) + + # def test_heppy_batch(self): + # cmd = ['heppy_batch.py', + # '-o', + # '{}'.format(self.outdir), + # '-b', + # 'nohup ./batchScript.sh &', + # 'simple_multi_example_cfg.py'] + # FNULL = open(os.devnull,'w') + # p = subprocess.Popen(cmd, stdout=FNULL, + # stderr=subprocess.STDOUT) + # # p.communicate() + # p.wait() + # import time + # wcard = '/'.join([self.outdir, + # 'test_component_Chunk*', + # 'PhysicsTools.HeppyCore.analyzers.examples.simple.SimpleTreeProducer.SimpleTreeProducer_tree/simple_tree.root' + # ]) + # output_root_files = [] + # print wcard + # for i in range(50): + # # waiting for max 10 seconds for the nohup processes + # # to complete and the files to appear. 
class TestNoIndexing(unittest.TestCase):
    '''Exercises the looper on an events backend that does not support
    random access (no indexing).'''

    def setUp(self):
        '''Create the input tree and a scratch output directory.'''
        self.fname = create_tree()
        infile = TFile(self.fname)
        self.nevents = infile.Get('test_tree').GetEntries()
        self.outdir = tempfile.mkdtemp()

    def tearDown(self):
        shutil.rmtree(self.outdir)

    def _run_looper(self, **kwargs):
        '''Build a looper on the shared config, run it fully, write the
        output, and return the looper.'''
        loop = Looper(self.outdir, config,
                      nEvents=None,
                      nPrint=0,
                      timeReport=True,
                      **kwargs)
        loop.loop()
        loop.write()
        return loop

    def test_all_events_processed(self):
        '''All input events are processed, and the log file agrees.'''
        loop = self._run_looper()
        nev_processed = None
        with open('/'.join([self.outdir, 'log.txt'])) as logfile:
            for line in logfile:
                if line.startswith('number of events processed:'):
                    nev_processed = int(line.split(':')[1])
        self.assertEqual(nev_processed, self.nevents)
        # checking the looper itself.
        self.assertEqual(loop.nEvProcessed, self.nevents)

    def test_skip(self):
        '''Skipping the first events processes only the remainder.'''
        first = 10
        loop = self._run_looper(firstEvent=first)
        # input file has 200 entries
        # we skip 10 entries, so we process 190.
        self.assertEqual(loop.nEvProcessed, self.nevents - first)

    def test_process_event(self):
        '''Test that indeed, calling loop.process(iev) raises
        TypeError if the events backend does not support indexing.
        '''
        loop = Looper(self.outdir, config,
                      nEvents=None,
                      nPrint=0,
                      timeReport=True)
        self.assertRaises(TypeError, loop.process, 10)
the looper itself. + self.assertEqual(loop.nEvProcessed, self.nevents) + + def test_skip(self): + first = 10 + loop = Looper( self.outdir, config, + nEvents=None, + firstEvent=first, + nPrint=0, + timeReport=True) + loop.loop() + loop.write() + # input file has 200 entries + # we skip 10 entries, so we process 190. + self.assertEqual(loop.nEvProcessed, self.nevents-first) + + def test_process_event(self): + loop = Looper( self.outdir, config, + nEvents=None, + nPrint=0, + timeReport=True) + loop.process(10) + self.assertEqual(loop.event.input.var1, 10) + loop.process(10) + + def test_userstop(self): + config_with_stopper = copy.copy(config) + config_with_stopper.sequence.insert(1, stopper) + loop = Looper( self.outdir, config_with_stopper, + nEvents=None, + nPrint=0, + timeReport=True) + self.assertRaises(UserStop, loop.process, 10) + + def test_rewrite(self): + parser = create_parser() + options, args = parser.parse_args() + options.iEvent = None + options.nprint = 0 + cfg = '/'.join( [ context.heppy_path, + 'test/simple_example_cfg.py' ] ) + main(options, [self.outdir, cfg], parser) + options.force = True + main(options, [self.outdir, cfg], parser) + subdirs = os.listdir(self.outdir) + self.assertEqual(len(subdirs), 2) + +if __name__ == '__main__': + + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/utils/batchmanager.py b/PhysicsTools/HeppyCore/python/utils/batchmanager.py index 4d121d2691298..e05a303697935 100644 --- a/PhysicsTools/HeppyCore/python/utils/batchmanager.py +++ b/PhysicsTools/HeppyCore/python/utils/batchmanager.py @@ -8,6 +8,7 @@ import re import pprint import time +import socket import eostools as castortools @@ -30,14 +31,14 @@ def DefineOptions(self): # how to add more doc to the help? self.parser_ = OptionParser() self.parser_.add_option("-o", "--output-dir", dest="outputDir", - help="Name of the local output directory for your jobs. 
This directory will be created automatically.", - default=None) + help="Name of the local output directory for your jobs. This directory will be created automatically.", + default=None) self.parser_.add_option("-r", "--remote-copy", dest="remoteCopy", - help="remote output directory for your jobs. Example: /store/cmst3/user/cbern/CMG/HT/Run2011A-PromptReco-v1/AOD/PAT_CMG/RA2. This directory *must* be provided as a logical file name (LFN). When this option is used, all root files produced by a job are copied to the remote directory, and the job index is appended to the root file name. The Logger directory will be sent back to the submision directory. For remote copy to PSI specify path like: '/pnfs/psi.ch/...'. Note: enviromental variable X509_USER_PROXY must point to home area before renewing proxy", - default=None) + help="remote output directory for your jobs. Example: /store/cmst3/user/cbern/CMG/HT/Run2011A-PromptReco-v1/AOD/PAT_CMG/RA2. This directory *must* be provided as a logical file name (LFN). When this option is used, all root files produced by a job are copied to the remote directory, and the job index is appended to the root file name. The Logger directory is tarred and compressed into Logger.tgz, and sent to the remote output directory as well. Afterwards, use logger.py to access the information contained in Logger.tgz. For remote copy to PSI specify path like: '/pnfs/psi.ch/...'. Logs will be sent back to the submision directory. 
NOTE: so far this option has been implemented and validated to work only for a remote copy to PSI", + default=None) self.parser_.add_option("-f", "--force", action="store_true", dest="force", default=False, - help="Don't ask any questions, just over-write") + help="Don't ask any questions, just over-write") # this opt can be removed self.parser_.add_option("-n", "--negate", action="store_true", dest="negate", default=False, @@ -52,11 +53,11 @@ def DefineOptions(self): default=[], help="Save one extra option (either a flag, or a key=value pair) that can be then accessed from the job config file") - def ParseOptions(self): + def ParseOptions(self): (self.options_,self.args_) = self.parser_.parse_args() if self.options_.remoteCopy == None: self.remoteOutputDir_ = "" - else: + else: # removing possible trailing slash self.remoteOutputDir_ = self.options_.remoteCopy.rstrip('/') if "psi.ch" in self.remoteOutputDir_: # T3 @ PSI: @@ -82,9 +83,9 @@ def ParseOptions(self): if not castortools.isLFN( self.remoteOutputDir_ ): print 'When providing an output directory, you must give its LFN, starting by /store. 
You gave:' print self.remoteOutputDir_ - sys.exit(1) + sys.exit(1) self.remoteOutputDir_ = castortools.lfnToEOS( self.remoteOutputDir_ ) - dirExist = castortools.isDirectory( self.remoteOutputDir_ ) + dirExist = castortools.isDirectory( self.remoteOutputDir_ ) # nsls = 'nsls %s > /dev/null' % self.remoteOutputDir_ # dirExist = os.system( nsls ) if dirExist is False: @@ -99,21 +100,17 @@ def ParseOptions(self): if self.options_.negate is False and self.options_.force is False: #COLIN need to reimplement protectedRemove in eostools raise ValueError( ' '.join(['directory ', self.remoteOutputDir_, ' already exists.'])) - # if not castortools.protectedRemove( self.remoteOutputDir_, '.*root'): - # the user does not want to delete the root files - self.remoteOutputFile_ = "" - self.ManageOutputDir() return (self.options_, self.args_) - + def PrepareJobs(self, listOfValues, listOfDirNames=None): + self.ManageOutputDir() print 'PREPARING JOBS ======== ' self.listOfJobs_ = [] - if listOfDirNames is None: - for value in listOfValues: - self.PrepareJob( value ) + for value in listOfValues: + self.PrepareJob( value ) else: for value, name in zip( listOfValues, listOfDirNames): self.PrepareJob( value, name ) @@ -124,57 +121,55 @@ def PrepareJobs(self, listOfValues, listOfDirNames=None): # create output dir, if necessary def ManageOutputDir( self ): + '''Create output directory, if necessary. - #if the output dir is not specified, generate a name - #else - #test if the directory exists - #if yes, returns - + if the output dir is not specified, generate a name + else + test if the directory exists + if yes, returns. 
+ ''' outputDir = self.options_.outputDir - if outputDir==None: today = datetime.today() outputDir = 'OutCmsBatch_%s' % today.strftime("%d%h%y_%H%M%S") - print 'output directory not specified, using %s' % outputDir - + print 'output directory not specified, using %s' % outputDir self.outputDir_ = os.path.abspath(outputDir) - - if( os.path.isdir(self.outputDir_) == True ): + if( os.path.isdir(self.outputDir_) == True and os.listdir(self.outputDir_) ): input = '' if not self.options_.force: while input != 'y' and input != 'n': - input = raw_input( 'The directory ' + self.outputDir_ + ' exists. Are you sure you want to continue? its contents will be overwritten [y/n] ' ) + input = raw_input( 'The directory ' + self.outputDir_ + ' exists. Are you sure you want to continue? its contents will be overwritten [y/n]' ) if input == 'n': sys.exit(1) else: os.system( 'rm -rf ' + self.outputDir_) - + self.mkdir( self.outputDir_ ) - + def PrepareJob( self, value, dirname=None): '''Prepare a job for a given value. calls PrepareJobUser, which should be overloaded by the user. ''' - print 'PrepareJob : %s' % value + print 'PrepareJob : %s' % value dname = dirname if dname is None: dname = 'Job_{value}'.format( value=value ) jobDir = '/'.join( [self.outputDir_, dname]) - print '\t',jobDir + print '\t',jobDir self.mkdir( jobDir ) self.listOfJobs_.append( jobDir ) self.PrepareJobUser( jobDir, value ) - + def PrepareJobUser(self, value ): '''Hook allowing user to define how one of his jobs should be prepared.''' print '\to be customized' - + def SubmitJobs( self, waitingTimeInSec=0 ): '''Submit all jobs. 
Possibly wait between each job''' - + if(self.options_.negate): print '*NOT* SUBMITTING JOBS - exit ' return @@ -193,7 +188,7 @@ def SubmitJobs( self, waitingTimeInSec=0 ): def SubmitJob( self, jobDir ): '''Hook for job submission.''' - print 'submitting (to be customized): ', jobDir + print 'submitting (to be customized): ', jobDir os.system( self.options_.batch ) @@ -205,7 +200,7 @@ def CheckBatchScript( self, batchScript ): if batchScript == '': return - + if( os.path.isfile(batchScript)== False ): print 'file ',batchScript,' does not exist' sys.exit(3) @@ -238,7 +233,7 @@ def mkdir( self, dirname ): if( ret != 0 ): print 'please remove or rename directory: ', dirname sys.exit(4) - + def RunningMode(self, batch): @@ -252,15 +247,14 @@ def RunningMode(self, batch): In all other cases, a CmsBatchException is raised ''' - - hostName = os.environ['HOSTNAME'] - + + hostName = socket.gethostname() onLxplus = hostName.startswith('lxplus') onPSI = hostName.startswith('t3ui') onNAF = hostName.startswith('naf') batchCmd = batch.split()[0] - + if batchCmd == 'bsub': if not onLxplus: err = 'Cannot run %s on %s' % (batchCmd, hostName) diff --git a/PhysicsTools/HeppyCore/python/utils/deltar.py b/PhysicsTools/HeppyCore/python/utils/deltar.py index b7622c0c992e0..06054f3c492c3 100644 --- a/PhysicsTools/HeppyCore/python/utils/deltar.py +++ b/PhysicsTools/HeppyCore/python/utils/deltar.py @@ -3,11 +3,18 @@ import math import copy +import PhysicsTools.HeppyCore.configuration + +DEFAULT_DRMAX = 0.3 +DEFAULT_DRMIN = 1e-5 def deltaR2( e1, p1, e2=None, p2=None): - """Take either 4 arguments (eta,phi, eta,phi) or two objects that have 'eta', 'phi' methods)""" + """Take either 4 arguments (eta,phi, eta,phi) or two particles that have 'eta', 'phi' methods)""" if (e2 == None and p2 == None): - return deltaR2(e1.eta(),e1.phi(), p1.eta(), p1.phi()) + if PhysicsTools.HeppyCore.configuration.Collider.BEAMS == 'ee': + return deltaR2(e1.theta(),e1.phi(), p1.theta(), p1.phi()) + else: + return 
deltaR2(e1.eta(),e1.phi(), p1.eta(), p1.phi()) de = e1 - e2 dp = deltaPhi(p1, p2) return de*de + dp*dp @@ -27,80 +34,67 @@ def deltaPhi( p1, p2): return res -def inConeCollection(pivot, particles, deltaRMax, deltaRMin=1e-5): +def inConeCollection(pivot, particles, + deltaRMax = DEFAULT_DRMAX, + deltaRMin = DEFAULT_DRMIN): '''Returns the list of particles that are less than deltaRMax away from pivot.''' dR2Max = deltaRMax ** 2 - dR2Min = deltaRMin ** 2 if deltaRMin > 0 else -1 + dR2Min = deltaRMin ** 2 results = [] for ptc in particles: - dR2 = deltaR2(pivot.eta(), pivot.phi(), ptc.eta(), ptc.phi()) - if dR2Min < dR2 and dR2 < dR2Max: + dR2 = deltaR2(pivot, ptc) + if dR2Min <= dR2 < dR2Max: results.append(ptc) return results -def matchObjectCollection3 ( objects, matchCollection, deltaRMax = 0.3, filter = lambda x,y : True ): - '''Univoque association of an element from matchCollection to an element of objects. - Reco and Gen objects get the "matched" attribute, true is they are re part of a matched tulpe. - By default, the matching is true only if delta R is smaller than 0.3. 
def cleanObjectCollection(ptcs, masks, deltaRMax=DEFAULT_DRMAX):
    '''returns a tuple clean_ptcs, dirty_ptcs,
    where:
    - dirty_ptcs is the list of particles in ptcs that are matched to a particle
    in masks.
    - clean_ptcs is the list of particles in ptcs that are NOT matched to a
    particle in masks.

    The matching is done within a cone of size deltaRMax.
    '''
    # doc fix: the docstring used to say "deltaRMin", but the cone size
    # parameter is deltaRMax.
    if len(ptcs)==0 or len(masks)==0:
        return ptcs, []
    dR2Max = deltaRMax ** 2
    clean_ptcs = []
    dirty_ptcs = []
    for ptc in ptcs:
        # a particle is dirty as soon as one mask falls inside the cone;
        # any() short-circuits instead of scanning all masks
        if any(deltaR2(ptc, mask) < dR2Max for mask in masks):
            dirty_ptcs.append( ptc )
        else:
            clean_ptcs.append( ptc )
    return clean_ptcs, dirty_ptcs
+ ''' + if len(ptcs)==0: + return ptcs + dR2Max = deltaRMax ** 2 + clean_ptcs = copy.copy( ptcs ) for mask in masks: tooClose = [] - for idx, object in enumerate(cleanObjects): - dR2 = deltaR2( object.eta(), object.phi(), - mask.eta(), mask.phi() ) - if dR2 < deltaR2Min: + for idx, ptc in enumerate(clean_ptcs): + dR2 = deltaR2(ptc, mask) + if dR2 < dR2Max: tooClose.append( idx ) nRemoved = 0 for idx in tooClose: @@ -111,97 +105,137 @@ def cleanObjectCollection2( objects, masks, deltaRMin ): # -> ele 2 is now at index 1 # one should again remove the element at index 1 idx -= nRemoved - del cleanObjects[idx] + del clean_ptcs[idx] nRemoved += 1 - return cleanObjects + return clean_ptcs -def cleanObjectCollection( objects, masks, deltaRMin ): - '''Masks objects using a deltaR cut.''' - if len(objects)==0 or len(masks)==0: - return objects, [] - deltaR2Min = deltaRMin*deltaRMin - cleanObjects = [] - dirtyObjects = [] - for object in objects: - ok = True - for mask in masks: - dR2 = deltaR2( object.eta(), object.phi(), - mask.eta(), mask.phi() ) - if dR2 < deltaR2Min: - ok = False - if ok: - cleanObjects.append( object ) - else: - dirtyObjects.append( object ) - return cleanObjects, dirtyObjects - -def bestMatch( object, matchCollection): - '''Return the best match to object in matchCollection, which is the closest object in delta R''' +def bestMatch(ptc, matchCollection): + '''Return the best match to ptc in matchCollection, + which is the closest ptc in delta R, + together with the squared distance dR2 between ptc + and the match.''' deltaR2Min = float('+inf') bm = None for match in matchCollection: - dR2 = deltaR2( object.eta(), object.phi(), - match.eta(), match.phi() ) + dR2 = deltaR2(ptc, match) if dR2 < deltaR2Min: deltaR2Min = dR2 bm = match return bm, deltaR2Min -def matchObjectCollection( objects, matchCollection, deltaR2Max, filter = lambda x,y : True): +def matchObjectCollection(ptcs, matchCollection, + deltaRMax=DEFAULT_DRMAX, filter = lambda x,y : True): 
def matchObjectCollection3(ptcs, matchCollection,
                           deltaRMax=DEFAULT_DRMAX,
                           filter_func=None):
    '''Univoque association of an element from matchCollection to an element of ptcs.
    Returns a dictionary {ptc: match_or_None} (doc fix: it never returned
    a list of tuples).
    particles in ptcs and matchCollection get the "matched" attribute,
    True if they are part of a matched pair.
    By default, the matching is accepted only if delta R is smaller
    than deltaRMax (DEFAULT_DRMAX).
    '''
    if filter_func is None:
        filter_func = lambda x,y : True
    pairs = {}
    if len(ptcs)==0:
        return pairs
    if len(matchCollection)==0:
        return dict( zip(ptcs, [None]*len(ptcs)) )
    # build all possible combinations, pre-filtered on |delta eta|
    ptc_coords = [ (o.eta(),o.phi(),o) for o in ptcs ]
    matched_coords = [ (o.eta(),o.phi(),o) for o in matchCollection ]
    allPairs = [(deltaR2 (oeta, ophi, meta, mphi), (ptc, match))
                for (oeta,ophi,ptc) in ptc_coords
                for (meta,mphi,match) in matched_coords
                if abs(oeta-meta)<=deltaRMax and filter_func(ptc,match) ]
    # sort on the distance only: comparing the (ptc, match) payload on
    # dR2 ties would call arbitrary object comparison (a crash on py3)
    allPairs.sort(key=lambda pair: pair[0])
    # to flag already matched objects
    # FIXME this variable remains appended to the object, I do not like it
    for ptc in ptcs:
        ptc.matched = False
    for match in matchCollection:
        match.matched = False

    dR2Max = deltaRMax ** 2
    for dR2, (ptc, match) in allPairs:
        if dR2 > dR2Max:
            # pairs are sorted by distance: no better pair can follow
            break
        if dR2 < dR2Max and not ptc.matched and not match.matched:
            ptc.matched = True
            match.matched = True
            pairs[ptc] = match

    for ptc in ptcs:
        if not ptc.matched:
            pairs[ptc] = None

    return pairs
    # by now, the matched attribute remains in the objects, for future usage
    # one could remove it with delattr (object, attrname)
+ + To set it up + import pdebug as pdebug + from pdebug import pdebugger + + Use following 3 lines and comment out as needed to obtain desired behaviour + #pdebugger.setLevel(logging.ERROR) # turns off all output + pdebugger.setLevel(logging.INFO) # turns on ouput + pdebug.set_file("pdebug.log",level=logging.INFO) #optional writes to file + pdebugger.set_stream(level=logging.ERROR) + + For example + (1) file and console: + pdebugger.setLevel(logging.INFO) + pdebug.set_file("pdebug.log") + + (2) console only: + pdebugger.setLevel(logging.INFO) + + (3) file only: + pdebugger.setLevel(logging.INFO) + pdebug.set_file("pdebug.log") + pdebug.set_stream(level=logging.ERROR) + + (4) no output + pdebugger.setLevel(logging.ERROR) + or else no lines of code also gives same result + + to use in code + from pdebug import pdebugger + pdebugger.info("A message") + +''' + +#Note the first use of this header should come from the top level of the program +#If not the stream output may be missing +pdebugger = logging.getLogger('pdebug') +pdebugger.setLevel(logging.ERROR) +pdebugger.propagate = False + +def set_file(filename = "pdebug.log", mode='w', level ="INFO"): + #todo add checks + cf = logging.FileHandler(filename, mode) + cf.setLevel(level) + pdebugger.addHandler(cf) + +def set_stream(out=sys.stdout, level ="INFO"): + ch = logging.StreamHandler(out) + ch.setLevel(level) + mformatter = logging.Formatter('%(message)s') + ch.setFormatter(mformatter) + pdebugger.addHandler(ch) + +if __name__ == '__main__': + + pdebugger.setLevel(logging.INFO) + set_stream(sys.stdout) + set_file("pdebug.log") + pdebugger.info('blah') diff --git a/PhysicsTools/HeppyCore/python/utils/dataset_test.py b/PhysicsTools/HeppyCore/python/utils/test_dataset.py similarity index 82% rename from PhysicsTools/HeppyCore/python/utils/dataset_test.py rename to PhysicsTools/HeppyCore/python/utils/test_dataset.py index 4fa46350d4f9a..c9739ba85a3e4 100644 --- a/PhysicsTools/HeppyCore/python/utils/dataset_test.py +++ 
class TestDeltaR(unittest.TestCase):

    #----------------------------------------------------------------------
    def setUp(self):
        """maps the space with particles on a regular (eta, phi) grid"""
        self.ptcs = {}
        for ieta in range(-30, 30, 2):
            eta = ieta / 10.
            for iphi in range(-30, 30, 2):
                phi = iphi / 10.
                tlv = TLorentzVector()
                tlv.SetPtEtaPhiM(10, eta, phi, 0)
                self.ptcs[(eta, phi)] = Particle(1, 0, tlv)

    #----------------------------------------------------------------------
    def test_deltaPhi(self):
        """Test that the deltaPhi function works properly around pi"""
        dphi = deltaPhi(math.pi-0.1, -math.pi+0.1)
        self.assertAlmostEqual(dphi, -0.2)

    #----------------------------------------------------------------------
    def test_deltaR2(self):
        """Test that the deltaR2 method properly uses either eta or
        theta depending on the collider configuration
        """
        # Fix: the previous version left Collider.BEAMS set to 'ee',
        # leaking global state into the other tests. Save and restore it.
        old_beams = Collider.BEAMS
        try:
            Collider.BEAMS = 'pp'
            tlv1 = TLorentzVector()
            tlv1.SetPtEtaPhiM(10, 1.1, 0, 0)
            tlv2 = TLorentzVector()
            tlv2.SetPtEtaPhiM(10, 1.2, 0, 0)
            ptc1 = Particle(1, 1, tlv1)
            ptc2 = Particle(1, 1, tlv2)
            dR = math.sqrt( deltaR2(ptc1, ptc2))
            self.assertAlmostEqual(dR, 0.1)

            Collider.BEAMS = 'ee'
            tlv1 = TLorentzVector()
            tlv1.SetPtEtaPhiM(10, 1.1, 0, 0)
            tlv1.SetTheta(1.1)
            tlv2 = TLorentzVector()
            tlv2.SetPtEtaPhiM(10, 1.2, 0, 0)
            tlv2.SetTheta(1.2)
            ptc1 = Particle(1, 1, tlv1)
            ptc2 = Particle(1, 1, tlv2)
            dR = math.sqrt( deltaR2(ptc1, ptc2))
            self.assertAlmostEqual(dR, 0.1)
        finally:
            Collider.BEAMS = old_beams

    #----------------------------------------------------------------------
    def test_inConeCollection(self):
        ptc0 = self.ptcs[(0, 0)]
        # very small cone: check that the pivot itself is not included
        in_cone = inConeCollection(ptc0, self.ptcs.values(), 0.01)
        self.assertEqual(len(in_cone), 0)
        # four direct neighbours on the grid
        in_cone = inConeCollection(ptc0, self.ptcs.values(), 0.201)
        self.assertEqual(len(in_cone), 4)

    def test_cleanObjectCollection(self):
        # masking only one particle
        clean, dirty = cleanObjectCollection(self.ptcs.values(),
                                             [ self.ptcs[(0, 0)] ],
                                             0.01)
        self.assertEqual(dirty, [self.ptcs[0, 0]])
        self.assertEqual(len(clean), len(self.ptcs) - 1 )
check that the pivot is + # not included + in_cone = inConeCollection(ptc0, self.ptcs.values(), 0.01) + self.assertEqual(len(in_cone), 0) + # four neighbours + in_cone = inConeCollection(ptc0, self.ptcs.values(), 0.201) + self.assertEqual(len(in_cone), 4) + + def test_cleanObjectCollection(self): + # masking only one particle + clean, dirty = cleanObjectCollection(self.ptcs.values(), + [ self.ptcs[(0, 0)] ], + 0.01) + self.assertEqual(dirty, [self.ptcs[0, 0]]) + self.assertEqual(len(clean), len(self.ptcs) - 1 ) + + +if __name__ == '__main__': + + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/utils/diclist_test.py b/PhysicsTools/HeppyCore/python/utils/test_diclist.py similarity index 100% rename from PhysicsTools/HeppyCore/python/utils/diclist_test.py rename to PhysicsTools/HeppyCore/python/utils/test_diclist.py diff --git a/PhysicsTools/HeppyCore/python/utils/test_pdebug.py b/PhysicsTools/HeppyCore/python/utils/test_pdebug.py new file mode 100644 index 0000000000000..6d3c287eec541 --- /dev/null +++ b/PhysicsTools/HeppyCore/python/utils/test_pdebug.py @@ -0,0 +1,44 @@ +import unittest +import os +import logging +import pdebug as pdebug +from StringIO import StringIO + +class TestPDebug(unittest.TestCase): + + def test_debug_output(self): + out = StringIO() + pdebug.pdebugger.setLevel(logging.ERROR) + pdebug.set_stream(out) + pdebug.pdebugger.error('error console') + pdebug.pdebugger.info('info console') + pdebug.pdebugger.info('debug console') + output = out.getvalue().strip() + assert output == "error console" + + + pdebug.pdebugger.setLevel(logging.INFO) + pdebug.pdebugger.error('error console') + pdebug.pdebugger.info('info console') + pdebug.pdebugger.debug('debug console') + output = out.getvalue().strip() + assert output == "error console\nerror console\ninfo console" + + + #add in file handler + filename = "tempunittestdebug.log" + pdebug.set_file(filename) + pdebug.pdebugger.error('error file') + pdebug.pdebugger.info('info file') + 
pdebug.pdebugger.debug('debug file') + with open(filename, 'r') as dbfile: + data=dbfile.read() + assert data == "error file\ninfo file\n" + os.remove("tempunittestdebug.log") + output = out.getvalue().strip() + assert output == "error console\nerror console\ninfo console\nerror file\ninfo file" + + +if __name__ == '__main__': + + unittest.main() diff --git a/PhysicsTools/HeppyCore/python/utils/testtree.py b/PhysicsTools/HeppyCore/python/utils/testtree.py index 2da1221cbd838..a6504f61bea44 100644 --- a/PhysicsTools/HeppyCore/python/utils/testtree.py +++ b/PhysicsTools/HeppyCore/python/utils/testtree.py @@ -1,14 +1,30 @@ from ROOT import TFile from PhysicsTools.HeppyCore.statistics.tree import Tree +import os -def create_tree(filename="test_tree.root"): +FNAME="test_tree.root" + +def create_tree(filename=FNAME, nentries=None): + if not nentries: + if os.path.isfile(filename): + #default number of entries, file exists + return filename + else: + nentries = 200 + nentries = int(nentries) outfile = TFile(filename, 'recreate') tree = Tree('test_tree', 'A test tree') tree.var('var1') - for i in range(100): + for i in range(nentries): tree.fill('var1', i) tree.tree.Fill() - print 'creating a tree', tree.tree.GetName(),\ - tree.tree.GetEntries(), 'entries in',\ - outfile.GetName() outfile.Write() + outfile.Close() + return outfile.GetName() + +def remove_tree(filename=FNAME): + os.remove(filename) + +if __name__ == '__main__': + create_tree() + diff --git a/PhysicsTools/HeppyCore/scripts/heppy_batch.py b/PhysicsTools/HeppyCore/scripts/heppy_batch.py index 664524a246778..31378dc47a636 100755 --- a/PhysicsTools/HeppyCore/scripts/heppy_batch.py +++ b/PhysicsTools/HeppyCore/scripts/heppy_batch.py @@ -1,4 +1,4 @@ -#!/bin/env python +#!/usr/bin/env python import sys import imp @@ -9,8 +9,9 @@ import json import math from PhysicsTools.HeppyCore.utils.batchmanager import BatchManager +from PhysicsTools.HeppyCore.framework.config import split -from 
PhysicsTools.HeppyCore.framework.heppy_loop import split +import PhysicsTools.HeppyCore.framework.looper as looper def batchScriptPADOVA( index, jobDir='./'): '''prepare the LSF version of the batch script, to run on LSF''' @@ -32,12 +33,12 @@ def batchScriptPADOVA( index, jobDir='./'): eval `scram runtime -sh` ls echo 'running' -python $CMSSW_BASE/src/PhysicsTools/HeppyCore/python/framework/looper.py pycfg.py config.pck --options=options.json >& local.output +python {looper} pycfg.py config.pck --options=options.json >& local.output exit $? #echo #echo 'sending the job directory back' #echo cp -r Loop/* $LS_SUBCWD -""".format(jdir=jobDir) +""".format(looper=looper.__file__, jdir=jobDir) return script @@ -65,17 +66,17 @@ def batchScriptPISA( index, remoteDir=''): ls echo `find . -type d | grep /` echo 'running' -python $CMSSW_BASE/src/PhysicsTools/HeppyCore/python/framework/looper.py pycfg.py config.pck --options=options.json >& local.output +python {looper} pycfg.py config.pck --options=options.json >& local.output exit $? #echo #echo 'sending the job directory back' #echo cp -r Loop/* $LS_SUBCWD -""" +""".format(looper=looper.__file__) return script def batchScriptCERN( jobDir, remoteDir=''): '''prepare the LSF version of the batch script, to run on LSF''' - + dirCopy = """echo 'sending the logs back' # will send also root files if copy failed rm Loop/cmsswPreProcessing.root cp -r Loop/* $LS_SUBCWD @@ -84,7 +85,6 @@ def batchScriptCERN( jobDir, remoteDir=''): else echo 'job directory copy succeeded' fi""" - if remoteDir=='': cpCmd=dirCopy elif remoteDir.startswith("root://eoscms.cern.ch//eos/cms/store/"): @@ -155,10 +155,49 @@ def batchScriptCERN( jobDir, remoteDir=''): ls cd `find . 
-type d | grep /` echo 'running' -python $CMSSW_BASE/src/PhysicsTools/HeppyCore/python/framework/looper.py pycfg.py config.pck --options=options.json +python {looper} pycfg.py config.pck --options=options.json echo {copy} -""".format(copy=cpCmd) +""".format(looper=looper.__file__, copy=cpCmd) + + return script + + + +def batchScriptCERN_FCC( jobDir ): + '''prepare the LSF version of the batch script, to run on LSF''' + + dirCopy = """echo 'sending the logs back' # will send also root files if copy failed +cp -r Loop/* $LS_SUBCWD +if [ $? -ne 0 ]; then + echo 'ERROR: problem copying job directory back' +else + echo 'job directory copy succeeded' +fi""" + cpCmd=dirCopy + + script = """#!/bin/bash +#BSUB -q 8nm +# ulimit -v 3000000 # NO +unset LD_LIBRARY_PATH +echo 'copying job dir to worker' +source /afs/cern.ch/exp/fcc/sw/0.7/init_fcc_stack.sh +cd $HEPPY +source ./init.sh +echo 'environment:' +echo +env | sort +echo +which python +cd - +cp -rf $LS_SUBCWD . +ls +cd `find . -type d | grep /` +echo 'running' +python {looper} pycfg.py config.pck +echo +{copy} +""".format(looper=looper.__file__, copy=cpCmd) return script @@ -244,8 +283,7 @@ def batchScriptPSI( index, jobDir, remoteDir=''): ls cd `find . 
-type d | grep /` echo 'running' -python $CMSSW_BASE/src/PhysicsTools/HeppyCore/python/framework/looper.py pycfg.py config.pck --options=options.json -#python $CMSSW_BASE/src/CMGTools/RootTools/python/fwlite/looper.py config.pck +python {looper} pycfg.py config.pck --options=options.json echo {copy} ########################################################################### @@ -255,7 +293,7 @@ def batchScriptPSI( index, jobDir, remoteDir=''): echo "Job finished at " `date` echo "Wallclock running time: $RUNTIME s" exit 0 -""".format(jdir=jobDir, vo=VO_CMS_SW_DIR,cmssw=cmssw_release, copy=cpCmd) +""".format(jdir=jobDir, vo=VO_CMS_SW_DIR,cmssw=cmssw_release, looper=looper.__file__, copy=cpCmd) return script @@ -272,11 +310,11 @@ def batchScriptIC(jobDir): eval `scramv1 ru -sh` cd - echo 'running' -python {cmssw}/src/PhysicsTools/HeppyCore/python/framework/looper.py pycfg.py config.pck --options=options.json +python {looper} pycfg.py config.pck --options=options.json echo echo 'sending the job directory back' mv Loop/* ./ && rm -r Loop -""".format(jobdir = jobDir,cmssw = cmssw_release) +""".format(jobdir = jobDir, looper=looper.__file__, cmssw = cmssw_release) return script def batchScriptLocal( remoteDir, index ): @@ -284,7 +322,7 @@ def batchScriptLocal( remoteDir, index ): script = """#!/bin/bash echo 'running' -python $CMSSW_BASE/src/PhysicsTools/HeppyCore/python/framework/looper.py pycfg.py config.pck --options=options.json +python {looper} pycfg.py config.pck --options=options.json echo echo 'sending the job directory back' mv Loop/* ./ @@ -296,53 +334,81 @@ class MyBatchManager( BatchManager ): '''Batch manager specific to cmsRun processes.''' def PrepareJobUser(self, jobDir, value ): - '''Prepare one job. 
This function is called by the base class.''' - print value - print components[value] - - #prepare the batch script - scriptFileName = jobDir+'/batchScript.sh' - scriptFile = open(scriptFileName,'w') - storeDir = self.remoteOutputDir_.replace('/castor/cern.ch/cms','') - mode = self.RunningMode(options.batch) - if mode == 'LXPLUS': - scriptFile.write( batchScriptCERN( jobDir, storeDir ) ) - elif mode == 'PSI': - scriptFile.write( batchScriptPSI ( value, jobDir, storeDir ) ) # storeDir not implemented at the moment - elif mode == 'LOCAL': - scriptFile.write( batchScriptLocal( storeDir, value) ) # watch out arguments are swapped (although not used) - elif mode == 'PISA' : - scriptFile.write( batchScriptPISA( storeDir, value) ) - elif mode == 'PADOVA' : - scriptFile.write( batchScriptPADOVA( value, jobDir) ) - elif mode == 'IC': - scriptFile.write( batchScriptIC(jobDir) ) - scriptFile.close() - os.system('chmod +x %s' % scriptFileName) - - shutil.copyfile(cfgFileName, jobDir+'/pycfg.py') + '''Prepare one job. 
This function is called by the base class.''' + print value + print self.components[value] + + #prepare the batch script + scriptFileName = jobDir+'/batchScript.sh' + scriptFile = open(scriptFileName,'w') + storeDir = self.remoteOutputDir_.replace('/castor/cern.ch/cms','') + mode = self.RunningMode(self.options_.batch) + if mode == 'LXPLUS': + if 'CMSSW_BASE' in os.environ and not 'PODIO' in os.environ: + scriptFile.write( batchScriptCERN( jobDir, storeDir) ) + elif 'PODIO' in os.environ: + #FCC case + scriptFile.write( batchScriptCERN_FCC( jobDir ) ) + else: + assert(False) + elif mode == 'PSI': + # storeDir not implemented at the moment + scriptFile.write( batchScriptPSI ( value, jobDir, storeDir ) ) + elif mode == 'LOCAL': + # watch out arguments are swapped (although not used) + scriptFile.write( batchScriptLocal( storeDir, value) ) + elif mode == 'PISA' : + scriptFile.write( batchScriptPISA( storeDir, value) ) + elif mode == 'PADOVA' : + scriptFile.write( batchScriptPADOVA( value, jobDir) ) + elif mode == 'IC': + scriptFile.write( batchScriptIC(jobDir) ) + scriptFile.close() + os.system('chmod +x %s' % scriptFileName) + + shutil.copyfile(self.cfgFileName, jobDir+'/pycfg.py') # jobConfig = copy.deepcopy(config) -# jobConfig.components = [ components[value] ] - cfgFile = open(jobDir+'/config.pck','w') - pickle.dump( components[value] , cfgFile ) - # pickle.dump( cfo, cfgFile ) - cfgFile.close() - if hasattr(self,"heppyOptions_"): - optjsonfile = open(jobDir+'/options.json','w') - optjsonfile.write(json.dumps(self.heppyOptions_)) - optjsonfile.close() - -if __name__ == '__main__': - batchManager = MyBatchManager() - batchManager.parser_.usage=""" +# jobConfig.self.components = [ self.components[value] ] + cfgFile = open(jobDir+'/config.pck','w') + pickle.dump( self.components[value] , cfgFile ) + # pickle.dump( cfo, cfgFile ) + cfgFile.close() + if hasattr(self,"heppyOptions_"): + optjsonfile = open(jobDir+'/options.json','w') + 
optjsonfile.write(json.dumps(self.heppyOptions_)) + optjsonfile.close() + + +def create_batch_manager(): + batchManager = MyBatchManager() + batchManager.parser_.usage=""" %prog [options] Run Colin's python analysis system on the batch. Job splitting is determined by your configuration file. """ + return batchManager + + +def main(options, args, batchManager): + batchManager.cfgFileName = args[0] - options, args = batchManager.ParseOptions() + handle = open(batchManager.cfgFileName, 'r') + cfo = imp.load_source("pycfg", batchManager.cfgFileName, handle) + config = cfo.config + handle.close() + + batchManager.components = split( [comp for comp in config.components \ + if len(comp.files)>0] ) + listOfValues = range(0, len(batchManager.components)) + listOfNames = [comp.name for comp in batchManager.components] + + batchManager.PrepareJobs( listOfValues, listOfNames ) + waitingTime = 0.1 + batchManager.SubmitJobs( waitingTime ) + +if __name__ == '__main__': from PhysicsTools.HeppyCore.framework.heppy_loop import _heppyGlobalOptions for opt in options.extraOptions: if "=" in opt: @@ -352,19 +418,6 @@ def PrepareJobUser(self, jobDir, value ): _heppyGlobalOptions[opt] = True batchManager.heppyOptions_=_heppyGlobalOptions - cfgFileName = args[0] - - handle = open(cfgFileName, 'r') - # import pdb; pdb.set_trace() - cfo = imp.load_source("pycfg", cfgFileName, handle) - config = cfo.config - handle.close() - - components = split( [comp for comp in config.components if len(comp.files)>0] ) - listOfValues = range(0, len(components)) - listOfNames = [comp.name for comp in components] - - batchManager.PrepareJobs( listOfValues, listOfNames ) - waitingTime = 0.1 - batchManager.SubmitJobs( waitingTime ) - + batchManager = create_batch_manager() + options, args = batchManager.ParseOptions() + main(options, args, batchManager) diff --git a/PhysicsTools/HeppyCore/scripts/heppy_loop.py b/PhysicsTools/HeppyCore/scripts/heppy_loop.py index 991b4aa1cf808..957c79aa12603 100755 --- 
a/PhysicsTools/HeppyCore/scripts/heppy_loop.py +++ b/PhysicsTools/HeppyCore/scripts/heppy_loop.py @@ -1,70 +1,8 @@ -if __name__ == '__main__': - from optparse import OptionParser - from PhysicsTools.HeppyCore.framework.heppy_loop import main - - parser = OptionParser() - parser.usage = """ - %prog - For each component, start a Loop. - 'name' is whatever you want. - """ - - parser.add_option("-N", "--nevents", - dest="nevents", - type="int", - help="number of events to process", - default=None) - parser.add_option("-p", "--nprint", - dest="nprint", - help="number of events to print at the beginning", - default=5) - parser.add_option("-e", "--iEvent", - dest="iEvent", - help="jump to a given event. ignored in multiprocessing.", - default=None) - parser.add_option("-f", "--force", - dest="force", - action='store_true', - help="don't ask questions in case output directory already exists.", - default=False) - parser.add_option("-i", "--interactive", - dest="interactive", - action='store_true', - help="stay in the command line prompt instead of exiting", - default=False) - parser.add_option("-t", "--timereport", - dest="timeReport", - action='store_true', - help="Make a report of the time used by each analyzer", - default=False) - parser.add_option("-v", "--verbose", - dest="verbose", - action='store_true', - help="increase the verbosity of the output (from 'warning' to 'info' level)", - default=False) - parser.add_option("-q", "--quiet", - dest="quiet", - action='store_true', - help="do not print log messages to screen.", - default=False) - parser.add_option("-o", "--option", - dest="extraOptions", - type="string", - action="append", - default=[], - help="Save one extra option (either a flag, or a key=value pair) that can be then accessed from the job config file") - parser.add_option("-j", "--ntasks", - dest="ntasks", - type="int", - help="number of parallel tasks to span", - default=10) - parser.add_option("--memcheck", - dest="memCheck", - action='store_true', - 
help="Activate memory checks per event", - default=False) - +#!/usr/bin/env python +if __name__ == '__main__': + from PhysicsTools.HeppyCore.framework.heppy_loop import * + parser = create_parser() (options,args) = parser.parse_args() loop = main(options, args, parser) diff --git a/PhysicsTools/HeppyCore/test/create_tree.py b/PhysicsTools/HeppyCore/test/create_tree.py deleted file mode 100644 index 12037b41bccc2..0000000000000 --- a/PhysicsTools/HeppyCore/test/create_tree.py +++ /dev/null @@ -1,2 +0,0 @@ -from PhysicsTools.HeppyCore.utils.testtree import create_tree -create_tree() diff --git a/VHbbAnalysis/Heppy/python/TTHtoTauTauAnalyzer.py b/VHbbAnalysis/Heppy/python/TTHtoTauTauAnalyzer.py index 9cb7f34131535..174369e8a65b1 100644 --- a/VHbbAnalysis/Heppy/python/TTHtoTauTauAnalyzer.py +++ b/VHbbAnalysis/Heppy/python/TTHtoTauTauAnalyzer.py @@ -32,11 +32,11 @@ def addTau_genMatchType(self, event, tau): if tau.genJet(): genMatchType = 0 if genMatchType == 1: - match = matchObjectCollection3([ tau ], genParticles, deltaRMax = 0.4, filter = lambda x,y : True if (y.pt() > 0.5*x.pt() and abs(y.pdgId()) == 11) else False) + match = matchObjectCollection3([ tau ], genParticles, deltaRMax = 0.4, filter_func = lambda x,y : True if (y.pt() > 0.5*x.pt() and abs(y.pdgId()) == 11) else False) if match[tau]: genMatchType = 2 if genMatchType == 1: - match = matchObjectCollection3([ tau ], genParticles, deltaRMax = 0.4, filter = lambda x,y : True if (y.pt() > 0.5*x.pt() and abs(y.pdgId()) == 13) else False) + match = matchObjectCollection3([ tau ], genParticles, deltaRMax = 0.4, filter_func = lambda x,y : True if (y.pt() > 0.5*x.pt() and abs(y.pdgId()) == 13) else False) if match[tau]: genMatchType = 3 diff --git a/VHbbAnalysis/Heppy/python/VHbbAnalyzer.py b/VHbbAnalysis/Heppy/python/VHbbAnalyzer.py index 8d63b3943e972..3023b90dda338 100644 --- a/VHbbAnalysis/Heppy/python/VHbbAnalyzer.py +++ b/VHbbAnalysis/Heppy/python/VHbbAnalyzer.py @@ -50,8 +50,9 @@ def addNewBTag(self,event): 
def beginLoop(self,setup): super(VHbbAnalyzer,self).beginLoop(setup) - if "outputfile" in setup.services : - setup.services["outputfile"].file.cd() + outservice_name = "PhysicsTools.HeppyCore.framework.services.tfile.TFileService_outputfile" + if outservice_name in setup.services : + setup.services[outservice_name].file.cd() self.inputCounter = ROOT.TH1F("Count","Count",1,0,2) self.inputCounterFullWeighted = ROOT.TH1F("CountFullWeighted","Count with gen weight and pu weight",1,0,2) self.inputCounterWeighted = ROOT.TH1F("CountWeighted","Count with sign(gen weight) and pu weight",1,0,2) diff --git a/VHbbAnalysis/Heppy/test/vhbb.py b/VHbbAnalysis/Heppy/test/vhbb.py index af1c901efb11b..84d3caee70787 100755 --- a/VHbbAnalysis/Heppy/test/vhbb.py +++ b/VHbbAnalysis/Heppy/test/vhbb.py @@ -522,7 +522,7 @@ def filter(self, record): # and the following runs the process directly if __name__ == '__main__': from PhysicsTools.HeppyCore.framework.looper import Looper - looper = Looper( 'Loop', config, nPrint = 1, nEvents = 10) + looper = Looper( 'Loop', config, nPrint = 0, nEvents = 10) import time import cProfile diff --git a/VHbbAnalysis/Heppy/test/vhbb_combined.py b/VHbbAnalysis/Heppy/test/vhbb_combined.py index 07bf4015987f6..f85d3436b2a7e 100755 --- a/VHbbAnalysis/Heppy/test/vhbb_combined.py +++ b/VHbbAnalysis/Heppy/test/vhbb_combined.py @@ -154,7 +154,7 @@ config.preprocessor=preprocessor if __name__ == '__main__': from PhysicsTools.HeppyCore.framework.looper import Looper - looper = Looper( 'Loop', config, nPrint = 1, nEvents = 2000) + looper = Looper( 'Loop', config, nPrint = 0, nEvents = 2000) import time import cProfile p = cProfile.Profile(time.clock)