From 73910a62a53cdd1e2e321c3fc1c5013a50a9262c Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Mon, 16 Nov 2020 13:33:31 +0100 Subject: [PATCH 01/14] Add xtitle to some plots --- Validation/HGCalValidation/python/hgcalPlots.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index f247728cdd344..cd01366f3af20 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -1305,28 +1305,28 @@ "stat": False, "legend": True, "xmin": 0, - "xmax": 2.0, + "xmax": 1.0, } _common_shared.update(_legend_common) _shared_plots = [ Plot("SharedEnergy_caloparticle2multicl", **_common_shared) ] _common_shared["xmin"] = -4.0 _common_shared["xmax"] = 4.0 -_shared_plots.extend([Plot("SharedEnergy_caloparticle2multicl_vs_eta", **_common_shared)]) -_shared_plots.extend([Plot("SharedEnergy_caloparticle2multicl_vs_phi", **_common_shared)]) +_shared_plots.extend([Plot("SharedEnergy_caloparticle2multicl_vs_eta", xtitle="CaloParticle #eta", **_common_shared)]) +_shared_plots.extend([Plot("SharedEnergy_caloparticle2multicl_vs_phi", xtitle="CaloParticle #phi", **_common_shared)]) _sharedEnergy_caloparticle_to_multicluster = PlotGroup("SharedEnergy_CaloParticleToMultiCluster", _shared_plots, ncols=3) _common_shared= {"title": "Shared Energy Multi Cluster To CaloParticle ", "stat": False, "legend": True, "xmin": 0, - "xmax": 2.0, + "xmax": 1.0, } _common_shared.update(_legend_common) _shared_plots2 = [Plot("SharedEnergy_multicluster2caloparticle", **_common_shared)] _common_shared["xmin"] = -4.0 _common_shared["xmax"] = 4.0 -_shared_plots2.extend([Plot("SharedEnergy_multicl2caloparticle_vs_eta", **_common_shared)]) -_shared_plots2.extend([Plot("SharedEnergy_multicl2caloparticle_vs_phi", **_common_shared)]) +_shared_plots2.extend([Plot("SharedEnergy_multicl2caloparticle_vs_eta", xtitle="MultiCluster #eta", **_common_shared)]) +_shared_plots2.extend([Plot("SharedEnergy_multicl2caloparticle_vs_phi", xtitle="MultiCluster #phi", **_common_shared)]) _sharedEnergy_multicluster_to_caloparticle = PlotGroup("SharedEnergy_MultiClusterToCaloParticle", _shared_plots2, ncols=3) From 06d24065ecff01a455f715647d4962a8c895f813 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Thu, 19 Nov 2020 13:35:47 +0100 Subject: [PATCH 02/14] Explore/extend CaloParticle validation --- .../interface/HGCalValidator.h | 4 +- .../interface/HGVHistoProducerAlgo.h | 15 ++- .../HGCalValidation/plugins/HGCalValidator.cc | 19 ++-- .../HGCalValidation/python/hgcalPlots.py | 35 +++++++ .../scripts/makeHGCalValidationPlots.py | 14 ++- .../src/HGVHistoProducerAlgo.cc | 96 ++++++++++++++++++- 6 files changed, 169 insertions(+), 14 deletions(-) diff --git a/Validation/HGCalValidation/interface/HGCalValidator.h b/Validation/HGCalValidation/interface/HGCalValidator.h index 789f1b29129ee..cbd1ba2585d40 100644 --- a/Validation/HGCalValidation/interface/HGCalValidator.h +++ b/Validation/HGCalValidation/interface/HGCalValidator.h @@ -54,7 +54,9 @@ class HGCalValidator : public DQMGlobalEDAnalyzer { void cpParametersAndSelection(const Histograms& histograms, std::vector const& cPeff, std::vector const& simVertices, - std::vector& selected_cPeff) const; + std::vector& selected_cPeff, + unsigned layers, + std::unordered_map const&) const; protected: edm::InputTag label_lcl; diff --git a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h 
b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h index f013a758b8270..b90ac8df39f58 100644 --- a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h +++ b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h @@ -92,6 +92,15 @@ struct HGVHistoProducerAlgoHistograms { std::unordered_map h_caloparticle_energy; std::unordered_map h_caloparticle_pt; std::unordered_map h_caloparticle_phi; + std::unordered_map h_caloparticle_nSimClusters; + std::unordered_map h_caloparticle_nHitsInSimClusters; + std::unordered_map h_caloparticle_firstlayer; + std::unordered_map h_caloparticle_lastlayer; + std::unordered_map h_caloparticle_layersnum; + std::unordered_map h_caloparticle_nHitsInSimClusters_matchedtoRecHit; + std::unordered_map h_caloparticle_firstlayer_matchedtoRecHit; + std::unordered_map h_caloparticle_lastlayer_matchedtoRecHit; + std::unordered_map h_caloparticle_layersnum_matchedtoRecHit; //For multiclusters std::vector h_score_multicl2caloparticle; @@ -155,7 +164,7 @@ class HGVHistoProducerAlgo { using Histograms = HGVHistoProducerAlgoHistograms; void bookInfo(DQMStore::IBooker& ibook, Histograms& histograms); - void bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid); + void bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid, unsigned layers); void bookClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, @@ -186,7 +195,9 @@ class HGVHistoProducerAlgo { void fill_caloparticle_histos(const Histograms& histograms, int pdgid, const CaloParticle& caloparticle, - std::vector const& simVertices) const; + std::vector const& simVertices, + unsigned layers, + std::unordered_map const&) const; void fill_cluster_histos(const Histograms& histograms, int count, const reco::CaloCluster& cluster) const; void fill_generic_cluster_histos( const Histograms& histograms, diff --git a/Validation/HGCalValidation/plugins/HGCalValidator.cc b/Validation/HGCalValidation/plugins/HGCalValidator.cc index 0ee7f7cc4383c..713bbb94cc299 100644 --- a/Validation/HGCalValidation/plugins/HGCalValidator.cc +++ b/Validation/HGCalValidation/plugins/HGCalValidator.cc @@ -91,7 +91,7 @@ void HGCalValidator::bookHistograms(DQMStore::IBooker& ibook, for (auto const particle : particles_to_monitor_) { ibook.setCurrentFolder(dirName_ + "SelectedCaloParticles/" + std::to_string(particle)); - histoProducerAlgo_->bookCaloParticleHistos(ibook, histograms.histoProducerAlgo, particle); + histoProducerAlgo_->bookCaloParticleHistos(ibook, histograms.histoProducerAlgo, particle, totallayers_to_monitor_); } ibook.cd(); ibook.setCurrentFolder(dirName_); @@ -141,7 +141,9 @@ void HGCalValidator::bookHistograms(DQMStore::IBooker& ibook, void HGCalValidator::cpParametersAndSelection(const Histograms& histograms, std::vector const& cPeff, std::vector const& simVertices, - std::vector& selected_cPeff) const { + std::vector& selected_cPeff, + unsigned layers, + std::unordered_map const& hitMap) const { selected_cPeff.reserve(cPeff.size()); size_t j = 0; @@ -151,7 +153,7 @@ void HGCalValidator::cpParametersAndSelection(const Histograms& histograms, if (!doCaloParticleSelection_ || (doCaloParticleSelection_ && cpSelector(caloParticle, simVertices))) { selected_cPeff.push_back(j); if (doCaloParticlePlots_) { - histoProducerAlgo_->fill_caloparticle_histos(histograms.histoProducerAlgo, id, caloParticle, simVertices); + histoProducerAlgo_->fill_caloparticle_histos(histograms.histoProducerAlgo, id, caloParticle, simVertices, layers, hitMap); } } ++j; @@ -163,7 
+165,7 @@ void HGCalValidator::dqmAnalyze(const edm::Event& event, const Histograms& histograms) const { using namespace reco; - LogDebug("HGCalValidator") << "\n====================================================" + std::cout << "\n====================================================" << "\n" << "Analyzing new event" << "\n" @@ -214,9 +216,10 @@ void HGCalValidator::dqmAnalyze(const edm::Event& event, // ############################################## // fill caloparticles histograms // ############################################## - LogTrace("HGCalValidator") << "\n# of CaloParticles: " << caloParticles.size() << "\n"; + // HGCRecHit are given to select the SimHits which are also reconstructed + std::cout << "\n# of CaloParticles: " << caloParticles.size() << "\n" << std::endl; std::vector selected_cPeff; - cpParametersAndSelection(histograms, caloParticles, simVertices, selected_cPeff); + cpParametersAndSelection(histograms, caloParticles, simVertices, selected_cPeff, totallayers_to_monitor_, *hitMap); //get collections from the event //Layer clusters @@ -277,9 +280,9 @@ void HGCalValidator::dqmAnalyze(const edm::Event& event, totallayers_to_monitor_); //General Info on multiclusters - LogTrace("HGCalValidator") << "\n# of multi clusters with " << label_mcl[wml].process() << ":" + std::cout << "\n# of multi clusters with " << label_mcl[wml].process() << ":" << label_mcl[wml].label() << ":" << label_mcl[wml].instance() << ": " - << multiClusters.size() << "\n"; + << multiClusters.size() << "\n" << std::endl; } } //end of loop over multicluster input labels } diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index cd01366f3af20..bd8ead898e7a8 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -2031,6 +2031,41 @@ def append_hgcalMultiClustersPlots(collection = 'ticlMultiClustersFromTracksters # numberOfEventsHistogram=_multiplicity_zplus_numberOfEventsHistogram # )) +#================================================================================================= +hgcalCaloParticlesPlotter = Plotter() +def append_hgcalCaloParticlesPlots(files, collection = '-211', name_collection = "pion-"): + + dqmfolder = "DQMData/Run 1/HGCAL/Run summary/HGCalValidator/SelectedCaloParticles/" + collection + print(dqmfolder) + templateFile = ROOT.TFile.Open(files[0]) # assuming all files have same structure + keys = gDirectory.GetDirectory(dqmfolder,True).GetListOfKeys() + key = keys[0] + while key: + obj = key.ReadObj() + name = obj.GetName() + fileName = TString(name) + fileName.ReplaceAll(" ","_") + pg= PlotGroup(fileName.Data(),[ + Plot(name, + xtitle=obj.GetXaxis().GetTitle(), ytitle=obj.GetYaxis().GetTitle(), + #drawCommand = "", # may want to customize for TH2 (colz, etc.) 
+ normalizeToNumberOfEvents = True, **_common) + ], + ncols=1) + + hgcalCaloParticlesPlotter.append("CaloParticles_"+name_collection, [ + dqmfolder + ], PlotFolder( + pg, + loopSubFolders=False, + purpose=PlotPurpose.Timing, page="CaloParticles", section=name_collection) + ) + + key = keys.After(key) + + templateFile.Close() + + return hgcalCaloParticlesPlotter #================================================================================================= # hitValidation diff --git a/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py b/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py index 2eb441df6a359..07bc7bcdf9123 100755 --- a/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py +++ b/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py @@ -19,10 +19,11 @@ trackstersGeneralLabel = 'allTiclMultiClusters' hitValidationLabel = 'hitValidation' hitCalibrationLabel = 'hitCalibration' +caloParticlesLabel = 'caloParticles' allLabel = 'all' collection_choices = [layerClustersGeneralLabel] -collection_choices.extend([multiclustersGeneralLabel]+[trackstersGeneralLabel]+[hitValidationLabel]+[hitCalibrationLabel]+[allLabel]) +collection_choices.extend([multiclustersGeneralLabel]+[trackstersGeneralLabel]+[hitValidationLabel]+[hitCalibrationLabel]+[allLabel]+[caloParticlesLabel]) def main(opts): @@ -58,6 +59,17 @@ def main(opts): tracksterCollection = i_iter.replace("ticlMultiClustersFromTracksters","ticlTracksters") hgcalPlots.append_hgcalMultiClustersPlots(i_iter, tracksterCollection) val.doPlots(hgcmulticlus, plotterDrawArgs=drawArgs) + elif opts.collection==caloParticlesLabel: + particletypes = {"pion-":"-211", "pion+":"211", "pion0": "111", + "muon-": "-13", "muon+":"13", + "electron-": "-11", "electron+": "11", "photon": "22", + "kaon-": "-321", "kaon+": "321"} + hgcaloPart = [hgcalPlots.hgcalCaloParticlesPlotter] + for i_part, i_partID in particletypes.iteritems() : + print(i_part) + print(i_partID) + hgcalPlots.append_hgcalCaloParticlesPlots(sample.files(), i_partID, i_part) + val.doPlots(hgcaloPart, plotterDrawArgs=drawArgs) elif opts.collection==hitValidationLabel: hgchit = [hgcalPlots.hgcalHitPlotter] hgcalPlots.append_hgcalHitsPlots('HGCalSimHitsV', "Simulated Hits") diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index 9e45bb02da062..a78c13913842f 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -192,7 +192,7 @@ void HGVHistoProducerAlgo::bookInfo(DQMStore::IBooker& ibook, Histograms& histog histograms.maxlayerzp = ibook.bookInt("maxlayerzp"); } -void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid) { +void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid, unsigned layers) { histograms.h_caloparticle_eta[pdgid] = ibook.book1D("num_caloparticle_eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_); histograms.h_caloparticle_eta_Zorigin[pdgid] = @@ -203,6 +203,27 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist histograms.h_caloparticle_pt[pdgid] = ibook.book1D("caloparticle_pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_); histograms.h_caloparticle_phi[pdgid] = ibook.book1D("caloparticle_phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_); + + histograms.h_caloparticle_nSimClusters[pdgid] = + 
ibook.book1D("caloparticle_nSimClusters", "Num Sim Clusters in caloparticle", 100, 0., 100.); + histograms.h_caloparticle_nHitsInSimClusters[pdgid] = + ibook.book1D("caloparticle_nHitsInSimClusters", "Num Hits in Sim Clusters in caloparticle", 1000, 0., 1000.); + histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit[pdgid] = + ibook.book1D("caloparticle_nHitsInSimClusters_matchedtoRecHit", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.); + + histograms.h_caloparticle_firstlayer[pdgid] = + ibook.book1D("caloparticle_firstlayer", "First layer of the caloparticle", 2 * layers, 0., (float)2 * layers); + histograms.h_caloparticle_lastlayer[pdgid] = + ibook.book1D("caloparticle_lastlayer", "Last layer of the caloparticle", 2 * layers, 0., (float)2 * layers); + histograms.h_caloparticle_layersnum[pdgid] = + ibook.book1D("caloparticle_layersnum", "Number of layers of the caloparticle", 2 * layers, 0., (float)2 * layers); + + histograms.h_caloparticle_firstlayer_matchedtoRecHit[pdgid] = + ibook.book1D("caloparticle_firstlayer_matchedtoRecHit", "First layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + histograms.h_caloparticle_lastlayer_matchedtoRecHit[pdgid] = + ibook.book1D("caloparticle_lastlayer_matchedtoRecHit", "Last layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + histograms.h_caloparticle_layersnum_matchedtoRecHit[pdgid] = + ibook.book1D("caloparticle_layersnum_matchedtoRecHit", "Number of layers of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); } void HGVHistoProducerAlgo::bookClusterHistos(DQMStore::IBooker& ibook, @@ -780,7 +801,16 @@ void HGVHistoProducerAlgo::fill_info_histos(const Histograms& histograms, unsign void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms, int pdgid, const CaloParticle& caloparticle, - std::vector const& simVertices) const { + std::vector const& simVertices, + unsigned layers, + std::unordered_map const& hitMap) const { + for(auto const& hit : hitMap){ + const auto hitDetId = hit.first; + int layerId = recHitTools_->getLayerWithOffset(hitDetId) + + layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1; + //std::cout << " layerId of HGCRecHit = " << layerId << std::endl; + } + const auto eta = getEta(caloparticle.eta()); if (histograms.h_caloparticle_eta.count(pdgid)) { histograms.h_caloparticle_eta.at(pdgid)->Fill(eta); @@ -799,6 +829,63 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms if (histograms.h_caloparticle_phi.count(pdgid)) { histograms.h_caloparticle_phi.at(pdgid)->Fill(caloparticle.phi()); } + + if (histograms.h_caloparticle_nSimClusters.count(pdgid)) { + histograms.h_caloparticle_nSimClusters.at(pdgid)->Fill(caloparticle.simClusters().size()); + + int simHits = 0; + int minLayerId = 999; + int maxLayerId = 0; + + int simHits_matched = 0; + int minLayerId_matched = 999; + int maxLayerId_matched = 0; + +// float energy = 0.; + + for (auto const& sc : caloparticle.simClusters()) { + simHits += sc->hits_and_fractions().size(); + for (auto const& h_and_f : sc->hits_and_fractions()) { + const auto hitDetId = h_and_f.first; + int layerId = recHitTools_->getLayerWithOffset(hitDetId) + + layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1; + //std::cout << " layerId of simHit = " << layerId << std::endl; + + // set to 0 if matched RecHit not found + int layerId_matched_min = 999; + int layerId_matched_max = 0; + std::unordered_map::const_iterator itcheck = hitMap.find(hitDetId); + if (itcheck 
!= hitMap.end()) { + //std::cout << " matched to RecHit FOUND !" << std::endl; + layerId_matched_min = layerId; + layerId_matched_max = layerId; + simHits_matched++; + } else { + //std::cout << " matched to RecHit NOT found !" << std::endl; + } + + minLayerId = std::min(minLayerId, layerId); + maxLayerId = std::max(maxLayerId, layerId); + minLayerId_matched = std::min(minLayerId_matched, layerId_matched_min); + maxLayerId_matched = std::max(maxLayerId_matched, layerId_matched_max); +// // if (hitmap.count(h_and_f.first)) +// // energy += hitmap.at(h_and_f.first)->energy() * h_and_f.second; + } + } + histograms.h_caloparticle_firstlayer.at(pdgid)->Fill(minLayerId); + histograms.h_caloparticle_lastlayer.at(pdgid)->Fill(maxLayerId); + histograms.h_caloparticle_layersnum.at(pdgid)->Fill(int(maxLayerId-minLayerId)); + + histograms.h_caloparticle_firstlayer_matchedtoRecHit.at(pdgid)->Fill(minLayerId_matched); + histograms.h_caloparticle_lastlayer_matchedtoRecHit.at(pdgid)->Fill(maxLayerId_matched); + histograms.h_caloparticle_layersnum_matchedtoRecHit.at(pdgid)->Fill(int(maxLayerId_matched-minLayerId_matched)); + + histograms.h_caloparticle_nHitsInSimClusters.at(pdgid)->Fill((float)simHits); + histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit.at(pdgid)->Fill((float)simHits_matched); + //std::cout << " simHits in sc = " << simHits << std::endl; + //std::cout << " simHits (matched) in sc = " << simHits_matched << std::endl; + } + } void HGVHistoProducerAlgo::fill_cluster_histos(const Histograms& histograms, @@ -819,7 +906,9 @@ void HGVHistoProducerAlgo::layerClusters_to_CaloParticles( std::unordered_map const& hitMap, unsigned layers, const edm::Handle& LCAssocByEnergyScoreHandle) const { + std::cout << "HGVHistoProducerAlgo::layerClusters_to_CaloParticles" << std::endl; auto nLayerClusters = clusters.size(); + std::cout << "Number Layer Clusters = " << nLayerClusters << std::endl; std::unordered_map> detIdToCaloParticleId_Map; std::unordered_map> detIdToLayerClusterId_Map; @@ -1006,6 +1095,7 @@ void HGVHistoProducerAlgo::layerClusters_to_CaloParticles( cPEnergyOnLayer[layerId] = 0; const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters(); + std::cout << " layers in simClusterRefVector = " << layers << std::endl; for (const auto& it_sc : simClusterRefVector) { const SimCluster& simCluster = (*(it_sc)); const auto& hits_and_fractions = simCluster.hits_and_fractions(); @@ -1413,9 +1503,11 @@ void HGVHistoProducerAlgo::multiClusters_to_CaloParticles(const Histograms& hist std::vector const& cPSelectedIndices, std::unordered_map const& hitMap, unsigned layers) const { + std::cout << "HGVHistoProducerAlgo::multiClusters_to_CaloParticles " << std::endl; auto nMultiClusters = multiClusters.size(); //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution. 
auto nCaloParticles = cPIndices.size(); + std::cout << " nMultiClu = " << nMultiClusters << " , " << " nCaloPart = " << nCaloParticles << std::endl; std::unordered_map> detIdToCaloParticleId_Map; std::unordered_map> detIdToMultiClusterId_Map; From 7acf588e6bf96a53acad849c5ce8c18b5df69731 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Fri, 20 Nov 2020 15:13:08 +0100 Subject: [PATCH 03/14] Clean up and add CaloPart to makeHGCalValidationPlots all --- .../HGCalValidation/plugins/HGCalValidator.cc | 6 +- .../HGCalValidation/python/hgcalPlots.py | 60 +++++++++---------- .../scripts/makeHGCalValidationPlots.py | 12 ++++ .../src/HGVHistoProducerAlgo.cc | 15 ----- 4 files changed, 43 insertions(+), 50 deletions(-) diff --git a/Validation/HGCalValidation/plugins/HGCalValidator.cc b/Validation/HGCalValidation/plugins/HGCalValidator.cc index 713bbb94cc299..383fb631760cb 100644 --- a/Validation/HGCalValidation/plugins/HGCalValidator.cc +++ b/Validation/HGCalValidation/plugins/HGCalValidator.cc @@ -165,7 +165,7 @@ void HGCalValidator::dqmAnalyze(const edm::Event& event, const Histograms& histograms) const { using namespace reco; - std::cout << "\n====================================================" + LogDebug("HGCalValidator") << "\n====================================================" << "\n" << "Analyzing new event" << "\n" @@ -217,7 +217,7 @@ void HGCalValidator::dqmAnalyze(const edm::Event& event, // fill caloparticles histograms // ############################################## // HGCRecHit are given to select the SimHits which are also reconstructed - std::cout << "\n# of CaloParticles: " << caloParticles.size() << "\n" << std::endl; + LogTrace("HGCalValidator") << "\n# of CaloParticles: " << caloParticles.size() << "\n" << std::endl; std::vector selected_cPeff; cpParametersAndSelection(histograms, caloParticles, simVertices, selected_cPeff, totallayers_to_monitor_, *hitMap); @@ -280,7 +280,7 @@ void HGCalValidator::dqmAnalyze(const edm::Event& event, totallayers_to_monitor_); //General Info on multiclusters - std::cout << "\n# of multi clusters with " << label_mcl[wml].process() << ":" + LogTrace("HGCalValidator") << "\n# of multi clusters with " << label_mcl[wml].process() << ":" << label_mcl[wml].label() << ":" << label_mcl[wml].instance() << ": " << multiClusters.size() << "\n" << std::endl; } diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index bd8ead898e7a8..feec6a375b8a1 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -1061,66 +1061,63 @@ ], ncols=8 ) _bin_count = 0 -_xbinlabels = [ "L{:02d}".format(i+1) for i in range(0,maxlayerzm) ] -_common_eff = {"stat": False, "legend": False, "ymin": 0.0, "ymax": 1.1} +_xbinlabels = [ "{:02d}".format(i+1) for i in range(0,maxlayerzm) ] +_xtitle = "Layer Numbers in z-" +_common_eff = {"stat": False, "legend": False, "ymin": 0.0, "ymax": 1.1, "xbinlabeloption": "d"} _effplots_zminus_eta = [Plot("effic_eta_layer{:02d}".format(i), xtitle="", **_common_eff) for i in range(0,maxlayerzm)] _effplots_zminus_phi = [Plot("effic_phi_layer{:02d}".format(i), xtitle="", **_common_eff) for i in range(0,maxlayerzm)] -_common_eff = {"stat": False, "legend": False, "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "v", "ymin": 0.0, "ymax": 1.1} +_common_eff = {"stat": False, "legend": False, "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_eff["xmin"] = 
_bin_count _common_eff["xmax"] = maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_effplots_zminus = [Plot("globalEfficiencies", xtitle="Global Efficiencies in z-", **_common_eff)] +_effplots_zminus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Efficiency", **_common_eff)] _efficiencies_zminus_eta = PlotGroup("Efficiencies_vs_eta", _effplots_zminus_eta, ncols=10) _efficiencies_zminus_phi = PlotGroup("Efficiencies_vs_phi", _effplots_zminus_phi, ncols=10) -_efficiencies_zminus = PlotGroup("Efficiencies_global", _effplots_zminus, ncols=1) +_efficiencies_zminus = PlotGroup("Efficiencies_vs_layer", _effplots_zminus, ncols=1) _common_dup = {"stat": False, "legend": False, "ymin":0.0, "ymax":1.1} _dupplots_zminus_eta = [Plot("duplicate_eta_layer{:02d}".format(i), xtitle="", **_common_dup) for i in range(0,maxlayerzm)] _dupplots_zminus_phi = [Plot("duplicate_phi_layer{:02d}".format(i), xtitle="", **_common_dup) for i in range(0,maxlayerzm)] -_common_dup = {"stat": False, "legend": False, "title": "Global Duplicates in z-", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "v", "ymin": 0.0, "ymax": 1.1} +_common_dup = {"stat": False, "legend": False, "title": "Global Duplicates in z-", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_dup["xmin"] = _bin_count _common_dup["xmax"] = _common_dup["xmin"] + maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_dupplots_zminus = [Plot("globalEfficiencies", xtitle="Global Duplicates in z-", **_common_dup)] +_dupplots_zminus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Duplicates", **_common_dup)] _duplicates_zminus_eta = PlotGroup("Duplicates_vs_eta", _dupplots_zminus_eta, ncols=10) _duplicates_zminus_phi = PlotGroup("Duplicates_vs_phi", _dupplots_zminus_phi, ncols=10) -_duplicates_zminus = PlotGroup("Duplicates_global", _dupplots_zminus, ncols=1) +_duplicates_zminus = PlotGroup("Duplicates_vs_layer", _dupplots_zminus, ncols=1) _common_fake = {"stat": False, "legend": False, "ymin":0.0, "ymax":1.1} _fakeplots_zminus_eta = [Plot("fake_eta_layer{:02d}".format(i), xtitle="", **_common_fake) for i in range(0,maxlayerzm)] _fakeplots_zminus_phi = [Plot("fake_phi_layer{:02d}".format(i), xtitle="", **_common_fake) for i in range(0,maxlayerzm)] -_common_fake = {"stat": False, "legend": False, "title": "Global Fake Rates in z-", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "v", "ymin": 0.0, "ymax": 1.1} +_common_fake = {"stat": False, "legend": False, "title": "Global Fake Rates in z-", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_fake["xmin"] = _bin_count _common_fake["xmax"] = _common_fake["xmin"] + maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_common_fake["xbinlabels"] = [ "L{:02d}".format(i+1) for i in range(0,maxlayerzm) ] _common_fake["xbinlabelsize"] = 10. 
-_fakeplots_zminus = [Plot("globalEfficiencies", xtitle="Global Fake Rate in z-", **_common_fake)] +_fakeplots_zminus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Fake Rate", **_common_fake)] _fakes_zminus_eta = PlotGroup("FakeRate_vs_eta", _fakeplots_zminus_eta, ncols=10) _fakes_zminus_phi = PlotGroup("FakeRate_vs_phi", _fakeplots_zminus_phi, ncols=10) -_fakes_zminus = PlotGroup("FakeRate_global", _fakeplots_zminus, ncols=1) +_fakes_zminus = PlotGroup("FakeRate_vs_layer", _fakeplots_zminus, ncols=1) _common_merge = {"stat": False, "legend": False, "ymin":0.0, "ymax":1.1} _mergeplots_zminus_eta = [Plot("merge_eta_layer{:02d}".format(i), xtitle="", **_common_merge) for i in range(0,maxlayerzm)] _mergeplots_zminus_phi = [Plot("merge_phi_layer{:02d}".format(i), xtitle="", **_common_merge) for i in range(0,maxlayerzm)] -_common_merge = {"stat": False, "legend": False, "title": "Global Merge Rates in z-", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "v", "ymin": 0.0, "ymax": 1.1} +_common_merge = {"stat": False, "legend": False, "title": "Global Merge Rates in z-", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_merge["xmin"] = _bin_count _common_merge["xmax"] = _common_merge["xmin"] + maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_common_merge["xbinlabels"] = [ "L{:02d}".format(i+1) for i in range(0,maxlayerzm) ] _common_merge["xbinlabelsize"] = 10. -_mergeplots_zminus = [Plot("globalEfficiencies", xtitle="Global merge Rate in z-", **_common_merge)] +_mergeplots_zminus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Merge Rate", **_common_merge)] _merges_zminus_eta = PlotGroup("MergeRate_vs_eta", _mergeplots_zminus_eta, ncols=10) _merges_zminus_phi = PlotGroup("MergeRate_vs_phi", _mergeplots_zminus_phi, ncols=10) -_merges_zminus = PlotGroup("MergeRate_global", _mergeplots_zminus, ncols=1) +_merges_zminus = PlotGroup("MergeRate_vs_layer", _mergeplots_zminus, ncols=1) _common_energy_score = dict(removeEmptyBins=False, xbinlabelsize=10, stat=True, xbinlabeloption="d", ncols=1, - ylog=True, - xlog=True, xmin=0.001, xmax=1., ymin=0.01, @@ -1132,8 +1129,6 @@ _energyscore_cp2lc_zplus = PlotGroup("Energy_vs_Score_CP2LC", [Plot("Energy_vs_Score_caloparticle2layer_perlayer{:02d}".format(i), title="Energy_vs_Score_CP2LC", xtitle="Layer {}".format(i), drawStyle="COLZ", adjustMarginRight=0.1, **_common_energy_score) for i in range(maxlayerzm,maxlayerzp) ], ncols=10) -_common_energy_score["xlog"]=False -_common_energy_score["ylog"]=False _common_energy_score["xmin"]=-0.1 _energyscore_lc2cp_zminus = PlotGroup("Energy_vs_Score_LC2CP", [Plot("Energy_vs_Score_layer2caloparticle_perlayer{:02d}".format(i), title="Energy_vs_Score_LC2CP", xtitle="Layer {}".format(i), drawStyle="COLZ", adjustMarginRight=0.1, **_common_energy_score) for i in range(0, maxlayerzm) @@ -1219,53 +1214,54 @@ _bin_count = 50 +_xtitle = "Layer Numbers in z+" _common_eff = {"stat": False, "legend": False, "ymin":0.0, "ymax":1.1} _effplots_zplus_eta = [Plot("effic_eta_layer{:02d}".format(i), xtitle="", **_common_eff) for i in range(maxlayerzm,maxlayerzp)] _effplots_zplus_phi = [Plot("effic_phi_layer{:02d}".format(i), xtitle="", **_common_eff) for i in range(maxlayerzm,maxlayerzp)] -_common_eff = {"stat": False, "legend": False, "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "d", "ymin": 0.0, "ymax": 1.1} +_common_eff = {"stat": False, "legend": False, "xbinlabels": _xbinlabels, "xbinlabelsize": 
12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_eff["xmin"] = _bin_count _common_eff["xmax"] = _common_eff["xmin"] + maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_effplots_zplus = [Plot("globalEfficiencies", xtitle="Global Efficiencies in z+", **_common_eff)] +_effplots_zplus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Efficiency", **_common_eff)] _efficiencies_zplus_eta = PlotGroup("Efficiencies_vs_eta", _effplots_zplus_eta, ncols=10) _efficiencies_zplus_phi = PlotGroup("Efficiencies_vs_phi", _effplots_zplus_phi, ncols=10) -_efficiencies_zplus = PlotGroup("Efficiencies_global", _effplots_zplus, ncols=1) +_efficiencies_zplus = PlotGroup("Efficiencies_vs_layer", _effplots_zplus, ncols=1) _common_dup = {"stat": False, "legend": False, "ymin": 0.0, "ymax": 1.1} _dupplots_zplus_eta = [Plot("duplicate_eta_layer{:02d}".format(i), xtitle="", **_common_dup) for i in range(maxlayerzm,maxlayerzp)] _dupplots_zplus_phi = [Plot("duplicate_phi_layer{:02d}".format(i), xtitle="", **_common_dup) for i in range(maxlayerzm,maxlayerzp)] -_common_dup = {"stat": False, "legend": False, "title": "Global Duplicates in z+", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "d", "ymin": 0.0, "ymax": 1.1} +_common_dup = {"stat": False, "legend": False, "title": "Global Duplicates in z+", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_dup["xmin"] = _bin_count _common_dup["xmax"] = _common_dup["xmin"] + maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_dupplots_zplus = [Plot("globalEfficiencies", xtitle="Global Duplicates in z+", **_common_dup)] +_dupplots_zplus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Duplicates", **_common_dup)] _duplicates_zplus_eta = PlotGroup("Duplicates_vs_eta", _dupplots_zplus_eta, ncols=10) _duplicates_zplus_phi = PlotGroup("Duplicates_vs_phi", _dupplots_zplus_phi, ncols=10) -_duplicates_zplus = PlotGroup("Duplicates_global", _dupplots_zplus, ncols=1) +_duplicates_zplus = PlotGroup("Duplicates_vs_layer", _dupplots_zplus, ncols=1) _common_fake = {"stat": False, "legend": False, "ymin": 0.0, "ymax": 1.1} _fakeplots_zplus_eta = [Plot("fake_eta_layer{:02d}".format(i), xtitle="", **_common_fake) for i in range(maxlayerzm,maxlayerzp)] _fakeplots_zplus_phi = [Plot("fake_phi_layer{:02d}".format(i), xtitle="", **_common_fake) for i in range(maxlayerzm,maxlayerzp)] -_common_fake = {"stat": False, "legend": False, "title": "Global Fake Rates in z+", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "d", "ymin": 0.0, "ymax": 1.1} +_common_fake = {"stat": False, "legend": False, "title": "Global Fake Rates in z+", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_fake["xmin"] = _bin_count _common_fake["xmax"] = _common_fake["xmin"] + maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_fakeplots_zplus = [Plot("globalEfficiencies", xtitle="Global Fake Rate in z+", **_common_fake)] +_fakeplots_zplus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Fake Rate", **_common_fake)] _fakes_zplus_eta = PlotGroup("FakeRate_vs_eta", _fakeplots_zplus_eta, ncols=10) _fakes_zplus_phi = PlotGroup("FakeRate_vs_phi", _fakeplots_zplus_phi, ncols=10) -_fakes_zplus = PlotGroup("FakeRate_global", _fakeplots_zplus, ncols=1) +_fakes_zplus = PlotGroup("FakeRate_vs_layer", _fakeplots_zplus, ncols=1) _common_merge = {"stat": False, "legend": False, 
"ymin": 0.0, "ymax": 1.1} _mergeplots_zplus_eta = [Plot("merge_eta_layer{:02d}".format(i), xtitle="", **_common_merge) for i in range(maxlayerzm,maxlayerzp)] _mergeplots_zplus_phi = [Plot("merge_phi_layer{:02d}".format(i), xtitle="", **_common_merge) for i in range(maxlayerzm,maxlayerzp)] -_common_merge = {"stat": False, "legend": False, "title": "Global Merge Rates in z+", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloptions": "d", "ymin": 0.0, "ymax": 1.1} +_common_merge = {"stat": False, "legend": False, "title": "Global Merge Rates in z+", "xbinlabels": _xbinlabels, "xbinlabelsize": 12, "xbinlabeloption": "v", "ymin": 0.0, "ymax": 1.1} _common_merge["xmin"] = _bin_count _common_merge["xmax"] = _common_merge["xmin"] + maxlayerzm _bin_count += 4*maxlayerzm # 2 for the eta{-,+} and 2 for phi{+,-} -_mergeplots_zplus = [Plot("globalEfficiencies", xtitle="Global merge Rate in z+", **_common_merge)] +_mergeplots_zplus = [Plot("globalEfficiencies", xtitle=_xtitle, ytitle="Merge Rate", **_common_merge)] _merges_zplus_eta = PlotGroup("MergeRate_vs_eta", _mergeplots_zplus_eta, ncols=10) _merges_zplus_phi = PlotGroup("MergeRate_vs_phi", _mergeplots_zplus_phi, ncols=10) -_merges_zplus = PlotGroup("MergeRate_global", _mergeplots_zplus, ncols=1) +_merges_zplus = PlotGroup("MergeRate_vs_layer", _mergeplots_zplus, ncols=1) #-------------------------------------------------------------------------------------------- diff --git a/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py b/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py index 07bc7bcdf9123..747ee3da52ea1 100755 --- a/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py +++ b/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py @@ -80,6 +80,17 @@ def main(opts): hgchitcalib = [hgcalPlots.hgcalHitCalibPlotter] val.doPlots(hgchitcalib, plotterDrawArgs=drawArgs) else : + #caloparticles + particletypes = {"pion-":"-211", "pion+":"211", "pion0": "111", + "muon-": "-13", "muon+":"13", + "electron-": "-11", "electron+": "11", "photon": "22", + "kaon-": "-321", "kaon+": "321"} + hgcaloPart = [hgcalPlots.hgcalCaloParticlesPlotter] + for i_part, i_partID in particletypes.iteritems() : + print(i_part) + print(i_partID) + hgcalPlots.append_hgcalCaloParticlesPlots(sample.files(), i_partID, i_part) + val.doPlots(hgcaloPart, plotterDrawArgs=drawArgs) #hits hgchit = [hgcalPlots.hgcalHitPlotter] @@ -104,6 +115,7 @@ def main(opts): hgcalPlots.append_hgcalMultiClustersPlots(i_iter, tracksterCollection) val.doPlots(hgcmulticlus, plotterDrawArgs=drawArgs) + if opts.no_html: print("Plots created into directory '%s'." 
% opts.outputDir) else: diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index a78c13913842f..9a872eebe25a8 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -804,12 +804,6 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms std::vector const& simVertices, unsigned layers, std::unordered_map const& hitMap) const { - for(auto const& hit : hitMap){ - const auto hitDetId = hit.first; - int layerId = recHitTools_->getLayerWithOffset(hitDetId) + - layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1; - //std::cout << " layerId of HGCRecHit = " << layerId << std::endl; - } const auto eta = getEta(caloparticle.eta()); if (histograms.h_caloparticle_eta.count(pdgid)) { @@ -856,12 +850,10 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms int layerId_matched_max = 0; std::unordered_map::const_iterator itcheck = hitMap.find(hitDetId); if (itcheck != hitMap.end()) { - //std::cout << " matched to RecHit FOUND !" << std::endl; layerId_matched_min = layerId; layerId_matched_max = layerId; simHits_matched++; } else { - //std::cout << " matched to RecHit NOT found !" << std::endl; } minLayerId = std::min(minLayerId, layerId); @@ -882,8 +874,6 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms histograms.h_caloparticle_nHitsInSimClusters.at(pdgid)->Fill((float)simHits); histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit.at(pdgid)->Fill((float)simHits_matched); - //std::cout << " simHits in sc = " << simHits << std::endl; - //std::cout << " simHits (matched) in sc = " << simHits_matched << std::endl; } } @@ -906,9 +896,7 @@ void HGVHistoProducerAlgo::layerClusters_to_CaloParticles( std::unordered_map const& hitMap, unsigned layers, const edm::Handle& LCAssocByEnergyScoreHandle) const { - std::cout << "HGVHistoProducerAlgo::layerClusters_to_CaloParticles" << std::endl; auto nLayerClusters = clusters.size(); - std::cout << "Number Layer Clusters = " << nLayerClusters << std::endl; std::unordered_map> detIdToCaloParticleId_Map; std::unordered_map> detIdToLayerClusterId_Map; @@ -1095,7 +1083,6 @@ void HGVHistoProducerAlgo::layerClusters_to_CaloParticles( cPEnergyOnLayer[layerId] = 0; const SimClusterRefVector& simClusterRefVector = cP[cpId].simClusters(); - std::cout << " layers in simClusterRefVector = " << layers << std::endl; for (const auto& it_sc : simClusterRefVector) { const SimCluster& simCluster = (*(it_sc)); const auto& hits_and_fractions = simCluster.hits_and_fractions(); @@ -1503,11 +1490,9 @@ void HGVHistoProducerAlgo::multiClusters_to_CaloParticles(const Histograms& hist std::vector const& cPSelectedIndices, std::unordered_map const& hitMap, unsigned layers) const { - std::cout << "HGVHistoProducerAlgo::multiClusters_to_CaloParticles " << std::endl; auto nMultiClusters = multiClusters.size(); //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution. 
auto nCaloParticles = cPIndices.size(); - std::cout << " nMultiClu = " << nMultiClusters << " , " << " nCaloPart = " << nCaloParticles << std::endl; std::unordered_map> detIdToCaloParticleId_Map; std::unordered_map> detIdToMultiClusterId_Map; From 03ce18bd65459d49482e365de2311036fc543eb6 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Thu, 26 Nov 2020 14:18:51 +0100 Subject: [PATCH 04/14] Adding CaloPart en hits sum --- .../interface/HGVHistoProducerAlgo.h | 4 ++ .../HGCalValidation/python/hgcalPlots.py | 18 ++++- .../src/HGVHistoProducerAlgo.cc | 65 +++++++++++++++++-- 3 files changed, 78 insertions(+), 9 deletions(-) diff --git a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h index b90ac8df39f58..9e2c7bd82088f 100644 --- a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h +++ b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h @@ -98,6 +98,10 @@ struct HGVHistoProducerAlgoHistograms { std::unordered_map h_caloparticle_lastlayer; std::unordered_map h_caloparticle_layersnum; std::unordered_map h_caloparticle_nHitsInSimClusters_matchedtoRecHit; + std::unordered_map h_caloparticle_nHits_matched_energy; + std::unordered_map h_caloparticle_nHits_matched_energy_layer; + std::unordered_map h_caloparticle_nHits_matched_energy_layer_1SimCl; + std::unordered_map h_caloparticle_sum_energy_layer; std::unordered_map h_caloparticle_firstlayer_matchedtoRecHit; std::unordered_map h_caloparticle_lastlayer_matchedtoRecHit; std::unordered_map h_caloparticle_layersnum_matchedtoRecHit; diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index feec6a375b8a1..7d19850fe751d 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -2028,6 +2028,11 @@ def append_hgcalMultiClustersPlots(collection = 'ticlMultiClustersFromTracksters # )) #================================================================================================= +_common_Calo = {"stat": False, "drawStyle": "hist", "staty": 0.65, "ymin": 0.0, "ylog": False} +list_2D_histos = ["caloparticle_nHits_matched_layer", + "caloparticle_nHits_matched_layer_1SimCl", + "caloparticle_sum_energy_layer"] + hgcalCaloParticlesPlotter = Plotter() def append_hgcalCaloParticlesPlots(files, collection = '-211', name_collection = "pion-"): @@ -2044,11 +2049,20 @@ def append_hgcalCaloParticlesPlots(files, collection = '-211', name_collection = pg= PlotGroup(fileName.Data(),[ Plot(name, xtitle=obj.GetXaxis().GetTitle(), ytitle=obj.GetYaxis().GetTitle(), - #drawCommand = "", # may want to customize for TH2 (colz, etc.) - normalizeToNumberOfEvents = True, **_common) + drawCommand = "", # may want to customize for TH2 (colz, etc.) 
+ normalizeToNumberOfEvents = True, **_common_Calo) ], ncols=1) + if name in list_2D_histos : + pg= PlotOnSideGroup(fileName.Data(), + Plot(name, + xtitle=obj.GetXaxis().GetTitle(), ytitle=obj.GetYaxis().GetTitle(), + drawCommand = "COLZ", + normalizeToNumberOfEvents = True, **_common_Calo) + , + ncols=1) + hgcalCaloParticlesPlotter.append("CaloParticles_"+name_collection, [ dqmfolder ], PlotFolder( diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index 9a872eebe25a8..0cca18045f26b 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -205,11 +205,39 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist ibook.book1D("caloparticle_phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_); histograms.h_caloparticle_nSimClusters[pdgid] = - ibook.book1D("caloparticle_nSimClusters", "Num Sim Clusters in caloparticle", 100, 0., 100.); + ibook.book1D("caloparticle_nSimCl", "Num Sim Clusters in caloparticle", 100, 0., 100.); histograms.h_caloparticle_nHitsInSimClusters[pdgid] = - ibook.book1D("caloparticle_nHitsInSimClusters", "Num Hits in Sim Clusters in caloparticle", 1000, 0., 1000.); + ibook.book1D("caloparticle_nHits", "Num Hits in Sim Clusters in caloparticle", 1000, 0., 1000.); histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit[pdgid] = - ibook.book1D("caloparticle_nHitsInSimClusters_matchedtoRecHit", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.); + ibook.book1D("caloparticle_nHits_matched", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.); + histograms.h_caloparticle_nHits_matched_energy[pdgid] = + ibook.book1D("caloparticle_nHits_matched_energy", "Energy of Hits in Sim Clusters (matched)", 100, 0., 10.); + histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl[pdgid] = + ibook.book2D("caloparticle_nHits_matched_layer_1SimCl", + "Energy of Hits only 1 Sim Clusters (matched) vs layer", + 2 * layers, + 0., + (float)2 * layers, + 100, + 0., + 5.); + histograms.h_caloparticle_nHits_matched_energy_layer[pdgid] = + ibook.book2D("caloparticle_nHits_matched_layer", + "Energy of Hits in Sim Clusters (matched) vs layer", + 2 * layers, + 0., + (float)2 * layers, + 100, + 0., + 5.); + histograms.h_caloparticle_sum_energy_layer[pdgid] = + ibook.book2D("caloparticle_sum_energy_layer", + "Rescaled Sum Energy of Hits in Sim Clusters (matched) vs layer", + 2 * layers, + 0., + (float)2 * layers, + 110, 0., 110.); + histograms.h_caloparticle_firstlayer[pdgid] = ibook.book1D("caloparticle_firstlayer", "First layer of the caloparticle", 2 * layers, 0., (float)2 * layers); @@ -835,10 +863,13 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms int minLayerId_matched = 999; int maxLayerId_matched = 0; -// float energy = 0.; + float energy = 0.; + std::map totenergy_layer; for (auto const& sc : caloparticle.simClusters()) { + //std::cout << " This sim cluster has " << sc->hits_and_fractions().size() << " simHits and " << sc->energy() << " energy. 
" << std::endl; simHits += sc->hits_and_fractions().size(); + energy += sc->energy(); for (auto const& h_and_f : sc->hits_and_fractions()) { const auto hitDetId = h_and_f.first; int layerId = recHitTools_->getLayerWithOffset(hitDetId) + @@ -853,16 +884,25 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms layerId_matched_min = layerId; layerId_matched_max = layerId; simHits_matched++; - } else { + const HGCRecHit* hit = itcheck->second; + //std::cout << " layer = "<< layerId << " energy = " << hit->energy() << std::endl; + histograms.h_caloparticle_nHits_matched_energy.at(pdgid)->Fill(hit->energy()*h_and_f.second); + histograms.h_caloparticle_nHits_matched_energy_layer.at(pdgid)->Fill(layerId, hit->energy()*h_and_f.second); + if (totenergy_layer.find(layerId) != totenergy_layer.end()){ + totenergy_layer[layerId] = totenergy_layer.at(layerId) + hit->energy(); + } else { + totenergy_layer.emplace(layerId, hit->energy()); + } + if (caloparticle.simClusters().size() == 1 ) + histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl.at(pdgid)->Fill(layerId, hit->energy()*h_and_f.second); } minLayerId = std::min(minLayerId, layerId); maxLayerId = std::max(maxLayerId, layerId); minLayerId_matched = std::min(minLayerId_matched, layerId_matched_min); maxLayerId_matched = std::max(maxLayerId_matched, layerId_matched_max); -// // if (hitmap.count(h_and_f.first)) -// // energy += hitmap.at(h_and_f.first)->energy() * h_and_f.second; } + //std::cout << std::endl; } histograms.h_caloparticle_firstlayer.at(pdgid)->Fill(minLayerId); histograms.h_caloparticle_lastlayer.at(pdgid)->Fill(maxLayerId); @@ -874,6 +914,17 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms histograms.h_caloparticle_nHitsInSimClusters.at(pdgid)->Fill((float)simHits); histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit.at(pdgid)->Fill((float)simHits_matched); + + auto i = totenergy_layer.begin(); + double sum_energy = 0.0; + while( i != totenergy_layer.end() ){ + //std::cout << "x = " << i->first << " y = " << i->second << std::endl; + sum_energy += i->second; + //std::cout << " y (sum) = " << sum_energy << std::endl; + //std::cout << " y (100%) = " << sum_energy / caloparticle.energy() * 100. << std::endl; + histograms.h_caloparticle_sum_energy_layer.at(pdgid)->Fill(i->first, sum_energy / caloparticle.energy() * 100. 
); + i++; + } } } From a09a19d492a01fb4d3022f1e16e3cec2e7ec5536 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Wed, 2 Dec 2020 11:13:18 +0100 Subject: [PATCH 05/14] Fix list_2D_all --- Validation/HGCalValidation/python/hgcalPlots.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index 7d19850fe751d..7df90ebf67577 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -2029,13 +2029,14 @@ def append_hgcalMultiClustersPlots(collection = 'ticlMultiClustersFromTracksters #================================================================================================= _common_Calo = {"stat": False, "drawStyle": "hist", "staty": 0.65, "ymin": 0.0, "ylog": False} -list_2D_histos = ["caloparticle_nHits_matched_layer", - "caloparticle_nHits_matched_layer_1SimCl", - "caloparticle_sum_energy_layer"] hgcalCaloParticlesPlotter = Plotter() def append_hgcalCaloParticlesPlots(files, collection = '-211', name_collection = "pion-"): + list_2D_histos = ["caloparticle_nHits_matched_layer", + "caloparticle_nHits_matched_layer_1SimCl", + "caloparticle_sum_energy_layer"] + dqmfolder = "DQMData/Run 1/HGCAL/Run summary/HGCalValidator/SelectedCaloParticles/" + collection print(dqmfolder) templateFile = ROOT.TFile.Open(files[0]) # assuming all files have same structure From e03dc0e96f9a4d164c772d51f2d1a46312505f8c Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Tue, 1 Dec 2020 09:25:32 +0100 Subject: [PATCH 06/14] Fix separate plots for 2D --- Validation/RecoTrack/python/plotting/plotting.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/Validation/RecoTrack/python/plotting/plotting.py b/Validation/RecoTrack/python/plotting/plotting.py index 1939e251959fc..ec09820a28152 100644 --- a/Validation/RecoTrack/python/plotting/plotting.py +++ b/Validation/RecoTrack/python/plotting/plotting.py @@ -1859,6 +1859,9 @@ def isRatio(self, ratio): return ratio return ratio and self._ratio + def setName(self, name): + self._name = name + def getName(self): if self._outname is not None: return self._outname @@ -2531,15 +2534,15 @@ def append(self, *args, **kwargs): def create(self, tdirectoryNEvents, requireAllHistograms=False): self._plots = [] - for element in tdirectoryNEvents: + for i, element in enumerate(tdirectoryNEvents): pl = self._plot.clone() pl.create([element], requireAllHistograms) + pl.setName(pl.getName()+"_"+str(i)) self._plots.append(pl) def draw(self, *args, **kwargs): kargs = copy.copy(kwargs) kargs["ratio"] = False - kargs["separate"] = False return super(PlotOnSideGroup, self).draw(*args, **kargs) class PlotFolder: From 820e372e615f6f479a15ddd08cb37f096cf9c4fe Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Mon, 7 Dec 2020 16:31:47 +0100 Subject: [PATCH 07/14] Clean up and fix right margin for 2D plots --- .../HGCalValidation/python/hgcalPlots.py | 4 ++-- .../scripts/makeHGCalValidationPlots.py | 4 ---- .../RecoTrack/python/plotting/plotting.py | 20 +++++++++---------- 3 files changed, 12 insertions(+), 16 deletions(-) diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index 7df90ebf67577..70f7e142587e9 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -6,7 +6,7 @@ import six import ROOT -from ROOT import TFile +from ROOT import TFile, TString from ROOT import 
gDirectory ROOT.gROOT.SetBatch(True) ROOT.PyConfig.IgnoreCommandLineOptions = True @@ -2050,7 +2050,7 @@ def append_hgcalCaloParticlesPlots(files, collection = '-211', name_collection = pg= PlotGroup(fileName.Data(),[ Plot(name, xtitle=obj.GetXaxis().GetTitle(), ytitle=obj.GetYaxis().GetTitle(), - drawCommand = "", # may want to customize for TH2 (colz, etc.) + drawCommand = "", normalizeToNumberOfEvents = True, **_common_Calo) ], ncols=1) diff --git a/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py b/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py index 747ee3da52ea1..3d17e6cbfda29 100755 --- a/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py +++ b/Validation/HGCalValidation/scripts/makeHGCalValidationPlots.py @@ -66,8 +66,6 @@ def main(opts): "kaon-": "-321", "kaon+": "321"} hgcaloPart = [hgcalPlots.hgcalCaloParticlesPlotter] for i_part, i_partID in particletypes.iteritems() : - print(i_part) - print(i_partID) hgcalPlots.append_hgcalCaloParticlesPlots(sample.files(), i_partID, i_part) val.doPlots(hgcaloPart, plotterDrawArgs=drawArgs) elif opts.collection==hitValidationLabel: @@ -87,8 +85,6 @@ def main(opts): "kaon-": "-321", "kaon+": "321"} hgcaloPart = [hgcalPlots.hgcalCaloParticlesPlotter] for i_part, i_partID in particletypes.iteritems() : - print(i_part) - print(i_partID) hgcalPlots.append_hgcalCaloParticlesPlots(sample.files(), i_partID, i_part) val.doPlots(hgcaloPart, plotterDrawArgs=drawArgs) diff --git a/Validation/RecoTrack/python/plotting/plotting.py b/Validation/RecoTrack/python/plotting/plotting.py index ec09820a28152..f53098a6d2a14 100644 --- a/Validation/RecoTrack/python/plotting/plotting.py +++ b/Validation/RecoTrack/python/plotting/plotting.py @@ -2414,16 +2414,6 @@ def _drawSeparate(self, legendLabels, prefix, saveFormat, ratio, directory): width = 500 height = 500 - canvas = _createCanvas(self._name+"Single", width, height) - canvasRatio = _createCanvas(self._name+"SingleRatio", width, int(height*self._ratioFactor)) - - # from TDRStyle - for c in [canvas, canvasRatio]: - c.SetTopMargin(0.05) - c.SetBottomMargin(0.13) - c.SetLeftMargin(0.16) - c.SetRightMargin(0.05) - lx1def = 0.6 lx2def = 0.95 ly1def = 0.85 @@ -2435,6 +2425,16 @@ def _drawSeparate(self, legendLabels, prefix, saveFormat, ratio, directory): if plot.isEmpty(): continue + canvas = _createCanvas(self._name+"Single", width, height) + canvasRatio = _createCanvas(self._name+"SingleRatio", width, int(height*self._ratioFactor)) + + # from TDRStyle + for c in [canvas, canvasRatio]: + c.SetTopMargin(0.05) + c.SetBottomMargin(0.13) + c.SetLeftMargin(0.16) + c.SetRightMargin(0.05) + ratioForThisPlot = plot.isRatio(ratio) c = canvas if ratioForThisPlot: From e3400c1faa9577a61a65bed6bf4252ace1705ac7 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Mon, 7 Dec 2020 17:57:28 +0100 Subject: [PATCH 08/14] Add missing plots in HGVHistoProducerAlgo --- .../interface/HGVHistoProducerAlgo.h | 2 + .../src/HGVHistoProducerAlgo.cc | 68 ++++++++++--------- 2 files changed, 38 insertions(+), 32 deletions(-) diff --git a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h index 9e2c7bd82088f..08e91145adecb 100644 --- a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h +++ b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h @@ -90,6 +90,8 @@ struct HGVHistoProducerAlgoHistograms { std::unordered_map h_caloparticle_eta; std::unordered_map h_caloparticle_eta_Zorigin; std::unordered_map 
h_caloparticle_energy; + std::unordered_map h_caloparticle_selfenergy; + std::unordered_map h_caloparticle_energyDifference; std::unordered_map h_caloparticle_pt; std::unordered_map h_caloparticle_phi; std::unordered_map h_caloparticle_nSimClusters; diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index 0cca18045f26b..8d4eecaccb505 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -194,44 +194,49 @@ void HGVHistoProducerAlgo::bookInfo(DQMStore::IBooker& ibook, Histograms& histog void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid, unsigned layers) { histograms.h_caloparticle_eta[pdgid] = - ibook.book1D("num_caloparticle_eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_); + ibook.book1D("N of caloparticle vs eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_); histograms.h_caloparticle_eta_Zorigin[pdgid] = ibook.book2D("Eta vs Zorigin", "Eta vs Zorigin", nintEta_, minEta_, maxEta_, nintZpos_, minZpos_, maxZpos_); histograms.h_caloparticle_energy[pdgid] = - ibook.book1D("caloparticle_energy", "Energy of caloparticle", nintEne_, minEne_, maxEne_); - histograms.h_caloparticle_pt[pdgid] = ibook.book1D("caloparticle_pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_); + ibook.book1D("Energy", "Energy of caloparticle", nintEne_, minEne_, maxEne_); + histograms.h_caloparticle_pt[pdgid] = ibook.book1D("Pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_); histograms.h_caloparticle_phi[pdgid] = - ibook.book1D("caloparticle_phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_); + ibook.book1D("Phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_); + histograms.h_caloparticle_selfenergy[pdgid] = + ibook.book1D("SelfEnergy", "Total Energy of Hits in Sim Clusters (matched)", nintEne_, minEne_, maxEne_); + histograms.h_caloparticle_energyDifference[pdgid] = + ibook.book1D("EnergyDifference", "(Energy-SelfEnergy)/Energy", 300., -5., 1.); histograms.h_caloparticle_nSimClusters[pdgid] = - ibook.book1D("caloparticle_nSimCl", "Num Sim Clusters in caloparticle", 100, 0., 100.); + ibook.book1D("Num Sim Clusters", "Num Sim Clusters in caloparticle", 100, 0., 100.); histograms.h_caloparticle_nHitsInSimClusters[pdgid] = - ibook.book1D("caloparticle_nHits", "Num Hits in Sim Clusters in caloparticle", 1000, 0., 1000.); + ibook.book1D("Num Hits in Sim Clusters", "Num Hits in Sim Clusters in caloparticle", 1000, 0., 1000.); histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit[pdgid] = - ibook.book1D("caloparticle_nHits_matched", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.); + ibook.book1D("Num Rec-matched Hits in Sim Clusters", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.); + histograms.h_caloparticle_nHits_matched_energy[pdgid] = - ibook.book1D("caloparticle_nHits_matched_energy", "Energy of Hits in Sim Clusters (matched)", 100, 0., 10.); - histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl[pdgid] = - ibook.book2D("caloparticle_nHits_matched_layer_1SimCl", - "Energy of Hits only 1 Sim Clusters (matched) vs layer", + ibook.book1D("Energy of Rec-matched Hits", "Energy of Hits in Sim Clusters (matched)", 100, 0., 10.); + histograms.h_caloparticle_nHits_matched_energy_layer[pdgid] = + ibook.book2D("Energy of Rec-matched Hits vs layer", + "Energy of Hits in Sim Clusters (matched) vs layer", 2 * layers, 0., (float)2 * 
layers, 100, 0., - 5.); - histograms.h_caloparticle_nHits_matched_energy_layer[pdgid] = - ibook.book2D("caloparticle_nHits_matched_layer", - "Energy of Hits in Sim Clusters (matched) vs layer", + 10.); + histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl[pdgid] = + ibook.book2D("Energy of Rec-matched Hits vs layer (1SC)", + "Energy of Hits only 1 Sim Clusters (matched) vs layer", 2 * layers, 0., (float)2 * layers, 100, 0., - 5.); + 10.); histograms.h_caloparticle_sum_energy_layer[pdgid] = - ibook.book2D("caloparticle_sum_energy_layer", + ibook.book2D("Rec-matched Hits Rescaled Sum Energy vs layer", "Rescaled Sum Energy of Hits in Sim Clusters (matched) vs layer", 2 * layers, 0., @@ -240,18 +245,17 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist histograms.h_caloparticle_firstlayer[pdgid] = - ibook.book1D("caloparticle_firstlayer", "First layer of the caloparticle", 2 * layers, 0., (float)2 * layers); + ibook.book1D("First Layer", "First layer of the caloparticle", 2 * layers, 0., (float)2 * layers); histograms.h_caloparticle_lastlayer[pdgid] = - ibook.book1D("caloparticle_lastlayer", "Last layer of the caloparticle", 2 * layers, 0., (float)2 * layers); + ibook.book1D("Last Layer", "Last layer of the caloparticle", 2 * layers, 0., (float)2 * layers); histograms.h_caloparticle_layersnum[pdgid] = - ibook.book1D("caloparticle_layersnum", "Number of layers of the caloparticle", 2 * layers, 0., (float)2 * layers); - + ibook.book1D("Number of Layers", "Number of layers of the caloparticle", 2 * layers, 0., (float)2 * layers); histograms.h_caloparticle_firstlayer_matchedtoRecHit[pdgid] = - ibook.book1D("caloparticle_firstlayer_matchedtoRecHit", "First layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + ibook.book1D("First Layer (rec-matched hit)", "First layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); histograms.h_caloparticle_lastlayer_matchedtoRecHit[pdgid] = - ibook.book1D("caloparticle_lastlayer_matchedtoRecHit", "Last layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + ibook.book1D("Last Layer (rec-matched hit)", "Last layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); histograms.h_caloparticle_layersnum_matchedtoRecHit[pdgid] = - ibook.book1D("caloparticle_layersnum_matchedtoRecHit", "Number of layers of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + ibook.book1D("Number of Layers (rec-matched hit)", "Number of layers of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); } void HGVHistoProducerAlgo::bookClusterHistos(DQMStore::IBooker& ibook, @@ -867,14 +871,12 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms std::map totenergy_layer; for (auto const& sc : caloparticle.simClusters()) { - //std::cout << " This sim cluster has " << sc->hits_and_fractions().size() << " simHits and " << sc->energy() << " energy. 
" << std::endl; simHits += sc->hits_and_fractions().size(); - energy += sc->energy(); + for (auto const& h_and_f : sc->hits_and_fractions()) { const auto hitDetId = h_and_f.first; int layerId = recHitTools_->getLayerWithOffset(hitDetId) + layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1; - //std::cout << " layerId of simHit = " << layerId << std::endl; // set to 0 if matched RecHit not found int layerId_matched_min = 999; @@ -884,10 +886,12 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms layerId_matched_min = layerId; layerId_matched_max = layerId; simHits_matched++; + const HGCRecHit* hit = itcheck->second; - //std::cout << " layer = "<< layerId << " energy = " << hit->energy() << std::endl; + energy += hit->energy()*h_and_f.second; histograms.h_caloparticle_nHits_matched_energy.at(pdgid)->Fill(hit->energy()*h_and_f.second); histograms.h_caloparticle_nHits_matched_energy_layer.at(pdgid)->Fill(layerId, hit->energy()*h_and_f.second); + if (totenergy_layer.find(layerId) != totenergy_layer.end()){ totenergy_layer[layerId] = totenergy_layer.at(layerId) + hit->energy(); } else { @@ -901,8 +905,8 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms maxLayerId = std::max(maxLayerId, layerId); minLayerId_matched = std::min(minLayerId_matched, layerId_matched_min); maxLayerId_matched = std::max(maxLayerId_matched, layerId_matched_max); + } - //std::cout << std::endl; } histograms.h_caloparticle_firstlayer.at(pdgid)->Fill(minLayerId); histograms.h_caloparticle_lastlayer.at(pdgid)->Fill(maxLayerId); @@ -914,14 +918,14 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms histograms.h_caloparticle_nHitsInSimClusters.at(pdgid)->Fill((float)simHits); histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit.at(pdgid)->Fill((float)simHits_matched); + histograms.h_caloparticle_selfenergy.at(pdgid)->Fill((float)energy); + histograms.h_caloparticle_energyDifference.at(pdgid)->Fill((float) 1. - energy / caloparticle.energy()); + //Calculate sum energy per-layer auto i = totenergy_layer.begin(); double sum_energy = 0.0; while( i != totenergy_layer.end() ){ - //std::cout << "x = " << i->first << " y = " << i->second << std::endl; sum_energy += i->second; - //std::cout << " y (sum) = " << sum_energy << std::endl; - //std::cout << " y (100%) = " << sum_energy / caloparticle.energy() * 100. << std::endl; histograms.h_caloparticle_sum_energy_layer.at(pdgid)->Fill(i->first, sum_energy / caloparticle.energy() * 100. 
); i++; } From fa50a18ef4b834c2ee555449fdbe7e22ee9cebcc Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Mon, 7 Dec 2020 18:52:01 +0100 Subject: [PATCH 09/14] Fix minor things --- .../HGCalValidation/plugins/CaloParticleValidation.cc | 2 +- .../python/CaloParticleSelectionForEfficiency_cfi.py | 2 +- Validation/HGCalValidation/python/hgcalPlots.py | 6 +++--- Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc | 2 +- 4 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Validation/HGCalValidation/plugins/CaloParticleValidation.cc b/Validation/HGCalValidation/plugins/CaloParticleValidation.cc index 6e921083972d3..f1e5e8a7a90eb 100644 --- a/Validation/HGCalValidation/plugins/CaloParticleValidation.cc +++ b/Validation/HGCalValidation/plugins/CaloParticleValidation.cc @@ -239,7 +239,7 @@ void CaloParticleValidation::fillDescriptions(edm::ConfigurationDescriptions& de // Please change this to state exactly what you do use, even if it is no parameters edm::ParameterSetDescription desc; desc.add("folder", "HGCAL/"); // Please keep the trailing '/' - desc.add>("particles_to_monitor", {11, -11, 13, -13, 22, 111, 211, -211, 321, -321}); + desc.add>("particles_to_monitor", {11, -11, 13, -13, 22, 111, 211, -211, 321, -321, 311}); desc.add("simVertices", edm::InputTag("g4SimHits")); desc.add("caloParticles", edm::InputTag("mix", "MergedCaloTruth")); desc.add("simPFClusters", edm::InputTag("simPFProducer", "perfect")); diff --git a/Validation/HGCalValidation/python/CaloParticleSelectionForEfficiency_cfi.py b/Validation/HGCalValidation/python/CaloParticleSelectionForEfficiency_cfi.py index 6141cf4b30a72..0e9ae846a4fde 100644 --- a/Validation/HGCalValidation/python/CaloParticleSelectionForEfficiency_cfi.py +++ b/Validation/HGCalValidation/python/CaloParticleSelectionForEfficiency_cfi.py @@ -11,7 +11,7 @@ tipCP = cms.double(60), chargedOnlyCP = cms.bool(False), stableOnlyCP = cms.bool(False), - pdgIdCP = cms.vint32(11, -11, 13, -13, 22, 111, 211, -211, 321, -321), + pdgIdCP = cms.vint32(11, -11, 13, -13, 22, 111, 211, -211, 321, -321, 311), #--signal only means no PU particles signalOnlyCP = cms.bool(True), #--intime only means no OOT PU particles diff --git a/Validation/HGCalValidation/python/hgcalPlots.py b/Validation/HGCalValidation/python/hgcalPlots.py index 70f7e142587e9..7beb6b7480beb 100644 --- a/Validation/HGCalValidation/python/hgcalPlots.py +++ b/Validation/HGCalValidation/python/hgcalPlots.py @@ -2033,9 +2033,9 @@ def append_hgcalMultiClustersPlots(collection = 'ticlMultiClustersFromTracksters hgcalCaloParticlesPlotter = Plotter() def append_hgcalCaloParticlesPlots(files, collection = '-211', name_collection = "pion-"): - list_2D_histos = ["caloparticle_nHits_matched_layer", - "caloparticle_nHits_matched_layer_1SimCl", - "caloparticle_sum_energy_layer"] + list_2D_histos = ["Energy of Rec-matched Hits vs layer", + "Energy of Rec-matched Hits vs layer (1SC)", + "Rec-matched Hits Sum Energy vs layer"] dqmfolder = "DQMData/Run 1/HGCAL/Run summary/HGCalValidator/SelectedCaloParticles/" + collection print(dqmfolder) diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index 8d4eecaccb505..04ba4c167e38f 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -236,7 +236,7 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist 0., 10.); histograms.h_caloparticle_sum_energy_layer[pdgid] = - ibook.book2D("Rec-matched 
Hits Rescaled Sum Energy vs layer", + ibook.book2D("Rec-matched Hits Sum Energy vs layer", "Rescaled Sum Energy of Hits in Sim Clusters (matched) vs layer", 2 * layers, 0., From 80c193aab30a6c750893fa986774235d765a8071 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Mon, 7 Dec 2020 19:21:46 +0100 Subject: [PATCH 10/14] Clean up CaloParticleValidation code (still with PFCandidates) --- .../plugins/CaloParticleValidation.cc | 82 +------------------ 1 file changed, 1 insertion(+), 81 deletions(-) diff --git a/Validation/HGCalValidation/plugins/CaloParticleValidation.cc b/Validation/HGCalValidation/plugins/CaloParticleValidation.cc index f1e5e8a7a90eb..4dadfc459a778 100644 --- a/Validation/HGCalValidation/plugins/CaloParticleValidation.cc +++ b/Validation/HGCalValidation/plugins/CaloParticleValidation.cc @@ -31,15 +31,6 @@ // struct Histogram_CaloParticleSingle { - dqm::reco::MonitorElement* eta_; - dqm::reco::MonitorElement* pt_; - dqm::reco::MonitorElement* energy_; - dqm::reco::MonitorElement* nSimClusters_; - dqm::reco::MonitorElement* nHitInSimClusters_; - dqm::reco::MonitorElement* - selfEnergy_; // this is the sum of the energy associated to all recHits linked to all SimClusters - dqm::reco::MonitorElement* energyDifference_; // This contains (energy-selfEnergy)/energy - dqm::reco::MonitorElement* eta_Zorigin_map_; dqm::reco::MonitorElement* simPFSuperClusterSize_; dqm::reco::MonitorElement* simPFSuperClusterEnergy_; dqm::reco::MonitorElement* pfcandidateType_; @@ -70,13 +61,8 @@ class CaloParticleValidation : public DQMGlobalEDAnalyzer particles_to_monitor_; - - edm::EDGetTokenT> simVertices_; - edm::EDGetTokenT> caloParticles_; edm::EDGetTokenT> simPFClusters_; edm::EDGetTokenT simPFCandidates_; - const edm::EDGetTokenT> hitMap_; }; // @@ -92,12 +78,8 @@ class CaloParticleValidation : public DQMGlobalEDAnalyzer("folder")), - particles_to_monitor_(iConfig.getParameter>("particles_to_monitor")), - simVertices_(consumes>(iConfig.getParameter("simVertices"))), - caloParticles_(consumes>(iConfig.getParameter("caloParticles"))), simPFClusters_(consumes>(iConfig.getParameter("simPFClusters"))), - simPFCandidates_(consumes(iConfig.getParameter("simPFCandidates"))), - hitMap_(consumes>(iConfig.getParameter("hitMapTag"))) { + simPFCandidates_(consumes(iConfig.getParameter("simPFCandidates"))){ //now do what ever initialization is needed } @@ -117,18 +99,6 @@ void CaloParticleValidation::dqmAnalyze(edm::Event const& iEvent, Histograms_CaloParticleValidation const& histos) const { using namespace edm; - Handle> hitMapHandle; - iEvent.getByToken(hitMap_, hitMapHandle); - const auto hitmap = *hitMapHandle; - - Handle> simVerticesHandle; - iEvent.getByToken(simVertices_, simVerticesHandle); - std::vector const& simVertices = *simVerticesHandle; - - Handle> caloParticleHandle; - iEvent.getByToken(caloParticles_, caloParticleHandle); - std::vector const& caloParticles = *caloParticleHandle; - Handle> simPFClustersHandle; iEvent.getByToken(simPFClusters_, simPFClustersHandle); std::vector const& simPFClusters = *simPFClustersHandle; @@ -137,40 +107,6 @@ void CaloParticleValidation::dqmAnalyze(edm::Event const& iEvent, iEvent.getByToken(simPFCandidates_, simPFCandidatesHandle); reco::PFCandidateCollection const& simPFCandidates = *simPFCandidatesHandle; - for (auto const& caloParticle : caloParticles) { - if (caloParticle.g4Tracks()[0].eventId().event() != 0 or - caloParticle.g4Tracks()[0].eventId().bunchCrossing() != 0) { - LogDebug("CaloParticleValidation") << "Excluding CaloParticles from event: 
" - << caloParticle.g4Tracks()[0].eventId().event() - << " with BX: " << caloParticle.g4Tracks()[0].eventId().bunchCrossing() - << std::endl; - continue; - } - int id = caloParticle.pdgId(); - if (histos.count(id)) { - auto& histo = histos.at(id); - histo.eta_->Fill(caloParticle.eta()); - histo.pt_->Fill(caloParticle.pt()); - histo.energy_->Fill(caloParticle.energy()); - histo.nSimClusters_->Fill(caloParticle.simClusters().size()); - // Find the corresponding vertex. - histo.eta_Zorigin_map_->Fill(simVertices.at(caloParticle.g4Tracks()[0].vertIndex()).position().z(), - caloParticle.eta()); - int simHits = 0; - float energy = 0.; - for (auto const& sc : caloParticle.simClusters()) { - simHits += sc->hits_and_fractions().size(); - for (auto const& h_and_f : sc->hits_and_fractions()) { - if (hitmap.count(h_and_f.first)) - energy += hitmap.at(h_and_f.first)->energy() * h_and_f.second; - } - } - histo.nHitInSimClusters_->Fill((float)simHits); - histo.selfEnergy_->Fill(energy); - histo.energyDifference_->Fill(1. - energy / caloParticle.energy()); - } - } - // simPFSuperClusters for (auto const& sc : simPFClusters) { histos.at(0).simPFSuperClusterSize_->Fill((float)sc.clustersSize()); @@ -201,18 +137,6 @@ void CaloParticleValidation::bookHistograms(DQMStore::IBooker& ibook, edm::Run const& run, edm::EventSetup const& iSetup, Histograms_CaloParticleValidation& histos) const { - for (auto const particle : particles_to_monitor_) { - ibook.setCurrentFolder(folder_ + "CaloParticles/" + std::to_string(particle)); - auto& histo = histos[particle]; - histo.eta_ = ibook.book1D("Eta", "Eta", 80, -4., 4.); - histo.energy_ = ibook.book1D("Energy", "Energy", 250, 0., 500.); - histo.pt_ = ibook.book1D("Pt", "Pt", 100, 0., 100.); - histo.nSimClusters_ = ibook.book1D("NSimClusters", "NSimClusters", 100, 0., 100.); - histo.nHitInSimClusters_ = ibook.book1D("NHitInSimClusters", "NHitInSimClusters", 100, 0., 100.); - histo.selfEnergy_ = ibook.book1D("SelfEnergy", "SelfEnergy", 250, 0., 500.); - histo.energyDifference_ = ibook.book1D("EnergyDifference", "(Energy-SelfEnergy)/Energy", 300, -5., 1.); - histo.eta_Zorigin_map_ = ibook.book2D("Eta vs Zorigin", "Eta vs Zorigin", 80, -4., 4., 1100, -550., 550.); - } int offset = 100000; ibook.setCurrentFolder(folder_ + "PFCandidates"); histos[offset].pfcandidateType_ = ibook.book1D("PFCandidateType", "PFCandidateType", 10, 0, 10); @@ -239,12 +163,8 @@ void CaloParticleValidation::fillDescriptions(edm::ConfigurationDescriptions& de // Please change this to state exactly what you do use, even if it is no parameters edm::ParameterSetDescription desc; desc.add("folder", "HGCAL/"); // Please keep the trailing '/' - desc.add>("particles_to_monitor", {11, -11, 13, -13, 22, 111, 211, -211, 321, -321, 311}); - desc.add("simVertices", edm::InputTag("g4SimHits")); - desc.add("caloParticles", edm::InputTag("mix", "MergedCaloTruth")); desc.add("simPFClusters", edm::InputTag("simPFProducer", "perfect")); desc.add("simPFCandidates", edm::InputTag("simPFProducer")); - desc.add("hitMapTag", edm::InputTag("hgcalRecHitMapProducer")); descriptions.add("caloparticlevalidationDefault", desc); } From 85f58153d9434a57f598ff4c13b9922bdb129d76 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Tue, 8 Dec 2020 11:31:09 +0100 Subject: [PATCH 11/14] Code-format --- .../plugins/CaloParticleValidation.cc | 2 +- .../HGCalValidation/plugins/HGCalValidator.cc | 11 ++- .../src/HGVHistoProducerAlgo.cc | 79 ++++++++++--------- 3 files changed, 50 insertions(+), 42 deletions(-) diff --git 
a/Validation/HGCalValidation/plugins/CaloParticleValidation.cc b/Validation/HGCalValidation/plugins/CaloParticleValidation.cc index 4dadfc459a778..b10552806897f 100644 --- a/Validation/HGCalValidation/plugins/CaloParticleValidation.cc +++ b/Validation/HGCalValidation/plugins/CaloParticleValidation.cc @@ -79,7 +79,7 @@ class CaloParticleValidation : public DQMGlobalEDAnalyzer("folder")), simPFClusters_(consumes>(iConfig.getParameter("simPFClusters"))), - simPFCandidates_(consumes(iConfig.getParameter("simPFCandidates"))){ + simPFCandidates_(consumes(iConfig.getParameter("simPFCandidates"))) { //now do what ever initialization is needed } diff --git a/Validation/HGCalValidation/plugins/HGCalValidator.cc b/Validation/HGCalValidation/plugins/HGCalValidator.cc index 383fb631760cb..d1253a4ef40e2 100644 --- a/Validation/HGCalValidation/plugins/HGCalValidator.cc +++ b/Validation/HGCalValidation/plugins/HGCalValidator.cc @@ -91,7 +91,8 @@ void HGCalValidator::bookHistograms(DQMStore::IBooker& ibook, for (auto const particle : particles_to_monitor_) { ibook.setCurrentFolder(dirName_ + "SelectedCaloParticles/" + std::to_string(particle)); - histoProducerAlgo_->bookCaloParticleHistos(ibook, histograms.histoProducerAlgo, particle, totallayers_to_monitor_); + histoProducerAlgo_->bookCaloParticleHistos( + ibook, histograms.histoProducerAlgo, particle, totallayers_to_monitor_); } ibook.cd(); ibook.setCurrentFolder(dirName_); @@ -141,7 +142,7 @@ void HGCalValidator::bookHistograms(DQMStore::IBooker& ibook, void HGCalValidator::cpParametersAndSelection(const Histograms& histograms, std::vector const& cPeff, std::vector const& simVertices, - std::vector& selected_cPeff, + std::vector& selected_cPeff, unsigned layers, std::unordered_map const& hitMap) const { selected_cPeff.reserve(cPeff.size()); @@ -153,7 +154,8 @@ void HGCalValidator::cpParametersAndSelection(const Histograms& histograms, if (!doCaloParticleSelection_ || (doCaloParticleSelection_ && cpSelector(caloParticle, simVertices))) { selected_cPeff.push_back(j); if (doCaloParticlePlots_) { - histoProducerAlgo_->fill_caloparticle_histos(histograms.histoProducerAlgo, id, caloParticle, simVertices, layers, hitMap); + histoProducerAlgo_->fill_caloparticle_histos( + histograms.histoProducerAlgo, id, caloParticle, simVertices, layers, hitMap); } } ++j; @@ -282,7 +284,8 @@ void HGCalValidator::dqmAnalyze(const edm::Event& event, //General Info on multiclusters LogTrace("HGCalValidator") << "\n# of multi clusters with " << label_mcl[wml].process() << ":" << label_mcl[wml].label() << ":" << label_mcl[wml].instance() << ": " - << multiClusters.size() << "\n" << std::endl; + << multiClusters.size() << "\n" + << std::endl; } } //end of loop over multicluster input labels } diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index 04ba4c167e38f..8b5376df7f9f1 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -192,7 +192,10 @@ void HGVHistoProducerAlgo::bookInfo(DQMStore::IBooker& ibook, Histograms& histog histograms.maxlayerzp = ibook.bookInt("maxlayerzp"); } -void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid, unsigned layers) { +void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, + Histograms& histograms, + int pdgid, + unsigned layers) { histograms.h_caloparticle_eta[pdgid] = ibook.book1D("N of caloparticle vs eta", "N of 
caloparticle vs eta", nintEta_, minEta_, maxEta_); histograms.h_caloparticle_eta_Zorigin[pdgid] = @@ -201,8 +204,7 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist histograms.h_caloparticle_energy[pdgid] = ibook.book1D("Energy", "Energy of caloparticle", nintEne_, minEne_, maxEne_); histograms.h_caloparticle_pt[pdgid] = ibook.book1D("Pt", "Pt of caloparticle", nintPt_, minPt_, maxPt_); - histograms.h_caloparticle_phi[pdgid] = - ibook.book1D("Phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_); + histograms.h_caloparticle_phi[pdgid] = ibook.book1D("Phi", "Phi of caloparticle", nintPhi_, minPhi_, maxPhi_); histograms.h_caloparticle_selfenergy[pdgid] = ibook.book1D("SelfEnergy", "Total Energy of Hits in Sim Clusters (matched)", nintEne_, minEne_, maxEne_); histograms.h_caloparticle_energyDifference[pdgid] = @@ -212,12 +214,12 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist ibook.book1D("Num Sim Clusters", "Num Sim Clusters in caloparticle", 100, 0., 100.); histograms.h_caloparticle_nHitsInSimClusters[pdgid] = ibook.book1D("Num Hits in Sim Clusters", "Num Hits in Sim Clusters in caloparticle", 1000, 0., 1000.); - histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit[pdgid] = - ibook.book1D("Num Rec-matched Hits in Sim Clusters", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.); + histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit[pdgid] = ibook.book1D( + "Num Rec-matched Hits in Sim Clusters", "Num Hits in Sim Clusters (matched) in caloparticle", 1000, 0., 1000.); histograms.h_caloparticle_nHits_matched_energy[pdgid] = ibook.book1D("Energy of Rec-matched Hits", "Energy of Hits in Sim Clusters (matched)", 100, 0., 10.); - histograms.h_caloparticle_nHits_matched_energy_layer[pdgid] = + histograms.h_caloparticle_nHits_matched_energy_layer[pdgid] = ibook.book2D("Energy of Rec-matched Hits vs layer", "Energy of Hits in Sim Clusters (matched) vs layer", 2 * layers, @@ -226,7 +228,7 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist 100, 0., 10.); - histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl[pdgid] = + histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl[pdgid] = ibook.book2D("Energy of Rec-matched Hits vs layer (1SC)", "Energy of Hits only 1 Sim Clusters (matched) vs layer", 2 * layers, @@ -235,27 +237,32 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Hist 100, 0., 10.); - histograms.h_caloparticle_sum_energy_layer[pdgid] = + histograms.h_caloparticle_sum_energy_layer[pdgid] = ibook.book2D("Rec-matched Hits Sum Energy vs layer", "Rescaled Sum Energy of Hits in Sim Clusters (matched) vs layer", 2 * layers, 0., (float)2 * layers, - 110, 0., 110.); - + 110, + 0., + 110.); - histograms.h_caloparticle_firstlayer[pdgid] = + histograms.h_caloparticle_firstlayer[pdgid] = ibook.book1D("First Layer", "First layer of the caloparticle", 2 * layers, 0., (float)2 * layers); - histograms.h_caloparticle_lastlayer[pdgid] = + histograms.h_caloparticle_lastlayer[pdgid] = ibook.book1D("Last Layer", "Last layer of the caloparticle", 2 * layers, 0., (float)2 * layers); histograms.h_caloparticle_layersnum[pdgid] = ibook.book1D("Number of Layers", "Number of layers of the caloparticle", 2 * layers, 0., (float)2 * layers); - histograms.h_caloparticle_firstlayer_matchedtoRecHit[pdgid] = - ibook.book1D("First Layer (rec-matched hit)", "First layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); - 
histograms.h_caloparticle_lastlayer_matchedtoRecHit[pdgid] = - ibook.book1D("Last Layer (rec-matched hit)", "Last layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + histograms.h_caloparticle_firstlayer_matchedtoRecHit[pdgid] = ibook.book1D( + "First Layer (rec-matched hit)", "First layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + histograms.h_caloparticle_lastlayer_matchedtoRecHit[pdgid] = ibook.book1D( + "Last Layer (rec-matched hit)", "Last layer of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); histograms.h_caloparticle_layersnum_matchedtoRecHit[pdgid] = - ibook.book1D("Number of Layers (rec-matched hit)", "Number of layers of the caloparticle (matched)", 2 * layers, 0., (float)2 * layers); + ibook.book1D("Number of Layers (rec-matched hit)", + "Number of layers of the caloparticle (matched)", + 2 * layers, + 0., + (float)2 * layers); } void HGVHistoProducerAlgo::bookClusterHistos(DQMStore::IBooker& ibook, @@ -836,7 +843,6 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms std::vector const& simVertices, unsigned layers, std::unordered_map const& hitMap) const { - const auto eta = getEta(caloparticle.eta()); if (histograms.h_caloparticle_eta.count(pdgid)) { histograms.h_caloparticle_eta.at(pdgid)->Fill(eta); @@ -875,8 +881,8 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms for (auto const& h_and_f : sc->hits_and_fractions()) { const auto hitDetId = h_and_f.first; - int layerId = recHitTools_->getLayerWithOffset(hitDetId) + - layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1; + int layerId = + recHitTools_->getLayerWithOffset(hitDetId) + layers * ((recHitTools_->zside(hitDetId) + 1) >> 1) - 1; // set to 0 if matched RecHit not found int layerId_matched_min = 999; @@ -888,49 +894,48 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms simHits_matched++; const HGCRecHit* hit = itcheck->second; - energy += hit->energy()*h_and_f.second; - histograms.h_caloparticle_nHits_matched_energy.at(pdgid)->Fill(hit->energy()*h_and_f.second); - histograms.h_caloparticle_nHits_matched_energy_layer.at(pdgid)->Fill(layerId, hit->energy()*h_and_f.second); + energy += hit->energy() * h_and_f.second; + histograms.h_caloparticle_nHits_matched_energy.at(pdgid)->Fill(hit->energy() * h_and_f.second); + histograms.h_caloparticle_nHits_matched_energy_layer.at(pdgid)->Fill(layerId, hit->energy() * h_and_f.second); - if (totenergy_layer.find(layerId) != totenergy_layer.end()){ - totenergy_layer[layerId] = totenergy_layer.at(layerId) + hit->energy(); + if (totenergy_layer.find(layerId) != totenergy_layer.end()) { + totenergy_layer[layerId] = totenergy_layer.at(layerId) + hit->energy(); } else { - totenergy_layer.emplace(layerId, hit->energy()); + totenergy_layer.emplace(layerId, hit->energy()); } - if (caloparticle.simClusters().size() == 1 ) - histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl.at(pdgid)->Fill(layerId, hit->energy()*h_and_f.second); + if (caloparticle.simClusters().size() == 1) + histograms.h_caloparticle_nHits_matched_energy_layer_1SimCl.at(pdgid)->Fill(layerId, + hit->energy() * h_and_f.second); } minLayerId = std::min(minLayerId, layerId); maxLayerId = std::max(maxLayerId, layerId); minLayerId_matched = std::min(minLayerId_matched, layerId_matched_min); maxLayerId_matched = std::max(maxLayerId_matched, layerId_matched_max); - } } histograms.h_caloparticle_firstlayer.at(pdgid)->Fill(minLayerId); 
histograms.h_caloparticle_lastlayer.at(pdgid)->Fill(maxLayerId); - histograms.h_caloparticle_layersnum.at(pdgid)->Fill(int(maxLayerId-minLayerId)); + histograms.h_caloparticle_layersnum.at(pdgid)->Fill(int(maxLayerId - minLayerId)); histograms.h_caloparticle_firstlayer_matchedtoRecHit.at(pdgid)->Fill(minLayerId_matched); histograms.h_caloparticle_lastlayer_matchedtoRecHit.at(pdgid)->Fill(maxLayerId_matched); - histograms.h_caloparticle_layersnum_matchedtoRecHit.at(pdgid)->Fill(int(maxLayerId_matched-minLayerId_matched)); + histograms.h_caloparticle_layersnum_matchedtoRecHit.at(pdgid)->Fill(int(maxLayerId_matched - minLayerId_matched)); histograms.h_caloparticle_nHitsInSimClusters.at(pdgid)->Fill((float)simHits); histograms.h_caloparticle_nHitsInSimClusters_matchedtoRecHit.at(pdgid)->Fill((float)simHits_matched); histograms.h_caloparticle_selfenergy.at(pdgid)->Fill((float)energy); - histograms.h_caloparticle_energyDifference.at(pdgid)->Fill((float) 1. - energy / caloparticle.energy()); + histograms.h_caloparticle_energyDifference.at(pdgid)->Fill((float)1. - energy / caloparticle.energy()); //Calculate sum energy per-layer auto i = totenergy_layer.begin(); double sum_energy = 0.0; - while( i != totenergy_layer.end() ){ - sum_energy += i->second; - histograms.h_caloparticle_sum_energy_layer.at(pdgid)->Fill(i->first, sum_energy / caloparticle.energy() * 100. ); - i++; + while (i != totenergy_layer.end()) { + sum_energy += i->second; + histograms.h_caloparticle_sum_energy_layer.at(pdgid)->Fill(i->first, sum_energy / caloparticle.energy() * 100.); + i++; } } - } void HGVHistoProducerAlgo::fill_cluster_histos(const Histograms& histograms, From a908e61934c354ad8075a064c3f8d858a916a0a0 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Wed, 9 Dec 2020 10:45:57 +0100 Subject: [PATCH 12/14] Using unsigned int --- .../HGCalValidation/interface/HGCalValidator.h | 2 +- .../interface/HGVHistoProducerAlgo.h | 18 +++++++++--------- .../HGCalValidation/plugins/HGCalValidator.cc | 2 +- .../src/HGVHistoProducerAlgo.cc | 18 +++++++++--------- 4 files changed, 20 insertions(+), 20 deletions(-) diff --git a/Validation/HGCalValidation/interface/HGCalValidator.h b/Validation/HGCalValidation/interface/HGCalValidator.h index cbd1ba2585d40..b93feb3f4d38b 100644 --- a/Validation/HGCalValidation/interface/HGCalValidator.h +++ b/Validation/HGCalValidation/interface/HGCalValidator.h @@ -55,7 +55,7 @@ class HGCalValidator : public DQMGlobalEDAnalyzer { std::vector const& cPeff, std::vector const& simVertices, std::vector& selected_cPeff, - unsigned layers, + unsigned int layers, std::unordered_map const&) const; protected: diff --git a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h index 08e91145adecb..2338cc695d4bc 100644 --- a/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h +++ b/Validation/HGCalValidation/interface/HGVHistoProducerAlgo.h @@ -170,14 +170,14 @@ class HGVHistoProducerAlgo { using Histograms = HGVHistoProducerAlgoHistograms; void bookInfo(DQMStore::IBooker& ibook, Histograms& histograms); - void bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid, unsigned layers); + void bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid, unsigned int layers); void bookClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, - unsigned layers, + unsigned int layers, std::vector thicknesses, std::string pathtomatbudfile); - void 
bookMultiClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, unsigned layers); + void bookMultiClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, unsigned int layers); void layerClusters_to_CaloParticles( const Histograms& histograms, edm::Handle clusterHandle, @@ -187,7 +187,7 @@ class HGVHistoProducerAlgo { std::vector const& cPIndices, std::vector const& cPSelectedIndices, std::unordered_map const&, - unsigned layers, + unsigned int layers, const edm::Handle& LCAssocByEnergyScoreHandle) const; void multiClusters_to_CaloParticles(const Histograms& histograms, int count, @@ -196,13 +196,13 @@ class HGVHistoProducerAlgo { std::vector const& cPIndices, std::vector const& cPSelectedIndices, std::unordered_map const&, - unsigned layers) const; - void fill_info_histos(const Histograms& histograms, unsigned layers) const; + unsigned int layers) const; + void fill_info_histos(const Histograms& histograms, unsigned int layers) const; void fill_caloparticle_histos(const Histograms& histograms, int pdgid, const CaloParticle& caloparticle, std::vector const& simVertices, - unsigned layers, + unsigned int layers, std::unordered_map const&) const; void fill_cluster_histos(const Histograms& histograms, int count, const reco::CaloCluster& cluster) const; void fill_generic_cluster_histos( @@ -217,7 +217,7 @@ class HGVHistoProducerAlgo { std::vector const& cPSelectedIndices, std::unordered_map const&, std::map cummatbudg, - unsigned layers, + unsigned int layers, std::vector thicknesses, edm::Handle& LCAssocByEnergyScoreHandle) const; void fill_multi_cluster_histos(const Histograms& histograms, @@ -227,7 +227,7 @@ class HGVHistoProducerAlgo { std::vector const& cPIndices, std::vector const& cPSelectedIndices, std::unordered_map const&, - unsigned layers) const; + unsigned int layers) const; double distance2(const double x1, const double y1, const double x2, const double y2) const; double distance(const double x1, const double y1, const double x2, const double y2) const; diff --git a/Validation/HGCalValidation/plugins/HGCalValidator.cc b/Validation/HGCalValidation/plugins/HGCalValidator.cc index d1253a4ef40e2..74556de066963 100644 --- a/Validation/HGCalValidation/plugins/HGCalValidator.cc +++ b/Validation/HGCalValidation/plugins/HGCalValidator.cc @@ -143,7 +143,7 @@ void HGCalValidator::cpParametersAndSelection(const Histograms& histograms, std::vector const& cPeff, std::vector const& simVertices, std::vector& selected_cPeff, - unsigned layers, + unsigned int layers, std::unordered_map const& hitMap) const { selected_cPeff.reserve(cPeff.size()); diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index 8b5376df7f9f1..840fe152cf355 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -195,7 +195,7 @@ void HGVHistoProducerAlgo::bookInfo(DQMStore::IBooker& ibook, Histograms& histog void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, Histograms& histograms, int pdgid, - unsigned layers) { + unsigned int layers) { histograms.h_caloparticle_eta[pdgid] = ibook.book1D("N of caloparticle vs eta", "N of caloparticle vs eta", nintEta_, minEta_, maxEta_); histograms.h_caloparticle_eta_Zorigin[pdgid] = @@ -267,7 +267,7 @@ void HGVHistoProducerAlgo::bookCaloParticleHistos(DQMStore::IBooker& ibook, void HGVHistoProducerAlgo::bookClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, - unsigned layers, + unsigned 
int layers, std::vector thicknesses, std::string pathtomatbudfile) { //--------------------------------------------------------------------------------------------------------------------------- @@ -592,7 +592,7 @@ void HGVHistoProducerAlgo::bookClusterHistos(DQMStore::IBooker& ibook, //--------------------------------------------------------------------------------------------------------------------------- } -void HGVHistoProducerAlgo::bookMultiClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, unsigned layers) { +void HGVHistoProducerAlgo::bookMultiClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, unsigned int layers) { histograms.h_score_multicl2caloparticle.push_back(ibook.book1D( "Score_multicl2caloparticle", "Score of Multi Cluster per CaloParticle", nintScore_, minScore_, maxScore_)); histograms.h_score_caloparticle2multicl.push_back(ibook.book1D( @@ -824,7 +824,7 @@ void HGVHistoProducerAlgo::bookMultiClusterHistos(DQMStore::IBooker& ibook, Hist "multicluster_layersnum", "Number of layers of the multicluster", 2 * layers, 0., (float)2 * layers)); } -void HGVHistoProducerAlgo::fill_info_histos(const Histograms& histograms, unsigned layers) const { +void HGVHistoProducerAlgo::fill_info_histos(const Histograms& histograms, unsigned int layers) const { //We will save some info straight from geometry to avoid mistakes from updates //----------- TODO ---------------------------------------------------------- //For now values returned for 'lastLayerFHzp': '104', 'lastLayerFHzm': '52' are not the one expected. @@ -841,7 +841,7 @@ void HGVHistoProducerAlgo::fill_caloparticle_histos(const Histograms& histograms int pdgid, const CaloParticle& caloparticle, std::vector const& simVertices, - unsigned layers, + unsigned int layers, std::unordered_map const& hitMap) const { const auto eta = getEta(caloparticle.eta()); if (histograms.h_caloparticle_eta.count(pdgid)) { @@ -954,7 +954,7 @@ void HGVHistoProducerAlgo::layerClusters_to_CaloParticles( std::vector const& cPIndices, std::vector const& cPSelectedIndices, std::unordered_map const& hitMap, - unsigned layers, + unsigned int layers, const edm::Handle& LCAssocByEnergyScoreHandle) const { auto nLayerClusters = clusters.size(); @@ -1228,7 +1228,7 @@ void HGVHistoProducerAlgo::fill_generic_cluster_histos( std::vector const& cPSelectedIndices, std::unordered_map const& hitMap, std::map cummatbudg, - unsigned layers, + unsigned int layers, std::vector thicknesses, edm::Handle& LCAssocByEnergyScoreHandle) const { //Each event to be treated as two events: an event in +ve endcap, @@ -1549,7 +1549,7 @@ void HGVHistoProducerAlgo::multiClusters_to_CaloParticles(const Histograms& hist std::vector const& cPIndices, std::vector const& cPSelectedIndices, std::unordered_map const& hitMap, - unsigned layers) const { + unsigned int layers) const { auto nMultiClusters = multiClusters.size(); //Consider CaloParticles coming from the hard scatterer, excluding the PU contribution. auto nCaloParticles = cPIndices.size(); @@ -2155,7 +2155,7 @@ void HGVHistoProducerAlgo::fill_multi_cluster_histos(const Histograms& histogram std::vector const& cPIndices, std::vector const& cPSelectedIndices, std::unordered_map const& hitMap, - unsigned layers) const { + unsigned int layers) const { //Each event to be treated as two events: //an event in +ve endcap, plus another event in -ve endcap. 
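Two formulas recur throughout the diffs above and are worth spelling out: layer-resolved CaloParticle histograms are booked with 2 * layers bins because both HGCAL endcaps are folded onto a single axis via layerId = getLayerWithOffset(detId) + layers * ((zside + 1) >> 1) - 1, and the new SelfEnergy/EnergyDifference plots sum the RecHit energy weighted by the SimCluster hit fraction for hits that have a matched RecHit. A minimal sketch in Python (illustrative only, not part of any patch; names such as folded_layer_id, self_energy and hit_map are hypothetical, and 52 layers per endcap is an assumed example value):

    def folded_layer_id(layer_with_offset, zside, layers):
        # Fold both endcaps onto one 0 .. 2*layers-1 axis, as done in HGVHistoProducerAlgo.
        return layer_with_offset + layers * ((zside + 1) >> 1) - 1

    def self_energy(sim_clusters, hit_map):
        # Sum RecHit energy * sim fraction for hits with a matched RecHit
        # (the quantity filled into the SelfEnergy histogram).
        energy = 0.0
        for hits_and_fractions in sim_clusters:
            for det_id, fraction in hits_and_fractions:
                rec_hit_energy = hit_map.get(det_id)  # None if no matched RecHit
                if rec_hit_energy is not None:
                    energy += rec_hit_energy * fraction
        return energy

    layers = 52  # assumed per-endcap layer count, for illustration only
    assert folded_layer_id(1, -1, layers) == 0                # first layer, z < 0 endcap
    assert folded_layer_id(1, +1, layers) == layers           # first layer, z > 0 endcap
    assert folded_layer_id(layers, +1, layers) == 2 * layers - 1

The EnergyDifference entry is then 1.0 - self_energy(...) / caloparticle_energy, matching the histogram title "(Energy-SelfEnergy)/Energy".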
From a8c9c5ef28db0f5200a8fa7e298e12dcfa18406d Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Wed, 9 Dec 2020 10:47:34 +0100 Subject: [PATCH 13/14] Caloparticles not used anymore in the CaloParticleValidation in premix_stage2 --- .../HGCalValidation/python/caloparticlevalidation_cfi.py | 9 --------- 1 file changed, 9 deletions(-) diff --git a/Validation/HGCalValidation/python/caloparticlevalidation_cfi.py b/Validation/HGCalValidation/python/caloparticlevalidation_cfi.py index 061afb85c34a3..402e43f2cd079 100644 --- a/Validation/HGCalValidation/python/caloparticlevalidation_cfi.py +++ b/Validation/HGCalValidation/python/caloparticlevalidation_cfi.py @@ -2,12 +2,3 @@ from Validation.HGCalValidation.caloparticlevalidationDefault_cfi import caloparticlevalidationDefault as _caloparticlevalidationDefault caloparticlevalidation = _caloparticlevalidationDefault.clone() - -# TODO: The following would be needed to use the signal+pileup -# CaloParticles for premixing. However, the code uses SimVertices, and -# - we don't propagate pileup SimVertices (actually we don't do that even in classical mixing?) -# - the code will either produce garbage or throw an exception -from Configuration.ProcessModifiers.premix_stage2_cff import premix_stage2 -premix_stage2.toModify(caloparticlevalidation, - caloParticles = "mixData:MergedCaloTruth" -) From 12f5bc0145b70f85e10c0644a17dd01b42d42256 Mon Sep 17 00:00:00 2001 From: Erica Brondolin Date: Wed, 9 Dec 2020 10:58:17 +0100 Subject: [PATCH 14/14] Code format --- Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc index 840fe152cf355..0a6506425681f 100644 --- a/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc +++ b/Validation/HGCalValidation/src/HGVHistoProducerAlgo.cc @@ -592,7 +592,9 @@ void HGVHistoProducerAlgo::bookClusterHistos(DQMStore::IBooker& ibook, //--------------------------------------------------------------------------------------------------------------------------- } -void HGVHistoProducerAlgo::bookMultiClusterHistos(DQMStore::IBooker& ibook, Histograms& histograms, unsigned int layers) { +void HGVHistoProducerAlgo::bookMultiClusterHistos(DQMStore::IBooker& ibook, + Histograms& histograms, + unsigned int layers) { histograms.h_score_multicl2caloparticle.push_back(ibook.book1D( "Score_multicl2caloparticle", "Score of Multi Cluster per CaloParticle", nintScore_, minScore_, maxScore_)); histograms.h_score_caloparticle2multicl.push_back(ibook.book1D(