diff --git a/DQMOffline/JetMET/interface/METAnalyzer.h b/DQMOffline/JetMET/interface/METAnalyzer.h index 8faa4849b6cc6..b441182326764 100644 --- a/DQMOffline/JetMET/interface/METAnalyzer.h +++ b/DQMOffline/JetMET/interface/METAnalyzer.h @@ -86,7 +86,7 @@ #include #include -class METAnalyzer : public DQMOneLumiEDAnalyzer<> { +class METAnalyzer : public DQMOneEDAnalyzer<> { public: /// Constructor METAnalyzer(const edm::ParameterSet&); diff --git a/DQMServices/Core/README.md b/DQMServices/Core/README.md index 7057d346e3823..fba63c8830d92 100644 --- a/DQMServices/Core/README.md +++ b/DQMServices/Core/README.md @@ -24,7 +24,7 @@ DQM code runs as CMSSW plugins. There are two main types of plugins: *Analyzers* There are six supported types of DQM modules: - `DQMEDAnalyzer`, based on `edm::stream::EDProducer`. Used for the majority of histogram filling in RECO jobs. -- `DQMOneEDAnalyzer` based on `edm::one::EDProducer`. Used when begin/end job transitions are required. Can accept more `edm::one` specific options. Cannot save per-lumi histograms. +- `DQMOneEDAnalyzer` based on `edm::one::EDProducer`. Used when begin/end job transitions are required. Can accept more `edm::one` specific options. - `DQMOneLumiEDAnalyzer` based on `edm::one::EDProducer`. Used when begin/end lumi transitions are needed. Blocks concurrent lumisections. - `DQMGlobalEDAnalyzer` based on `edm::global::EDProducer`. Used for DQM@HLT and a few random other things. Cannot save per-lumi histograms (this is a conflict with the fact that HLT _typically_ saves _only_ per lumi histograms, see #28341). - `DQMEDHarvester` based on `edm::one::EDProducer`. Used in harvesting jobs to manipulate histograms in lumi, run, and job transitions. 
diff --git a/DQMServices/Core/interface/DQMOneEDAnalyzer.h b/DQMServices/Core/interface/DQMOneEDAnalyzer.h index 6a67d757d2ac7..3ea7468f3b916 100644 --- a/DQMServices/Core/interface/DQMOneEDAnalyzer.h +++ b/DQMServices/Core/interface/DQMOneEDAnalyzer.h @@ -23,7 +23,7 @@ class DQMOneEDAnalyzer typedef dqm::reco::DQMStore DQMStore; typedef dqm::reco::MonitorElement MonitorElement; - virtual bool getCanSaveByLumi() { return false; } + virtual bool getCanSaveByLumi() { return true; } // framework calls in the order of invocation DQMOneEDAnalyzer() { @@ -44,7 +44,14 @@ class DQMOneEDAnalyzer edm::Service()->enterLumi(run.run(), /* lumi */ 0, this->moduleDescription().id()); } - void accumulate(edm::Event const& event, edm::EventSetup const& setup) final { analyze(event, setup); } + void accumulate(edm::Event const& event, edm::EventSetup const& setup) { + auto& lumi = event.getLuminosityBlock(); + edm::Service()->enterLumi( + lumi.run(), lumi.luminosityBlock(), this->moduleDescription().id()); + analyze(event, setup); + edm::Service()->leaveLumi( + lumi.run(), lumi.luminosityBlock(), this->moduleDescription().id()); + } void endRunProduce(edm::Run& run, edm::EventSetup const& setup) final { dqmEndRun(run, setup); @@ -72,9 +79,6 @@ class DQMOneEDAnalyzer * lumisections in the entire job! * Combining with edm::LuminosityBlockCache is pointless and will not work * properly, due to the ordering of global/produce transitions. - * It would be possible to make this concurrent lumi-able with a bit of work - * on the DQMStore side, but the kind of modules that need this base class - * probaby care about seeing lumisections in order anyways. */ template @@ -95,11 +99,7 @@ class DQMOneLumiEDAnalyzer dqmBeginLuminosityBlock(lumi, setup); } - //void accumulate(edm::StreamID id, edm::Event const& event, edm::EventSetup const& setup) final { - // // TODO: we could maybe switch lumis by event here, to allow concurrent - // // lumis. Not for now, though. 
- // analyze(event, setup); - //} + void accumulate(edm::Event const& event, edm::EventSetup const& setup) override { this->analyze(event, setup); } void endLuminosityBlockProduce(edm::LuminosityBlock& lumi, edm::EventSetup const& setup) final { dqmEndLuminosityBlock(lumi, setup); diff --git a/DQMServices/Demo/test/TestDQMEDAnalyzer.cc b/DQMServices/Demo/test/TestDQMEDAnalyzer.cc index da2b5624f2374..6f803dd577572 100644 --- a/DQMServices/Demo/test/TestDQMEDAnalyzer.cc +++ b/DQMServices/Demo/test/TestDQMEDAnalyzer.cc @@ -141,7 +141,7 @@ class TestDQMOneEDAnalyzer : public DQMOneEDAnalyzer<> { mymes_.fillall(iEvent.luminosityBlock(), iEvent.run(), myvalue_); } - BookerFiller mymes_; + BookerFiller mymes_; double myvalue_; }; DEFINE_FWK_MODULE(TestDQMOneEDAnalyzer); diff --git a/DQMServices/Demo/test/runtests.sh b/DQMServices/Demo/test/runtests.sh index 4ab4749c14477..c27429a3d4165 100755 --- a/DQMServices/Demo/test/runtests.sh +++ b/DQMServices/Demo/test/runtests.sh @@ -12,7 +12,7 @@ fi cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=alltypes.root numberEventsInRun=100 numberEventsInLuminosityBlock=20 nEvents=100 # actually we'd expect 99, but the MEs by legacy modules are booked with JOB scope and cannot be saved to DQMIO. [ 66 = $(dqmiolistmes.py alltypes.root -r 1 | wc -l) ] -[ 55 = $(dqmiolistmes.py alltypes.root -r 1 -l 1 | wc -l) ] +[ 66 = $(dqmiolistmes.py alltypes.root -r 1 -l 1 | wc -l) ] # this is deeply related to what the analyzers actually do. # again, the legacy modules output is not saved. # most run histos (4 modules * 9 types) fill on every event and should have 100 entries. 
@@ -20,13 +20,13 @@ cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=alltypes.root numberEventsIn # testonefilllumi also should have 5 entries in the histograms (9 more) # the "fillrun" module should have one entry in the histograms (9 total) and 0 in the scalars (2 total) [ "0: 1, 0.0: 1, 1: 9, 100: 36, 5: 14, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 --summary)" ] -# per lumi we see 20 in most histograms (3*9), and the current lumi number in the scalars (5 modules * 2). +# per lumi we see 20 in most histograms (4*9), and the current lumi number in the scalars (6 modules * 2). # the two fillumi modules should have one entry in each of the lumi histograms, (2*9 total) -[ "1: 23, 1.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 1 --summary)" ] -[ "1: 18, 2: 5, 2.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 2 --summary)" ] -[ "1: 18, 20: 27, 3: 5, 3.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 3 --summary)" ] -[ "1: 18, 20: 27, 4: 5, 4.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 4 --summary)" ] -[ "1: 18, 20: 27, 5: 5, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 5 --summary)" ] +[ "1: 24, 1.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 1 --summary)" ] +[ "1: 18, 2: 6, 2.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 2 --summary)" ] +[ "1: 18, 20: 36, 3: 6, 3.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 3 --summary)" ] +[ "1: 18, 20: 36, 4: 6, 4.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 4 --summary)" ] +[ "1: 18, 20: 36, 5: 6, 5.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 5 --summary)" ] # just make sure we are not off by one [ "" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 6 --summary)" ] @@ -43,11 +43,11 @@ cmsRun 
$LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=nolegacy-cl.root numberEvent for f in nolegacy.root nolegacy-mt.root nolegacy-cl.root do [ "0: 1, 0.0: 1, 1: 9, 1000: 27, 5: 3, 5.0: 3" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 --summary)" ] - [ "1: 1, 1.0: 1, 200: 9" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 1 --summary)" ] - [ "2: 1, 2.0: 1, 200: 9" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 2 --summary)" ] - [ "200: 9, 3: 1, 3.0: 1" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 3 --summary)" ] - [ "200: 9, 4: 1, 4.0: 1" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 4 --summary)" ] - [ "200: 9, 5: 1, 5.0: 1" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 5 --summary)" ] + [ "1: 2, 1.0: 2, 200: 18" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 1 --summary)" ] + [ "2: 2, 2.0: 2, 200: 18" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 2 --summary)" ] + [ "200: 18, 3: 2, 3.0: 2" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 3 --summary)" ] + [ "200: 18, 4: 2, 4.0: 2" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 4 --summary)" ] + [ "200: 18, 5: 2, 5.0: 2" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 5 --summary)" ] [ "" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 6 --summary)" ] done @@ -89,9 +89,9 @@ cmp <($LOCAL_TEST_DIR/dqmiodumpentries.py multirun.root -r 1 -l 2) <($LOCAL_TEST # 7. Try writing a TDirectory file. cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py inputFiles=alltypes.root nomodules=True legacyoutput=True reScope=JOB -# this number is rather messy: we have 55 per-lumi objecs (harvested), 66 per-run objects (no legacy output), one folder for each set of 11, +# this number is rather messy: we have 66 per-lumi objects (harvested), 66 per-run objects (no legacy output), one folder for each set of 11, # plus some higher-level folders and the ProvInfo hierarchy create by the FileSaver.
-[ 149 = $(rootlist DQM_V0001_R000000001__Harvesting__DQMTests__DQMIO.root | wc -l) ] +[ 161 = $(rootlist DQM_V0001_R000000001__Harvesting__DQMTests__DQMIO.root | wc -l) ] cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py numberEventsInRun=100 numberEventsInLuminosityBlock=20 nEvents=100 legacyoutput=True # we expect only the (per-job) legacy histograms here: 3*11 objects in 3 folders, plus 9 more for ProvInfo and higher-level folders. @@ -102,14 +102,14 @@ cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py numberEventsInRun=300 numberEventsIn cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py inputFiles=./run000001 outfile=pbdata.root nomodules=True protobufinput=True [ 99 = $(dqmiolistmes.py pbdata.root -r 1 | wc -l) ] -[ 55 = $(dqmiolistmes.py pbdata.root -r 1 -l 1 | wc -l) ] +[ 66 = $(dqmiolistmes.py pbdata.root -r 1 -l 1 | wc -l) ] # this will potentially mess up statistics (we should only fastHadd *within* a lumisection, not *across*), but should technically work. fastHadd add -o streamDQMHistograms.pb run000001/run000001_ls*_streamDQMHistograms.pb # the output format is different from the harvesting above, this is a not-DQM-formatted TDirectory file. fastHadd convert -o streamDQMHistograms.root streamDQMHistograms.pb -# here we expect all (incl. legacy) MEs (99+55), plus folders (14 + 4 higher-level) -[ 172 = $(rootlist streamDQMHistograms.root | wc -l) ] +# here we expect all (incl. legacy) MEs (99+66), plus folders (14 + 4 higher-level) +[ 184 = $(rootlist streamDQMHistograms.root | wc -l) ] # 9. Try writing online files. This is really TDirectory files, but written via a different module. 
@@ -131,14 +131,14 @@ cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py inputFiles=part1.root inputFiles=pa cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=metoedm.root numberEventsInRun=100 numberEventsInLuminosityBlock=20 nEvents=100 metoedmoutput=True cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py outfile=edmtome.root inputFiles=metoedm.root nomodules=True metoedminput=True [ 66 = $(dqmiolistmes.py edmtome.root -r 1 | wc -l) ] -[ 55 = $(dqmiolistmes.py edmtome.root -r 1 -l 1 | wc -l) ] +[ 66 = $(dqmiolistmes.py edmtome.root -r 1 -l 1 | wc -l) ] # again, no legacy module (run) output here due to JOB scope for legacy modules [ "0: 1, 0.0: 1, 1: 9, 100: 36, 5: 14, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 --summary)" ] -[ "1: 23, 1.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 1 --summary)" ] -[ "1: 18, 2: 5, 2.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 2 --summary)" ] -[ "1: 18, 20: 27, 3: 5, 3.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 3 --summary)" ] -[ "1: 18, 20: 27, 4: 5, 4.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 4 --summary)" ] -[ "1: 18, 20: 27, 5: 5, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 5 --summary)" ] +[ "1: 24, 1.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 1 --summary)" ] +[ "1: 18, 2: 6, 2.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 2 --summary)" ] +[ "1: 18, 20: 36, 3: 6, 3.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 3 --summary)" ] +[ "1: 18, 20: 36, 4: 6, 4.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 4 --summary)" ] +[ "1: 18, 20: 36, 5: 6, 5.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 5 --summary)" ] [ "" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 6 --summary)" ] cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py 
outfile=part1_metoedm.root metoedmoutput=True numberEventsInRun=300 numberEventsInLuminosityBlock=100 nEvents=50 # 1st half of 1st lumi