Skip to content

Commit

Permalink
Merge pull request #29321 from schneiml/dqm-allow-per-lumi-in-dqmoned…
Browse files Browse the repository at this point in the history
…analyzer

DQM: Allow per-lumi MEs in DQMOneEDAnalyzer
  • Loading branch information
cmsbuild authored Apr 14, 2020
2 parents 050bdf1 + 466e9d7 commit bd3cfb0
Show file tree
Hide file tree
Showing 5 changed files with 36 additions and 36 deletions.
2 changes: 1 addition & 1 deletion DQMOffline/JetMET/interface/METAnalyzer.h
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@
#include <map>
#include <string>

class METAnalyzer : public DQMOneLumiEDAnalyzer<> {
class METAnalyzer : public DQMOneEDAnalyzer<> {
public:
/// Constructor
METAnalyzer(const edm::ParameterSet&);
Expand Down
2 changes: 1 addition & 1 deletion DQMServices/Core/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ DQM code runs as CMSSW plugins. There are two main types of plugins: *Analyzers*

There are six supported types of DQM modules:
- `DQMEDAnalyzer`, based on `edm::stream::EDProducer`. Used for the majority of histogram filling in RECO jobs.
- `DQMOneEDAnalyzer` based on `edm::one::EDProducer`. Used when begin/end job transitions are required. Can accept more `edm::one` specific options. Cannot save per-lumi histograms.
- `DQMOneEDAnalyzer` based on `edm::one::EDProducer`. Used when begin/end job transitions are required. Can accept more `edm::one` specific options.
- `DQMOneLumiEDAnalyzer` based on `edm::one::EDProducer`. Used when begin/end lumi transitions are needed. Blocks concurrent lumisections.
- `DQMGlobalEDAnalyzer` based on `edm::global::EDProducer`. Used for DQM@HLT and a few random other things. Cannot save per-lumi histograms (this is a conflict with the fact that HLT _typically_ saves _only_ per lumi histograms, see #28341).
- `DQMEDHarvester` based on `edm::one::EDProducer`. Used in harvesting jobs to manipulate histograms in lumi, run, and job transitions.
Expand Down
20 changes: 10 additions & 10 deletions DQMServices/Core/interface/DQMOneEDAnalyzer.h
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ class DQMOneEDAnalyzer
typedef dqm::reco::DQMStore DQMStore;
typedef dqm::reco::MonitorElement MonitorElement;

virtual bool getCanSaveByLumi() { return false; }
virtual bool getCanSaveByLumi() { return true; }

// framework calls in the order of invocation
DQMOneEDAnalyzer() {
Expand All @@ -44,7 +44,14 @@ class DQMOneEDAnalyzer
edm::Service<DQMStore>()->enterLumi(run.run(), /* lumi */ 0, this->moduleDescription().id());
}

void accumulate(edm::Event const& event, edm::EventSetup const& setup) final { analyze(event, setup); }
void accumulate(edm::Event const& event, edm::EventSetup const& setup) {
auto& lumi = event.getLuminosityBlock();
edm::Service<dqm::legacy::DQMStore>()->enterLumi(
lumi.run(), lumi.luminosityBlock(), this->moduleDescription().id());
analyze(event, setup);
edm::Service<dqm::legacy::DQMStore>()->leaveLumi(
lumi.run(), lumi.luminosityBlock(), this->moduleDescription().id());
}

void endRunProduce(edm::Run& run, edm::EventSetup const& setup) final {
dqmEndRun(run, setup);
Expand Down Expand Up @@ -72,9 +79,6 @@ class DQMOneEDAnalyzer
* lumisections in the entire job!
* Combining with edm::LuminosityBlockCache is pointless and will not work
* properly, due to the ordering of global/produce transitions.
* It would be possible to make this concurrent lumi-able with a bit of work
* on the DQMStore side, but the kind of modules that need this base class
 * probably care about seeing lumisections in order anyways.
*/

template <typename... Args>
Expand All @@ -95,11 +99,7 @@ class DQMOneLumiEDAnalyzer
dqmBeginLuminosityBlock(lumi, setup);
}

//void accumulate(edm::StreamID id, edm::Event const& event, edm::EventSetup const& setup) final {
// // TODO: we could maybe switch lumis by event here, to allow concurrent
// // lumis. Not for now, though.
// analyze(event, setup);
//}
void accumulate(edm::Event const& event, edm::EventSetup const& setup) override { this->analyze(event, setup); }

void endLuminosityBlockProduce(edm::LuminosityBlock& lumi, edm::EventSetup const& setup) final {
dqmEndLuminosityBlock(lumi, setup);
Expand Down
2 changes: 1 addition & 1 deletion DQMServices/Demo/test/TestDQMEDAnalyzer.cc
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ class TestDQMOneEDAnalyzer : public DQMOneEDAnalyzer<> {
mymes_.fillall(iEvent.luminosityBlock(), iEvent.run(), myvalue_);
}

BookerFiller<DQMStore::IBooker, MonitorElement> mymes_;
BookerFiller<DQMStore::IBooker, MonitorElement, /* DOLUMI */ true> mymes_;
double myvalue_;
};
DEFINE_FWK_MODULE(TestDQMOneEDAnalyzer);
Expand Down
46 changes: 23 additions & 23 deletions DQMServices/Demo/test/runtests.sh
Original file line number Diff line number Diff line change
Expand Up @@ -12,21 +12,21 @@ fi
cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=alltypes.root numberEventsInRun=100 numberEventsInLuminosityBlock=20 nEvents=100
# actually we'd expect 99, but the MEs by legacy modules are booked with JOB scope and cannot be saved to DQMIO.
[ 66 = $(dqmiolistmes.py alltypes.root -r 1 | wc -l) ]
[ 55 = $(dqmiolistmes.py alltypes.root -r 1 -l 1 | wc -l) ]
[ 66 = $(dqmiolistmes.py alltypes.root -r 1 -l 1 | wc -l) ]
# this is deeply related to what the analyzers actually do.
# again, the legacy modules output is not saved.
# most run histos (4 modules * 9 types) fill on every event and should have 100 entries.
# the scalar MEs should have the last lumi number (5) (5 float + 5 int)
# testonefilllumi also should have 5 entries in the histograms (9 more)
# the "fillrun" module should have one entry in the histograms (9 total) and 0 in the scalars (2 total)
[ "0: 1, 0.0: 1, 1: 9, 100: 36, 5: 14, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 --summary)" ]
# per lumi we see 20 in most histograms (3*9), and the current lumi number in the scalars (5 modules * 2).
# per lumi we see 20 in most histograms (4*9), and the current lumi number in the scalars (6 modules * 2).
# the two fillumi modules should have one entry in each of the lumi histograms, (2*9 total)
[ "1: 23, 1.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 1 --summary)" ]
[ "1: 18, 2: 5, 2.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 2 --summary)" ]
[ "1: 18, 20: 27, 3: 5, 3.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 3 --summary)" ]
[ "1: 18, 20: 27, 4: 5, 4.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 4 --summary)" ]
[ "1: 18, 20: 27, 5: 5, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 5 --summary)" ]
[ "1: 24, 1.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 1 --summary)" ]
[ "1: 18, 2: 6, 2.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 2 --summary)" ]
[ "1: 18, 20: 36, 3: 6, 3.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 3 --summary)" ]
[ "1: 18, 20: 36, 4: 6, 4.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 4 --summary)" ]
[ "1: 18, 20: 36, 5: 6, 5.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 5 --summary)" ]
# just make sure we are not off by one
[ "" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py alltypes.root -r 1 -l 6 --summary)" ]

Expand All @@ -43,11 +43,11 @@ cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=nolegacy-cl.root numberEvent
for f in nolegacy.root nolegacy-mt.root nolegacy-cl.root
do
[ "0: 1, 0.0: 1, 1: 9, 1000: 27, 5: 3, 5.0: 3" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 --summary)" ]
[ "1: 1, 1.0: 1, 200: 9" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 1 --summary)" ]
[ "2: 1, 2.0: 1, 200: 9" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 2 --summary)" ]
[ "200: 9, 3: 1, 3.0: 1" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 3 --summary)" ]
[ "200: 9, 4: 1, 4.0: 1" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 4 --summary)" ]
[ "200: 9, 5: 1, 5.0: 1" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 5 --summary)" ]
[ "1: 2, 1.0: 2, 200: 18" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 1 --summary)" ]
[ "2: 2, 2.0: 2, 200: 18" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 2 --summary)" ]
[ "200: 18, 3: 2, 3.0: 2" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 3 --summary)" ]
[ "200: 18, 4: 2, 4.0: 2" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 4 --summary)" ]
[ "200: 18, 5: 2, 5.0: 2" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 5 --summary)" ]
[ "" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py $f -r 1 -l 6 --summary)" ]
done

Expand Down Expand Up @@ -89,9 +89,9 @@ cmp <($LOCAL_TEST_DIR/dqmiodumpentries.py multirun.root -r 1 -l 2) <($LOCAL_TEST

# 7. Try writing a TDirectory file.
cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py inputFiles=alltypes.root nomodules=True legacyoutput=True reScope=JOB
# this number is rather messy: we have 55 per-lumi objects (harvested), 66 per-run objects (no legacy output), one folder for each set of 11,
# this number is rather messy: we have 66 per-lumi objects (harvested), 66 per-run objects (no legacy output), one folder for each set of 11,
# plus some higher-level folders and the ProvInfo hierarchy created by the FileSaver.
[ 149 = $(rootlist DQM_V0001_R000000001__Harvesting__DQMTests__DQMIO.root | wc -l) ]
[ 161 = $(rootlist DQM_V0001_R000000001__Harvesting__DQMTests__DQMIO.root | wc -l) ]

cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py numberEventsInRun=100 numberEventsInLuminosityBlock=20 nEvents=100 legacyoutput=True
# we expect only the (per-job) legacy histograms here: 3*11 objects in 3 folders, plus 9 more for ProvInfo and higher-level folders.
Expand All @@ -102,14 +102,14 @@ cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py numberEventsInRun=300 numberEventsIn

cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py inputFiles=./run000001 outfile=pbdata.root nomodules=True protobufinput=True
[ 99 = $(dqmiolistmes.py pbdata.root -r 1 | wc -l) ]
[ 55 = $(dqmiolistmes.py pbdata.root -r 1 -l 1 | wc -l) ]
[ 66 = $(dqmiolistmes.py pbdata.root -r 1 -l 1 | wc -l) ]

# this will potentially mess up statistics (we should only fastHadd *within* a lumisection, not *across*), but should technically work.
fastHadd add -o streamDQMHistograms.pb run000001/run000001_ls*_streamDQMHistograms.pb
# the output format is different from the harvesting above, this is a not-DQM-formatted TDirectory file.
fastHadd convert -o streamDQMHistograms.root streamDQMHistograms.pb
# here we expect all (incl. legacy) MEs (99+55), plus folders (14 + 4 higher-level)
[ 172 = $(rootlist streamDQMHistograms.root | wc -l) ]
# here we expect all (incl. legacy) MEs (99+66), plus folders (14 + 4 higher-level)
[ 184 = $(rootlist streamDQMHistograms.root | wc -l) ]


# 9. Try writing online files. This is really TDirectory files, but written via a different module.
Expand All @@ -131,14 +131,14 @@ cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py inputFiles=part1.root inputFiles=pa
cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=metoedm.root numberEventsInRun=100 numberEventsInLuminosityBlock=20 nEvents=100 metoedmoutput=True
cmsRun $LOCAL_TEST_DIR/run_harvesters_cfg.py outfile=edmtome.root inputFiles=metoedm.root nomodules=True metoedminput=True
[ 66 = $(dqmiolistmes.py edmtome.root -r 1 | wc -l) ]
[ 55 = $(dqmiolistmes.py edmtome.root -r 1 -l 1 | wc -l) ]
[ 66 = $(dqmiolistmes.py edmtome.root -r 1 -l 1 | wc -l) ]
# again, no legacy module (run) output here due to JOB scope for legacy modules
[ "0: 1, 0.0: 1, 1: 9, 100: 36, 5: 14, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 --summary)" ]
[ "1: 23, 1.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 1 --summary)" ]
[ "1: 18, 2: 5, 2.0: 5, 20: 27" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 2 --summary)" ]
[ "1: 18, 20: 27, 3: 5, 3.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 3 --summary)" ]
[ "1: 18, 20: 27, 4: 5, 4.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 4 --summary)" ]
[ "1: 18, 20: 27, 5: 5, 5.0: 5" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 5 --summary)" ]
[ "1: 24, 1.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 1 --summary)" ]
[ "1: 18, 2: 6, 2.0: 6, 20: 36" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 2 --summary)" ]
[ "1: 18, 20: 36, 3: 6, 3.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 3 --summary)" ]
[ "1: 18, 20: 36, 4: 6, 4.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 4 --summary)" ]
[ "1: 18, 20: 36, 5: 6, 5.0: 6" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 5 --summary)" ]
[ "" = "$($LOCAL_TEST_DIR/dqmiodumpentries.py edmtome.root -r 1 -l 6 --summary)" ]

cmsRun $LOCAL_TEST_DIR/run_analyzers_cfg.py outfile=part1_metoedm.root metoedmoutput=True numberEventsInRun=300 numberEventsInLuminosityBlock=100 nEvents=50 # 1st half of 1st lumi
Expand Down

0 comments on commit bd3cfb0

Please sign in to comment.