
Create test_ReserveDMu_SD_cfg.py
Test file for the ReserveDMu Secondary Dataset
jordan-martins committed Jun 26, 2023
1 parent 61b2448 commit 8030cfb
Showing 1 changed file with 147 additions and 0 deletions: Configuration/Skimming/test/test_ReserveDMu_SD_cfg.py
# Auto generated configuration file
# using:
# Revision: 1.19
# Source: /local/reps/CMSSW/CMSSW/Configuration/Applications/python/ConfigBuilder.py,v
# with command line options: SKIM --filein file:/eos/cms/tier0/store/backfill/1/data/Tier0_REPLAY_2023/ParkingDoubleMuonLowMass0/RAW/v9121550/000/368/389/00000/fce19dd3-8384-45ce-b705-e4accb9c3ec9.root --fileout file:SD_ReservedDMu.root --nThreads 8 --no_exec --number 10 --python_filename SD_ReserveDMu_cfg.py --scenario pp --step SKIM:ReserveDMu --data --conditions 130X_dataRun3_Prompt_v3
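# (The configuration should be reproducible by re-running cmsDriver.py with the
# "SKIM" workflow and the command-line options listed above.)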
import FWCore.ParameterSet.Config as cms



process = cms.Process('SKIM')

# import of standard configurations
process.load('Configuration.StandardSequences.Services_cff')
process.load('SimGeneral.HepPDTESSource.pythiapdt_cfi')
process.load('FWCore.MessageService.MessageLogger_cfi')
process.load('Configuration.EventContent.EventContent_cff')
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load('Configuration.StandardSequences.Skims_cff')
process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')

process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(10),
output = cms.optional.untracked.allowed(cms.int32,cms.PSet)
)
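# Process only the first 10 events of the input file (set with --number 10)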

# Input source
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring('file:/eos/cms/tier0/store/backfill/1/data/Tier0_REPLAY_2023/ParkingDoubleMuonLowMass0/RAW/v9121550/000/368/389/00000/fce19dd3-8384-45ce-b705-e4accb9c3ec9.root'),
secondaryFileNames = cms.untracked.vstring()
)
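# The input (from --filein) is a single RAW file of the ParkingDoubleMuonLowMass0
# primary dataset, taken from a Tier-0 replay area on EOS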

process.options = cms.untracked.PSet(
FailPath = cms.untracked.vstring(),
IgnoreCompletely = cms.untracked.vstring(),
Rethrow = cms.untracked.vstring(),
SkipEvent = cms.untracked.vstring(),
accelerators = cms.untracked.vstring('*'),
allowUnscheduled = cms.obsolete.untracked.bool,
canDeleteEarly = cms.untracked.vstring(),
deleteNonConsumedUnscheduledModules = cms.untracked.bool(True),
dumpOptions = cms.untracked.bool(False),
emptyRunLumiMode = cms.obsolete.untracked.string,
eventSetup = cms.untracked.PSet(
forceNumberOfConcurrentIOVs = cms.untracked.PSet(
allowAnyLabel_=cms.required.untracked.uint32
),
numberOfConcurrentIOVs = cms.untracked.uint32(0)
),
fileMode = cms.untracked.string('FULLMERGE'),
forceEventSetupCacheClearOnNewRun = cms.untracked.bool(False),
holdsReferencesToDeleteEarly = cms.untracked.VPSet(),
makeTriggerResults = cms.obsolete.untracked.bool,
modulesToIgnoreForDeleteEarly = cms.untracked.vstring(),
numberOfConcurrentLuminosityBlocks = cms.untracked.uint32(0),
numberOfConcurrentRuns = cms.untracked.uint32(1),
numberOfStreams = cms.untracked.uint32(0),
numberOfThreads = cms.untracked.uint32(1),
printDependencies = cms.untracked.bool(False),
sizeOfStackForThreadsInKB = cms.optional.untracked.uint32,
throwIfIllegalParameter = cms.untracked.bool(True),
wantSummary = cms.untracked.bool(False)
)
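# The framework options above are the ConfigBuilder defaults; the thread and
# stream counts are overridden further below to honour --nThreads 8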

# Production Info
process.configurationMetadata = cms.untracked.PSet(
annotation = cms.untracked.string('SKIM nevts:10'),
name = cms.untracked.string('Applications'),
version = cms.untracked.string('$Revision: 1.19 $')
)

# Output definition

process.RECOSIMoutput = cms.OutputModule("PoolOutputModule",
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string(''),
filterName = cms.untracked.string('')
),
fileName = cms.untracked.string('file:SD_ReservedDMu.root'),
outputCommands = process.RECOSIMEventContent.outputCommands,
splitLevel = cms.untracked.int32(0)
)
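# Default ConfigBuilder output module: writes RECOSIM event content for every processed event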

# Additional output definition
process.SKIMStreamReserveDMu = cms.OutputModule("PoolOutputModule",
SelectEvents = cms.untracked.PSet(
SelectEvents = cms.vstring('ReserveDMuPath')
),
dataset = cms.untracked.PSet(
dataTier = cms.untracked.string('RAW'),
filterName = cms.untracked.string('ReserveDMu')
),
eventAutoFlushCompressedSize = cms.untracked.int32(5242880),
fileName = cms.untracked.string('ReserveDMu.root'),
outputCommands = cms.untracked.vstring(
'drop *',
'keep FEDRawDataCollection_rawDataCollector_*_*',
'keep FEDRawDataCollection_source_*_*',
'drop *_hlt*_*_*',
'keep GlobalObjectMapRecord_hltGtStage2ObjectMap_*_*',
'keep edmTriggerResults_*_*_*',
'keep triggerTriggerEvent_*_*_*',
'keep *_hltFEDSelectorL1_*_*',
'keep *_hltScoutingEgammaPacker_*_*',
'keep *_hltScoutingMuonPacker_*_*',
'keep *_hltScoutingPFPacker_*_*',
'keep *_hltScoutingPrimaryVertexPacker_*_*',
'keep *_hltScoutingTrackPacker_*_*',
'drop *_MEtoEDMConverter_*_*',
'drop *_*_*_SKIM'
)
)
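# The ReserveDMu stream keeps the RAW data plus the L1/HLT trigger and scouting
# products listed above, but only for events accepted by ReserveDMuPath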

# Other statements
from Configuration.AlCa.GlobalTag import GlobalTag
process.GlobalTag = GlobalTag(process.GlobalTag, '130X_dataRun3_Prompt_v3', '')
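# Conditions come from the global tag requested with --conditions
# (130X_dataRun3_Prompt_v3, a Run 3 prompt-reconstruction data tag)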

# Path and EndPath definitions
process.RECOSIMoutput_step = cms.EndPath(process.RECOSIMoutput)
process.SKIMStreamReserveDMuOutPath = cms.EndPath(process.SKIMStreamReserveDMu)

# Schedule definition
process.schedule = cms.Schedule(process.ReserveDMuPath,process.RECOSIMoutput_step,process.SKIMStreamReserveDMuOutPath)
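# ReserveDMuPath is the skim path requested with SKIM:ReserveDMu; it is provided
# by the Skims_cff configuration loaded above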
from PhysicsTools.PatAlgos.tools.helpers import associatePatAlgosToolsTask
associatePatAlgosToolsTask(process)

# Set up the framework for multithreaded running
process.options.numberOfThreads = 8
process.options.numberOfStreams = 0
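# numberOfStreams = 0 lets the framework choose one stream per thread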



# Customisation from command line

#Have logErrorHarvester wait for the same EDProducers to finish as those providing data for the OutputModule
from FWCore.Modules.logErrorHarvester_cff import customiseLogErrorHarvesterUsingOutputCommands
process = customiseLogErrorHarvesterUsingOutputCommands(process)

# Add early deletion of temporary data products to reduce peak memory need
from Configuration.StandardSequences.earlyDeleteSettings_cff import customiseEarlyDelete
process = customiseEarlyDelete(process)
# End adding early deletion
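
# To run this test (assuming a CMSSW environment and EOS access to the input file):
#   cmsRun Configuration/Skimming/test/test_ReserveDMu_SD_cfg.py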
