From ef263ea40efc55b7d2f133502668249840a8668f Mon Sep 17 00:00:00 2001 From: Marino Missiroli Date: Mon, 7 Nov 2022 15:21:44 +0100 Subject: [PATCH 1/2] removed ConfDB queries from HLT-Validation tests --- .../scripts/hltCheckPrescaleModules | 13 +- .../Configuration/scripts/hltIntegrationTests | 287 ++++++++++++------ HLTrigger/Configuration/scripts/hltListPaths | 145 +++++---- HLTrigger/Configuration/test/cmsDriver.csh | 6 +- HLTrigger/Configuration/test/runAll.csh | 3 +- .../Configuration/test/runIntegration.csh | 34 +-- HLTrigger/Configuration/test/runOne.csh | 3 +- 7 files changed, 300 insertions(+), 191 deletions(-) diff --git a/HLTrigger/Configuration/scripts/hltCheckPrescaleModules b/HLTrigger/Configuration/scripts/hltCheckPrescaleModules index 210c53021aca3..33eae394ffafb 100755 --- a/HLTrigger/Configuration/scripts/hltCheckPrescaleModules +++ b/HLTrigger/Configuration/scripts/hltCheckPrescaleModules @@ -1,6 +1,4 @@ -#! /usr/bin/env python3 - -from __future__ import print_function +#!/usr/bin/env python3 import sys import types import re @@ -17,7 +15,12 @@ if len(sys.argv) != 2: sys.exit(1) # whitelist paths exempt from validation -whitelist = ('HLTriggerFirstPath', 'HLTriggerFinalPath') +whitelist = [ + 'HLTriggerFirstPath', + 'HLTriggerFinalPath', + 'Status_OnCPU', + 'Status_OnGPU', +] # load the menu and get the "process" object menu = types.ModuleType('menu') @@ -28,7 +31,7 @@ process = menu.process # get all paths paths = process.paths_() -# keep track of precaler names, and of duplicates +# keep track of prescaler names, and of duplicates prescalerNames = set() duplicateNames = set() diff --git a/HLTrigger/Configuration/scripts/hltIntegrationTests b/HLTrigger/Configuration/scripts/hltIntegrationTests index 81fbcd58ed31e..36058167fa0c9 100755 --- a/HLTrigger/Configuration/scripts/hltIntegrationTests +++ b/HLTrigger/Configuration/scripts/hltIntegrationTests @@ -1,4 +1,4 @@ -#! 
/bin/bash +#!/bin/bash # load common HLT functions if [ -f "$CMSSW_BASE/src/HLTrigger/Configuration/common/utils.sh" ]; then @@ -17,7 +17,6 @@ function err() { echo -e "$@" 1>&2 } - NAME=$(basename $0) HELP="Run the integration tests over all paths in a given HLT menu. @@ -28,66 +27,76 @@ Usage: [--streams STREAMS] [--threads THREADS] [-x|--extra OPTIONS] [--mc] [ [-n|--size EVENTS] [-k|--skip EVENTS] | [-e|--events EVENTS] ] MENU - MENU is the HLT menu to test. - - -s | --setup SETUP Use the Services and EventSetup modules from a different menu - (usefull when testing a ConfDB area with only some new paths). - Note it is an error to specify the converter/db here, - it uses the same as set by the HLT menu - -d | --dir WORKDIR Create all files and run all tests inside WORKDIR (defauls: ./hltintegration) - -i | --input INPUT Use the specified RAW data file as input - -n | --size EVENTS Run on EVENTS events (-1 for all, default is 100) - -k | --skip EVENTS Skip the first EVENTS (default is 0) - -e | --events EVENTS Run on a comma-separated list of EVENTS, a VEventRange - -j | --jobs JOBS Run JOBS single trigger jobs in parallel (default 4) - --streams STREAMS Run with STREAMS parallel streams (i.e. events) (default 0 means as many streams as threads) - --threads THREADS Run with THREADS threads when running the whole HLT (default 4) - -x | --extra OPTIONS Pass OPTIONS directly to hltGetConfiguration - --mc Run over MC instead of data (the default) - --dbproxy Use a socks proxy to connect to the HLT database - --dbproxyhost PROXYHOST Host of the socks proxy (default: "localhost") - --dbproxyport PROXYPORT Port of the socks proxy (default: 8080) - -h | --help Print this help message and exit. - - - The supported formats for both menu specifications are: + MENU is the HLT menu to test (format: a local cmsRun cfg file, or the name of a ConfDB configuration). 
+ + -s | --setup SETUP HLT menu (format: ConfDB configuration) used for Services and EventSetup modules + (useful when testing a ConfDB config that only contains Paths). + Note it is an error to specify the converter/db here, + it uses the same as set by the HLT menu + -d | --dir WORKDIR Create all files and run all tests inside WORKDIR (defauls: ./hltintegration) + -i | --input INPUT Use the specified EDM file as input (data tier: RAW) + -n | --size EVENTS Run on EVENTS events (-1 for all, default is 100) + -k | --skip EVENTS Skip the first EVENTS (default is 0) + -e | --events EVENTS Run on a comma-separated list of EVENTS, a VEventRange + -j | --jobs JOBS Run JOBS single trigger jobs in parallel (default 4) + --streams STREAMS Run with STREAMS parallel streams (i.e. events) (default 0 means as many streams as threads) + --threads THREADS Run with THREADS threads when running the whole HLT (default 4) + -a | --accelerator ACCELERATOR Keyword to choose allowed accelerators (examples: \"*\", \"cpu\", \"gpu-nvidia\") + -x | --extra OPTIONS If the HLT menu is a local cmsRun cfg file, OPTIONS is used as + additional arguments to cmsRun (i.e. \"cmsRun hlt.py [OPTIONS]\") + If the HLT menu is the name of a ConfDB configuration, OPTIONS is used as + additional arguments to hltGetConfiguration (i.e. \"hltGetConfiguration [MENU] [..] [OPTIONS]\") + --mc Run over MC instead of data (the default) + --dbproxy Use a socks proxy to connect to the ConfDB database + --dbproxyhost PROXYHOST Host of the socks proxy (default: \"localhost\") + --dbproxyport PROXYPORT Port of the socks proxy (default: 8080) + -h | --help Print this help message and exit. + + The HLT menu used for the integration tests (MENU) can be specified + as either (1) a cmsRun cfg file, or (2) the name of a ConfDB configuration. + MENU is identified as a cmsRun cfg file if it is an existing regular file. 
+ For ConfDB configurations, the supported formats are - /path/to/configuration[/Vn] - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn] - run:runnumber - The possible converters are \"v1\", \"v2\", and \"v3\" (default). - The possible databases are \"run3\" (default, used for offline run3 development), \"run2\" (previously used for run2 development), \"online\" (used to - extract online menus within Point 5) and \"adg\" (used to extract the online menus outside Point 5). - If no menu version is specified, the latest one is automatically used. + Allowed converters are \"v1\", \"v2\", and \"v3\" (default). + Allowed databases are + - \"run3\" (default, used for offline Run-3 development), + - \"run2\" (previously used for Run-2 development), + - \"online\" (used to extract online menus within Point 5) and + - \"adg\" (used to extract the online menus outside Point 5). + Other converters and databases exist, but they are for expert/special use only. If \"run:\" is used instead, the HLT menu used for the given run number is looked up and used. - Note other converters and databases exist but they are for expert/special use only. + If no menu version is specified, the latest one is automatically used. - It's possible to pass arbitrary command line options to hltGetConfiguration, using \"-x --option\". + It is possible to pass arbitrary command-line options to hltGetConfiguration, using \"-x --option\". To pass multiple options, enclose them in quotes, or use \"-x\" more than once. Note: '--events' is not supported together with '--size' or '--skip'. -Exmples: +Examples: - $NAME /dev/CMSSW_4_2_0/GRun + $NAME /dev/CMSSW_X_Y_Z/GRun will test the latest version of the GRun menu. - $NAME /dev/CMSSW_4_2_0/GRun -x --l1-emulator + $NAME /dev/CMSSW_X_Y_Z/GRun -x --l1-emulator - will test the latest version of the GRun, running the L1 emulator. + will test the latest version of the GRun running the L1-Trigger emulator. 
$NAME /users/fwyzard/physics/HLT/V6 -s adg:/cdaq/physics/Run2011/1e33/v1.3/HLT/V6 - will test the paths from /users/fwyzard/physics/HLT/V6, using the environment from the - online menu \"1e33\" v1.3 V6 + will test the paths from /users/fwyzard/physics/HLT/V6 + using the environment from the online menu \"1e33\" v1.3 V6 " # parse command line argument and options -OPTS=$(getopt -n "$NAME" -o "s:d:i:j:n:k:e:x:h" -l "setup:,dir:,input:,jobs:,size:,skip:,streams:,threads:,events:,mc,extra:,help,dbproxy,dbproxyhost:,dbproxyport:" -- "$@") +OPTS=$(getopt -n "$NAME" -o "s:d:i:j:n:k:e:a:x:h" \ + -l "setup:,dir:,input:,jobs:,size:,skip:,streams:,threads:,accelerator:,events:,mc,extra:,help,dbproxy,dbproxyhost:,dbproxyport:" -- "$@") # invalid options if [ $? != 0 ]; then @@ -109,10 +118,11 @@ EVENTS="" JOBS=4 THREADS=4 STREAMS=0 +ACCELERATOR="cpu" WORKDIR="hltintegration" EXTRA="" -DATA="--data" -HLTLISTPATHPROXY="" +DATA="" +DBPROXYOPTS="" SELECTION="" @@ -132,7 +142,7 @@ while true; do shift 2 ;; "-i" | "--input" ) - INPUT="--input $2" + INPUT="$2" shift 2 ;; "-n" | "--size" ) @@ -190,6 +200,10 @@ while true; do THREADS=$2 shift 2 ;; + "-a" | "--accelerator" ) + ACCELERATOR="$2" + shift 2 + ;; "-x" | "--extra" ) EXTRA="$EXTRA $2" shift 2 @@ -199,18 +213,15 @@ while true; do shift 1 ;; "--dbproxy" ) - HLTLISTPATHPROXY="$HLTLISTPATHPROXY --dbproxy" - EXTRA="$EXTRA --dbproxy" + DBPROXYOPTS="${DBPROXYOPTS} --dbproxy" shift 1 ;; "--dbproxyhost" ) - HLTLISTPATHPROXY="$HLTLISTPATHPROXY --dbproxyhost $2" - EXTRA="$EXTRA --dbproxyhost $2" + DBPROXYOPTS="${DBPROXYOPTS} --dbproxyhost $2" shift 2 ;; "--dbproxyport" ) - HLTLISTPATHPROXY="$HLTLISTPATHPROXY --dbproxyport $2" - EXTRA="$EXTRA --dbproxyport $2" + DBPROXYOPTS="${DBPROXYOPTS} --dbproxyport $2" shift 2 ;; "--" ) @@ -221,6 +232,10 @@ while true; do esac done +# remove spurious whitespaces and tabs from EXTRA and DBPROXYOPTS +EXTRA=$(echo "${EXTRA}" | xargs) +DBPROXYOPTS=$(echo "${DBPROXYOPTS}" | xargs) + # parse required 
argument if (( $# == 0 )); then err "$NAME error: missing argument." @@ -234,32 +249,92 @@ else MENU="$1" fi +log "----------------------------" +log "Starting hltIntegrationTests" +log "----------------------------" -# run the tests -rm -rf "$WORKDIR" -mkdir "$WORKDIR" -cd "$WORKDIR" +# create empty output directory +rm -rf "${WORKDIR}" +mkdir -p "${WORKDIR}" -# find the list of all trigger paths -TRIGGERS=$(hltListPaths "$MENU" $HLTLISTPATHPROXY -p --no-dep --exclude "^HLTriggerFinalPath$") -echo "${TRIGGERS[@]}" > paths.txt +# if MENU = local cfg file, copy it to output directory +# (that copy will be customised and used for the integration tests) +if [ -f "${MENU}" ]; then + cp "${MENU}" "${WORKDIR}"/hlt.py +fi + +# move to, and run tests from, the output directory +cd "${WORKDIR}" + +if [ -f hlt.py ]; then + + # customise cfg file + log "Creating customised version of input cfg file (${MENU})" + + # warn that ${DATA} will be ignored + [ ! "${DATA}" ] || printf "\n%s" "WARNING -- variable \${DATA}=\"${DATA}\" will be ignored !" 
+ + cat <<@EOF >> hlt.py + +# change name of cms.Process +process.setName_("TEST$(date -u +'%Y%m%d%H%M%S')") + +# disable HLT prescales +if hasattr(process, 'PrescaleService'): + del process.PrescaleService + +# set max number of input events +process.maxEvents.input = ${SIZE} +@EOF + + if [ "x${INPUT}" != "x" ]; then + cat <<@EOF >> hlt.py + +# set input EDM file +if hasattr(process, 'source') and hasattr(process.source, 'fileNames'): + process.source.fileNames = [ + "${INPUT}", + ] +else: + raise RuntimeError("ERROR -- unsupported cfg file: process.source.fileNames does not exist") +@EOF + fi + + # set MENU to name of ConfDB configuration (if any) + MENU=$(python3 -c """ +import sys +# redefine sys.argv (necessary to import +# cfg file if the latter uses VarParsing) +sys.argv = ['python3', 'hlt.py'] +from hlt import cms,process +try: + print(process.HLTConfigVersion.tableName.value()) +except: + print('') +""") + + # show name of ConfDB configuration (if available) + [ ! "${MENU}" ] || log "ConfDB configuration: ${MENU}" -# print some info -if [ "$SELECTION" == "complex" ]; then - log "Will run $(echo $TRIGGERS | wc -w) HLT paths over $(echo $EVENTS | tr ',' '\n' | wc -l) events, with $JOBS jobs in parallel" -elif [ "$SIZE" == "-1" ]; then - log "Will run $(echo $TRIGGERS | wc -w) HLT paths over all events, with $JOBS jobs in parallel" else - log "Will run $(echo $TRIGGERS | wc -w) HLT paths over $SIZE events, with $JOBS jobs in parallel" + # if ${DATA} is empty, set it to "--data" + [ "${DATA}" ] || DATA="--data" + # download HLT menu from ConfDB + HLTGETCMD="hltGetConfiguration ${MENU} \ + --process \"TEST$(date -u +'%Y%m%d%H%M%S')\" + --full --offline ${DATA} --unprescale \ + --max-events ${SIZE} ${EXTRA} ${DBPROXYOPTS} --input ${INPUT}" + HLTGETCMD=$(echo "${HLTGETCMD}" | xargs) + log "Creating HLT menu from ConfDB configuration:\n> ${HLTGETCMD}" + ${HLTGETCMD} > hlt.py + # unset EXTRA environment variable (used later in cmsRun jobs) + unset HLTGETCMD EXTRA 
fi -# create all dumps -log "Extracting full menu dump" -hltGetConfiguration "$MENU" --full --offline $DATA $INPUT --unprescale --process "TEST$(date -u +'%Y%m%d%H%M%S')" --max-events $SIZE $EXTRA > hlt.py - # if missing, add a simplified HLTriggerFinalPath if ! grep -q HLTriggerFinalPath hlt.py; then cat >> hlt.py << @EOF + # add (simplified) HLTriggerFinalPath if missing process.hltTriggerSummaryAOD = cms.EDProducer( "TriggerSummaryProducerAOD", processName = cms.string( "@" ) @@ -273,13 +348,16 @@ process.HLTriggerFinalPath = cms.Path( process.hltTriggerSummaryAOD + process.hl fi # select which events to run on -if [ "$SELECTION" == "complex" ]; then +if [ "${SELECTION}" == "complex" ]; then cat >> hlt.py << @EOF + # event selection customised by hltIntegrationTests process.source.eventsToProcess = cms.untracked.VEventRange( '$(echo $EVENTS | sed -e"s/,/','/g")' ) @EOF + elif (( $SKIP > 0 )); then cat >> hlt.py << @EOF + # event selection customised by hltIntegrationTests process.source.skipEvents = cms.untracked.uint32( $SKIP ) @EOF @@ -287,17 +365,29 @@ fi # set the number of threads and streams for the whole hlt job cat >> hlt.py << @EOF + # configure multithreading, and allocate 10 MB of stack space per thread -process.options.numberOfThreads = cms.untracked.uint32( $THREADS ) -process.options.numberOfStreams = cms.untracked.uint32( $STREAMS ) -process.options.sizeOfStackForThreadsInKB = cms.untracked.uint32( 10*1024 ) -process.options.accelerators = cms.untracked.vstring( 'cpu' ) +process.options.numberOfThreads = $THREADS +process.options.numberOfStreams = $STREAMS +process.options.sizeOfStackForThreadsInKB = 10*1024 +# set allowed accelerators +process.options.accelerators = [ "$ACCELERATOR" ] process.hltTriggerSummaryAOD.throw = cms.bool( True ) @EOF -# dump the menu name, and its release template -log "HLT menu: $(head -n1 hlt.py | cut -c 3-)" +# find the list of all trigger paths +TRIGGERS=$(hltListPaths hlt.py -p --no-dep --exclude 
"^HLTriggerFinalPath$") +echo "${TRIGGERS[@]}" > paths.txt + +# print some info +if [ "$SELECTION" == "complex" ]; then + log "Will run $(echo $TRIGGERS | wc -w) HLT paths over $(echo $EVENTS | tr ',' '\n' | wc -l) events, with $JOBS jobs in parallel" +elif [ "$SIZE" == "-1" ]; then + log "Will run $(echo $TRIGGERS | wc -w) HLT paths over all events, with $JOBS jobs in parallel" +else + log "Will run $(echo $TRIGGERS | wc -w) HLT paths over $SIZE events, with $JOBS jobs in parallel" +fi # check the prescale modules hltCheckPrescaleModules -w hlt.py @@ -305,14 +395,16 @@ hltCheckPrescaleModules -w hlt.py # check for multi-threading edmCheckMultithreading hlt.py | grep legacy -log "Preparing single-path configurations" +log "Preparing single-trigger configurations" for TRIGGER in $TRIGGERS; do - cat > "$TRIGGER".py << @EOF + cat > "${TRIGGER}".py << @EOF from hlt import * process.hltOutput = cms.OutputModule( "PoolOutputModule", - fileName = cms.untracked.string( "$TRIGGER.root" ), + fileName = cms.untracked.string( "${TRIGGER}.root" ), fastCloning = cms.untracked.bool( False ), + compressionAlgorithm = cms.untracked.string( "ZLIB" ), + compressionLevel = cms.untracked.int32( 1 ), outputCommands = cms.untracked.vstring( 'drop *', 'keep edmTriggerResults_*_*_*', @@ -321,22 +413,27 @@ process.hltOutput = cms.OutputModule( "PoolOutputModule", process.Output = cms.EndPath( process.hltOutput ) -process.schedule = cms.Schedule( process.$TRIGGER, process.HLTriggerFinalPath, process.Output ) +process.schedule = cms.Schedule( process.${TRIGGER}, process.HLTriggerFinalPath, process.Output ) process.hltTriggerSummaryAOD.throw = cms.bool( True ) @EOF done # if a separate setup is requested, create the setup_cff.py file and patch all dumps to use it -if [ "$SETUP" ]; then - log "Extracting setup_cff dump" - #we use $MENU not $SETUP here as we force the same DB / converter as the main menu - #this is the hltGetConfiguration behaviour and would be confusing if you had to - #specify 
converter/db on the setup menu on hltIntegration tests but not on hltGetConfiguration - read SETUP_Vx SETUP_DB _ <<< $(parse_HLT_menu "$MENU") - - hltConfigFromDB --$SETUP_Vx --$SETUP_DB $HLTLISTPATHPROXY --cff --configName "$SETUP" --nopaths --services -FUShmDQMOutputService,-PrescaleService,-EvFDaqDirector,-FastMonitoringService > setup_cff.py - sed -i -e's/process = cms.Process(.*)/&\nprocess.load("setup_cff")/' hlt.py $(for TRIGGER in $TRIGGERS; do echo "$TRIGGER".py; done) +if [ "${SETUP}" ]; then + + if [ "${MENU}" ]; then + # we use ${MENU} here, not ${SETUP}, as we force the same DB / converter as the main menu + # this is the hltGetConfiguration behaviour and would be confusing if you had to + # specify converter/db on the setup menu on hltIntegrationTests but not on hltGetConfiguration + read SETUP_Vx SETUP_DB _ <<< $(parse_HLT_menu "${MENU}") + log "Creating setup_cff from ConfDB configuration: ${SETUP_Vx}/${SETUP_DB}:${SETUP}" + hltConfigFromDB --${SETUP_Vx} --${SETUP_DB} ${DBPROXYOPTS} --cff --configName "$SETUP" \ + --nopaths --services -FUShmDQMOutputService,-PrescaleService,-EvFDaqDirector,-FastMonitoringService > setup_cff.py + sed -i -e's/process = cms.Process(.*)/&\nprocess.load("setup_cff")/' hlt.py $(for TRIGGER in $TRIGGERS; do echo "${TRIGGER}".py; done) + else + printf "%s\n" "WARNING -- \"--setup ${SETUP}\" will be ignored (failed to deduce name of HLT menu from hlt.py)" + fi fi # run all HLT dumps @@ -357,14 +454,13 @@ hlt: hlt.done hlt.done: hlt.py @echo -e "\tfull menu dump" - @cmsRun hlt.py >& hlt.log < /dev/zero && touch hlt.done + @cmsRun hlt.py ${EXTRA} >& hlt.log < /dev/zero && touch hlt.done \$(TRIGGERS): %: %.done \$(DONE): %.done: %.py @echo -e "\t\$*" - @cmsRun \$*.py >& \$*.log < /dev/zero && touch \$*.done - + @cmsRun \$*.py ${EXTRA} >& \$*.log < /dev/zero && touch \$*.done @EOF log "Running..." @@ -372,19 +468,22 @@ log "Running..." 
# otherwise, run it in parallel with the single-trigger jobs if ((THREADS > 0)); then make -f .makefile hlt - make -f .makefile -j$JOBS -k $TRIGGERS + make -f .makefile -j${JOBS} -k ${TRIGGERS} else - make -f .makefile -j$JOBS -k + make -f .makefile -j${JOBS} -k fi - # compare HLT results log "Comparing the results of running each path by itself with those from the full menu" hltCompareResults STATUS=$? +log "--------------------------" +if [ "${STATUS}" -eq 0 ]; then + log "hltIntegrationTests PASSED" +else + log "hltIntegrationTests FAILED" +fi +log "--------------------------" log "exit status: $STATUS" -log "done" - -# done cd .. -exit $STATUS +exit ${STATUS} diff --git a/HLTrigger/Configuration/scripts/hltListPaths b/HLTrigger/Configuration/scripts/hltListPaths index d118e7a760220..525b1a60aa081 100755 --- a/HLTrigger/Configuration/scripts/hltListPaths +++ b/HLTrigger/Configuration/scripts/hltListPaths @@ -9,16 +9,21 @@ import HLTrigger.Configuration.Tools.options as options from HLTrigger.Configuration.extend_argparse import * def getPathList(config): - # cmd-line args to select HLT configuration - if config.menu.run: - configline = f'--runNumber {config.menu.run}' - else: - configline = f'--{config.menu.database} --{config.menu.version} --configName {config.menu.name}' - # cmd to download HLT configuration - cmdline = f'hltConfigFromDB {configline} --noedsources --noes --noservices' - if config.proxy: - cmdline += f' --dbproxy --dbproxyhost {config.proxy_host} --dbproxyport {config.proxy_port}' + if isinstance(config.menu, options.ConnectionHLTMenu): + # cmd to download HLT configuration + cmdline = 'hltConfigFromDB' + if config.menu.run: + cmdline += f' --runNumber {config.menu.run}' + else: + cmdline += f' --{config.menu.database} --{config.menu.version} --configName {config.menu.name}' + cmdline += ' --noedsources --noes --noservices' + if config.proxy: + cmdline += f' --dbproxy --dbproxyhost {config.proxy_host} --dbproxyport {config.proxy_port}' + + 
else: + # use edmConfigDump to ensure the config can be executed + cmdline = f'edmConfigDump {config.menu}' # load HLT configuration try: @@ -31,74 +36,88 @@ def getPathList(config): if not isinstance(process, cms.Process): raise Exception(f'query did not return a valid HLT menu:\n query="{cmdline}"') + usePaths, useEndPaths, useFinalPaths = False, False, False + # Paths only if config.selection == 'paths': - pathDict = process.paths_() + usePaths = True # EndPaths only elif config.selection == 'endpaths': - pathDict = process.endpaths_() + useEndPaths = True # FinalPaths only elif config.selection == 'finalpaths': - pathDict = process.finalpaths_() + useFinalPaths = True # Paths, EndPaths, and FinalPaths ('all') + elif config.selection == 'all': + usePaths, useEndPaths, useFinalPaths = True, True, True + + # invalid value else: - pathDict = zip(process.paths_(), process.endpaths_(), process.finalpaths_()) + raise RuntimeError(f'ERROR: invalid value for option "--selection" (must be "paths", "endpaths", "finalpaths", or "all"): {config.selection}') ret = [] - for pathName in pathDict: - - # skip if name of the path matches any of - # the regular expressions listed in "--exclude" - skipPath = False - for excludeRegExpr in config.excludeRegExprs: - if bool(re.search(excludeRegExpr, pathName)): - skipPath = True - break - if skipPath: + for pathDict in [ + process.paths_() if usePaths else None, + process.endpaths_() if useEndPaths else None, + process.finalpaths_() if useFinalPaths else None, + ]: + if pathDict == None: continue - if config.no_dependent_paths: - # do not include "dependent paths", i.e. 
paths that depend on the result of other paths in the same job - # the current criterion to identify a path as "dependent" is that - # (1) the path contains a "TriggerResultsFilter" module and - # (2) the latter module uses the TriggerResults of the current process, and has a non-empty list of "triggerConditions" - path = pathDict[pathName] - pathIsDependent = False - isPath = isinstance(path, cms.Path) - - for moduleName in path.moduleNames(): - module = getattr(process, moduleName) - if module.type_() != 'TriggerResultsFilter' or (hasattr(module, 'triggerConditions') and len(module.triggerConditions) == 0): - continue - - usesPathStatus = hasattr(module, 'usePathStatus') and module.usePathStatus - usesTrigResOfCurrentProcess = hasattr(module, 'hltResults') and module.hltResults.getProcessName() in [process.name_(), '@currentProcess']+['']*(not isPath) - - if isPath: - if usesPathStatus: - pathIsDependent = True - elif usesTrigResOfCurrentProcess: - # The Path contains a TriggerResultsFilter with usePathStatus=False and forcing access to the TriggerResults of the current Process. - # - This is not supported, and should result in a runtime error when using cmsRun. - # - Here, a warning is returned to stderr, and the Path is omitted from the output list. - warning_msg = 'WARNING -- the cms.Path named "'+pathName+'" will be ignored.' - warning_msg += '\n'+' '*12+'- It contains a "TriggerResultsFilter" attempting to access the "TriggerResults" of the current Process (module: "'+moduleName+'").' - warning_msg += '\n'+' '*12+'- This is not supported, and should result in a runtime error when using cmsRun. Please check again the HLT configuration.' 
- print(warning_msg, file=sys.stderr) - pathIsDependent = True - else: - pathIsDependent = usesPathStatus or usesTrigResOfCurrentProcess + for pathName in pathDict: - if pathIsDependent: + # skip if name of the path matches any of + # the regular expressions listed in "--exclude" + skipPath = False + for excludeRegExpr in config.excludeRegExprs: + if bool(re.search(excludeRegExpr, pathName)): + skipPath = True break - - if pathIsDependent: + if skipPath: continue - ret.append(pathName) + if config.no_dependent_paths: + # do not include "dependent paths", i.e. paths that depend on the result of other paths in the same job + # the current criterion to identify a path as "dependent" is that + # (1) the path contains a "TriggerResultsFilter" module and + # (2) the latter module uses the TriggerResults of the current process, and has a non-empty list of "triggerConditions" + path = pathDict[pathName] + pathIsDependent = False + isPath = isinstance(path, cms.Path) + + for moduleName in path.moduleNames(): + module = getattr(process, moduleName) + if module.type_() != 'TriggerResultsFilter' or (hasattr(module, 'triggerConditions') and len(module.triggerConditions) == 0): + continue + + usesPathStatus = hasattr(module, 'usePathStatus') and module.usePathStatus + usesTrigResOfCurrentProcess = hasattr(module, 'hltResults') and module.hltResults.getProcessName() in [process.name_(), '@currentProcess']+['']*(not isPath) + + if isPath: + if usesPathStatus: + pathIsDependent = True + elif usesTrigResOfCurrentProcess: + # The Path contains a TriggerResultsFilter with usePathStatus=False and forcing access to the TriggerResults of the current Process. + # - This is not supported, and should result in a runtime error when using cmsRun. + # - Here, a warning is returned to stderr, and the Path is omitted from the output list. + warning_msg = 'WARNING -- the cms.Path named "'+pathName+'" will be ignored.' 
+ warning_msg += '\n'+' '*12+'- It contains a "TriggerResultsFilter" attempting to access the "TriggerResults" of the current Process (module: "'+moduleName+'").' + warning_msg += '\n'+' '*12+'- This is not supported, and should result in a runtime error when using cmsRun. Please check again the HLT configuration.' + print(warning_msg, file=sys.stderr) + pathIsDependent = True + else: + pathIsDependent = usesPathStatus or usesTrigResOfCurrentProcess + + if pathIsDependent: + break + + if pathIsDependent: + continue + + ret.append(pathName) return ret @@ -113,8 +132,11 @@ formatter = FixedWidthFormatter( HelpFormatterRespectNewlines, width = textwidth # read defaults defaults = options.HLTProcessOptions() +def hltMenu(name): + return name if os.path.isfile(name) else options.ConnectionHLTMenu(name) + parser = argparse.ArgumentParser( - description = 'List all the Paths, EndPaths and FinalPaths of an HLT configuration in the ConfDB database.', + description = 'List all the Paths, EndPaths and FinalPaths of an HLT configuration.', argument_default = argparse.SUPPRESS, formatter_class = formatter, add_help = False ) @@ -122,9 +144,9 @@ parser = argparse.ArgumentParser( # required argument parser.add_argument('menu', action = 'store', - type = options.ConnectionHLTMenu, + type = hltMenu, metavar = 'MENU', - help = 'HLT menu to dump from the database. 
Supported formats are:\n - /path/to/configuration[/Vn]\n - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn]\n - run:runnumber\nThe possible converters are "v1", "v2, and "v3" (default).\nThe possible databases are "run3" (default, used for offline development), "run2" (used for accessing run2 offline development menus), "online" (used to extract online menus within Point 5) and "adg" (used to extract the online menus outside Point 5).\nIf no menu version is specified, the latest one is automatically used.\nIf "run:" is used instead, the HLT menu used for the given run number is looked up and used.\nNote other converters and databases exist as options but they are only for expert/special use.' ) + help = 'HLT menu (can be a local cmsRun configuration file, or the name of a configuration in the ConfDB database). For ConfDB configurations, supported formats are:\n - /path/to/configuration[/Vn]\n - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn]\n - run:runnumber\nThe possible converters are "v1", "v2, and "v3" (default).\nThe possible databases are "run3" (default, used for offline development), "run2" (used for accessing run2 offline development menus), "online" (used to extract online menus within Point 5) and "adg" (used to extract the online menus outside Point 5).\nIf no menu version is specified, the latest one is automatically used.\nIf "run:" is used instead, the HLT menu used for the given run number is looked up and used.\nNote other converters and databases exist as options but they are only for expert/special use.' 
) # options parser.add_argument('--dbproxy', @@ -187,6 +209,7 @@ parser.add_argument('-h', '--help', # parse command line arguments and options config = parser.parse_args() + paths = getPathList(config) for path in paths: print(path) diff --git a/HLTrigger/Configuration/test/cmsDriver.csh b/HLTrigger/Configuration/test/cmsDriver.csh index 2d1c59c143c58..0f238788f1e58 100755 --- a/HLTrigger/Configuration/test/cmsDriver.csh +++ b/HLTrigger/Configuration/test/cmsDriver.csh @@ -1,8 +1,6 @@ -#! /bin/tcsh +#!/bin/tcsh -cmsenv - -rehash +eval `scram runtime -csh` # # old files in castor: rfdir /castor/cern.ch/cms/store/... diff --git a/HLTrigger/Configuration/test/runAll.csh b/HLTrigger/Configuration/test/runAll.csh index 3876906bffa19..ca412a71fd446 100755 --- a/HLTrigger/Configuration/test/runAll.csh +++ b/HLTrigger/Configuration/test/runAll.csh @@ -1,7 +1,6 @@ #!/bin/tcsh -cmsenv -rehash +eval `scram runtime -csh` echo date +%F\ %a\ %T diff --git a/HLTrigger/Configuration/test/runIntegration.csh b/HLTrigger/Configuration/test/runIntegration.csh index 1bed85c84a9e9..ad2fd0d7fbbe1 100755 --- a/HLTrigger/Configuration/test/runIntegration.csh +++ b/HLTrigger/Configuration/test/runIntegration.csh @@ -1,7 +1,6 @@ #!/bin/tcsh -cmsenv -rehash +eval `scram runtime -csh` echo date +%F\ %a\ %T @@ -28,44 +27,34 @@ endif foreach gtag ( $1 ) if ( $gtag == DATA ) then - set flags = "" - set infix = hlt + set extraflags = "-x realData=1 -x globalTag=@" else - set flags = --mc - set infix = mc + set extraflags = "-x realData=0 -x globalTag=@" endif foreach table ( $tables ) echo set name = HLT_Integration_${table}_${gtag} - touch ${name} + touch ${name} rm -rf ${name}* - set config = `grep tableName OnLine_HLT_${table}.py | cut -f2 -d "'"` - if ($table == Fake) then - set basegt = auto:run1_${infix}_${table} - else if ( ($table == Fake1) || ($table == Fake2) || ($table == 2018) ) then - set basegt = auto:run2_${infix}_${table} - else - set basegt = auto:run3_${infix}_${table} - endif - 
set autogt = "--globaltag=${basegt}" set infile = file:../RelVal_Raw_${table}_${gtag}.root -# -x "--l1-emulator" -x "--l1 L1GtTriggerMenu_L1Menu_Collisions2012_v1_mc" + set hltIntegTestCmd = "hltIntegrationTests OnLine_HLT_${table}.py ${extraflags} -d ${name} -i ${infile} -n 100 -j 4 -a cpu" - echo "`date +%T` hltIntegrationTests $config -d $name -i $infile -n 100 -j 4 $flags -x ${autogt} -x --type=$table >& $name.log" - time hltIntegrationTests $config -d $name -i $infile -n 100 -j 4 $flags -x ${autogt} -x --type=$table >& $name.log + echo "`date +%T` ${hltIntegTestCmd} >& ${name}.log" + time ${hltIntegTestCmd} >& ${name}.log set STATUS = $? + echo "`date +%T` exit status: $STATUS" - rm -f ${name}/*.root + rm -f ${name}/*.root if ($STATUS != 0) then touch ${name}/issues.txt foreach line ("`cat ${name}/issues.txt`") - cp ${name}/${line}.py ${name}_${line}.py - cp ${name}/${line}.log ${name}_${line}.log + cp ${name}/${line}.py ${name}_${line}.py + cp ${name}/${line}.log ${name}_${line}.log end endif @@ -76,4 +65,3 @@ end echo echo Finish $0 $1 $2 date +%F\ %a\ %T -# diff --git a/HLTrigger/Configuration/test/runOne.csh b/HLTrigger/Configuration/test/runOne.csh index 5cfad9b4aaf91..15e168f3ca817 100755 --- a/HLTrigger/Configuration/test/runOne.csh +++ b/HLTrigger/Configuration/test/runOne.csh @@ -1,7 +1,6 @@ #!/bin/tcsh -cmsenv -rehash +eval `scram runtime -csh` set rawLHC = L1RePack set rawSIM = DigiL1Raw From 46a9f8115cd89982eb1996799966bcf667e95697 Mon Sep 17 00:00:00 2001 From: Marino Missiroli Date: Mon, 7 Nov 2022 15:22:17 +0100 Subject: [PATCH 2/2] enable use of dbproxy in more HLT-dev tools --- .../Configuration/scripts/hltGetConfiguration | 24 +- HLTrigger/Configuration/tables/subtables.sh | 70 +-- HLTrigger/Configuration/test/getDatasets.py | 157 ++++--- .../Configuration/test/getEventContent.py | 434 +++++++++++------- HLTrigger/Configuration/test/getFrozenHLT.sh | 118 +++-- HLTrigger/Configuration/test/getHLT.sh | 190 +++----- 6 files changed, 549 
insertions(+), 444 deletions(-) diff --git a/HLTrigger/Configuration/scripts/hltGetConfiguration b/HLTrigger/Configuration/scripts/hltGetConfiguration index 39da6311cafcc..6157019b9f761 100755 --- a/HLTrigger/Configuration/scripts/hltGetConfiguration +++ b/HLTrigger/Configuration/scripts/hltGetConfiguration @@ -1,11 +1,10 @@ #!/usr/bin/env python3 - -from __future__ import print_function -import sys, os +import sys +import os import argparse -from HLTrigger.Configuration.extend_argparse import * -import HLTrigger.Configuration.Tools.confdb as confdb +from HLTrigger.Configuration.extend_argparse import * +import HLTrigger.Configuration.Tools.confdb as confdb import HLTrigger.Configuration.Tools.options as options # define an argparse parser to parse our options @@ -243,8 +242,19 @@ parser.add_argument('--help', # parse command line arguments and options config = parser.parse_args(namespace = options.HLTProcessOptions()) -cmdArgs = sys.argv -cmdArgs[0] = os.path.basename(sys.argv[0]) +# do not include db-proxy options in 1st-line comment +cmdArgs, skipNext = [], False +for cmdArg in sys.argv: + if skipNext: + skipNext = False + continue + if cmdArg.startswith('--dbproxy'): + if cmdArg.startswith('--dbproxyh') or cmdArg.startswith('--dbproxyp'): + skipNext = '=' not in cmdArg + continue + cmdArgs += [cmdArg] +cmdArgs[0] = os.path.basename(cmdArgs[0]) + cmdLine = ' '.join(cmdArgs) print('# ' + cmdLine) print() diff --git a/HLTrigger/Configuration/tables/subtables.sh b/HLTrigger/Configuration/tables/subtables.sh index 94ce445db5f6b..fea75588a9918 100755 --- a/HLTrigger/Configuration/tables/subtables.sh +++ b/HLTrigger/Configuration/tables/subtables.sh @@ -1,8 +1,21 @@ -#! 
/bin/bash +#!/bin/bash # # utility functions used to generate HLT tables from master table in ConfDB # +# db-proxy configuration +DBPROXY="" +DBPROXYHOST="localhost" +DBPROXYPORT="8080" +while [[ $# -gt 0 ]]; do + case "$1" in + --dbproxy) DBPROXY="--dbproxy"; shift;; + --dbproxyhost) DBPROXYHOST="$2"; shift; shift;; + --dbproxyport) DBPROXYPORT="$2"; shift; shift;; + *) shift;; + esac +done + # load common HLT functions if [ -f "$CMSSW_BASE/src/HLTrigger/Configuration/common/utils.sh" ]; then source "$CMSSW_BASE/src/HLTrigger/Configuration/common/utils.sh" @@ -31,12 +44,14 @@ function cleanup() { } function getPathList() { - local DATA=$(hltConfigFromDB --$Vx --$DB --cff --configName $MASTER --noedsources --noes --noservices --nosequences --nomodules) - if echo "$DATA" | grep -q 'Exhausted Resultset\|CONFIG_NOT_FOUND'; then + [ "x${DBPROXY}" = "x" ] || local DBPROXYOPTS="${DBPROXY} --dbproxyhost ${DBPROXYHOST} --dbproxyport ${DBPROXYPORT}" + local DATA=$(hltConfigFromDB --${Vx} --${DB} --cff --configName ${MASTER} \ + --noedsources --noes --noservices --nosequences --nomodules ${DBPROXYOPTS}) + if echo "${DATA}" | grep -q 'Exhausted Resultset\|CONFIG_NOT_FOUND'; then echo "Error: $MASTER is not a valid HLT menu" exit 1 fi - echo "$DATA" | sed -ne's/ *= *cms.\(Final\|End\)\?Path.*//p' + echo "${DATA}" | sed -ne's/ *= *cms.\(Final\|End\)\?Path.*//p' } function checkJars() { @@ -73,7 +88,7 @@ function makeCreateConfig() { if [ -f $workDir/$JAR ]; then continue fi - # download to a temporay file and use an atomic move (in case an other istance is downloading the same file + # download to a temporary file and use an atomic move (in case another instance is downloading the same file) local TMPJAR=$(mktemp -p "$workDir" .${JAR}.XXXXXXXXXX) curl -s -L "$baseUrl/$JAR" -o "$TMPJAR" mv -n "$TMPJAR" "$workDir/$JAR" @@ -89,61 +104,60 @@ function makeCreateConfig() { function loadConfiguration() { case "$1" in + # v1 offline aka "hltdev" "v1/offline" | "v1/hltdev") - # v1 
offline aka "hltdev" - DBHOST="cmsr1-v.cern.ch" + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_cond.cern.ch" DBUSER="cms_hltdev_writer" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; + # v2 offline "v2/offline") - # v2 offline - DBHOST="cmsr1-v.cern.ch" + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_cond.cern.ch" DBUSER="cms_hlt_gdr_w" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; - "v3/run3") - # v3 run3 - DBHOST="cmsr1-s.cern.ch" + # converter=v3*, db=run3 + "v3/run3" | "v3-beta/run3" | "v3-test/run3") + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_hlt.cern.ch" DBUSER="cms_hlt_v3_w" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; - "v3-test/dev") - # v3-test dev - DBHOST="cmsr1-s.cern.ch" - DBNAME="cms_hlt.cern.ch" - DBUSER="cms_hlt_gdrdev_w" - PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" - ;; - "v3/dev") - # v3 dev - DBHOST="cmsr1-s.cern.ch" + # converter=v3*, db=dev + "v3/dev" | "v3-beta/dev" | "v3-test/dev") + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_hlt.cern.ch" DBUSER="cms_hlt_gdrdev_w" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; *) # see https://github.com/fwyzard/hlt-confdb/blob/confdbv2/test/runCreateConfig - echo "Error, unnown database \"$1\", exiting." + echo "Error, unknown database \"$1\", exiting." 
exit 1 ;; esac } function runCreateConfig() { + [ "x${DBPROXY}" = "x" ] || local DBPROXYOPTS="-DsocksProxyHost=${DBPROXYHOST} -DsocksProxyPort=${DBPROXYPORT}" loadConfiguration "$1" java \ -Djava.security.egd=file:///dev/urandom \ -Doracle.jdbc.timezoneAsRegion=false \ + ${DBPROXYOPTS} \ -Xss32M \ -Xmx1024m \ - -classpath "$CLASSPATH" \ + -classpath "${CLASSPATH}" \ confdb.db.ConfDBCreateConfig \ - --dbHost $DBHOST \ - --dbName $DBNAME \ - --dbUser $DBUSER \ + --dbHost "${DBHOST}" \ + --dbName "${DBNAME}" \ + --dbUser "${DBUSER}" \ --dbPwrd $2 \ --master $3 \ --paths $4 \ @@ -219,7 +233,7 @@ function createSubtables() { # ask the user for the database password readPassword - # make sure the needed sripts are available + # make sure the needed scripts are available makeCreateConfig # extract each subtable diff --git a/HLTrigger/Configuration/test/getDatasets.py b/HLTrigger/Configuration/test/getDatasets.py index e9a551f485714..90cb1c08428f7 100755 --- a/HLTrigger/Configuration/test/getDatasets.py +++ b/HLTrigger/Configuration/test/getDatasets.py @@ -1,72 +1,107 @@ -#! 
/usr/bin/env python3 - -import sys +#!/usr/bin/env python3 +"""getDatasets.py: create Datasets-cff file of an HLT configuration from the ConfDB database +""" +import argparse import subprocess -import types +import os import re -import FWCore.ParameterSet.Config as cms - -def extractDatasets(version, database, config): - # dump the streams and Datasets from the HLT configuration - proc = subprocess.Popen( - "hltConfigFromDB --%s --%s --configName %s --nopsets --noedsources --noes --noservices --nooutput --nopaths" % (version, database, config), - shell = True, - stdin = None, - stdout = subprocess.PIPE, - stderr = None, - ) - (out, err) = proc.communicate() - - # load the streams and Datasets - hlt = types.ModuleType('hlt') - exec(out, globals(), hlt.__dict__) - return hlt.process +import FWCore.ParameterSet.Config as cms +import HLTrigger.Configuration.Tools.pipe as pipe +import HLTrigger.Configuration.Tools.options as options + +def getHLTProcess(config): + '''return cms.Process containing Streams and Datasets of the HLT configuration + ''' + # cmd-line args to select HLT configuration + if config.menu.run: + configline = f'--runNumber {config.menu.run}' + else: + configline = f'--{config.menu.database} --{config.menu.version} --configName {config.menu.name}' + + # cmd to download HLT configuration + cmdline = f'hltConfigFromDB {configline} --noedsources --noes --noservices --nopsets --nooutput --nopaths' + if config.proxy: + cmdline += f' --dbproxy --dbproxyhost {config.proxy_host} --dbproxyport {config.proxy_port}' + + # load HLT configuration + try: + foo = {'process': None} + exec(pipe.pipe(cmdline).decode(), foo) + process = foo['process'] + except: + raise Exception(f'query did not return a valid python file:\n query="{cmdline}"') + + if not isinstance(process, cms.Process): + raise Exception(f'query did not return a valid HLT menu:\n query="{cmdline}"') + + return process + +### +### main +### +if __name__ == '__main__': + + # defaults of cmd-line 
arguments + defaults = options.HLTProcessOptions() + + parser = argparse.ArgumentParser( + prog = './'+os.path.basename(__file__), + formatter_class = argparse.RawDescriptionHelpFormatter, + description = __doc__) + + # required argument + parser.add_argument('menu', + action = 'store', + type = options.ConnectionHLTMenu, + metavar = 'MENU', + help = 'HLT menu to dump from the database. Supported formats are:\n - /path/to/configuration[/Vn]\n - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn]\n - run:runnumber\nThe possible converters are "v1", "v2", and "v3" (default).\nThe possible databases are "run3" (default, used for offline development), "run2" (used for accessing run2 offline development menus), "online" (used to extract online menus within Point 5) and "adg" (used to extract the online menus outside Point 5).\nIf no menu version is specified, the latest one is automatically used.\nIf "run:" is used instead, the HLT menu used for the given run number is looked up and used.\nNote other converters and databases exist as options but they are only for expert/special use.'
) + + # options + parser.add_argument('--dbproxy', + dest = 'proxy', + action = 'store_true', + default = defaults.proxy, + help = 'Use a socks proxy to connect outside CERN network (default: False)' ) + parser.add_argument('--dbproxyport', + dest = 'proxy_port', + action = 'store', + metavar = 'PROXYPORT', + default = defaults.proxy_port, + help = 'Port of the socks proxy (default: 8080)' ) + parser.add_argument('--dbproxyhost', + dest = 'proxy_host', + action = 'store', + metavar = 'PROXYHOST', + default = defaults.proxy_host, + help = 'Host of the socks proxy (default: "localhost")' ) + + # parse command line arguments and options + config = parser.parse_args() + + process = getHLTProcess(config) + + print('''# %s +import FWCore.ParameterSet.Config as cms +''' % config.menu.name) -def dumpDataset(process, stream, dataset): - if dataset in process.datasets.__dict__: - name = 'stream%s_dataset%s_selector' % (stream, dataset) - dump = '''from HLTrigger.HLTfilters.triggerResultsFilter_cfi import triggerResultsFilter as %s + for stream in sorted(process.streams.__dict__): + if re.match(r'^(Physics|Parking)', stream): + print(''' +# stream %s +''' % stream) + ds = sorted(process.streams.__dict__[stream]) + for dataset in ds: + if dataset in process.datasets.__dict__: + name = 'stream%s_dataset%s_selector' % (stream, dataset) + dump = '''from HLTrigger.HLTfilters.triggerResultsFilter_cfi import triggerResultsFilter as %s %s.hltResults = cms.InputTag('TriggerResults', '', 'HLT') %s.l1tResults = cms.InputTag('') %s.throw = cms.bool(False) %s.triggerConditions = %s - ''' % (name, name, name, name, name, process.datasets.__dict__[dataset]) - else: - dump = '''# dataset %s not found - + else: + dump = '''# dataset %s not found ''' % (dataset, ) - return dump - - -# split a "[version/]db:name" configuration into a (version, db, name) tuple -def splitConfigName(configName): - from HLTrigger.Configuration.Tools.options import ConnectionHLTMenu - menu = 
ConnectionHLTMenu(configName) - return (menu.version, menu.database, menu.name) - - -# get the configuration to parse and the file where to output the stream definitions from the command line -config = sys.argv[1] - -# dump the expanded event content configurations to a python configuration fragment -config = splitConfigName(config) -process = extractDatasets(* config) - -sys.stdout.write('''# %s - -import FWCore.ParameterSet.Config as cms - -''' % config[2] ) - -for stream in sorted(process.streams.__dict__): - if re.match(r'^Physics|Parking', stream): - sys.stdout.write(''' -# stream %s - -''' % stream) - ds = sorted(process.streams.__dict__[stream]) - for dataset in ds: - sys.stdout.write(dumpDataset(process, stream, dataset)) + print(dump) diff --git a/HLTrigger/Configuration/test/getEventContent.py b/HLTrigger/Configuration/test/getEventContent.py index 22095be802144..afd781b120c9c 100755 --- a/HLTrigger/Configuration/test/getEventContent.py +++ b/HLTrigger/Configuration/test/getEventContent.py @@ -1,35 +1,58 @@ -#! 
/usr/bin/env python3 - -import sys +#!/usr/bin/env python3 +"""getEventContent.py: print EventContent cff fragment of a ConfDB configuration +""" +import argparse import subprocess +import os +import re + import FWCore.ParameterSet.Config as cms +import HLTrigger.Configuration.Tools.pipe as pipe +import HLTrigger.Configuration.Tools.options as options + +def getHLTProcessBlocks(config, blocks): + """return cms.Process containing the OutputModules of the HLT configuration + """ + # cmd-line args to select HLT configuration + if config.menu.run: + configline = f'--runNumber {config.menu.run}' + else: + configline = f'--{config.menu.database} --{config.menu.version} --configName {config.menu.name}' -config = sys.argv[1] - -def extractBlock(config, blocks, target): - #print 'configuration: %s' % config - #print 'blocks: %s' % ', '.join(blocks) - #print 'target: %s' % target - #print - commands = ','.join( block + '::outputCommands' for block in blocks ) - proc = subprocess.Popen( - "hltConfigFromDB --configName %s --noedsources --nopaths --noes --nopsets --noservices --cff --blocks %s --format python | sed -e'/^streams/,/^)/d' -e'/^datasets/,/^)/d' > %s" % (config, commands, target), - shell = True, - stdin = None, - stdout = None, - stderr = None, - ) - proc.wait() - -def extractBlocks(config): - outputA = [ 'hltOutputA', 'hltOutputPhysicsCommissioning' ] - outputALCA = [ 'hltOutputALCAPHISYM', 'hltOutputALCAP0', 'hltOutputALCAPPSExpress', 'hltOutputALCAPPSPrompt', 'hltOutputALCALumiPixelsCountsExpress', 'hltOutputALCALumiPixelsCountsPrompt', 'hltOutputRPCMON' ] - outputMON = [ 'hltOutputA', 'hltOutputPhysicsCommissioning', 'hltOutputDQM', 'hltOutputDQMGPUvsCPU', 'hltOutputHLTMonitor', 'hltOutputReleaseValidation' ] - outputScouting = [ 'hltOutputScoutingPF' ] - extractBlock(config, outputA, 'hltOutputA_cff.py') - extractBlock(config, outputALCA, 'hltOutputALCA_cff.py') - extractBlock(config, outputMON, 'hltOutputMON_cff.py') - extractBlock(config, outputScouting, 
'hltScouting_cff.py') + # cmd to download HLT configuration + cmdline = f'hltConfigFromDB {configline}' + if config.proxy: + cmdline += f' --dbproxy --dbproxyhost {config.proxy_host} --dbproxyport {config.proxy_port}' + + cmdline += ' --noedsources --noes --nopsets --noservices --nopaths --format python' + cmdline += ' --blocks '+','.join({foo+'::outputCommands' for foo in blocks}) + + # load HLT configuration + try: + foo = {} + exec(pipe.pipe(cmdline).decode(), foo) + except: + raise Exception(f'query did not return a valid python file:\n query="{cmdline}"') + + ret = {} + for block in blocks: + key = 'block_'+block + ret[key] = foo[key] if key in foo else None + if ret[key] != None and not isinstance(ret[key], cms.PSet): + raise Exception(f'query did not return valid HLT blocks:\n query="{cmdline}"') + + return ret + +def getHLTProcessBlockGroups(config, blockGroupDict): + ret = {} + blockDict = getHLTProcessBlocks(config, {bar for foo in blockGroupDict.values() for bar in foo}) + for groupName in blockGroupDict: + ret[groupName] = cms.PSet() + for blockKey in blockGroupDict[groupName]: + blockName = 'block_'+blockKey + if blockDict[blockName] != None: + setattr(ret[groupName], blockName, blockDict[blockName]) + return ret def makePSet(statements): statements = sorted(statements) @@ -65,7 +88,6 @@ def buildPSetWithoutRAWs(blocks): statements.update( statement for statement in block if statement.find('drop') != 0 and statement.find('keep FEDRawDataCollection') != 0) return makePSet(statements) - # customisation of AOD event content, requested by David Dagenhart def dropL1GlobalTriggerObjectMapRecord(block): """drop the old L1GlobalTriggerObjectMapRecord data format from the block (meant for the AOD data tier)""" @@ -78,141 +100,164 @@ def dropL1GlobalTriggerObjectMapRecord(block): # add just after it a drop statement for the old data format block.outputCommands.insert(position + 1, 'drop L1GlobalTriggerObjectMapRecord_hltL1GtObjectMap_*_*') +def 
printHLTriggerEventContentCff(process): + + blockGroups = getHLTProcessBlockGroups(config, { + 'hltOutputA_cff': [ + 'hltOutputA', + 'hltOutputPhysicsCommissioning', + ], + 'hltOutputALCA_cff': [ + 'hltOutputALCAPHISYM', + 'hltOutputALCAP0', + 'hltOutputALCAPPSExpress', + 'hltOutputALCAPPSPrompt', + 'hltOutputALCALumiPixelsCountsExpress', + 'hltOutputALCALumiPixelsCountsPrompt', + 'hltOutputRPCMON', + ], + 'hltOutputMON_cff': [ + 'hltOutputA', + 'hltOutputPhysicsCommissioning', + 'hltOutputDQM', + 'hltOutputDQMGPUvsCPU', + 'hltOutputHLTMonitor', + 'hltOutputReleaseValidation', + ], + 'hltOutputScouting_cff': [ + 'hltOutputScoutingPF', + ], + }) + + hltOutputA_cff = blockGroups['hltOutputA_cff'] + hltOutputALCA_cff = blockGroups['hltOutputALCA_cff'] + hltOutputMON_cff = blockGroups['hltOutputMON_cff'] + hltOutputScouting_cff = blockGroups['hltOutputScouting_cff'] + + # hltDebugOutput + + if not hasattr(hltOutputMON_cff,'block_hltOutputA'): + hltOutputMON_cff.block_hltOutputA = hltOutputMON_cff.block_hltOutputPhysicsCommissioning + if not hasattr(hltOutputMON_cff,'block_hltOutputDQM'): + hltOutputMON_cff.block_hltOutputDQM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputMON_cff,'block_hltOutputDQMGPUvsCPU'): + hltOutputMON_cff.block_hltOutputDQMGPUvsCPU = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputMON_cff,'block_hltOutputHLTMonitor'): + hltOutputMON_cff.block_hltOutputHLTMonitor = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *_hlt*_*_*' )) + if not hasattr(hltOutputMON_cff,'block_hltOutputReleaseValidation'): + hltOutputMON_cff.block_hltOutputReleaseValidation = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + + hltDebugOutputBlocks = ( + # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online + hltOutputMON_cff.block_hltOutputA.outputCommands, + hltOutputMON_cff.block_hltOutputDQM.outputCommands, + 
hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, + hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, + hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, + ) + hltDebugOutputContent = buildPSet(hltDebugOutputBlocks) + + # hltDebugWithAlCaOutput + + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPHISYM'): + hltOutputALCA_cff.block_hltOutputALCAPHISYM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAP0'): + hltOutputALCA_cff.block_hltOutputALCAP0 = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSExpress'): + hltOutputALCA_cff.block_hltOutputALCAPPSExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSPrompt'): + hltOutputALCA_cff.block_hltOutputALCAPPSPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsExpress'): + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsPrompt'): + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputRPCMON'): + hltOutputALCA_cff.block_hltOutputRPCMON = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + hltDebugWithAlCaOutputBlocks = ( + # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online + hltOutputMON_cff.block_hltOutputA.outputCommands, + hltOutputMON_cff.block_hltOutputDQM.outputCommands, + hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, + hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, + hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, + # the ALCA streams 
have the AlCa outputs + hltOutputALCA_cff.block_hltOutputALCAPHISYM.outputCommands, + hltOutputALCA_cff.block_hltOutputALCAP0.outputCommands, + hltOutputALCA_cff.block_hltOutputALCAPPSExpress.outputCommands, + hltOutputALCA_cff.block_hltOutputALCAPPSPrompt.outputCommands, + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress.outputCommands, + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt.outputCommands, + hltOutputALCA_cff.block_hltOutputRPCMON.outputCommands, + ) + hltDebugWithAlCaOutputContent = buildPSet(hltDebugWithAlCaOutputBlocks) + + # hltScoutingOutput + + if not hasattr(hltOutputScouting_cff,'block_hltOutputScoutingPF'): + hltOutputScouting_cff.block_hltOutputScoutingPF = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + + hltScoutingOutputBlocks = ( + # the Scouting streams have the Scouting outputs + hltOutputScouting_cff.block_hltOutputScoutingPF.outputCommands, + ) + hltScoutingOutputContent = buildPSetNoDrop(hltScoutingOutputBlocks) + + # hltDefaultOutput + if not hasattr(hltOutputA_cff,'block_hltOutputA'): + hltOutputA_cff.block_hltOutputA = hltOutputA_cff.block_hltOutputPhysicsCommissioning + hltDefaultOutputBlocks = ( + # the A stream has the HLT default output, with FEDs - strip out the FEDRawDataCollection keep statements for hltDefaultOutput + hltOutputA_cff.block_hltOutputA.outputCommands, + ) + hltDefaultOutputContent = buildPSetWithoutRAWs(hltDefaultOutputBlocks) + hltDefaultOutputWithFEDsContent = buildPSet(hltDefaultOutputBlocks) -# extract the HLT layer event content -extractBlocks( config ) -import hltOutputA_cff -import hltOutputALCA_cff -import hltOutputMON_cff -import hltScouting_cff - -# hltDebugOutput - -if not hasattr(hltOutputMON_cff,'block_hltOutputA'): - hltOutputMON_cff.block_hltOutputA = hltOutputMON_cff.block_hltOutputPhysicsCommissioning -if not hasattr(hltOutputMON_cff,'block_hltOutputDQM'): - hltOutputMON_cff.block_hltOutputDQM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' 
)) -if not hasattr(hltOutputMON_cff,'block_hltOutputDQMGPUvsCPU'): - hltOutputMON_cff.block_hltOutputDQMGPUvsCPU = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputMON_cff,'block_hltOutputHLTMonitor'): - hltOutputMON_cff.block_hltOutputHLTMonitor = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *_hlt*_*_*' )) -if not hasattr(hltOutputMON_cff,'block_hltOutputReleaseValidation'): - hltOutputMON_cff.block_hltOutputReleaseValidation = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) - -hltDebugOutputBlocks = ( - # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online - hltOutputMON_cff.block_hltOutputA.outputCommands, - hltOutputMON_cff.block_hltOutputDQM.outputCommands, - hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, - hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, - hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, -) -hltDebugOutputContent = buildPSet(hltDebugOutputBlocks) - - -# hltDebugWithAlCaOutput -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPHISYM'): - hltOutputALCA_cff.block_hltOutputALCAPHISYM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAP0'): - hltOutputALCA_cff.block_hltOutputALCAP0 = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSExpress'): - hltOutputALCA_cff.block_hltOutputALCAPPSExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSPrompt'): - hltOutputALCA_cff.block_hltOutputALCAPPSPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsExpress'): - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not 
hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsPrompt'): - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputRPCMON'): - hltOutputALCA_cff.block_hltOutputRPCMON = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -hltDebugWithAlCaOutputBlocks = ( - # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online - hltOutputMON_cff.block_hltOutputA.outputCommands, - hltOutputMON_cff.block_hltOutputDQM.outputCommands, - hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, - hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, - hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, - # the ALCA streams have the AlCa outputs - hltOutputALCA_cff.block_hltOutputALCAPHISYM.outputCommands, - hltOutputALCA_cff.block_hltOutputALCAP0.outputCommands, - hltOutputALCA_cff.block_hltOutputALCAPPSExpress.outputCommands, - hltOutputALCA_cff.block_hltOutputALCAPPSPrompt.outputCommands, - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress.outputCommands, - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt.outputCommands, - hltOutputALCA_cff.block_hltOutputRPCMON.outputCommands, -) -hltDebugWithAlCaOutputContent = buildPSet(hltDebugWithAlCaOutputBlocks) - -# hltScoutingOutput - -if not hasattr(hltScouting_cff,'block_hltOutputScoutingPF'): - hltScouting_cff.block_hltOutputScoutingPF = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) - -hltScoutingOutputBlocks = ( - # the Scouting streams have the Scouting outputs - hltScouting_cff.block_hltOutputScoutingPF.outputCommands, -) -hltScoutingOutputContent = buildPSetNoDrop(hltScoutingOutputBlocks) - - -# hltDefaultOutput -if not hasattr(hltOutputA_cff,'block_hltOutputA'): - hltOutputA_cff.block_hltOutputA = hltOutputA_cff.block_hltOutputPhysicsCommissioning -hltDefaultOutputBlocks = ( - # the A stream has the HLT default 
output, with FEDs - strip out the FEDRawDataCollection keep statements for hltDefaultOutput - hltOutputA_cff.block_hltOutputA.outputCommands, -) -hltDefaultOutputContent = buildPSetWithoutRAWs(hltDefaultOutputBlocks) -hltDefaultOutputWithFEDsContent = buildPSet(hltDefaultOutputBlocks) - - -# define the CMSSW default event content configurations - -# RAW event content -HLTriggerRAW = cms.PSet( - outputCommands = cms.vstring() -) -HLTriggerRAW.outputCommands.extend(hltDefaultOutputWithFEDsContent.outputCommands) -HLTriggerRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# RECO event content -HLTriggerRECO = cms.PSet( - outputCommands = cms.vstring() -) -HLTriggerRECO.outputCommands.extend(hltDefaultOutputContent.outputCommands) -HLTriggerRECO.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# AOD event content -HLTriggerAOD = cms.PSet( - outputCommands = cms.vstring() -) -HLTriggerAOD.outputCommands.extend(hltDefaultOutputContent.outputCommands) -HLTriggerAOD.outputCommands.extend(hltScoutingOutputContent.outputCommands) -dropL1GlobalTriggerObjectMapRecord(HLTriggerAOD) - -# HLTDEBUG RAW event content -HLTDebugRAW = cms.PSet( - outputCommands = cms.vstring() -) -HLTDebugRAW.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) -HLTDebugRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# HLTDEBUG FEVT event content -HLTDebugFEVT = cms.PSet( - outputCommands = cms.vstring() -) -HLTDebugFEVT.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) -HLTDebugFEVT.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# Scouting event content -HLTScouting = cms.PSet( - outputCommands = cms.vstring() -) -HLTScouting.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# dump the expanded event content configurations to a python configuration fragment -dump = open('HLTrigger_EventContent_cff.py', 'w') -dump.write('''import FWCore.ParameterSet.Config as cms + # define 
the CMSSW default event content configurations + + # RAW event content + HLTriggerRAW = cms.PSet( + outputCommands = cms.vstring() + ) + HLTriggerRAW.outputCommands.extend(hltDefaultOutputWithFEDsContent.outputCommands) + HLTriggerRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # RECO event content + HLTriggerRECO = cms.PSet( + outputCommands = cms.vstring() + ) + HLTriggerRECO.outputCommands.extend(hltDefaultOutputContent.outputCommands) + HLTriggerRECO.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # AOD event content + HLTriggerAOD = cms.PSet( + outputCommands = cms.vstring() + ) + HLTriggerAOD.outputCommands.extend(hltDefaultOutputContent.outputCommands) + HLTriggerAOD.outputCommands.extend(hltScoutingOutputContent.outputCommands) + dropL1GlobalTriggerObjectMapRecord(HLTriggerAOD) + + # HLTDEBUG RAW event content + HLTDebugRAW = cms.PSet( + outputCommands = cms.vstring() + ) + HLTDebugRAW.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) + HLTDebugRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # HLTDEBUG FEVT event content + HLTDebugFEVT = cms.PSet( + outputCommands = cms.vstring() + ) + HLTDebugFEVT.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) + HLTDebugFEVT.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # Scouting event content + HLTScouting = cms.PSet( + outputCommands = cms.vstring() + ) + HLTScouting.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # print the expanded event content configurations to stdout + print('''import FWCore.ParameterSet.Config as cms # EventContent for HLT related products. 
@@ -222,12 +267,55 @@ def dropL1GlobalTriggerObjectMapRecord(block): # HLTScouting (with Scouting products) # # as these are used in Configuration/EventContent -# -''') -dump.write('HLTriggerRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRAW.outputCommands)) -dump.write('HLTriggerRECO = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRECO.outputCommands)) -dump.write('HLTriggerAOD = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerAOD.outputCommands)) -dump.write('HLTDebugRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugRAW.outputCommands)) -dump.write('HLTDebugFEVT = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugFEVT.outputCommands)) -dump.write('HLTScouting = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTScouting.outputCommands)) -dump.close() +#''') + print('HLTriggerRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRAW.outputCommands)) + print('HLTriggerRECO = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRECO.outputCommands)) + print('HLTriggerAOD = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerAOD.outputCommands)) + print('HLTDebugRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugRAW.outputCommands)) + print('HLTDebugFEVT = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugFEVT.outputCommands)) + print('HLTScouting = cms.PSet(\n 
outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTScouting.outputCommands)) + +### +### main +### +if __name__ == '__main__': + + # defaults of cmd-line arguments + defaults = options.HLTProcessOptions() + + parser = argparse.ArgumentParser( + prog = './'+os.path.basename(__file__), + formatter_class = argparse.RawDescriptionHelpFormatter, + description = __doc__ + ) + + # required argument + parser.add_argument('menu', + action = 'store', + type = options.ConnectionHLTMenu, + metavar = 'MENU', + help = 'HLT menu to dump from the database. Supported formats are:\n - /path/to/configuration[/Vn]\n - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn]\n - run:runnumber\nThe possible converters are "v1", "v2", and "v3" (default).\nThe possible databases are "run3" (default, used for offline development), "run2" (used for accessing run2 offline development menus), "online" (used to extract online menus within Point 5) and "adg" (used to extract the online menus outside Point 5).\nIf no menu version is specified, the latest one is automatically used.\nIf "run:" is used instead, the HLT menu used for the given run number is looked up and used.\nNote other converters and databases exist as options but they are only for expert/special use.' 
) + + # options + parser.add_argument('--dbproxy', + dest = 'proxy', + action = 'store_true', + default = defaults.proxy, + help = 'Use a socks proxy to connect outside CERN network (default: False)' ) + parser.add_argument('--dbproxyport', + dest = 'proxy_port', + action = 'store', + metavar = 'PROXYPORT', + default = defaults.proxy_port, + help = 'Port of the socks proxy (default: 8080)' ) + parser.add_argument('--dbproxyhost', + dest = 'proxy_host', + action = 'store', + metavar = 'PROXYHOST', + default = defaults.proxy_host, + help = 'Host of the socks proxy (default: "localhost")' ) + + # parse command line arguments and options + config = parser.parse_args() + + printHLTriggerEventContentCff(config) diff --git a/HLTrigger/Configuration/test/getFrozenHLT.sh b/HLTrigger/Configuration/test/getFrozenHLT.sh index d5ab2c3b5a245..023d229bda332 100755 --- a/HLTrigger/Configuration/test/getFrozenHLT.sh +++ b/HLTrigger/Configuration/test/getFrozenHLT.sh @@ -1,4 +1,4 @@ -#! /bin/bash +#!/bin/bash # ConfDB configurations to use TABLES="Fake Fake1 Fake2 2022v14" @@ -7,73 +7,71 @@ HLT_Fake1="/dev/CMSSW_12_5_0/Fake1" HLT_Fake2="/dev/CMSSW_12_5_0/Fake2" HLT_2022v14="/frozen/2022/2e34/v1.4/CMSSW_12_5_X/HLT" -# print extra messages ? 
-VERBOSE=false - -# this is used for brace expansion -TABLES_=$(echo $TABLES | sed -e's/ \+/,/g') +# command-line arguments +VERBOSE=false # print extra messages to stdout +DBPROXYOPTS="" # db-proxy configuration +while [[ $# -gt 0 ]]; do + case "$1" in + -v) VERBOSE=true; shift;; + --dbproxy) DBPROXYOPTS="${DBPROXYOPTS} --dbproxy"; shift;; + --dbproxyhost) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyhost $2"; shift; shift;; + --dbproxyport) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyport $2"; shift; shift;; + *) shift;; + esac +done -[ "$1" == "-v" ] && { VERBOSE=true; shift; } -[ "$1" == "-q" ] && { VERBOSE=false; shift; } +# remove spurious whitespaces and tabs from DBPROXYOPTS +DBPROXYOPTS=$(echo "${DBPROXYOPTS}" | xargs) +# log: print to stdout only if VERBOSE=true function log() { - $VERBOSE && echo -e "$@" + ${VERBOSE} && echo -e "$@" } -function getConfigForCVS() { - local CONFIG="$1" - local NAME="$2" - log " dumping HLT cffs for $NAME from $CONFIG" - # do not use any conditions or L1 override - hltGetConfiguration --cff --data $CONFIG --type $NAME > HLT_${NAME}_cff.py -} +# path to directory hosting this script +TESTDIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd) -function getConfigForOnline() { - local CONFIG="$1" - local NAME="$2" - log " dumping full HLT for $NAME from $CONFIG" - # override the conditions with a menu-dependent "virtual" global tag, which takes care of overriding the L1 menu +# ensure that directory hosting this script corresponds to ${CMSSW_BASE}/src/HLTrigger/Configuration/test +if [ "${TESTDIR}" != "${CMSSW_BASE}"/src/HLTrigger/Configuration/test ]; then + printf "\n%s\n" "ERROR -- the directory hosting getHLT.sh [1] does not correspond to \${CMSSW_BASE}/src/HLTrigger/Configuration/test [2]" + printf "%s\n" " [1] ${TESTDIR}" + printf "%s\n\n" " [2] ${CMSSW_BASE}/src/HLTrigger/Configuration/test" + exit 1 +fi - if [ "$NAME" == "Fake" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process 
"HLT${NAME}" --globaltag "auto:run1_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_${NAME}.py - elif [ "$NAME" == "Fake1" ] || [ "$NAME" == "Fake2" ] || [ "$NAME" == "2018" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process "HLT${NAME}" --globaltag "auto:run2_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_${NAME}.py - else - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process "HLT${NAME}" --globaltag "auto:run3_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_${NAME}.py - fi -} +# ensure that the python/ directory hosting cff fragments exists +if [ ! -d "${CMSSW_BASE}"/src/HLTrigger/Configuration/python ]; then + printf "\n%s\n" "ERROR -- the directory \${CMSSW_BASE}/src/HLTrigger/Configuration/python [1] does not exist" + printf "%s\n\n" " [1] ${CMSSW_BASE}/src/HLTrigger/Configuration/python" + exit 1 +fi -# make sure we're using *this* working area -eval `scramv1 runtime -sh` -hash -r +INITDIR="${PWD}" -# cff python dumps, in CVS under HLTrigger/Configuration/pyhon -log "Extracting cff python dumps" -echo "Extracting cff python dumps" -FILES=$(eval echo HLT_{$TABLES_}_cff.py) -rm -f $FILES -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - CONFIG=$(eval echo \$$(echo HLT_$TABLE)) - getConfigForCVS $CONFIG $TABLE -done -log "Done" -log "$(ls -l $FILES)" -mv -f $FILES ../python/ -log +# execute the ensuing steps from ${CMSSW_BASE}/src/HLTrigger/Configuration/test +cd "${CMSSW_BASE}"/src/HLTrigger/Configuration/test -# full config dumps, in CVS under HLTrigger/Configuration/test -log "Extracting full configuration dumps" -echo "Extracting full configuration dumps" -FILES=$(eval echo OnLine_HLT_{$TABLES_}.py) -rm -f $FILES -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - CONFIG=$(eval echo \$$(echo HLT_$TABLE)) - getConfigForOnline $CONFIG $TABLE +# create cff fragments and cfg configs +for TABLE in ${TABLES}; do + 
CONFIG=$(eval echo \$$(echo HLT_"${TABLE}")) + echo "${TABLE} (config: ${CONFIG})" + + # cff fragment of each HLT menu (do not use any conditions or L1T override) + log " creating cff fragment of HLT menu..." + hltGetConfiguration "${CONFIG}" --cff --data --type "${TABLE}" ${DBPROXYOPTS} > ../python/HLT_"${TABLE}"_cff.py + + # GlobalTag + AUTOGT="auto:run3_hlt_${TABLE}" + if [ "${TABLE}" = "Fake1" ] || [ "${TABLE}" = "Fake2" ] || [ "${TABLE}" = "2018" ]; then + AUTOGT="auto:run2_hlt_${TABLE}" + elif [ "${TABLE}" = "Fake" ]; then + AUTOGT="auto:run1_hlt_${TABLE}" + fi + + # standalone cfg file of each HLT menu + log " creating full cfg of HLT menu..." + hltGetConfiguration "${CONFIG}" --full --data --type "${TABLE}" --unprescale --process "HLT${TABLE}" --globaltag "${AUTOGT}" \ + --input "file:RelVal_Raw_${TABLE}_DATA.root" ${DBPROXYOPTS} > OnLine_HLT_"${TABLE}".py done -log "Done" -log "$(ls -l $FILES)" -log + +cd "${INITDIR}" diff --git a/HLTrigger/Configuration/test/getHLT.sh b/HLTrigger/Configuration/test/getHLT.sh index f003f5796d8de..e14613ba06f87 100755 --- a/HLTrigger/Configuration/test/getHLT.sh +++ b/HLTrigger/Configuration/test/getHLT.sh @@ -1,133 +1,93 @@ -#! /bin/bash +#!/bin/bash # ConfDB configurations to use -MASTER="/dev/CMSSW_12_5_0/HLT" # no explicit version, take the most recent -TARGET="/dev/CMSSW_12_5_0/\$TABLE" # no explicit version, take the most recent - -TABLES="GRun HIon PIon PRef" # $TABLE in the above variable will be expanded to these TABLES - -# print extra messages ? 
-VERBOSE=false - -# this is used for brace expansion -TABLES_=$(echo $TABLES | sed -e's/ \+/,/g') +MASTER="/dev/CMSSW_12_5_0/HLT" # no explicit version, take the most recent +TARGET="/dev/CMSSW_12_5_0/\$TABLE" # no explicit version, take the most recent + +TABLES="GRun HIon PIon PRef" # $TABLE in the above variable will be expanded to these TABLES + +# command-line arguments +VERBOSE=false # print extra messages to stdout +DBPROXYOPTS="" # db-proxy configuration +while [[ $# -gt 0 ]]; do + case "$1" in + -v) VERBOSE=true; shift;; + --dbproxy) DBPROXYOPTS="${DBPROXYOPTS} --dbproxy"; shift;; + --dbproxyhost) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyhost $2"; shift; shift;; + --dbproxyport) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyport $2"; shift; shift;; + *) shift;; + esac +done -[ "$1" == "-v" ] && { VERBOSE=true; shift; } -[ "$1" == "-q" ] && { VERBOSE=false; shift; } +# remove spurious whitespaces and tabs from DBPROXYOPTS +DBPROXYOPTS=$(echo "${DBPROXYOPTS}" | xargs) +# log: print to stdout only if VERBOSE=true function log() { $VERBOSE && echo -e "$@" } -function findHltScript() { - local PACKAGE="HLTrigger/Configuration" - local SCRIPT="$1" - - if [ -f "$SCRIPT" ]; then - echo "./$SCRIPT" - elif [ -f "$CMSSW_BASE/src/$PACKAGE/test/$SCRIPT" ]; then - echo "$CMSSW_BASE/src/$PACKAGE/test/$SCRIPT" - elif [ -f "$CMSSW_RELEASE_BASE/src/$PACKAGE/test/$SCRIPT" ]; then - echo "$CMSSW_RELEASE_BASE/src/$PACKAGE/test/$SCRIPT" +# path to directory hosting this script +TESTDIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd) + +# ensure that directory hosting this script corresponds to ${CMSSW_BASE}/src/HLTrigger/Configuration/test +if [ "${TESTDIR}" != "${CMSSW_BASE}"/src/HLTrigger/Configuration/test ]; then + printf "\n%s\n" "ERROR -- the directory hosting getHLT.sh [1] does not correspond to \${CMSSW_BASE}/src/HLTrigger/Configuration/test [2]" + printf "%s\n" " [1] ${TESTDIR}" + printf "%s\n\n" " [2] ${CMSSW_BASE}/src/HLTrigger/Configuration/test" + exit 1 +fi 
+ +# ensure that the python/ directory hosting cff fragments exists +if [ ! -d "${CMSSW_BASE}"/src/HLTrigger/Configuration/python ]; then + printf "\n%s\n" "ERROR -- the directory \${CMSSW_BASE}/src/HLTrigger/Configuration/python [1] does not exist" + printf "%s\n\n" " [1] ${CMSSW_BASE}/src/HLTrigger/Configuration/python" + exit 1 +fi + +INITDIR="${PWD}" + +# execute the ensuing steps from ${CMSSW_BASE}/src/HLTrigger/Configuration/test +cd "${CMSSW_BASE}"/src/HLTrigger/Configuration/test + +# create cff fragments and cfg configs +for TABLE in FULL ${TABLES}; do + if [ "${TABLE}" = "FULL" ]; then + CONFIG="${MASTER}" else - echo "cannot find $SCRIPT, aborting" - exit 1 + CONFIG=$(eval echo ${TARGET}) fi -} - -GETCONTENT=$(findHltScript getEventContent.py) -GETDATASETS=$(findHltScript getDatasets.py) -function getConfigForCVS() { - local CONFIG="$1" - local NAME="$2" - log " dumping HLT cffs for $NAME from $CONFIG" + echo "${TABLE} (config: ${CONFIG})" - # do not use any conditions or L1 override - hltGetConfiguration --cff --data $CONFIG --type $NAME > HLT_${NAME}_cff.py -} + # cff fragment of each HLT menu (do not use any conditions or L1T override) + log " creating cff fragment of HLT menu..." + hltGetConfiguration "${CONFIG}" --cff --data --type "${TABLE}" ${DBPROXYOPTS} > ../python/HLT_"${TABLE}"_cff.py -function getContentForCVS() { - local CONFIG="$1" - - log " dumping EventContet" - $GETCONTENT $CONFIG - rm -f hltOutput*_cff.py* hltScouting_cff.py* -} - -function getDatasetsForCVS() { - local CONFIG="$1" - local TARGET="$2" - - log " dumping Primary Dataset" - $GETDATASETS $CONFIG > $TARGET -} + # cff fragment of EventContents (only for MASTER config) + if [ "${TABLE}" = "FULL" ]; then + log " creating cff fragment of EventContents..." 
+ ./getEventContent.py "${MASTER}" ${DBPROXYOPTS} > ../python/HLTrigger_EventContent_cff.py + fi -function getConfigForOnline() { - local CONFIG="$1" - local NAME="$2" -# local L1T="tag[,connect]" - record is hardwired as L1GtTriggerMenuRcd - -# local L1TPP="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2012_v3/sqlFile/L1Menu_Collisions2012_v3_mc.db" -# local L1TPP="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc" -# local L1TPP="L1GtTriggerMenu_L1Menu_Collisions2015_25ns_v1_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2015_25ns_v1/sqlFile/L1Menu_Collisions2015_25ns_v1_mc.db" -# local L1THI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2011_v0_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_CollisionsHeavyIons2011_v0/sqlFile/L1Menu_CollisionsHeavyIons2011_v0_mc.db" -# local L1THI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2011_v0_mc" -# local L1THI="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc" -# local L1THI="L1GtTriggerMenu_L1Menu_Collisions2015_25ns_v1_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2015_25ns_v1/sqlFile/L1Menu_Collisions2015_25ns_v1_mc.db" -# local L1TPI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2013_v0_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_CollisionsHeavyIons2013_v0/sqlFile/L1Menu_CollisionsHeavyIons2013_v0_mc.db" -# local L1TPI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2013_v0_mc" -# local L1TPI="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc" -# local L1TPI="L1GtTriggerMenu_L1Menu_Collisions2015_25ns_v1_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2015_25ns_v1/sqlFile/L1Menu_Collisions2015_25ns_v1_mc.db" - - local L1TPP1="" - local L1TPP2="" - - log " dumping full HLT for $NAME from $CONFIG" - # override L1 menus - if [ "$NAME" == "Fake" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process HLT$NAME --globaltag "auto:run1_hlt_${NAME}" 
--input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_$NAME.py - elif [ "$NAME" == "Fake1" ] || [ "$NAME" == "Fake2" ] || [ "$NAME" == "2018" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process HLT$NAME --globaltag "auto:run2_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_$NAME.py - else - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process HLT$NAME --globaltag "auto:run3_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_$NAME.py + # cff fragment of PrimaryDatasets of each HLT menu (except for MASTER config) + if [ "${TABLE}" != "FULL" ]; then + log " creating cff fragment of Primary Datasets..." + ./getDatasets.py "${CONFIG}" ${DBPROXYOPTS} > ../python/HLTrigger_Datasets_"${TABLE}"_cff.py fi -} + # GlobalTag + AUTOGT="auto:run3_hlt_${TABLE}" + if [ "${TABLE}" = "Fake1" ] || [ "${TABLE}" = "Fake2" ] || [ "${TABLE}" = "2018" ]; then + AUTOGT="auto:run2_hlt_${TABLE}" + elif [ "${TABLE}" = "Fake" ]; then + AUTOGT="auto:run1_hlt_${TABLE}" + fi -# make sure we're using *this* working area -eval `scramv1 runtime -sh` -hash -r - -# cff python dumps, in CVS under HLTrigger/Configuration/pyhon -log "Extracting cff python dumps" -echo "Extracting cff python dumps" -FILES=$(eval echo HLT_FULL_cff.py HLT_{$TABLES_}_cff.py HLTrigger_Datasets_{$TABLES_}_cff.py HLTrigger_EventContent_cff.py ) -rm -f $FILES -getConfigForCVS $MASTER FULL -getContentForCVS $MASTER -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - getConfigForCVS $(eval echo $TARGET) $TABLE - getDatasetsForCVS $(eval echo $TARGET) HLTrigger_Datasets_${TABLE}_cff.py + # standalone cfg file of each HLT menu (incl. MASTER config) + log " creating full cfg of HLT menu..." 
+ hltGetConfiguration "${CONFIG}" --full --data --type "${TABLE}" --unprescale --process "HLT${TABLE}" --globaltag "${AUTOGT}" \ + --input "file:RelVal_Raw_${TABLE}_DATA.root" ${DBPROXYOPTS} > OnLine_HLT_"${TABLE}".py done -log "Done" -log "$(ls -l $FILES)" -mv -f $FILES ../python/ -log - -# full config dumps, in CVS under HLTrigger/Configuration/test -log "Extracting full configuration dumps" -echo "Extracting full configuration dumps" -FILES=$(eval echo OnLine_HLT_FULL.py OnLine_HLT_{$TABLES_}.py) -rm -f $FILES -getConfigForOnline $MASTER FULL -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - getConfigForOnline $(eval echo $TARGET) $TABLE -done -log "Done" -log "$(ls -l $FILES)" -log + +cd "${INITDIR}"