From 46a9f8115cd89982eb1996799966bcf667e95697 Mon Sep 17 00:00:00 2001 From: Marino Missiroli Date: Mon, 7 Nov 2022 15:22:17 +0100 Subject: [PATCH] enable use of dbproxy in more HLT-dev tools --- .../Configuration/scripts/hltGetConfiguration | 24 +- HLTrigger/Configuration/tables/subtables.sh | 70 +-- HLTrigger/Configuration/test/getDatasets.py | 157 ++++--- .../Configuration/test/getEventContent.py | 434 +++++++++++------- HLTrigger/Configuration/test/getFrozenHLT.sh | 118 +++-- HLTrigger/Configuration/test/getHLT.sh | 190 +++----- 6 files changed, 549 insertions(+), 444 deletions(-) diff --git a/HLTrigger/Configuration/scripts/hltGetConfiguration b/HLTrigger/Configuration/scripts/hltGetConfiguration index 39da6311cafcc..6157019b9f761 100755 --- a/HLTrigger/Configuration/scripts/hltGetConfiguration +++ b/HLTrigger/Configuration/scripts/hltGetConfiguration @@ -1,11 +1,10 @@ #!/usr/bin/env python3 - -from __future__ import print_function -import sys, os +import sys +import os import argparse -from HLTrigger.Configuration.extend_argparse import * -import HLTrigger.Configuration.Tools.confdb as confdb +from HLTrigger.Configuration.extend_argparse import * +import HLTrigger.Configuration.Tools.confdb as confdb import HLTrigger.Configuration.Tools.options as options # define an argparse parser to parse our options @@ -243,8 +242,19 @@ parser.add_argument('--help', # parse command line arguments and options config = parser.parse_args(namespace = options.HLTProcessOptions()) -cmdArgs = sys.argv -cmdArgs[0] = os.path.basename(sys.argv[0]) +# do not include db-proxy options in 1st-line comment +cmdArgs, skipNext = [], False +for cmdArg in sys.argv: + if skipNext: + skipNext = False + continue + if cmdArg.startswith('--dbproxy'): + if cmdArg.startswith('--dbproxyh') or cmdArg.startswith('--dbproxyp'): + skipNext = '=' not in cmdArg + continue + cmdArgs += [cmdArg] +cmdArgs[0] = os.path.basename(cmdArgs[0]) + cmdLine = ' '.join(cmdArgs) print('# ' + cmdLine) print() diff 
--git a/HLTrigger/Configuration/tables/subtables.sh b/HLTrigger/Configuration/tables/subtables.sh index 94ce445db5f6b..fea75588a9918 100755 --- a/HLTrigger/Configuration/tables/subtables.sh +++ b/HLTrigger/Configuration/tables/subtables.sh @@ -1,8 +1,21 @@ -#! /bin/bash +#!/bin/bash # # utility functions used to generate HLT tables from master table in ConfDB # +# db-proxy configuration +DBPROXY="" +DBPROXYHOST="localhost" +DBPROXYPORT="8080" +while [[ $# -gt 0 ]]; do + case "$1" in + --dbproxy) DBPROXY="--dbproxy"; shift;; + --dbproxyhost) DBPROXYHOST="$2"; shift; shift;; + --dbproxyport) DBPROXYPORT="$2"; shift; shift;; + *) shift;; + esac +done + # load common HLT functions if [ -f "$CMSSW_BASE/src/HLTrigger/Configuration/common/utils.sh" ]; then source "$CMSSW_BASE/src/HLTrigger/Configuration/common/utils.sh" @@ -31,12 +44,14 @@ function cleanup() { } function getPathList() { - local DATA=$(hltConfigFromDB --$Vx --$DB --cff --configName $MASTER --noedsources --noes --noservices --nosequences --nomodules) - if echo "$DATA" | grep -q 'Exhausted Resultset\|CONFIG_NOT_FOUND'; then + [ "x${DBPROXY}" = "x" ] || local DBPROXYOPTS="${DBPROXY} --dbproxyhost ${DBPROXYHOST} --dbproxyport ${DBPROXYPORT}" + local DATA=$(hltConfigFromDB --${Vx} --${DB} --cff --configName ${MASTER} \ + --noedsources --noes --noservices --nosequences --nomodules ${DBPROXYOPTS}) + if echo "${DATA}" | grep -q 'Exhausted Resultset\|CONFIG_NOT_FOUND'; then echo "Error: $MASTER is not a valid HLT menu" exit 1 fi - echo "$DATA" | sed -ne's/ *= *cms.\(Final\|End\)\?Path.*//p' + echo "${DATA}" | sed -ne's/ *= *cms.\(Final\|End\)\?Path.*//p' } function checkJars() { @@ -73,7 +88,7 @@ function makeCreateConfig() { if [ -f $workDir/$JAR ]; then continue fi - # download to a temporay file and use an atomic move (in case an other istance is downloading the same file + # download to a temporary file and use an atomic move (in case another instance is downloading the same file) local TMPJAR=$(mktemp -p 
"$workDir" .${JAR}.XXXXXXXXXX) curl -s -L "$baseUrl/$JAR" -o "$TMPJAR" mv -n "$TMPJAR" "$workDir/$JAR" @@ -89,61 +104,60 @@ function makeCreateConfig() { function loadConfiguration() { case "$1" in + # v1 offline aka "hltdev" "v1/offline" | "v1/hltdev") - # v1 offline aka "hltdev" - DBHOST="cmsr1-v.cern.ch" + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_cond.cern.ch" DBUSER="cms_hltdev_writer" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; + # v2 offline "v2/offline") - # v2 offline - DBHOST="cmsr1-v.cern.ch" + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_cond.cern.ch" DBUSER="cms_hlt_gdr_w" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; - "v3/run3") - # v3 run3 - DBHOST="cmsr1-s.cern.ch" + # converter=v3*, db=run3 + "v3/run3" | "v3-beta/run3" | "v3-test/run3") + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_hlt.cern.ch" DBUSER="cms_hlt_v3_w" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; - "v3-test/dev") - # v3-test dev - DBHOST="cmsr1-s.cern.ch" - DBNAME="cms_hlt.cern.ch" - DBUSER="cms_hlt_gdrdev_w" - PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" - ;; - "v3/dev") - # v3 dev - DBHOST="cmsr1-s.cern.ch" + # converter=v3*, db=dev + "v3/dev" | "v3-beta/dev" | "v3-test/dev") + DBHOST="cmsr1-s.cern.ch,cmsr2-s.cern.ch,cmsr3-s.cern.ch" + [ "x${DBPROXY}" = "x" ] || DBHOST="10.116.96.89,10.116.96.139,10.116.96.105" DBNAME="cms_hlt.cern.ch" DBUSER="cms_hlt_gdrdev_w" PWHASH="0196d34dd35b04c0f3597dc89fbbe6e2" ;; *) # see https://github.com/fwyzard/hlt-confdb/blob/confdbv2/test/runCreateConfig - echo "Error, unnown database \"$1\", exiting." + echo "Error, unknown database \"$1\", exiting." 
exit 1 ;; esac } function runCreateConfig() { + [ "x${DBPROXY}" = "x" ] || local DBPROXYOPTS="-DsocksProxyHost=${DBPROXYHOST} -DsocksProxyPort=${DBPROXYPORT}" loadConfiguration "$1" java \ -Djava.security.egd=file:///dev/urandom \ -Doracle.jdbc.timezoneAsRegion=false \ + ${DBPROXYOPTS} \ -Xss32M \ -Xmx1024m \ - -classpath "$CLASSPATH" \ + -classpath "${CLASSPATH}" \ confdb.db.ConfDBCreateConfig \ - --dbHost $DBHOST \ - --dbName $DBNAME \ - --dbUser $DBUSER \ + --dbHost "${DBHOST}" \ + --dbName "${DBNAME}" \ + --dbUser "${DBUSER}" \ --dbPwrd $2 \ --master $3 \ --paths $4 \ @@ -219,7 +233,7 @@ function createSubtables() { # ask the user for the database password readPassword - # make sure the needed sripts are available + # make sure the needed scripts are available makeCreateConfig # extract each subtable diff --git a/HLTrigger/Configuration/test/getDatasets.py b/HLTrigger/Configuration/test/getDatasets.py index e9a551f485714..90cb1c08428f7 100755 --- a/HLTrigger/Configuration/test/getDatasets.py +++ b/HLTrigger/Configuration/test/getDatasets.py @@ -1,72 +1,107 @@ -#! 
/usr/bin/env python3 - -import sys +#!/usr/bin/env python3 +"""getDatasets.py: create Datasets-cff file of an HLT configuration from the ConfDB database +""" +import argparse import subprocess -import types +import os import re -import FWCore.ParameterSet.Config as cms - -def extractDatasets(version, database, config): - # dump the streams and Datasets from the HLT configuration - proc = subprocess.Popen( - "hltConfigFromDB --%s --%s --configName %s --nopsets --noedsources --noes --noservices --nooutput --nopaths" % (version, database, config), - shell = True, - stdin = None, - stdout = subprocess.PIPE, - stderr = None, - ) - (out, err) = proc.communicate() - - # load the streams and Datasets - hlt = types.ModuleType('hlt') - exec(out, globals(), hlt.__dict__) - return hlt.process +import FWCore.ParameterSet.Config as cms +import HLTrigger.Configuration.Tools.pipe as pipe +import HLTrigger.Configuration.Tools.options as options + +def getHLTProcess(config): + '''return cms.Process containing Streams and Datasets of the HLT configuration + ''' + # cmd-line args to select HLT configuration + if config.menu.run: + configline = f'--runNumber {config.menu.run}' + else: + configline = f'--{config.menu.database} --{config.menu.version} --configName {config.menu.name}' + + # cmd to download HLT configuration + cmdline = f'hltConfigFromDB {configline} --noedsources --noes --noservices --nopsets --nooutput --nopaths' + if config.proxy: + cmdline += f' --dbproxy --dbproxyhost {config.proxy_host} --dbproxyport {config.proxy_port}' + + # load HLT configuration + try: + foo = {'process': None} + exec(pipe.pipe(cmdline).decode(), foo) + process = foo['process'] + except: + raise Exception(f'query did not return a valid python file:\n query="{cmdline}"') + + if not isinstance(process, cms.Process): + raise Exception(f'query did not return a valid HLT menu:\n query="{cmdline}"') + + return process + +### +### main +### +if __name__ == '__main__': + + # defaults of cmd-line 
arguments + defaults = options.HLTProcessOptions() + + parser = argparse.ArgumentParser( + prog = './'+os.path.basename(__file__), + formatter_class = argparse.RawDescriptionHelpFormatter, + description = __doc__) + + # required argument + parser.add_argument('menu', + action = 'store', + type = options.ConnectionHLTMenu, + metavar = 'MENU', + help = 'HLT menu to dump from the database. Supported formats are:\n - /path/to/configuration[/Vn]\n - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn]\n - run:runnumber\nThe possible converters are "v1", "v2, and "v3" (default).\nThe possible databases are "run3" (default, used for offline development), "run2" (used for accessing run2 offline development menus), "online" (used to extract online menus within Point 5) and "adg" (used to extract the online menus outside Point 5).\nIf no menu version is specified, the latest one is automatically used.\nIf "run:" is used instead, the HLT menu used for the given run number is looked up and used.\nNote other converters and databases exist as options but they are only for expert/special use.' 
) + + # options + parser.add_argument('--dbproxy', + dest = 'proxy', + action = 'store_true', + default = defaults.proxy, + help = 'Use a socks proxy to connect outside CERN network (default: False)' ) + parser.add_argument('--dbproxyport', + dest = 'proxy_port', + action = 'store', + metavar = 'PROXYPORT', + default = defaults.proxy_port, + help = 'Port of the socks proxy (default: 8080)' ) + parser.add_argument('--dbproxyhost', + dest = 'proxy_host', + action = 'store', + metavar = 'PROXYHOST', + default = defaults.proxy_host, + help = 'Host of the socks proxy (default: "localhost")' ) + + # parse command line arguments and options + config = parser.parse_args() + + process = getHLTProcess(config) + + print('''# %s +import FWCore.ParameterSet.Config as cms +''' % config.menu.name) -def dumpDataset(process, stream, dataset): - if dataset in process.datasets.__dict__: - name = 'stream%s_dataset%s_selector' % (stream, dataset) - dump = '''from HLTrigger.HLTfilters.triggerResultsFilter_cfi import triggerResultsFilter as %s + for stream in sorted(process.streams.__dict__): + if re.match(r'^(Physics|Parking)', stream): + print(''' +# stream %s +''' % stream) + ds = sorted(process.streams.__dict__[stream]) + for dataset in ds: + if dataset in process.datasets.__dict__: + name = 'stream%s_dataset%s_selector' % (stream, dataset) + dump = '''from HLTrigger.HLTfilters.triggerResultsFilter_cfi import triggerResultsFilter as %s %s.hltResults = cms.InputTag('TriggerResults', '', 'HLT') %s.l1tResults = cms.InputTag('') %s.throw = cms.bool(False) %s.triggerConditions = %s - ''' % (name, name, name, name, name, process.datasets.__dict__[dataset]) - else: - dump = '''# dataset %s not found - + else: + dump = '''# dataset %s not found ''' % (dataset, ) - return dump - - -# split a "[version/]db:name" configuration into a (version, db, name) tuple -def splitConfigName(configName): - from HLTrigger.Configuration.Tools.options import ConnectionHLTMenu - menu = 
ConnectionHLTMenu(configName) - return (menu.version, menu.database, menu.name) - - -# get the configuration to parse and the file where to output the stream definitions from the command line -config = sys.argv[1] - -# dump the expanded event content configurations to a python configuration fragment -config = splitConfigName(config) -process = extractDatasets(* config) - -sys.stdout.write('''# %s - -import FWCore.ParameterSet.Config as cms - -''' % config[2] ) - -for stream in sorted(process.streams.__dict__): - if re.match(r'^Physics|Parking', stream): - sys.stdout.write(''' -# stream %s - -''' % stream) - ds = sorted(process.streams.__dict__[stream]) - for dataset in ds: - sys.stdout.write(dumpDataset(process, stream, dataset)) + print(dump) diff --git a/HLTrigger/Configuration/test/getEventContent.py b/HLTrigger/Configuration/test/getEventContent.py index 22095be802144..afd781b120c9c 100755 --- a/HLTrigger/Configuration/test/getEventContent.py +++ b/HLTrigger/Configuration/test/getEventContent.py @@ -1,35 +1,58 @@ -#! 
/usr/bin/env python3 - -import sys +#!/usr/bin/env python3 +"""getEventContent.py: print EventContent cff fragment of a ConfDB configuration +""" +import argparse import subprocess +import os +import re + import FWCore.ParameterSet.Config as cms +import HLTrigger.Configuration.Tools.pipe as pipe +import HLTrigger.Configuration.Tools.options as options + +def getHLTProcessBlocks(config, blocks): + """return cms.Process containing the OutputModules of the HLT configuration + """ + # cmd-line args to select HLT configuration + if config.menu.run: + configline = f'--runNumber {config.menu.run}' + else: + configline = f'--{config.menu.database} --{config.menu.version} --configName {config.menu.name}' -config = sys.argv[1] - -def extractBlock(config, blocks, target): - #print 'configuration: %s' % config - #print 'blocks: %s' % ', '.join(blocks) - #print 'target: %s' % target - #print - commands = ','.join( block + '::outputCommands' for block in blocks ) - proc = subprocess.Popen( - "hltConfigFromDB --configName %s --noedsources --nopaths --noes --nopsets --noservices --cff --blocks %s --format python | sed -e'/^streams/,/^)/d' -e'/^datasets/,/^)/d' > %s" % (config, commands, target), - shell = True, - stdin = None, - stdout = None, - stderr = None, - ) - proc.wait() - -def extractBlocks(config): - outputA = [ 'hltOutputA', 'hltOutputPhysicsCommissioning' ] - outputALCA = [ 'hltOutputALCAPHISYM', 'hltOutputALCAP0', 'hltOutputALCAPPSExpress', 'hltOutputALCAPPSPrompt', 'hltOutputALCALumiPixelsCountsExpress', 'hltOutputALCALumiPixelsCountsPrompt', 'hltOutputRPCMON' ] - outputMON = [ 'hltOutputA', 'hltOutputPhysicsCommissioning', 'hltOutputDQM', 'hltOutputDQMGPUvsCPU', 'hltOutputHLTMonitor', 'hltOutputReleaseValidation' ] - outputScouting = [ 'hltOutputScoutingPF' ] - extractBlock(config, outputA, 'hltOutputA_cff.py') - extractBlock(config, outputALCA, 'hltOutputALCA_cff.py') - extractBlock(config, outputMON, 'hltOutputMON_cff.py') - extractBlock(config, outputScouting, 
'hltScouting_cff.py') + # cmd to download HLT configuration + cmdline = f'hltConfigFromDB {configline}' + if config.proxy: + cmdline += f' --dbproxy --dbproxyhost {config.proxy_host} --dbproxyport {config.proxy_port}' + + cmdline += ' --noedsources --noes --nopsets --noservices --nopaths --format python' + cmdline += ' --blocks '+','.join({foo+'::outputCommands' for foo in blocks}) + + # load HLT configuration + try: + foo = {} + exec(pipe.pipe(cmdline).decode(), foo) + except: + raise Exception(f'query did not return a valid python file:\n query="{cmdline}"') + + ret = {} + for block in blocks: + key = 'block_'+block + ret[key] = foo[key] if key in foo else None + if ret[key] != None and not isinstance(ret[key], cms.PSet): + raise Exception(f'query did not return valid HLT blocks:\n query="{cmdline}"') + + return ret + +def getHLTProcessBlockGroups(config, blockGroupDict): + ret = {} + blockDict = getHLTProcessBlocks(config, {bar for foo in blockGroupDict.values() for bar in foo}) + for groupName in blockGroupDict: + ret[groupName] = cms.PSet() + for blockKey in blockGroupDict[groupName]: + blockName = 'block_'+blockKey + if blockDict[blockName] != None: + setattr(ret[groupName], blockName, blockDict[blockName]) + return ret def makePSet(statements): statements = sorted(statements) @@ -65,7 +88,6 @@ def buildPSetWithoutRAWs(blocks): statements.update( statement for statement in block if statement.find('drop') != 0 and statement.find('keep FEDRawDataCollection') != 0) return makePSet(statements) - # customisation of AOD event content, requested by David Dagenhart def dropL1GlobalTriggerObjectMapRecord(block): """drop the old L1GlobalTriggerObjectMapRecord data format from the block (meant for the AOD data tier)""" @@ -78,141 +100,164 @@ def dropL1GlobalTriggerObjectMapRecord(block): # add just after it a drop statement for the old data format block.outputCommands.insert(position + 1, 'drop L1GlobalTriggerObjectMapRecord_hltL1GtObjectMap_*_*') +def 
printHLTriggerEventContentCff(process): + + blockGroups = getHLTProcessBlockGroups(config, { + 'hltOutputA_cff': [ + 'hltOutputA', + 'hltOutputPhysicsCommissioning', + ], + 'hltOutputALCA_cff': [ + 'hltOutputALCAPHISYM', + 'hltOutputALCAP0', + 'hltOutputALCAPPSExpress', + 'hltOutputALCAPPSPrompt', + 'hltOutputALCALumiPixelsCountsExpress', + 'hltOutputALCALumiPixelsCountsPrompt', + 'hltOutputRPCMON', + ], + 'hltOutputMON_cff': [ + 'hltOutputA', + 'hltOutputPhysicsCommissioning', + 'hltOutputDQM', + 'hltOutputDQMGPUvsCPU', + 'hltOutputHLTMonitor', + 'hltOutputReleaseValidation', + ], + 'hltOutputScouting_cff': [ + 'hltOutputScoutingPF', + ], + }) + + hltOutputA_cff = blockGroups['hltOutputA_cff'] + hltOutputALCA_cff = blockGroups['hltOutputALCA_cff'] + hltOutputMON_cff = blockGroups['hltOutputMON_cff'] + hltOutputScouting_cff = blockGroups['hltOutputScouting_cff'] + + # hltDebugOutput + + if not hasattr(hltOutputMON_cff,'block_hltOutputA'): + hltOutputMON_cff.block_hltOutputA = hltOutputMON_cff.block_hltOutputPhysicsCommissioning + if not hasattr(hltOutputMON_cff,'block_hltOutputDQM'): + hltOutputMON_cff.block_hltOutputDQM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputMON_cff,'block_hltOutputDQMGPUvsCPU'): + hltOutputMON_cff.block_hltOutputDQMGPUvsCPU = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputMON_cff,'block_hltOutputHLTMonitor'): + hltOutputMON_cff.block_hltOutputHLTMonitor = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *_hlt*_*_*' )) + if not hasattr(hltOutputMON_cff,'block_hltOutputReleaseValidation'): + hltOutputMON_cff.block_hltOutputReleaseValidation = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + + hltDebugOutputBlocks = ( + # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online + hltOutputMON_cff.block_hltOutputA.outputCommands, + hltOutputMON_cff.block_hltOutputDQM.outputCommands, + 
hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, + hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, + hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, + ) + hltDebugOutputContent = buildPSet(hltDebugOutputBlocks) + + # hltDebugWithAlCaOutput + + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPHISYM'): + hltOutputALCA_cff.block_hltOutputALCAPHISYM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAP0'): + hltOutputALCA_cff.block_hltOutputALCAP0 = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSExpress'): + hltOutputALCA_cff.block_hltOutputALCAPPSExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSPrompt'): + hltOutputALCA_cff.block_hltOutputALCAPPSPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsExpress'): + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsPrompt'): + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + if not hasattr(hltOutputALCA_cff,'block_hltOutputRPCMON'): + hltOutputALCA_cff.block_hltOutputRPCMON = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + hltDebugWithAlCaOutputBlocks = ( + # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online + hltOutputMON_cff.block_hltOutputA.outputCommands, + hltOutputMON_cff.block_hltOutputDQM.outputCommands, + hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, + hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, + hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, + # the ALCA streams 
have the AlCa outputs + hltOutputALCA_cff.block_hltOutputALCAPHISYM.outputCommands, + hltOutputALCA_cff.block_hltOutputALCAP0.outputCommands, + hltOutputALCA_cff.block_hltOutputALCAPPSExpress.outputCommands, + hltOutputALCA_cff.block_hltOutputALCAPPSPrompt.outputCommands, + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress.outputCommands, + hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt.outputCommands, + hltOutputALCA_cff.block_hltOutputRPCMON.outputCommands, + ) + hltDebugWithAlCaOutputContent = buildPSet(hltDebugWithAlCaOutputBlocks) + + # hltScoutingOutput + + if not hasattr(hltOutputScouting_cff,'block_hltOutputScoutingPF'): + hltOutputScouting_cff.block_hltOutputScoutingPF = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) + + hltScoutingOutputBlocks = ( + # the Scouting streams have the Scouting outputs + hltOutputScouting_cff.block_hltOutputScoutingPF.outputCommands, + ) + hltScoutingOutputContent = buildPSetNoDrop(hltScoutingOutputBlocks) + + # hltDefaultOutput + if not hasattr(hltOutputA_cff,'block_hltOutputA'): + hltOutputA_cff.block_hltOutputA = hltOutputA_cff.block_hltOutputPhysicsCommissioning + hltDefaultOutputBlocks = ( + # the A stream has the HLT default output, with FEDs - strip out the FEDRawDataCollection keep statements for hltDefaultOutput + hltOutputA_cff.block_hltOutputA.outputCommands, + ) + hltDefaultOutputContent = buildPSetWithoutRAWs(hltDefaultOutputBlocks) + hltDefaultOutputWithFEDsContent = buildPSet(hltDefaultOutputBlocks) -# extract the HLT layer event content -extractBlocks( config ) -import hltOutputA_cff -import hltOutputALCA_cff -import hltOutputMON_cff -import hltScouting_cff - -# hltDebugOutput - -if not hasattr(hltOutputMON_cff,'block_hltOutputA'): - hltOutputMON_cff.block_hltOutputA = hltOutputMON_cff.block_hltOutputPhysicsCommissioning -if not hasattr(hltOutputMON_cff,'block_hltOutputDQM'): - hltOutputMON_cff.block_hltOutputDQM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' 
)) -if not hasattr(hltOutputMON_cff,'block_hltOutputDQMGPUvsCPU'): - hltOutputMON_cff.block_hltOutputDQMGPUvsCPU = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputMON_cff,'block_hltOutputHLTMonitor'): - hltOutputMON_cff.block_hltOutputHLTMonitor = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *_hlt*_*_*' )) -if not hasattr(hltOutputMON_cff,'block_hltOutputReleaseValidation'): - hltOutputMON_cff.block_hltOutputReleaseValidation = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) - -hltDebugOutputBlocks = ( - # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online - hltOutputMON_cff.block_hltOutputA.outputCommands, - hltOutputMON_cff.block_hltOutputDQM.outputCommands, - hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, - hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, - hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, -) -hltDebugOutputContent = buildPSet(hltDebugOutputBlocks) - - -# hltDebugWithAlCaOutput -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPHISYM'): - hltOutputALCA_cff.block_hltOutputALCAPHISYM = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAP0'): - hltOutputALCA_cff.block_hltOutputALCAP0 = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSExpress'): - hltOutputALCA_cff.block_hltOutputALCAPPSExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCAPPSPrompt'): - hltOutputALCA_cff.block_hltOutputALCAPPSPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsExpress'): - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not 
hasattr(hltOutputALCA_cff,'block_hltOutputALCALumiPixelsCountsPrompt'): - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -if not hasattr(hltOutputALCA_cff,'block_hltOutputRPCMON'): - hltOutputALCA_cff.block_hltOutputRPCMON = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) -hltDebugWithAlCaOutputBlocks = ( - # the DQM, DQMGPUvsCPU and HLTMON streams have the HLT debug outputs used online - hltOutputMON_cff.block_hltOutputA.outputCommands, - hltOutputMON_cff.block_hltOutputDQM.outputCommands, - hltOutputMON_cff.block_hltOutputDQMGPUvsCPU.outputCommands, - hltOutputMON_cff.block_hltOutputHLTMonitor.outputCommands, - hltOutputMON_cff.block_hltOutputReleaseValidation.outputCommands, - # the ALCA streams have the AlCa outputs - hltOutputALCA_cff.block_hltOutputALCAPHISYM.outputCommands, - hltOutputALCA_cff.block_hltOutputALCAP0.outputCommands, - hltOutputALCA_cff.block_hltOutputALCAPPSExpress.outputCommands, - hltOutputALCA_cff.block_hltOutputALCAPPSPrompt.outputCommands, - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsExpress.outputCommands, - hltOutputALCA_cff.block_hltOutputALCALumiPixelsCountsPrompt.outputCommands, - hltOutputALCA_cff.block_hltOutputRPCMON.outputCommands, -) -hltDebugWithAlCaOutputContent = buildPSet(hltDebugWithAlCaOutputBlocks) - -# hltScoutingOutput - -if not hasattr(hltScouting_cff,'block_hltOutputScoutingPF'): - hltScouting_cff.block_hltOutputScoutingPF = cms.PSet(outputCommands = cms.untracked.vstring( 'drop *' )) - -hltScoutingOutputBlocks = ( - # the Scouting streams have the Scouting outputs - hltScouting_cff.block_hltOutputScoutingPF.outputCommands, -) -hltScoutingOutputContent = buildPSetNoDrop(hltScoutingOutputBlocks) - - -# hltDefaultOutput -if not hasattr(hltOutputA_cff,'block_hltOutputA'): - hltOutputA_cff.block_hltOutputA = hltOutputA_cff.block_hltOutputPhysicsCommissioning -hltDefaultOutputBlocks = ( - # the A stream has the HLT default 
output, with FEDs - strip out the FEDRawDataCollection keep statements for hltDefaultOutput - hltOutputA_cff.block_hltOutputA.outputCommands, -) -hltDefaultOutputContent = buildPSetWithoutRAWs(hltDefaultOutputBlocks) -hltDefaultOutputWithFEDsContent = buildPSet(hltDefaultOutputBlocks) - - -# define the CMSSW default event content configurations - -# RAW event content -HLTriggerRAW = cms.PSet( - outputCommands = cms.vstring() -) -HLTriggerRAW.outputCommands.extend(hltDefaultOutputWithFEDsContent.outputCommands) -HLTriggerRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# RECO event content -HLTriggerRECO = cms.PSet( - outputCommands = cms.vstring() -) -HLTriggerRECO.outputCommands.extend(hltDefaultOutputContent.outputCommands) -HLTriggerRECO.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# AOD event content -HLTriggerAOD = cms.PSet( - outputCommands = cms.vstring() -) -HLTriggerAOD.outputCommands.extend(hltDefaultOutputContent.outputCommands) -HLTriggerAOD.outputCommands.extend(hltScoutingOutputContent.outputCommands) -dropL1GlobalTriggerObjectMapRecord(HLTriggerAOD) - -# HLTDEBUG RAW event content -HLTDebugRAW = cms.PSet( - outputCommands = cms.vstring() -) -HLTDebugRAW.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) -HLTDebugRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# HLTDEBUG FEVT event content -HLTDebugFEVT = cms.PSet( - outputCommands = cms.vstring() -) -HLTDebugFEVT.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) -HLTDebugFEVT.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# Scouting event content -HLTScouting = cms.PSet( - outputCommands = cms.vstring() -) -HLTScouting.outputCommands.extend(hltScoutingOutputContent.outputCommands) - -# dump the expanded event content configurations to a python configuration fragment -dump = open('HLTrigger_EventContent_cff.py', 'w') -dump.write('''import FWCore.ParameterSet.Config as cms + # define 
the CMSSW default event content configurations + + # RAW event content + HLTriggerRAW = cms.PSet( + outputCommands = cms.vstring() + ) + HLTriggerRAW.outputCommands.extend(hltDefaultOutputWithFEDsContent.outputCommands) + HLTriggerRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # RECO event content + HLTriggerRECO = cms.PSet( + outputCommands = cms.vstring() + ) + HLTriggerRECO.outputCommands.extend(hltDefaultOutputContent.outputCommands) + HLTriggerRECO.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # AOD event content + HLTriggerAOD = cms.PSet( + outputCommands = cms.vstring() + ) + HLTriggerAOD.outputCommands.extend(hltDefaultOutputContent.outputCommands) + HLTriggerAOD.outputCommands.extend(hltScoutingOutputContent.outputCommands) + dropL1GlobalTriggerObjectMapRecord(HLTriggerAOD) + + # HLTDEBUG RAW event content + HLTDebugRAW = cms.PSet( + outputCommands = cms.vstring() + ) + HLTDebugRAW.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) + HLTDebugRAW.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # HLTDEBUG FEVT event content + HLTDebugFEVT = cms.PSet( + outputCommands = cms.vstring() + ) + HLTDebugFEVT.outputCommands.extend(hltDebugWithAlCaOutputContent.outputCommands) + HLTDebugFEVT.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # Scouting event content + HLTScouting = cms.PSet( + outputCommands = cms.vstring() + ) + HLTScouting.outputCommands.extend(hltScoutingOutputContent.outputCommands) + + # print the expanded event content configurations to stdout + print('''import FWCore.ParameterSet.Config as cms # EventContent for HLT related products. 
@@ -222,12 +267,55 @@ def dropL1GlobalTriggerObjectMapRecord(block): # HLTScouting (with Scouting products) # # as these are used in Configuration/EventContent -# -''') -dump.write('HLTriggerRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRAW.outputCommands)) -dump.write('HLTriggerRECO = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRECO.outputCommands)) -dump.write('HLTriggerAOD = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerAOD.outputCommands)) -dump.write('HLTDebugRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugRAW.outputCommands)) -dump.write('HLTDebugFEVT = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugFEVT.outputCommands)) -dump.write('HLTScouting = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTScouting.outputCommands)) -dump.close() +#''') + print('HLTriggerRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRAW.outputCommands)) + print('HLTriggerRECO = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerRECO.outputCommands)) + print('HLTriggerAOD = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTriggerAOD.outputCommands)) + print('HLTDebugRAW = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugRAW.outputCommands)) + print('HLTDebugFEVT = cms.PSet(\n outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTDebugFEVT.outputCommands)) + print('HLTScouting = cms.PSet(\n 
 outputCommands = cms.vstring( *(\n%s\n ) )\n)\n' % ',\n'.join( ' \'%s\'' % keep for keep in HLTScouting.outputCommands)) + +### +### main +### +if __name__ == '__main__': + + # defaults of cmd-line arguments + defaults = options.HLTProcessOptions() + + parser = argparse.ArgumentParser( + prog = './'+os.path.basename(__file__), + formatter_class = argparse.RawDescriptionHelpFormatter, + description = __doc__ + ) + + # required argument + parser.add_argument('menu', + action = 'store', + type = options.ConnectionHLTMenu, + metavar = 'MENU', + help = 'HLT menu to dump from the database. Supported formats are:\n - /path/to/configuration[/Vn]\n - [[{v1|v2|v3}/]{run3|run2|online|adg}:]/path/to/configuration[/Vn]\n - run:runnumber\nThe possible converters are "v1", "v2", and "v3" (default).\nThe possible databases are "run3" (default, used for offline development), "run2" (used for accessing run2 offline development menus), "online" (used to extract online menus within Point 5) and "adg" (used to extract the online menus outside Point 5).\nIf no menu version is specified, the latest one is automatically used.\nIf "run:" is used instead, the HLT menu used for the given run number is looked up and used.\nNote other converters and databases exist as options but they are only for expert/special use.' 
) + + # options + parser.add_argument('--dbproxy', + dest = 'proxy', + action = 'store_true', + default = defaults.proxy, + help = 'Use a socks proxy to connect outside CERN network (default: False)' ) + parser.add_argument('--dbproxyport', + dest = 'proxy_port', + action = 'store', + metavar = 'PROXYPORT', + default = defaults.proxy_port, + help = 'Port of the socks proxy (default: 8080)' ) + parser.add_argument('--dbproxyhost', + dest = 'proxy_host', + action = 'store', + metavar = 'PROXYHOST', + default = defaults.proxy_host, + help = 'Host of the socks proxy (default: "localhost")' ) + + # parse command line arguments and options + config = parser.parse_args() + + printHLTriggerEventContentCff(config) diff --git a/HLTrigger/Configuration/test/getFrozenHLT.sh b/HLTrigger/Configuration/test/getFrozenHLT.sh index d5ab2c3b5a245..023d229bda332 100755 --- a/HLTrigger/Configuration/test/getFrozenHLT.sh +++ b/HLTrigger/Configuration/test/getFrozenHLT.sh @@ -1,4 +1,4 @@ -#! /bin/bash +#!/bin/bash # ConfDB configurations to use TABLES="Fake Fake1 Fake2 2022v14" @@ -7,73 +7,71 @@ HLT_Fake1="/dev/CMSSW_12_5_0/Fake1" HLT_Fake2="/dev/CMSSW_12_5_0/Fake2" HLT_2022v14="/frozen/2022/2e34/v1.4/CMSSW_12_5_X/HLT" -# print extra messages ? 
-VERBOSE=false - -# this is used for brace expansion -TABLES_=$(echo $TABLES | sed -e's/ \+/,/g') +# command-line arguments +VERBOSE=false # print extra messages to stdout +DBPROXYOPTS="" # db-proxy configuration +while [[ $# -gt 0 ]]; do + case "$1" in + -v) VERBOSE=true; shift;; + --dbproxy) DBPROXYOPTS="${DBPROXYOPTS} --dbproxy"; shift;; + --dbproxyhost) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyhost $2"; shift; shift;; + --dbproxyport) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyport $2"; shift; shift;; + *) shift;; + esac +done -[ "$1" == "-v" ] && { VERBOSE=true; shift; } -[ "$1" == "-q" ] && { VERBOSE=false; shift; } +# remove spurious whitespaces and tabs from DBPROXYOPTS +DBPROXYOPTS=$(echo "${DBPROXYOPTS}" | xargs) +# log: print to stdout only if VERBOSE=true function log() { - $VERBOSE && echo -e "$@" + ${VERBOSE} && echo -e "$@" } -function getConfigForCVS() { - local CONFIG="$1" - local NAME="$2" - log " dumping HLT cffs for $NAME from $CONFIG" - # do not use any conditions or L1 override - hltGetConfiguration --cff --data $CONFIG --type $NAME > HLT_${NAME}_cff.py -} +# path to directory hosting this script +TESTDIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd) -function getConfigForOnline() { - local CONFIG="$1" - local NAME="$2" - log " dumping full HLT for $NAME from $CONFIG" - # override the conditions with a menu-dependent "virtual" global tag, which takes care of overriding the L1 menu +# ensure that directory hosting this script corresponds to ${CMSSW_BASE}/src/HLTrigger/Configuration/test +if [ "${TESTDIR}" != "${CMSSW_BASE}"/src/HLTrigger/Configuration/test ]; then + printf "\n%s\n" "ERROR -- the directory hosting getFrozenHLT.sh [1] does not correspond to \${CMSSW_BASE}/src/HLTrigger/Configuration/test [2]" + printf "%s\n" " [1] ${TESTDIR}" + printf "%s\n\n" " [2] ${CMSSW_BASE}/src/HLTrigger/Configuration/test" + exit 1 +fi - if [ "$NAME" == "Fake" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process 
"HLT${NAME}" --globaltag "auto:run1_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_${NAME}.py - elif [ "$NAME" == "Fake1" ] || [ "$NAME" == "Fake2" ] || [ "$NAME" == "2018" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process "HLT${NAME}" --globaltag "auto:run2_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_${NAME}.py - else - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process "HLT${NAME}" --globaltag "auto:run3_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_${NAME}.py - fi -} +# ensure that the python/ directory hosting cff fragments exists +if [ ! -d "${CMSSW_BASE}"/src/HLTrigger/Configuration/python ]; then + printf "\n%s\n" "ERROR -- the directory \${CMSSW_BASE}/src/HLTrigger/Configuration/python [1] does not exist" + printf "%s\n\n" " [1] ${CMSSW_BASE}/src/HLTrigger/Configuration/python" + exit 1 +fi -# make sure we're using *this* working area -eval `scramv1 runtime -sh` -hash -r +INITDIR="${PWD}" -# cff python dumps, in CVS under HLTrigger/Configuration/pyhon -log "Extracting cff python dumps" -echo "Extracting cff python dumps" -FILES=$(eval echo HLT_{$TABLES_}_cff.py) -rm -f $FILES -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - CONFIG=$(eval echo \$$(echo HLT_$TABLE)) - getConfigForCVS $CONFIG $TABLE -done -log "Done" -log "$(ls -l $FILES)" -mv -f $FILES ../python/ -log +# execute the ensuing steps from ${CMSSW_BASE}/src/HLTrigger/Configuration/test +cd "${CMSSW_BASE}"/src/HLTrigger/Configuration/test -# full config dumps, in CVS under HLTrigger/Configuration/test -log "Extracting full configuration dumps" -echo "Extracting full configuration dumps" -FILES=$(eval echo OnLine_HLT_{$TABLES_}.py) -rm -f $FILES -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - CONFIG=$(eval echo \$$(echo HLT_$TABLE)) - getConfigForOnline $CONFIG $TABLE +# create cff fragments and cfg configs +for TABLE in ${TABLES}; do + 
CONFIG=$(eval echo \$$(echo HLT_"${TABLE}")) + echo "${TABLE} (config: ${CONFIG})" + + # cff fragment of each HLT menu (do not use any conditions or L1T override) + log " creating cff fragment of HLT menu..." + hltGetConfiguration "${CONFIG}" --cff --data --type "${TABLE}" ${DBPROXYOPTS} > ../python/HLT_"${TABLE}"_cff.py + + # GlobalTag + AUTOGT="auto:run3_hlt_${TABLE}" + if [ "${TABLE}" = "Fake1" ] || [ "${TABLE}" = "Fake2" ] || [ "${TABLE}" = "2018" ]; then + AUTOGT="auto:run2_hlt_${TABLE}" + elif [ "${TABLE}" = "Fake" ]; then + AUTOGT="auto:run1_hlt_${TABLE}" + fi + + # standalone cfg file of each HLT menu + log " creating full cfg of HLT menu..." + hltGetConfiguration "${CONFIG}" --full --data --type "${TABLE}" --unprescale --process "HLT${TABLE}" --globaltag "${AUTOGT}" \ + --input "file:RelVal_Raw_${TABLE}_DATA.root" ${DBPROXYOPTS} > OnLine_HLT_"${TABLE}".py done -log "Done" -log "$(ls -l $FILES)" -log + +cd "${INITDIR}" diff --git a/HLTrigger/Configuration/test/getHLT.sh b/HLTrigger/Configuration/test/getHLT.sh index f003f5796d8de..e14613ba06f87 100755 --- a/HLTrigger/Configuration/test/getHLT.sh +++ b/HLTrigger/Configuration/test/getHLT.sh @@ -1,133 +1,93 @@ -#! /bin/bash +#!/bin/bash # ConfDB configurations to use -MASTER="/dev/CMSSW_12_5_0/HLT" # no explicit version, take the most recent -TARGET="/dev/CMSSW_12_5_0/\$TABLE" # no explicit version, take the most recent - -TABLES="GRun HIon PIon PRef" # $TABLE in the above variable will be expanded to these TABLES - -# print extra messages ? 
-VERBOSE=false - -# this is used for brace expansion -TABLES_=$(echo $TABLES | sed -e's/ \+/,/g') +MASTER="/dev/CMSSW_12_5_0/HLT" # no explicit version, take the most recent +TARGET="/dev/CMSSW_12_5_0/\$TABLE" # no explicit version, take the most recent + +TABLES="GRun HIon PIon PRef" # $TABLE in the above variable will be expanded to these TABLES + +# command-line arguments +VERBOSE=false # print extra messages to stdout +DBPROXYOPTS="" # db-proxy configuration +while [[ $# -gt 0 ]]; do + case "$1" in + -v) VERBOSE=true; shift;; + --dbproxy) DBPROXYOPTS="${DBPROXYOPTS} --dbproxy"; shift;; + --dbproxyhost) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyhost $2"; shift; shift;; + --dbproxyport) DBPROXYOPTS="${DBPROXYOPTS} --dbproxyport $2"; shift; shift;; + *) shift;; + esac +done -[ "$1" == "-v" ] && { VERBOSE=true; shift; } -[ "$1" == "-q" ] && { VERBOSE=false; shift; } +# remove spurious whitespaces and tabs from DBPROXYOPTS +DBPROXYOPTS=$(echo "${DBPROXYOPTS}" | xargs) +# log: print to stdout only if VERBOSE=true function log() { $VERBOSE && echo -e "$@" } -function findHltScript() { - local PACKAGE="HLTrigger/Configuration" - local SCRIPT="$1" - - if [ -f "$SCRIPT" ]; then - echo "./$SCRIPT" - elif [ -f "$CMSSW_BASE/src/$PACKAGE/test/$SCRIPT" ]; then - echo "$CMSSW_BASE/src/$PACKAGE/test/$SCRIPT" - elif [ -f "$CMSSW_RELEASE_BASE/src/$PACKAGE/test/$SCRIPT" ]; then - echo "$CMSSW_RELEASE_BASE/src/$PACKAGE/test/$SCRIPT" +# path to directory hosting this script +TESTDIR=$(cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd) + +# ensure that directory hosting this script corresponds to ${CMSSW_BASE}/src/HLTrigger/Configuration/test +if [ "${TESTDIR}" != "${CMSSW_BASE}"/src/HLTrigger/Configuration/test ]; then + printf "\n%s\n" "ERROR -- the directory hosting getHLT.sh [1] does not correspond to \${CMSSW_BASE}/src/HLTrigger/Configuration/test [2]" + printf "%s\n" " [1] ${TESTDIR}" + printf "%s\n\n" " [2] ${CMSSW_BASE}/src/HLTrigger/Configuration/test" + exit 1 +fi 
+ +# ensure that the python/ directory hosting cff fragments exists +if [ ! -d "${CMSSW_BASE}"/src/HLTrigger/Configuration/python ]; then + printf "\n%s\n" "ERROR -- the directory \${CMSSW_BASE}/src/HLTrigger/Configuration/python [1] does not exist" + printf "%s\n\n" " [1] ${CMSSW_BASE}/src/HLTrigger/Configuration/python" + exit 1 +fi + +INITDIR="${PWD}" + +# execute the ensuing steps from ${CMSSW_BASE}/src/HLTrigger/Configuration/test +cd "${CMSSW_BASE}"/src/HLTrigger/Configuration/test + +# create cff fragments and cfg configs +for TABLE in FULL ${TABLES}; do + if [ "${TABLE}" = "FULL" ]; then + CONFIG="${MASTER}" else - echo "cannot find $SCRIPT, aborting" - exit 1 + CONFIG=$(eval echo ${TARGET}) fi -} - -GETCONTENT=$(findHltScript getEventContent.py) -GETDATASETS=$(findHltScript getDatasets.py) -function getConfigForCVS() { - local CONFIG="$1" - local NAME="$2" - log " dumping HLT cffs for $NAME from $CONFIG" + echo "${TABLE} (config: ${CONFIG})" - # do not use any conditions or L1 override - hltGetConfiguration --cff --data $CONFIG --type $NAME > HLT_${NAME}_cff.py -} + # cff fragment of each HLT menu (do not use any conditions or L1T override) + log " creating cff fragment of HLT menu..." + hltGetConfiguration "${CONFIG}" --cff --data --type "${TABLE}" ${DBPROXYOPTS} > ../python/HLT_"${TABLE}"_cff.py -function getContentForCVS() { - local CONFIG="$1" - - log " dumping EventContet" - $GETCONTENT $CONFIG - rm -f hltOutput*_cff.py* hltScouting_cff.py* -} - -function getDatasetsForCVS() { - local CONFIG="$1" - local TARGET="$2" - - log " dumping Primary Dataset" - $GETDATASETS $CONFIG > $TARGET -} + # cff fragment of EventContents (only for MASTER config) + if [ "${TABLE}" = "FULL" ]; then + log " creating cff fragment of EventContents..." 
+ ./getEventContent.py "${MASTER}" ${DBPROXYOPTS} > ../python/HLTrigger_EventContent_cff.py + fi -function getConfigForOnline() { - local CONFIG="$1" - local NAME="$2" -# local L1T="tag[,connect]" - record is hardwired as L1GtTriggerMenuRcd - -# local L1TPP="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2012_v3/sqlFile/L1Menu_Collisions2012_v3_mc.db" -# local L1TPP="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc" -# local L1TPP="L1GtTriggerMenu_L1Menu_Collisions2015_25ns_v1_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2015_25ns_v1/sqlFile/L1Menu_Collisions2015_25ns_v1_mc.db" -# local L1THI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2011_v0_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_CollisionsHeavyIons2011_v0/sqlFile/L1Menu_CollisionsHeavyIons2011_v0_mc.db" -# local L1THI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2011_v0_mc" -# local L1THI="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc" -# local L1THI="L1GtTriggerMenu_L1Menu_Collisions2015_25ns_v1_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2015_25ns_v1/sqlFile/L1Menu_Collisions2015_25ns_v1_mc.db" -# local L1TPI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2013_v0_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_CollisionsHeavyIons2013_v0/sqlFile/L1Menu_CollisionsHeavyIons2013_v0_mc.db" -# local L1TPI="L1GtTriggerMenu_L1Menu_CollisionsHeavyIons2013_v0_mc" -# local L1TPI="L1GtTriggerMenu_L1Menu_Collisions2012_v3_mc" -# local L1TPI="L1GtTriggerMenu_L1Menu_Collisions2015_25ns_v1_mc,sqlite_file:/afs/cern.ch/user/g/ghete/public/L1Menu/L1Menu_Collisions2015_25ns_v1/sqlFile/L1Menu_Collisions2015_25ns_v1_mc.db" - - local L1TPP1="" - local L1TPP2="" - - log " dumping full HLT for $NAME from $CONFIG" - # override L1 menus - if [ "$NAME" == "Fake" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process HLT$NAME --globaltag "auto:run1_hlt_${NAME}" 
--input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_$NAME.py - elif [ "$NAME" == "Fake1" ] || [ "$NAME" == "Fake2" ] || [ "$NAME" == "2018" ]; then - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process HLT$NAME --globaltag "auto:run2_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_$NAME.py - else - hltGetConfiguration --full --data $CONFIG --type $NAME --unprescale --process HLT$NAME --globaltag "auto:run3_hlt_${NAME}" --input "file:RelVal_Raw_${NAME}_DATA.root" > OnLine_HLT_$NAME.py + # cff fragment of PrimaryDatasets of each HLT menu (except for MASTER config) + if [ "${TABLE}" != "FULL" ]; then + log " creating cff fragment of Primary Datasets..." + ./getDatasets.py "${CONFIG}" ${DBPROXYOPTS} > ../python/HLTrigger_Datasets_"${TABLE}"_cff.py fi -} + # GlobalTag + AUTOGT="auto:run3_hlt_${TABLE}" + if [ "${TABLE}" = "Fake1" ] || [ "${TABLE}" = "Fake2" ] || [ "${TABLE}" = "2018" ]; then + AUTOGT="auto:run2_hlt_${TABLE}" + elif [ "${TABLE}" = "Fake" ]; then + AUTOGT="auto:run1_hlt_${TABLE}" + fi -# make sure we're using *this* working area -eval `scramv1 runtime -sh` -hash -r - -# cff python dumps, in CVS under HLTrigger/Configuration/pyhon -log "Extracting cff python dumps" -echo "Extracting cff python dumps" -FILES=$(eval echo HLT_FULL_cff.py HLT_{$TABLES_}_cff.py HLTrigger_Datasets_{$TABLES_}_cff.py HLTrigger_EventContent_cff.py ) -rm -f $FILES -getConfigForCVS $MASTER FULL -getContentForCVS $MASTER -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - getConfigForCVS $(eval echo $TARGET) $TABLE - getDatasetsForCVS $(eval echo $TARGET) HLTrigger_Datasets_${TABLE}_cff.py + # standalone cfg file of each HLT menu (incl. MASTER config) + log " creating full cfg of HLT menu..." 
+ hltGetConfiguration "${CONFIG}" --full --data --type "${TABLE}" --unprescale --process "HLT${TABLE}" --globaltag "${AUTOGT}" \ + --input "file:RelVal_Raw_${TABLE}_DATA.root" ${DBPROXYOPTS} > OnLine_HLT_"${TABLE}".py done -log "Done" -log "$(ls -l $FILES)" -mv -f $FILES ../python/ -log - -# full config dumps, in CVS under HLTrigger/Configuration/test -log "Extracting full configuration dumps" -echo "Extracting full configuration dumps" -FILES=$(eval echo OnLine_HLT_FULL.py OnLine_HLT_{$TABLES_}.py) -rm -f $FILES -getConfigForOnline $MASTER FULL -for TABLE in $TABLES; do - log "$TABLE" - echo "$TABLE" - getConfigForOnline $(eval echo $TARGET) $TABLE -done -log "Done" -log "$(ls -l $FILES)" -log + +cd "${INITDIR}"