Introduce unit tests for Tracker Alignment all-in-one meta-validation tool #28739

Merged
merged 6 commits into from
Jan 19, 2020
@@ -52,8 +52,8 @@ pedeMem = 32000
datasetdir = /afs/cern.ch/cms/CAF/CMSALCA/ALCA_TRACKERALIGN/MP/MPproduction/datasetfiles
configTemplate = universalConfigTemplate.py
globaltag = auto:run2_data
FirstRunForStartGeometry = 0 ; set this to the run from where you want to start

;# set this to the run from where you want to start
FirstRunForStartGeometry = 0

;###############################################################################
;## weights
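Moving the comment onto its own line keeps the option value clean for parsers that do not strip inline ";" comments; Python 3's configparser, for instance, only strips them when inline_comment_prefixes is set. A minimal sketch of why the new layout is safer (the [general] section name is assumed here purely for illustration):

import configparser

# Sketch only: with the comment on its own line, the value parses cleanly even
# when inline_comment_prefixes is left at its default (None), i.e. when an
# inline ";" comment would otherwise stay part of the value and break int().
cfg = configparser.ConfigParser()
cfg.read_string("""
[general]
;# set this to the run from where you want to start
FirstRunForStartGeometry = 0
""")
print(int(cfg["general"]["FirstRunForStartGeometry"]))  # prints 0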
13 changes: 7 additions & 6 deletions Alignment/OfflineValidation/scripts/validateAlignments.py
@@ -550,11 +550,9 @@ def createMergeScript( path, validations, options ):
repMap[(validationtype, validationName, referenceName)]["beforeMerge"] += validationtype.doInitMerge()
repMap[(validationtype, validationName, referenceName)]["doMerge"] += validation.doMerge()
for f in validation.getRepMap()["outputFiles"]:
longName = os.path.join("/eos/cms/store/group/alca_trackeralign/AlignmentValidation/",
validation.getRepMap()["eosdir"], f)
repMap[(validationtype, validationName, referenceName)]["rmUnmerged"] += " rm "+longName+"\n"


longName = os.path.join("/eos/cms/store/group/alca_trackeralign/AlignmentValidation/",
validation.getRepMap()["eosdir"], f)
repMap[(validationtype, validationName, referenceName)]["rmUnmerged"] += " rm "+longName+"\n"

repMap[(validationtype, validationName, referenceName)]["rmUnmerged"] += ("else\n"
" echo -e \\n\"WARNING: Merging failed, unmerged"
@@ -743,7 +741,10 @@ def main(argv = None):

map( lambda job: job.runJob(), jobs )

ValidationJobMultiIOV.runCondorJobs(outPath)
if options.dryRun:
pass
else:
ValidationJobMultiIOV.runCondorJobs(outPath)


if __name__ == "__main__":
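The guard added above is what lets the new unit test at the bottom of this PR run without a batch system: when validateAlignments.py is invoked with --dryRun (so that options.dryRun is true), the HTCondor submission step is skipped. A rough, self-contained sketch of how such a flag is typically wired up, with a compact equivalent of the guard (the actual option handling lives elsewhere in validateAlignments.py and is not shown in this diff):

import argparse

# Illustrative only: declare a --dryRun switch and use it to skip submission.
parser = argparse.ArgumentParser()
parser.add_argument("--dryRun", dest="dryRun", action="store_true", default=False,
                    help="prepare the jobs but do not submit them to HTCondor")
options = parser.parse_args(["--dryRun"])

if not options.dryRun:   # compact equivalent of the "pass / else" guard above
    print("would call ValidationJobMultiIOV.runCondorJobs(outPath) here")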
10 changes: 9 additions & 1 deletion Alignment/OfflineValidation/test/testValidate.ini
@@ -33,28 +33,36 @@ style = 2402
# configuration of individual validations

[offline:validation_MinBias]
multiIOV = false
maxevents = 1000
dataset = /MinimumBias/Run2017A-TkAlMinBias-PromptReco-v1/ALCARECO
magneticfield = 3.8
dataset = /MinimumBias/Run2017A-TkAlMinBias-PromptReco-v1/ALCARECO
trackcollection = ALCARECOTkAlMinBias

[offline:validation_cosmics]
multiIOV = false
maxevents = 1000
magneticfield = 3.8
dataset = /Cosmics/Run2017A-TkAlCosmics0T-PromptReco-v1/ALCARECO
trackcollection = ALCARECOTkAlCosmicsCTF0T

[compare:Tracker]
multiIOV = false
levels = "Tracker","DetUnit"
dbOutput = false

[zmumu:some_zmumu_validation]
multiIOV = false
maxevents = 1000
magneticfield = 3.8
dataset = /DoubleMuon/Run2017A-TkAlZMuMu-PromptReco-v3/ALCARECO
etamaxneg = 2.4
etaminneg = -2.4
etamaxpos = 2.4
etaminpos = -2.4

[split:some_split_validation]
multiIOV = false
maxevents = 1000
dataset = /Cosmics/Run2017A-TkAlCosmics0T-PromptReco-v1/ALCARECO
trackcollection = ALCARECOTkAlCosmicsCTF0T
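The validation sections in testValidate.ini follow a "<validation type>:<name>" naming convention ([offline:...], [compare:...], [zmumu:...], [split:...]), with multiIOV = false now set explicitly for each. A minimal sketch of reading such a file and splitting the section headers, assuming a plain configparser and a local copy of the file (the real parsing is done inside the all-in-one tool itself):

import configparser

# Sketch only: list the validations defined in the .ini shown above.
cfg = configparser.ConfigParser()
cfg.read("Alignment/OfflineValidation/test/testValidate.ini")   # path is illustrative
for section in cfg.sections():
    if ":" in section:
        val_type, val_name = section.split(":", 1)   # e.g. "offline", "validation_MinBias"
        print(val_type, val_name, dict(cfg[section]))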
92 changes: 91 additions & 1 deletion Alignment/OfflineValidation/test/test_all.sh
@@ -3,4 +3,94 @@
function die { echo $1: status $2 ; exit $2; }

echo "TESTING Alignment/OfflineValidation ..."
cmsRun ${LOCAL_TEST_DIR}/test_all_cfg.py || die "Failure running test_OfflineValidaiton_cfg.py" $?
cmsRun ${LOCAL_TEST_DIR}/test_all_cfg.py || die "Failure running test_OfflineValidation_cfg.py" $?

if test -f "validation_config.ini"; then
rm -f validation_config.ini
fi

cat <<EOF >> validation_config.ini
[general]
jobmode = interactive
eosdir = Test

[alignment:prompt]
title = prompt
globaltag = 92X_dataRun2_Prompt_v2
color = 1
style = 2001

[alignment:express]
title = express
globaltag = 92X_dataRun2_Express_v2
color = 2
style = 2402

[offline:validation_MinBias]
multiIOV = false
maxevents = 10
dataset = /MinimumBias/Run2017A-TkAlMinBias-PromptReco-v1/ALCARECO
magneticfield = 3.8
trackcollection = ALCARECOTkAlMinBias

[offline:validation_cosmics]
multiIOV = false
maxevents = 10
dataset = /Cosmics/Run2017A-TkAlCosmics0T-PromptReco-v1/ALCARECO
magneticfield = 3.8
trackcollection = ALCARECOTkAlCosmicsCTF0T

[compare:Tracker]
multiIOV = false
levels = "Tracker","DetUnit"
dbOutput = false

[zmumu:some_zmumu_validation]
multiIOV = false
maxevents = 10
dataset = /DoubleMuon/Run2017A-TkAlZMuMu-PromptReco-v3/ALCARECO
etamaxneg = 2.4
etaminneg = -2.4
etamaxpos = 2.4
etaminpos = -2.4

[split:some_split_validation]
multiIOV = false
maxevents = 10
dataset = /Cosmics/Run2017A-TkAlCosmics0T-PromptReco-v1/ALCARECO
trackcollection = ALCARECOTkAlCosmicsCTF0T

[plots:offline]
DMROptions = plain split
DMRMinimum = 5
legendoptions = meanerror rmserror modules outside
customtitle = #CMS{Preliminary}
customrighttitle = 2017A cosmics and collisions data
legendheader = header
bigtext = true

[plots:split]
outliercut = 0.95

customtitle = #CMS{Preliminary}
customrighttitle = 2017A 3.8T cosmics data
legendheader = header

[plots:zmumu]
customtitle = #CMS{Preliminary}
customrighttitle = 2016G Z#rightarrow#mu#mu data, |#eta|<2.4
legendheader = header

[validation]
offline validation_MinBias : prompt
offline validation_MinBias : express
offline validation_cosmics : prompt
offline validation_cosmics : express
compare Tracker: prompt 278819, express 278819
zmumu some_zmumu_validation : prompt
zmumu some_zmumu_validation : express
split some_split_validation : prompt
split some_split_validation : express
EOF

validateAlignments.py -c validation_config.ini -N testingAllInOneTool --dryRun || die "Failure running all-in-one test" $?
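
Each line in the [validation] section of the generated config pairs a validation with one of the alignment sections defined above (for example, offline validation_MinBias : prompt). The closing command then runs the whole chain with --dryRun, so the unit test exercises config parsing and job preparation without submitting anything to HTCondor (see the guard added in validateAlignments.py). As a rough sketch, the same invocation driven from Python rather than the shell wrapper, with subprocess.run(check=True) playing the role of the "|| die" idiom:

import subprocess

# Sketch: run the all-in-one tool in dry-run mode; a non-zero exit status raises
# CalledProcessError, mirroring what "|| die" does in test_all.sh.
subprocess.run(
    ["validateAlignments.py",
     "-c", "validation_config.ini",
     "-N", "testingAllInOneTool",
     "--dryRun"],
    check=True,
)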