diff --git a/Alignment/APEEstimation/python/ApeEstimator_cfi.py b/Alignment/APEEstimation/python/ApeEstimator_cfi.py
index 1ab006bfbfecc..c8894c01b754b 100644
--- a/Alignment/APEEstimation/python/ApeEstimator_cfi.py
+++ b/Alignment/APEEstimation/python/ApeEstimator_cfi.py
@@ -74,7 +74,7 @@
minGoodHitsPerTrack = cms.uint32(0),
#File containing TrackerTree with ideal Geometry
- TrackerTreeFile = cms.string(os.environ['CMSSW_BASE'] + '/src/Alignment/TrackerAlignment/hists/TrackerTree.root'),
+ TrackerTreeFile = cms.string(os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/TrackerTree.root'),
#Sectors defining set of modules for common overview plots resp. APE values
Sectors = cms.VPSet(),
diff --git a/Alignment/APEEstimation/python/samples/MC_UnitTest_TkAlMuonIsolated_cff.py b/Alignment/APEEstimation/python/samples/MC_UnitTest_TkAlMuonIsolated_cff.py
new file mode 100644
index 0000000000000..27e1538ec30d0
--- /dev/null
+++ b/Alignment/APEEstimation/python/samples/MC_UnitTest_TkAlMuonIsolated_cff.py
@@ -0,0 +1,20 @@
+import FWCore.ParameterSet.Config as cms
+
+maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
+readFiles = cms.untracked.vstring()
+secFiles = cms.untracked.vstring()
+source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
+
+
+readFiles.extend( [
+"/store/mc/Run3Winter22DRPremix/WJetsToLNu_TuneCP5_13p6TeV-madgraphMLM-pythia8/ALCARECO/TkAlMuonIsolated-TRKALCADesign_design_geometry_122X_mcRun3_2021_design_v9-v2/2520000/0027e6ed-2626-4ede-97a5-f0a44164b81b.root",
+"/store/mc/Run3Winter22DRPremix/WJetsToLNu_TuneCP5_13p6TeV-madgraphMLM-pythia8/ALCARECO/TkAlMuonIsolated-TRKALCADesign_design_geometry_122X_mcRun3_2021_design_v9-v2/2520000/00899b9d-32ab-46f2-b77b-0b0a8d666027.root",
+"/store/mc/Run3Winter22DRPremix/WJetsToLNu_TuneCP5_13p6TeV-madgraphMLM-pythia8/ALCARECO/TkAlMuonIsolated-TRKALCADesign_design_geometry_122X_mcRun3_2021_design_v9-v2/2520000/07851676-0c65-4630-bbab-7406defeb670.root",
+"/store/mc/Run3Winter22DRPremix/WJetsToLNu_TuneCP5_13p6TeV-madgraphMLM-pythia8/ALCARECO/TkAlMuonIsolated-TRKALCADesign_design_geometry_122X_mcRun3_2021_design_v9-v2/2520000/0a54c512-0f69-44e1-90bc-16da035cbe02.root",
+ ] );
+
+
+
+secFiles.extend( [
+ ] )
+
diff --git a/Alignment/APEEstimation/scripts/initialise.bash b/Alignment/APEEstimation/scripts/initialise.bash
index 083753a57639a..9f9adbba24439 100644
--- a/Alignment/APEEstimation/scripts/initialise.bash
+++ b/Alignment/APEEstimation/scripts/initialise.bash
@@ -2,8 +2,6 @@
DIRBASE="$CMSSW_BASE/src/Alignment/APEEstimation"
-mkdir $CMSSW_BASE/src/Alignment/TrackerAlignment/hists/
-
mkdir $DIRBASE/hists/
mkdir $DIRBASE/hists/workingArea/
mkdir $DIRBASE/hists/workingArea/apeObjects/
diff --git a/Alignment/APEEstimation/test/BuildFile.xml b/Alignment/APEEstimation/test/BuildFile.xml
new file mode 100644
index 0000000000000..1d697d08f8da8
--- /dev/null
+++ b/Alignment/APEEstimation/test/BuildFile.xml
@@ -0,0 +1,3 @@
+
+
+
diff --git a/Alignment/APEEstimation/test/SkimProducer/skimProducer_cfg.py b/Alignment/APEEstimation/test/SkimProducer/skimProducer_cfg.py
index 42d16e267f987..b04d66eb6d921 100644
--- a/Alignment/APEEstimation/test/SkimProducer/skimProducer_cfg.py
+++ b/Alignment/APEEstimation/test/SkimProducer/skimProducer_cfg.py
@@ -10,15 +10,11 @@
import sys
options = VarParsing.VarParsing ('standard')
options.register('sample', 'data1', VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.string, "Input sample")
-options.register('useTrackList', False, VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.bool, "Use list of preselected tracks")
-options.register('isTest', False, VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.bool, "Test run")
# get and parse the command line arguments
options.parseArguments()
print("Input sample: ", options.sample)
-print("Use list of preselected tracks: ", options.useTrackList)
-print("Test run: ", options.isTest)
##
@@ -79,11 +75,6 @@
outputName = 'MinBias.root'
#outputPath = "workingArea"
trackSelection = "MinBias"
-if options.sample == 'data2':
- process.load("Alignment.APEEstimation.samples.Data_TkAlMinBias_Run2018C_PromptReco_v3_cff")
- outputName = 'MinBias1.root'
- #outputPath = "workingArea"
- trackSelection = "MinBias"
if options.sample == 'data3':
process.load("Alignment.APEEstimation.samples.Data_TkAlMuonIsolated_22Jan2013C_v1_cff")
outputName = 'Data_TkAlMuonIsolated_22Jan2013C.root'
@@ -104,22 +95,14 @@
outputPath = '/eos/cms/store/caf/user/jschulz/Skims/MC/UL2016ReRecoRealistic'
outputName = 'Mc_TkAlMuonIsolated_WJetsToLNu_2016.root'
trackSelection = "SingleMu"
-if options.sample == 'zmumu':
- process.load("")
- outputName = ''
- trackSelection = "DoubleMu"
-if options.sample == 'zmumu10':
- process.load("Alignment.APEEstimation.samples.Mc_TkAlMuonIsolated_Summer12_zmumu10_cff")
- outputName = 'Mc_TkAlMuonIsolated_Summer12_zmumu10.root'
- trackSelection = "DoubleMu"
-if options.sample == 'zmumu20':
- process.load("Alignment.APEEstimation.samples.Mc_TkAlMuonIsolated_Summer12_zmumu20_cff")
- outputName = 'Mc_TkAlMuonIsolated_Summer12_zmumu20.root'
- trackSelection = "DoubleMu"
-if options.sample == 'zmumu50':
- process.load("Alignment.APEEstimation.samples.DYToMuMu_M-50_Tune4C_13TeV-pythia8_Spring14dr-TkAlMuonIsolated-castor_PU_S14_POSTLS170_V6-v1_ALCARECO_cff")
- outputName = 'Mc_DYToMuMu_M-50_Tune4C_13TeV-pythia8_Spring14dr-TkAlMuonIsolated-castor_PU_S14_POSTLS170_V6-v1.root'
- trackSelection = "DoubleMu"
+
+# For unit tests
+if options.sample == 'UnitTest':
+ process.load("Alignment.APEEstimation.samples.MC_UnitTest_TkAlMuonIsolated_cff")
+ outputName = 'MC_UnitTest_TkAlMuonIsolated.root'
+ maxEvents = 1000
+ globalTag = "auto:phase1_2022_design"
+ trackSelection = "SingleMu"
print("Using output name %s"%(outputName))
@@ -148,7 +131,6 @@
## Number of Events (should be after input file)
##
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(maxEvents) )
-if options.isTest: process.maxEvents.input = 1001
##
@@ -177,13 +159,6 @@
process.MuSkim = trackSelector
-##
-## If preselected track list is used
-##
-if options.useTrackList:
- process.MuSkim.src = 'TrackList'
- process.TriggerSelectionSequence *= process.TrackList
-
import Alignment.CommonAlignment.tools.trackselectionRefitting as trackselRefit
process.seqTrackselRefit = trackselRefit.getSequence(process, trackSelector.src.getModuleLabel())
@@ -226,10 +201,6 @@
process.out.outputCommands.extend(process.ApeSkimEventContent.outputCommands)
-if options.isTest:
- process.out.fileName = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_apeSkim.root'
-
-
##
## Outpath
##
diff --git a/Alignment/APEEstimation/test/SkimProducer/startSkim.py b/Alignment/APEEstimation/test/SkimProducer/startSkim.py
index f2eb82619d527..e73b035f71dcb 100644
--- a/Alignment/APEEstimation/test/SkimProducer/startSkim.py
+++ b/Alignment/APEEstimation/test/SkimProducer/startSkim.py
@@ -61,7 +61,7 @@ def condorSubmitSkim(sample, caf=False):
def localStartSkim(sample):
base = os.environ['CMSSW_BASE']
- execString = "cmsRun {base}/src/Alignment/APEEstimation/test/SkimProducer/skimProducer_cfg.py isTest=False useTrackList=False sample={sample}".format(sample=sample, base=base)
+ execString = "cmsRun {base}/src/Alignment/APEEstimation/test/SkimProducer/skimProducer_cfg.py sample={sample}".format(sample=sample, base=base)
print(execString)
toExec = execString.split(" ")
@@ -73,7 +73,7 @@ def localStartSkim(sample):
def get_output(proc):
while True:
- line = proc.stdout.readline().rstrip()
+ line = proc.stdout.readline().rstrip().decode()
if not line:
break
yield line
@@ -153,7 +153,7 @@ def main(argv):
if len(args.samples) == 0:
print("Usage: python startSkim.py -s ")
- sys.exit()
+ sys.exit(1)
finalSamples = []
for sample in args.samples:
diff --git a/Alignment/APEEstimation/test/autoSubmitter/autoSubmitter.py b/Alignment/APEEstimation/test/autoSubmitter/autoSubmitter.py
index 27e0b0abe4882..a182f2922c9cf 100644
--- a/Alignment/APEEstimation/test/autoSubmitter/autoSubmitter.py
+++ b/Alignment/APEEstimation/test/autoSubmitter/autoSubmitter.py
@@ -29,18 +29,10 @@ def save(name, object):
lock.release()
class Dataset:
- name = ""
- nFiles = 0
- maxEvents = -1
- baseDirectory = ""
- sampleType = "data1"
- fileList = []
- conditions = []
-
def __init__(self, config, name):
dsDict = dict(config.items("dataset:{}".format(name)))
self.name = name
- self.baseDirectory = dsDict["baseDirectory"]
+ self.baseDirectory = dsDict["baseDirectory"].replace("$CMSSW_BASE", os.environ['CMSSW_BASE'])
self.fileList = []
names = dsDict["fileNames"].split(" ")
@@ -49,21 +41,20 @@ def __init__(self, config, name):
for fileName in parsedNames:
self.fileList.append(self.baseDirectory+"/"+fileName)
self.nFiles = len(self.fileList)
-
+
+ self.maxEvents = -1
if "maxEvents" in dsDict:
self.maxEvents = int(dsDict["maxEvents"])
- if "isMC" in dsDict:
- if dsDict["isMC"] == "True":
- self.sampleType = "MC"
- else:
- self.sampleType ="data1"
-
+
+ self.sampleType ="data1"
+ if "isMC" in dsDict and dsDict["isMC"] == "True":
+ self.sampleType = "MC"
+
+ self.isCosmics = False
if "isCosmics" in dsDict:
self.isCosmics = (dsDict["isCosmics"] == "True")
- else:
- self.isCosmics = False
- self.conditions, dummy, self.validConditions = loadConditions(dsDict)
+ self.conditions, self.validConditions = loadConditions(dsDict)
# check if any of the sources used for conditions is invalid
if not self.validConditions:
@@ -75,42 +66,29 @@ def __init__(self, config, name):
if not self.existingFiles:
for fileName in missingFiles:
print("Invalid file name {} defined for dataset {}".format(fileName, self.name))
-
-
-class Alignment:
- name = ""
- alignmentName = None
- baselineDir = "Design"
- globalTag = "None"
- isDesign = False
- hasAlignmentCondition = False
- conditions = []
+class Alignment:
def __init__(self, config, name):
alDict = dict(config.items("alignment:{}".format(name)))
self.name = name
- if "alignmentName" in alDict:
- self.alignmentName = alDict["alignmentName"]
+
+ self.globalTag = "None"
if "globalTag" in alDict:
self.globalTag = alDict["globalTag"]
+ self.baselineDir = "Design"
if "baselineDir" in alDict:
self.baselineDir= alDict["baselineDir"]
+ self.isDesign = False
if "isDesign" in alDict:
self.isDesign= (alDict["isDesign"] == "True")
- # If self.hasAlignmentCondition is true, no other Alignment-Object is loaded in apeEstimator_cfg.py using the alignmentName
- self.conditions, self.hasAlignmentCondition, self.validConditions = loadConditions(alDict)
+        # If self.hasAlignmentCondition is true, no other Alignment-Object is loaded in apeEstimator_cfg.py using the alignmentName
+ self.conditions, self.validConditions = loadConditions(alDict)
# check if any of the sources used for conditions is invalid
if not self.validConditions:
print("Invalid conditions defined for alignment {}".format(self.name))
-
- # check if at least one of the two ways to define the alignment was used
- if self.alignmentName == None and not self.hasAlignmentCondition:
- print("Error: No alignment object name or record was defined for alignment {}".format(self.name))
- sys.exit()
-
class ApeMeasurement:
name = "workingArea"
@@ -118,7 +96,6 @@ class ApeMeasurement:
firstIteration = 0
maxIterations = 15
maxEvents = None
- status = STATE_NONE
dataset = None
alignment = None
runningJobs = None
@@ -128,10 +105,10 @@ class ApeMeasurement:
def __init__(self, name, config, settings):
self.name = name
- self.status = STATE_ITERATION_START
+ self.status_ = STATE_ITERATION_START
self.runningJobs = []
self.failedJobs = []
- self.startTime = subprocess.check_output(["date"]).strip()
+ self.startTime = subprocess.check_output(["date"]).decode().strip()
# load conditions from dictionary, overwrite defaults if defined
for key, value in settings.items():
@@ -139,9 +116,6 @@ def __init__(self, name, config, settings):
setattr(self, key, value)
# Replace names with actual Dataset and Alignment objects
- # In principle, one could preload all these once so they are not
- # redefined for each measurement, but right now this does not
- # seem necessary
self.dataset = Dataset(config, settings["dataset"])
self.alignment = Alignment(config, settings["alignment"])
@@ -156,16 +130,16 @@ def __init__(self, name, config, settings):
if self.alignment.isDesign:
self.maxIterations = 0
- self.conditions, dummy, self.validConditions = loadConditions(settings)
+ self.conditions, self.validConditions = loadConditions(settings)
# see if sanity checks passed
if not self.alignment.validConditions or not self.dataset.validConditions or not self.dataset.existingFiles or not self.validConditions:
- self.status = STATE_INVALID_CONDITIONS
- self.print_status()
- self.finishTime = subprocess.check_output(["date"]).strip()
+ self.setStatus(STATE_INVALID_CONDITIONS, True)
+ return
+
+ if unitTest:
return
-
if self.alignment.isDesign and self.dataset.sampleType != "MC":
# For now, this won't immediately shut down the program
print("APE Measurement {} is scheduled to to an APE baseline measurement with a dataset that is not marked as isMC=True. Is this intended?".format(self.name))
@@ -173,20 +147,25 @@ def __init__(self, name, config, settings):
if not self.alignment.isDesign:
ensurePathExists('{}/hists/{}/apeObjects'.format(base, self.name))
-
- def get_status(self):
- return status_map[self.status]
+ def status(self):
+ return status_map[self.status_]
- def print_status(self):
- print("APE Measurement {} in iteration {} is now in status {}".format(self.name, self.curIteration, self.get_status()))
+ def printStatus(self):
+ print("APE Measurement {} in iteration {} is now in status {}".format(self.name, self.curIteration, self.status()))
+
+ def setStatus(self, status, terminal=False):
+ if self.status_ != status:
+ self.status_ = status
+ self.printStatus()
+ if terminal:
+ self.finishTime = subprocess.check_output(["date"]).decode().strip()
# submit jobs for track refit and hit categorization
- def submit_jobs(self):
+ def submitJobs(self):
toSubmit = []
allConditions = self.alignment.conditions+self.dataset.conditions+self.conditions
- allConditions = list({v['record']:v for v in allConditions}.values()) # should we clean for duplicate records? the record last defined (from dataset)
- # will be kept in case of overlap, which is the same as if there was no overlap removal
+        allConditions = list({v['record']:v for v in allConditions}.values()) # Removes duplicate record definitions (the last one defined is kept)
ensurePathExists("{}/test/autoSubmitter/workingArea".format(base))
@@ -203,10 +182,7 @@ def submit_jobs(self):
for condition in allConditions:
fi.write(conditionsTemplate.format(record=condition["record"], connect=condition["connect"], tag=condition["tag"]))
-
- alignmentNameToUse = self.alignment.alignmentName
- if self.alignment.hasAlignmentCondition:
- alignmentNameToUse = "fromConditions"
+ alignmentNameToUse = "fromConditions"
lastIter = (self.curIteration==self.maxIterations) and not self.alignment.isDesign
@@ -227,7 +203,7 @@ def submit_jobs(self):
arguments += condorArgumentTemplate.format(fileNumber=fileNumber, inputFile=inputFile)
# build condor submit script
- date = subprocess.check_output(["date", "+%m_%d_%H_%M_%S"]).strip()
+ date = subprocess.check_output(["date", "+%m_%d_%H_%M_%S"]).decode().strip()
sub = "{}/test/autoSubmitter/workingArea/job_{}_iter{}".format(base, self.name, self.curIteration)
errorFileTemp = sub+"_error_{}.txt"
@@ -255,7 +231,7 @@ def submit_jobs(self):
# submit batch
from autoSubmitterTemplates import submitCondorTemplate
- subOut = subprocess.check_output(submitCondorTemplate.format(subFile=submitFileName), shell=True).strip()
+ subOut = subprocess.check_output(submitCondorTemplate.format(subFile=submitFileName), shell=True).decode().strip()
if len(subOut) == 0:
print("Running on environment that does not know bsub command or ssh session is timed out (ongoing for longer than 24h?), exiting")
@@ -266,12 +242,9 @@ def submit_jobs(self):
# list contains condor log files from which to read when job is terminated to detect errors
self.runningJobs.append((logFileTemp.format(i), errorFileTemp.format(i), "{}.{}".format(cluster, i)))
-
- self.status = STATE_BJOBS_WAITING
- self.print_status()
+ self.setStatus(STATE_BJOBS_WAITING)
- def check_jobs(self):
- lastStatus = self.status
+ def checkJobs(self):
stillRunningJobs = []
# check all still running jobs
for logName, errName, jobId in self.runningJobs:
@@ -313,10 +286,9 @@ def check_jobs(self):
# at least one job failed
if len(self.failedJobs) > 0:
- self.status = STATE_BJOBS_FAILED
- self.finishTime = subprocess.check_output(["date"]).strip()
+ self.setStatus(STATE_BJOBS_FAILED, True)
elif len(self.runningJobs) == 0:
- self.status = STATE_BJOBS_DONE
+ self.setStatus(STATE_BJOBS_DONE)
print("All condor jobs of APE measurement {} in iteration {} are done".format(self.name, self.curIteration))
# remove files
@@ -334,13 +306,10 @@ def check_jobs(self):
os.remove(errorFile)
os.remove(outputFile)
os.remove(logFile)
-
- if lastStatus != self.status:
- self.print_status()
# merges files from jobs
- def do_merge(self):
- self.status = STATE_MERGE_WAITING
+ def mergeFiles(self):
+ self.setStatus(STATE_MERGE_WAITING)
if self.alignment.isDesign:
folderName = '{}/hists/{}/baseline'.format(base, self.name)
else:
@@ -365,15 +334,13 @@ def do_merge(self):
os.remove(name)
if rootFileValid("{}/allData.root".format(folderName)) and merge_result == 0:
- self.status = STATE_MERGE_DONE
+ self.setStatus(STATE_MERGE_DONE)
else:
- self.status = STATE_MERGE_FAILED
- self.finishTime = subprocess.check_output(["date"]).strip()
- self.print_status()
+ self.setStatus(STATE_MERGE_FAILED, True)
# calculates APE
- def do_summary(self):
- self.status = STATE_SUMMARY_WAITING
+ def calculateApe(self):
+ self.status_ = STATE_SUMMARY_WAITING
from autoSubmitterTemplates import summaryTemplate
if self.alignment.isDesign:
#use measurement name as baseline folder name in this case
@@ -383,34 +350,29 @@ def do_summary(self):
summary_result = subprocess.call(summaryTemplate.format(inputCommands=inputCommands), shell=True) # returns exit code (0 if no error occured)
if summary_result == 0:
- self.status = STATE_SUMMARY_DONE
+ self.setStatus(STATE_SUMMARY_DONE)
else:
- self.status = STATE_SUMMARY_FAILED
- self.finishTime = subprocess.check_output(["date"]).strip()
- self.print_status()
+ self.setStatus(STATE_SUMMARY_FAILED, True)
# saves APE to .db file so it can be read out next iteration
- def do_local_setting(self):
- self.status = STATE_LOCAL_WAITING
+ def writeApeToDb(self):
+ self.setStatus(STATE_LOCAL_WAITING)
from autoSubmitterTemplates import localSettingTemplate
inputCommands = "iterNumber={} setBaseline={} measurementName={}".format(self.curIteration,self.alignment.isDesign,self.name)
local_setting_result = subprocess.call(localSettingTemplate.format(inputCommands=inputCommands), shell=True) # returns exit code (0 if no error occured)
if local_setting_result == 0:
- self.status = STATE_LOCAL_DONE
+ self.setStatus(STATE_LOCAL_DONE)
else:
- self.status = STATE_LOCAL_FAILED
- self.finishTime = subprocess.check_output(["date"]).strip()
- self.print_status()
+ self.setStatus(STATE_LOCAL_FAILED, True)
- def finish_iteration(self):
+ def finishIteration(self):
print("APE Measurement {} just finished iteration {}".format(self.name, self.curIteration))
if self.curIteration < self.maxIterations:
self.curIteration += 1
- self.status = STATE_ITERATION_START
+ self.setStatus(STATE_ITERATION_START)
else:
- self.status = STATE_FINISHED
- self.finishTime = subprocess.check_output(["date"]).strip()
+ self.setStatus(STATE_FINISHED, True)
print("APE Measurement {}, which was started at {} was finished after {} iterations, at {}".format(self.name, self.startTime, self.curIteration, self.finishTime))
def kill(self):
@@ -418,7 +380,7 @@ def kill(self):
for log, err, jobId in self.runningJobs:
subprocess.call(killJobTemplate.format(jobId=jobId), shell=True)
self.runningJobs = []
- self.status = STATE_NONE
+ self.setStatus(STATE_NONE)
def purge(self):
self.kill()
@@ -426,18 +388,17 @@ def purge(self):
shutil.rmtree(folderName)
# remove log-files as well?
- def run_iteration(self):
+ def runIteration(self):
global threadcounter
global measurements
threadcounter.acquire()
try:
- if self.status == STATE_ITERATION_START:
+ if self.status_ == STATE_ITERATION_START:
# start bjobs
print("APE Measurement {} just started iteration {}".format(self.name, self.curIteration))
-
try:
- self.submit_jobs()
+ self.submitJobs()
save("measurements", measurements)
except Exception as e:
# this is needed in case the scheduler goes down
@@ -445,56 +406,57 @@ def run_iteration(self):
print(e)
return
- if self.status == STATE_BJOBS_WAITING:
+ if self.status_ == STATE_BJOBS_WAITING:
# check if bjobs are finished
- self.check_jobs()
+ self.checkJobs()
save("measurements", measurements)
- if self.status == STATE_BJOBS_DONE:
+ if self.status_ == STATE_BJOBS_DONE:
# merge files
- self.do_merge()
+ self.mergeFiles()
save("measurements", measurements)
- if self.status == STATE_MERGE_DONE:
+ if self.status_ == STATE_MERGE_DONE:
# start summary
- self.do_summary()
+ self.calculateApe()
save("measurements", measurements)
- if self.status == STATE_SUMMARY_DONE:
+ if self.status_ == STATE_SUMMARY_DONE:
# start local setting (only if not a baseline measurement)
if self.alignment.isDesign:
- self.status = STATE_LOCAL_DONE
+ self.setStatus(STATE_LOCAL_DONE)
else:
- self.do_local_setting()
+ self.writeApeToDb()
save("measurements", measurements)
- if self.status == STATE_LOCAL_DONE:
- self.finish_iteration()
+ if self.status_ == STATE_LOCAL_DONE:
+ self.finishIteration()
save("measurements", measurements)
# go to next iteration or finish measurement
- if self.status == STATE_BJOBS_FAILED or \
- self.status == STATE_MERGE_FAILED or \
- self.status == STATE_SUMMARY_FAILED or \
- self.status == STATE_LOCAL_FAILED or \
- self.status == STATE_INVALID_CONDITIONS or \
- self.status == STATE_FINISHED:
+
+ if self.status_ == STATE_BJOBS_FAILED or \
+ self.status_ == STATE_MERGE_FAILED or \
+ self.status_ == STATE_SUMMARY_FAILED or \
+ self.status_ == STATE_LOCAL_FAILED or \
+ self.status_ == STATE_INVALID_CONDITIONS or \
+ self.status_ == STATE_FINISHED:
with open(history_file, "a") as fi:
- fi.write("APE measurement {name} which was started at {start} finished at {end} with state {state} in iteration {iteration}\n".format(name=self.name, start=self.startTime, end=self.finishTime, state=self.get_status(), iteration=self.curIteration))
- if self.status == STATE_FINISHED:
+ fi.write("APE measurement {name} which was started at {start} finished at {end} with state {state} in iteration {iteration}\n".format(name=self.name, start=self.startTime, end=self.finishTime, state=self.status(), iteration=self.curIteration))
+ if self.status_ == STATE_FINISHED:
global finished_measurements
finished_measurements[self.name] = self
save("finished", finished_measurements)
else:
global failed_measurements
failed_measurements[self.name] = self
- self.status = STATE_NONE
+
+ self.setStatus(STATE_NONE)
save("failed", failed_measurements)
save("measurements", measurements)
- if self.status == STATE_ITERATION_START: # this ensures that jobs do not go into idle if many measurements are done simultaneously
+ if self.status_ == STATE_ITERATION_START: # this ensures that jobs do not go into idle if many measurements are done simultaneously
# start bjobs
print("APE Measurement {} just started iteration {}".format(self.name, self.curIteration))
- self.submit_jobs()
+ self.submitJobs()
save("measurements", measurements)
finally:
threadcounter.release()
-
def main():
parser = argparse.ArgumentParser(description="Automatically run APE measurements")
parser.add_argument("-c", "--config", action="append", dest="configs", default=[],
@@ -511,6 +473,8 @@ def main():
help='Number of threads running in parallel')
parser.add_argument("-C", "--caf",action="store_true", dest="caf", default=False,
help="Use CAF queue for condor jobs")
+ parser.add_argument("-u", "--unitTest", action="store_true", dest="unitTest", default=False,
+                            help='If this is used, the program exits as soon as a measurement fails, and its exit code is the status of the measurement, i.e., where it failed')
args = parser.parse_args()
global base
@@ -519,10 +483,10 @@ def main():
global threadcounter
global lock
global use_caf
+ global unitTest
use_caf = args.caf
- enableCAF(use_caf)
-
+ unitTest = args.unitTest
threadcounter = threading.BoundedSemaphore(args.ncores)
lock = threading.Lock()
@@ -535,9 +499,8 @@ def main():
base = os.environ['CMSSW_BASE']+"/src/Alignment/APEEstimation"
except KeyError:
print("No CMSSW environment was set, exiting")
- sys.exit()
-
-
+ sys.exit(1)
+
killTargets = []
purgeTargets = []
for toConvert in args.kill:
@@ -565,7 +528,7 @@ def main():
for res in resumed:
measurements.append(res)
- print("Measurement {} in state {} in iteration {} was resumed".format(res.name, res.get_status(), res.curIteration))
+ print("Measurement {} in state {} in iteration {} was resumed".format(res.name, res.status(), res.curIteration))
# Killing and purging is done here, because it doesn't make
# sense to kill or purge a measurement that was just started
for to_kill in args.kill:
@@ -580,7 +543,7 @@ def main():
except IOError:
print("Could not resume because {} could not be opened, exiting".format(shelve_name))
- sys.exit()
+ sys.exit(2)
# read out from config file
if args.configs != []:
@@ -599,22 +562,28 @@ def main():
measurement = ApeMeasurement(name, config, settings)
- if measurement.status >= STATE_ITERATION_START and measurement.status <= STATE_FINISHED:
+ if measurement.status_ >= STATE_ITERATION_START:
measurements.append(measurement)
print("APE Measurement {} was started".format(measurement.name))
-
-
+
+ if unitTest:
+ # status is 0 if successful, 101 if wrongly configured
+ sys.exit(measurement.status_)
+
+ initializeModuleLoading()
+ enableCAF(use_caf)
+
while True:
# remove finished and failed measurements
- measurements = [measurement for measurement in measurements if not (measurement.status==STATE_NONE or measurement.status == STATE_FINISHED)]
+ measurements = [measurement for measurement in measurements if not (measurement.status_==STATE_NONE or measurement.status_ == STATE_FINISHED)]
save("measurements", measurements)
save("failed", failed_measurements)
save("finished", finished_measurements)
list_threads = []
for measurement in measurements:
- t = threading.Thread(target=measurement.run_iteration)
+ t = threading.Thread(target=measurement.runIteration)
list_threads.append(t)
t.start()
@@ -624,7 +593,7 @@ def main():
if len(measurements) == 0:
print("No APE measurements are active, exiting")
- break
+ sys.exit(0)
try: # so that interrupting does not give an error message and just ends the program
time_remaining = clock_interval
@@ -639,7 +608,6 @@ def main():
sys.stdout.write("\033[K")
except KeyboardInterrupt:
sys.exit(0)
-
-
+
if __name__ == "__main__":
main()
diff --git a/Alignment/APEEstimation/test/autoSubmitter/autoSubmitterTemplates.py b/Alignment/APEEstimation/test/autoSubmitter/autoSubmitterTemplates.py
index b09b1f091e673..5632d093973d3 100644
--- a/Alignment/APEEstimation/test/autoSubmitter/autoSubmitterTemplates.py
+++ b/Alignment/APEEstimation/test/autoSubmitter/autoSubmitterTemplates.py
@@ -21,14 +21,13 @@
Error = {errorFile}
Log = {logFile}
request_memory = 2000M
-request_disk = 400M
+request_disk = 500M
batch_name = {jobName}
+JobFlavour = "workday"
Queue Arguments from (
{arguments})
"""
-
condorSubTemplateCAF="""
Executable = {jobFile}
Universe = vanilla
@@ -36,7 +35,7 @@
Error = {errorFile}
Log = {logFile}
request_memory = 2000M
-request_disk = 400M
+request_disk = 500M
batch_name = {jobName}
+JobFlavour = "workday"
+AccountingGroup = "group_u_CMS.CAF.ALCA"
diff --git a/Alignment/APEEstimation/test/autoSubmitter/helpers.py b/Alignment/APEEstimation/test/autoSubmitter/helpers.py
index 7c37fd6e39661..bb78557ff2b73 100644
--- a/Alignment/APEEstimation/test/autoSubmitter/helpers.py
+++ b/Alignment/APEEstimation/test/autoSubmitter/helpers.py
@@ -70,17 +70,18 @@ def rootFileValid(path):
result &= not file.IsZombie()
return result
-if not 'MODULEPATH' in os.environ:
- f = open(os.environ['MODULESHOME'] + "/init/.modulespath", "r")
- path = []
- for line in f.readlines():
- line = re.sub("#.*$", '', line)
- if line != '':
- path.append(line)
- os.environ['MODULEPATH'] = ':'.join(path)
-
-if not 'LOADEDMODULES' in os.environ:
- os.environ['LOADEDMODULES'] = ''
+def initializeModuleLoading():
+ if not 'MODULEPATH' in os.environ:
+ f = open(os.environ['MODULESHOME'] + "/init/.modulespath", "r")
+ path = []
+ for line in f.readlines():
+ line = re.sub("#.*$", '', line)
+ if line != '':
+ path.append(line)
+ os.environ['MODULEPATH'] = ':'.join(path)
+
+ if not 'LOADEDMODULES' in os.environ:
+ os.environ['LOADEDMODULES'] = ''
def module(*args):
if type(args[0]) == type([]):
@@ -95,8 +96,6 @@ def enableCAF(switch):
module('load', 'lxbatch/tzero')
else:
module('load', 'lxbatch/share')
-
-
def ensurePathExists(path):
try:
@@ -105,7 +104,6 @@ def ensurePathExists(path):
if exception.errno != errno.EEXIST:
raise
-
def replaceAllRanges(string):
if "[" in string and "]" in string:
strings = []
@@ -150,8 +148,6 @@ def allFilesExist(dataset):
passed = False
missingFiles.append(fileName)
return passed, missingFiles
-
-
def hasValidSource(condition):
if condition["connect"].startswith("frontier://FrontierProd/"):
@@ -165,7 +161,6 @@ def hasValidSource(condition):
return False
def loadConditions(dictionary):
- hasAlignmentCondition = False
goodConditions = True
conditions = []
for key, value in dictionary.items():
@@ -176,8 +171,6 @@ def loadConditions(dictionary):
# structure is "condition rcd:source tag"
record = key.split(" ")[1]
connect, tag = value.split(" ")
- if record == "TrackerAlignmentRcd":
- hasAlignmentCondition = True
conditions.append({"record":record, "connect":replaceShortcuts(connect), "tag":tag})
elif len(value.split(" ")) == 1 and len(key.split(" ")) == 2:
# structure is "condition tag:source", so we have to guess rcd from the tag. might also be "condition tag1+tag2+...+tagN:source"
@@ -188,8 +181,6 @@ def loadConditions(dictionary):
for possibleTag, possibleRcd in records.items():
if tag.startswith(possibleTag):
conditions.append({"record":possibleRcd, "connect":replaceShortcuts(connect), "tag":tag})
- if possibleRcd == "TrackerAlignmentRcd":
- hasAlignmentCondition = True
foundTag = True
break
if not foundTag:
@@ -207,4 +198,4 @@ def loadConditions(dictionary):
if not condition["record"].endswith("Rcd"):
goodConditions = False
print("'{}' is not a valid record name.".format(condition["record"]))
- return conditions, hasAlignmentCondition, goodConditions
+ return conditions, goodConditions
diff --git a/Alignment/APEEstimation/test/autoSubmitter/unitTest.ini b/Alignment/APEEstimation/test/autoSubmitter/unitTest.ini
new file mode 100644
index 0000000000000..3920aace7d635
--- /dev/null
+++ b/Alignment/APEEstimation/test/autoSubmitter/unitTest.ini
@@ -0,0 +1,12 @@
+[dataset:wlnu]
+baseDirectory=$CMSSW_BASE/unit_tests/ApeTest
+fileNames=MC_UnitTest_TkAlMuonIsolated_1.root
+isMC=True
+
+[alignment:FromGT]
+globalTag=auto:phase1_2022_design
+isDesign=True
+
+[ape:Design]
+dataset: wlnu
+alignment: FromGT
diff --git a/Alignment/APEEstimation/test/testApeestimatorSummary_cfg.py b/Alignment/APEEstimation/test/testApeestimatorSummary_cfg.py
deleted file mode 100644
index 517cfb40010ec..0000000000000
--- a/Alignment/APEEstimation/test/testApeestimatorSummary_cfg.py
+++ /dev/null
@@ -1,154 +0,0 @@
-from __future__ import print_function
-import os
-
-import FWCore.ParameterSet.Config as cms
-
-
-
-
-
-##
-## Setup command line options
-##
-import FWCore.ParameterSet.VarParsing as VarParsing
-import sys
-options = VarParsing.VarParsing ('standard')
-options.register('sample', 'wlnu', VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.string, "wlnu")
-options.register('isTest', True, VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.bool, "Test run")
-
-# get and parse the command line arguments
-if( hasattr(sys, "argv") ):
- for args in sys.argv :
- arg = args.split(',')
- for val in arg:
- val = val.split('=')
- if(len(val)==2):
- setattr(options,val[0], val[1])
-
-print("Input sample: ", options.sample)
-print("Test run: ", options.isTest)
-
-
-
-##
-## Process definition
-##
-process = cms.Process("ApeEstimatorSummary")
-
-
-
-##
-## Message Logger
-##
-process.load("FWCore.MessageService.MessageLogger_cfi")
-process.MessageLogger.CalculateAPE=dict()
-#process.MessageLogger.ApeEstimatorSummary=dict()
-process.MessageLogger.cerr.INFO.limit = 0
-process.MessageLogger.cerr.default.limit = -1
-process.MessageLogger.cerr.CalculateAPE = cms.untracked.PSet(limit = cms.untracked.int32(-1))
-#process.MessageLogger.cerr.ApeEstimatorSummary = cms.untracked.PSet(limit = cms.untracked.int32(-1))
-
-#process.MessageLogger.cout = cms.untracked.PSet(INFO = cms.untracked.PSet(
-# reportEvery = cms.untracked.int32(100), # every 100th only
-# limit = cms.untracked.int32(10), # or limit to 10 printouts...
-#))
-process.MessageLogger.cerr.FwkReport.reportEvery = 1000 ## really show only every 1000th
-
-
-
-##
-## Process options
-##
-process.options = cms.untracked.PSet(
- wantSummary = cms.untracked.bool(True),
-)
-
-
-
-##
-## Input sample definition
-##
-isData1 = isData2 = False
-isData = False
-isQcd = isWlnu = isZmumu = isZtautau = isZmumu10 = isZmumu20 = False
-isMc = False
-if options.sample == 'data1':
- isData1 = True
- isData = True
-elif options.sample == 'data2':
- isData2 = True
- isData = True
-elif options.sample == 'qcd':
- isQcd = True
- isMc = True
-elif options.sample == 'wlnu':
- isWlnu = True
- isMc = True
-elif options.sample == 'zmumu':
- isZmumu = True
- isMc = True
-elif options.sample == 'ztautau':
- isZtautau = True
- isMc = True
-elif options.sample == 'zmumu10':
- isZmumu10 = True
- isMc = True
-elif options.sample == 'zmumu20':
- isZmumu20 = True
- isMc = True
-else:
- print('ERROR --- incorrect data sammple: ', options.sample)
- exit(8888)
-
-
-
-##
-## Input Files
-##
-process.source = cms.Source("EmptySource")
-
-
-
-##
-## Number of Events
-##
-process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(1) )
-
-
-
-##
-## ApeEstimatorSummary
-##
-from Alignment.APEEstimation.ApeEstimatorSummary_cff import *
-process.ApeEstimatorSummary1 = ApeEstimatorSummaryBaseline.clone(
- InputFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '.root',
- ResultsFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_resultsFile1.root',
- BaselineFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_baselineApe.root',
-)
-process.ApeEstimatorSummary2 = ApeEstimatorSummaryIter.clone(
- correctionScaling = 0.6,
- InputFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '.root',
- ResultsFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_resultsFile2.root',
- BaselineFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_baselineApe.root',
- IterationFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_iterationApe2.root',
- ApeOutputFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_apeOutput2.txt',
-)
-process.ApeEstimatorSummary3 = ApeEstimatorSummaryIter.clone(
- correctionScaling = 0.6,
- InputFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '.root',
- ResultsFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_resultsFile3.root',
- BaselineFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_baselineApe.root',
- IterationFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_iterationApe3.root',
- ApeOutputFile = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/test_' + options.sample + '_apeOutput3.txt',
-)
-
-
-
-process.p = cms.Path(
- process.ApeEstimatorSummary1*
- process.ApeEstimatorSummary2
- #~ *process.ApeEstimatorSummary3
-)
-
-
-
diff --git a/Alignment/APEEstimation/test/testApeestimator_cfg.py b/Alignment/APEEstimation/test/testApeestimator_cfg.py
deleted file mode 100644
index 21f58700b8a20..0000000000000
--- a/Alignment/APEEstimation/test/testApeestimator_cfg.py
+++ /dev/null
@@ -1,313 +0,0 @@
-from __future__ import print_function
-import os
-
-import FWCore.ParameterSet.Config as cms
-
-
-
-
-##
-## Setup command line options
-##
-import FWCore.ParameterSet.VarParsing as VarParsing
-import sys
-options = VarParsing.VarParsing ('standard')
-options.register('sample', 'wlnu', VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.string, "Input sample")
-options.register('isTest', True, VarParsing.VarParsing.multiplicity.singleton, VarParsing.VarParsing.varType.bool, "Test run")
-
-# get and parse the command line arguments
-if( hasattr(sys, "argv") ):
- for args in sys.argv :
- arg = args.split(',')
- for val in arg:
- val = val.split('=')
- if(len(val)==2):
- setattr(options,val[0], val[1])
-
-print("Input sample: ", options.sample)
-print("Test run: ", options.isTest)
-
-
-
-##
-## Process definition
-##
-process = cms.Process("ApeEstimator")
-
-
-
-##
-## Message Logger
-##
-process.load("FWCore.MessageService.MessageLogger_cfi")
-process.MessageLogger.SectorBuilder=dict()
-process.MessageLogger.ResidualErrorBinning=dict()
-process.MessageLogger.HitSelector=dict()
-process.MessageLogger.CalculateAPE=dict()
-process.MessageLogger.ApeEstimator=dict()
-#process.MessageLogger.TrackRefitter=dict()
-process.MessageLogger.AlignmentTrackSelector=dict()
-process.MessageLogger.cerr.INFO.limit = 0
-process.MessageLogger.cerr.default.limit = -1 # Do not use =0, else all error messages (except those listed below) are supressed
-process.MessageLogger.cerr.SectorBuilder = cms.untracked.PSet(limit = cms.untracked.int32(-1))
-process.MessageLogger.cerr.HitSelector = cms.untracked.PSet(limit = cms.untracked.int32(-1))
-process.MessageLogger.cerr.CalculateAPE = cms.untracked.PSet(limit = cms.untracked.int32(-1))
-process.MessageLogger.cerr.ApeEstimator = cms.untracked.PSet(limit = cms.untracked.int32(-1))
-process.MessageLogger.cerr.AlignmentTrackSelector = cms.untracked.PSet(limit = cms.untracked.int32(-1))
-process.MessageLogger.cerr.FwkReport.reportEvery = 1000 ## really show only every 1000th
-
-
-
-##
-## Process options
-##
-process.options = cms.untracked.PSet(
- wantSummary = cms.untracked.bool(True),
-)
-
-
-
-##
-## Input sample definition
-##
-isData1 = isData2 = False
-isData = False
-isQcd = isWlnu = isZmumu = isZtautau = isZmumu10 = isZmumu20 = False
-isMc = False
-if options.sample == 'data1':
- isData1 = True
- isData = True
-elif options.sample == 'data2':
- isData2 = True
- isData = True
-elif options.sample == 'qcd':
- isQcd = True
- isMc = True
-elif options.sample == 'wlnu':
- isWlnu = True
- isMc = True
-elif options.sample == 'zmumu':
- isZmumu = True
- isMc = True
-elif options.sample == 'ztautau':
- isZtautau = True
- isMc = True
-elif options.sample == 'zmumu10':
- isZmumu10 = True
- isMc = True
-elif options.sample == 'zmumu20':
- isZmumu20 = True
- isMc = True
-else:
- print('ERROR --- incorrect data sammple: ', options.sample)
- exit(8888)
-
-
-
-##
-## Input Files
-##
-if isData1:
- process.load("Alignment.APEEstimation.samples.Data_TkAlMuonIsolated_Run2011A_May10ReReco_ApeSkim_cff")
-elif isData2:
- process.load("Alignment.APEEstimationsamples.Data_TkAlMuonIsolated_Run2011A_PromptV4_ApeSkim_cff")
-elif isQcd:
- process.load("Alignment.APEEstimation.samples.Mc_TkAlMuonIsolated_Summer11_qcd_ApeSkim_cff")
-elif isWlnu:
- process.load("Alignment.APEEstimation.samples.Mc_WJetsToLNu_74XTest_ApeSkim_cff")
-elif isZmumu10:
- process.load("Alignment.APEEstimation.samples.Mc_TkAlMuonIsolated_Summer11_zmumu10_ApeSkim_cff")
-elif isZmumu20:
- process.load("Alignment.APEEstimation.samples.Mc_TkAlMuonIsolated_Summer11_zmumu20_ApeSkim_cff")
-
-
-
-##
-## Number of Events (should be after input file)
-##
-process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
-if options.isTest: process.maxEvents.input = 10001
-
-
-##
-## Check run and event numbers for Dublicates --- only for real data
-##
-#process.source.duplicateCheckMode = cms.untracked.string("noDuplicateCheck")
-#process.source.duplicateCheckMode = cms.untracked.string("checkEachFile")
-process.source.duplicateCheckMode = cms.untracked.string("checkEachRealDataFile")
-#process.source.duplicateCheckMode = cms.untracked.string("checkAllFilesOpened") # default value
-
-
-
-##
-## Whole Refitter Sequence
-##
-process.load("Alignment.APEEstimation.TrackRefitter_38T_cff")
-
-process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
-from Configuration.AlCa.GlobalTag import GlobalTag
-process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:run2_design', '')
-
-
-##### To be used when running on Phys14MC with a CMSSW version > 72X
-#process.GlobalTag.toGet = cms.VPSet(
-# cms.PSet(
-# record = cms.string("BeamSpotObjectsRcd"),
-# tag = cms.string("Realistic8TeVCollisions_START50_V13_v1_mc"),
-# connect = cms.untracked.string("frontier://FrontierProd/CMS_CONDITIONS"),
-# )
-#)
-print("Using global tag "+process.GlobalTag.globaltag._value)
-
-
-
-##
-## New pixel templates
-##
-process.GlobalTag.toGet = cms.VPSet(
- cms.PSet(
- record = cms.string("SiPixelTemplateDBObjectRcd"),
- tag = cms.string("SiPixelTemplateDBObject_38T_v3_mc"),
- connect = cms.untracked.string("frontier://FrontierProd/CMS_CONDITIONS"),
- )
-)
-
-
-
-##
-## Alignment and APE
-##
-import CalibTracker.Configuration.Common.PoolDBESSource_cfi
-## Choose Alignment (w/o touching APE)
-if isMc:
- process.myTrackerAlignment = CalibTracker.Configuration.Common.PoolDBESSource_cfi.poolDBESSource.clone(
- connect = 'frontier://FrontierProd/CMS_CONDITIONS', # or your sqlite file
- toGet = [
- cms.PSet(
- record = cms.string('TrackerAlignmentRcd'),
- tag = cms.string('TrackerIdealGeometry210_mc') # 'TrackerAlignment_2009_v2_offline'
- ),
- ],
- )
- process.es_prefer_trackerAlignment = cms.ESPrefer("PoolDBESSource","myTrackerAlignment")
-
-process.es_prefer_trackerAlignment = cms.ESPrefer("PoolDBESSource","myTrackerAlignment")
-if isData:
- # Recent geometry
- process.myTrackerAlignment = CalibTracker.Configuration.Common.PoolDBESSource_cfi.poolDBESSource.clone(
- connect = 'frontier://FrontierProd/CMS_CONDITIONS',
- toGet = [
- cms.PSet(
- record = cms.string('TrackerAlignmentRcd'),
- tag = cms.string('TrackerAlignment_GR10_v6_offline'),
- ),
- ],
- )
- process.es_prefer_trackerAlignment = cms.ESPrefer("PoolDBESSource","myTrackerAlignment")
- # Kinks and bows
- process.myTrackerAlignmentKinksAndBows = CalibTracker.Configuration.Common.PoolDBESSource_cfi.poolDBESSource.clone(
- connect = 'frontier://FrontierProd/CMS_CONDITIONS',
- toGet = [
- cms.PSet(
- record = cms.string('TrackerSurfaceDeformationRcd'),
- tag = cms.string('TrackerSurfaceDeformations_v1_offline'),
- ),
- ],
- )
- process.es_prefer_trackerAlignmentKinksAndBows = cms.ESPrefer("PoolDBESSource","myTrackerAlignmentKinksAndBows")
-
-## APE (set to zero)
-process.myTrackerAlignmentErr = CalibTracker.Configuration.Common.PoolDBESSource_cfi.poolDBESSource.clone(
- connect = 'frontier://FrontierProd/CMS_CONDITIONS',
- toGet = [
- cms.PSet(
- record = cms.string('TrackerAlignmentErrorExtendedRcd'),
- tag = cms.string('TrackerIdealGeometryErrorsExtended210_mc')
- ),
- ],
-)
-process.es_prefer_trackerAlignmentErr = cms.ESPrefer("PoolDBESSource","myTrackerAlignmentErr")
-
-
-
-##
-## Trigger Selection
-##
-process.load("Alignment.APEEstimation.TriggerSelection_cff")
-
-
-
-##
-## ApeEstimator
-##
-from Alignment.APEEstimation.ApeEstimator_cff import *
-process.ApeEstimator1 = ApeEstimator.clone(
- #~ tjTkAssociationMapTag = "TrackRefitterHighPurityForApeEstimator",
- tjTkAssociationMapTag = "TrackRefitterForApeEstimator",
- maxTracksPerEvent = 0,
- applyTrackCuts = False,
- Sectors = RecentSectors,
- analyzerMode = False,
- calculateApe = True
-)
-process.ApeEstimator1.HitSelector.width = []
-process.ApeEstimator1.HitSelector.maxIndex = []
-process.ApeEstimator1.HitSelector.widthProj = []
-process.ApeEstimator1.HitSelector.widthDiff = []
-process.ApeEstimator1.HitSelector.edgeStrips = []
-process.ApeEstimator1.HitSelector.sOverN = []
-process.ApeEstimator1.HitSelector.maxCharge = []
-process.ApeEstimator1.HitSelector.chargeOnEdges = []
-process.ApeEstimator1.HitSelector.probX = []
-process.ApeEstimator1.HitSelector.phiSensX = []
-process.ApeEstimator1.HitSelector.phiSensY = []
-process.ApeEstimator1.HitSelector.errXHit = []
-process.ApeEstimator1.HitSelector.chargePixel = []
-process.ApeEstimator1.HitSelector.widthX = []
-process.ApeEstimator1.HitSelector.widthY = []
-process.ApeEstimator1.HitSelector.logClusterProbability = []
-process.ApeEstimator1.HitSelector.isOnEdge = []
-process.ApeEstimator1.HitSelector.qBin = []
-
-
-process.ApeEstimator2 = process.ApeEstimator1.clone(
- Sectors = ValidationSectors,
- analyzerMode = True,
- calculateApe = False,
-)
-
-process.ApeEstimator3 = process.ApeEstimator2.clone(
- zoomHists = False,
-)
-
-
-
-##
-## Output File Configuration
-##
-outputName = os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/'
-if options.isTest:
- outputName = outputName + 'test_'
-outputName = outputName + options.sample + '.root'
-
-process.TFileService = cms.Service("TFileService",
- fileName = cms.string(outputName),
- closeFileFast = cms.untracked.bool(True)
-)
-
-
-
-##
-## Path
-##
-process.p = cms.Path(
- process.TriggerSelectionSequence*
- process.RefitterHighPuritySequence*
- (process.ApeEstimator1+
- process.ApeEstimator2+
- process.ApeEstimator3
- )
-)
-
-
-
diff --git a/Alignment/APEEstimation/test/trackerTreeGenerator_cfg.py b/Alignment/APEEstimation/test/trackerTreeGenerator_cfg.py
new file mode 100644
index 0000000000000..4727e156f1327
--- /dev/null
+++ b/Alignment/APEEstimation/test/trackerTreeGenerator_cfg.py
@@ -0,0 +1,90 @@
+from __future__ import print_function
+import FWCore.ParameterSet.Config as cms
+
+import os
+
+
+##
+## Process definition
+##
+process = cms.Process("TrackerTreeGeneration")
+
+
+
+##
+## MessageLogger
+##
+process.load("FWCore.MessageService.MessageLogger_cfi")
+process.MessageLogger.cerr.threshold = 'INFO'
+process.MessageLogger.TrackerTreeGenerator=dict()
+process.MessageLogger.cerr.INFO.limit = 0
+process.MessageLogger.cerr.default.limit = -1
+process.MessageLogger.cerr.TrackerTreeGenerator = cms.untracked.PSet(limit = cms.untracked.int32(-1))
+
+
+
+##
+## Process options
+##
+process.options = cms.untracked.PSet( wantSummary = cms.untracked.bool(True) )
+
+
+
+##
+## Input source
+##
+process.source = cms.Source("EmptySource")
+
+
+
+##
+## Number of events
+##
+process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(1) )
+
+
+
+##
+## Geometry
+##
+process.load("Configuration.Geometry.GeometryRecoDB_cff")
+
+
+##
+## Conditions
+##
+# Always use ideal conditions so that the alignment has no influence on absolute positions, orientations, etc.
+# This ensures that, when choosing special regions in e.g. globalPhi, modules of the same rod are contained in the same region.
+process.load('Configuration.StandardSequences.FrontierConditions_GlobalTag_cff')
+from Configuration.AlCa.GlobalTag import GlobalTag
+process.GlobalTag = GlobalTag(process.GlobalTag, 'auto:phase1_2023_design', '')
+print("Using global tag "+process.GlobalTag.globaltag._value)
+
+
+##
+## Analyzer
+##
+process.load("Alignment.TrackerAlignment.TrackerTreeGenerator_cfi")
+
+
+
+##
+## Output File Configuration
+##
+process.TFileService = cms.Service("TFileService",
+ fileName = cms.string(os.environ['CMSSW_BASE'] + '/src/Alignment/APEEstimation/hists/TrackerTree.root')
+)
+
+
+
+##
+## Path
+##
+process.p = cms.Path(process.TrackerTreeGenerator)
+
+
+
+
+
+
+
diff --git a/Alignment/APEEstimation/test/unitTest.sh b/Alignment/APEEstimation/test/unitTest.sh
new file mode 100755
index 0000000000000..a29c2099c3f24
--- /dev/null
+++ b/Alignment/APEEstimation/test/unitTest.sh
@@ -0,0 +1,11 @@
+#! /bin/bash
+function die { echo $1: status $2 ; exit $2; }
+
+echo " TESTING data set skimming"
+# skim the predefined data set
+python3 $CMSSW_BASE/src/Alignment/APEEstimation/test/SkimProducer/startSkim.py -s UnitTest || die "Failure skimming data set" $?
+
+echo " TESTING auto submitter"
+# start baseline measurement
+python3 $CMSSW_BASE/src/Alignment/APEEstimation/test/autoSubmitter/autoSubmitter.py -c $CMSSW_BASE/src/Alignment/APEEstimation/test/autoSubmitter/unitTest.ini -u || die "Failure running autoSubmitter" $?
+
diff --git a/CondCore/BeamSpotPlugins/interface/BeamSpotPayloadInspectorHelper.h b/CondCore/BeamSpotPlugins/interface/BeamSpotPayloadInspectorHelper.h
index a151687b95edb..2e03a2cbd1d5f 100644
--- a/CondCore/BeamSpotPlugins/interface/BeamSpotPayloadInspectorHelper.h
+++ b/CondCore/BeamSpotPlugins/interface/BeamSpotPayloadInspectorHelper.h
@@ -9,6 +9,7 @@
#include "FWCore/MessageLogger/interface/MessageLogger.h"
// system includes
+#include <ctime>
#include
#include
#include
@@ -60,6 +61,26 @@ namespace beamSpotPI {
END_OF_TYPES = 25,
};
+ /************************************************/
+ // Function to convert cond::Time_t (in microseconds) to human-readable date string
+  inline std::string convertTimeToDateString(cond::Time_t timeValue, bool hasMicros = false, bool toUTC = true) {
+ // Convert microseconds to seconds
+    std::time_t unixTime = static_cast<std::time_t>(hasMicros ? timeValue / 1000000 : timeValue);
+
+ // Convert std::time_t to struct tm (to UTC, or not)
+ std::tm* timeInfo = toUTC ? std::gmtime(&unixTime) : std::localtime(&unixTime);
+
+ // Convert struct tm to human-readable string format
+ char buffer[80];
+ std::strftime(buffer, sizeof(buffer), "%Y-%m-%d %H:%M:%S", timeInfo);
+
+    // Microseconds could be appended to the string here (currently disabled)
+ std::string dateString(buffer);
+ //dateString += "." + std::to_string(timeValue % 1000000);
+
+ return dateString;
+ }
+
/************************************************/
inline std::string getStringFromParamEnum(const parameters& parameter,
const bool addUnits = false /*not used by default*/) {
@@ -432,6 +453,44 @@ namespace beamSpotPI {
(tagname + " IOV: #color[4]{" + std::to_string(runLS.first) + "," + std::to_string(runLS.second) + "}")
.c_str());
+      if constexpr (std::is_same_v<PayloadType, BeamSpotOnlineObjects>) {
+ // protections needed against old payload that do not have these data members persisted
+ const auto& creationTime = test_(
+ [&]() {
+ return m_payload->creationTime();
+ }, // Lambda function capturing m_payload and calling creationTime
+ better_error);
+
+ const auto& startTime = test_(
+ [&]() {
+ return m_payload->startTimeStamp();
+ }, // Lambda function capturing m_payload and calling startTimeStamp
+ better_error);
+
+ const auto& endTime = test_(
+ [&]() {
+ return m_payload->endTimeStamp();
+ }, // Lambda function capturing m_payload and calling endTimeStamp
+ better_error);
+ canvas.cd(2);
+ ltx.SetTextSize(0.025);
+ ltx.DrawLatexNDC(
+ gPad->GetLeftMargin() + 0.01,
+ gPad->GetBottomMargin() + 0.15,
+ ("#color[2]{(" + beamSpotPI::convertTimeToDateString(creationTime, /*has us*/ true) + ")}").c_str());
+
+ ltx.DrawLatexNDC(gPad->GetLeftMargin() + 0.01,
+ gPad->GetBottomMargin() + 0.085,
+ ("#color[2]{(" + beamSpotPI::convertTimeToDateString(startTime) + ")}").c_str());
+
+ ltx.DrawLatexNDC(gPad->GetLeftMargin() + 0.01,
+ gPad->GetBottomMargin() + 0.025,
+ ("#color[2]{(" + beamSpotPI::convertTimeToDateString(endTime) + ")}").c_str());
+
+ ltx.DrawLatexNDC(
+ gPad->GetLeftMargin(), gPad->GetBottomMargin() - 0.05, "#color[4]{N.B.} TimeStamps are in UTC");
+ }
+
std::string fileName(this->m_imageFileName);
canvas.SaveAs(fileName.c_str());
@@ -477,6 +536,22 @@ namespace beamSpotPI {
return "should never be here";
}
}
+
+ // Slightly better error handler
+ static void better_error(const std::exception& e) { edm::LogError("DisplayParameters") << e.what() << '\n'; }
+
+ // Method to catch exceptions
+    template <typename T = cond::Time_t, typename Except = std::exception, typename Func, typename Response>
+ T test_(Func f, Response r) const {
+ try {
+ LogDebug("DisplayParameters") << "I have tried" << std::endl;
+ return f();
+ } catch (const Except& e) {
+ LogDebug("DisplayParameters") << "I have caught!" << std::endl;
+ r(e);
+        return static_cast<T>(1);
+ }
+ }
};
/************************************************
diff --git a/Configuration/PyReleaseValidation/python/relval_steps.py b/Configuration/PyReleaseValidation/python/relval_steps.py
index 718c994883beb..df760bfeec429 100644
--- a/Configuration/PyReleaseValidation/python/relval_steps.py
+++ b/Configuration/PyReleaseValidation/python/relval_steps.py
@@ -2900,14 +2900,14 @@ def gen2023HiMix(fragment,howMuch):
steps['RECODR3_reHLT_HCALOnlyCPU']=merge([{'-s': 'RAW2DIGI:RawToDigi_hcalOnly,RECO:reconstruction_hcalOnly,DQM:@hcalOnly+@hcal2Only'},steps['RECODR3_reHLT_2023']])
steps['RECODR3_reHLT_HCALOnlyGPU']=merge([step3_gpu, steps['RECODR3_reHLT_HCALOnlyCPU']])
-steps['RECONANORUN3_reHLT_2022']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@standardDQMFakeHLT+@miniAODDQM+@nanoAODDQM'},steps['RECODR3_reHLT_2022']])
-steps['RECONANORUN3_ZB_reHLT_2022']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM+@nanoAODDQM'},steps['RECODR3_reHLT_2022']])
+steps['RECONANORUN3_reHLT_2022']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@standardDQMFakeHLT+@miniAODDQM+@nanoAODDQM','--datatier':'RECO,MINIAOD,NANOAOD,DQMIO','--eventcontent':'RECO,MINIAOD,NANOEDMAOD,DQM'},steps['RECODR3_reHLT_2022']])
+steps['RECONANORUN3_ZB_reHLT_2022']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM+@nanoAODDQM'},steps['RECONANORUN3_reHLT_2022']])
steps['RECOCOSMRUN3_reHLT_2022']=merge([{'--scenario':'cosmics','-s':'RAW2DIGI,L1Reco,RECO,DQM','--datatier':'RECO,DQMIO','--eventcontent':'RECO,DQM'},steps['RECONANORUN3_reHLT_2022']])
-steps['RECONANORUN3_reHLT_2023']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@standardDQM+@miniAODDQM+@nanoAODDQM'},steps['RECODR3_reHLT_2023']])
+steps['RECONANORUN3_reHLT_2023']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@standardDQM+@miniAODDQM+@nanoAODDQM','--datatier':'RECO,MINIAOD,NANOAOD,DQMIO','--eventcontent':'RECO,MINIAOD,NANOEDMAOD,DQM'},steps['RECODR3_reHLT_2023']])
steps['RECONANORUN3_reHLT_2023B']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@standardDQM+@miniAODDQM+@nanoAODDQM','--datatier':'RECO,MINIAOD,NANOAOD,DQMIO','--eventcontent':'RECO,MINIAOD,NANOEDMAOD,DQM'},steps['RECODR3_reHLT_2023B']])
-steps['RECONANORUN3_ZB_reHLT_2023B']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM+@nanoAODDQM'},steps['RECODR3_reHLT_2023B']])
-steps['RECONANORUN3_ZB_reHLT_2023']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@rerecoZeroBias+@miniAODDQM+@nanoAODDQM'},steps['RECODR3_reHLT_2023']])
+steps['RECONANORUN3_ZB_reHLT_2023B']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@rerecoZeroBiasFakeHLT+@miniAODDQM+@nanoAODDQM'},steps['RECONANORUN3_reHLT_2023B']])
+steps['RECONANORUN3_ZB_reHLT_2023']=merge([{'-s':'RAW2DIGI,L1Reco,RECO,PAT,NANO,DQM:@rerecoZeroBias+@miniAODDQM+@nanoAODDQM'},steps['RECONANORUN3_reHLT_2023']])
steps['RECOCOSMRUN3_reHLT_2023']=merge([{'--scenario':'cosmics','-s':'RAW2DIGI,L1Reco,RECO,DQM','--datatier':'RECO,DQMIO','--eventcontent':'RECO,DQM'},steps['RECONANORUN3_reHLT_2023']])
# mask away - to be removed once we migrate the matrix to be fully unscheduled for the RECO step
diff --git a/Configuration/StandardSequences/python/DigiToRaw_Repack_cff.py b/Configuration/StandardSequences/python/DigiToRaw_Repack_cff.py
index 24af3647931b1..56bf9825f1c8f 100644
--- a/Configuration/StandardSequences/python/DigiToRaw_Repack_cff.py
+++ b/Configuration/StandardSequences/python/DigiToRaw_Repack_cff.py
@@ -72,7 +72,7 @@
cms.InputTag('siStripZeroSuppressionHLT','ScopeMode')),
)
-from RecoLocalTracker.SiStripClusterizer.SiStripClusters2ApproxClusters_cff import hltSiStripClusters2ApproxClusters
+from RecoLocalTracker.SiStripClusterizer.SiStripClusters2ApproxClusters_cff import *
from EventFilter.Utilities.EvFFEDExcluder_cfi import EvFFEDExcluder as _EvFFEDExcluder
rawPrimeDataRepacker = _EvFFEDExcluder.clone(
@@ -80,5 +80,9 @@
fedsToExclude = [foo for foo in range(50, 490)]
)
-DigiToApproxClusterRawTask = cms.Task(siStripDigisHLT,siStripZeroSuppressionHLT,siStripClustersHLT,hltSiStripClusters2ApproxClusters,rawPrimeDataRepacker)
+hltScalersRawToDigi = cms.EDProducer( "ScalersRawToDigi",
+ scalersInputTag = cms.InputTag( "rawDataRepacker" )
+)
+
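+# hltScalersRawToDigi and hltBeamSpotProducer (imported from SiStripClusters2ApproxClusters_cff)
+# supply the online beam spot that hltSiStripClusters2ApproxClusters now consumes.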
+DigiToApproxClusterRawTask = cms.Task(siStripDigisHLT,siStripZeroSuppressionHLT,hltScalersRawToDigi,hltBeamSpotProducer,siStripClustersHLT,hltSiStripClusters2ApproxClusters,rawPrimeDataRepacker)
DigiToApproxClusterRaw = cms.Sequence(DigiToApproxClusterRawTask)
diff --git a/DataFormats/SiStripCluster/interface/SiStripApproximateCluster.h b/DataFormats/SiStripCluster/interface/SiStripApproximateCluster.h
index 8ff3317666376..baf87f791d9f6 100644
--- a/DataFormats/SiStripCluster/interface/SiStripApproximateCluster.h
+++ b/DataFormats/SiStripCluster/interface/SiStripApproximateCluster.h
@@ -8,27 +8,38 @@ class SiStripApproximateCluster {
public:
SiStripApproximateCluster() {}
- explicit SiStripApproximateCluster(cms_uint16_t barycenter,
- cms_uint8_t width,
- cms_uint8_t avgCharge,
- bool isSaturated) {
+ explicit SiStripApproximateCluster(
+ cms_uint16_t barycenter, cms_uint8_t width, cms_uint8_t avgCharge, bool filter, bool isSaturated) {
barycenter_ = barycenter;
width_ = width;
avgCharge_ = avgCharge;
+ filter_ = filter;
isSaturated_ = isSaturated;
}
- explicit SiStripApproximateCluster(const SiStripCluster& cluster, unsigned int maxNSat);
+ explicit SiStripApproximateCluster(const SiStripCluster& cluster,
+ unsigned int maxNSat,
+ float hitPredPos,
+ bool peakFilter);
cms_uint16_t barycenter() const { return barycenter_; }
cms_uint8_t width() const { return width_; }
cms_uint8_t avgCharge() const { return avgCharge_; }
+ bool filter() const { return filter_; }
bool isSaturated() const { return isSaturated_; }
+ bool peakFilter() const { return peakFilter_; }
private:
cms_uint16_t barycenter_ = 0;
cms_uint8_t width_ = 0;
cms_uint8_t avgCharge_ = 0;
+ bool filter_ = false;
bool isSaturated_ = false;
+ bool peakFilter_ = false;
+ static constexpr double trimMaxADC_ = 30.;
+ static constexpr double trimMaxFracTotal_ = .15;
+ static constexpr double trimMaxFracNeigh_ = .25;
+ static constexpr double maxTrimmedSizeDiffNeg_ = .7;
+ static constexpr double maxTrimmedSizeDiffPos_ = 1.;
};
#endif // DataFormats_SiStripCluster_SiStripApproximateCluster_h
diff --git a/DataFormats/SiStripCluster/interface/SiStripCluster.h b/DataFormats/SiStripCluster/interface/SiStripCluster.h
index 383e3960f629e..d95f379251036 100644
--- a/DataFormats/SiStripCluster/interface/SiStripCluster.h
+++ b/DataFormats/SiStripCluster/interface/SiStripCluster.h
@@ -82,6 +82,10 @@ class SiStripCluster {
*/
int charge() const;
+ bool filter() const;
+
+ bool isFromApprox() const;
+
/** Test (set) the merged status of the cluster
*
*/
@@ -99,6 +103,7 @@ class SiStripCluster {
//these are used if amplitude information is not available (using approximate cluster constructor)
float barycenter_ = 0;
int charge_ = 0;
+ bool filter_ = false;
// ggiurgiu@fnal.gov, 01/05/12
// Add cluster errors to be used by rechits from split clusters.
diff --git a/DataFormats/SiStripCluster/src/SiStripApproximateCluster.cc b/DataFormats/SiStripCluster/src/SiStripApproximateCluster.cc
index 10a7c29c2f60c..01b30963a083a 100644
--- a/DataFormats/SiStripCluster/src/SiStripApproximateCluster.cc
+++ b/DataFormats/SiStripCluster/src/SiStripApproximateCluster.cc
@@ -3,11 +3,16 @@
#include <algorithm>
#include <cmath>
-SiStripApproximateCluster::SiStripApproximateCluster(const SiStripCluster& cluster, unsigned int maxNSat) {
+SiStripApproximateCluster::SiStripApproximateCluster(const SiStripCluster& cluster,
+ unsigned int maxNSat,
+ float hitPredPos,
+ bool peakFilter) {
barycenter_ = std::round(cluster.barycenter() * 10);
width_ = cluster.size();
avgCharge_ = cluster.charge() / cluster.size();
+ filter_ = false;
isSaturated_ = false;
+ peakFilter_ = peakFilter;
//mimicing the algorithm used in StripSubClusterShapeTrajectoryFilter...
//Looks for 3 adjacent saturated strips (ADC>=254)
@@ -25,6 +30,28 @@ SiStripApproximateCluster::SiStripApproximateCluster(const SiStripCluster& clust
maxSat = std::max(maxSat, thisSat);
}
if (maxSat >= maxNSat) {
+ filter_ = true;
isSaturated_ = true;
}
+
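+ // Mirror the StripSubClusterShapeTrajectoryFilter logic: trim low-charge strips at the
+ // cluster edges, compare the trimmed width with the expected projected width (hitPredPos),
+ // and fall back to the peak-finder decision in the ambiguous case.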
+ unsigned int hitStripsTrim = ampls.size();
+ int sum = std::accumulate(ampls.begin(), ampls.end(), 0);
+ uint8_t trimCut = std::min<uint8_t>(trimMaxADC_, std::floor(trimMaxFracTotal_ * sum));
+ auto begin = ampls.begin();
+ auto last = ampls.end() - 1;
+ while (hitStripsTrim > 1 && (*begin < std::max<uint8_t>(trimCut, trimMaxFracNeigh_ * (*(begin + 1))))) {
+ hitStripsTrim--;
+ ++begin;
+ }
+ while (hitStripsTrim > 1 && (*last < std::max<uint8_t>(trimCut, trimMaxFracNeigh_ * (*(last - 1))))) {
+ hitStripsTrim--;
+ --last;
+ }
+ if (hitStripsTrim < std::floor(std::abs(hitPredPos) - maxTrimmedSizeDiffNeg_)) {
+ filter_ = false;
+ } else if (hitStripsTrim <= std::ceil(std::abs(hitPredPos) + maxTrimmedSizeDiffPos_)) {
+ filter_ = true;
+ } else {
+ filter_ = peakFilter_;
+ }
}
diff --git a/DataFormats/SiStripCluster/src/SiStripCluster.cc b/DataFormats/SiStripCluster/src/SiStripCluster.cc
index 3d02bf727648e..9deb73b4c4a22 100644
--- a/DataFormats/SiStripCluster/src/SiStripCluster.cc
+++ b/DataFormats/SiStripCluster/src/SiStripCluster.cc
@@ -26,6 +26,7 @@ SiStripCluster::SiStripCluster(const SiStripApproximateCluster cluster, const ui
barycenter_ = cluster.barycenter() / 10.0;
charge_ = cluster.width() * cluster.avgCharge();
amplitudes_.resize(cluster.width(), cluster.avgCharge());
+ filter_ = cluster.filter();
float halfwidth_ = 0.5f * float(cluster.width());
@@ -60,3 +61,10 @@ float SiStripCluster::barycenter() const {
// Need to mask off the high bit of firstStrip_, which contains the merged status.
return float((firstStrip_ & stripIndexMask)) + float(sumx) / float(suma) + 0.5f;
}
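+// filter() and isFromApprox() are only meaningful for clusters rebuilt from a
+// SiStripApproximateCluster, which are tagged by a non-zero cached barycenter_.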
+bool SiStripCluster::filter() const {
+ if (barycenter_ > 0)
+ return filter_;
+ return false;
+}
+
+bool SiStripCluster::isFromApprox() const { return (barycenter_ > 0); }
diff --git a/DataFormats/SiStripCluster/src/classes_def.xml b/DataFormats/SiStripCluster/src/classes_def.xml
index 8c8c0a49d911a..3efcd26a23881 100755
--- a/DataFormats/SiStripCluster/src/classes_def.xml
+++ b/DataFormats/SiStripCluster/src/classes_def.xml
@@ -1,6 +1,7 @@
-
+
+
@@ -24,7 +25,8 @@
-
+
+
diff --git a/DataFormats/SiStripCommon/interface/ConstantsForHardwareSystems.h b/DataFormats/SiStripCommon/interface/ConstantsForHardwareSystems.h
index 7f5d86d420854..a8c9a755f6cbb 100644
--- a/DataFormats/SiStripCommon/interface/ConstantsForHardwareSystems.h
+++ b/DataFormats/SiStripCommon/interface/ConstantsForHardwareSystems.h
@@ -44,6 +44,7 @@ namespace sistrip {
static const uint16_t STRIPS_PER_FEDCH = STRIPS_PER_APV * APVS_PER_FEDCH;
static const uint16_t STRIPS_PER_FEUNIT = STRIPS_PER_FEDCH * FEDCH_PER_FEUNIT; // 3072
static const uint16_t STRIPS_PER_FED = STRIPS_PER_FEUNIT * FEUNITS_PER_FED; // 24576
+ static constexpr float MeVperADCStrip = 9.5665E-4;
// -------------------- FED buffers --------------------
diff --git a/Geometry/TrackerGeometryBuilder/interface/TrackerGeometry.h b/Geometry/TrackerGeometryBuilder/interface/TrackerGeometry.h
index a54cb51a3b258..1a5ead83879b0 100644
--- a/Geometry/TrackerGeometryBuilder/interface/TrackerGeometry.h
+++ b/Geometry/TrackerGeometryBuilder/interface/TrackerGeometry.h
@@ -55,6 +55,14 @@ class TrackerGeometry final : public TrackingGeometry {
Ph2SS
};
+ // deleted copy constructor and copy assignment operators
+ TrackerGeometry(TrackerGeometry const&) = delete;
+ TrackerGeometry& operator=(TrackerGeometry const&) = delete;
+
+ // defaulted move constructor and move assignment operators
+ TrackerGeometry(TrackerGeometry&&) = default;
+ TrackerGeometry& operator=(TrackerGeometry&&) = default;
+
~TrackerGeometry() override;
const DetTypeContainer& detTypes() const override { return theDetTypes; }
diff --git a/HLTrigger/Configuration/python/customizeHLTforCMSSW.py b/HLTrigger/Configuration/python/customizeHLTforCMSSW.py
index cb80c17f2bb22..1cb644d600e12 100644
--- a/HLTrigger/Configuration/python/customizeHLTforCMSSW.py
+++ b/HLTrigger/Configuration/python/customizeHLTforCMSSW.py
@@ -226,6 +226,16 @@ def customizeHLTfor41495(process):
return process
+def customizeHLTfor41815(process):
+ # use hlt online BeamSpot for SiStripClusters2ApproxClusters
+ for producer in producers_by_type(process, 'SiStripClusters2ApproxClusters'):
+ producer.beamSpot = cms.InputTag('hltOnlineBeamSpot')
+
+ if hasattr(process, 'HLT_HIRandom_v4'):
+ getattr(process,'HLT_HIRandom_v4').insert(2,process.HLTBeamSpot)
+
+ return process
+
# CMSSW version specific customizations
def customizeHLTforCMSSW(process, menuType="GRun"):
@@ -236,5 +246,6 @@ def customizeHLTforCMSSW(process, menuType="GRun"):
process = customizeHLTfor41058(process)
process = customizeHLTfor41495(process)
+ process = customizeHLTfor41815(process)
return process
diff --git a/RecoLocalTracker/SiStripClusterizer/plugins/BuildFile.xml b/RecoLocalTracker/SiStripClusterizer/plugins/BuildFile.xml
index 27ae390132063..8933e253d7be6 100644
--- a/RecoLocalTracker/SiStripClusterizer/plugins/BuildFile.xml
+++ b/RecoLocalTracker/SiStripClusterizer/plugins/BuildFile.xml
@@ -6,5 +6,8 @@
+
+
+
diff --git a/RecoLocalTracker/SiStripClusterizer/plugins/SiStripApprox2ApproxClusters.cc b/RecoLocalTracker/SiStripClusterizer/plugins/SiStripApprox2ApproxClusters.cc
index 910e8c8eb54a2..2c6d8f838cf78 100644
--- a/RecoLocalTracker/SiStripClusterizer/plugins/SiStripApprox2ApproxClusters.cc
+++ b/RecoLocalTracker/SiStripClusterizer/plugins/SiStripApprox2ApproxClusters.cc
@@ -59,6 +59,7 @@ void SiStripApprox2ApproxClusters::produce(edm::Event& event, edm::EventSetup co
float barycenter = cluster.barycenter();
uint8_t width = cluster.width();
float avgCharge = cluster.avgCharge();
+ bool filter = cluster.filter();
bool isSaturated = cluster.isSaturated();
switch (approxVersion) {
@@ -86,7 +87,7 @@ void SiStripApprox2ApproxClusters::produce(edm::Event& event, edm::EventSetup co
break;
}
- ff.push_back(SiStripApproximateCluster(barycenter, width, avgCharge, isSaturated));
+ ff.push_back(SiStripApproximateCluster(barycenter, width, avgCharge, filter, isSaturated));
}
}
diff --git a/RecoLocalTracker/SiStripClusterizer/plugins/SiStripClusters2ApproxClusters.cc b/RecoLocalTracker/SiStripClusterizer/plugins/SiStripClusters2ApproxClusters.cc
index 2f4d32f672aea..c842c384c4971 100644
--- a/RecoLocalTracker/SiStripClusterizer/plugins/SiStripClusters2ApproxClusters.cc
+++ b/RecoLocalTracker/SiStripClusterizer/plugins/SiStripClusters2ApproxClusters.cc
@@ -1,16 +1,33 @@
-
-
-#include "FWCore/Framework/interface/MakerMacros.h"
+#include "CalibFormats/SiStripObjects/interface/SiStripDetInfo.h"
+#include "CalibTracker/SiStripCommon/interface/SiStripDetInfoFileReader.h"
+#include "CondFormats/DataRecord/interface/SiStripNoisesRcd.h"
+#include "CondFormats/SiStripObjects/interface/SiStripNoises.h"
+#include "DataFormats/BeamSpot/interface/BeamSpot.h"
+#include "DataFormats/Common/interface/DetSetVector.h"
+#include "DataFormats/Common/interface/DetSetVectorNew.h"
+#include "DataFormats/GeometryCommonDetAlgo/interface/MeasurementPoint.h"
+#include "DataFormats/GeometryVector/interface/GlobalPoint.h"
+#include "DataFormats/GeometryVector/interface/LocalPoint.h"
+#include "DataFormats/SiStripCluster/interface/SiStripApproximateCluster.h"
+#include "DataFormats/SiStripCluster/interface/SiStripCluster.h"
+#include "DataFormats/TrackReco/interface/Track.h"
+#include "DataFormats/TrackReco/interface/TrackBase.h"
+#include "DataFormats/SiStripCommon/interface/ConstantsForHardwareSystems.h"
#include "FWCore/Framework/interface/Frameworkfwd.h"
+#include "FWCore/Framework/interface/MakerMacros.h"
#include "FWCore/Framework/interface/stream/EDProducer.h"
-#include "FWCore/ParameterSet/interface/ParameterSet.h"
-#include "FWCore/Utilities/interface/InputTag.h"
#include "FWCore/ParameterSet/interface/ConfigurationDescriptions.h"
+#include "FWCore/ParameterSet/interface/FileInPath.h"
+#include "FWCore/ParameterSet/interface/ParameterSet.h"
#include "FWCore/ParameterSet/interface/ParameterSetDescription.h"
-#include "DataFormats/SiStripCluster/interface/SiStripApproximateCluster.h"
-#include "DataFormats/SiStripCluster/interface/SiStripCluster.h"
-#include "DataFormats/Common/interface/DetSetVectorNew.h"
-#include "DataFormats/Common/interface/DetSetVector.h"
+#include "FWCore/Utilities/interface/ESInputTag.h"
+#include "FWCore/Utilities/interface/InputTag.h"
+#include "Geometry/Records/interface/TrackerDigiGeometryRecord.h"
+#include "Geometry/TrackerGeometryBuilder/interface/StripGeomDetUnit.h"
+#include "Geometry/TrackerGeometryBuilder/interface/TrackerGeometry.h"
+#include "RecoTracker/PixelLowPtUtilities/interface/ClusterShapeHitFilter.h"
+#include "RecoTracker/PixelLowPtUtilities/interface/SlidingPeakFinder.h"
+#include "RecoTracker/Record/interface/CkfComponentsRecord.h"
#include <vector>
#include <memory>
@@ -27,6 +44,25 @@ class SiStripClusters2ApproxClusters : public edm::stream::EDProducer<> {
edm::EDGetTokenT<edmNew::DetSetVector<SiStripCluster> > clusterToken;
unsigned int maxNSat;
+ static constexpr double subclusterWindow_ = .7;
+ static constexpr double seedCutMIPs_ = .35;
+ static constexpr double seedCutSN_ = 7.;
+ static constexpr double subclusterCutMIPs_ = .45;
+ static constexpr double subclusterCutSN_ = 12.;
+
+ edm::InputTag beamSpot_;
+ edm::EDGetTokenT<reco::BeamSpot> beamSpotToken_;
+
+ edm::ESGetToken<TrackerGeometry, TrackerDigiGeometryRecord> tkGeomToken_;
+
+ edm::FileInPath fileInPath_;
+ SiStripDetInfo detInfo_;
+
+ std::string csfLabel_;
+ edm::ESGetToken<ClusterShapeHitFilter, CkfComponentsRecord> csfToken_;
+
+ edm::ESGetToken<SiStripNoises, SiStripNoisesRcd> stripNoiseToken_;
+ edm::ESHandle<SiStripNoises> theNoise_;
};
SiStripClusters2ApproxClusters::SiStripClusters2ApproxClusters(const edm::ParameterSet& conf) {
@@ -34,18 +70,79 @@ SiStripClusters2ApproxClusters::SiStripClusters2ApproxClusters(const edm::Parame
maxNSat = conf.getParameter<unsigned int>("maxSaturatedStrips");
clusterToken = consumes<edmNew::DetSetVector<SiStripCluster> >(inputClusters);
+
+ beamSpot_ = conf.getParameter<edm::InputTag>("beamSpot");
+ beamSpotToken_ = consumes<reco::BeamSpot>(beamSpot_);
+
+ tkGeomToken_ = esConsumes();
+
+ fileInPath_ = edm::FileInPath(SiStripDetInfoFileReader::kDefaultFile);
+ detInfo_ = SiStripDetInfoFileReader::read(fileInPath_.fullPath());
+
+ csfLabel_ = conf.getParameter<std::string>("clusterShapeHitFilterLabel");
+ csfToken_ = esConsumes(edm::ESInputTag("", csfLabel_));
+
+ stripNoiseToken_ = esConsumes();
+
produces<edmNew::DetSetVector<SiStripApproximateCluster> >();
}
-void SiStripClusters2ApproxClusters::produce(edm::Event& event, edm::EventSetup const&) {
+void SiStripClusters2ApproxClusters::produce(edm::Event& event, edm::EventSetup const& iSetup) {
auto result = std::make_unique<edmNew::DetSetVector<SiStripApproximateCluster> >();
const auto& clusterCollection = event.get(clusterToken);
+ auto const beamSpotHandle = event.getHandle(beamSpotToken_);
+ auto const& bs = beamSpotHandle.isValid() ? *beamSpotHandle : reco::BeamSpot();
+ if (not beamSpotHandle.isValid()) {
+ edm::LogError("SiStripClusters2ApproxClusters")
+ << "didn't find a valid beamspot with label \"" << beamSpot_.encode() << "\" -> using (0,0,0)";
+ }
+
+ const auto& tkGeom = &iSetup.getData(tkGeomToken_);
+ const auto& theFilter = &iSetup.getData(csfToken_);
+ const auto& theNoise_ = &iSetup.getData(stripNoiseToken_);
+
for (const auto& detClusters : clusterCollection) {
edmNew::DetSetVector<SiStripApproximateCluster>::FastFiller ff{*result, detClusters.id()};
- for (const auto& cluster : detClusters)
- ff.push_back(SiStripApproximateCluster(cluster, maxNSat));
+ unsigned int detId = detClusters.id();
+ const GeomDet* det = tkGeom->idToDet(detId);
+ double nApvs = detInfo_.getNumberOfApvsAndStripLength(detId).first;
+ double stripLength = detInfo_.getNumberOfApvsAndStripLength(detId).second;
+ double barycenter_ypos = 0.5 * stripLength;
+
+ const StripGeomDetUnit* stripDet = dynamic_cast<const StripGeomDetUnit*>(det);
+ float mip = 3.9 / (sistrip::MeVperADCStrip / stripDet->surface().bounds().thickness());
+
+ for (const auto& cluster : detClusters) {
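+ // Convert the cluster barycenter from strip coordinates into a local position on the
+ // module; the direction from the beam spot to that point approximates the track
+ // direction used for the cluster-shape compatibility estimate.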
+ const LocalPoint& lp = LocalPoint(((cluster.barycenter() * 10 / (sistrip::STRIPS_PER_APV * nApvs)) -
+ ((stripDet->surface().bounds().width()) * 0.5f)),
+ barycenter_ypos - (0.5f * stripLength),
+ 0.);
+ const GlobalPoint& gpos = det->surface().toGlobal(lp);
+ GlobalPoint beamspot(bs.position().x(), bs.position().y(), bs.position().z());
+ const GlobalVector& gdir = gpos - beamspot;
+ const LocalVector& ldir = det->toLocal(gdir);
+
+ int hitStrips;
+ float hitPredPos;
+ theFilter->getSizes(detId, cluster, lp, ldir, hitStrips, hitPredPos);
+
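+ // Run the sub-cluster peak finder on the full amplitude vector; the result is stored
+ // in the approximate cluster so it can be reused once only the compressed
+ // information is available.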
+ bool peakFilter = false;
+ SlidingPeakFinder pf(std::max<int>(2, std::ceil(std::abs(hitPredPos) + subclusterWindow_)));
+ float mipnorm = mip / std::abs(ldir.z());
+ PeakFinderTest test(mipnorm,
+ detId,
+ cluster.firstStrip(),
+ theNoise_,
+ seedCutMIPs_,
+ seedCutSN_,
+ subclusterCutMIPs_,
+ subclusterCutSN_);
+ peakFilter = pf.apply(cluster.amplitudes(), test);
+
+ ff.push_back(SiStripApproximateCluster(cluster, maxNSat, hitPredPos, peakFilter));
+ }
}
event.put(std::move(result));
@@ -55,6 +152,8 @@ void SiStripClusters2ApproxClusters::fillDescriptions(edm::ConfigurationDescript
edm::ParameterSetDescription desc;
desc.add("inputClusters", edm::InputTag("siStripClusters"));
desc.add("maxSaturatedStrips", 3);
+ desc.add("clusterShapeHitFilterLabel", "ClusterShapeHitFilter"); // add CSF label
+ desc.add("beamSpot", edm::InputTag("offlineBeamSpot")); // add BeamSpot tag
descriptions.add("SiStripClusters2ApproxClusters", desc);
}
diff --git a/RecoLocalTracker/SiStripClusterizer/python/SiStripClusters2ApproxClusters_cff.py b/RecoLocalTracker/SiStripClusterizer/python/SiStripClusters2ApproxClusters_cff.py
index 2454d2f64292f..ee674d4c60a9f 100644
--- a/RecoLocalTracker/SiStripClusterizer/python/SiStripClusters2ApproxClusters_cff.py
+++ b/RecoLocalTracker/SiStripClusterizer/python/SiStripClusters2ApproxClusters_cff.py
@@ -1,5 +1,14 @@
from RecoLocalTracker.SiStripClusterizer.SiStripClusters2ApproxClusters_cfi import *
from Configuration.ProcessModifiers.approxSiStripClusters_cff import approxSiStripClusters
+
+from RecoTracker.PixelLowPtUtilities.ClusterShapeHitFilterESProducer_cfi import ClusterShapeHitFilterESProducer as _ClusterShapeHitFilterESProducer
+hltClusterShapeHitFilterESProducer = _ClusterShapeHitFilterESProducer.clone(ComponentName = 'hltClusterShapeHitFilterESProducer')
+
+from RecoVertex.BeamSpotProducer.BeamSpotOnline_cfi import onlineBeamSpotProducer
+hltBeamSpotProducer = onlineBeamSpotProducer.clone(src = 'hltScalersRawToDigi')
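+# The HLT flavour of the approximate-cluster producer uses these dedicated clones for the
+# cluster-shape filter and the online beam spot (wired in via the modifier below).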
hltSiStripClusters2ApproxClusters = SiStripClusters2ApproxClusters.clone()
-approxSiStripClusters.toModify(hltSiStripClusters2ApproxClusters, inputClusters = "siStripClustersHLT")
+approxSiStripClusters.toModify(hltSiStripClusters2ApproxClusters,
+ beamSpot = "hltBeamSpotProducer",
+ inputClusters = "siStripClustersHLT",
+ clusterShapeHitFilterLabel = "hltClusterShapeHitFilterESProducer")
diff --git a/RecoTracker/IterativeTracking/python/PixelLessStep_cff.py b/RecoTracker/IterativeTracking/python/PixelLessStep_cff.py
index 5d6a4684d50f6..967a02c1a9ff8 100644
--- a/RecoTracker/IterativeTracking/python/PixelLessStep_cff.py
+++ b/RecoTracker/IterativeTracking/python/PixelLessStep_cff.py
@@ -201,15 +201,12 @@
FilterStripHits = cms.bool(True),
ClusterShapeHitFilterName = cms.string('pixelLessStepClusterShapeHitFilter'),
ClusterShapeCacheSrc = cms.InputTag('siPixelClusterShapeCache') # not really needed here since FilterPixelHits=False
- )
+ ),
+ _StripSubClusterShapeSeedFilter.clone()
)
)
)
-from RecoTracker.PixelLowPtUtilities.StripSubClusterShapeSeedFilter_cfi import StripSubClusterShapeSeedFilter as _StripSubClusterShapeSeedFilter
-from Configuration.ProcessModifiers.approxSiStripClusters_cff import approxSiStripClusters
-(~approxSiStripClusters).toModify(pixelLessStepSeeds.SeedComparitorPSet.comparitors, func = lambda list: list.append(_StripSubClusterShapeSeedFilter.clone()) )
-
trackingLowPU.toModify(pixelLessStepHitDoublets, produceSeedingHitSets=True, produceIntermediateHitDoublets=False)
trackingLowPU.toModify(pixelLessStepSeeds,
seedingHitSets = 'pixelLessStepHitDoublets',
diff --git a/RecoTracker/IterativeTracking/python/TobTecStep_cff.py b/RecoTracker/IterativeTracking/python/TobTecStep_cff.py
index 1df35f49af81c..97d7296501729 100644
--- a/RecoTracker/IterativeTracking/python/TobTecStep_cff.py
+++ b/RecoTracker/IterativeTracking/python/TobTecStep_cff.py
@@ -93,6 +93,7 @@
extraPhiKDBox = 0.01,
)
from RecoTracker.TkSeedGenerator.seedCreatorFromRegionConsecutiveHitsEDProducer_cff import seedCreatorFromRegionConsecutiveHitsEDProducer as _seedCreatorFromRegionConsecutiveHitsTripletOnlyEDProducer
+from RecoTracker.PixelLowPtUtilities.StripSubClusterShapeSeedFilter_cfi import StripSubClusterShapeSeedFilter as _StripSubClusterShapeSeedFilter
_tobTecStepSeedComparitorPSet = dict(
ComponentName = 'CombinedSeedComparitor',
mode = cms.string('and'),
@@ -104,7 +105,8 @@
FilterStripHits = cms.bool(True),
ClusterShapeHitFilterName = cms.string('tobTecStepClusterShapeHitFilter'),
ClusterShapeCacheSrc = cms.InputTag('siPixelClusterShapeCache') # not really needed here since FilterPixelHits=False
- )
+ ),
+ _StripSubClusterShapeSeedFilter.clone()
)
)
@@ -113,10 +115,6 @@
SeedComparitorPSet = _tobTecStepSeedComparitorPSet,
)
-from RecoTracker.PixelLowPtUtilities.StripSubClusterShapeSeedFilter_cfi import StripSubClusterShapeSeedFilter as _StripSubClusterShapeSeedFilter
-from Configuration.ProcessModifiers.approxSiStripClusters_cff import approxSiStripClusters
-(~approxSiStripClusters).toModify(tobTecStepSeedsTripl.SeedComparitorPSet.comparitors, func = lambda list: list.append(_StripSubClusterShapeSeedFilter.clone()) )
-
#fastsim
import FastSimulation.Tracking.TrajectorySeedProducer_cfi
from FastSimulation.Tracking.SeedingMigration import _hitSetProducerToFactoryPSet
diff --git a/RecoTracker/PixelLowPtUtilities/interface/SlidingPeakFinder.h b/RecoTracker/PixelLowPtUtilities/interface/SlidingPeakFinder.h
new file mode 100644
index 0000000000000..40e3189b58f4b
--- /dev/null
+++ b/RecoTracker/PixelLowPtUtilities/interface/SlidingPeakFinder.h
@@ -0,0 +1,102 @@
+#ifndef RecoTracker_PixelLowPtUtilities_SlidingPeakFinder_h
+#define RecoTracker_PixelLowPtUtilities_SlidingPeakFinder_h
+
+#include <vector>
+#include <algorithm>
+#include <cstdint>
+#include <cmath>
+#include "CondFormats/SiStripObjects/interface/SiStripNoises.h"
+#include "CondFormats/DataRecord/interface/SiStripNoisesRcd.h"
+
+class SlidingPeakFinder {
+public:
+ SlidingPeakFinder(unsigned int size) : size_(size), half_((size + 1) / 2) {}
+
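+ // Slides a window of size_ strips over the amplitudes: a peak is accepted when the
+ // maximum inside the window exceeds the larger of the two neighbouring minima and the
+ // supplied test passes on both the peak height and the baseline-subtracted charge.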
+ template <typename Test>
+ bool apply(const uint8_t *x,
+ const uint8_t *begin,
+ const uint8_t *end,
+ const Test &test,
+ bool verbose = false,
+ int firststrip = 0) {
+ const uint8_t *ileft = (x != begin) ? std::min_element(x - 1, x + half_) : begin - 1;
+ const uint8_t *iright = ((x + size_) < end) ? std::min_element(x + half_, std::min(x + size_ + 1, end)) : end;
+ uint8_t left = (ileft < begin ? 0 : *ileft);
+ uint8_t right = (iright >= end ? 0 : *iright);
+ uint8_t center = *std::max_element(x, std::min(x + size_, end));
+ uint8_t maxmin = std::max(left, right);
+ if (maxmin < center) {
+ bool ret = test(center, maxmin);
+ if (ret) {
+ ret = test(ileft, iright, begin, end);
+ }
+ return ret;
+ } else {
+ return false;
+ }
+ }
+
+ template <typename V, typename Test>
+ bool apply(const V &ampls, const Test &test, bool verbose = false, int firststrip = 0) {
+ const uint8_t *begin = &*ampls.begin();
+ const uint8_t *end = &*ampls.end();
+ for (const uint8_t *x = begin; x < end - (half_ - 1); ++x) {
+ if (apply(x, begin, end, test, verbose, firststrip)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+private:
+ unsigned int size_, half_;
+};
+
+struct PeakFinderTest {
+ PeakFinderTest(float mip,
+ uint32_t detid,
+ uint32_t firstStrip,
+ const SiStripNoises *theNoise,
+ float seedCutMIPs,
+ float seedCutSN,
+ float subclusterCutMIPs,
+ float subclusterCutSN)
+ : mip_(mip),
+ detid_(detid),
+ firstStrip_(firstStrip),
+ noiseObj_(theNoise),
+ noises_(theNoise->getRange(detid)),
+ subclusterCutMIPs_(subclusterCutMIPs),
+ sumCut_(subclusterCutMIPs_ * mip_),
+ subclusterCutSN2_(subclusterCutSN * subclusterCutSN) {
+ cut_ = std::min(seedCutMIPs * mip, seedCutSN * noiseObj_->getNoise(firstStrip + 1, noises_));
+ }
+
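+ // First overload: peak height over the neighbouring minimum, compared with the seed cut.
+ // Second overload: baseline-subtracted charge between the two minima, compared with the
+ // MIP-fraction and signal-to-noise sub-cluster cuts.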
+ bool operator()(uint8_t max, uint8_t min) const { return max - min > cut_; }
+ bool operator()(const uint8_t *left, const uint8_t *right, const uint8_t *begin, const uint8_t *end) const {
+ int yleft = (left < begin ? 0 : *left);
+ int yright = (right >= end ? 0 : *right);
+ float sum = 0.0;
+ int maxval = 0;
+ float noise = 0;
+ for (const uint8_t *x = left + 1; x < right; ++x) {
+ int baseline = (yleft * int(right - x) + yright * int(x - left)) / int(right - left);
+ sum += int(*x) - baseline;
+ noise += std::pow(noiseObj_->getNoise(firstStrip_ + int(x - begin), noises_), 2);
+ maxval = std::max(maxval, int(*x) - baseline);
+ }
+ if (sum > sumCut_ && sum * sum > noise * subclusterCutSN2_)
+ return true;
+ return false;
+ }
+
+private:
+ float mip_;
+ unsigned int detid_;
+ int firstStrip_;
+ const SiStripNoises *noiseObj_;
+ SiStripNoises::Range noises_;
+ uint8_t cut_;
+ float subclusterCutMIPs_, sumCut_, subclusterCutSN2_;
+};
+#endif
diff --git a/RecoTracker/PixelLowPtUtilities/src/StripSubClusterShapeTrajectoryFilter.cc b/RecoTracker/PixelLowPtUtilities/src/StripSubClusterShapeTrajectoryFilter.cc
index de333a956d6bf..362d15005167a 100644
--- a/RecoTracker/PixelLowPtUtilities/src/StripSubClusterShapeTrajectoryFilter.cc
+++ b/RecoTracker/PixelLowPtUtilities/src/StripSubClusterShapeTrajectoryFilter.cc
@@ -10,6 +10,7 @@
#include "DataFormats/TrackerRecHit2D/interface/SiStripMatchedRecHit2D.h"
#include "DataFormats/TrackerRecHit2D/interface/SiStripRecHit2D.h"
#include "DataFormats/TrackingRecHit/interface/TrackingRecHit.h"
+#include "DataFormats/SiStripCommon/interface/ConstantsForHardwareSystems.h"
#include "FWCore/Framework/interface/ESHandle.h"
#include "FWCore/Framework/interface/EventSetup.h"
#include "FWCore/MessageLogger/interface/MessageLogger.h"
@@ -184,13 +185,13 @@ StripSubClusterShapeFilterBase::StripSubClusterShapeFilterBase(const edm::Parame
StripSubClusterShapeFilterBase::~StripSubClusterShapeFilterBase() {
#if 0
- std::cout << "StripSubClusterShapeFilterBase " << label_ <<": called " << called_ << std::endl;
- std::cout << "StripSubClusterShapeFilterBase " << label_ <<": saturated " << saturated_ << std::endl;
- std::cout << "StripSubClusterShapeFilterBase " << label_ <<": test " << test_ << std::endl;
- std::cout << "StripSubClusterShapeFilterBase " << label_ <<": failTooNarrow " << failTooNarrow_ << std::endl;
- std::cout << "StripSubClusterShapeFilterBase " << label_ <<": passTrim " << passTrim_ << std::endl;
- std::cout << "StripSubClusterShapeFilterBase " << label_ <<": passSC " << passSC_ << std::endl;
- std::cout << "StripSubClusterShapeFilterBase " << label_ <<": failTooLarge " << failTooLarge_ << std::endl;
+ std::cout << "StripSubClusterShapeFilterBase " << label_ << ": called " << called_ << std::endl;
+ std::cout << "StripSubClusterShapeFilterBase " << label_ << ": saturated " << saturated_ << std::endl;
+ std::cout << "StripSubClusterShapeFilterBase " << label_ << ": test " << test_ << std::endl;
+ std::cout << "StripSubClusterShapeFilterBase " << label_ << ": failTooNarrow " << failTooNarrow_ << std::endl;
+ std::cout << "StripSubClusterShapeFilterBase " << label_ << ": passTrim " << passTrim_ << std::endl;
+ std::cout << "StripSubClusterShapeFilterBase " << label_ << ": passSC " << passSC_ << std::endl;
+ std::cout << "StripSubClusterShapeFilterBase " << label_ << ": failTooLarge " << failTooLarge_ << std::endl;
#endif
}
@@ -265,74 +266,77 @@ bool StripSubClusterShapeFilterBase::testLastHit(const TrackingRecHit *hit,
return true;
}
- // compute number of consecutive saturated strips
- // (i.e. with adc count >= 254, see SiStripCluster class for documentation)
- unsigned int thisSat = (ampls[0] >= 254), maxSat = thisSat;
- for (unsigned int i = 1, n = ampls.size(); i < n; ++i) {
- if (ampls[i] >= 254) {
- thisSat++;
- } else if (thisSat > 0) {
+ if (!cluster.isFromApprox()) {
+ // compute number of consecutive saturated strips
+ // (i.e. with adc count >= 254, see SiStripCluster class for documentation)
+ unsigned int thisSat = (ampls[0] >= 254), maxSat = thisSat;
+ for (unsigned int i = 1, n = ampls.size(); i < n; ++i) {
+ if (ampls[i] >= 254) {
+ thisSat++;
+ } else if (thisSat > 0) {
+ maxSat = std::max(maxSat, thisSat);
+ thisSat = 0;
+ }
+ }
+ if (thisSat > 0) {
maxSat = std::max(maxSat, thisSat);
- thisSat = 0;
}
- }
- if (thisSat > 0) {
- maxSat = std::max(maxSat, thisSat);
- }
- if (maxSat >= maxNSat_) {
- INC_COUNTER(saturated_)
- return true;
- }
+ if (maxSat >= maxNSat_) {
+ INC_COUNTER(saturated_)
+ return true;
+ }
- // trimming
- INC_COUNTER(test_)
- unsigned int hitStripsTrim = ampls.size();
- int sum = std::accumulate(ampls.begin(), ampls.end(), 0);
- uint8_t trimCut = std::min<uint8_t>(trimMaxADC_, std::floor(trimMaxFracTotal_ * sum));
- auto begin = ampls.begin();
- auto last = ampls.end() - 1;
- while (hitStripsTrim > 1 && (*begin < std::max<uint8_t>(trimCut, trimMaxFracNeigh_ * (*(begin + 1))))) {
- hitStripsTrim--;
- ++begin;
- }
- while (hitStripsTrim > 1 && (*last < std::max<uint8_t>(trimCut, trimMaxFracNeigh_ * (*(last - 1))))) {
- hitStripsTrim--;
- --last;
- }
+ // trimming
+ INC_COUNTER(test_)
+ unsigned int hitStripsTrim = ampls.size();
+ int sum = std::accumulate(ampls.begin(), ampls.end(), 0);
+ uint8_t trimCut = std::min<uint8_t>(trimMaxADC_, std::floor(trimMaxFracTotal_ * sum));
+ auto begin = ampls.begin();
+ auto last = ampls.end() - 1;
+ while (hitStripsTrim > 1 && (*begin < std::max<uint8_t>(trimCut, trimMaxFracNeigh_ * (*(begin + 1))))) {
+ hitStripsTrim--;
+ ++begin;
+ }
+ while (hitStripsTrim > 1 && (*last < std::max<uint8_t>(trimCut, trimMaxFracNeigh_ * (*(last - 1))))) {
+ hitStripsTrim--;
+ --last;
+ }
- if (hitStripsTrim < std::floor(std::abs(hitPredPos) - maxTrimmedSizeDiffNeg_)) {
- INC_COUNTER(failTooNarrow_)
- return false;
- } else if (hitStripsTrim <= std::ceil(std::abs(hitPredPos) + maxTrimmedSizeDiffPos_)) {
- INC_COUNTER(passTrim_)
- return true;
- }
+ if (hitStripsTrim < std::floor(std::abs(hitPredPos) - maxTrimmedSizeDiffNeg_)) {
+ INC_COUNTER(failTooNarrow_)
+ return false;
+ } else if (hitStripsTrim <= std::ceil(std::abs(hitPredPos) + maxTrimmedSizeDiffPos_)) {
+ INC_COUNTER(passTrim_)
+ return true;
+ }
- const StripGeomDetUnit *stripDetUnit = dynamic_cast<const StripGeomDetUnit *>(det);
- if (det == nullptr) {
- edm::LogError("Strip not a StripGeomDetUnit?") << " on " << detId.rawId() << "\n";
- return true;
- }
+ const StripGeomDetUnit *stripDetUnit = dynamic_cast<const StripGeomDetUnit *>(det);
+ if (det == nullptr) {
+ edm::LogError("Strip not a StripGeomDetUnit?") << " on " << detId.rawId() << "\n";
+ return true;
+ }
- float MeVperADCStrip = 9.5665E-4; // conversion constant from ADC counts to MeV for the SiStrip detector
- float mip =
- 3.9 / (MeVperADCStrip / stripDetUnit->surface().bounds().thickness()); // 3.9 MeV/cm = ionization in silicon
- float mipnorm = mip / std::abs(ldir.z());
- ::SlidingPeakFinder pf(std::max<int>(2, std::ceil(std::abs(hitPredPos) + subclusterWindow_)));
- ::PeakFinderTest test(mipnorm,
- detId(),
- cluster.firstStrip(),
- &*theNoise,
- seedCutMIPs_,
- seedCutSN_,
- subclusterCutMIPs_,
- subclusterCutSN_);
- if (pf.apply(cluster.amplitudes(), test)) {
- INC_COUNTER(passSC_)
- return true;
+ float mip = 3.9 / (sistrip::MeVperADCStrip /
+ stripDetUnit->surface().bounds().thickness()); // 3.9 MeV/cm = ionization in silicon
+ float mipnorm = mip / std::abs(ldir.z());
+ ::SlidingPeakFinder pf(std::max<int>(2, std::ceil(std::abs(hitPredPos) + subclusterWindow_)));
+ ::PeakFinderTest test(mipnorm,
+ detId(),
+ cluster.firstStrip(),
+ &*theNoise,
+ seedCutMIPs_,
+ seedCutSN_,
+ subclusterCutMIPs_,
+ subclusterCutSN_);
+ if (pf.apply(cluster.amplitudes(), test)) {
+ INC_COUNTER(passSC_)
+ return true;
+ } else {
+ INC_COUNTER(failTooLarge_)
+ return false;
+ }
} else {
- INC_COUNTER(failTooLarge_)
- return false;
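+ // strip amplitudes are not available for clusters rebuilt from approximate clusters,
+ // so reuse the filter decision computed upstream in SiStripClusters2ApproxClusters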
+ return cluster.filter();
}
}
return true;