From 56c4958782df531b6fa1a7a1f869efd191ff631f Mon Sep 17 00:00:00 2001 From: Zygimantas Matonis Date: Thu, 25 Jan 2018 16:02:01 +0100 Subject: [PATCH] logreader config files are generated when logs are parsed --- es_ibs_log.py | 15 ++++- logreaderUtils.py | 37 +++++++++++ runPyRelValThread.py | 9 ++- tests/test_logreaderUtils.py | 120 +++++++++++++++++++++++++++++++++++ 4 files changed, 177 insertions(+), 4 deletions(-) create mode 100644 logreaderUtils.py create mode 100644 tests/test_logreaderUtils.py diff --git a/es_ibs_log.py b/es_ibs_log.py index ee33c2b24075..071dc78dc806 100755 --- a/es_ibs_log.py +++ b/es_ibs_log.py @@ -7,6 +7,7 @@ from es_utils import send_payload import commands from cmsutils import cmsswIB2Week +from logreaderUtils import write_config_file, add_exception_to_config def send_unittest_dataset(datasets, payload, id, index, doc): for ds in datasets: @@ -35,7 +36,13 @@ def process_unittest_log(logFile): payload["architecture"]=architecture payload["@timestamp"]=timestp id = None - for l in file(logFile).read().split("\n"): + config_list = [] + custom_rule_set = [ + {"str_to_match": "test (.*) had ERRORS", "name": "{0} failed"}, + {"str_to_match": '===== Test "([^\s]+)" ====', "name": "{0}"} + ] + for index, l in enumerate(file(logFile).read().split("\n")): + config_list = add_exception_to_config(l,index,config_list,custom_rule_set) if l.startswith('===== Test "') and l.endswith('" ===='): if utname: send_unittest_dataset(datasets, payload, id, "ib-dataset-"+week, "unittest-dataset") datasets = [] @@ -48,6 +55,7 @@ def process_unittest_log(logFile): if (not "file:" in rootfile) and (not rootfile in datasets): datasets.append(rootfile) except: pass if datasets: send_unittest_dataset(datasets, payload, id, "ib-dataset-"+week,"unittest-dataset") + write_config_file(logFile + "-read_config", config_list) return def process_addon_log(logFile): @@ -64,13 +72,16 @@ def process_addon_log(logFile): payload["@timestamp"]=timestp payload["name"] = 
pathInfo[-1].split("-")[1].split("_cmsRun_")[0].split("_cmsDriver.py_")[0] id = sha1(release + architecture + "addon" + payload["name"]).hexdigest() - for l in file(logFile).read().split("\n"): + config_list = [] + for index, l in enumerate(file(logFile).read().split("\n")): + config_list = add_exception_to_config(l,index, config_list) if " Initiating request to open file " in l: try: rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0] if (not "file:" in rootfile) and (not rootfile in datasets): datasets.append(rootfile) except: pass send_unittest_dataset(datasets, payload, id, "ib-dataset-"+week,"addon-dataset") + write_config_file(logFile + "-read_config", config_list) return def process_ib_utests(logFile): diff --git a/logreaderUtils.py b/logreaderUtils.py new file mode 100644 index 000000000000..dac74d7a3d57 --- /dev/null +++ b/logreaderUtils.py @@ -0,0 +1,37 @@ +#! /usr/bin/env python +import json +import re + + +def add_exception_to_config(line, index, config_list, custom_rule_list=[]): + default_rules_list = [ + # will ignore " IgnoreCompletely" messages + {"str_to_match": "Begin(?! 
IgnoreCompletely)(.*Exception)", "name": "{0}"}, + # {"str_to_match": "\sException", "name": "Exception"}, + {"str_to_match": "edm::service::InitRootHandlers", "name": "Segmentation fault"} + ] + for rule in default_rules_list + custom_rule_list: + match = re.search(rule["str_to_match"], line, re.IGNORECASE); + if match: + try: + name = rule["name"].format(*match.groups()) + except: + name = rule["name"] + new_exception_config = { + "lineStart": index, + "lineEnd": index, + "name": name + " at line #" + str(index + 1) + } + config_list.append(new_exception_config) + return config_list + return config_list + + +def write_config_file(log_reader_config_path, config_list): + try: + log_reader_config_f = open(log_reader_config_path, "w") + json.dump({"list_to_show": config_list}, log_reader_config_f) + log_reader_config_f.close() + except: + print("Error writing exception file") + diff --git a/runPyRelValThread.py b/runPyRelValThread.py index 1adf82932595..95165bbacd1f 100755 --- a/runPyRelValThread.py +++ b/runPyRelValThread.py @@ -4,6 +4,7 @@ from es_relval_log import es_parse_log from RelValArgs import FixWFArgs import json +from logreaderUtils import write_config_file, add_exception_to_config def runStep1Only(basedir, workflow, args=''): args = FixWFArgs (os.environ["CMSSW_VERSION"],os.environ["SCRAM_ARCH"],workflow,args) @@ -22,7 +23,7 @@ def runStep1Only(basedir, workflow, args=''): def runThreadMatrix(basedir, workflow, args='', logger=None, force=False, wf_err={}): if (not force) and logger and logger.relvalAlreadyDone(workflow): - print "Message>> Not ruuning workflow ",workflow," as it is already ran" + print "Message>> Not running workflow ",workflow," as it is already ran" return args = FixWFArgs (os.environ["CMSSW_VERSION"],os.environ["SCRAM_ARCH"],workflow,args) workdir = os.path.join(basedir, workflow) @@ -249,6 +250,8 @@ def parseLog(self): data = [0, 0, 0] logFile = logData[wf]['steps'][step] json_cache = 
os.path.dirname(logFile)+"/logcache_"+str(step)+".json" + log_reader_config_path = logFile + "-read_config" + config_list = [] cache_ok = False if (os.path.exists(json_cache)) and (os.path.getmtime(logFile)<=os.path.getmtime(json_cache)): try: @@ -265,7 +268,8 @@ def parseLog(self): except Exception as e: print "Sending log information to elasticsearch failed" , str(e) inFile = open(logFile) - for line in inFile: + for index, line in enumerate(inFile): + config_list = add_exception_to_config(line, index, config_list) if '%MSG-w' in line: data[1]=data[1]+1 if '%MSG-e' in line: data[2]=data[2]+1 if 'Begin processing the ' in line: data[0]=data[0]+1 @@ -273,6 +277,7 @@ def parseLog(self): jfile = open(json_cache,"w") json.dump(data,jfile) jfile.close() + write_config_file(log_reader_config_path,config_list) log_processed+=1 logData[wf]['events'][index] = data[0] logData[wf]['failed'][index] = data[2] diff --git a/tests/test_logreaderUtils.py b/tests/test_logreaderUtils.py new file mode 100644 index 000000000000..1c4fbb72e8c1 --- /dev/null +++ b/tests/test_logreaderUtils.py @@ -0,0 +1,120 @@ +import json +import os +import sys +import unittest + +sys.path.append(os.path.join(os.path.dirname(__file__), "../")) + +from logreaderUtils import write_config_file, add_exception_to_config + +unittestlog = """ +===== Test "Para_" ==== +Running . + xhalf[cm]=5 yhalf[cm]=6 zhalf[cm]=7 alpha[deg]=15 theta[deg]=30 phi[deg]=45 + g4 volume = 1680 cm3 + dd volume = 1680 cm3 + DD Information: GLOBAL:fred1 Parallelepiped: xhalf[cm]=5 yhalf[cm]=6 zhalf[cm]=7 alpha[deg]=15 theta[deg]=30 phi[deg]=45 vol=1680 cm3 + + +OK (1) + +---> test Para_ succeeded + +^^^^ End Test Para_ ^^^^ + +===== Test "Cons_" ==== +Running . 
+ zhalf=20 rIn-Z=10 rOut-Z=15 rIn+Z=20 rOut+Z=25 startPhi=0 deltaPhi=90 + g4 volume = 5497.79 cm3 + dd volume = 5497.79 cm3 + DD Information: GLOBAL:fred1 Cone(section): zhalf=20 rIn-Z=10 rOut-Z=15 rIn+Z=20 rOut+Z=25 startPhi=0 deltaPhi=90 vol=5497.79 cm3 +F + +Cons_.cpp:51:Assertion +Test name: testCons::matched_g4_and_dd +assertion failed +- Expression: g4v == ddv + +Failures !!! +Run: 1 Failure total: 1 Failures: 1 Errors: 0 + +---> test Cons_ had ERRORS + +^^^^ End Test Cons_ ^^^^ + +===== Test "Sphere_" ==== +Running . + innerRadius=10 outerRadius=15 startPhi=0 deltaPhi=90 startTheta=0 deltaTheta=180 + g4 volume = 2487.09 cm3 + dd volume = 2487.09 cm3 + DD Information: GLOBAL:fred1 Sphere(section): innerRadius=10 outerRadius=15 startPhi=0 deltaPhi=90 startTheta=0 deltaTheta=180 vol=2487.09 cm3 + + +OK (1) + +---> test Sphere_ succeeded + +^^^^ End Test Sphere_ ^^^^ + +===== Test "ExtrudedPolygon_" ==== +Running . XY Points[cm]=-30, -30; -30, 30; 30, 30; 30, -30; 15, -30; 15, 15; -15, 15; -15, -30; with 4 Z sections: z[cm]=-60, x[cm]=0, y[cm]=30, scale[cm]=0.8; z[cm]=-15, x[cm]=0, y[cm]=-30, scale[cm]=1; z[cm]=10, x[cm]=0, y[cm]=0, scale[cm]=0.6; z[cm]=60, x[cm]=0, y[cm]=30, scale[cm]=1.2; + g4 volume = 2.136e+07 cm3 + dd volume = 0 cm3 + DD Information: GLOBAL:fred1 ExtrudedPolygon: XY Points[cm]=-30, -30; -30, 30; 30, 30; 30, -30; 15, -30; 15, 15; -15, 15; -15, -30; with 4 Z sections: z[cm]=-60, x[cm]=0, y[cm]=30, scale[cm]=0.8; z[cm]=-15, x[cm]=0, y[cm]=-30, scale[cm]=1; z[cm]=10, x[cm]=0, y[cm]=0, scale[cm]=0.6; z[cm]=60, x[cm]=0, y[cm]=30, scale[cm]=1.2; vol= 0 + + +OK (1) + +""" + + +class TestSequenceFunctions(unittest.TestCase): + def test_unittestlogs(self): + config_list = [] + custom_rule_set = [ + {"str_to_match": "test (.*) had ERRORS", "name": "{0}{1}{2} failed"}, + {"str_to_match": '===== Test "([^\s]+)" ====', "name": "{0}"} + ] + for index, l in enumerate(unittestlog.split("\n")): + config_list = add_exception_to_config(l, index, config_list, 
custom_rule_set) + write_config_file("tmp/unittestlogs.log" + "-read_config", config_list) + + # def test_reg_th(self): + # for line in lines_th: + # line = line.strip() + # self.assertTrue(re.search(regex_th, line)) + # self.assertFalse(re.search(regex_td, line)) + # self.assertFalse(re.search(regex_th, line_files)) + # self.assertFalse(re.search(regex_th, line_td)) + # + + +# def readLog(): +# config_list = [] +# data = [0, 0, 0] +# # hardcoding +# logFile = "/home/zmatonis/Downloads/procesLogTestFolder/1234/step10.log" +# step = "step" +# json_cache = os.path.dirname(logFile) + "/logcache_" + str(step) + ".json" +# log_reader_config_path = logFile + "-read_config" +# +# inFile = open(logFile) +# for index, line in enumerate(inFile): +# config_list = add_exception_to_config(line, index, config_list) +# if '%MSG-w' in line: data[1] = data[1] + 1 +# if '%MSG-e' in line: data[2] = data[2] + 1 +# if 'Begin processing the ' in line: data[0] = data[0] + 1 +# inFile.close() +# jfile = open(json_cache, "w") +# json.dump(data, jfile) +# jfile.close() +# +# log_reader_config_f = open(log_reader_config_path, "w") +# json.dump({"list_to_show": config_list}, log_reader_config_f) +# log_reader_config_f.close() + +# readLog() + + +if __name__ == '__main__': + unittest.main()