Merge pull request #941 from gudrutis/master
logreader config files are generated when logs are parsed
gudrutis authored Jan 25, 2018
2 parents 9568ad7 + 56c4958 commit d7ad712
Showing 4 changed files with 177 additions and 4 deletions.
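In short: every parsed log now gets a sibling <logFile>-read_config JSON file, apparently meant for the log reader front end to jump to interesting lines. A minimal sketch of its shape, with field names taken from logreaderUtils.py below (the entry itself is a hypothetical example):

import json

config_list = [{
    "lineStart": 12,                          # 0-based index of the matched log line
    "lineEnd": 12,                            # single-line matches: start == end
    "name": "Segmentation fault at line #13"  # rule name plus 1-based line number
}]
print(json.dumps({"list_to_show": config_list}, indent=2))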
15 changes: 13 additions & 2 deletions es_ibs_log.py
@@ -7,6 +7,7 @@
from es_utils import send_payload
import commands
from cmsutils import cmsswIB2Week
from logreaderUtils import write_config_file, add_exception_to_config

def send_unittest_dataset(datasets, payload, id, index, doc):
for ds in datasets:
@@ -35,7 +36,13 @@ def process_unittest_log(logFile):
payload["architecture"]=architecture
payload["@timestamp"]=timestp
id = None
for l in file(logFile).read().split("\n"):
config_list = []
custom_rule_set = [
{"str_to_match": "test (.*) had ERRORS", "name": "{0} failed"},
{"str_to_match": '===== Test "([^\s]+)" ====', "name": "{0}"}
]
for index, l in enumerate(file(logFile).read().split("\n")):
config_list = add_exception_to_config(l, index, config_list, custom_rule_set)
if l.startswith('===== Test "') and l.endswith('" ===='):
if utname: send_unittest_dataset(datasets, payload, id, "ib-dataset-"+week, "unittest-dataset")
datasets = []
@@ -48,6 +55,7 @@ def process_unittest_log(logFile):
if (not "file:" in rootfile) and (not rootfile in datasets): datasets.append(rootfile)
except: pass
if datasets: send_unittest_dataset(datasets, payload, id, "ib-dataset-"+week,"unittest-dataset")
write_config_file(logFile + "-read_config", config_list)
return

def process_addon_log(logFile):
@@ -64,13 +72,16 @@ def process_addon_log(logFile):
payload["@timestamp"]=timestp
payload["name"] = pathInfo[-1].split("-")[1].split("_cmsRun_")[0].split("_cmsDriver.py_")[0]
id = sha1(release + architecture + "addon" + payload["name"]).hexdigest()
for l in file(logFile).read().split("\n"):
config_list = []
for index, l in enumerate(file(logFile).read().split("\n")):
config_list = add_exception_to_config(l, index, config_list)
if " Initiating request to open file " in l:
try:
rootfile = l.split(" Initiating request to open file ")[1].split(" ")[0]
if (not "file:" in rootfile) and (not rootfile in datasets): datasets.append(rootfile)
except: pass
send_unittest_dataset(datasets, payload, id, "ib-dataset-"+week,"addon-dataset")
write_config_file(logFile + "-read_config", config_list)
return

def process_ib_utests(logFile):
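Note how the custom rules in process_unittest_log pair a regex with a str.format template: capture groups fill the {0}-style placeholders in the rule name. A small illustration, using a sample line from the unit-test log added further down:

import re

rule = {"str_to_match": "test (.*) had ERRORS", "name": "{0} failed"}
line = "---> test Cons_ had ERRORS"
match = re.search(rule["str_to_match"], line, re.IGNORECASE)
if match:
    print(rule["name"].format(*match.groups()))  # -> "Cons_ failed"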
37 changes: 37 additions & 0 deletions logreaderUtils.py
@@ -0,0 +1,37 @@
#! /usr/bin/env python
import json
import re


def add_exception_to_config(line, index, config_list, custom_rule_list=[]):
    default_rules_list = [
        # matches "Begin ...Exception" headers but ignores "Begin IgnoreCompletely" messages
        {"str_to_match": "Begin(?! IgnoreCompletely)(.*Exception)", "name": "{0}"},
        # {"str_to_match": "\sException", "name": "Exception"},
        {"str_to_match": "edm::service::InitRootHandlers", "name": "Segmentation fault"}
    ]
    for rule in default_rules_list + custom_rule_list:
        match = re.search(rule["str_to_match"], line, re.IGNORECASE)
        if match:
            try:
                name = rule["name"].format(*match.groups())
            except IndexError:
                # more placeholders in the rule name than regex groups: keep the raw name
                name = rule["name"]
            new_exception_config = {
                "lineStart": index,
                "lineEnd": index,
                "name": name + " at line #" + str(index + 1)
            }
            config_list.append(new_exception_config)
            return config_list
    return config_list


def write_config_file(log_reader_config_path, config_list):
    try:
        log_reader_config_f = open(log_reader_config_path, "w")
        json.dump({"list_to_show": config_list}, log_reader_config_f)
        log_reader_config_f.close()
    except IOError as e:
        print("Error writing logreader config file " + log_reader_config_path + ": " + str(e))

9 changes: 7 additions & 2 deletions runPyRelValThread.py
@@ -4,6 +4,7 @@
from es_relval_log import es_parse_log
from RelValArgs import FixWFArgs
import json
from logreaderUtils import write_config_file, add_exception_to_config

def runStep1Only(basedir, workflow, args=''):
args = FixWFArgs (os.environ["CMSSW_VERSION"],os.environ["SCRAM_ARCH"],workflow,args)
@@ -22,7 +23,7 @@ def runStep1Only(basedir, workflow, args=''):

def runThreadMatrix(basedir, workflow, args='', logger=None, force=False, wf_err={}):
if (not force) and logger and logger.relvalAlreadyDone(workflow):
print "Message>> Not ruuning workflow ",workflow," as it is already ran"
print "Message>> Not runing workflow ",workflow," as it is already ran"
return
args = FixWFArgs (os.environ["CMSSW_VERSION"],os.environ["SCRAM_ARCH"],workflow,args)
workdir = os.path.join(basedir, workflow)
@@ -249,6 +250,8 @@ def parseLog(self):
data = [0, 0, 0]
logFile = logData[wf]['steps'][step]
json_cache = os.path.dirname(logFile)+"/logcache_"+str(step)+".json"
log_reader_config_path = logFile + "-read_config"
config_list = []
cache_ok = False
if (os.path.exists(json_cache)) and (os.path.getmtime(logFile)<=os.path.getmtime(json_cache)):
try:
@@ -265,14 +268,16 @@
except Exception as e:
print "Sending log information to elasticsearch failed" , str(e)
inFile = open(logFile)
for line in inFile:
for index, line in enumerate(inFile):
config_list = add_exception_to_config(line, index, config_list)
if '%MSG-w' in line: data[1]=data[1]+1
if '%MSG-e' in line: data[2]=data[2]+1
if 'Begin processing the ' in line: data[0]=data[0]+1
inFile.close()
jfile = open(json_cache,"w")
json.dump(data,jfile)
jfile.close()
write_config_file(log_reader_config_path, config_list)
log_processed+=1
logData[wf]['events'][index] = data[0]
logData[wf]['failed'][index] = data[2]
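For orientation, parseLog() now leaves two sidecar files next to each step log. A hedged sketch of reading them back (the paths are hypothetical examples, and the logcache_* name depends on the step):

import json
import os

log = "1.0_TTbar/step1.log"  # hypothetical step log path
cache = os.path.join(os.path.dirname(log), "logcache_1.json")
events, warnings, errors = json.load(open(cache))  # counts of 'Begin processing', '%MSG-w', '%MSG-e' lines
exceptions = json.load(open(log + "-read_config"))["list_to_show"]
for e in exceptions:
    print(e["name"])  # e.g. "Segmentation fault at line #42"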
120 changes: 120 additions & 0 deletions tests/test_logreaderUtils.py
@@ -0,0 +1,120 @@
import json
import os
import sys
import unittest

sys.path.append(os.path.join(os.path.dirname(__file__), "../"))

from logreaderUtils import write_config_file, add_exception_to_config

unittestlog = """
===== Test "Para_" ====
Running .
xhalf[cm]=5 yhalf[cm]=6 zhalf[cm]=7 alpha[deg]=15 theta[deg]=30 phi[deg]=45
g4 volume = 1680 cm3
dd volume = 1680 cm3
DD Information: GLOBAL:fred1 Parallelepiped: xhalf[cm]=5 yhalf[cm]=6 zhalf[cm]=7 alpha[deg]=15 theta[deg]=30 phi[deg]=45 vol=1680 cm3
OK (1)
---> test Para_ succeeded
^^^^ End Test Para_ ^^^^
===== Test "Cons_" ====
Running .
zhalf=20 rIn-Z=10 rOut-Z=15 rIn+Z=20 rOut+Z=25 startPhi=0 deltaPhi=90
g4 volume = 5497.79 cm3
dd volume = 5497.79 cm3
DD Information: GLOBAL:fred1 Cone(section): zhalf=20 rIn-Z=10 rOut-Z=15 rIn+Z=20 rOut+Z=25 startPhi=0 deltaPhi=90 vol=5497.79 cm3
F
Cons_.cpp:51:Assertion
Test name: testCons::matched_g4_and_dd
assertion failed
- Expression: g4v == ddv
Failures !!!
Run: 1 Failure total: 1 Failures: 1 Errors: 0
---> test Cons_ had ERRORS
^^^^ End Test Cons_ ^^^^
===== Test "Sphere_" ====
Running .
innerRadius=10 outerRadius=15 startPhi=0 deltaPhi=90 startTheta=0 deltaTheta=180
g4 volume = 2487.09 cm3
dd volume = 2487.09 cm3
DD Information: GLOBAL:fred1 Sphere(section): innerRadius=10 outerRadius=15 startPhi=0 deltaPhi=90 startTheta=0 deltaTheta=180 vol=2487.09 cm3
OK (1)
---> test Sphere_ succeeded
^^^^ End Test Sphere_ ^^^^
===== Test "ExtrudedPolygon_" ====
Running . XY Points[cm]=-30, -30; -30, 30; 30, 30; 30, -30; 15, -30; 15, 15; -15, 15; -15, -30; with 4 Z sections: z[cm]=-60, x[cm]=0, y[cm]=30, scale[cm]=0.8; z[cm]=-15, x[cm]=0, y[cm]=-30, scale[cm]=1; z[cm]=10, x[cm]=0, y[cm]=0, scale[cm]=0.6; z[cm]=60, x[cm]=0, y[cm]=30, scale[cm]=1.2;
g4 volume = 2.136e+07 cm3
dd volume = 0 cm3
DD Information: GLOBAL:fred1 ExtrudedPolygon: XY Points[cm]=-30, -30; -30, 30; 30, 30; 30, -30; 15, -30; 15, 15; -15, 15; -15, -30; with 4 Z sections: z[cm]=-60, x[cm]=0, y[cm]=30, scale[cm]=0.8; z[cm]=-15, x[cm]=0, y[cm]=-30, scale[cm]=1; z[cm]=10, x[cm]=0, y[cm]=0, scale[cm]=0.6; z[cm]=60, x[cm]=0, y[cm]=30, scale[cm]=1.2; vol= 0
OK (1)
"""


class TestSequenceFunctions(unittest.TestCase):
    def test_unittestlogs(self):
        # Smoke test: parses the sample log and writes the config file;
        # there are no assertions, it only checks that nothing raises.
        config_list = []
        custom_rule_set = [
            # the extra placeholders ({1}, {2}) hit the fallback to the raw
            # rule name in add_exception_to_config
            {"str_to_match": "test (.*) had ERRORS", "name": "{0}{1}{2} failed"},
            {"str_to_match": '===== Test "([^\s]+)" ====', "name": "{0}"}
        ]
        for index, l in enumerate(unittestlog.split("\n")):
            config_list = add_exception_to_config(l, index, config_list, custom_rule_set)
        write_config_file("tmp/unittestlogs.log" + "-read_config", config_list)

# def test_reg_th(self):
# for line in lines_th:
# line = line.strip()
# self.assertTrue(re.search(regex_th, line))
# self.assertFalse(re.search(regex_td, line))
# self.assertFalse(re.search(regex_th, line_files))
# self.assertFalse(re.search(regex_th, line_td))
#


# def readLog():
# config_list = []
# data = [0, 0, 0]
# # hardcoding
# logFile = "/home/zmatonis/Downloads/procesLogTestFolder/1234/step10.log"
# step = "step"
# json_cache = os.path.dirname(logFile) + "/logcache_" + str(step) + ".json"
# log_reader_config_path = logFile + "-read_config"
#
# inFile = open(logFile)
# for index, line in enumerate(inFile):
# config_list = add_exception_to_config(line, index, config_list)
# if '%MSG-w' in line: data[1] = data[1] + 1
# if '%MSG-e' in line: data[2] = data[2] + 1
# if 'Begin processing the ' in line: data[0] = data[0] + 1
# inFile.close()
# jfile = open(json_cache, "w")
# json.dump(data, jfile)
# jfile.close()
#
# log_reader_config_f = open(log_reader_config_path, "w")
# json.dump({"list_to_show": config_list}, log_reader_config_f)
# log_reader_config_f.close()

# readLog()


if __name__ == '__main__':
    unittest.main()
