analyze_files.py
#!/usr/bin/env python2

import sys
import os
import logging
import re
import datetime
import subprocess
import socket
import time
import json

from collections import OrderedDict, namedtuple

import fff_dqmtools
import fff_filemonitor
import fff_cluster

log = logging.getLogger(__name__)

RunEntry = namedtuple('RunEntry', ["run", "path", "start_time", "start_time_source"])
FileEntry = namedtuple('FileEntry', ["ls", "stream", "mtime", "ctime", "evt_processed", "evt_accepted", "fsize"])

# nominal length of one lumisection, in seconds
LUMI = 23.310893056
def find_match(re, iter):
    # apply a compiled regex to every name and keep only the matches
    xm = map(re.match, iter)
    return filter(lambda x: x is not None, xm)

def collect_run_timestamps(path):
    lst = os.listdir(path)

    path_dct = {}
    path_pattern_re = re.compile(r"^run(\d+)$")
    for m in find_match(path_pattern_re, lst):
        path_dct[int(m.group(1))] = os.path.join(path, m.group(0))

    global_dct = {}
    global_pattern_re = re.compile(r"^\.run(\d+)\.global$")
    for m in find_match(global_pattern_re, lst):
        f = os.path.join(path, m.group(0))
        stat = os.stat(f)
        ftime = stat.st_mtime
        global_dct[int(m.group(1))] = ftime

    run_list = []
    for run in path_dct.keys():
        # runs without a global file will have start_time 'None'
        run_list.append(RunEntry(run, path_dct[run], global_dct.get(run, None), "global_file"))

    run_list.sort()
    return run_list
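# Illustrative sketch of the layout collect_run_timestamps() scans (the run
# number below is made up, not taken from the code): each run has a "runNNNNNN"
# directory holding the stream .jsn files, plus an optional hidden
# ".runNNNNNN.global" marker file whose mtime becomes the run start time.
#
#   /fff/ramdisk/run123456/            -> matched by path_pattern_re
#   /fff/ramdisk/.run123456.global     -> matched by global_pattern_re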
def analyze_run_entry(e):
    lst = os.listdir(e.path)

    re_jsn = re.compile(r"^run(?P<run>\d+)_ls(?P<ls>\d+)(?P<leftover>_.+\.jsn)$")

    files = []
    for m in find_match(re_jsn, lst):
        d = m.groupdict()
        if int(d['run']) != e.run: continue

        f = os.path.join(e.path, m.group(0))
        stream = d["leftover"].strip("_")
        stream = re.sub(r"\.jsn$", r"", stream)

        mtime = os.stat(f).st_mtime
        ctime = os.stat(f).st_ctime

        # read the file contents (EoR files are skipped here)
        evt_processed, evt_accepted, fsize = [-1, -1, -1]
        if "EoR" not in f:
            try:
                with open(f, "r") as fd:
                    jsn = json.load(fd).get("data", [-1]*5)
                    evt_processed = long(jsn[0])
                    evt_accepted = long(jsn[1])
                    fsize = long(jsn[4])
            except:
                log.warning("Crash while reading %s.", f, exc_info=True)

        files.append(FileEntry(int(d['ls']), stream, mtime, ctime, evt_processed, evt_accepted, fsize))

    files.sort()

    if (e.start_time is None) and len(files):
        # no .global marker: fall back to the first lumi file's mtime
        e = e._replace(start_time = files[0].mtime - LUMI, start_time_source = "first_lumi")

    return e, files
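# Illustrative example of what analyze_run_entry() parses (the filename is made
# up; only the positional indices come from the code above): a file named
# "run123456_ls0042_streamDQM_mon.jsn" yields stream "streamDQM_mon" and ls 42,
# and its JSON body's "data" array is read positionally as
#   data[0] -> evt_processed, data[1] -> evt_accepted, data[4] -> fsize.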
class Analyzer(object):
    def __init__(self, top, report_directory, app_tag):
        self.top = top
        self.report_directory = report_directory
        self.app_tag = app_tag
        self.hostname = socket.gethostname()

    def make_report(self, backlog=5):
        timestamps = collect_run_timestamps(self.top)

        # only the last `backlog` entries (default 5)
        for entry in timestamps[-backlog:]:
            entry, files = analyze_run_entry(entry)

            # group by stream name in order to save space
            # and make a dictionary for json
            grouped = {}
            for f in files:
                if "EoR" in f.stream:
                    # don't include EoR file
                    continue

                lst = grouped.setdefault(f.stream, {
                    'lumis': [],
                    'mtimes': [],
                    'ctimes': [],
                    'evt_processed': [],
                    'evt_accepted': [],
                    'fsize': [],
                })

                lst['lumis'].append(f.ls)
                lst['mtimes'].append(f.mtime)
                lst['ctimes'].append(f.ctime)
                lst['evt_processed'].append(f.evt_processed)
                lst['evt_accepted'].append(f.evt_accepted)
                lst['fsize'].append(f.fsize)

            id = "dqm-files-%s-%s-run%d" % (self.hostname, self.app_tag, entry.run)
            doc = {
                "sequence": 0,
                "hostname": self.hostname,
                "tag": self.app_tag,
                "run": entry.run,
                "extra": {
                    "streams": grouped,
                    "global_start": entry.start_time,
                    "global_start_source": entry.start_time_source,
                    "lumi": LUMI,
                    #"run_timestamps": run_dct,
                },
                "pid": os.getpid(),
                "_id": id,
                "type": "dqm-files",
            }

            final_fp = os.path.join(self.report_directory, doc["_id"] + ".jsn")
            body = json.dumps(doc, indent=None)
            fff_filemonitor.atomic_create_write(final_fp, body)

            log.info("Made report file: %s", final_fp)

    def run_greenlet(self):
        while True:
            if os.path.isdir(self.report_directory):
                self.make_report()
            else:
                log.warning("Directory %s does not exist. Reports disabled.", self.report_directory)

            time.sleep(105)
@fff_cluster.host_wrapper(allow = ["bu-c2f13-31-01", "bu-c2f11-09-01", "bu-c2f11-13-01"])
@fff_dqmtools.fork_wrapper(__name__, uid="dqmpro", gid="dqmpro")
@fff_dqmtools.lock_wrapper
def __run__(opts, logger, **kwargs):
    global log
    log = logger

    s = Analyzer(
        top = "/fff/ramdisk/",
        app_tag = kwargs["name"],
        report_directory = opts["path"],
    )

    s.run_greenlet()

if __name__ == "__main__":
    log = fff_dqmtools.LogCaptureHandler.create_logger_subprocess("root")

    path = "/tmp/dqm_monitoring/"

    import sys
    if len(sys.argv) == 2:
        path = sys.argv[1]

    s = Analyzer(
        top = "/fff/ramdisk/",
        app_tag = "analyze_files",
        report_directory = path,
    )

    s.make_report(backlog=50)
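# Standalone usage sketch: when run directly, the single optional argument
# overrides the default report directory ("/tmp/dqm_monitoring/"), e.g.
#   python2 analyze_files.py /tmp/dqm_monitoring/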