update the machine names #6

Merged
merged 6 commits on Feb 13, 2023
Changes from 3 commits
12 changes: 6 additions & 6 deletions applets/analyze_files.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3

import sys
import os
@@ -13,7 +13,7 @@
from collections import OrderedDict, namedtuple

import fff_dqmtools
import fff_filemonitor
import applets.fff_filemonitor as fff_filemonitor
import fff_cluster

log = logging.getLogger(__name__)
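The flat `import fff_filemonitor` becomes the package-qualified `import applets.fff_filemonitor as fff_filemonitor`, which assumes the applets directory is now imported as a package rather than placed directly on `sys.path`. As an illustration only (not part of this PR), a shim that tolerates both layouts could look like:

    # Illustrative compatibility shim, not in this PR: prefer the package-
    # qualified import and fall back to the flat module name if the applets
    # directory is still on sys.path.
    try:
        import applets.fff_filemonitor as fff_filemonitor
    except ImportError:
        import fff_filemonitor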
@@ -75,9 +75,9 @@ def analyze_run_entry(e):
try:
with open(f, "r") as fd:
jsn = json.load(fd).get("data", [-1]*5)
evt_processed = long(jsn[0])
evt_accepted = long(jsn[1])
fsize = long(jsn[4])
evt_processed = int(jsn[0])
evt_accepted = int(jsn[1])
fsize = int(jsn[4])
except:
log.warning("Crash while reading %s.", f, exc_info=True)

@@ -162,7 +162,7 @@ def run_greenlet(self):

time.sleep(105)

@fff_cluster.host_wrapper(allow = ["bu-c2f13-31-01", "bu-c2f11-09-01", "bu-c2f11-13-01"])
@fff_cluster.host_wrapper(allow = ["dqmrubu-c2a06-05-01", "dqmrubu-c2a06-01-01", "dqmrubu-c2a06-03-01"])
@fff_dqmtools.fork_wrapper(__name__, uid="dqmpro", gid="dqmpro")
@fff_dqmtools.lock_wrapper
def __run__(opts, logger, **kwargs):
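The `allow` lists handed to `fff_cluster.host_wrapper` swap the old `bu-c2f1x-*` hostnames for the new `dqmrubu-c2a06-*` machines, so each applet keeps running only on its designated node. The decorator itself is not part of this diff; the sketch below is only an assumption of how such a hostname gate could be written, not the actual fff_cluster implementation:

    # Hypothetical sketch of a hostname allowlist decorator; the real
    # fff_cluster.host_wrapper may differ.
    import functools
    import socket

    def host_wrapper(allow):
        def decorator(func):
            @functools.wraps(func)
            def wrapped(*args, **kwargs):
                host = socket.gethostname().split(".")[0]
                if host not in allow:
                    # Not one of the allowed machines: skip this applet.
                    return None
                return func(*args, **kwargs)
            return wrapped
        return decorator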
(next file; name not shown in this view)
@@ -2,7 +2,7 @@
import fff_cluster
import logging

@fff_cluster.host_wrapper(allow = ["bu-c2f11-19-01"])
@fff_cluster.host_wrapper(allow = ["dqmrubu-c2a06-05-01"])
@fff_dqmtools.fork_wrapper(__name__, uid="dqmpro", gid="dqmpro")
@fff_dqmtools.lock_wrapper
def __run__(opts, **kwargs):
6 changes: 3 additions & 3 deletions applets/analyze_releases.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3

import sys
import os
@@ -14,8 +14,8 @@
from collections import OrderedDict, namedtuple

import fff_dqmtools
import fff_filemonitor
import fff_deleter
import applets.fff_filemonitor as fff_filemonitor
import applets.fff_deleter as fff_deleter
import fff_cluster

from utils import cmssw_deploy
8 changes: 4 additions & 4 deletions applets/fff_deleter.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3

import sys
import os
@@ -13,7 +13,7 @@
from collections import OrderedDict, namedtuple

import fff_dqmtools
import fff_filemonitor
import applets.fff_filemonitor as fff_filemonitor
import fff_cluster

DataEntry = namedtuple("DataEntry", ["key", "path", "fsize", "ftime"])
@@ -60,7 +60,7 @@ def stat(x):

# insert folder descriptions
if re_folders.match(root_rl) is not None:
if not collected_paths.has_key(root_rl):
if root_rl not in collected_paths:
_dsize, dtime = stat(root)
collected_paths[root_rl] = DataEntry(root_rl, root, 0, dtime)

@@ -246,7 +246,7 @@ def do_the_cleanup(self):
# rename them, as a warning for the next iteration
self.rename(fp)

if self.thresholds.has_key("delete_folders") and self.thresholds["delete_folders"]:
if "delete_folders" in self.thresholds and self.thresholds["delete_folders"]:
for entry in collected_paths:
if self.skip_latest and str(latest_run) in entry.path : continue
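`dict.has_key()` no longer exists in Python 3, so both checks above are rewritten with the `in` operator, which Python 2 supports as well. A small example of the equivalent forms:

    thresholds = {"delete_folders": True}

    # Python 2 only:   thresholds.has_key("delete_folders")
    # Python 2 and 3:
    if "delete_folders" in thresholds and thresholds["delete_folders"]:
        print("folder deletion enabled")

    # dict.get() with a default collapses the membership test and lookup:
    if thresholds.get("delete_folders", False):
        print("folder deletion enabled")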

(next file; name not shown in this view)
@@ -1,16 +1,16 @@
import fff_dqmtools
import fff_cluster
import fff_deleter
import applets.fff_deleter as fff_deleter
import logging

@fff_cluster.host_wrapper(allow = ["bu-c2f11-09-01"])
@fff_cluster.host_wrapper(allow = ["dqmrubu-c2a06-01-01"])
@fff_dqmtools.fork_wrapper(__name__)
@fff_dqmtools.lock_wrapper
def __run__(opts, **kwargs):
log = kwargs["logger"]

ramdisk = "/fff/ramdisk/"
tag = "fff_deleter_c2f11_09_01"
tag = "fff_deleter_c2a06_01_01"

service = fff_deleter.FileDeleter(
top = ramdisk,
(next file; name not shown in this view)
@@ -1,16 +1,17 @@
import fff_dqmtools
import fff_cluster
import fff_deleter
import applets.fff_deleter as fff_deleter

import logging

@fff_cluster.host_wrapper(allow = ["bu-c2f11-19-01"])
@fff_cluster.host_wrapper(allow = ["dqmrubu-c2a06-05-01"])
@fff_dqmtools.fork_wrapper(__name__)
@fff_dqmtools.lock_wrapper
def __run__(opts, **kwargs):
log = kwargs["logger"]

ramdisk = "/fff/output/lookarea/"
tag = "fff_deleter_lookarea_c2f11_19_01"
tag = "fff_deleter_lookarea_c2a06_05_01"

service = fff_deleter.FileDeleter(
top = ramdisk,
(next file; name not shown in this view)
@@ -1,16 +1,16 @@
import fff_dqmtools
import fff_deleter
import applets.fff_deleter as fff_deleter
import fff_cluster
import logging

@fff_cluster.host_wrapper(allow = ["bu-c2f11-19-01"])
@fff_cluster.host_wrapper(allow = ["dqmrubu-c2a06-05-01"])
@fff_dqmtools.fork_wrapper(__name__)
@fff_dqmtools.lock_wrapper
def __run__(opts, **kwargs):
log = kwargs["logger"]

ramdisk = "/cmsnfsdqmminidaq/dqmminidaq/"
tag = "fff_deleter_minidaq_c2f11_19_01"
tag = "fff_deleter_minidaq_c2a06_05_01"

service = fff_deleter.FileDeleter(
top = ramdisk,
2 changes: 1 addition & 1 deletion applets/fff_deleter_minidaq_cms904.py
@@ -1,5 +1,5 @@
import fff_dqmtools
import fff_deleter
import applets.fff_deleter as fff_deleter
import fff_cluster
import logging

(next file; name not shown in this view)
@@ -1,16 +1,16 @@
import fff_dqmtools
import fff_cluster
import fff_deleter
import applets.fff_deleter as fff_deleter
import logging

@fff_cluster.host_wrapper(allow = ["bu-c2f11-13-01"])
@fff_cluster.host_wrapper(allow = ["dqmrubu-c2a06-03-01"])
@fff_dqmtools.fork_wrapper(__name__)
@fff_dqmtools.lock_wrapper
def __run__(opts, **kwargs):
log = kwargs["logger"]

ramdisk = "/fff/ramdisk/"
tag = "fff_deleter_playback_c2f11_13_01"
tag = "fff_deleter_playback_c2a06_03_01"

service = fff_deleter.FileDeleter(
top = ramdisk,
20 changes: 10 additions & 10 deletions applets/fff_filemonitor.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3

import os
import sys
@@ -9,7 +9,7 @@
import socket
import struct
import time
import urllib2
import urllib.request, urllib.error, urllib.parse
import json

import fff_dqmtools
@@ -19,15 +19,15 @@ def atomic_read_delete(fp):

tmp_fp = fp
tmp_fp += ".open_pid%d" % os.getpid()
tmp_fp += "_tag" + os.urandom(32).encode("hex").upper()
tmp_fp += "_tag" + os.urandom(32).hex().upper()

os.rename(fp, tmp_fp)

# ensure the file is regular!
flags = os.O_RDWR|os.O_NOFOLLOW|os.O_NOCTTY|os.O_NONBLOCK|os.O_NDELAY
try:
fd = os.open(tmp_fp, flags)
except OSError, e:
except OSError as e:
# check if it was a symbolic link and remove if so
if e.errno == errno.ELOOP:
os.unlink(tmp_fp)
@@ -58,7 +58,7 @@ def atomic_read_delete(fp):

return b

def atomic_create_write(fp, body, mode=0600):
def atomic_create_write(fp, body, mode=0o600):
import tempfile

dir = os.path.dirname(fp)
@@ -68,10 +68,10 @@ def atomic_create_write(fp, body, mode=0600):

try:
tmp_fp = f.name
f.write(body)
f.write( body.encode('utf-8') )
f.close()

if mode != 0600:
if mode != 0o600:
os.chmod(tmp_fp, mode)

os.rename(tmp_fp, fp)
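This hunk bundles several Python 3 changes: octal literals need the `0o` prefix, and the `tempfile` handle is binary, so the body has to be encoded to bytes before writing; the final `os.rename` keeps the write atomic. A self-contained sketch of the same write-temp-then-rename pattern, with hypothetical names and assuming the temporary file lives on the same filesystem as the target:

    import os
    import tempfile

    def atomic_write_text(path, text, mode=0o600):
        # Illustrative helper, not the applet's atomic_create_write itself.
        dirname = os.path.dirname(path) or "."
        tmp = tempfile.NamedTemporaryFile(dir=dirname, delete=False)
        try:
            tmp.write(text.encode("utf-8"))  # binary handle: encode explicitly
            tmp.flush()
            os.fsync(tmp.fileno())           # push the data to disk
            tmp.close()
            if mode != 0o600:                # NamedTemporaryFile defaults to 0o600
                os.chmod(tmp.name, mode)
            os.rename(tmp.name, path)        # atomic within one filesystem
        except Exception:
            tmp.close()
            os.unlink(tmp.name)
            raise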
@@ -87,13 +87,13 @@ def http_upload(lst_gen, port, log=None, test_webserver=False):
return 0

data = json.dumps({ "docs": docs })
r = urllib2.Request(url, data, {'Content-Type': 'application/json'})
r = urllib.request.Request(url, data.encode('utf-8'), {'Content-Type': 'application/json'})

f = None
try:
f = urllib2.urlopen(r)
f = urllib.request.urlopen(r)
resp = f.read()
except urllib2.HTTPError:
except urllib.error.HTTPError:
if log: log.warning("Couldn't upload files to a web instance: %s", url, exc_info=True)
raise
finally:
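`urllib2` is split across `urllib.request` and `urllib.error` in Python 3, and request bodies must be bytes, hence the extra `data.encode('utf-8')`. A minimal sketch of the same JSON POST, with a placeholder URL:

    import json
    import urllib.error
    import urllib.request

    def post_json(url, docs):
        payload = json.dumps({"docs": docs}).encode("utf-8")  # bytes, not str
        req = urllib.request.Request(url, payload,
                                     {"Content-Type": "application/json"})
        try:
            with urllib.request.urlopen(req) as resp:
                return resp.read()
        except urllib.error.HTTPError:
            raise  # the applet logs and re-raises; do the same here

    # post_json("http://localhost:9090/upload", [{"type": "selftest"}])  # placeholder URL and port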
6 changes: 3 additions & 3 deletions applets/fff_logcleaner.py
@@ -90,11 +90,11 @@ def __run__(opts, **kwargs):
exit("You need root permissions to run this.")

if len(sys.argv) == 2:
newsize = long(sys.argv[1])
newsize = int(sys.argv[1])

print "Set MAX_SIZE (%.03f) to %.03f megabytes." % (float(MAX_SIZE) / 1024 / 1024, newsize, )
print ("Set MAX_SIZE (%.03f) to %.03f megabytes." % (float(MAX_SIZE) / 1024 / 1024, newsize, ))
MAX_SIZE = newsize * 1024 * 1024

running_set = collect_open()
print "Running with max_size=%.03f keep_size=%.03f" % (float(MAX_SIZE) / 1024 / 1024, float(KEEP_SIZE) / 1024 / 1024, )
print( "Running with max_size=%.03f keep_size=%.03f" % (float(MAX_SIZE) / 1024 / 1024, float(KEEP_SIZE) / 1024 / 1024, ) )
do_the_log_cleanup(running_set = running_set)
6 changes: 3 additions & 3 deletions applets/fff_selftest.py
@@ -1,4 +1,4 @@
#!/usr/bin/env python2
#!/usr/bin/env python3

import os
import sys
@@ -9,7 +9,7 @@
import logging

import fff_dqmtools
import fff_filemonitor
import applets.fff_filemonitor as fff_filemonitor

log = logging.getLogger(__name__)

@@ -92,4 +92,4 @@ def __run__(opts, **kwargs):
x = FFFMonitoringTest(path="./")
fp = x.make_selftest()

print "Made report:", fp
print("Made report:", fp)
15 changes: 6 additions & 9 deletions applets/fff_simulator.py
@@ -22,7 +22,7 @@
# usually atomic_create_write writes files with 0600 mask.
# we need a bit more readable files instead.
def atomic_write(filename, content):
return atomic_create_write(filename, content, mode=0644)
return atomic_create_write(filename, content, mode=0o644)

log = logging.getLogger("fff_simulator")

@@ -254,16 +254,13 @@ def discover_files(self):
# remap stream if set
stream_orig = stream
remap = self.config.get("stream_remap", {})
if remap.has_key(stream):
if stream in remap:
stream = remap[stream]
if not self.streams_found.has_key(stream):
if stream not in self.streams_found:
log.info("Stream %s will be converted into stream %s", stream_orig, stream)

files_found.add(f)
stream_dct = self.streams_found.setdefault(stream, {
'lumi_files': []
})

stream_dct = self.streams_found.setdefault(stream, { 'lumi_files': [] })
stream_dct["lumi_files"].append((f, stream_source, ))

if run_found is None:
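Besides another `has_key` fix, the per-stream bookkeeping is collapsed onto a single `dict.setdefault` call, which returns the existing entry or inserts the default in one step. For instance, with an illustrative file name:

    streams_found = {}

    # setdefault returns the dict already stored for "DQM", or inserts
    # a fresh {'lumi_files': []} and returns that.
    entry = streams_found.setdefault("DQM", {"lumi_files": []})
    entry["lumi_files"].append(("run000001_ls0001_streamDQM.jsn", "source"))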
@@ -279,7 +276,7 @@

def create_run_directory(self):
rd = os.path.join(self.config["ramdisk"], 'run%d' % self.config["run"])
os.makedirs(rd, 0755)
os.makedirs(rd, 0o755)
log.info('Created run directory: %s' % rd)
self.run_directory = rd

Expand Down Expand Up @@ -582,7 +579,7 @@ def handle_line(self, line, write_f):

run.control(line.strip(), write_f)

@fff_cluster.host_wrapper(allow = ["bu-c2f11-13-01"])
@fff_cluster.host_wrapper(allow = ["dqmrubu-c2a06-03-01"])
@fff_dqmtools.fork_wrapper(__name__)
@fff_dqmtools.lock_wrapper
def __run__(**kwargs):