messages: use pyulog for events extraction #276

Merged
1 commit merged on Mar 22, 2024
5 changes: 0 additions & 5 deletions app/plot_app/config.py
@@ -46,7 +46,6 @@
__PARAMETERS_FILENAME = os.path.join(__CACHE_FILE_PATH, 'parameters.xml')
__EVENTS_FILENAME = os.path.join(__CACHE_FILE_PATH, 'events.json.xz')
__RELEASES_FILENAME = os.path.join(__CACHE_FILE_PATH, 'releases.json')
__METADATA_CACHE_PATH = os.path.join(__CACHE_FILE_PATH, 'metadata')

__PRINT_TIMING = int(_conf.get('debug', 'print_timing'))
__VERBOSE_OUTPUT = int(_conf.get('debug', 'verbose_output'))
@@ -126,10 +125,6 @@ def get_releases_filename():
""" get configured releases file name """
return __RELEASES_FILENAME

def get_metadata_cache_path():
""" get configured metadata cache path """
return __METADATA_CACHE_PATH

def get_parameters_filename():
""" get configured parameters file name """
return __PARAMETERS_FILENAME
174 changes: 22 additions & 152 deletions app/plot_app/events.py
@@ -1,169 +1,39 @@
""" Event parsing """
import json
import lzma
import os
import sys
import random
from typing import Optional, Any, List, Tuple

from helper import download_file_maybe
from config import get_events_url, get_events_filename, get_metadata_cache_path
from config import get_events_url, get_events_filename
from pyulog import ULog
from pyulog.px4_events import PX4Events

#pylint: disable=wrong-import-position
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'libevents/libs/python'))
from libevents_parse.parser import Parser
# pylint: disable=global-statement
__event_parser: PX4Events = None # Keep the parser to cache the default event definitions

#pylint: disable=invalid-name,global-statement

class FileCache:
""" Very simple file cache with a maximum number of files """

def __init__(self, path, max_num_files=1000):
self._path = path
self._max_num_files = max_num_files
if not os.path.isdir(path):
os.mkdir(path)

def access(self, file_name):
""" check if a file exists in the cache """
return os.path.isfile(os.path.join(self._path, file_name))

def insert(self, file_name, data):
""" insert data (bytes) """
# check cache size
cache_files = os.listdir(self._path)
if len(cache_files) >= self._max_num_files:
# select a random file (could be improved to LRU)
# (if flight review runs in multi-process mode, there's a minimal chance we delete a
# file that's trying to be accessed by another instance)
remove_index = random.randint(0, len(cache_files) - 1)
os.remove(os.path.join(self._path, cache_files[remove_index]))

# write
with open(os.path.join(self._path, file_name), 'wb') as cache_file:
cache_file.write(data)

@property
def path(self):
""" Get configured path """
return self._path

def get_event_definitions_from_log_file(ulog: ULog):
"""
Get the event definitions json file from the log file.
:return: path to json file or None
"""
if 'metadata_events' in ulog.msg_info_multiple_dict and \
'metadata_events_sha256' in ulog.msg_info_dict:
file_hash = ulog.msg_info_dict['metadata_events_sha256']
if len(file_hash) <= 64 and file_hash.isalnum():

file_cache = FileCache(get_metadata_cache_path())
events_metadata_filename = 'events.' + file_hash + '.json'
if not file_cache.access(events_metadata_filename):
# insert into the cache
metadata_events_bytes = b''.join(ulog.msg_info_multiple_dict['metadata_events'][0])
metadata_events_json = lzma.decompress(metadata_events_bytes)
file_cache.insert(events_metadata_filename, metadata_events_json)

return os.path.join(file_cache.path, events_metadata_filename)

return None


__event_parser = None # fallback event parser, used if the log doesn't contain the event definitions
def get_event_parser(ulog: ULog):
""" get event parser instance or None on error """
events_profile = 'dev'

event_definitions_json = get_event_definitions_from_log_file(ulog)
if event_definitions_json is not None:
with open(event_definitions_json, 'r', encoding="utf8") as json_file:
p = Parser()
p.load_definitions(json.load(json_file))
p.set_profile(events_profile)
return p

# No json definitions in the log -> use global definitions
global __event_parser
events_json_xz = get_events_filename()
# check for cached file update
downloaded = download_file_maybe(events_json_xz, get_events_url())
if downloaded == 2 or (downloaded == 1 and __event_parser is None):
# decompress
with lzma.open(events_json_xz, 'rt') as json_file:
p = Parser()
p.load_definitions(json.load(json_file))
p.set_profile(events_profile)
__event_parser = p

return __event_parser


def get_logged_events(ulog):
def get_logged_events(ulog: ULog) -> List[Tuple[int, str, str]]:
"""
Get the events as list of messages
:return: list of (timestamp, time str, log level str, message) tuples
:return: list of (timestamp, log level str, message) tuples
"""

try:
event_parser = get_event_parser(ulog)
except Exception as e:
print('Failed to get event parser: {}'.format(e))
return []
def get_default_json_definitions(already_has_default_parser: bool) -> Optional[Any]:
""" Retrieve the default json event definitions """

def event_log_level_str(log_level: int):
return {0: 'EMERGENCY',
1: 'ALERT',
2: 'CRITICAL',
3: 'ERROR',
4: 'WARNING',
5: 'NOTICE',
6: 'INFO',
7: 'DEBUG',
8: 'PROTOCOL',
9: 'DISABLED'}.get(log_level, 'UNKNOWN')
events_json_xz = get_events_filename()
# Check for cached file update
downloaded = download_file_maybe(events_json_xz, get_events_url())
if downloaded == 2 or (downloaded == 1 and not already_has_default_parser):
# Decompress
with lzma.open(events_json_xz, 'rt') as json_file:
return json.load(json_file)

def time_str(t):
m1, s1 = divmod(int(t/1e6), 60)
h1, m1 = divmod(m1, 60)
return "{:d}:{:02d}:{:02d}".format(h1, m1, s1)
return None

# parse events
messages = []
try:
events = ulog.get_dataset('event')
all_ids = events.data['id']
for event_idx, event_id in enumerate(all_ids):
log_level = (events.data['log_levels'][event_idx] >> 4) & 0xf
if log_level >= 8:
continue
args = []
i = 0
while True:
arg_str = 'arguments[{}]'.format(i)
if arg_str not in events.data:
break
arg = events.data[arg_str][event_idx]
args.append(arg)
i += 1
log_level_str = event_log_level_str(log_level)
t = events.data['timestamp'][event_idx]
event = None
if event_parser is not None:
event = event_parser.parse(event_id, bytes(args))
if event is None:
messages.append((t, time_str(t), log_level_str, \
'[Unknown event with ID {:}]'.format(event_id)))
else:
# only show default group
if event.group() == "default":
messages.append((t, time_str(t), log_level_str, event.message()))
# we could expand this a bit for events:
# - show the description too
# - handle url's as link (currently it's shown as text, and all tags are escaped)
except (KeyError, IndexError, ValueError) as error:
# no events in log
pass
global __event_parser
if __event_parser is None:
__event_parser = PX4Events()
__event_parser.set_default_json_definitions_cb(get_default_json_definitions)

return messages
return __event_parser.get_logged_events(ulog)
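
For readers unfamiliar with the new pyulog API, here is a minimal sketch of how the reworked get_logged_events() delegates to PX4Events. The file name 'flight.ulg' and the no-op callback are placeholders; the real code installs the get_default_json_definitions callback shown in the diff above.

# Sketch only (not part of the PR): exercising the new pyulog-based flow.
from pyulog import ULog
from pyulog.px4_events import PX4Events

ulog = ULog('flight.ulg')  # hypothetical log file

px4_events = PX4Events()
# The callback supplies default event definitions when the log itself does
# not embed them; returning None keeps pyulog's built-in defaults.
px4_events.set_default_json_definitions_cb(lambda already_has_default_parser: None)

# Per the new signature, this returns (timestamp, log level str, message) tuples.
for timestamp, log_level, message in px4_events.get_logged_events(ulog):
    print(timestamp, log_level, message)
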
6 changes: 3 additions & 3 deletions app/plot_app/plotted_tables.py
@@ -556,12 +556,12 @@ def time_str(t):
# in addition to an event with the same message so we can ignore those
if m.message[-1] == '\t':
continue
messages.append((m.timestamp, time_str(m.timestamp), m.log_level_str(), m.message))
messages.append((m.timestamp, m.log_level_str(), m.message))

messages = sorted(messages, key=lambda m: m[0])

log_times, log_times_str, log_levels, log_messages = \
zip(*messages) if len(messages) > 0 else ([],[],[],[])
log_times, log_levels, log_messages = zip(*messages) if len(messages) > 0 else ([],[],[])
log_times_str = [time_str(t) for t in log_times]
log_data = {
'times': log_times_str,
'levels': log_levels,
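
A small self-contained sketch of the changed unpacking in plotted_tables.py, using made-up sample data: messages now carry three fields (timestamp, level, message) instead of four, and the display time strings are derived afterwards. time_str mirrors the h:mm:ss helper defined earlier in that file.

# Illustrative sketch with made-up data (not part of the diff):
def time_str(t):
    """ Format a timestamp in microseconds as h:mm:ss """
    m, s = divmod(int(t / 1e6), 60)
    h, m = divmod(m, 60)
    return "{:d}:{:02d}:{:02d}".format(h, m, s)

messages = [(2500000, 'WARNING', 'Low battery'),
            (1000000, 'INFO', 'Takeoff detected')]

messages = sorted(messages, key=lambda m: m[0])
log_times, log_levels, log_messages = zip(*messages) if len(messages) > 0 else ([], [], [])
log_times_str = [time_str(t) for t in log_times]  # ['0:00:01', '0:00:02']
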
2 changes: 1 addition & 1 deletion app/requirements.txt
@@ -3,7 +3,7 @@ jinja2
jupyter
pyfftw
pylint
pyulog>=0.9
pyulog>=1.1
requests
scipy>=1.8.1
simplekml
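
Since the PX4Events helper only exists in newer pyulog releases, a quick sanity check, assuming a standard install, is to import the module the app now depends on:

# Sanity-check sketch (not part of the PR): confirm the installed pyulog
# satisfies the bumped requirement by importing the helper the app now uses.
try:
    from pyulog.px4_events import PX4Events
    print('pyulog is new enough: PX4Events is available')
except ImportError:
    print('pyulog is too old; install pyulog>=1.1')
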