NAS-130471 / 24.10-RC.1 / Add an event source for app statistics (by sonicaj) #14363

Merged 12 commits on Aug 28, 2024
52 changes: 52 additions & 0 deletions src/middlewared/middlewared/plugins/apps/ix_apps/docker/stats.py
@@ -0,0 +1,52 @@
from collections import defaultdict

import requests

from .utils import get_docker_client, PROJECT_KEY


def get_default_stats():
return defaultdict(lambda: {
'cpu_usage': 0,
'memory': 0,
'networks': defaultdict(lambda: {'rx_bytes': 0, 'tx_bytes': 0}),
'blkio': {'read': 0, 'write': 0},
})


def list_resources_stats_by_project(project_name: str | None = None) -> dict:
retries = 2
while retries > 0:
# We retry because, when an app is being stopped, we can run into a race condition:
# the container got listed, but by the time we queried its stats the container had
# already been removed (this is similar to what we do when we list resources by project)
try:
return list_resources_stats_by_project_internal(project_name)
except requests.exceptions.HTTPError:
retries -= 1
if retries == 0:
raise


def list_resources_stats_by_project_internal(project_name: str | None = None) -> dict:
projects = get_default_stats()
with get_docker_client() as client:
label_filter = {'label': f'{PROJECT_KEY}={project_name}' if project_name else PROJECT_KEY}
for container in client.containers.list(all=True, filters=label_filter, sparse=False):
stats = container.stats(stream=False, decode=None, one_shot=True)
project = container.labels.get(PROJECT_KEY)
if not project:
continue

blkio_container_stats = stats.get('blkio_stats', {}).get('io_service_bytes_recursive') or []
project_stats = projects[project]
project_stats['cpu_usage'] += stats.get('cpu_stats', {}).get('cpu_usage', {}).get('total_usage', 0)
project_stats['memory'] += stats.get('memory_stats', {}).get('usage', 0)
for entry in filter(lambda x: x['op'] in ('read', 'write'), blkio_container_stats):
project_stats['blkio'][entry['op']] += entry['value']
for net_name, net_values in stats.get('networks', {}).items():
project_stats['networks'][net_name]['rx_bytes'] += net_values.get('rx_bytes', 0)
project_stats['networks'][net_name]['tx_bytes'] += net_values.get('tx_bytes', 0)

return projects
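
For reference, a minimal sketch of the mapping returned by list_resources_stats_by_project(), keyed by project name. The project name ('myapp'), the interface name and all counter values below are hypothetical; each counter is a cumulative total taken from a single one-shot docker stats call.

# Hypothetical return value for a single project; all numbers are made up.
{
    'myapp': {
        'cpu_usage': 1_234_567_890,   # cumulative CPU time in nanoseconds
        'memory': 104_857_600,        # current memory usage in bytes
        'networks': {
            'eth0': {'rx_bytes': 2_048, 'tx_bytes': 4_096},
        },
        'blkio': {'read': 8_192, 'write': 512},
    },
}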
74 changes: 74 additions & 0 deletions src/middlewared/middlewared/plugins/apps/stats.py
@@ -0,0 +1,74 @@
import time

from middlewared.event import EventSource
from middlewared.plugins.docker.state_utils import Status
from middlewared.schema import Dict, Int, Str, List
from middlewared.service import CallError
from middlewared.validators import Range

from .ix_apps.docker.stats import list_resources_stats_by_project
from .stats_util import normalize_projects_stats


class AppStatsEventSource(EventSource):

"""
Retrieve statistics of apps.
"""

ACCEPTS = Dict(
Int('interval', default=2, validators=[Range(min_=2)]),
)
RETURNS = List(
'apps_stats',
items=[
Dict(
'stats',
Str('app_name'),
Int('cpu_usage', description='Percentage of CPU used by an app'),
Int('memory', description='Current memory (in bytes) used by an app'),
List(
'networks',
items=[
Dict(
'interface_stats',
Str('interface_name', description='Name of the interface used by the app'),
Int('rx_bytes', description='Bytes received per second on the interface'),
Int('tx_bytes', description='Bytes transmitted per second on the interface')
),
]
),
Dict(
'blkio',
Int('read', description='Blkio read bytes'),
Int('write', description='Blkio write bytes')
)
)
]
)

def run_sync(self):
if not self.middleware.call_sync('docker.state.validate', False):
raise CallError('Apps are not available')

old_projects_stats = list_resources_stats_by_project()
interval = self.arg['interval']
time.sleep(interval)

while not self._cancel_sync.is_set():
try:
project_stats = list_resources_stats_by_project()
self.send_event(
'ADDED', fields=normalize_projects_stats(project_stats, old_projects_stats, interval)
)
old_projects_stats = project_stats
time.sleep(interval)
except Exception:
if self.middleware.call_sync('docker.status')['status'] != Status.RUNNING.value:
return

raise


def setup(middleware):
middleware.register_event_source('app.stats', AppStatsEventSource, roles=['APPS_READ'])
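
Clients subscribed to the app.stats event source registered above receive an ADDED event every interval seconds. A hedged sketch of what one event's fields could look like, following the RETURNS schema (the app name and all numbers are made up; network rates and CPU percentage are produced by normalize_projects_stats in the next file):

# Hypothetical payload of a single 'ADDED' event (one entry per app).
[
    {
        'app_name': 'plex',
        'cpu_usage': 12,            # percent of one CPU over the last interval
        'memory': 536_870_912,      # bytes currently in use
        'networks': [
            {'interface_name': 'eth0', 'rx_bytes': 1_024, 'tx_bytes': 2_048},  # bytes/s
        ],
        'blkio': {'read': 4_096, 'write': 0},  # cumulative bytes
    },
]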
55 changes: 55 additions & 0 deletions src/middlewared/middlewared/plugins/apps/stats_util.py
@@ -0,0 +1,55 @@
from .ix_apps.metadata import get_collective_metadata
from .ix_apps.utils import get_app_name_from_project_name


NANO_SECOND = 1000000000


def normalize_projects_stats(all_projects_stats: dict, old_stats: dict, interval: int) -> list[dict]:
normalized_projects_stats = []
all_configured_apps = get_collective_metadata()
for project, data in all_projects_stats.items():
app_name = get_app_name_from_project_name(project)
if app_name not in all_configured_apps:
continue
else:
all_configured_apps.pop(app_name)

normalized_data = {
'app_name': app_name,
'memory': data['memory'],
'blkio': data['blkio'],
}

# Docker provides CPU usage time in nanoseconds.
# To calculate the CPU usage percentage:
# 1. Calculate the difference in CPU usage (`cpu_delta`) between the current and previous stats.
# 2. Normalize this delta over the given time interval by dividing by (interval * NANO_SECOND).
# 3. Multiply by 100 to convert to percentage.
cpu_delta = data['cpu_usage'] - old_stats[project]['cpu_usage']
normalized_data['cpu_usage'] = (cpu_delta / (interval * NANO_SECOND)) * 100

networks = []
for net_name, network_data in data['networks'].items():
networks.append({
'interface_name': net_name,
'rx_bytes': int(
(network_data['rx_bytes'] - old_stats[project]['networks'][net_name]['rx_bytes']) / interval
),
'tx_bytes': int(
(network_data['tx_bytes'] - old_stats[project]['networks'][net_name]['tx_bytes']) / interval
),
})
normalized_data['networks'] = networks
normalized_projects_stats.append(normalized_data)

for stopped_app in all_configured_apps:
normalized_projects_stats.append({
'app_name': stopped_app,
'memory': 0,
'cpu_usage': 0,
'networks': [],
'blkio': {'read': 0, 'write': 0},
})

return normalized_projects_stats
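
As a quick worked example of the CPU normalization above (all numbers hypothetical): if an app's cumulative CPU time grows by one second of CPU time during a two-second polling interval, the reported usage is 50% of one core.

NANO_SECOND = 1000000000

old_cpu = 5_000_000_000            # previous cumulative CPU time (ns)
new_cpu = 6_000_000_000            # current cumulative CPU time (ns)
interval = 2                       # seconds between samples

cpu_delta = new_cpu - old_cpu                              # 1_000_000_000 ns
cpu_usage = (cpu_delta / (interval * NANO_SECOND)) * 100   # 50.0 -> 50% of one core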