Skip to content

Commit

Permalink
Fix updates to deployments and session grouping (#689)
Browse files Browse the repository at this point in the history
* feat: allow regrouping events in background from admin

* fix: update cached event count explicitly after regrouping

* fix: troubleshoot stalled event regrouping with logging

* fix: bingo! don't try to delete empty events across entire database!

* feat: determine when to regroup events another way

* fix: checking dates on new deployments
  • Loading branch information
mihow authored Jan 22, 2025
1 parent 4d7079d commit bfd6145
Show file tree
Hide file tree
Showing 4 changed files with 38 additions and 14 deletions.
10 changes: 4 additions & 6 deletions ami/main/admin.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,13 +115,11 @@ def sync_captures(self, request: HttpRequest, queryset: QuerySet[Deployment]) ->
self.message_user(request, msg)

# Action that regroups all captures in the deployment into events
@admin.action(description="Regroup captures into events (async)")
def regroup_events(self, request: HttpRequest, queryset: QuerySet[Deployment]) -> None:
    """Queue a background task per selected deployment to regroup its captures into events.

    Runs asynchronously via Celery so large deployments do not block the admin
    request; the user is shown the queued task ids for follow-up.
    """
    # Fan out one task per deployment; .delay() returns an AsyncResult immediately.
    queued_tasks = [tasks.regroup_events.delay(deployment.pk) for deployment in queryset]
    msg = f"Regrouping captures into events for {len(queued_tasks)} deployments in background: {queued_tasks}"
    self.message_user(request, msg)

list_filter = ("project",)
actions = [sync_captures, regroup_events]
Expand Down
39 changes: 33 additions & 6 deletions ami/main/models.py
Original file line number Diff line number Diff line change
Expand Up @@ -602,11 +602,21 @@ def update_calculated_fields(self, save=False):
self.save(update_calculated_fields=False)

def save(self, update_calculated_fields=True, *args, **kwargs):
    """Save the deployment and, when captures changed since events were last
    grouped, queue a background task to regroup captures into events.

    :param update_calculated_fields: when False, skip the post-save
        regrouping check (used by tasks that update cached fields to avoid
        re-triggering themselves).
    """
    if self.pk:
        # Earliest of: the most recent event update, and this deployment's own
        # last update. Captures updated at/after this moment have not yet been
        # reflected in the event grouping.
        events_last_updated = min(
            [
                # BUG FIX: the aggregate alias is "latest_updated_at" but the
                # original read back the misspelled key "latest_update_at",
                # which always returned None and forced the datetime.max
                # fallback, so new-capture detection never used event times.
                self.events.aggregate(latest_updated_at=models.Max("updated_at")).get("latest_updated_at")
                # No events yet: use datetime.max so min() picks updated_at.
                # NOTE(review): datetime.max is naive; if USE_TZ is enabled,
                # updated_at is aware and comparing them raises TypeError when
                # the aggregate is None — confirm and use an aware sentinel.
                or datetime.datetime.max,
                self.updated_at,
            ]
        )
    else:
        # New, unsaved deployment: treat everything as not yet grouped.
        events_last_updated = datetime.datetime.min

    super().save(*args, **kwargs)
    if self.pk and update_calculated_fields:
        # @TODO Use "dirty" flag strategy to only update when needed
        new_or_updated_captures = self.captures.filter(updated_at__gte=events_last_updated).count()
        # Cached captures_count higher than the live count implies deletions.
        deleted_captures = True if self.captures.count() < (self.captures_count or 0) else False
        if new_or_updated_captures or deleted_captures:
            ami.tasks.regroup_events.delay(self.pk)
Expand Down Expand Up @@ -896,13 +906,20 @@ def group_images_into_events(
f"Duration: {event.duration_label()}"
)

logger.info(
f"Done grouping {len(image_timestamps)} captures into {len(events)} events " f"for deployment {deployment}"
)

if delete_empty:
delete_empty_events()
logger.info("Deleting empty events for deployment")
delete_empty_events(deployment=deployment)

for event in events:
# Set the width and height of all images in each event based on the first image
logger.info(f"Setting image dimensions for event {event}")
set_dimensions_for_collection(event)

logger.info("Checking for unusual statistics of events")
events_over_24_hours = Event.objects.filter(
deployment=deployment, start__lt=models.F("end") - datetime.timedelta(days=1)
)
Expand All @@ -916,10 +933,14 @@ def group_images_into_events(
f"Found {events_starting_before_noon.count()} events starting before noon in deployment {deployment}. "
)

logger.info("Updating relevant cached fields on deployment")
deployment.events_count = len(events)
deployment.save(update_calculated_fields=False, update_fields=["events_count"])

return events


def delete_empty_events(dry_run=False):
def delete_empty_events(deployment: Deployment, dry_run=False):
"""
Delete events that have no images, occurrences or other related records.
"""
Expand All @@ -931,8 +952,14 @@ def delete_empty_events(dry_run=False):
# if f.one_to_many or f.one_to_one or (f.many_to_many and f.auto_created)
# ]

events = Event.objects.annotate(num_images=models.Count("captures")).filter(num_images=0)
events = events.annotate(num_occurrences=models.Count("occurrences")).filter(num_occurrences=0)
events = (
Event.objects.filter(deployment=deployment)
.annotate(
num_images=models.Count("captures"),
num_occurrences=models.Count("occurrences"),
)
.filter(num_images=0, num_occurrences=0)
)

if dry_run:
for event in events:
Expand Down
2 changes: 1 addition & 1 deletion ami/main/tests.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ def test_pruning_empty_events(self):
for event in events:
event.captures.all().delete()

delete_empty_events()
delete_empty_events(deployment=self.deployment)

remaining_events = Event.objects.filter(pk__in=[event.pk for event in events])

Expand Down
1 change: 0 additions & 1 deletion ami/tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,6 @@ def regroup_events(deployment_id: int) -> None:
logger.info(f"{deployment } now has {len(events)} events")
else:
logger.error(f"Deployment with id {deployment_id} not found")
deployment.update_calculated_fields(save=True)


@celery_app.task(soft_time_limit=one_hour, time_limit=one_hour + 60)
Expand Down

0 comments on commit bfd6145

Please sign in to comment.