Remove assert no errors
armenzg committed Dec 12, 2024
1 parent fdf92ab commit 31f71d4
Showing 1 changed file with 13 additions and 26 deletions.
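Context for the diff below: every hunk drops the assert_no_errors=False keyword from calls to the store_event test helper. A minimal sketch of what that flag controls, assuming the helper defaults to assert_no_errors=True; the real implementation lives in Sentry's test fixtures and may differ:

    # Sketch of the store_event test helper. The assert_no_errors=True
    # default and the internal _store call are assumptions, not the
    # actual fixture code.
    def store_event(self, data, project_id, assert_no_errors=True):
        event = self._store(data, project_id)  # hypothetical storage call
        if assert_no_errors:
            # By default, fail the test if event processing recorded errors.
            errors = event.data.get("errors")
            assert not errors, errors
        return event

Under that assumption, removing assert_no_errors=False restores the default, so these tests now also fail if event processing reports errors rather than silently ignoring them.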
tests/sentry/tasks/test_backfill_seer_grouping_records.py — 13 additions, 26 deletions
@@ -156,7 +156,7 @@ def create_group_event_rows(self, num: int) -> Mapping[str, Any]:
                 "timestamp": before_now(seconds=10).isoformat(),
                 "title": "title",
             }
-            event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+            event = self.store_event(data=data, project_id=self.project.id)
             events.append(event)
             event.group.times_seen = 5
             event.group.save()
@@ -273,7 +273,6 @@ def test_lookup_group_data_stacktrace_bulk_not_stacktrace_grouping(self):
         event = self.store_event(
             data={"exception": EXCEPTION, "title": "title", "fingerprint": ["2"]},
             project_id=self.project.id,
-            assert_no_errors=False,
         )
         group_ids = [row["group_id"] for row in rows]
         for group_id in group_ids:
@@ -311,7 +310,7 @@ def test_lookup_group_data_stacktrace_bulk_no_stacktrace_exception(self):
         for group_id in group_ids:
             hashes.update({group_id: self.group_hashes[group_id]})
         # Create one event where the stacktrace has no exception
-        event = self.store_event(data={}, project_id=self.project.id, assert_no_errors=False)
+        event = self.store_event(data={}, project_id=self.project.id)
         rows.append({"event_id": event.event_id, "group_id": event.group_id})
         hashes.update({event.group_id: GroupHash.objects.get(group_id=event.group.id).hash})

@@ -364,7 +363,6 @@ def test_lookup_group_data_stacktrace_bulk_invalid_stacktrace_exception(self, mo
                 "timestamp": before_now(seconds=10).isoformat(),
             },
             project_id=self.project.id,
-            assert_no_errors=False,
         )
         rows.append({"event_id": event.event_id, "group_id": event.group_id})
         group_hash = GroupHash.objects.filter(group_id=event.group.id).first()
@@ -592,7 +590,6 @@ def test_backfill_seer_grouping_records_success_cohorts_simple(
                 "timestamp": before_now(seconds=10).isoformat(),
             },
             project_id=project2.id,
-            assert_no_errors=False,
         )
         event2.group.times_seen = 5
         event2.group.save()
@@ -645,7 +642,6 @@ def test_backfill_seer_grouping_records_success_cohorts_setting_defined(
                 "timestamp": before_now(seconds=10).isoformat(),
             },
             project_id=project2.id,
-            assert_no_errors=False,
         )
         event2.group.times_seen = 5
         event2.group.save()
@@ -721,7 +717,7 @@ def test_backfill_seer_grouping_records_groups_1_times_seen(
                 "title": "title",
                 "timestamp": before_now(seconds=10).isoformat(),
             }
-            event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+            event = self.store_event(data=data, project_id=self.project.id)
             groups_seen_once.append(event.group)

         with TaskRunner():
@@ -760,7 +756,7 @@ def test_backfill_seer_grouping_records_groups_have_neighbor(
                 "title": "title",
                 "timestamp": before_now(seconds=10),
             }
-            event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+            event = self.store_event(data=data, project_id=self.project.id)
             event.group.times_seen = 2
             event.group.save()
             # Arbitrarily choose a parent group's hash that has times_seen = 5
@@ -819,7 +815,7 @@ def test_backfill_seer_grouping_records_groups_has_invalid_neighbor(
                 "title": "title",
                 "timestamp": before_now(seconds=10).isoformat(),
             }
-            event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+            event = self.store_event(data=data, project_id=self.project.id)
             event.group.times_seen = 2
             event.group.save()
             # Make the similar group a hash that does not exist
@@ -876,7 +872,7 @@ def test_backfill_seer_grouping_records_multiple_batches(
                 "title": "title",
                 "timestamp": before_now(seconds=10).isoformat(),
             }
-            event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+            event = self.store_event(data=data, project_id=self.project.id)
             event.group.times_seen = 2
             event.group.save()

@@ -1016,7 +1012,7 @@ def test_backfill_seer_grouping_records_only_delete(self, mock_project_delete_gr
                 "title": "title",
                 "timestamp": before_now(seconds=10).isoformat(),
             }
-            event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+            event = self.store_event(data=data, project_id=self.project.id)
             event.group.times_seen = 2
             event.group.data["metadata"] = copy.deepcopy(default_metadata)
             if i < 3:
@@ -1049,7 +1045,6 @@ def test_backfill_seer_grouping_records_cohort_only_delete(self, mock_delete_gro
                 "timestamp": before_now(seconds=10).isoformat(),
             },
             project_id=project2.id,
-            assert_no_errors=False,
         )
         event2.group.times_seen = 5
         event2.group.save()
@@ -1081,7 +1076,7 @@ def test_backfill_seer_grouping_records_exclude_deleted_groups(
             "title": "title",
             "timestamp": before_now(seconds=10).isoformat(),
         }
-        event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+        event = self.store_event(data=data, project_id=self.project.id)
         event.group.times_seen = 2
         event.group.status = GroupStatus.PENDING_DELETION
         event.group.substatus = None
@@ -1093,7 +1088,7 @@ def test_backfill_seer_grouping_records_exclude_deleted_groups(
             "title": "title",
             "timestamp": before_now(seconds=10).isoformat(),
         }
-        event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+        event = self.store_event(data=data, project_id=self.project.id)
         event.group.times_seen = 2
         event.group.status = GroupStatus.DELETION_IN_PROGRESS
         event.group.substatus = None
@@ -1206,7 +1201,7 @@ def test_backfill_seer_grouping_records_exclude_90_day_old_groups(
             "title": "title",
             "timestamp": before_now(seconds=10).isoformat(),
         }
-        event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+        event = self.store_event(data=data, project_id=self.project.id)
         event.group.times_seen = 2
         event.group.last_seen = datetime.now(UTC) - timedelta(days=90)
         event.group.save()
@@ -1305,7 +1300,6 @@ def test_backfill_seer_grouping_records_exclude_invalid_groups(
         event = self.store_event(
             data={"exception": EXCEPTION, "title": "title", "fingerprint": ["2"]},
             project_id=self.project.id,
-            assert_no_errors=False,
         )
         event.group.times_seen = 5
         event.group.save()
@@ -1550,9 +1544,7 @@ def test_backfill_seer_grouping_records_empty_batch(
                 "title": "title",
                 "timestamp": before_now(seconds=10).isoformat(),
             }
-            event = self.store_event(
-                data=data, project_id=project_invalid_batch.id, assert_no_errors=False
-            )
+            event = self.store_event(data=data, project_id=project_invalid_batch.id)
             event.group.times_seen = 2
             # event.group.data["metadata"] = copy.deepcopy(default_metadata)
             event.group.save()
@@ -1572,9 +1564,7 @@ def test_backfill_seer_grouping_records_empty_batch(
                 "title": "title",
                 "timestamp": before_now(seconds=10).isoformat(),
             }
-            event = self.store_event(
-                data=data, project_id=project_invalid_batch.id, assert_no_errors=False
-            )
+            event = self.store_event(data=data, project_id=project_invalid_batch.id)
             event.group.times_seen = 1 if i < batch_size / 2 else 2
             event.group.status = (
                 GroupStatus.PENDING_DELETION if i >= batch_size / 2 else GroupStatus.UNRESOLVED
@@ -1654,7 +1644,7 @@ def test_make_postgres_call_with_filter_invalid(self):
             "title": "title",
             "timestamp": before_now(seconds=10).isoformat(),
         }
-        event = self.store_event(data=data, project_id=self.project.id, assert_no_errors=False)
+        event = self.store_event(data=data, project_id=self.project.id)
         event.group.times_seen = 2
         event.group.status = GroupStatus.PENDING_DELETION
         event.group.substatus = None
@@ -1795,7 +1785,6 @@ def test_backfill_seer_grouping_records_cohort_creation(
                 "timestamp": before_now(seconds=10).isoformat(),
             },
             project_id=project_same_cohort.id,
-            assert_no_errors=False,
         )
         event_same_cohort.group.times_seen = 5
         event_same_cohort.group.save()
@@ -1902,7 +1891,6 @@ def test_backfill_seer_grouping_records_cohort_creation_not_seer_eligible(
                 "timestamp": before_now(seconds=10).isoformat(),
             },
             project_id=project_same_cohort_not_eligible.id,
-            assert_no_errors=False,
         )
         event_same_cohort.group.times_seen = 5
         event_same_cohort.group.save()
@@ -1999,7 +1987,6 @@ def test_backfill_seer_grouping_records_cohort_creation_multiple_batches(
                 "timestamp": before_now(seconds=10).isoformat(),
             },
             project_id=project_same_worker.id,
-            assert_no_errors=False,
         )
         event_same_worker.group.times_seen = 5
         event_same_worker.group.save()
