Skip to content

Commit

Permalink
Merge PR #4541 from @frack113 - Update SIGMA tests
Browse files Browse the repository at this point in the history
chore: remove duplicate tests that are already covered by pysigma validation
  • Loading branch information
frack113 authored Nov 6, 2023
1 parent 271f972 commit f6eca9a
Show file tree
Hide file tree
Showing 3 changed files with 145 additions and 138 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/sigma-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@ jobs:
pip install sigma-cli
- name: Test Sigma Rule Syntax
run: |
sigma check rules* -c tests/sigma_cli_conf.yml
sigma check --fail-on-error --fail-on-issues --validation-config tests/sigma_cli_conf.yml rules*
- name: Test Sigma Rules
run: |
pip install PyYAML attackcti colorama
Expand Down
8 changes: 5 additions & 3 deletions tests/sigma_cli_conf.yml
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
validators:
- all
- -tlptag
- -tlpv1_tag
- all_of_them_condition
- duplicate_tag
- duplicate_title
- identifier_existence
- identifier_uniqueness
exclusions:
# escaped_wildcard
021310d9-30a6-480a-84b7-eaa69aeb92bb: escaped_wildcard
Expand Down
273 changes: 139 additions & 134 deletions tests/test_rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -160,28 +160,29 @@ def test_confirm_correct_mitre_tags(self):
+ "There are rules with incorrect/unknown MITRE Tags. (please inform us about new tags that are not yet supported in our tests) and check the correct tags here: https://attack.mitre.org/ ",
)

def test_duplicate_tags(self):
files_with_incorrect_mitre_tags = []

for file in self.yield_next_rule_file_path(self.path_to_rules):
tags = self.get_rule_part(file_path=file, part_name="tags")
if tags:
known_tags = []
for tag in tags:
if tag in known_tags:
print(
Fore.RED
+ "Rule {} has the duplicate tag {}".format(file, tag)
)
files_with_incorrect_mitre_tags.append(file)
else:
known_tags.append(tag)
# sigma validators duplicate_tag
# def test_duplicate_tags(self):
# files_with_incorrect_mitre_tags = []

self.assertEqual(
files_with_incorrect_mitre_tags,
[],
Fore.RED + "There are rules with duplicate tags",
)
# for file in self.yield_next_rule_file_path(self.path_to_rules):
# tags = self.get_rule_part(file_path=file, part_name="tags")
# if tags:
# known_tags = []
# for tag in tags:
# if tag in known_tags:
# print(
# Fore.RED
# + "Rule {} has the duplicate tag {}".format(file, tag)
# )
# files_with_incorrect_mitre_tags.append(file)
# else:
# known_tags.append(tag)

# self.assertEqual(
# files_with_incorrect_mitre_tags,
# [],
# Fore.RED + "There are rules with duplicate tags",
# )

def test_duplicate_references(self):
files_with_duplicate_references = []
Expand Down Expand Up @@ -313,21 +314,22 @@ def test_single_named_condition_with_x_of_them(self):
+ "There are rules using '1/all of them' style conditions but only have one condition",
)

def test_all_of_them_condition(self):
faulty_detections = []
# Sigma validator all_of_them_condition
# def test_all_of_them_condition(self):
# faulty_detections = []

for file in self.yield_next_rule_file_path(self.path_to_rules):
detection = self.get_rule_part(file_path=file, part_name="detection")
# for file in self.yield_next_rule_file_path(self.path_to_rules):
# detection = self.get_rule_part(file_path=file, part_name="detection")

if "all of them" in detection["condition"]:
faulty_detections.append(file)
# if "all of them" in detection["condition"]:
# faulty_detections.append(file)

self.assertEqual(
faulty_detections,
[],
Fore.RED
+ "There are rules using 'all of them'. Better use e.g. 'all of selection*' instead (and use the 'selection_' prefix as search-identifier).",
)
# self.assertEqual(
# faulty_detections,
# [],
# Fore.RED
# + "There are rules using 'all of them'. Better use e.g. 'all of selection*' instead (and use the 'selection_' prefix as search-identifier).",
# )

def test_duplicate_detections(self):
def compare_detections(detection1: dict, detection2: dict) -> bool:
Expand Down Expand Up @@ -488,37 +490,38 @@ def test_event_id_instead_of_process_creation(self):
+ "There are rules still using Sysmon 1 or Event ID 4688. Please migrate to the process_creation category.",
)

def test_missing_id(self):
faulty_rules = []
dict_id = {}
for file in self.yield_next_rule_file_path(self.path_to_rules):
id = self.get_rule_part(file_path=file, part_name="id")
if not id:
print(Fore.YELLOW + "Rule {} has no field 'id'.".format(file))
faulty_rules.append(file)
elif len(id) != 36:
print(
Fore.YELLOW
+ "Rule {} has a malformed 'id' (not 36 chars).".format(file)
)
faulty_rules.append(file)
elif id.lower() in dict_id.keys():
print(
Fore.YELLOW
+ "Rule {} has the same 'id' as {}. Ids have to be unique.".format(
file, dict_id[id]
)
)
faulty_rules.append(file)
else:
dict_id[id.lower()] = file
# sigma validator identifier_existence identifier_uniqueness
# def test_missing_id(self):
# faulty_rules = []
# dict_id = {}
# for file in self.yield_next_rule_file_path(self.path_to_rules):
# id = self.get_rule_part(file_path=file, part_name="id")
# if not id:
# print(Fore.YELLOW + "Rule {} has no field 'id'.".format(file))
# faulty_rules.append(file)
# elif len(id) != 36:
# print(
# Fore.YELLOW
# + "Rule {} has a malformed 'id' (not 36 chars).".format(file)
# )
# faulty_rules.append(file)
# elif id.lower() in dict_id.keys():
# print(
# Fore.YELLOW
# + "Rule {} has the same 'id' as {}. Ids have to be unique.".format(
# file, dict_id[id]
# )
# )
# faulty_rules.append(file)
# else:
# dict_id[id.lower()] = file

self.assertEqual(
faulty_rules,
[],
Fore.RED
+ "There are rules with missing or malformed 'id' fields. Generate an id (e.g. here: https://www.uuidgenerator.net/version4) and add it to the reported rule(s).",
)
# self.assertEqual(
# faulty_rules,
# [],
# Fore.RED
# + "There are rules with missing or malformed 'id' fields. Generate an id (e.g. here: https://www.uuidgenerator.net/version4) and add it to the reported rule(s).",
# )

def test_optional_related(self):
faulty_rules = []
Expand Down Expand Up @@ -1349,33 +1352,34 @@ def test_title_in_first_line(self):
+ "There are rules without the 'title' attribute in their first line.",
)

def test_duplicate_titles(self):
# This test ensure that every rule has a unique title
faulty_rules = []
titles_dict = {}
for file in self.yield_next_rule_file_path(self.path_to_rules):
title = (
self.get_rule_part(file_path=file, part_name="title").lower().rstrip()
)
duplicate = False
for rule, title_ in titles_dict.items():
if title == title_:
print(
Fore.RED
+ "Rule {} has an already used title in {}.".format(file, rule)
)
duplicate = True
faulty_rules.append(file)
continue
if not duplicate:
titles_dict[file] = title

self.assertEqual(
faulty_rules,
[],
Fore.RED
+ "There are rules that share the same 'title'. Please check: https://github.com/SigmaHQ/sigma/wiki/Rule-Creation-Guide#title",
)
# sigma validators duplicate_title
# def test_duplicate_titles(self):
# # This test ensure that every rule has a unique title
# faulty_rules = []
# titles_dict = {}
# for file in self.yield_next_rule_file_path(self.path_to_rules):
# title = (
# self.get_rule_part(file_path=file, part_name="title").lower().rstrip()
# )
# duplicate = False
# for rule, title_ in titles_dict.items():
# if title == title_:
# print(
# Fore.RED
# + "Rule {} has an already used title in {}.".format(file, rule)
# )
# duplicate = True
# faulty_rules.append(file)
# continue
# if not duplicate:
# titles_dict[file] = title

# self.assertEqual(
# faulty_rules,
# [],
# Fore.RED
# + "There are rules that share the same 'title'. Please check: https://github.com/SigmaHQ/sigma/wiki/Rule-Creation-Guide#title",
# )

# def test_invalid_logsource_attributes(self):
# faulty_rules = []
Expand Down Expand Up @@ -1598,50 +1602,51 @@ def test_unused_selection(self):

# self.assertEqual(faulty_rules, [], Fore.RED + "There are rules with common typos in field names.")

def test_unknown_value_modifier(self):
known_modifiers = [
"contains",
"startswith",
"endswith",
"all",
"base64offset",
"base64",
"utf16le",
"utf16be",
"wide",
"utf16",
"windash",
"re",
"cidr",
]
faulty_rules = []
for file in self.yield_next_rule_file_path(self.path_to_rules):
detection = self.get_rule_part(file_path=file, part_name="detection")
if detection:
for search_identifier in detection:
if isinstance(detection[search_identifier], dict):
for field in detection[search_identifier]:
if "|" in field:
for current_modifier in field.split("|")[1:]:
found = False
for target_modifier in known_modifiers:
if current_modifier == target_modifier:
found = True
if not found:
print(
Fore.RED
+ "Rule {} uses an unknown field modifier ({}/{})".format(
file, search_identifier, field
)
)
faulty_rules.append(file)

self.assertEqual(
faulty_rules,
[],
Fore.RED
+ "There are rules with unknown value modifiers. Most often it is just a typo.",
)
# Sigma error validator SigmaModifierError
# def test_unknown_value_modifier(self):
# known_modifiers = [
# "contains",
# "startswith",
# "endswith",
# "all",
# "base64offset",
# "base64",
# "utf16le",
# "utf16be",
# "wide",
# "utf16",
# "windash",
# "re",
# "cidr",
# ]
# faulty_rules = []
# for file in self.yield_next_rule_file_path(self.path_to_rules):
# detection = self.get_rule_part(file_path=file, part_name="detection")
# if detection:
# for search_identifier in detection:
# if isinstance(detection[search_identifier], dict):
# for field in detection[search_identifier]:
# if "|" in field:
# for current_modifier in field.split("|")[1:]:
# found = False
# for target_modifier in known_modifiers:
# if current_modifier == target_modifier:
# found = True
# if not found:
# print(
# Fore.RED
# + "Rule {} uses an unknown field modifier ({}/{})".format(
# file, search_identifier, field
# )
# )
# faulty_rules.append(file)

# self.assertEqual(
# faulty_rules,
# [],
# Fore.RED
# + "There are rules with unknown value modifiers. Most often it is just a typo.",
# )

def test_all_value_modifier_single_item(self):
faulty_rules = []
Expand Down

0 comments on commit f6eca9a

Please sign in to comment.