
Commit cd5360b

Merge pull request #34 from UncoderIO/issue-33
Sigma Platform: The "references" field is now optional; fixed problem…
2 parents 82a49e2 + d54f88f commit cd5360b

File tree

1 file changed: +6 -2 lines changed
  • translator/app/translator/platforms/sigma/parsers


translator/app/translator/platforms/sigma/parsers/sigma.py

Lines changed: 6 additions & 2 deletions
@@ -37,7 +37,7 @@ class SigmaParser(YamlRuleMixin):
     condition_tokenizer = SigmaConditionTokenizer()
     tokenizer: SigmaTokenizer = SigmaTokenizer()
     mappings: SigmaMappings = sigma_mappings
-    mandatory_fields = {"title", "description", "references", "logsource", "detection"}
+    mandatory_fields = {"title", "description", "logsource", "detection"}

     @staticmethod
     def __parse_false_positives(false_positives: Union[str, List[str], None]) -> list:
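With "references" dropped from mandatory_fields, a rule that omits that field should no longer be rejected. A minimal sketch of the idea, assuming __validate_rule amounts to checking the rule's keys against mandatory_fields (the sample rule below is made up for illustration, not taken from the repository):

# Illustrative sketch only: assumes validation is a set-difference check
# against mandatory_fields; the sample rule is hypothetical.
mandatory_fields = {"title", "description", "logsource", "detection"}

sigma_rule = {
    "title": "Example rule without references",
    "description": "Hypothetical Sigma rule used only to illustrate the change",
    "logsource": {"product": "windows", "service": "security"},
    "detection": {"selection": {"EventID": 4624}, "condition": "selection"},
}

missing = mandatory_fields - set(sigma_rule)
print(missing)  # set() -> nothing missing, so the rule passes validation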
@@ -69,7 +69,11 @@ def __validate_rule(self, rule: dict):
     def parse(self, text: str) -> SiemContainer:
         sigma_rule = self.load_rule(text=text)
         self.__validate_rule(rule=sigma_rule)
-        log_sources = {key: [value] for key, value in (sigma_rule.get("logsource", {})).items()}
+        log_sources = {
+            key: [value]
+            for key, value in (sigma_rule.get("logsource", {})).items()
+            if key in ("product", "service", "category")
+        }
         tokens = self.tokenizer.tokenize(detection=sigma_rule.get("detection"))
         field_tokens = QueryTokenizer.filter_tokens(tokens, Field)
         field_names = [field.source_name for field in field_tokens]
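
The rewritten comprehension keeps only the product, service, and category keys, so any other attributes nested under logsource are ignored when building log_sources. A quick illustration with a made-up logsource dict:

# Illustrative only: mirrors the new comprehension with sample data.
logsource = {
    "product": "windows",
    "service": "security",
    "definition": "Extra metadata that is now filtered out",
}

log_sources = {
    key: [value]
    for key, value in logsource.items()
    if key in ("product", "service", "category")
}
print(log_sources)  # {'product': ['windows'], 'service': ['security']}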
