diff --git a/uncoder-core/app/translator/core/context_vars.py b/uncoder-core/app/translator/core/context_vars.py index 2fd36c45..591883d8 100644 --- a/uncoder-core/app/translator/core/context_vars.py +++ b/uncoder-core/app/translator/core/context_vars.py @@ -1,4 +1,4 @@ from contextvars import ContextVar return_only_first_query_ctx_var: ContextVar[bool] = ContextVar("return_only_first_query_ctx_var", default=False) -"""Set to True to return ony first query if rendered multiple options""" +"""Set to True to return only first query if rendered multiple options""" diff --git a/uncoder-core/app/translator/core/models/field.py b/uncoder-core/app/translator/core/models/field.py index 10b661b0..2576f93a 100644 --- a/uncoder-core/app/translator/core/models/field.py +++ b/uncoder-core/app/translator/core/models/field.py @@ -60,6 +60,11 @@ def value(self) -> Union[int, str, StrValue, list[Union[int, str, StrValue]]]: return self.values[0] return self.values + @value.setter + def value(self, new_value: Union[int, str, StrValue, list[Union[int, str, StrValue]]]) -> None: + self.values = [] + self.__add_value(new_value) + def __add_value(self, value: Optional[Union[int, str, StrValue, list, tuple]]) -> None: if value and isinstance(value, (list, tuple)): for v in value: diff --git a/uncoder-core/app/translator/core/render.py b/uncoder-core/app/translator/core/render.py index d7d0716a..055ce889 100644 --- a/uncoder-core/app/translator/core/render.py +++ b/uncoder-core/app/translator/core/render.py @@ -16,9 +16,10 @@ limitations under the License. ----------------------------------------------------------------- """ + from abc import ABC, abstractmethod from collections.abc import Callable -from typing import Optional, Union +from typing import ClassVar, Optional, Union from app.translator.const import DEFAULT_VALUE_TYPE from app.translator.core.context_vars import return_only_first_query_ctx_var @@ -165,7 +166,14 @@ class QueryRender(ABC): is_single_line_comment: bool = False unsupported_functions_text = "Unsupported functions were excluded from the result query:" - platform_functions: PlatformFunctions = PlatformFunctions() + platform_functions: PlatformFunctions = None + + def __init__(self): + self.init_platform_functions() + + def init_platform_functions(self) -> None: + self.platform_functions = PlatformFunctions() + self.platform_functions.platform_query_render = self def render_not_supported_functions(self, not_supported_functions: list) -> str: line_template = f"{self.comment_symbol} " if self.comment_symbol and self.is_single_line_comment else "" @@ -192,19 +200,19 @@ class PlatformQueryRender(QueryRender): field_value_map = BaseQueryFieldValue(or_token=or_token) - query_pattern = "{table} {query} {functions}" - raw_log_field_pattern_map: dict = None + raw_log_field_pattern_map: ClassVar[dict[str, str]] = None def __init__(self): + super().__init__() self.operator_map = { LogicalOperatorType.AND: f" {self.and_token} ", LogicalOperatorType.OR: f" {self.or_token} ", LogicalOperatorType.NOT: f" {self.not_token} ", } - def generate_prefix(self, log_source_signature: LogSourceSignature, functions_prefix: str = "") -> str: # noqa: ARG002 - if str(log_source_signature): - return f"{log_source_signature!s} {self.and_token}" + def generate_prefix(self, log_source_signature: Optional[LogSourceSignature], functions_prefix: str = "") -> str: # noqa: ARG002 + if log_source_signature and str(log_source_signature): + return f"{log_source_signature} {self.and_token}" return "" def generate_functions(self, 
functions: list[Function], source_mapping: SourceMapping) -> RenderedFunctions: @@ -272,6 +280,10 @@ def wrap_query_with_meta_info(self, meta_info: MetaInfoContainer, query: str) -> query = f"{query}\n\n{query_meta_info}" return query + @staticmethod + def _finalize_search_query(query: str) -> str: + return query + def finalize_query( self, prefix: str, @@ -283,8 +295,8 @@ def finalize_query( *args, # noqa: ARG002 **kwargs, # noqa: ARG002 ) -> str: - query = self.query_pattern.format(prefix=prefix, query=query, functions=functions).strip() - + parts = filter(lambda s: bool(s), map(str.strip, [prefix, self._finalize_search_query(query), functions])) + query = " ".join(parts) query = self.wrap_query_with_meta_info(meta_info=meta_info, query=query) if not_supported_functions: rendered_not_supported = self.render_not_supported_functions(not_supported_functions) @@ -327,15 +339,15 @@ def _generate_from_raw_query_container(self, query_container: RawQueryContainer) def process_raw_log_field(self, field: str, field_type: str) -> Optional[str]: if raw_log_field_pattern := self.raw_log_field_pattern_map.get(field_type): - return raw_log_field_pattern.pattern.format(field=field) + return raw_log_field_pattern.format(field=field) def process_raw_log_field_prefix(self, field: str, source_mapping: SourceMapping) -> Optional[list]: if isinstance(field, list): - list_of_prefix = [] + prefix_list = [] for f in field: - if prepared_prefix := self.process_raw_log_field_prefix(field=f, source_mapping=source_mapping): - list_of_prefix.extend(prepared_prefix) - return list_of_prefix + if _prefix_list := self.process_raw_log_field_prefix(field=f, source_mapping=source_mapping): + prefix_list.extend(_prefix_list) + return prefix_list if raw_log_field_type := source_mapping.raw_log_fields.get(field): return [self.process_raw_log_field(field=field, field_type=raw_log_field_type)] @@ -352,9 +364,11 @@ def generate_raw_log_fields(self, fields: list[Field], source_mapping: SourceMap ) if not mapped_field and self.is_strict_mapping: raise StrictPlatformException(field_name=field.source_name, platform_name=self.details.name) - if field_prefix := self.process_raw_log_field_prefix(field=mapped_field, source_mapping=source_mapping): - defined_raw_log_fields.extend(field_prefix) - return "\n".join(set(defined_raw_log_fields)) + if prefix_list := self.process_raw_log_field_prefix(field=mapped_field, source_mapping=source_mapping): + for prefix in prefix_list: + if prefix not in defined_raw_log_fields: + defined_raw_log_fields.append(prefix) + return "\n".join(defined_raw_log_fields) def _generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str: queries_map = {} diff --git a/uncoder-core/app/translator/core/str_value_manager.py b/uncoder-core/app/translator/core/str_value_manager.py index cd7523c0..74a9f532 100644 --- a/uncoder-core/app/translator/core/str_value_manager.py +++ b/uncoder-core/app/translator/core/str_value_manager.py @@ -16,6 +16,7 @@ limitations under the License. 
----------------------------------------------------------------- """ + from typing import ClassVar, Optional, TypeVar, Union from app.translator.core.custom_types.values import ValueType diff --git a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml index 81d9dcc8..f6b25023 100644 --- a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml +++ b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/default.yml @@ -77,6 +77,7 @@ field_mapping: OldTargetUserName: xdm.target.user.username UserPrincipalName: xdm.source.user.username DestAddress: xdm.target.ipv4 + SubjectAccountName: xdm.source.user.username SubjectUserName: xdm.source.user.username SubjectUserSid: xdm.source.user.identifier SourceAddr: xdm.source.ipv4 @@ -117,7 +118,6 @@ field_mapping: method: xdm.network.http.method notice.user_agent: xdm.network.http.browser hasIdentity: xdm.source.user.identity_type - SubjectAccountName: xdm.source.user.username ComputerName: xdm.source.host.hostname ExternalSeverity: xdm.alert.severity SourceMAC: xdm.source.host.mac_addresses diff --git a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_registry_event.yml b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_registry_event.yml index 86110049..04abb36b 100644 --- a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_registry_event.yml +++ b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_registry_event.yml @@ -28,4 +28,5 @@ field_mapping: ParentIntegrityLevel: causality_actor_process_integrity_level ParentLogonId: causality_actor_process_logon_id ParentProduct: causality_actor_process_signature_product - ParentCompany: causality_actor_process_signature_vendor \ No newline at end of file + ParentCompany: causality_actor_process_signature_vendor + EventType: event_sub_type \ No newline at end of file diff --git a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_security.yml b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_security.yml index 42fe9a54..59a56f71 100644 --- a/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_security.yml +++ b/uncoder-core/app/translator/mappings/platforms/palo_alto_cortex/windows_security.yml @@ -7,7 +7,8 @@ default_log_source: field_mapping: EventID: action_evtlog_event_id Provider_Name: provider_name - + SubjectAccountName: actor_effective_username + raw_log_fields: ParentImage: regex AccessMask: regex diff --git a/uncoder-core/app/translator/mappings/platforms/qradar/windows_security.yml b/uncoder-core/app/translator/mappings/platforms/qradar/windows_security.yml index 20883e94..7d01b97e 100644 --- a/uncoder-core/app/translator/mappings/platforms/qradar/windows_security.yml +++ b/uncoder-core/app/translator/mappings/platforms/qradar/windows_security.yml @@ -130,6 +130,9 @@ field_mapping: NewValue: NewValue Source: Source Status: Status + SubjectAccountName: + - Subject Account Name + - SubjectAccountName SubjectDomainName: SubjectDomainName SubjectUserName: Target Username SubjectUserSid: SubjectUserSid @@ -171,5 +174,4 @@ field_mapping: UserID: UserID ParentProcessName: Parent Process Name Service: Service - hasIdentity: hasIdentity - SubjectAccountName: SubjectAccountName \ No newline at end of file + hasIdentity: hasIdentity \ No newline at end of file diff --git 
a/uncoder-core/app/translator/platforms/athena/renders/athena.py b/uncoder-core/app/translator/platforms/athena/renders/athena.py index a717d94f..a62e5b00 100644 --- a/uncoder-core/app/translator/platforms/athena/renders/athena.py +++ b/uncoder-core/app/translator/platforms/athena/renders/athena.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ + from app.translator.core.models.platform_details import PlatformDetails from app.translator.managers import render_manager from app.translator.platforms.athena.const import athena_details @@ -35,6 +36,9 @@ class AthenaQueryRender(SqlQueryRender): or_token = "OR" field_value_map = AthenaFieldValue(or_token=or_token) - query_pattern = "{prefix} WHERE {query} {functions}" comment_symbol = "--" is_single_line_comment = True + + @staticmethod + def _finalize_search_query(query: str) -> str: + return f"WHERE {query}" if query else "" diff --git a/uncoder-core/app/translator/platforms/base/aql/renders/aql.py b/uncoder-core/app/translator/platforms/base/aql/renders/aql.py index 6792d900..05826d08 100644 --- a/uncoder-core/app/translator/platforms/base/aql/renders/aql.py +++ b/uncoder-core/app/translator/platforms/base/aql/renders/aql.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ + from typing import Union from app.translator.const import DEFAULT_VALUE_TYPE @@ -127,7 +128,6 @@ class AQLQueryRender(PlatformQueryRender): not_token = "NOT" field_value_map = AQLFieldValue(or_token=or_token) - query_pattern = "{prefix} AND {query} {functions}" def generate_prefix(self, log_source_signature: AQLLogSourceSignature, functions_prefix: str = "") -> str: # noqa: ARG002 table = str(log_source_signature) @@ -136,3 +136,7 @@ def generate_prefix(self, log_source_signature: AQLLogSourceSignature, functions def wrap_with_comment(self, value: str) -> str: return f"/* {value} */" + + @staticmethod + def _finalize_search_query(query: str) -> str: + return f"AND {query}" if query else "" diff --git a/uncoder-core/app/translator/platforms/base/lucene/renders/lucene.py b/uncoder-core/app/translator/platforms/base/lucene/renders/lucene.py index 70760930..b5994499 100644 --- a/uncoder-core/app/translator/platforms/base/lucene/renders/lucene.py +++ b/uncoder-core/app/translator/platforms/base/lucene/renders/lucene.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ + from typing import Optional, Union from app.translator.const import DEFAULT_VALUE_TYPE @@ -106,8 +107,6 @@ class LuceneQueryRender(PlatformQueryRender): and_token = "AND" not_token = "NOT" - query_pattern = "{query} {functions}" - comment_symbol = "//" is_single_line_comment = True diff --git a/uncoder-core/app/translator/platforms/base/spl/renders/spl.py b/uncoder-core/app/translator/platforms/base/spl/renders/spl.py index 995adf54..b2c12068 100644 --- a/uncoder-core/app/translator/platforms/base/spl/renders/spl.py +++ b/uncoder-core/app/translator/platforms/base/spl/renders/spl.py @@ -16,6 +16,7 @@ limitations under the License. 
----------------------------------------------------------------- """ + from typing import Union from app.translator.const import DEFAULT_VALUE_TYPE @@ -78,7 +79,6 @@ class SplQueryRender(PlatformQueryRender): and_token = "AND" not_token = "NOT" - query_pattern = "{prefix} {query} {functions}" comment_symbol = "```" def wrap_with_comment(self, value: str) -> str: diff --git a/uncoder-core/app/translator/platforms/base/sql/renders/sql.py b/uncoder-core/app/translator/platforms/base/sql/renders/sql.py index ebcb21af..43904a1e 100644 --- a/uncoder-core/app/translator/platforms/base/sql/renders/sql.py +++ b/uncoder-core/app/translator/platforms/base/sql/renders/sql.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ + from typing import Union from app.translator.const import DEFAULT_VALUE_TYPE @@ -76,10 +77,13 @@ class SqlQueryRender(PlatformQueryRender): and_token = "AND" not_token = "NOT" - query_pattern = "{prefix} WHERE {query} {functions}" comment_symbol = "--" is_single_line_comment = True def generate_prefix(self, log_source_signature: LogSourceSignature, functions_prefix: str = "") -> str: # noqa: ARG002 table = str(log_source_signature) if str(log_source_signature) else "eventlog" return f"SELECT * FROM {table}" + + @staticmethod + def _finalize_search_query(query: str) -> str: + return f"WHERE {query}" if query else "" diff --git a/uncoder-core/app/translator/platforms/chronicle/renders/chronicle.py b/uncoder-core/app/translator/platforms/chronicle/renders/chronicle.py index 63f75608..4101b825 100644 --- a/uncoder-core/app/translator/platforms/chronicle/renders/chronicle.py +++ b/uncoder-core/app/translator/platforms/chronicle/renders/chronicle.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ + from typing import Union from app.translator.const import DEFAULT_VALUE_TYPE @@ -109,6 +110,5 @@ class ChronicleQueryRender(PlatformQueryRender): not_token = "not" field_value_map = ChronicleFieldValue(or_token=or_token) - query_pattern = "{query} {functions}" comment_symbol = "//" is_single_line_comment = True diff --git a/uncoder-core/app/translator/platforms/crowdstrike/renders/crowdstrike.py b/uncoder-core/app/translator/platforms/crowdstrike/renders/crowdstrike.py index 17ae1a15..8c6630e9 100644 --- a/uncoder-core/app/translator/platforms/crowdstrike/renders/crowdstrike.py +++ b/uncoder-core/app/translator/platforms/crowdstrike/renders/crowdstrike.py @@ -16,6 +16,7 @@ limitations under the License. 
----------------------------------------------------------------- """ + from app.translator.core.models.platform_details import PlatformDetails from app.translator.managers import render_manager from app.translator.platforms.base.spl.renders.spl import SplFieldValue, SplQueryRender @@ -31,14 +32,13 @@ class CrowdStrikeFieldValue(SplFieldValue): @render_manager.register class CrowdStrikeQueryRender(SplQueryRender): details: PlatformDetails = crowdstrike_query_details - query_pattern = "{prefix} {query} {functions}" mappings: CrowdstrikeMappings = crowdstrike_mappings - platform_functions: CrowdStrikeFunctions = crowd_strike_functions + platform_functions: CrowdStrikeFunctions = None or_token = "OR" field_value_map = CrowdStrikeFieldValue(or_token=or_token) comment_symbol = "`" - def __init__(self): - super().__init__() - self.platform_functions.manager.post_init_configure(self) + def init_platform_functions(self) -> None: + self.platform_functions = crowd_strike_functions + self.platform_functions.platform_query_render = self diff --git a/uncoder-core/app/translator/platforms/elasticsearch/renders/detection_rule.py b/uncoder-core/app/translator/platforms/elasticsearch/renders/detection_rule.py index 8e1e9aec..09fad79b 100644 --- a/uncoder-core/app/translator/platforms/elasticsearch/renders/detection_rule.py +++ b/uncoder-core/app/translator/platforms/elasticsearch/renders/detection_rule.py @@ -51,7 +51,6 @@ class ElasticSearchRuleRender(ElasticSearchQueryRender): not_token = "NOT" field_value_map = ElasticSearchRuleFieldValue(or_token=or_token) - query_pattern = "{prefix} {query} {functions}" def __create_mitre_threat(self, mitre_attack: dict) -> Union[list, list[dict]]: if not mitre_attack.get("techniques"): diff --git a/uncoder-core/app/translator/platforms/elasticsearch/renders/elast_alert.py b/uncoder-core/app/translator/platforms/elasticsearch/renders/elast_alert.py index ba1bb93b..104b8ecc 100644 --- a/uncoder-core/app/translator/platforms/elasticsearch/renders/elast_alert.py +++ b/uncoder-core/app/translator/platforms/elasticsearch/renders/elast_alert.py @@ -16,6 +16,7 @@ limitations under the License. 
----------------------------------------------------------------- """ + from typing import Optional from app.translator.core.custom_types.meta_info import SeverityType @@ -49,7 +50,6 @@ class ElastAlertRuleRender(ElasticSearchQueryRender): not_token = "NOT" field_value_map = ElasticAlertRuleFieldValue(or_token=or_token) - query_pattern = "{prefix} {query} {functions}" def finalize_query( self, diff --git a/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py b/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py index bef9392b..65ca0b07 100644 --- a/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py +++ b/uncoder-core/app/translator/platforms/forti_siem/renders/forti_siem_rule.py @@ -18,6 +18,7 @@ from typing import Optional, Union from app.translator.const import DEFAULT_VALUE_TYPE +from app.translator.core.context_vars import return_only_first_query_ctx_var from app.translator.core.custom_types.meta_info import SeverityType from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType, OperatorType from app.translator.core.custom_types.values import ValueType @@ -192,7 +193,6 @@ class FortiSiemRuleRender(PlatformQueryRender): not_token = None group_token = "(%s)" - query_pattern = "{prefix} {query}" field_value_map = FortiSiemFieldValue(or_token=or_token) @@ -273,6 +273,8 @@ def _generate_from_tokenized_query_container(self, query_container: TokenizedQue source_mapping=source_mapping, fields=mapped_fields_set, ) + if return_only_first_query_ctx_var.get() is True: + return finalized_query queries_map[source_mapping.source_id] = finalized_query return self.finalize(queries_map) @@ -301,7 +303,7 @@ def finalize_query( self, prefix: str, query: str, - functions: str, # noqa: ARG002 + functions: str, meta_info: Optional[MetaInfoContainer] = None, source_mapping: Optional[SourceMapping] = None, # noqa: ARG002 not_supported_functions: Optional[list] = None, @@ -309,7 +311,7 @@ def finalize_query( *args, # noqa: ARG002 **kwargs, # noqa: ARG002 ) -> str: - query = self.query_pattern.format(prefix=prefix, query=query).strip() + query = super().finalize_query(prefix=prefix, query=query, functions=functions) rule = FORTI_SIEM_RULE.replace("", self.generate_rule_header(meta_info)) title = meta_info.title or _AUTOGENERATED_TEMPLATE rule = rule.replace("", self.generate_rule_name(title)) diff --git a/uncoder-core/app/translator/platforms/hunters/renders/hunters.py b/uncoder-core/app/translator/platforms/hunters/renders/hunters.py index 1dc54e94..0348bfb0 100644 --- a/uncoder-core/app/translator/platforms/hunters/renders/hunters.py +++ b/uncoder-core/app/translator/platforms/hunters/renders/hunters.py @@ -16,6 +16,7 @@ limitations under the License. 
----------------------------------------------------------------- """ + from app.translator.core.models.platform_details import PlatformDetails from app.translator.managers import render_manager from app.translator.platforms.base.sql.renders.sql import SqlFieldValue, SqlQueryRender @@ -35,4 +36,7 @@ class HuntersQueryRender(SqlQueryRender): or_token = "OR" field_value_map = HuntersFieldValue(or_token=or_token) - query_pattern = "{prefix} WHERE {query} {functions}" + + @staticmethod + def _finalize_search_query(query: str) -> str: + return f"WHERE {query}" if query else "" diff --git a/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py b/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py index 9be24b73..624fa3d7 100644 --- a/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py +++ b/uncoder-core/app/translator/platforms/logrhythm_axon/renders/logrhythm_axon_query.py @@ -16,9 +16,11 @@ limitations under the License. ----------------------------------------------------------------- """ + from typing import Union from app.translator.const import DEFAULT_VALUE_TYPE +from app.translator.core.context_vars import return_only_first_query_ctx_var from app.translator.core.custom_types.tokens import LogicalOperatorType from app.translator.core.custom_types.values import ValueType from app.translator.core.exceptions.core import StrictPlatformException @@ -203,13 +205,16 @@ class LogRhythmAxonQueryRender(PlatformQueryRender): not_token = "NOT" field_value_map = LogRhythmAxonFieldValue(or_token=or_token) - query_pattern = "{prefix} AND {query}" mappings: LogRhythmAxonMappings = logrhythm_axon_mappings comment_symbol = "//" is_single_line_comment = True is_strict_mapping = True + @staticmethod + def _finalize_search_query(query: str) -> str: + return f"AND {query}" if query else "" + def generate_prefix(self, log_source_signature: LogSourceSignature, functions_prefix: str = "") -> str: # noqa: ARG002 return str(log_source_signature) @@ -262,6 +267,8 @@ def _generate_from_tokenized_query_container(self, query_container: TokenizedQue meta_info=query_container.meta_info, source_mapping=source_mapping, ) + if return_only_first_query_ctx_var.get() is True: + return finalized_query queries_map[source_mapping.source_id] = finalized_query return self.finalize(queries_map) diff --git a/uncoder-core/app/translator/platforms/logscale/renders/logscale.py b/uncoder-core/app/translator/platforms/logscale/renders/logscale.py index a4e529ed..9cb7cf05 100644 --- a/uncoder-core/app/translator/platforms/logscale/renders/logscale.py +++ b/uncoder-core/app/translator/platforms/logscale/renders/logscale.py @@ -16,6 +16,7 @@ limitations under the License. 
----------------------------------------------------------------- """ + from typing import Optional, Union from app.translator.const import DEFAULT_VALUE_TYPE @@ -95,18 +96,17 @@ def keywords(self, field: str, value: DEFAULT_VALUE_TYPE) -> str: class LogScaleQueryRender(PlatformQueryRender): details: PlatformDetails = logscale_query_details mappings: LogScaleMappings = logscale_mappings - platform_functions: LogScaleFunctions = log_scale_functions + platform_functions: LogScaleFunctions = None or_token = "or" and_token = "" not_token = "not" field_value_map = LogScaleFieldValue(or_token=or_token) - query_pattern = "{prefix} {query} {functions}" - def __init__(self): - super().__init__() - self.platform_functions.manager.post_init_configure(self) + def init_platform_functions(self) -> None: + self.platform_functions = log_scale_functions + self.platform_functions.platform_query_render = self def wrap_with_comment(self, value: str) -> str: return f"/* {value} */" @@ -122,10 +122,7 @@ def finalize_query( *args, # noqa: ARG002 **kwargs, # noqa: ARG002 ) -> str: - if prefix: - query = self.query_pattern.format(prefix=prefix, query=query, functions=functions) - else: - query = f"{query} {functions.lstrip()}" + query = super().finalize_query(prefix=prefix, query=query, functions=functions) query = self.wrap_query_with_meta_info(meta_info=meta_info, query=query) if not_supported_functions: rendered_not_supported = self.render_not_supported_functions(not_supported_functions) diff --git a/uncoder-core/app/translator/platforms/microsoft/renders/microsoft_sentinel.py b/uncoder-core/app/translator/platforms/microsoft/renders/microsoft_sentinel.py index cb32443a..3153f8d4 100644 --- a/uncoder-core/app/translator/platforms/microsoft/renders/microsoft_sentinel.py +++ b/uncoder-core/app/translator/platforms/microsoft/renders/microsoft_sentinel.py @@ -16,6 +16,7 @@ limitations under the License. 
----------------------------------------------------------------- """ + from typing import Union from app.translator.const import DEFAULT_VALUE_TYPE @@ -121,22 +122,25 @@ def is_not_none(self, field: str, value: Union[str, int]) -> str: # noqa: ARG00 @render_manager.register class MicrosoftSentinelQueryRender(PlatformQueryRender): details: PlatformDetails = microsoft_sentinel_query_details - platform_functions: MicrosoftFunctions = microsoft_sentinel_functions + platform_functions: MicrosoftFunctions = None or_token = "or" and_token = "and" not_token = "not" field_value_map = MicrosoftSentinelFieldValue(or_token=or_token) - query_pattern = "{prefix} | where {query}{functions}" mappings: MicrosoftSentinelMappings = microsoft_sentinel_mappings comment_symbol = "//" is_single_line_comment = True - def __init__(self): - super().__init__() - self.platform_functions.manager.post_init_configure(self) + def init_platform_functions(self) -> None: + self.platform_functions = microsoft_sentinel_functions + self.platform_functions.platform_query_render = self def generate_prefix(self, log_source_signature: LogSourceSignature, functions_prefix: str = "") -> str: # noqa: ARG002 return str(log_source_signature) + + @staticmethod + def _finalize_search_query(query: str) -> str: + return f"| where {query}" if query else "" diff --git a/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py b/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py index 557f911e..3f68e6c6 100644 --- a/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py +++ b/uncoder-core/app/translator/platforms/opensearch/renders/opensearch_rule.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ + import copy import json from typing import Optional, Union @@ -49,7 +50,6 @@ class OpenSearchRuleRender(OpenSearchQueryRender): not_token = "NOT" field_value_map = OpenSearchRuleFieldValue(or_token=or_token) - query_pattern = "{prefix} {query} {functions}" def __init__(self): super().__init__() diff --git a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py index 0af8206a..70f59fd6 100644 --- a/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py +++ b/uncoder-core/app/translator/platforms/palo_alto/renders/cortex_xsiam.py @@ -16,10 +16,14 @@ limitations under the License. 
----------------------------------------------------------------- """ -from typing import Optional, Union + +from typing import ClassVar, Optional, Union from app.translator.const import DEFAULT_VALUE_TYPE from app.translator.core.custom_types.values import ValueType +from app.translator.core.mapping import SourceMapping +from app.translator.core.models.field import FieldValue, Keyword +from app.translator.core.models.identifier import Identifier from app.translator.core.models.platform_details import PlatformDetails from app.translator.core.render import BaseQueryFieldValue, PlatformQueryRender from app.translator.core.str_value_manager import StrValue @@ -33,6 +37,16 @@ ) from app.translator.platforms.palo_alto.str_value_manager import cortex_xql_str_value_manager +SOURCE_MAPPING_TO_FIELD_VALUE_MAP = { + "windows_registry_event": { + "EventType": { + "SetValue": "REGISTRY_SET_VALUE", + "DeleteValue": "REGISTRY_DELETE_VALUE", + "CreateKey": "REGISTRY_CREATE_KEY", + } + } +} + class CortexXQLFieldValue(BaseQueryFieldValue): details: PlatformDetails = cortex_xql_query_details @@ -136,25 +150,24 @@ class CortexXQLQueryRender(PlatformQueryRender): details: PlatformDetails = cortex_xql_query_details mappings: CortexXQLMappings = cortex_xql_mappings is_strict_mapping = True - raw_log_field_pattern_map = { - 'regex': '| alter {field} = regextract(to_json_string(action_evtlog_data_fields)->{field}{{}}, "\\"(.*)\\"")', - 'object': '| alter {field_name} = json_extract_scalar({field_object} , "$.{field_path}")', - 'list': '| alter {field_name} = arraystring(json_extract_array({field_object} , "$.{field_path}")," ")' + raw_log_field_pattern_map: ClassVar[dict[str, str]] = { + "regex": '| alter {field} = regextract(to_json_string(action_evtlog_data_fields)->{field}{{}}, "\\"(.*)\\"")', + "object": '| alter {field_name} = json_extract_scalar({field_object} , "$.{field_path}")', + "list": '| alter {field_name} = arraystring(json_extract_array({field_object} , "$.{field_path}")," ")', } - platform_functions: CortexXQLFunctions = cortex_xql_functions + platform_functions: CortexXQLFunctions = None or_token = "or" and_token = "and" not_token = "not" field_value_map = CortexXQLFieldValue(or_token=or_token) - query_pattern = "{prefix} | filter {query} {functions}" comment_symbol = "//" is_single_line_comment = False - def __init__(self): - super().__init__() - self.platform_functions.manager.post_init_configure(self) + def init_platform_functions(self) -> None: + self.platform_functions = cortex_xql_functions + self.platform_functions.platform_query_render = self def process_raw_log_field(self, field: str, field_type: str) -> Optional[str]: raw_log_field_pattern = self.raw_log_field_pattern_map.get(field_type) @@ -171,3 +184,20 @@ def process_raw_log_field(self, field: str, field_type: str) -> Optional[str]: def generate_prefix(self, log_source_signature: CortexXQLLogSourceSignature, functions_prefix: str = "") -> str: functions_prefix = f"{functions_prefix} | " if functions_prefix else "" return f"{functions_prefix}{log_source_signature}" + + def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapping: SourceMapping) -> str: + if isinstance(token, FieldValue): + field_name = token.field.source_name + if values_map := SOURCE_MAPPING_TO_FIELD_VALUE_MAP.get(source_mapping.source_id, {}).get(field_name): + values_to_update = [] + for token_value in token.values: + mapped_value: str = values_map.get(token_value, token_value) + values_to_update.append( + StrValue(value=mapped_value, 
split_value=mapped_value.split()) if mapped_value else token_value + ) + token.value = values_to_update + return super().apply_token(token=token, source_mapping=source_mapping) + + @staticmethod + def _finalize_search_query(query: str) -> str: + return f"| filter {query}" if query else "" diff --git a/uncoder-core/app/translator/platforms/sigma/escape_manager.py b/uncoder-core/app/translator/platforms/sigma/escape_manager.py index b656c4ad..c0efb332 100644 --- a/uncoder-core/app/translator/platforms/sigma/escape_manager.py +++ b/uncoder-core/app/translator/platforms/sigma/escape_manager.py @@ -7,7 +7,7 @@ class SigmaEscapeManager(EscapeManager): escape_map: ClassVar[dict[str, list[EscapeDetails]]] = { - ValueType.value: [EscapeDetails(pattern=r'([*?\\])', escape_symbols=r"\\\1")], + ValueType.value: [EscapeDetails(pattern=r"([*?\\])", escape_symbols=r"\\\1")] } diff --git a/uncoder-core/app/translator/platforms/sigma/models/compiler.py b/uncoder-core/app/translator/platforms/sigma/models/compiler.py index 5969d06c..2c0b6472 100644 --- a/uncoder-core/app/translator/platforms/sigma/models/compiler.py +++ b/uncoder-core/app/translator/platforms/sigma/models/compiler.py @@ -15,6 +15,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ----------------------------------------------------------------- """ + from typing import Union from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType diff --git a/uncoder-core/app/translator/platforms/sigma/models/modifiers.py b/uncoder-core/app/translator/platforms/sigma/models/modifiers.py index 7ae75726..446eb310 100644 --- a/uncoder-core/app/translator/platforms/sigma/models/modifiers.py +++ b/uncoder-core/app/translator/platforms/sigma/models/modifiers.py @@ -26,7 +26,7 @@ def map_modifier(self, modifier: str) -> Identifier: return Identifier(token_type=self.modifier_map.get(modifier, modifier)) def modifier_all(self, field_name: str, modifier: str, values: Union[str, list[str]]) -> Union[tuple, list]: - if (isinstance(values, list) and len(values) == 1) or isinstance(values, str): + if (isinstance(values, list) and len(values) == 1) or isinstance(values, (str, int)): operator = self.map_modifier(modifier=modifier) values = self.convert_values_to_str_values(values, modifier) return (FieldValue(source_name=field_name, operator=operator, value=values),) @@ -80,8 +80,7 @@ def apply_modifier(self, field_name: str, modifier: list, values: Union[int, str @staticmethod def convert_values_to_str_values( - values: Union[int, str, list[Union[int, str]]], - operator: str + values: Union[int, str, list[Union[int, str]]], operator: str ) -> Union[StrValue, list[StrValue]]: if not isinstance(values, list): values = [values] diff --git a/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py b/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py index c5f1293b..9f2fd7ab 100644 --- a/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py +++ b/uncoder-core/app/translator/platforms/sigma/parsers/sigma.py @@ -17,14 +17,13 @@ ----------------------------------------------------------------- """ - from typing import Union from app.translator.core.exceptions.core import SigmaRuleValidationException from app.translator.core.mixins.rule import YamlRuleMixin -from app.translator.core.models.field import FieldValue, Field -from app.translator.core.models.query_container import MetaInfoContainer, TokenizedQueryContainer, RawQueryContainer +from app.translator.core.models.field import Field, FieldValue from 
app.translator.core.models.platform_details import PlatformDetails +from app.translator.core.models.query_container import MetaInfoContainer, RawQueryContainer, TokenizedQueryContainer from app.translator.core.parser import QueryParser from app.translator.core.tokenizer import QueryTokenizer from app.translator.managers import parser_manager @@ -50,12 +49,12 @@ def __parse_false_positives(false_positives: Union[str, list[str], None]) -> lis return false_positives def _get_meta_info( - self, - rule: dict, - source_mapping_ids: list[str], - parsed_logsources: dict, - fields_tokens: list[Field], - sigma_fields_tokens: Union[list[Field], None] = None + self, + rule: dict, + source_mapping_ids: list[str], + parsed_logsources: dict, + fields_tokens: list[Field], + sigma_fields_tokens: Union[list[Field], None] = None, ) -> MetaInfoContainer: return MetaInfoContainer( title=rule.get("title"), @@ -73,7 +72,7 @@ def _get_meta_info( tags=sorted(set(rule.get("tags", []))), false_positives=self.__parse_false_positives(rule.get("falsepositives")), source_mapping_ids=source_mapping_ids, - parsed_logsources=parsed_logsources + parsed_logsources=parsed_logsources, ) def __validate_rule(self, rule: dict): @@ -97,10 +96,11 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain source_mappings = self.mappings.get_suitable_source_mappings(field_names=field_names, **log_sources) QueryTokenizer.set_field_tokens_generic_names_map(field_tokens, source_mappings, self.mappings.default_mapping) sigma_fields_tokens = None - if sigma_fields := sigma_rule.get('fields'): + if sigma_fields := sigma_rule.get("fields"): sigma_fields_tokens = [Field(source_name=field) for field in sigma_fields] - QueryTokenizer.set_field_tokens_generic_names_map(sigma_fields_tokens, source_mappings, - self.mappings.default_mapping) + QueryTokenizer.set_field_tokens_generic_names_map( + sigma_fields_tokens, source_mappings, self.mappings.default_mapping + ) return TokenizedQueryContainer( tokens=tokens, meta_info=self._get_meta_info( @@ -108,6 +108,6 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain source_mapping_ids=[source_mapping.source_id for source_mapping in source_mappings], sigma_fields_tokens=sigma_fields_tokens, parsed_logsources=log_sources, - fields_tokens=field_tokens - ) + fields_tokens=field_tokens, + ), ) diff --git a/uncoder-core/app/translator/platforms/sigma/renders/sigma.py b/uncoder-core/app/translator/platforms/sigma/renders/sigma.py index b0e49ee1..dc33a507 100644 --- a/uncoder-core/app/translator/platforms/sigma/renders/sigma.py +++ b/uncoder-core/app/translator/platforms/sigma/renders/sigma.py @@ -25,8 +25,8 @@ from app.translator.core.custom_types.tokens import OperatorType from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping from app.translator.core.models.field import FieldValue, Keyword -from app.translator.core.models.query_container import TokenizedQueryContainer, RawQueryContainer from app.translator.core.models.platform_details import PlatformDetails +from app.translator.core.models.query_container import RawQueryContainer, TokenizedQueryContainer from app.translator.core.render import QueryRender from app.translator.core.str_value_manager import StrValue from app.translator.managers import render_manager diff --git a/uncoder-core/app/translator/platforms/sigma/str_value_manager.py b/uncoder-core/app/translator/platforms/sigma/str_value_manager.py index c73115e7..7b1ccee1 100644 --- 
a/uncoder-core/app/translator/platforms/sigma/str_value_manager.py +++ b/uncoder-core/app/translator/platforms/sigma/str_value_manager.py @@ -16,6 +16,7 @@ limitations under the License. ----------------------------------------------------------------- """ + from app.translator.core.str_value_manager import ( ReAnySymbol, ReCaretSymbol, @@ -57,7 +58,7 @@ "}": ReRightCurlyBracket, "|": ReOrOperator, ",": ReCommaSymbol, - "-": ReHyphenSymbol + "-": ReHyphenSymbol, } diff --git a/uncoder-core/app/translator/platforms/sigma/tokenizer.py b/uncoder-core/app/translator/platforms/sigma/tokenizer.py index bb1736dd..0893588f 100644 --- a/uncoder-core/app/translator/platforms/sigma/tokenizer.py +++ b/uncoder-core/app/translator/platforms/sigma/tokenizer.py @@ -28,6 +28,7 @@ class Selection: token_type = "selection" + def __init__(self, name): self.name = name @@ -142,10 +143,12 @@ def get_missed_parentheses(tokens: list[Union[Selection, Identifier]]) -> list[i missed_indices.append(index + 1) return missed_indices - def __add_parentheses_after_and_not(self, tokens: list[Union[Selection, Identifier]]) -> list[Union[Selection, Identifier]]: + def __add_parentheses_after_and_not( + self, tokens: list[Union[Selection, Identifier]] + ) -> list[Union[Selection, Identifier]]: indices = self.get_missed_parentheses(tokens=tokens) for index in reversed(indices): - tokens.insert(index+1, Identifier(token_type=GroupType.R_PAREN)) + tokens.insert(index + 1, Identifier(token_type=GroupType.R_PAREN)) tokens.insert(index, Identifier(token_type=GroupType.L_PAREN)) return tokens
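Note on the recurring render change above: the class-level query_pattern template strings are dropped in favour of a _finalize_search_query hook, and QueryRender.finalize_query now joins whichever of prefix, search clause and functions are non-empty. The following is a minimal sketch of that flow, not the real implementation — only the two methods mirror the diff, and the surrounding QueryRender/PlatformQueryRender machinery (mappings, functions, meta info) is omitted:

class QueryRenderSketch:
    @staticmethod
    def _finalize_search_query(query: str) -> str:
        # Base hook: the search part is passed through unchanged.
        return query

    def finalize_query(self, prefix: str, query: str, functions: str) -> str:
        # Empty parts are filtered out, so a missing prefix or functions string
        # no longer leaves stray separators in the rendered query.
        parts = filter(lambda s: bool(s), map(str.strip, [prefix, self._finalize_search_query(query), functions]))
        return " ".join(parts)


class SqlLikeRenderSketch(QueryRenderSketch):
    @staticmethod
    def _finalize_search_query(query: str) -> str:
        # SQL-flavoured platforms (Athena, Hunters, base SQL) prepend WHERE;
        # AQL prepends "AND", Microsoft Sentinel "| where", Cortex XQL "| filter".
        return f"WHERE {query}" if query else ""


print(SqlLikeRenderSketch().finalize_query("SELECT * FROM eventlog", "EventID = 1", ""))
# -> SELECT * FROM eventlog WHERE EventID = 1

The joining logic stays in one place while each platform only decides how to introduce its search clause.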
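A second addition worth illustrating is the FieldValue.value setter, which the Cortex XQL render uses in apply_token to remap Sigma windows_registry_event EventType values via SOURCE_MAPPING_TO_FIELD_VALUE_MAP. A rough sketch under the assumption of a heavily simplified stub model (the real FieldValue wraps values in StrValue and routes them through __add_value):

from typing import Union


class FieldValueStub:
    # Stand-in for app.translator.core.models.field.FieldValue, simplified for illustration.
    def __init__(self, source_name: str, values: Union[str, list]):
        self.source_name = source_name
        self.values = []
        self.value = values  # goes through the setter below

    @property
    def value(self) -> Union[str, list]:
        return self.values[0] if len(self.values) == 1 else self.values

    @value.setter
    def value(self, new_value: Union[str, list]) -> None:
        # Mirrors the added setter: reset the list, then re-add the new value(s).
        self.values = []
        if isinstance(new_value, (list, tuple)):
            self.values.extend(new_value)
        else:
            self.values.append(new_value)


EVENT_TYPE_MAP = {
    "SetValue": "REGISTRY_SET_VALUE",
    "DeleteValue": "REGISTRY_DELETE_VALUE",
    "CreateKey": "REGISTRY_CREATE_KEY",
}

token = FieldValueStub("EventType", ["SetValue", "CreateKey"])
token.value = [EVENT_TYPE_MAP.get(v, v) for v in token.values]
print(token.values)  # ['REGISTRY_SET_VALUE', 'REGISTRY_CREATE_KEY']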