Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Gis 8141#168

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub?Sign in to your account

Merged
alexvolha merged 2 commits into main from gis-8141
Jul 4, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 28 additions & 20 deletions — uncoder-core/app/translator/core/render.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -397,37 +397,45 @@ def generate_raw_log_fields(self, fields: list[Field], source_mapping: SourceMap
defined_raw_log_fields.append(prefix)
return "\n".join(defined_raw_log_fields)

def _generate_from_tokenized_query_container_by_source_mapping(
    self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
) -> str:
    """Render one finalized query for a single source mapping.

    Builds the platform prefix (appending rendered raw-log field
    definitions when the mapping declares ``raw_log_fields``), renders
    the token stream, and delegates assembly to ``finalize_query``.
    """
    functions_render = self.generate_functions(query_container.functions.functions, source_mapping)
    query_prefix = self.generate_prefix(source_mapping.log_source_signature, functions_render.rendered_prefix)

    if source_mapping.raw_log_fields:
        raw_log_block = self.generate_raw_log_fields(
            fields=query_container.meta_info.query_fields, source_mapping=source_mapping
        )
        query_prefix = f"{query_prefix}\n{raw_log_block}"

    rendered_query = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
    # Functions unsupported by the platform plus those the renderer could not translate.
    unsupported = query_container.functions.not_supported + functions_render.not_supported
    return self.finalize_query(
        prefix=query_prefix,
        query=rendered_query,
        functions=functions_render.rendered,
        not_supported_functions=unsupported,
        meta_info=query_container.meta_info,
        source_mapping=source_mapping,
    )

def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
    """Render the query for every resolved source mapping and finalize the set.

    Defect fixed: this span was diff-extraction residue — lines of the
    pre-refactor implementation were interleaved with the refactored one,
    leaving invalid Python. Reconstructed as the coherent refactored method.

    Mappings that raise ``StrictPlatformException`` are skipped and their
    errors collected; the first collected error is re-raised only when no
    mapping produced a query. When ``return_only_first_query_ctx_var`` is
    set, the first successful render is returned immediately.
    """
    queries_map = {}
    errors = []
    source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)

    for source_mapping in source_mappings:
        try:
            finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
                query_container, source_mapping
            )
        except StrictPlatformException as err:
            # Strict-mode mapping failures are collected, not fatal per mapping.
            errors.append(err)
            continue
        if return_only_first_query_ctx_var.get() is True:
            return finalized_query
        queries_map[source_mapping.source_id] = finalized_query

    # Only raise when every mapping failed; partial success wins.
    if not queries_map and errors:
        raise errors[0]
    return self.finalize(queries_map)
Expand Down
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -19,7 +19,6 @@

from app.translator.const import DEFAULT_VALUE_TYPE
from app.translator.core.const import TOKEN_TYPE
from app.translator.core.context_vars import return_only_first_query_ctx_var
from app.translator.core.custom_types.meta_info import SeverityType
from app.translator.core.custom_types.tokens import GroupType, LogicalOperatorType, OperatorType
from app.translator.core.custom_types.values import ValueType
Expand DownExpand Up@@ -244,40 +243,33 @@ def __replace_not_tokens(self, tokens: list[TOKEN_TYPE]) -> list[TOKEN_TYPE]:

return tokens

def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
    """Render one query per resolved source mapping and finalize the result map.

    For each mapping: maps every ``FieldValue`` token's field, rewrites
    event-type values in place when the mapped fields include the event
    type field, and suppresses the log-source prefix in that case.
    """
    queries_map = {}
    source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)

    for source_mapping in source_mappings:
        is_event_type_set = False
        field_values = [token for token in query_container.tokens if isinstance(token, FieldValue)]
        mapped_fields_set = set()
        for field_value in field_values:
            mapped_fields = self.map_field(field_value.field, source_mapping)
            mapped_fields_set = mapped_fields_set.union(set(mapped_fields))
            if _EVENT_TYPE_FIELD in mapped_fields:
                is_event_type_set = True
                # NOTE(review): mutates the shared token's values per mapping —
                # presumably intentional, but confirm tokens are safe to reuse
                # across subsequent mappings.
                self.__update_event_type_values(field_value, source_mapping.source_id)

        tokens = self.__replace_not_tokens(query_container.tokens)
        result = self.generate_query(tokens=tokens, source_mapping=source_mapping)
        # Prefix is dropped when the event type is already constrained by a field.
        prefix = "" if is_event_type_set else self.generate_prefix(source_mapping.log_source_signature)
        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
        finalized_query = self.finalize_query(
            prefix=prefix,
            query=result,
            functions=rendered_functions.rendered,
            not_supported_functions=not_supported_functions,
            meta_info=query_container.meta_info,
            source_mapping=source_mapping,
            fields=mapped_fields_set,
        )
        if return_only_first_query_ctx_var.get() is True:
            return finalized_query
        queries_map[source_mapping.source_id] = finalized_query

    return self.finalize(queries_map)
def _generate_from_tokenized_query_container_by_source_mapping(
    self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
) -> str:
    """Render a finalized query for a single source mapping.

    Rewrites event-type field values in place when the mapping resolves a
    field to the event type field, and omits the log-source prefix in
    that case.
    """
    event_type_mapped = False
    mapped_fields_acc = set()
    for token in query_container.tokens:
        if not isinstance(token, FieldValue):
            continue
        fields_for_token = self.map_field(token.field, source_mapping)
        mapped_fields_acc |= set(fields_for_token)
        if _EVENT_TYPE_FIELD in fields_for_token:
            event_type_mapped = True
            self.__update_event_type_values(token, source_mapping.source_id)

    normalized_tokens = self.__replace_not_tokens(query_container.tokens)
    rendered_query = self.generate_query(tokens=normalized_tokens, source_mapping=source_mapping)
    # No prefix needed once the event type is pinned by a mapped field.
    prefix = "" if event_type_mapped else self.generate_prefix(source_mapping.log_source_signature)
    functions_render = self.generate_functions(query_container.functions.functions, source_mapping)
    unsupported = query_container.functions.not_supported + functions_render.not_supported
    return self.finalize_query(
        prefix=prefix,
        query=rendered_query,
        functions=functions_render.rendered,
        not_supported_functions=unsupported,
        meta_info=query_container.meta_info,
        source_mapping=source_mapping,
        fields=mapped_fields_acc,
    )

@staticmethod
def __update_event_type_values(field_value: FieldValue, source_id: str) -> None:
Expand Down
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -20,7 +20,6 @@
from typing import Union

from app.translator.const import DEFAULT_VALUE_TYPE
from app.translator.core.context_vars import return_only_first_query_ctx_var
from app.translator.core.custom_types.tokens import LogicalOperatorType
from app.translator.core.custom_types.values import ValueType
from app.translator.core.exceptions.core import StrictPlatformException
Expand DownExpand Up@@ -242,30 +241,23 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp

return super().apply_token(token, source_mapping)

def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
    """Render one query per resolved source mapping and finalize the result map.

    The log-source prefix is extended with a ``CONTAINS`` clause taken from
    the parsed ``product`` log source when present, else a wildcard word.
    """
    queries_map = {}
    source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)

    for source_mapping in source_mappings:
        prefix = self.generate_prefix(source_mapping.log_source_signature)
        parsed_logsources = query_container.meta_info.parsed_logsources
        if "product" in parsed_logsources:
            prefix = f"{prefix} CONTAINS {parsed_logsources['product'][0]}"
        else:
            prefix = f"{prefix} CONTAINS anything"

        result = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
        rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
        not_supported_functions = query_container.functions.not_supported + rendered_functions.not_supported
        finalized_query = self.finalize_query(
            prefix=prefix,
            query=result,
            functions=rendered_functions.rendered,
            not_supported_functions=not_supported_functions,
            meta_info=query_container.meta_info,
            source_mapping=source_mapping,
        )
        if return_only_first_query_ctx_var.get() is True:
            return finalized_query
        queries_map[source_mapping.source_id] = finalized_query

    return self.finalize(queries_map)
def _generate_from_tokenized_query_container_by_source_mapping(
    self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
) -> str:
    """Render a finalized query for one source mapping.

    Appends a ``CONTAINS`` clause to the log-source prefix: the first
    parsed ``product`` value when available, otherwise the literal
    ``anything``.
    """
    base_prefix = self.generate_prefix(source_mapping.log_source_signature)
    parsed = query_container.meta_info.parsed_logsources
    prefix = (
        f"{base_prefix} CONTAINS {parsed['product'][0]}"
        if "product" in parsed
        else f"{base_prefix} CONTAINS anything"
    )

    rendered_query = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
    functions_render = self.generate_functions(query_container.functions.functions, source_mapping)
    unsupported = query_container.functions.not_supported + functions_render.not_supported
    return self.finalize_query(
        prefix=prefix,
        query=rendered_query,
        functions=functions_render.rendered,
        not_supported_functions=unsupported,
        meta_info=query_container.meta_info,
        source_mapping=source_mapping,
    )
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -16,17 +16,19 @@
limitations under the License.
-----------------------------------------------------------------
"""

from contextlib import suppress
from typing import ClassVar, Optional, Union

from app.translator.const import DEFAULT_VALUE_TYPE
from app.translator.core.context_vars import preset_log_source_str_ctx_var
from app.translator.core.context_vars import preset_log_source_str_ctx_var, return_only_first_query_ctx_var
from app.translator.core.custom_types.tokens import OperatorType
from app.translator.core.custom_types.values import ValueType
from app.translator.core.mapping import SourceMapping
from app.translator.core.exceptions.core import StrictPlatformException
from app.translator.core.mapping import DEFAULT_MAPPING_NAME, SourceMapping
from app.translator.core.models.field import FieldValue, Keyword
from app.translator.core.models.identifier import Identifier
from app.translator.core.models.platform_details import PlatformDetails
from app.translator.core.models.query_container import TokenizedQueryContainer
from app.translator.core.render import BaseFieldFieldRender, BaseFieldValueRender, PlatformQueryRender
from app.translator.core.str_value_manager import StrValue
from app.translator.managers import render_manager
Expand DownExpand Up@@ -71,8 +73,7 @@ def _wrap_str_value(value: str) -> str:
def equal_modifier(self, field: str, value: DEFAULT_VALUE_TYPE) -> str:
if isinstance(value, list):
values = ", ".join(
f"{self._pre_process_value(field, str(v) if isinstance(v, int) else v, ValueType.value, True)}"
for v in value
f"{self._pre_process_value(field, str(v), value_type=ValueType.value, wrap_str=True)}" for v in value
)
return f"{field} in ({values})"

Expand DownExpand Up@@ -222,3 +223,32 @@ def apply_token(self, token: Union[FieldValue, Keyword, Identifier], source_mapp
@staticmethod
def _finalize_search_query(query: str) -> str:
    """Wrap a non-empty search expression in a ``| filter`` pipe stage.

    Returns an empty string unchanged so empty searches emit no stage.
    """
    if not query:
        return ""
    return f"| filter {query}"

def generate_from_tokenized_query_container(self, query_container: TokenizedQueryContainer) -> str:
    """Render the query for every resolved source mapping, with a default-mapping fallback.

    Collects ``StrictPlatformException`` errors per mapping. If the very
    last mapping also fails, no query succeeded so far, and that mapping
    is not itself the default one, a single best-effort retry is made
    with the default mapping. The first collected error is re-raised only
    when nothing succeeded at all.
    """
    queries_map = {}
    errors = []
    source_mappings = self._get_source_mappings(query_container.meta_info.source_mapping_ids)

    last_mapping_index = len(source_mappings) - 1
    for index, source_mapping in enumerate(source_mappings):
        try:
            finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
                query_container, source_mapping
            )
            if return_only_first_query_ctx_var.get() is True:
                return finalized_query
            queries_map[source_mapping.source_id] = finalized_query
        except StrictPlatformException as err:
            errors.append(err)
            # Fallback fires only on the final mapping, only when no query has
            # succeeded yet, and never when the failed mapping IS the default.
            if index != last_mapping_index or source_mapping.source_id == DEFAULT_MAPPING_NAME or queries_map:
                continue

            # Best-effort retry via the default mapping; a second strict failure
            # is swallowed and the original errors remain authoritative.
            with suppress(StrictPlatformException):
                finalized_query = self._generate_from_tokenized_query_container_by_source_mapping(
                    query_container, self.mappings.get_source_mapping(DEFAULT_MAPPING_NAME)
                )
                # NOTE(review): the fallback result is stored under the FAILED
                # mapping's source_id, not DEFAULT_MAPPING_NAME — looks
                # deliberate (preserves the requested key), but confirm.
                queries_map[source_mapping.source_id] = finalized_query

    if not queries_map and errors:
        raise errors[0]
    return self.finalize(queries_map)

[8]ページ先頭

©2009-2025 Movatter.jp