Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

separate field tokens #208

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub?Sign in to your account

Merged
alexvolha merged 1 commit into main from field-tokens-separation
Dec 2, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 4 additions & 0 deletionsuncoder-core/app/translator/core/functions.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -164,6 +164,10 @@ def order_to_render(self) -> dict[str, int]:

return {}

@property
def supported_render_names(self) -> set[str]:
    """Names of the functions this manager can render.

    Derived from the keys of ``self._renders_map`` (presumably function
    name -> render implementation — confirm against the manager class).
    """
    return set(self._renders_map)


classPlatformFunctions:
dir_path:str=None
Expand Down
28 changes: 22 additions & 6 deletionsuncoder-core/app/translator/core/mapping.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -188,13 +188,22 @@ def get_source_mapping(self, source_id: str) -> Optional[SourceMapping]:
defdefault_mapping(self)->SourceMapping:
returnself._source_mappings[DEFAULT_MAPPING_NAME]

defcheck_fields_mapping_existence(self,field_tokens:list[Field],source_mapping:SourceMapping)->list[str]:
defcheck_fields_mapping_existence(
self,
query_field_tokens:list[Field],
function_field_tokens_map:dict[str,list[Field]],
supported_func_render_names:set[str],
source_mapping:SourceMapping,
)->list[str]:
unmapped= []
forfieldinfield_tokens:
generic_field_name=field.get_generic_field_name(source_mapping.source_id)
mapped_field=source_mapping.fields_mapping.get_platform_field_name(generic_field_name=generic_field_name)
ifnotmapped_fieldandfield.source_namenotinunmapped:
unmapped.append(field.source_name)

forfieldinquery_field_tokens:
self._check_field_mapping_existence(field,source_mapping,unmapped)

forfunc_name,function_field_tokensinfunction_field_tokens_map.items():
iffunc_nameinsupported_func_render_names:
forfieldinfunction_field_tokens:
self._check_field_mapping_existence(field,source_mapping,unmapped)

ifself.is_strict_mappingandunmapped:
raiseStrictPlatformException(
Expand All@@ -203,6 +212,13 @@ def check_fields_mapping_existence(self, field_tokens: list[Field], source_mappi

returnunmapped

@staticmethod
def_check_field_mapping_existence(field:Field,source_mapping:SourceMapping,unmapped:list[str])->None:
generic_field_name=field.get_generic_field_name(source_mapping.source_id)
mapped_field=source_mapping.fields_mapping.get_platform_field_name(generic_field_name=generic_field_name)
ifnotmapped_fieldandfield.source_namenotinunmapped:
unmapped.append(field.source_name)

@staticmethod
defmap_field(field:Field,source_mapping:SourceMapping)->list[str]:
generic_field_name=field.get_generic_field_name(source_mapping.source_id)
Expand Down
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -65,6 +65,8 @@ def __init__(
date:Optional[str]=None,
output_table_fields:Optional[list[Field]]=None,
query_fields:Optional[list[Field]]=None,
function_fields:Optional[list[Field]]=None,
function_fields_map:Optional[dict[str,list[Field]]]=None,
license_:Optional[str]=None,
severity:Optional[str]=None,
references:Optional[list[str]]=None,
Expand All@@ -90,6 +92,8 @@ def __init__(
self.date=dateordatetime.now().date().strftime("%Y-%m-%d")
self.output_table_fields=output_table_fieldsor []
self.query_fields=query_fieldsor []
self.function_fields=function_fieldsor []
self.function_fields_map=function_fields_mapor {}
self.license=license_or"DRL 1.1"
self.severity=severityorSeverityType.low
self.references=referencesor []
Expand Down
15 changes: 9 additions & 6 deletionsuncoder-core/app/translator/core/parser.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -65,16 +65,19 @@ def get_query_tokens(self, query: str) -> list[QUERY_TOKEN_TYPE]:
@staticmethod
def get_field_tokens(
    query_tokens: list[QUERY_TOKEN_TYPE], functions: Optional[list[Function]] = None
) -> tuple[list[Field], list[Field], dict[str, list[Field]]]:
    """Separate fields referenced by the query body from fields used by functions.

    Returns a 3-tuple:
      * query field tokens — fields carried by FieldField/FieldValue/FunctionValue tokens,
      * function field tokens — all fields of the given functions, flattened,
      * a map of function name -> that function's field list.
    """
    query_field_tokens: list[Field] = []
    function_field_tokens: list[Field] = []
    function_field_tokens_map: dict[str, list[Field]] = {}
    for token in query_tokens:
        # Only these token types carry field references.
        if isinstance(token, (FieldField, FieldValue, FunctionValue)):
            query_field_tokens.extend(token.fields)
    for func in functions or []:
        function_field_tokens.extend(func.fields)
        function_field_tokens_map[func.name] = func.fields
    return query_field_tokens, function_field_tokens, function_field_tokens_map

def get_source_mappings(
self, field_tokens: list[Field], log_sources: dict[str, list[Union[int, str]]]
Expand Down
8 changes: 6 additions & 2 deletionsuncoder-core/app/translator/core/render.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -428,14 +428,18 @@ def _generate_from_tokenized_query_container_by_source_mapping(
self, query_container: TokenizedQueryContainer, source_mapping: SourceMapping
) -> str:
unmapped_fields = self.mappings.check_fields_mapping_existence(
query_container.meta_info.query_fields, source_mapping
query_container.meta_info.query_fields,
query_container.meta_info.function_fields_map,
self.platform_functions.manager.supported_render_names,
source_mapping,
)
rendered_functions = self.generate_functions(query_container.functions.functions, source_mapping)
prefix = self.generate_prefix(source_mapping.log_source_signature, rendered_functions.rendered_prefix)

if source_mapping.raw_log_fields:
defined_raw_log_fields = self.generate_raw_log_fields(
fields=query_container.meta_info.query_fields, source_mapping=source_mapping
fields=query_container.meta_info.query_fields + query_container.meta_info.function_fields,
source_mapping=source_mapping,
)
prefix += f"\n{defined_raw_log_fields}"
query = self.generate_query(tokens=query_container.tokens, source_mapping=source_mapping)
Expand Down
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -115,9 +115,13 @@ def _parse_query(self, text: str) -> tuple[str, dict[str, Union[list[str], list[
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
    """Tokenize the raw query and fill meta info with field and mapping data."""
    query, log_sources, functions = self._parse_query(raw_query_container.query)
    query_tokens = self.get_query_tokens(query)
    query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
        query_tokens, functions.functions
    )
    # Source mappings must cover fields from both the query body and its functions.
    source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, log_sources)
    meta_info = raw_query_container.meta_info
    meta_info.query_fields = query_field_tokens
    meta_info.function_fields = function_field_tokens
    meta_info.function_fields_map = function_field_tokens_map
    meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -48,9 +48,9 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
    """Tokenize the raw query (no platform functions) and fill meta info."""
    query, log_sources = self._parse_query(raw_query_container.query)
    query_tokens = self.get_query_tokens(query)
    # This platform has no function support, so only query fields matter here.
    query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
    source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
    meta_info = raw_query_container.meta_info
    meta_info.query_fields = query_field_tokens
    meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -26,7 +26,8 @@ def parse(self, query: str) -> tuple[str, ParsedFunctions]:
functions=query.split(self.function_delimiter)
result_query=self.prepare_query(functions[0])
forfuncinfunctions[1:]:
split_func=func.strip().split(" ")
func=func.strip()
split_func=func.split(" ")
func_name,func_body=split_func[0]," ".join(split_func[1:])
try:
func_parser=self.manager.get_hof_parser(func_name)
Expand Down
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -29,7 +29,7 @@

classSplQueryParser(PlatformQueryParser):
log_source_pattern=r"^___source_type___\s*=\s*(?:\"(?P<d_q_value>[%a-zA-Z_*:0-9\-/]+)\"|(?P<value>[%a-zA-Z_*:0-9\-/]+))(?:\s+(?:and|or)\s+|\s+)?"# noqa: E501
rule_name_pattern=r"`(?P<name>(?:[:a-zA-Z*0-9=+%#\-_/,;`?~‘\'.<>$&^@!\]\[()\s])*)`"
rule_name_pattern=r"`(?P<name>(?:[:a-zA-Z*0-9=+%#\-_/,;`?~‘\'.<>$&^@!\]\[()\s])*)`"# noqa: RUF001
log_source_key_types= ("index","source","sourcetype","sourcecategory")

platform_functions:SplFunctions=None
Expand All@@ -56,7 +56,7 @@ def _parse_log_sources(self, query: str) -> tuple[dict[str, list[str]], str]:
def_parse_query(self,query:str)->tuple[str,dict[str,list[str]],ParsedFunctions]:
ifre.match(self.rule_name_pattern,query):
search=re.search(self.rule_name_pattern,query,flags=re.IGNORECASE)
query=query[:search.start()]+query[search.end():]
query=query[:search.start()]+query[search.end():]
query=query.strip()
log_sources,query=self._parse_log_sources(query)
query,functions=self.platform_functions.parse(query)
Expand All@@ -72,9 +72,13 @@ def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContain

query,log_sources,functions=self._parse_query(raw_query_container.query)
query_tokens=self.get_query_tokens(query)
field_tokens=self.get_field_tokens(query_tokens,functions.functions)
source_mappings=self.get_source_mappings(field_tokens,log_sources)
query_field_tokens,function_field_tokens,function_field_tokens_map=self.get_field_tokens(
query_tokens,functions.functions
)
source_mappings=self.get_source_mappings(query_field_tokens+function_field_tokens,log_sources)
meta_info=raw_query_container.meta_info
meta_info.query_fields=field_tokens
meta_info.query_fields=query_field_tokens
meta_info.function_fields=function_field_tokens
meta_info.function_fields_map=function_field_tokens_map
meta_info.source_mapping_ids= [source_mapping.source_idforsource_mappinginsource_mappings]
returnTokenizedQueryContainer(tokens=query_tokens,meta_info=meta_info,functions=functions)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -43,9 +43,9 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
    """Tokenize the raw query (no platform functions) and fill meta info."""
    query, log_sources = self._parse_query(raw_query_container.query)
    query_tokens = self.get_query_tokens(query)
    # No functions parsed for this platform; discard the function-field parts.
    query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
    source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
    meta_info = raw_query_container.meta_info
    meta_info.query_fields = query_field_tokens
    meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -35,9 +35,9 @@ class ChronicleQueryParser(PlatformQueryParser):

def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
    """Tokenize the raw query directly (no log-source extraction) and fill meta info."""
    query_tokens = self.get_query_tokens(raw_query_container.query)
    query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
    # No log sources for this platform — pass an empty mapping filter.
    source_mappings = self.get_source_mappings(query_field_tokens, {})
    meta_info = raw_query_container.meta_info
    meta_info.query_fields = query_field_tokens
    meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -29,9 +29,9 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]]]:
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
    """Tokenize the raw query (no platform functions) and fill meta info."""
    query, log_sources = self._parse_query(raw_query_container.query)
    query_tokens = self.get_query_tokens(query)
    query_field_tokens, _, _ = self.get_field_tokens(query_tokens)
    source_mappings = self.get_source_mappings(query_field_tokens, log_sources)
    meta_info = raw_query_container.meta_info
    meta_info.query_fields = query_field_tokens
    meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -232,7 +232,10 @@ def _generate_from_tokenized_query_container_by_source_mapping(
self,query_container:TokenizedQueryContainer,source_mapping:SourceMapping
)->str:
unmapped_fields=self.mappings.check_fields_mapping_existence(
query_container.meta_info.query_fields,source_mapping
query_container.meta_info.query_fields,
query_container.meta_info.function_fields_map,
self.platform_functions.manager.supported_render_names,
source_mapping,
)
is_event_type_set=False
field_values= [tokenfortokeninquery_container.tokensifisinstance(token,FieldValue)]
Expand Down
Original file line numberDiff line numberDiff line change
Expand Up@@ -244,7 +244,10 @@ def _generate_from_tokenized_query_container_by_source_mapping(
self,query_container:TokenizedQueryContainer,source_mapping:SourceMapping
)->str:
unmapped_fields=self.mappings.check_fields_mapping_existence(
query_container.meta_info.query_fields,source_mapping
query_container.meta_info.query_fields,
query_container.meta_info.function_fields_map,
self.platform_functions.manager.supported_render_names,
source_mapping,
)
prefix=self.generate_prefix(source_mapping.log_source_signature)
if"product"inquery_container.meta_info.parsed_logsources:
Expand Down
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -43,9 +43,13 @@ def _parse_query(self, query: str) -> tuple[str, ParsedFunctions]:
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
    """Tokenize the raw query with its functions (no log sources) and fill meta info."""
    query, functions = self._parse_query(query=raw_query_container.query)
    query_tokens = self.get_query_tokens(query)
    query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
        query_tokens, functions.functions
    )
    # Mapping lookup covers query and function fields; no log-source filter here.
    source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, {})
    meta_info = raw_query_container.meta_info
    meta_info.query_fields = query_field_tokens
    meta_info.function_fields = function_field_tokens
    meta_info.function_fields_map = function_field_tokens_map
    meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -22,7 +22,8 @@ def parse(self, query: str) -> tuple[str, str, ParsedFunctions]:
table=split_query[0].strip()
query_parts= []
forfuncinsplit_query[1:]:
split_func=func.strip(" ").split(" ")
func=func.strip()
split_func=func.split(" ")
func_name,func_body=split_func[0]," ".join(split_func[1:])
iffunc_name==KQLFunctionType.where:
query_parts.append(func_body)
Expand Down
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -44,9 +44,13 @@ def _parse_query(self, query: str) -> tuple[str, dict[str, list[str]], ParsedFun
def parse(self, raw_query_container: RawQueryContainer) -> TokenizedQueryContainer:
    """Tokenize the raw query with its log sources and functions, then fill meta info."""
    query, log_sources, functions = self._parse_query(query=raw_query_container.query)
    query_tokens = self.get_query_tokens(query)
    query_field_tokens, function_field_tokens, function_field_tokens_map = self.get_field_tokens(
        query_tokens, functions.functions
    )
    # Source mappings must cover fields from both the query body and its functions.
    source_mappings = self.get_source_mappings(query_field_tokens + function_field_tokens, log_sources)
    meta_info = raw_query_container.meta_info
    meta_info.query_fields = query_field_tokens
    meta_info.function_fields = function_field_tokens
    meta_info.function_fields_map = function_field_tokens_map
    meta_info.source_mapping_ids = [source_mapping.source_id for source_mapping in source_mappings]
    return TokenizedQueryContainer(tokens=query_tokens, meta_info=meta_info, functions=functions)

[8]ページ先頭

©2009-2025 Movatter.jp