Movatterモバイル変換


[0]ホーム

URL:


Skip to content

Navigation Menu

Sign in
Appearance settings

Search code, repositories, users, issues, pull requests...

Provide feedback

We read every piece of feedback, and take your input very seriously.

Saved searches

Use saved searches to filter your results more quickly

Sign up
Appearance settings

Interpret a space as AND #18

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
saltar-ua merged 1 commit into main from space-as-and
Dec 4, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
27 changes: 27 additions & 0 deletionssiem-converter/app/converter/core/mixins/logic.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,27 @@
fromtypingimportList,Union

fromapp.converter.core.models.fieldimportField,Keyword
fromapp.converter.core.models.identifierimportIdentifier
fromapp.converter.core.custom_types.tokensimportLogicalOperatorType,GroupType


class ANDLogicOperatorMixin:
    """Mixin that restores implicit AND operators in a token stream.

    Some query languages (Lucene, SPL, LogScale) allow two conditions to sit
    next to each other with no explicit logical operator between them; the
    implied operator is AND. This mixin detects those gaps and inserts an
    explicit AND ``Identifier`` so downstream rendering sees a fully
    explicit token stream.
    """

    @staticmethod
    def get_missed_and_token_indices(tokens: List[Union[Field, Keyword, Identifier]]) -> List[int]:
        """Return the insertion indices where an AND token is missing.

        A gap exists after a ``Field``/``Keyword`` token whose successor is
        neither a logical-operator ``Identifier`` nor a closing parenthesis.

        The indices are returned in *descending* order on purpose: inserting
        at the highest index first means earlier pending indices are not
        shifted by the insertions.

        :param tokens: the tokenized query.
        :return: insertion indices, highest first.
        """
        missed_and_indices = []
        for index in range(len(tokens) - 1):
            token = tokens[index]
            next_token = tokens[index + 1]
            if (isinstance(token, (Field, Keyword))
                    and not (isinstance(next_token, Identifier) and (
                        next_token.token_type in LogicalOperatorType
                        or next_token.token_type == GroupType.R_PAREN))):
                missed_and_indices.append(index + 1)
        # Bug fix: the original returned `reversed(...)` — a one-shot
        # list_reverseiterator — despite the List[int] annotation.
        # Materialize it so the result is a real, reusable list.
        return list(reversed(missed_and_indices))

    def add_and_token_if_missed(self, tokens: List[Union[Field, Keyword, Identifier]]) -> List[Union[Field, Keyword, Identifier]]:
        """Insert an explicit AND ``Identifier`` at every missing position.

        Mutates and returns *tokens*. Indices arrive highest-first (see
        :meth:`get_missed_and_token_indices`), so each ``insert`` leaves the
        remaining indices valid.

        :param tokens: the tokenized query (modified in place).
        :return: the same list, with AND tokens inserted.
        """
        indices = self.get_missed_and_token_indices(tokens=tokens)
        for index in indices:
            tokens.insert(index, Identifier(token_type=LogicalOperatorType.AND))
        return tokens
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -20,14 +20,15 @@
fromtypingimportTuple,Union,List,Any

fromapp.converter.core.exceptions.parserimportTokenizerGeneralException
fromapp.converter.core.mixins.logicimportANDLogicOperatorMixin
fromapp.converter.core.models.fieldimportKeyword,Field
fromapp.converter.core.models.identifierimportIdentifier
fromapp.converter.core.tokenizerimportQueryTokenizer
fromapp.converter.core.custom_types.tokensimportOperatorType
fromapp.converter.tools.utilsimportget_match_group


classLuceneTokenizer(QueryTokenizer):
classLuceneTokenizer(QueryTokenizer,ANDLogicOperatorMixin):
field_pattern=r"(?P<field_name>[a-zA-Z\.\-_]+)"
match_operator_pattern=r"(?:___field___\s*(?P<match_operator>:))\s*"

Expand DownExpand Up@@ -107,3 +108,7 @@ def search_keyword(self, query: str) -> Tuple[Keyword, str]:
keyword=Keyword(value=value)
pos=keyword_search.end()-1
returnkeyword,query[pos:]

deftokenize(self,query:str)->List[Union[Field,Keyword,Identifier]]:
tokens=super().tokenize(query=query)
returnself.add_and_token_if_missed(tokens=tokens)
11 changes: 9 additions & 2 deletionssiem-converter/app/converter/platforms/base/spl/tokenizer.py
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -17,14 +17,17 @@
"""

importre
fromtypingimportTuple,Any
fromtypingimportTuple,Any,List,Union

fromapp.converter.core.mixins.logicimportANDLogicOperatorMixin
fromapp.converter.core.models.fieldimportField,Keyword
fromapp.converter.core.models.identifierimportIdentifier
fromapp.converter.core.tokenizerimportQueryTokenizer
fromapp.converter.core.custom_types.tokensimportOperatorType
fromapp.converter.tools.utilsimportget_match_group


classSplTokenizer(QueryTokenizer):
classSplTokenizer(QueryTokenizer,ANDLogicOperatorMixin):
field_pattern=r"(?P<field_name>[a-zA-Z\.\-_\{\}]+)"
num_value_pattern=r"(?P<num_value>\d+(?:\.\d+)*)\s*"
double_quotes_value_pattern=r'"(?P<d_q_value>(?:[:a-zA-Z\*0-9=+%#\-_/,;\'\.$&^@!\(\)\{\}\s]|\\\"|\\)*)"\s*'
Expand All@@ -51,3 +54,7 @@ def get_operator_and_value(self, match: re.Match, operator: str = OperatorType.E
returnoperator,s_q_value

returnsuper().get_operator_and_value(match)

deftokenize(self,query:str)->List[Union[Field,Keyword,Identifier]]:
tokens=super().tokenize(query=query)
returnself.add_and_token_if_missed(tokens=tokens)
View file
Open in desktop
Original file line numberDiff line numberDiff line change
Expand Up@@ -17,16 +17,17 @@
"""

importre
fromtypingimportTuple,Any
fromtypingimportTuple,Any,List,Union

fromapp.converter.core.mixins.logicimportANDLogicOperatorMixin
fromapp.converter.core.models.fieldimportKeyword,Field
fromapp.converter.core.models.identifierimportIdentifier
fromapp.converter.core.custom_types.tokensimportGroupType,LogicalOperatorType,OperatorType
fromapp.converter.core.tokenizerimportQueryTokenizer
fromapp.converter.tools.utilsimportget_match_group


classLogScaleTokenizer(QueryTokenizer):
classLogScaleTokenizer(QueryTokenizer,ANDLogicOperatorMixin):
match_operator_pattern=r"""(?:___field___\s?(?P<match_operator>=|!=))\s?"""
num_value_pattern=r"(?P<num_value>\d+(?:\.\d+)*)\s*"
double_quotes_value_pattern=r'"(?P<d_q_value>(?:[:a-zA-Z\*0-9=+%#\-_/,\'\.$&^@!\(\)\{\}\s]|\\\"|\\)*)"\s*'
Expand DownExpand Up@@ -65,7 +66,7 @@ def __get_identifier(self, query: str) -> (list, str):
else:
returnself.search_field_value(query)

deftokenize(self,query:str)->list:
deftokenize(self,query:str)->List[Union[Field,Keyword,Identifier]]:
tokenized= []
whilequery:
identifier,query=self.__get_identifier(query=query)
Expand All@@ -78,4 +79,4 @@ def tokenize(self, query: str) -> list:
tokenized.append(Identifier(token_type=LogicalOperatorType.AND))
tokenized.append(identifier)
self._validate_parentheses(tokenized)
returntokenized
returnself.add_and_token_if_missed(tokens=tokenized)

[8]ページ先頭

©2009-2025 Movatter.jp