robot.parsing.lexer package

Submodules

robot.parsing.lexer.blocklexers module

classrobot.parsing.lexer.blocklexers.BlockLexer(ctx:LexingContext)[source]

Bases:Lexer,ABC

accepts_more(statement:List[Token])bool[source]
input(statement:List[Token])[source]
lexer_for(statement:List[Token])Lexer[source]
lexer_classes()tuple[type[Lexer],...][source]
lex()[source]
classrobot.parsing.lexer.blocklexers.FileLexer(ctx:LexingContext)[source]

Bases:BlockLexer

lex()[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.SectionLexer(ctx:LexingContext)[source]

Bases:BlockLexer,ABC

ctx:FileContext
accepts_more(statement:List[Token])bool[source]
classrobot.parsing.lexer.blocklexers.SettingSectionLexer(ctx:LexingContext)[source]

Bases:SectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.VariableSectionLexer(ctx:LexingContext)[source]

Bases:SectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.TestCaseSectionLexer(ctx:LexingContext)[source]

Bases:SectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.TaskSectionLexer(ctx:LexingContext)[source]

Bases:SectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.KeywordSectionLexer(ctx:LexingContext)[source]

Bases:SettingSectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.CommentSectionLexer(ctx:LexingContext)[source]

Bases:SectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.ImplicitCommentSectionLexer(ctx:LexingContext)[source]

Bases:SectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.InvalidSectionLexer(ctx:LexingContext)[source]

Bases:SectionLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.TestOrKeywordLexer(ctx:LexingContext)[source]

Bases:BlockLexer,ABC

name_type:str
accepts_more(statement:List[Token])bool[source]
input(statement:List[Token])[source]
classrobot.parsing.lexer.blocklexers.TestCaseLexer(ctx:SuiteFileContext)[source]

Bases:TestOrKeywordLexer

name_type:str='TESTCASENAME'
lex()[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.KeywordLexer(ctx:FileContext)[source]

Bases:TestOrKeywordLexer

name_type:str='KEYWORDNAME'
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.NestedBlockLexer(ctx:TestCaseContext|KeywordContext)[source]

Bases:BlockLexer,ABC

ctx:TestCaseContext|KeywordContext
accepts_more(statement:List[Token])bool[source]
input(statement:List[Token])[source]
classrobot.parsing.lexer.blocklexers.ForLexer(ctx:TestCaseContext|KeywordContext)[source]

Bases:NestedBlockLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.WhileLexer(ctx:TestCaseContext|KeywordContext)[source]

Bases:NestedBlockLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.TryLexer(ctx:TestCaseContext|KeywordContext)[source]

Bases:NestedBlockLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.GroupLexer(ctx:TestCaseContext|KeywordContext)[source]

Bases:NestedBlockLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.IfLexer(ctx:TestCaseContext|KeywordContext)[source]

Bases:NestedBlockLexer

handles(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
classrobot.parsing.lexer.blocklexers.InlineIfLexer(ctx:TestCaseContext|KeywordContext)[source]

Bases:NestedBlockLexer

handles(statement:List[Token])bool[source]
accepts_more(statement:List[Token])bool[source]
lexer_classes()tuple[type[Lexer],...][source]
input(statement:List[Token])[source]

robot.parsing.lexer.context module

classrobot.parsing.lexer.context.LexingContext(settings:Settings,languages:Languages)[source]

Bases:object

lex_setting(statement:List[Token])[source]
classrobot.parsing.lexer.context.FileContext(lang:Languages|Language|str|Path|Iterable[Language|str|Path]|None=None)[source]

Bases:LexingContext

settings:FileSettings
add_language(lang:Language|str|Path)[source]
keyword_context()KeywordContext[source]
setting_section(statement:List[Token])bool[source]
variable_section(statement:List[Token])bool[source]
test_case_section(statement:List[Token])bool[source]
task_section(statement:List[Token])bool[source]
keyword_section(statement:List[Token])bool[source]
comment_section(statement:List[Token])bool[source]
lex_invalid_section(statement:List[Token])[source]
classrobot.parsing.lexer.context.SuiteFileContext(lang:Languages|Language|str|Path|Iterable[Language|str|Path]|None=None)[source]

Bases:FileContext

settings:SuiteFileSettings
test_case_context()TestCaseContext[source]
test_case_section(statement:List[Token])bool[source]
task_section(statement:List[Token])bool[source]
classrobot.parsing.lexer.context.ResourceFileContext(lang:Languages|Language|str|Path|Iterable[Language|str|Path]|None=None)[source]

Bases:FileContext

settings:ResourceFileSettings
classrobot.parsing.lexer.context.InitFileContext(lang:Languages|Language|str|Path|Iterable[Language|str|Path]|None=None)[source]

Bases:FileContext

settings:InitFileSettings
classrobot.parsing.lexer.context.TestCaseContext(settings:TestCaseSettings)[source]

Bases:LexingContext

settings:TestCaseSettings
propertytemplate_set:bool
classrobot.parsing.lexer.context.KeywordContext(settings:KeywordSettings)[source]

Bases:LexingContext

settings:KeywordSettings
propertytemplate_set:bool

robot.parsing.lexer.lexer module

robot.parsing.lexer.lexer.get_tokens(source:Path|str|TextIO,data_only:bool=False,tokenize_variables:bool=False,lang:Languages|Language|str|Path|Iterable[Language|str|Path]|None=None)Iterator[Token][source]

Parses the given source to tokens.

Parameters:
  • source – The source where to read the data. Can be a path to a source file as a string or as a pathlib.Path object, an already opened file object, or Unicode text containing the data directly. Source files must be UTF-8 encoded.

  • data_only – When False (default), returns all tokens. When set to True, omits separators, comments, continuation markers, and other non-data tokens.

  • tokenize_variables – When True, possible variables in keyword arguments and elsewhere are tokenized. See the tokenize_variables() method for details.

  • lang – Additional languages to be supported during parsing. Can be a string matching any of the supported language codes or names, an initialized Language subclass, a list containing such strings or instances, or a Languages instance.

Returns a generator that yields Token instances.

robot.parsing.lexer.lexer.get_resource_tokens(source:Path|str|TextIO,data_only:bool=False,tokenize_variables:bool=False,lang:Languages|Language|str|Path|Iterable[Language|str|Path]|None=None)Iterator[Token][source]

Parses the given source to resource file tokens.

Same as get_tokens() otherwise, but the source is considered to be a resource file. This affects, for example, what settings are valid.

robot.parsing.lexer.lexer.get_init_tokens(source:Path|str|TextIO,data_only:bool=False,tokenize_variables:bool=False,lang:Languages|Language|str|Path|Iterable[Language|str|Path]|None=None)Iterator[Token][source]

Parses the given source to init file tokens.

Same as get_tokens() otherwise, but the source is considered to be a suite initialization file. This affects, for example, what settings are valid.

classrobot.parsing.lexer.lexer.Lexer(ctx:LexingContext,data_only:bool=False,tokenize_variables:bool=False)[source]

Bases:object

input(source:Path|str|TextIO)[source]
get_tokens()Iterator[Token][source]

robot.parsing.lexer.settings module

classrobot.parsing.lexer.settings.Settings(languages:Languages)[source]

Bases:ABC

names:tuple[str,...]=()
aliases:dict[str,str]={}
multi_use=('Metadata','Library','Resource','Variables')
single_value=('Resource','TestTimeout','TestTemplate','Timeout','Template','Name')
name_and_arguments=('Metadata','SuiteSetup','SuiteTeardown','TestSetup','TestTeardown','TestTemplate','Setup','Teardown','Template','Resource','Variables')
name_arguments_and_with_name=('Library',)
lex(statement:List[Token])[source]
classrobot.parsing.lexer.settings.FileSettings(languages:Languages)[source]

Bases:Settings,ABC

classrobot.parsing.lexer.settings.SuiteFileSettings(languages:Languages)[source]

Bases:FileSettings

names:tuple[str,...]=('Documentation','Metadata','Name','SuiteSetup','SuiteTeardown','TestSetup','TestTeardown','TestTemplate','TestTimeout','TestTags','DefaultTags','KeywordTags','Library','Resource','Variables')
aliases:dict[str,str]={'ForceTags':'TestTags','TaskSetup':'TestSetup','TaskTags':'TestTags','TaskTeardown':'TestTeardown','TaskTemplate':'TestTemplate','TaskTimeout':'TestTimeout'}
classrobot.parsing.lexer.settings.InitFileSettings(languages:Languages)[source]

Bases:FileSettings

names:tuple[str,...]=('Documentation','Metadata','Name','SuiteSetup','SuiteTeardown','TestSetup','TestTeardown','TestTimeout','TestTags','KeywordTags','Library','Resource','Variables')
aliases:dict[str,str]={'ForceTags':'TestTags','TaskSetup':'TestSetup','TaskTags':'TestTags','TaskTeardown':'TestTeardown','TaskTimeout':'TestTimeout'}
classrobot.parsing.lexer.settings.ResourceFileSettings(languages:Languages)[source]

Bases:FileSettings

names:tuple[str,...]=('Documentation','KeywordTags','Library','Resource','Variables')
classrobot.parsing.lexer.settings.TestCaseSettings(parent:SuiteFileSettings)[source]

Bases:Settings

names:tuple[str,...]=('Documentation','Tags','Setup','Teardown','Template','Timeout')
propertytemplate_set:bool
classrobot.parsing.lexer.settings.KeywordSettings(parent:FileSettings)[source]

Bases:Settings

names:tuple[str,...]=('Documentation','Arguments','Setup','Teardown','Timeout','Tags','Return')

robot.parsing.lexer.statementlexers module

classrobot.parsing.lexer.statementlexers.Lexer(ctx:LexingContext)[source]

Bases:ABC

handles(statement:List[Token])bool[source]
abstractmethodaccepts_more(statement:List[Token])bool[source]
abstractmethodinput(statement:List[Token])[source]
abstractmethodlex()[source]
classrobot.parsing.lexer.statementlexers.StatementLexer(ctx:LexingContext)[source]

Bases:Lexer,ABC

token_type:str
accepts_more(statement:List[Token])bool[source]
input(statement:List[Token])[source]
abstractmethodlex()[source]
classrobot.parsing.lexer.statementlexers.SingleType(ctx:LexingContext)[source]

Bases:StatementLexer,ABC

lex()[source]
classrobot.parsing.lexer.statementlexers.TypeAndArguments(ctx:LexingContext)[source]

Bases:StatementLexer,ABC

lex()[source]
classrobot.parsing.lexer.statementlexers.SectionHeaderLexer(ctx:LexingContext)[source]

Bases:SingleType,ABC

ctx:FileContext
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.SettingSectionHeaderLexer(ctx:LexingContext)[source]

Bases:SectionHeaderLexer

token_type:str='SETTINGHEADER'
classrobot.parsing.lexer.statementlexers.VariableSectionHeaderLexer(ctx:LexingContext)[source]

Bases:SectionHeaderLexer

token_type:str='VARIABLEHEADER'
classrobot.parsing.lexer.statementlexers.TestCaseSectionHeaderLexer(ctx:LexingContext)[source]

Bases:SectionHeaderLexer

token_type:str='TESTCASEHEADER'
classrobot.parsing.lexer.statementlexers.TaskSectionHeaderLexer(ctx:LexingContext)[source]

Bases:SectionHeaderLexer

token_type:str='TASKHEADER'
classrobot.parsing.lexer.statementlexers.KeywordSectionHeaderLexer(ctx:LexingContext)[source]

Bases:SectionHeaderLexer

token_type:str='KEYWORDHEADER'
classrobot.parsing.lexer.statementlexers.CommentSectionHeaderLexer(ctx:LexingContext)[source]

Bases:SectionHeaderLexer

token_type:str='COMMENTHEADER'
classrobot.parsing.lexer.statementlexers.InvalidSectionHeaderLexer(ctx:LexingContext)[source]

Bases:SectionHeaderLexer

token_type:str='INVALIDHEADER'
lex()[source]
classrobot.parsing.lexer.statementlexers.CommentLexer(ctx:LexingContext)[source]

Bases:SingleType

token_type:str='COMMENT'
classrobot.parsing.lexer.statementlexers.ImplicitCommentLexer(ctx:LexingContext)[source]

Bases:CommentLexer

ctx:FileContext
input(statement:List[Token])[source]
lex()[source]
classrobot.parsing.lexer.statementlexers.SettingLexer(ctx:LexingContext)[source]

Bases:StatementLexer

ctx:FileContext
lex()[source]
classrobot.parsing.lexer.statementlexers.TestCaseSettingLexer(ctx:LexingContext)[source]

Bases:StatementLexer

ctx:TestCaseContext
lex()[source]
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.KeywordSettingLexer(ctx:LexingContext)[source]

Bases:StatementLexer

ctx:KeywordContext
lex()[source]
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.VariableLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

ctx:FileContext
token_type:str='VARIABLE'
lex()[source]
classrobot.parsing.lexer.statementlexers.KeywordCallLexer(ctx:LexingContext)[source]

Bases:StatementLexer

ctx:TestCaseContext|KeywordContext
lex()[source]
classrobot.parsing.lexer.statementlexers.ForHeaderLexer(ctx:LexingContext)[source]

Bases:StatementLexer

separators=('IN', 'IN RANGE', 'IN ENUMERATE', 'IN ZIP')
handles(statement:List[Token])bool[source]
lex()[source]
classrobot.parsing.lexer.statementlexers.IfHeaderLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='IF'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.InlineIfHeaderLexer(ctx:LexingContext)[source]

Bases:StatementLexer

token_type:str='INLINEIF'
handles(statement:List[Token])bool[source]
lex()[source]
classrobot.parsing.lexer.statementlexers.ElseIfHeaderLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='ELSEIF'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.ElseHeaderLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='ELSE'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.TryHeaderLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='TRY'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.ExceptHeaderLexer(ctx:LexingContext)[source]

Bases:StatementLexer

token_type:str='EXCEPT'
handles(statement:List[Token])bool[source]
lex()[source]
classrobot.parsing.lexer.statementlexers.FinallyHeaderLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='FINALLY'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.WhileHeaderLexer(ctx:LexingContext)[source]

Bases:StatementLexer

token_type:str='WHILE'
handles(statement:List[Token])bool[source]
lex()[source]
classrobot.parsing.lexer.statementlexers.GroupHeaderLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='GROUP'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.EndLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='END'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.VarLexer(ctx:LexingContext)[source]

Bases:StatementLexer

token_type:str='VAR'
handles(statement:List[Token])bool[source]
lex()[source]
classrobot.parsing.lexer.statementlexers.ReturnLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='RETURNSTATEMENT'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.ContinueLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='CONTINUE'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.BreakLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='BREAK'
handles(statement:List[Token])bool[source]
classrobot.parsing.lexer.statementlexers.SyntaxErrorLexer(ctx:LexingContext)[source]

Bases:TypeAndArguments

token_type:str='ERROR'
handles(statement:List[Token])bool[source]
lex()[source]

robot.parsing.lexer.tokenizer module

classrobot.parsing.lexer.tokenizer.Tokenizer[source]

Bases:object

tokenize(data:str,data_only:bool=False)Iterator[list[Token]][source]

robot.parsing.lexer.tokens module

classrobot.parsing.lexer.tokens.Token(type:str|None=None,value:str|None=None,lineno:int=-1,col_offset:int=-1,error:str|None=None)[source]

Bases:object

Token representing piece of Robot Framework data.

Each token has type, value, line number, column offset and end column offset in type, value, lineno, col_offset and end_col_offset attributes, respectively. Tokens representing an error also have their error message in the error attribute.

Token types are declared as class attributes such as SETTING_HEADER and EOL. Values of these constants have changed slightly in Robot Framework 4.0, and they may change again in the future. It is thus safer to use the constants, not their values, when types are needed. For example, use Token(Token.EOL) instead of Token('EOL') and token.type == Token.EOL instead of token.type == 'EOL'.

If value is not given and type is a special marker like IF or EOL, the value is set automatically.

SETTING_HEADER='SETTINGHEADER'
VARIABLE_HEADER='VARIABLEHEADER'
TESTCASE_HEADER='TESTCASEHEADER'
TASK_HEADER='TASKHEADER'
KEYWORD_HEADER='KEYWORDHEADER'
COMMENT_HEADER='COMMENTHEADER'
INVALID_HEADER='INVALIDHEADER'
FATAL_INVALID_HEADER='FATALINVALIDHEADER'
TESTCASE_NAME='TESTCASENAME'
KEYWORD_NAME='KEYWORDNAME'
SUITE_NAME='SUITENAME'
DOCUMENTATION='DOCUMENTATION'
SUITE_SETUP='SUITESETUP'
SUITE_TEARDOWN='SUITETEARDOWN'
METADATA='METADATA'
TEST_SETUP='TESTSETUP'
TEST_TEARDOWN='TESTTEARDOWN'
TEST_TEMPLATE='TESTTEMPLATE'
TEST_TIMEOUT='TESTTIMEOUT'
TEST_TAGS='TESTTAGS'
FORCE_TAGS='TESTTAGS'
DEFAULT_TAGS='DEFAULTTAGS'
KEYWORD_TAGS='KEYWORDTAGS'
LIBRARY='LIBRARY'
RESOURCE='RESOURCE'
VARIABLES='VARIABLES'
SETUP='SETUP'
TEARDOWN='TEARDOWN'
TEMPLATE='TEMPLATE'
TIMEOUT='TIMEOUT'
TAGS='TAGS'
ARGUMENTS='ARGUMENTS'
RETURN='RETURN'
RETURN_SETTING='RETURN'
AS='AS'
WITH_NAME='AS'
NAME='NAME'
VARIABLE='VARIABLE'
ARGUMENT='ARGUMENT'
ASSIGN='ASSIGN'
KEYWORD='KEYWORD'
FOR='FOR'
FOR_SEPARATOR='FORSEPARATOR'
END='END'
IF='IF'
INLINE_IF='INLINEIF'
ELSE_IF='ELSEIF'
ELSE='ELSE'
TRY='TRY'
EXCEPT='EXCEPT'
FINALLY='FINALLY'
WHILE='WHILE'
VAR='VAR'
RETURN_STATEMENT='RETURNSTATEMENT'
CONTINUE='CONTINUE'
BREAK='BREAK'
OPTION='OPTION'
GROUP='GROUP'
SEPARATOR='SEPARATOR'
COMMENT='COMMENT'
CONTINUATION='CONTINUATION'
CONFIG='CONFIG'
EOL='EOL'
EOS='EOS'
ERROR='ERROR'
FATAL_ERROR='FATALERROR'
NON_DATA_TOKENS={'COMMENT','CONTINUATION','EOL','EOS','SEPARATOR'}
SETTING_TOKENS={'ARGUMENTS','DEFAULTTAGS','DOCUMENTATION','KEYWORDTAGS','LIBRARY','METADATA','RESOURCE','RETURN','SETUP','SUITENAME','SUITESETUP','SUITETEARDOWN','TAGS','TEARDOWN','TEMPLATE','TESTSETUP','TESTTAGS','TESTTEARDOWN','TESTTEMPLATE','TESTTIMEOUT','TIMEOUT','VARIABLES'}
HEADER_TOKENS={'COMMENTHEADER','INVALIDHEADER','KEYWORDHEADER','SETTINGHEADER','TASKHEADER','TESTCASEHEADER','VARIABLEHEADER'}
ALLOW_VARIABLES={'ARGUMENT','KEYWORDNAME','NAME','TESTCASENAME'}
type
value
lineno
col_offset
error
propertyend_col_offset:int
set_error(error:str)[source]
tokenize_variables()Iterator[Token][source]

Tokenizes possible variables in token value.

Yields the token itself if the token does not allow variables (see Token.ALLOW_VARIABLES) or its value does not contain variables. Otherwise, yields variable tokens as well as tokens before, after, or between variables so that they have the same type as the original token.

classrobot.parsing.lexer.tokens.EOS(lineno:int=-1,col_offset:int=-1)[source]

Bases:Token

Token representing end of a statement.

classmethodfrom_token(token:Token,before:bool=False)EOS[source]
classrobot.parsing.lexer.tokens.END(lineno:int=-1,col_offset:int=-1,virtual:bool=False)[source]

Bases:Token

Token representing END token used to signify block ending.

Virtual END tokens have '' (an empty string) as their value, whereas "real" END tokens have the value 'END'.

classmethodfrom_token(token:Token,virtual:bool=False)END[source]