arx.lexer.core

Source: packages/arx/src/arx/lexer/core.py

title: Module for handling the lexer analysis.

Classes

TokenKind(Enum)

title: TokenKind enumeration of the kinds of tokens returned by the lexer.

Token

title: Token class that stores the kind and the value of the token.
attributes:
  kind:
    type: TokenKind
  value:
    type: Any
  location:
    type: SourceLocation

Methods

get_name(self) -> str
title: Get the name of the specified token.
returns:
  type: str
  description: Name of the token.
get_display_value(self) -> str
title: Return the string representation of a token value.
returns:
  type: str
  description: The string representation of the token value.

TokenList

title: Class for handling a list of tokens.
attributes:
  tokens:
    type: list[Token]
  position:
    type: int
  cur_tok:
    type: Token

Methods

get_token(self) -> Token
title: Get the next token.
returns:
  type: Token
  description: The next token from the token list.
get_next_token(self) -> Token
title: Provide a simple token buffer.
returns:
  type: Token
  description: >-
    The current token the parser is looking at. Reads another token
    from the lexer and updates cur_tok with its results.

LexerError(Exception)

title: Custom exception for lexer errors.
attributes:
  location:
    description: The source location where the error occurred.

Lexer

title: Lexer class for tokenizing the input source.
attributes:
  lex_loc:
    type: SourceLocation
    description: Source location for lexer.
  last_char:
    type: str
  new_line:
    type: bool
  _keyword_map:
    type: dict[str, TokenKind]
  _line_comment_delims:
    type: tuple[str, ...]
  _multi_char_ops:
    type: set[str]
  _literal_keywords:
    type: dict[str, Any]
  _keyword_token_map:
    type: dict[str, TokenKind]

Methods

clean(self) -> None
title: Reset the Lexer attributes.
get_token(self) -> Token
title: Get the next token.
returns:
  type: Token
  description: The next token from standard input.
advance(self) -> str
title: Advance to the next character in the buffer.
returns:
  type: str
  description: The next character read from the input buffer.
lex(self) -> TokenList
title: Create a list of tokens from input source.
returns:
  type: TokenList
  description: The list of tokens produced from the input source.