Class: Rox::Core::TokenizedExpression
- Inherits: Object
- Defined in: lib/rox/core/roxx/tokenized_expression.rb
Constant Summary
- DICT_START_DELIMITER = '{'.freeze
- DICT_END_DELIMITER = '}'.freeze
- ARRAY_START_DELIMITER = '['.freeze
- ARRAY_END_DELIMITER = ']'.freeze
- TOKEN_DELIMITERS = "{}[]():, \t\r\n\"".freeze
- PRE_POST_STRING_CHAR = ''.freeze
- STRING_DELIMITER = '"'.freeze
- ESCAPED_QUOTE = '\\"'.freeze
- ESCAPED_QUOTE_PLACEHOLDER = '\\RO_Q'.freeze
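ESCAPED_QUOTE and ESCAPED_QUOTE_PLACEHOLDER exist so that escaped quotes inside string literals survive tokenization: #tokenize swaps them for the placeholder before splitting on delimiters, then swaps them back when it emits the string operand. A minimal sketch of the first half of that round trip, mirroring the normalization line in #tokenize below (the expression string is an illustrative assumption, and the rox-rollout gem is assumed to be loaded):

# The escaped quotes (\") become \RO_Q so the string tokenizer never
# mistakes them for a closing STRING_DELIMITER; #tokenize reverses the
# substitution when it pushes the string operand node.
expression = 'eq("say \\"hi\\"", "other")'
normalized = expression.gsub(
  Rox::Core::TokenizedExpression::ESCAPED_QUOTE,
  Rox::Core::TokenizedExpression::ESCAPED_QUOTE_PLACEHOLDER
)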
Instance Method Summary
- #initialize(expression, operators) ⇒ TokenizedExpression (constructor): A new instance of TokenizedExpression.
- #node_from_array(items) ⇒ Object
- #node_from_dict(items) ⇒ Object
- #node_from_token(token) ⇒ Object
- #push_node(node) ⇒ Object
- #tokenize(expression) ⇒ Object
- #tokens ⇒ Object
Constructor Details
#initialize(expression, operators) ⇒ TokenizedExpression
Returns a new instance of TokenizedExpression.
# File 'lib/rox/core/roxx/tokenized_expression.rb', line 19

def initialize(expression, operators)
  @expression = expression
  @operators = operators
  @result_list = nil
  @array_accumulator = nil
  @dict_accumulator = nil
  @dict_key = nil
end
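The constructor only stores the expression string and the list of operator names; nothing is parsed until #tokens (or #tokenize) is called. A minimal usage sketch, assuming the rox-rollout gem is loaded; the expression and the operators array are illustrative values, not something the SDK supplies here:

operators = ['eq']  # 'eq' counts as an operator only because it is listed here
tokenized = Rox::Core::TokenizedExpression.new('eq("a", "b")', operators)
nodes = tokenized.tokens
# nodes is an Array of Rox::Core::Node objects: a RATOR node for "eq"
# followed by RAND nodes holding the two string operands "a" and "b".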
Instance Method Details
#node_from_array(items) ⇒ Object
# File 'lib/rox/core/roxx/tokenized_expression.rb', line 91

def node_from_array(items)
  Node.new(NodeTypes::RAND, items)
end
#node_from_dict(items) ⇒ Object
# File 'lib/rox/core/roxx/tokenized_expression.rb', line 95

def node_from_dict(items)
  Node.new(NodeTypes::RAND, items)
end
#node_from_token(token) ⇒ Object
# File 'lib/rox/core/roxx/tokenized_expression.rb', line 99

def node_from_token(token)
  return Node.new(NodeTypes::RATOR, token) if @operators.include?(token)
  return Node.new(NodeTypes::RAND, true) if token == Symbols::ROXX_TRUE
  return Node.new(NodeTypes::RAND, false) if token == Symbols::ROXX_FALSE
  return Node.new(NodeTypes::RAND, TokenType::UNDEFINED) if token == Symbols::ROXX_UNDEFINED

  token_type = TokenType.from_token(token)
  return Node.new(NodeTypes::RAND, token[1...-2]) if token_type == TokenType::STRING

  if token_type == TokenType::NUMBER
    begin
      return Node.new(NodeTypes::RAND, Integer(token))
    rescue ArgumentError
      begin
        return Node.new(NodeTypes::RAND, Float(token))
      rescue ArgumentError => e
        raise ArgumentError, "Excepted Number, got '#{token}' (#{token_type}): #{e}"
      end
    end
  end

  Node.new(NodeTypes::UNKNOWN, nil)
end
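A token becomes a RATOR node when it matches a registered operator name, a RAND node when it is a boolean/undefined keyword, a string, or a number, and an UNKNOWN node otherwise. A small sketch of that mapping, assuming Symbols::ROXX_TRUE is the literal 'true' and that 'eq' is the only registered operator; the node types in the comments are read off the source above, not captured output:

tokenized = Rox::Core::TokenizedExpression.new('', ['eq'])
tokenized.node_from_token('eq')    # RATOR node wrapping "eq"
tokenized.node_from_token('true')  # RAND node wrapping the boolean true
tokenized.node_from_token('42')    # RAND node wrapping Integer 42
tokenized.node_from_token('1.5')   # RAND node wrapping Float 1.5
tokenized.node_from_token('???')   # UNKNOWN node wrapping nil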
#push_node(node) ⇒ Object
# File 'lib/rox/core/roxx/tokenized_expression.rb', line 32

def push_node(node)
  if !@dict_accumulator.nil? && @dict_key.nil?
    @dict_key = node.value.to_s
  elsif !@dict_accumulator.nil? && !@dict_key.nil?
    @dict_accumulator[@dict_key] = node.value
    @dict_key = nil
  elsif !@array_accumulator.nil?
    @array_accumulator << node.value
  else
    @result_list << node
  end
end
#tokenize(expression) ⇒ Object
# File 'lib/rox/core/roxx/tokenized_expression.rb', line 45

def tokenize(expression)
  @result_list = []
  @array_accumulator = nil
  @dict_accumulator = nil
  @dict_key = nil

  delimiters_to_use = TokenizedExpression::TOKEN_DELIMITERS
  normalized_expression = expression.gsub(TokenizedExpression::ESCAPED_QUOTE,
                                          TokenizedExpression::ESCAPED_QUOTE_PLACEHOLDER)
  tokenizer = StringTokenizer.new(normalized_expression, delimiters_to_use, true)
  token = nil
  while tokenizer.more_tokens?
    prev_token = token
    token = tokenizer.next_token(delimiters_to_use)
    in_string = delimiters_to_use == TokenizedExpression::STRING_DELIMITER
    if !in_string && token == TokenizedExpression::DICT_START_DELIMITER
      @dict_accumulator = {}
    elsif !in_string && token == TokenizedExpression::DICT_END_DELIMITER
      dict_result = @dict_accumulator
      @dict_accumulator = nil
      push_node(node_from_dict(dict_result))
    elsif !in_string && token == TokenizedExpression::ARRAY_START_DELIMITER
      @array_accumulator = []
    elsif !in_string && token == TokenizedExpression::ARRAY_END_DELIMITER
      array_result = @array_accumulator
      @array_accumulator = nil
      push_node(node_from_array(array_result))
    elsif token == TokenizedExpression::STRING_DELIMITER
      if prev_token == TokenizedExpression::STRING_DELIMITER
        push_node(node_from_token(Symbols::ROXX_EMPTY_STRING))
      end
      delimiters_to_use = in_string ? TokenizedExpression::TOKEN_DELIMITERS : TokenizedExpression::STRING_DELIMITER
    elsif delimiters_to_use == TokenizedExpression::STRING_DELIMITER
      push_node(Node.new(NodeTypes::RAND, token.gsub(TokenizedExpression::ESCAPED_QUOTE_PLACEHOLDER,
                                                     TokenizedExpression::ESCAPED_QUOTE)))
    elsif !TokenizedExpression::TOKEN_DELIMITERS.include?(token) && token != TokenizedExpression::PRE_POST_STRING_CHAR
      push_node(node_from_token(token))
    end
  end
  @result_list
end
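Array and dictionary literals do not produce one node per element: the accumulators collect element values while the literal is open, and the closing delimiter pushes a single RAND node whose value is the finished Ruby Array or Hash. A sketch, assuming 'inArray' is passed in as an operator; the expression is illustrative and the comments reflect a reading of the source above:

tokenized = Rox::Core::TokenizedExpression.new('', ['inArray'])
nodes = tokenized.tokenize('inArray("b", ["a", "b", "c"])')
# nodes[0] is the RATOR node for "inArray"
# nodes[1] is a RAND node whose value is "b"
# nodes[2] is a single RAND node whose value is the Array ["a", "b", "c"]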
#tokens ⇒ Object
# File 'lib/rox/core/roxx/tokenized_expression.rb', line 28

def tokens
  tokenize(@expression)
end