Class: ShuntingYard::Lexer
Inherits: Object
Defined in: lib/shunting_yard/lexer.rb
Constant Summary collapse
# Matches a single whitespace character or an end-of-line/end-of-string
# position; used to delimit an unknown lexeme when reporting errors.
SPACE_OR_EOL = /(\s|$)/.freeze
Instance Attribute Summary collapse
-
#patterns ⇒ Object
Returns the value of attribute patterns.
-
#separator_pattern ⇒ Object
Returns the value of attribute separator_pattern.
Instance Method Summary collapse
- #add_pattern(name, regex, evaluator = -> (lexeme) { lexeme }) ⇒ Object
-
#initialize ⇒ Lexer
constructor
A new instance of Lexer.
- #tokenize(input) ⇒ Object
Constructor Details
#initialize ⇒ Lexer
Returns a new instance of Lexer.
# Builds a lexer with an empty pattern table and the default
# whitespace/end-of-line separator (SPACE_OR_EOL).
def initialize
  @patterns = []
  @separator_pattern = SPACE_OR_EOL
end
Instance Attribute Details
#patterns ⇒ Object
Returns the value of attribute patterns.
# Read-only access to the registered [name, regex, evaluator] triples,
# in registration order.
def patterns
  @patterns
end
#separator_pattern ⇒ Object
Returns the value of attribute separator_pattern.
# The Regexp used by #tokenize to slice out an unknown lexeme when no
# registered pattern matches.
def separator_pattern
  @separator_pattern
end
Instance Method Details
#add_pattern(name, regex, evaluator = -> (lexeme) { lexeme }) ⇒ Object
# Registers a token pattern.
#
# @param name [Object] label attached to tokens produced by this pattern
# @param regex [Regexp] pattern matched against the input
# @param evaluator [#call] converts a matched lexeme into a token value;
#   defaults to the identity lambda
# @return [Array] the updated pattern list
def add_pattern(name, regex, evaluator = -> (lexeme) { lexeme })
  @patterns << [name, regex, evaluator]
end
#tokenize(input) ⇒ Object
# Splits +input+ into tokens using the registered patterns.
#
# At each scan position the patterns are tried in registration order and
# the first match wins. The scanner then advances by the matched lexeme's
# byte length. Tokens whose evaluator returns nil are silently dropped
# (useful for whitespace patterns). When no pattern matches, the
# offending lexeme (everything up to the next separator) is reported via
# UnknownTokenError together with its 1-based position.
#
# @param input [String] the text to tokenize
# @return [Array] the collected tokens (via build_token)
# @raise [UnknownTokenError] when no registered pattern matches
def tokenize(input)
  scanner = StringScanner.new(input)
  tokens = []

  until scanner.eos?
    matched = nil

    @patterns.each do |name, regex, evaluator|
      lexeme = scanner.check(regex)
      next if lexeme.nil?

      matched = [name, lexeme, evaluator.(lexeme)]
      break
    end

    if matched.nil?
      # Grab everything up to (and excluding) the next separator so the
      # error message shows the whole unknown lexeme, not one character.
      unknown = scanner.check_until(separator_pattern).sub(separator_pattern, "")
      raise UnknownTokenError.new(unknown, scanner.pos + 1)
    end

    # Advance by byte size: StringScanner#pos is a byte offset.
    scanner.pos += matched[1].bytesize
    tokens << build_token(matched) unless matched[2].nil?
  end

  tokens
end