Class: GenericLexer
- Inherits:
-
Object
- Object
- GenericLexer
- Defined in:
- lib/vertigo/generic_lexer.rb
Direct Known Subclasses
Instance Method Summary collapse
- #get_token ⇒ Object
- #ignore(pattern) ⇒ Object
-
#initialize ⇒ GenericLexer
constructor
A new instance of GenericLexer.
- #keyword(str) ⇒ Object
- #next_token ⇒ Object
- #open(code) ⇒ Object
- #position ⇒ Object
- #token(hash) ⇒ Object
- #tokenize(code) ⇒ Object
Constructor Details
#initialize ⇒ GenericLexer
Returns a new instance of GenericLexer.
6 7 8 9 |
# File 'lib/vertigo/generic_lexer.rb', line 6

# Builds a lexer pre-seeded with one rule: a bare newline is
# recognized as a :newline token. Further rules are added via
# #token, #keyword and #ignore.
def initialize
  @rules = [[:newline, /[\n]/]]
end
Instance Method Details
#get_token ⇒ Object
35 36 37 38 39 40 41 42 |
# File 'lib/vertigo/generic_lexer.rb', line 35

# Tries each rule in order against the scanner head and wraps the
# first match in a Vertigo::Token of [kind, lexeme, line/column].
# Raises when no rule matches the current input position.
def get_token
  linecol = position()
  @rules.each do |kind, rx|
    if (lexeme = @ssc.scan(rx))
      return Vertigo::Token.new([kind, lexeme, linecol])
    end
  end
  raise "lexing error line #{linecol.first} around : ...'#{@ssc.peek(5)}'... "
end
#ignore(pattern) ⇒ Object
11 12 13 |
# File 'lib/vertigo/generic_lexer.rb', line 11

# Registers a pattern whose matches are tagged :skip; #next_token
# discards such tokens.
def ignore(pattern)
  @rules.push([:skip, pattern])
end
#keyword(str) ⇒ Object
15 16 17 |
# File 'lib/vertigo/generic_lexer.rb', line 15

# Registers a keyword: matched case-insensitively on a word boundary,
# tokenized under the keyword's own symbol. Prepended so keywords win
# over generic identifier rules.
def keyword(str)
  rule = [str.to_sym, /#{str}\b/i]
  @rules.unshift(rule)
end
#next_token ⇒ Object
29 30 31 32 33 |
# File 'lib/vertigo/generic_lexer.rb', line 29 def next_token return [nil,nil,nil] if @ssc.empty? tok = get_token return (tok.is_a? :skip) ? next_token : tok end |
#open(code) ⇒ Object
24 25 26 27 |
# File 'lib/vertigo/generic_lexer.rb', line 24

# Resets lexer state for a new source string: fresh StringScanner
# over +code+ and the line counter back to zero.
def open(code)
  @line = 0
  @ssc = StringScanner.new(code)
end
#position ⇒ Object
44 45 46 47 48 49 50 |
# File 'lib/vertigo/generic_lexer.rb', line 44

# Reports the current [line, column] (both 1-based). On each new line
# (scanner at beginning-of-line) the line counter is bumped and the
# line's start offset remembered so the column can be derived.
def position
  if @ssc.bol?
    @line += 1
    @old_pos = @ssc.pos
  end
  column = @ssc.pos - @old_pos + 1
  [@line, column]
end
#token(hash) ⇒ Object
19 20 21 22 |
# File 'lib/vertigo/generic_lexer.rb', line 19

# Registers a rule from a one-pair hash, e.g. token(ident: /[a-z]+/):
# the key becomes the token kind, the value its pattern.
def token(hash)
  flat = hash.to_a.flatten
  @rules << flat.first(2)
end
#tokenize(code) ⇒ Object
52 53 54 55 56 57 58 59 60 |
# File 'lib/vertigo/generic_lexer.rb', line 52

# Lexes the whole of +code+: resets the scanner via #open, then pulls
# tokens with #next_token until the input is exhausted.
def tokenize(code)
  open(code)
  result = []
  result << next_token until @ssc.eos?
  result
end