Class: Sparkql::Lexer

Inherits:
StringScanner
  • Object
Includes:
Token
Defined in:
lib/sparkql/lexer.rb

Constant Summary

Constants included from Token

Token::BOOLEAN, Token::CHARACTER, Token::CONJUNCTIONS, Token::CUSTOM_FIELD, Token::DATE, Token::DATETIME, Token::DECIMAL, Token::EQUALITY_OPERATORS, Token::INTEGER, Token::KEYWORD, Token::LPAREN, Token::NEWLINE, Token::NULL, Token::OPERATORS, Token::RANGE_OPERATOR, Token::RPAREN, Token::SPACE, Token::STANDARD_FIELD, Token::TIME, Token::UNARY_CONJUNCTIONS

Instance Attribute Summary

Instance Method Summary

Constructor Details

#initialize(str) ⇒ Lexer

Returns a new instance of Lexer.



# File 'lib/sparkql/lexer.rb', line 8

def initialize(str)
  str.freeze
  super(str, false) # DO NOT dup str
  @level = 0
  @block_group_identifier = 0
  @expression_count = 0
end
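
Example: constructing a lexer (a minimal sketch; assumes the gem loads via require 'sparkql', and the filter string is hypothetical):

  require 'sparkql'

  lexer = Sparkql::Lexer.new("City Eq 'Fargo'")
  # the input string is frozen and scanned in place; level,
  # block_group_identifier and expression_count start at 0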

Instance Attribute Details

#block_group_identifier ⇒ Object

Returns the value of attribute block_group_identifier.



# File 'lib/sparkql/lexer.rb', line 4

def block_group_identifier
  @block_group_identifier
end

#current_token_value ⇒ Object (readonly)

Returns the value of attribute current_token_value.



# File 'lib/sparkql/lexer.rb', line 6

def current_token_value
  @current_token_value
end

#last_field ⇒ Object (readonly)

Returns the value of attribute last_field.



# File 'lib/sparkql/lexer.rb', line 6

def last_field
  @last_field
end

#level ⇒ Object

Returns the value of attribute level.



# File 'lib/sparkql/lexer.rb', line 4

def level
  @level
end

#token_index ⇒ Object (readonly)

Returns the value of attribute token_index.



# File 'lib/sparkql/lexer.rb', line 6

def token_index
  @token_index
end

Instance Method Details

#check_keywords(value) ⇒ Object



# File 'lib/sparkql/lexer.rb', line 89

def check_keywords(value)
  result = check_reserved_words(value)
  if result.first == :UNKNOWN
    result = [:KEYWORD,value]
  end
  result
end
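
Example (a sketch of the fallthrough behavior; assumes "Eq" appears in Token::OPERATORS):

  lexer = Sparkql::Lexer.new("")
  lexer.check_keywords("Eq")   # => [:OPERATOR, "Eq"]   a reserved word wins
  lexer.check_keywords("now")  # => [:KEYWORD, "now"]   anything unknown becomes a KEYWORD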

#check_reserved_words(value) ⇒ Object



# File 'lib/sparkql/lexer.rb', line 65

def check_reserved_words(value)
  u_value = value.capitalize
  if OPERATORS.include?(u_value)
    [:OPERATOR,u_value]
  elsif RANGE_OPERATOR == u_value
    [:RANGE_OPERATOR,u_value]
  elsif CONJUNCTIONS.include?(u_value)
    [:CONJUNCTION,u_value]
  elsif UNARY_CONJUNCTIONS.include?(u_value)
    [:UNARY_CONJUNCTION,u_value]
  else
    [:UNKNOWN, "ERROR: '#{self.string}'"]
  end
end
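
Example (a sketch of the capitalization-based lookup; assumes "And" is in Token::CONJUNCTIONS and "Not" is in Token::UNARY_CONJUNCTIONS):

  lexer = Sparkql::Lexer.new("")
  lexer.check_reserved_words("and")  # => [:CONJUNCTION, "And"]
  lexer.check_reserved_words("not")  # => [:UNARY_CONJUNCTION, "Not"]
  lexer.check_reserved_words("xyz")  # => [:UNKNOWN, "ERROR: ''"]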

#check_standard_fields(value) ⇒ Object



# File 'lib/sparkql/lexer.rb', line 80

def check_standard_fields(value)
  result = check_reserved_words(value)
  if result.first == :UNKNOWN
    @last_field = value
    result = [:STANDARD_FIELD,value]
  end
  result
end
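
Example (a sketch; the field name is hypothetical):

  lexer = Sparkql::Lexer.new("")
  lexer.check_standard_fields("City")  # => [:STANDARD_FIELD, "City"]
  lexer.last_field                     # => "City"
  lexer.check_standard_fields("Eq")    # => [:OPERATOR, "Eq"]  reserved words are not treated as fields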

#leveldown ⇒ Object



# File 'lib/sparkql/lexer.rb', line 102

def leveldown
  @level -= 1
end

#levelup ⇒ Object



# File 'lib/sparkql/lexer.rb', line 97

def levelup
  @level += 1
  @block_group_identifier += 1
end
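
Example (a sketch of the nesting bookkeeping; #shift calls levelup when it scans an LPAREN, while the matching leveldown is left to the parser):

  lexer = Sparkql::Lexer.new("(City Eq 'Fargo')")
  lexer.shift                    # scans the opening paren
  lexer.level                    # => 1
  lexer.block_group_identifier   # => 1
  lexer.leveldown
  lexer.level                    # => 0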

#literal(symbol, value) ⇒ Object



# File 'lib/sparkql/lexer.rb', line 106

def literal(symbol, value)
  node = {
    :type => symbol.to_s.downcase.to_sym,
    :value => value
  }
  [symbol, node]
end
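
Example (a sketch of the node shape returned for a literal token):

  lexer = Sparkql::Lexer.new("")
  lexer.literal(:INTEGER, "10")
  # => [:INTEGER, {:type => :integer, :value => "10"}]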

#shift ⇒ Object

Look up the next matching token.

TODO: the old implementation deferred value type detection and conversion to a later stage; this could be performed at parse time instead.



# File 'lib/sparkql/lexer.rb', line 20

def shift
  @token_index = self.pos

  token = case
    when @current_token_value = scan(SPACE)
      [:SPACE, @current_token_value]
    when @current_token_value = scan(LPAREN)
      levelup
      [:LPAREN, @current_token_value]
    when @current_token_value = scan(RPAREN)
      # leveldown: do this after parsing group
      [:RPAREN, @current_token_value]
    when @current_token_value = scan(/\,/)
      [:COMMA,@current_token_value]
    when @current_token_value = scan(NULL)
      literal :NULL, "NULL"
    when @current_token_value = scan(STANDARD_FIELD)
      check_standard_fields(@current_token_value)
    when @current_token_value = scan(DATETIME)
      literal :DATETIME, @current_token_value
    when @current_token_value = scan(DATE)
      literal :DATE, @current_token_value
    when @current_token_value = scan(TIME)
      literal :TIME, @current_token_value
    when @current_token_value = scan(DECIMAL)
      literal :DECIMAL, @current_token_value
    when @current_token_value = scan(INTEGER)
      literal :INTEGER, @current_token_value
    when @current_token_value = scan(CHARACTER)
      literal :CHARACTER, @current_token_value
    when @current_token_value = scan(BOOLEAN)
      literal :BOOLEAN, @current_token_value
    when @current_token_value = scan(KEYWORD)
      check_keywords(@current_token_value)
    when @current_token_value = scan(CUSTOM_FIELD)
      [:CUSTOM_FIELD,@current_token_value]
    when eos?
      [false, false] # end of file, \Z don't work with StringScanner
    else
      [:UNKNOWN, "ERROR: '#{self.string}'"]
  end

  token.freeze
end
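
Example: a full tokenization loop (a usage sketch; the filter string and the exact token values shown are illustrative assumptions):

  lexer = Sparkql::Lexer.new("City Eq 'Fargo'")
  tokens = []
  loop do
    symbol, value = lexer.shift
    break unless symbol                                # [false, false] marks end of input
    tokens << [symbol, value] unless symbol == :SPACE  # skip whitespace tokens
  end
  # tokens might look like:
  # [[:STANDARD_FIELD, "City"],
  #  [:OPERATOR, "Eq"],
  #  [:CHARACTER, {:type => :character, :value => "'Fargo'"}]]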