Module: CTokenizer
Defined Under Namespace
Modules: Expression, Scoped, Sourced
Classes: CLexer, CPLexer, Error, Lexer, LexerBase, SkipMacros, Splitter
Constant Summary
collapse
- EOF_TOKEN =
A sentinel EOF token that behaves like a normal token, removing the need for an end-of-input special case.
[false, ''.freeze].freeze
- C_RESERVED =
c_reserved_symbol.dup
- CP_RESERVED =
cp_reserved_symbol.dup
Class Method Summary
collapse
Instance Method Summary
collapse
Class Method Details
.error(file, line, msg) ⇒ Object
.line_count(str) ⇒ Object
69
70
71
72
73
|
# File 'lib/caphir/ctokenizer.rb', line 69
# Count the line breaks in +str+: counts "\n" characters, and only when
# none are present falls back to counting "\r" (presumably to handle
# carriage-return-only line endings; CRLF input is already covered by
# the "\n" count).
def CTokenizer.line_count(str)
  newlines = str.count("\n")
  newlines.zero? ? str.count("\r") : newlines
end
|
Instance Method Details
#collect ⇒ Object
110
111
112
113
114
115
116
|
# File 'lib/caphir/ctokenizer.rb', line 110
# Pop every remaining token from the stream, yield each one to the
# block, and return the array of block results. Consumes the stream;
# relies on the host object's #empty? and #shift.
def collect
  results = []
  results << yield(self.shift) until self.empty?
  results
end
|
#each ⇒ Object
103
104
105
106
107
108
|
# File 'lib/caphir/ctokenizer.rb', line 103
# Pop every remaining token from the stream and yield it to the block.
# Consumes the stream and returns self; relies on the host object's
# #empty? and #shift.
def each
  yield(self.shift) until self.empty?
  self
end
|
#error(msg) ⇒ Object
75
76
77
|
# File 'lib/caphir/ctokenizer.rb', line 75
# Report an error message tagged with this tokenizer's current file and
# line; delegates to the module-level CTokenizer.error.
def error(msg)
  CTokenizer::error(file, line, msg)
end
|
#parse_error(token) ⇒ Object
87
88
89
|
# File 'lib/caphir/ctokenizer.rb', line 87
# Convenience wrapper: report a parse error for the given token via
# #error (which tags the message with file and line).
def parse_error(token)
  error("parse error on token: #{token}")
end
|
#to_a ⇒ Object
95
96
97
98
99
100
101
|
# File 'lib/caphir/ctokenizer.rb', line 95
# Drain the remaining tokens into a new Array. Consumes the stream;
# relies on the host object's #empty? and #shift.
def to_a
  tokens = []
  tokens << self.shift until self.empty?
  tokens
end
|
#token_error(token) ⇒ Object
79
80
81
|
# File 'lib/caphir/ctokenizer.rb', line 79
# Convenience wrapper: report an unrecognized token via #error (which
# tags the message with file and line).
def token_error(token)
  message = "unrecognized token: #{token}"
  self.error(message)
end
|
#unmatched_error(token) ⇒ Object
83
84
85
|
# File 'lib/caphir/ctokenizer.rb', line 83
# Convenience wrapper: report an unmatched delimiter (e.g. a bracket or
# quote) via #error (which tags the message with file and line).
def unmatched_error(token)
  error("unmatched '#{token}'")
end
|
#warning(msg) ⇒ Object
91
92
93
|
# File 'lib/caphir/ctokenizer.rb', line 91
# Emit a warning via Kernel#warn, prefixed with "file:line: " when a
# file name is known, otherwise just "line: ".
def warning(msg)
  location = file ? "#{file}:#{line}" : line.to_s
  warn "#{location}: #{msg}"
end
|