Class: Antlr4::Runtime::BufferedTokenStream

Inherits:
TokenStream show all
Defined in:
lib/antlr4/runtime/buffered_token_stream.rb

Direct Known Subclasses

CommonTokenStream

Constant Summary

Constants inherited from IntStream

IntStream::EOF, IntStream::UNKNOWN_SOURCE_NAME

Instance Attribute Summary collapse

Instance Method Summary collapse

Constructor Details

#initialize(token_source) ⇒ BufferedTokenStream

Returns a new instance of BufferedTokenStream.

Raises:

  • (NilPointerException)


7
8
9
10
11
12
13
14
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 7

def initialize(token_source)
  # A token source is mandatory; fail fast with a descriptive error.
  if token_source.nil?
    raise NilPointerException, 'token_source cannot be nil'
  end

  @token_source = token_source
  @tokens = []          # tokens buffered from the source so far
  @ptr = -1             # current token index; -1 until first lazy_init
  @fetched_eof = false  # true once EOF has been pulled from the source
end

Instance Attribute Details

#token_sourceObject (readonly)

Returns the value of attribute token_source.



16
17
18
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 16

def token_source
  # @return [TokenSource] the source this stream buffers tokens from
  @token_source
end

#tokensObject (readonly)

Returns the value of attribute tokens.



165
166
167
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 165

def tokens
  # @return [Array<Token>] all tokens buffered so far (the live internal
  #   array, not a copy — callers must not mutate it)
  @tokens
end

Instance Method Details

#adjust_seek_index(i) ⇒ Object



145
146
147
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 145

def adjust_seek_index(i)
  # Hook for subclasses to move a seek target; this base implementation
  # returns i unchanged.
  i
end

#consumeObject



43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 43

def consume
  # Advance the cursor by one token. Raises when already at EOF.
  #
  # The EOF guard (la(1) == EOF) can be skipped whenever @ptr already
  # indexes a fetched token other than a fetched EOF.
  skip_eof_check =
    if @ptr < 0
      false # not yet initialized
    elsif @fetched_eof
      # Last element of @tokens is EOF; safe unless @ptr sits on it.
      @ptr < @tokens.length - 1
    else
      # No EOF buffered yet; safe for any already-fetched index.
      @ptr < @tokens.length
    end

  if !skip_eof_check && la(1) == EOF
    raise IllegalStateException, 'cannot consume EOF'
  end

  @ptr = adjust_seek_index(@ptr + 1) if sync(@ptr + 1)
end

#fetch(n) ⇒ Object



75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 75

def fetch(n)
  # Pull up to n tokens from the source into the buffer.
  # @return [Integer] how many tokens were actually fetched (0 once EOF
  #   has already been buffered)
  return 0 if @fetched_eof

  (1..n).each do |count|
    token = @token_source.next_token
    # Keep the buffered index in sync for tokens that allow mutation.
    token.token_index = @tokens.length if token.is_a? WritableToken
    @tokens << token
    if token.type == Token::EOF
      @fetched_eof = true
      return count
    end
  end

  n
end

#fillObject



326
327
328
329
330
331
332
333
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 326

def fill
  # Eagerly pull every remaining token from the source into the buffer,
  # in chunks of 1000.
  lazy_init
  loop do
    # fetch returns fewer than requested only when EOF was reached.
    break if fetch(1000) < 1000
  end
end

#filter_for_channel(from, to, channel) ⇒ Object



269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 269

def filter_for_channel(from, to, channel)
  # Collect buffered tokens in [from, to] matching +channel+.
  # channel == -1 means "any channel other than the default".
  # @return [Array<Token>, nil] nil when no token matches
  hidden = (from..to).each_with_object([]) do |idx, acc|
    token = @tokens[idx]
    if channel == -1
      acc << token if token.channel != Lexer::DEFAULT_TOKEN_CHANNEL
    elsif token.channel == channel
      acc << token
    end
  end
  hidden.empty? ? nil : hidden
end

#get(i) ⇒ Object



93
94
95
96
97
98
99
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 93

def get(i)
  # Bounds-checked random access into the buffered token list.
  #
  # @param i [Integer] absolute token index
  # @return [Token] the token at index i
  # @raise [IndexOutOfBoundsException] if i is outside 0...@tokens.length
  if i < 0 || i >= @tokens.length
    # FIX: the original concatenated an Integer into a String, which
    # raised TypeError instead of the intended exception. Interpolate.
    raise IndexOutOfBoundsException,
          "token index #{i} out of range 0..#{@tokens.length - 1}"
  end

  @tokens[i]
end

#get_list(start, stop) ⇒ Object



101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 101

def get_list(start, stop)
  # Buffered tokens in [start, stop], never crossing an EOF token.
  # @return [Array<Token>, nil] nil when either bound is negative
  return nil if start < 0 || stop < 0

  lazy_init
  last = [stop, @tokens.length - 1].min
  subset = []
  (start..last).each do |idx|
    token = @tokens[idx]
    break if token.type == Token::EOF

    subset << token
  end
  subset
end

#get_tokens2(start, stop, ttype) ⇒ Object



187
188
189
190
191
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 187

def get_tokens2(start, stop, ttype)
  # Convenience wrapper: filter a token range for a single token type.
  tokens1(start, stop, Set[ttype])
end

#hidden_tokens_to_left(token_index, channel) ⇒ Object



244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 244

def hidden_tokens_to_left(token_index, channel)
  # Tokens on +channel+ between the previous on-channel token and
  # token_index (exclusive on both ends).
  #
  # @return [Array<Token>, nil] nil when there is no hidden run
  # @raise [IndexOutOfBoundsException] when token_index is out of range
  lazy_init
  if token_index < 0 || token_index >= tokens.size
    # FIX: the original did Integer + String concatenation, which raised
    # TypeError instead of the intended exception. Interpolate.
    raise IndexOutOfBoundsException,
          "#{token_index} not in 0..#{@tokens.length - 1}"
  end

  # Obviously no tokens can appear before the first token.
  return nil if token_index == 0

  prev_on_channel = previous_token_on_channel(token_index - 1, Lexer::DEFAULT_TOKEN_CHANNEL)
  # The token directly to the left is on-channel: no hidden run exists.
  return nil if prev_on_channel == token_index - 1

  # If none on-channel to the left, prev_on_channel == -1, so from == 0.
  from = prev_on_channel + 1
  to = token_index - 1

  filter_for_channel(from, to, channel)
end

#hidden_tokens_to_left1(token_index) ⇒ Object



265
266
267
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 265

def hidden_tokens_to_left1(token_index)
  # Channel -1 selects every token that is off the default channel.
  hidden_tokens_to_left(token_index, -1)
end

#hidden_tokens_to_right(token_index, channel) ⇒ Object



226
227
228
229
230
231
232
233
234
235
236
237
238
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 226

def hidden_tokens_to_right(token_index, channel)
  # Tokens on +channel+ between token_index (exclusive) and the next
  # on-channel token (inclusive bound computed below).
  #
  # @return [Array<Token>, nil] nil when no token matches
  # @raise [IndexOutOfBoundsException] when token_index is out of range
  lazy_init
  if token_index < 0 || token_index >= tokens.size
    # FIX: the original did Integer + String concatenation, which raised
    # TypeError instead of the intended exception. Interpolate.
    raise IndexOutOfBoundsException,
          "#{token_index} not in 0..#{@tokens.length - 1}"
  end

  next_on_channel = next_token_on_channel(token_index + 1, Lexer::DEFAULT_TOKEN_CHANNEL)
  from = token_index + 1
  # If none on-channel to the right, next_on_channel == -1: take the rest.
  to = next_on_channel == -1 ? size - 1 : next_on_channel

  filter_for_channel(from, to, channel)
end

#hidden_tokens_to_right2(token_index) ⇒ Object



240
241
242
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 240

def hidden_tokens_to_right2(token_index)
  # Channel -1 selects every token that is off the default channel.
  hidden_tokens_to_right(token_index, -1)
end

#indexObject



18
19
20
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 18

def index
  # @return [Integer] index of the current token (-1 before initialization)
  @ptr
end

#la(i) ⇒ Object



118
119
120
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 118

def la(i)
  # Type of the lookahead token at offset i (see #lt).
  token = lt(i)
  token.type
end

#lazy_initObject



149
150
151
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 149

def lazy_init
  # Run first-time setup exactly once, on first access.
  return unless @ptr == -1

  setup
end

#lb(k) ⇒ Object



122
123
124
125
126
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 122

def lb(k)
  # Token k positions behind the cursor, or nil when out of range.
  idx = @ptr - k
  return nil if idx < 0

  @tokens[idx]
end

#lt(k) ⇒ Object



128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 128

def lt(k)
  # Lookahead token at offset k from the cursor (k == 1 is the current
  # token). Negative k looks backwards via #lb; k == 0 yields nil.
  #
  # @return [Token, nil]
  lazy_init
  return nil if k == 0

  return lb(-k) if k < 0

  i = @ptr + k - 1
  sync(i)
  if i >= @tokens.length
    # Past the end: EOF must be the last buffered token.
    # FIX: the original called Java-style @tokens.get(...), which is a
    # NoMethodError on a Ruby Array.
    return @tokens[-1]
  end

  @tokens[i]
end

#markObject



22
23
24
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 22

def mark
  # Marks are free for a buffered stream; the returned marker value is
  # ignored by #release.
  0
end

#next_token_on_channel(i, channel) ⇒ Object



193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 193

def next_token_on_channel(i, channel)
  # Index of the first token at or after i on +channel+; stops at (and
  # returns the index of) an EOF token. i past the end yields size - 1.
  sync(i)
  return size - 1 if i >= size

  loop do
    token = @tokens[i]
    break if token.channel == channel || token.type == Token::EOF

    i += 1
    sync(i)
  end

  i
end

#previous_token_on_channel(i, channel) ⇒ Object



209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 209

def previous_token_on_channel(i, channel)
  # Index of the first token at or before i on +channel+; an EOF token
  # matches any channel. Returns -1 when no such token exists.
  sync(i)
  if i >= size
    # The EOF token is on every channel.
    return size - 1
  end

  idx = i
  while idx >= 0
    token = @tokens[idx]
    return idx if token.type == Token::EOF || token.channel == channel

    idx -= 1
  end

  idx
end

#release(marker) ⇒ Object



26
27
28
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 26

def release(marker)
  # Intentionally a no-op: #mark always returns 0, so there is no
  # marker state to release.
  ;
end

#resetObject



30
31
32
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 30

def reset
  # Rewind the stream to the first token.
  seek(0)
end

#seek(index) ⇒ Object



34
35
36
37
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 34

def seek(index)
  # Move the cursor, letting the subclass hook adjust the target index.
  lazy_init
  @ptr = adjust_seek_index(index)
end

#set_token_source(tokenSource) ⇒ Object



158
159
160
161
162
163
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 158

def set_token_source(tokenSource)
  # Point the stream at a new source, discarding all buffered state.
  # (@tokens is cleared in place so external aliases stay valid.)
  @fetched_eof = false
  @ptr = -1
  @tokens.clear
  @token_source = tokenSource
end

#setupObject



153
154
155
156
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 153

def setup
  # Prime the buffer with the first token and position the cursor on it.
  sync(0)
  @ptr = adjust_seek_index(0)
end

#sizeObject



39
40
41
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 39

def size
  # @return [Integer] number of tokens buffered so far
  @tokens.length
end

#source_nameObject



286
287
288
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 286

def source_name
  # Delegate to the underlying token source.
  @token_source.get_source_name
end

#sync(i) ⇒ Object



64
65
66
67
68
69
70
71
72
73
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 64

def sync(i)
  # Ensure the buffer holds a token at index i, fetching as needed.
  # @return [Boolean] true when a token exists at index i after syncing
  needed = i - @tokens.length + 1
  return true unless needed > 0

  fetch(needed) >= needed
end

#textObject



290
291
292
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 290

def text
  # Concatenated text of the entire buffered token stream.
  text2(Interval.of(0, size - 1))
end

#text2(interval) ⇒ Object



294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 294

def text2(interval)
  # Source text for the token range covered by +interval+, stopping
  # before any EOF token. '' when either bound is negative.
  first = interval.a
  last = interval.b
  return '' if first < 0 || last < 0

  fill
  last = @tokens.length - 1 if last >= @tokens.length

  result = ''
  (first..last).each do |idx|
    token = @tokens[idx]
    break if token.type == Token::EOF

    result << token.text
  end
  result
end

#text3(ctx) ⇒ Object



314
315
316
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 314

def text3(ctx)
  # Text covered by a rule context's source interval.
  text2(ctx.source_interval)
end

#text4(start, stop) ⇒ Object



318
319
320
321
322
323
324
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 318

def text4(start, stop)
  # Text between two tokens (inclusive); '' when either endpoint is nil.
  return '' if start.nil? || stop.nil?

  text2(Interval.of(start.token_index, stop.token_index))
end

#tokens1(start, stop, types = nil) ⇒ Object



167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
# File 'lib/antlr4/runtime/buffered_token_stream.rb', line 167

def tokens1(start, stop, types = nil)
  # Tokens in [start, stop] whose type is in +types+ (all types when
  # +types+ is nil).
  #
  # @return [Array<Token>, nil] nil for an inverted range or no matches
  # @raise [IndexOutOfBoundsException] when start/stop fall outside the buffer
  lazy_init
  if start < 0 || stop >= @tokens.length || stop < 0 || start >= @tokens.length
    # FIX: the original did String + Integer concatenation, which raised
    # TypeError instead of the intended exception. Interpolate.
    raise IndexOutOfBoundsException,
          "start #{start} or stop #{stop} not in 0..#{@tokens.length - 1}"
  end
  return nil if start > stop

  filtered = @tokens[start..stop].select do |t|
    types.nil? || types.include?(t.type)
  end
  filtered.empty? ? nil : filtered
end