Method: Antlr4::Runtime::ATNDeserializer#deserialize

Defined in:
lib/antlr4/runtime/atn_deserializer.rb

#deserialize(serialized_data) ⇒ Object



62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
# File 'lib/antlr4/runtime/atn_deserializer.rb', line 62

# Deserializes a serialized ATN (the code-point string emitted by the ANTLR
# tool) into a fully linked ATN object graph.
#
# @param serialized_data [String] serialized ATN data
# @return [ATN] the reconstructed ATN
# @raise [UnsupportedOperationException] if the serialized version or Uuid is
#   unsupported, or a precedence-rule end state cannot be identified
def deserialize(serialized_data)
  data = serialized_data.codepoints

  # Every value except the leading version word was shifted by +2 during
  # serialization (to avoid problematic code points); undo that shift.
  i = 1
  while i < data.length
    data[i] = data[i] - 2
    i += 1
  end

  p = 0
  version = data[p]
  p += 1
  if version != SERIALIZED_VERSION
    # NOTE: the inner format already yields the final message; a wrapping
    # sprintf would be redundant.
    reason = format("Could not deserialize ATN with version %d (expected %d).\n", version, SERIALIZED_VERSION)
    raise UnsupportedOperationException, reason
  end

  uuid = to_uuid(data, p)
  p += 8
  unless @@supported_uuids.include?(uuid)
    reason = format("Could not deserialize ATN with Uuid %s (expected %s or a legacy Uuid).\n", uuid, @@serialized_uuid)
    raise UnsupportedOperationException, reason
  end

  # Feature flags keyed off the serialization Uuid (legacy-format support).
  supports_precedence_predicates = feature_supported?(@@added_precedence_transitions, uuid)
  supports_lexer_actions = feature_supported?(@@added_lexer_actions, uuid)

  grammar_type = ATNType::VALUES[data[p]]
  p += 1
  max_token_type = data[p]
  p += 1
  atn = ATN.new(grammar_type, max_token_type)

  # --- States ---------------------------------------------------------
  # LOOP_END and BlockStart states reference other states by number; those
  # numbers are collected here and resolved after all states exist.
  loop_back_state_numbers = []
  end_state_numbers = []
  n_states = data[p]
  p += 1
  i = 0
  while i < n_states
    s_type = data[p]
    p += 1
    if s_type == ATNState::INVALID_TYPE
      # Placeholder keeps state numbers aligned with their array indices.
      atn.add_state(nil)
      i += 1
      next
    end

    rule_index = data[p]
    p += 1
    rule_index = -1 if rule_index == 0xFFFF

    s = state_factory(s_type, rule_index)
    if s_type == ATNState::LOOP_END
      loop_back_state_number = data[p]
      p += 1
      pair = OpenStruct.new
      pair.a = s
      pair.b = loop_back_state_number
      loop_back_state_numbers << pair
    elsif s.is_a? BlockStartState
      end_state_number = data[p]
      p += 1
      pair = OpenStruct.new
      pair.a = s
      pair.b = end_state_number
      end_state_numbers << pair
    end
    atn.add_state(s)
    i += 1
  end

  # Delay the assignment of loop back and end states until all states exist.
  i = 0
  while i < loop_back_state_numbers.length
    pair = loop_back_state_numbers[i]
    pair.a.loopback_state = atn.states[pair.b]
    i += 1
  end

  i = 0
  while i < end_state_numbers.length
    pair = end_state_numbers[i]
    pair.a.end_state = atn.states[pair.b]
    i += 1
  end

  num_non_greedy_states = data[p]
  p += 1
  i = 0
  while i < num_non_greedy_states
    state_number = data[p]
    p += 1
    atn.states[state_number].non_greedy = true
    i += 1
  end

  if supports_precedence_predicates
    num_precedence_states = data[p]
    p += 1
    i = 0
    while i < num_precedence_states
      state_number = data[p]
      p += 1
      atn.states[state_number].is_left_recursive_rule = true
      i += 1
    end
  end

  # --- Rules ----------------------------------------------------------
  nrules = data[p]
  p += 1
  atn.rule_to_token_type = [] if atn.grammar_type == ATNType::LEXER

  atn.rule_to_start_state = []
  i = 0
  while i < nrules
    s = data[p]
    p += 1
    start_state = atn.states[s]
    atn.rule_to_start_state[i] = start_state
    if atn.grammar_type == ATNType::LEXER
      token_type = data[p]
      p += 1
      token_type = Token::EOF if token_type == 0xFFFF

      atn.rule_to_token_type[i] = token_type

      # Legacy format carried a per-rule action index here; skip it.
      unless supports_lexer_actions
        @action_index_ignored = data[p]
        p += 1
      end
    end
    i += 1
  end

  atn.rule_to_stop_state = []
  i = 0
  while i < atn.states.length
    state = atn.states[i]
    unless state.is_a? RuleStopState
      i += 1
      next
    end

    stop_state = state
    atn.rule_to_stop_state[state.rule_index] = stop_state
    atn.rule_to_start_state[state.rule_index].stop_state = stop_state
    i += 1
  end

  # --- Modes ----------------------------------------------------------
  n_modes = data[p]
  p += 1
  i = 0
  while i < n_modes
    s = data[p]
    p += 1
    atn.mode_to_start_state << atn.states[s]
    i += 1
  end

  # --- Sets -----------------------------------------------------------
  sets = []

  p = deserialize_sets(data, p, sets, ATNDeserializer.unicode_deserializer(UnicodeDeserializingMode::UNICODE_BMP))

  if feature_supported?(@@added_unicode_smp, uuid)
    p = deserialize_sets(data, p, sets, ATNDeserializer.unicode_deserializer(UnicodeDeserializingMode::UNICODE_SMP))
  end

  # --- Edges ----------------------------------------------------------
  # Each edge is a fixed-width record: src, trg, type, arg1, arg2, arg3.
  n_edges = data[p]
  p += 1
  i = 0
  while i < n_edges
    src = data[p]
    trg = data[p + 1]
    ttype = data[p + 2]
    arg1 = data[p + 3]
    arg2 = data[p + 4]
    arg3 = data[p + 5]
    trans = edge_factory(atn, ttype, src, trg, arg1, arg2, arg3, sets)
    src_state = atn.states[src]
    src_state.add_transition(trans)
    p += 6
    i += 1
  end

  # Add an implicit epsilon "return" edge from each rule's stop state to the
  # follow state of every rule invocation.
  k = 0
  while k < atn.states.length
    state = atn.states[k]
    i = 0
    while i < state.number_of_transitions
      t = state.transition(i)
      unless t.is_a? RuleTransition
        i += 1
        next
      end

      rule_transition = t
      outermost_precedence_return = -1
      if atn.rule_to_start_state[rule_transition.target.rule_index].is_left_recursive_rule
        if rule_transition.precedence == 0
          outermost_precedence_return = rule_transition.target.rule_index
        end
      end

      return_transition = EpsilonTransition.new(rule_transition.follow_state, outermost_precedence_return)
      atn.rule_to_stop_state[rule_transition.target.rule_index].add_transition(return_transition)
      i += 1
    end
    k += 1
  end

  # Back-link block end states and loopback states to their partners.
  k = 0
  while k < atn.states.length
    state = atn.states[k]
    if state.is_a? BlockStartState
      raise IllegalStateException if state.end_state.nil?

      raise IllegalStateException unless state.end_state.start_state.nil?

      state.end_state.start_state = state
    end

    if state.is_a? PlusLoopbackState
      loopback_state = state
      i = 0
      while i < loopback_state.number_of_transitions
        target = loopback_state.transition(i).target
        if target.is_a? PlusBlockStartState
          target.loopback_state = loopback_state
        end
        i += 1
      end
    elsif state.is_a? StarLoopbackState
      loopback_state = state
      i = 0
      while i < loopback_state.number_of_transitions
        target = loopback_state.transition(i).target
        if target.is_a? StarLoopEntryState
          target.loopback_state = loopback_state
        end
        i += 1
      end
    end
    k += 1
  end

  # --- Decisions ------------------------------------------------------
  n_decisions = data[p]
  p += 1
  i = 1
  while i <= n_decisions
    s = data[p]
    p += 1
    dec_state = atn.states[s]
    atn.decision_to_state << dec_state
    dec_state.decision = i - 1
    i += 1
  end

  # --- Lexer actions --------------------------------------------------
  if atn.grammar_type == ATNType::LEXER
    if supports_lexer_actions
      atn._a = Array.new(data[p])
      p += 1
      i = 0
      while i < atn._a.length
        action_type = data[p]
        p += 1
        data1 = data[p]
        p += 1
        data1 = -1 if data1 == 0xFFFF

        data2 = data[p]
        p += 1
        data2 = -1 if data2 == 0xFFFF

        lexer_action = lexer_action_factory(action_type, data1, data2)

        atn._a[i] = lexer_action
        i += 1
      end
    else
      # Legacy format: synthesize LexerCustomActions from ActionTransitions
      # embedded in the state graph.
      legacy_lexer_actions = []
      k = 0
      while k < atn.states.length
        state = atn.states[k]
        i = 0
        while i < state.number_of_transitions
          transition = state.transition(i)
          unless transition.is_a? ActionTransition
            i += 1
            next
          end

          rule_index = transition.rule_index
          action_index = transition.action_index
          lexer_action = LexerCustomAction.new(rule_index, action_index)
          state.set_transition(i, ActionTransition.new(transition.target, rule_index, legacy_lexer_actions.length, false))
          legacy_lexer_actions << lexer_action
          i += 1
        end
        k += 1
      end

      atn._a = legacy_lexer_actions
    end
  end

  mark_precedence_decisions(atn)

  verify_atn(atn) if @deserialization_options.verify_atn?

  # --- Optional rule-bypass transitions (parser ATNs only) ------------
  if @deserialization_options.generate_rule_bypass_transitions? && atn.grammar_type == ATNType::PARSER
    atn.rule_to_token_type = []
    i = 0
    while i < atn.rule_to_start_state.length
      atn.rule_to_token_type[i] = atn.max_token_type + i + 1
      i += 1
    end

    # BUGFIX: the inner state scans below previously reused `i`, clobbering
    # the outer rule index (making `state.rule_index != i` compare against a
    # state index and leaving `i == atn.states.length` afterwards, so
    # `atn.rule_to_start_state[i]` was nil). They now use `state_idx`, and the
    # outer loop increments `i` explicitly.
    i = 0
    while i < atn.rule_to_start_state.length
      bypass_start = BasicBlockStartState.new
      bypass_start.rule_index = i
      atn.add_state(bypass_start)

      bypass_stop = BlockEndState.new
      bypass_stop.rule_index = i
      atn.add_state(bypass_stop)

      bypass_start.end_state = bypass_stop
      atn.define_decision_state(bypass_start)

      bypass_stop.start_state = bypass_start

      exclude_transition = nil
      if atn.rule_to_start_state[i].is_left_recursive_rule
        # Wrap from the beginning of the rule to the StarLoopEntryState.
        end_state = nil
        state_idx = 0
        while state_idx < atn.states.length
          state = atn.states[state_idx]
          if state.rule_index != i
            state_idx += 1
            next
          end

          unless state.is_a? StarLoopEntryState
            state_idx += 1
            next
          end

          maybe_loop_end_state = state.transition(state.number_of_transitions - 1).target
          unless maybe_loop_end_state.is_a? LoopEndState
            state_idx += 1
            next
          end

          if maybe_loop_end_state.epsilon_only_transitions && maybe_loop_end_state.transition(0).target.is_a?(RuleStopState)
            end_state = state
            break
          end
          state_idx += 1
        end

        if end_state.nil?
          raise UnsupportedOperationException, "Couldn't identify final state of the precedence rule prefix section."
        end

        exclude_transition = end_state.loopback_state.transition(0)
      else
        end_state = atn.rule_to_stop_state[i]
      end

      # All transitions leading to the rule start state lead to the new
      # bypass start state instead.
      state_idx = 0
      while state_idx < atn.states.length
        state = atn.states[state_idx]
        j = 0
        while j < state.transitions.length
          transition = state.transitions[j]
          if transition == exclude_transition
            j += 1
            next
          end

          transition.target = bypass_stop if transition.target == end_state
          j += 1
        end
        state_idx += 1
      end

      # All transitions leaving the rule start state lead to the new
      # bypass start state instead.
      while atn.rule_to_start_state[i].number_of_transitions > 0
        transition = atn.rule_to_start_state[i].remove_transition(atn.rule_to_start_state[i].number_of_transitions - 1)
        bypass_start.add_transition(transition)
      end

      # Link the new states.
      atn.rule_to_start_state[i].add_transition(EpsilonTransition.new(bypass_start))
      bypass_stop.add_transition(EpsilonTransition.new(end_state))

      match_state = BasicState.new
      atn.add_state(match_state)
      match_state.add_transition(AtomTransition.new(bypass_stop, atn.rule_to_token_type[i]))
      bypass_start.add_transition(EpsilonTransition.new(match_state))

      i += 1
    end

    verify_atn(atn) if @deserialization_options.verify_atn?
  end

  atn
end