Class: LangScan::C::Tokenizer

Inherits: Data < Object
Defined in:
ext/langscan/c/c/c.c

Instance Method Summary collapse

Constructor Details

#initialize(user_data) ⇒ Object



70
71
72
73
74
75
76
77
78
79
80
81
82
# File 'ext/langscan/c/c/c.c', line 70

/*
 * Tokenizer#initialize(user_data) -> self
 *
 * Wraps a freshly built langscan C tokenizer around +self+.
 * +user_data+ is coerced to a String; a frozen snapshot of it plus a
 * read offset (starting at 0) are packed into a two-element Ruby array
 * that user_read_str consumes as its cursor state.
 */
static VALUE tokenizer_initialize(VALUE self, VALUE user_data)
{
  user_read_t user_read;
  langscan_c_tokenizer_t *tokenizer;
  /* Type-checks that self really wraps a langscan_c_tokenizer_t;
   * the fetched pointer itself is not used here. */
  Data_Get_Struct(self, langscan_c_tokenizer_t, tokenizer);
  StringValue(user_data);  /* coerce via to_str; raises TypeError otherwise */
  user_read = user_read_str;
  /* [frozen copy of the source string, current byte offset] */
  user_data = rb_ary_new3(2, rb_str_new4(user_data), INT2FIX(0));
  /* NOTE(review): zeroing klass hides the state array from Ruby-land
   * (ObjectSpace etc.) while keeping it GC-reachable through the data
   * pointer only if something marks it -- presumably the tokenizer's
   * mark function does; verify against the allocator in this file. */
  RBASIC(user_data)->klass = 0;
  DATA_PTR(self) = langscan_c_make_tokenizer(user_read, (void *)user_data);
  return self;
}

Instance Method Details

#closeObject



107
108
109
110
111
112
113
114
115
# File 'ext/langscan/c/c/c.c', line 107

/*
 * Tokenizer#close -> nil
 *
 * Releases the underlying C tokenizer. Calling close on an already
 * closed (or EOF-exhausted) tokenizer is a harmless no-op.
 */
static VALUE tokenizer_close(VALUE self)
{
  langscan_c_tokenizer_t *t;
  Data_Get_Struct(self, langscan_c_tokenizer_t, t);
  if (t != NULL) {
    /* Detach the pointer from the wrapper first so the object never
     * holds a dangling reference, then free the tokenizer itself. */
    DATA_PTR(self) = NULL;
    langscan_c_free_tokenizer(t);
  }
  return Qnil;
}

#get_tokenObject



84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
# File 'ext/langscan/c/c/c.c', line 84

/*
 * Tokenizer#get_token -> Array | nil
 *
 * Pulls the next token from the C tokenizer. Returns nil once the
 * stream is exhausted (freeing the tokenizer on EOF) or if the
 * tokenizer was already closed. Otherwise returns an 8-element array:
 *   [symbol, text,
 *    beg_lineno, beg_columnno, beg_byteno,
 *    end_lineno, end_columnno, end_byteno]
 */
static VALUE tokenizer_get_token(VALUE self)
{
  langscan_c_tokenizer_t *t;
  langscan_c_token_t tok;

  Data_Get_Struct(self, langscan_c_tokenizer_t, t);
  if (t == NULL) {
    return Qnil;  /* already closed */
  }

  tok = langscan_c_get_token(t);
  if (tok == langscan_c_eof) {
    /* End of input: detach before freeing so self never wraps a
     * stale pointer, mirroring Tokenizer#close. */
    DATA_PTR(self) = NULL;
    langscan_c_free_tokenizer(t);
    return Qnil;
  }

  return rb_ary_new3(8,
    token_symbol_list[tok],
    rb_str_new(langscan_c_curtoken_text(t), langscan_c_curtoken_leng(t)),
    INT2NUM(langscan_c_curtoken_beg_lineno(t)),
    INT2NUM(langscan_c_curtoken_beg_columnno(t)),
    INT2NUM(langscan_c_curtoken_beg_byteno(t)),
    INT2NUM(langscan_c_curtoken_end_lineno(t)),
    INT2NUM(langscan_c_curtoken_end_columnno(t)),
    INT2NUM(langscan_c_curtoken_end_byteno(t)));
}