Class: LangScan::Scheme::Tokenizer
- Defined in:
- ext/langscan/scheme/scheme/scheme.c
Instance Method Summary
- #close ⇒ Object
- #get_token ⇒ Object
Constructor Details
#initialize(user_data) ⇒ Object
# File 'ext/langscan/scheme/scheme/scheme.c', line 70
static VALUE tokenizer_initialize(VALUE self, VALUE user_data)
{
    VALUE tmp;
    user_read_t user_read;
    langscan_scheme_tokenizer_t *tokenizer;
    Data_Get_Struct(self, langscan_scheme_tokenizer_t, tokenizer);
    StringValue(user_data);
    user_read = user_read_str;
    /* pack a frozen copy of the source string and a read offset for user_read_str */
    user_data = rb_ary_new3(2, rb_str_new4(user_data), INT2FIX(0));
    /* hide the internal array from Ruby-level code */
    RBASIC(user_data)->klass = 0;
    DATA_PTR(self) = langscan_scheme_make_tokenizer(user_read, (void *)user_data);
    return self;
}
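The constructor takes the source text to scan as a Ruby string; internally it keeps a frozen copy of that string together with a read offset. A minimal construction sketch, assuming the extension has already been loaded (the require path below is an assumption, not taken from this file):

  # require 'langscan/scheme'   # assumed load path
  source = "(define (square x) (* x x))"
  tokenizer = LangScan::Scheme::Tokenizer.new(source)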
Instance Method Details
#close ⇒ Object
# File 'ext/langscan/scheme/scheme/scheme.c', line 107
static VALUE tokenizer_close(VALUE self)
{
    langscan_scheme_tokenizer_t *tokenizer;
    Data_Get_Struct(self, langscan_scheme_tokenizer_t, tokenizer);
    if (tokenizer == NULL) { return Qnil; }
    DATA_PTR(self) = NULL;
    langscan_scheme_free_tokenizer(tokenizer);
    return Qnil;
}
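Closing frees the underlying C tokenizer and clears the wrapped pointer, so the NULL guard makes a repeated #close (or a later #get_token) a harmless no-op. A short sketch of that behaviour:

  tokenizer.close   # frees the underlying C tokenizer, returns nil
  tokenizer.close   # safe: pointer is already NULL, returns nil again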
#get_token ⇒ Object
# File 'ext/langscan/scheme/scheme/scheme.c', line 84
static VALUE tokenizer_get_token(VALUE self)
{
    langscan_scheme_tokenizer_t *tokenizer;
    langscan_scheme_token_t token;
    Data_Get_Struct(self, langscan_scheme_tokenizer_t, tokenizer);
    if (tokenizer == NULL) { return Qnil; }
    token = langscan_scheme_get_token(tokenizer);
    if (token == langscan_scheme_eof) {
        /* end of input: free the underlying tokenizer and report nil */
        DATA_PTR(self) = NULL;
        langscan_scheme_free_tokenizer(tokenizer);
        return Qnil;
    }
    return rb_ary_new3(8,
        token_symbol_list[token],
        rb_str_new(langscan_scheme_curtoken_text(tokenizer), langscan_scheme_curtoken_leng(tokenizer)),
        INT2NUM(langscan_scheme_curtoken_beg_lineno(tokenizer)),
        INT2NUM(langscan_scheme_curtoken_beg_columnno(tokenizer)),
        INT2NUM(langscan_scheme_curtoken_beg_byteno(tokenizer)),
        INT2NUM(langscan_scheme_curtoken_end_lineno(tokenizer)),
        INT2NUM(langscan_scheme_curtoken_end_columnno(tokenizer)),
        INT2NUM(langscan_scheme_curtoken_end_byteno(tokenizer)));
}
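Each call returns one token as an 8-element array of [token type symbol, token text, beginning line, beginning column, beginning byte offset, ending line, ending column, ending byte offset], and nil once end of input is reached (at which point the underlying tokenizer is freed). A usage sketch based on that return shape; the variable names are illustrative only:

  while (token = tokenizer.get_token)
    type, text, beg_line, beg_col, beg_byte, end_line, end_col, end_byte = token
    puts "#{type}: #{text.inspect} at #{beg_line}:#{beg_col}"
  end
  # get_token returned nil: end of input, tokenizer already freed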