37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
|
# File 'lib/proc_source.rb', line 37
# Recovers the textual source of +proc+ by re-lexing the file/line it was
# defined at (obtained from +proc.source_descriptor+) and walking the token
# stream with a small state machine:
#
#   :before_constructor -> looking for `Proc.new`, `proc` or `lambda`
#   :before_code        -> constructor seen; waiting for the opening `{`/`do`
#   :in_code            -> inside the block; tracking nesting until it closes
#   :check_more         -> block closed; re-scan the rest of the line — if a
#                          second constructor appears on the same line the
#                          match is ambiguous and we bail out (plain return)
#   :done               -> end of line reached after a single match
#
# On success the extracted block body (the text between the `{`/`do` and its
# matching `}`/`end`) is assigned to +proc.source+; on failure or ambiguity
# +proc.source+ is left untouched.
#
# NOTE(review): RubyLex/RubyToken are legacy irb internals (removed from the
# stdlib in modern Rubies) — presumably this targets Ruby 1.8/1.9; verify.
def handle(proc)
filename, line = proc.source_descriptor
# get_lines presumably returns the file's lines starting at `line`; nil
# (e.g. unreadable file) degrades to [] so the lexer just sees "".
lines = get_lines(filename, line) || []
lexer = RubyLex.new
lexer.set_input(StringIO.new(lines.join))
state = :before_constructor
# Starts at 1: the proc's own opening `{`/`do` is consumed by the
# :before_code -> :in_code transition below and never counted here.
nesting_level = 1
start_token, end_token = nil, nil
# NOTE(review): `found` is assigned but never read — dead variable.
found = false
while token = lexer.token
if [:before_constructor, :check_more].include?(state)
# Newline while in :check_more: the rest of the definition line held no
# second constructor, so the single match stands — proceed to extract.
if token.is_a?(RubyToken::TkNL) and state == :check_more
state = :done
break
end
# Match `Proc.new`. RubyToken nodes keep their text in @name with no
# public reader, hence the instance_variable_get.
if token.is_a?(RubyToken::TkCONSTANT) and
token.instance_variable_get(:@name) == "Proc"
# NOTE(review): each lexer.token call below CONSUMES a token; when the
# token after `Proc` is not a dot, that token is silently dropped from
# the scan — presumably acceptable for this heuristic, but verify.
if lexer.token.is_a?(RubyToken::TkDOT)
method = lexer.token
if method.is_a?(RubyToken::TkIDENTIFIER) and
method.instance_variable_get(:@name) == "new"
# A second constructor on the same line (:check_more) makes the
# match ambiguous: give up without assigning proc.source.
unless state == :check_more
state = :before_code
else
return
end
end
end
# Match a bare `proc` / `lambda` call.
elsif token.is_a?(RubyToken::TkIDENTIFIER) and
%w{proc lambda}.include?(token.instance_variable_get(:@name))
unless state == :check_more
state = :before_code
else
return
end
end
elsif state == :before_code
# Opening delimiter of the proc body. TkfLBRACE is a block-opening `{`
# (as opposed to TkLBRACE, a hash/expression brace).
if token.is_a?(RubyToken::TkfLBRACE) or token.is_a?(RubyToken::TkDO)
state = :in_code
start_token = token
end
elsif state == :in_code
if token.is_a?(RubyToken::TkRBRACE) or token.is_a?(RubyToken::TkEND)
nesting_level -= 1
if nesting_level == 0
# This closer matches the proc's opener; re-scan the rest of the
# line for a competing constructor before extracting.
end_token = token
state = :check_more
end
# Every construct terminated by `}` or `end` bumps the nesting level so
# its closer is not mistaken for the proc's own.
elsif token.is_a?(RubyToken::TkfLBRACE) or token.is_a?(RubyToken::TkDO) or
token.is_a?(RubyToken::TkBEGIN) or token.is_a?(RubyToken::TkCASE) or
token.is_a?(RubyToken::TkCLASS) or token.is_a?(RubyToken::TkDEF) or
token.is_a?(RubyToken::TkFOR) or token.is_a?(RubyToken::TkIF) or
token.is_a?(RubyToken::TkMODULE) or token.is_a?(RubyToken::TkUNLESS) or
token.is_a?(RubyToken::TkUNTIL) or token.is_a?(RubyToken::TkWHILE) or
token.is_a?(RubyToken::TklBEGIN)
nesting_level += 1
end
end
end
if start_token and end_token
# line_no is 1-based; convert to 0-based indexes into `lines`.
start_line, end_line = start_token.line_no - 1, end_token.line_no - 1
source = lines[start_line .. end_line]
# Strip everything up to and including the opening delimiter; `do` is two
# characters wide, hence the extra +1 past char_no.
start_offset = start_token.char_no
start_offset += 1 if start_token.is_a?(RubyToken::TkDO)
# Negative, end-relative offset of the closing delimiter. Computed BEFORE
# the first slice! so it appears to stay valid even when source.first and
# source.last are the same String object (single-line proc) — the front
# slice does not change distances measured from the string's end.
end_offset = -(source.last.length - end_token.char_no)
source.first.slice!(0 .. start_offset)
source.last.slice!(end_offset .. -1)
# Trim a single leading/trailing space per line of the recovered body
# (^ and $ match at every line boundary in Ruby regexps).
proc.source = source.join.gsub(/^ | $/, "")
end
end
|