Class: Proc

Inherits: Object

Defined in:
lib/core_extensions/proc.rb

Constant Summary

TLAMBEG = [:on_tlambeg, "{"]
TLAMBDA = [:on_tlambda, "->"]
LBRACE = [:on_lbrace, '{']
TOKEN_PAIRS =
  {LBRACE         => [:on_rbrace, '}'],
   [:on_kw, 'do'] => [:on_kw, 'end'],
   TLAMBDA        => [:on_rbrace, '}']}
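
These constants are Ripper token tuples (token type plus literal text). As an illustrative sketch of where they come from, lexing a stabby lambda produces something like the following (positions elided; newer Rubies may append lexer state to each entry):

require 'ripper'

Ripper.lex("-> { 1 }").map { |_pos, type, tok| [type, tok] }
# => [[:on_tlambda, "->"], [:on_sp, " "], [:on_tlambeg, "{"],
#     [:on_sp, " "], [:on_int, "1"], [:on_sp, " "], [:on_rbrace, "}"]]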

Class Method Summary

Instance Method Summary

Class Method Details

.from_source(prc_src) ⇒ Object

Raises:

  • (ArgumentError)


# File 'lib/core_extensions/proc.rb', line 96

def self.from_source(prc_src)
  raise ArgumentError unless prc_src.kind_of?(String)
  prc = eval(prc_src)
  prc.instance_variable_set(:@source, prc_src)
  prc
end
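
An illustrative usage sketch (the proc source string below is just an example; any valid proc literal works):

prc = Proc.from_source("proc { 1 + 1 }")
prc.call                             # => 2
prc.instance_variable_get(:@source)  # => "proc { 1 + 1 }"
Proc.from_source(:not_a_string)      # raises ArgumentError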

Instance Method Details

#_actually_starting_a_proc?(tokens, tok) ⇒ Boolean

Returns:

  • (Boolean)


# File 'lib/core_extensions/proc.rb', line 58

def _actually_starting_a_proc?(tokens, tok)
  return true if tokens.index(tok).eql?(0)
  look_back = tokens.slice(0..tokens.index(tok)-1)
  look_back.pop if look_back.last.try(:[], 1).eql? :on_sp
  if [:on_tlambeg, :on_tlambda].include?(tok[1])
    true
  else
    ![:on_comma, :on_lparen, :on_label].include?(look_back.last.try(:[], 1))
  end
end
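
A minimal standalone sketch of the lookback idea (not the library method itself): an opening brace preceded, ignoring whitespace, by a comma, an opening parenthesis, or a label is treated as the start of a hash literal rather than a block, while a lambda token always starts a proc.

require 'ripper'

def looks_like_block_opener?(tokens, idx)
  return true if idx.zero?
  prev = tokens[0...idx].reject { |_pos, type, _tok| type.eql?(:on_sp) }.last
  ![:on_comma, :on_lparen, :on_label].include?(prev && prev[1])
end

tokens = Ripper.lex("run(opts: { a: 1 }) { :block }")
brace_indexes = tokens.each_index.select { |i| tokens[i][1].eql?(:on_lbrace) }
brace_indexes.map { |i| looks_like_block_opener?(tokens, i) }
# => [false, true]   (the hash-literal "{" is rejected, the block "{" accepted)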

#source ⇒ Object

Makes a best effort to provide the original source for a block by extracting a string from the file identified by Proc#source_location, using Ruby's tokenizer.

This works for the first block declared on a line in a source file. If additional blocks are specified inside the first block, on the same line as the start of that block, only the outermost block declaration will be identified as the block we want.

If you require only the source of a block nested within another block, start it on a new line, as is best practice for clarity and readability.



# File 'lib/core_extensions/proc.rb', line 24

def source
  @source ||= begin
    file, line_no = source_location
    raise "no file provided by source_location: #{self}" if file.nil?
    raise "no line number provided for source_location: #{self}" if line_no.nil?
    tokens = Ripper.lex File.read(file)
    tokens_on_line = tokens.select { |pos, lbl, str| pos[0].eql?(line_no) }
    starting_token = tokens_on_line.detect do |pos, lbl, str|
      TOKEN_PAIRS.keys.include?([lbl, str]) &&
        _actually_starting_a_proc?(tokens, [pos, lbl, str])
    end
    starting_token_type = [starting_token[1], starting_token[2]]
    ending_token_type = TOKEN_PAIRS[starting_token_type]
    source_str = ""
    remaining_tokens = tokens[tokens.index(starting_token)..-1]
    nesting = -1
    starting_nesting_token_types = if [TLAMBDA, LBRACE].include?(starting_token_type)
      [TLAMBDA, LBRACE]
    else
      [starting_token_type]
    end

    while token = remaining_tokens.shift
      token = [token[1], token[2]] # strip position
      source_str << token[1]
      nesting += 1 if starting_nesting_token_types.include? token
      is_ending_token = token.eql?(ending_token_type)
      break if is_ending_token && nesting.eql?(0)
      nesting -= 1 if is_ending_token
    end
    source_str
  end
end
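
A hypothetical usage sketch; the procs must be defined in a real file so that source_location resolves to a readable path (this will not work for procs typed into IRB), and the return values shown are illustrative:

# some_file.rb
doubler = proc { |x| x * 2 }
stabby  = ->(x) { x * 2 }

doubler.source  # => "{ |x| x * 2 }"
stabby.source   # => "->(x) { x * 2 }"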

#source_body ⇒ Object

Examines the source of a proc to extract the body by removing the outermost block delimiters and any surrounding whitespace.

Raises an exception if the block takes arguments.



# File 'lib/core_extensions/proc.rb', line 75

def source_body
  raise "Cannot extract proc body on non-zero arity" unless arity.eql?(0)
  tokens = Ripper.lex source
  body_start_idx = 2
  body_end_idx = -1
  if tokens[0][1].eql?(:on_tlambda)
    body_start_idx = tokens.index(tokens.detect { |t| t[1].eql?(:on_tlambeg) }) + 1
  end
  body_tokens = tokens[body_start_idx..-1]

  body_tokens.pop # ending token of proc
  # remove trailing whitespace
  whitespace = [:on_sp, :on_nl, :on_ignored_nl]
  body_tokens.pop while whitespace.include?(body_tokens[-1][1])
  # remove leading whitespace
  body_tokens.shift while whitespace.include?(body_tokens[0][1])

  # put them back together
  body_tokens.map {|token| token[2] }.join
end