Class: RuleEvaluator

Inherits:
Object
  • Object
show all
Includes:
LogicHelper
Defined in:
lib/rast/rules/rule_evaluator.rb

Overview

Evaluates the rules. "Internal" refers to the `*true` or `*false` results.

Constant Summary collapse

NOT =
Operator.new(name: 'not', symbol: '!', precedence: 100)
AND =
Operator.new(name: 'and', symbol: '&', precedence: 2)
OR =
Operator.new(name: 'or', symbol: '|', precedence: 1)
OPERATORS =
[NOT, AND, OR].freeze
OPERATORS_CONCAT =
OPERATORS.map(&:to_s).join
RE_TOKEN_BODY =

the token body — e.g. the "false" part of "false[1]" (the text before the subscript bracket)

/^.+(?=\[)/.freeze
RE_TOKENS =
/([!|)(&])|([*a-zA-Z\s0-9-]+\[\d\])/.freeze
DEFAULT_CONVERT_HASH =
{
  Integer => IntConverter.new,
  Float => FloatConverter.new,
  Array => DefaultConverter.new,
  TrueClass => BoolConverter.new,
  FalseClass => BoolConverter.new,
  String => StrConverter.new,
  NilClass => DefaultConverter.new
}.freeze

Constants included from LogicHelper

LogicHelper::FALSE, LogicHelper::LOGIC_PRIMARY_RESULT, LogicHelper::OPPOSITE, LogicHelper::TRUE

Class Method Summary collapse

Instance Method Summary collapse

Methods included from LogicHelper

#close_bracket?, #open_bracket?, #perform_logical

Constructor Details

#initialize(token_converters: {}) ⇒ RuleEvaluator

/** @param token_converters token to converter mapping */



42
43
44
45
46
47
48
# File 'lib/rast/rules/rule_evaluator.rb', line 42

# Builds an evaluator with an optional token-to-converter mapping and
# empty working stacks.
# @param token_converters [Hash] token => converter mapping
def initialize(token_converters: {})
  @token_converters = token_converters

  # Shunting-yard working state: pending operators, RPN output, answers.
  @stack_operations, @stack_rpn, @stack_answer = [], [], []
end

Class Method Details

.operator_from_symbol(symbol: nil) ⇒ Object



27
28
29
# File 'lib/rast/rules/rule_evaluator.rb', line 27

# Looks up the Operator whose symbol matches the given character.
# @param symbol [String] operator symbol, e.g. '&'.
# @return [Operator, nil] matching operator, or nil when none matches.
def self.operator_from_symbol(symbol: nil)
  OPERATORS.each do |candidate|
    return candidate if candidate.symbol == symbol
  end
  nil
end

.tokenize(clause: '') ⇒ Object

Splits the input string into tokens. @param clause — rule clause to be tokenized.



78
79
80
# File 'lib/rast/rules/rule_evaluator.rb', line 78

# Splits the input clause string into rule tokens.
# @param clause [String] rule clause to be tokenized.
# @return [Array<String>] non-empty tokens (operands and operators).
def self.tokenize(clause: '')
  pieces = clause.to_s.split(RE_TOKENS)
  pieces.reject { |piece| piece.empty? }
end

Instance Method Details

#evaluate(scenario: []) ⇒ Object

/**

* Evaluates the previously parsed expression against the given scenario.
*
* @param scenario List of values to evaluate against the rule expression.
* @param rule_token_convert mapping of rule tokens to converter.
* @return <code>String</code> representation of the result
*/


89
90
91
92
93
94
95
96
97
98
# File 'lib/rast/rules/rule_evaluator.rb', line 89

# Evaluates the previously parsed (RPN) expression against a scenario.
# @param scenario [Array] values to evaluate against the rule expression.
# @return [String] representation of the result.
def evaluate(scenario: [])
  # A single RPN entry needs no operator processing — short-circuit it.
  return evaluate_one_rpn(scenario: scenario).to_s if @stack_rpn.size == 1

  evaluate_multi_rpn(
    scenario: scenario,
    rule_token_convert: @token_converters
  )
end

#next_value(rule_token_convert: {}) ⇒ Object

/**

* @param rule_token_convert token to converter map.
* @param default_converter default converter to use.
*/


104
105
106
107
108
109
110
111
112
113
114
# File 'lib/rast/rules/rule_evaluator.rb', line 104

# Pops the next answer token and resolves it to a value/subscript pair.
# Arrays and internal TRUE/FALSE results pass through unchanged.
# @param rule_token_convert [Hash] token to converter map (kept for
#   interface compatibility; conversion uses @token_converters downstream).
# @return [Hash] with :value and :subscript keys.
def next_value(rule_token_convert: {})
  token = @stack_answer.pop

  passthrough = token.is_a?(Array) || token == TRUE || token == FALSE
  return { subscript: -1, value: token } if passthrough

  next_value_default(token)
end

#next_value_default(token) ⇒ Object

private



117
118
119
120
121
122
123
124
125
126
127
128
# File 'lib/rast/rules/rule_evaluator.rb', line 117

# Resolves a raw token string into its converted value and subscript.
# @param token [String] raw token, e.g. "color[0]" or "color".
# @return [Hash] :value — converted token body; :subscript — extracted
#   index, or -1 when the token has no subscript.
# @raise [RuntimeError] when no converter is registered for the token body.
def next_value_default(token)
  token_cleaned = token.to_s.strip
  subscript = TokenUtil.extract_subscript(token: token_cleaned)
  # Use the shared RE_TOKEN_BODY constant instead of an inline copy of
  # the same pattern, and look the converter up only once.
  token_body = subscript > -1 ? token_cleaned[RE_TOKEN_BODY] : token_cleaned
  converter = @token_converters[token_body]

  raise "Config Error: Outcome clause token: '#{token}' not found in variables" if converter.nil?

  {
    value: converter.convert(token_body),
    subscript: subscript
  }
end

#parse(expression: '') ⇒ Object

/**

* Parses the math expression (complicated formula) and stores the result.
*
* @param pExpression <code>String</code> input expression (logical
*            expression formula)
* @return void.
* @since 0.3.0
*/


58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
# File 'lib/rast/rules/rule_evaluator.rb', line 58

# Parses the logical expression and stores the result as RPN in
# @stack_rpn (shunting-yard algorithm).
# @param expression [String, Array] input expression or pre-split tokens.
# @return [Array] the RPN token stack (also kept in @stack_rpn).
def parse(expression: '')
  # Reset parser state before processing a new expression.
  @stack_operations.clear
  @stack_rpn.clear

  tokens =
    expression.is_a?(Array) ? expression : RuleEvaluator.tokenize(clause: expression)

  # Shunt each token into the operator / RPN stacks.
  tokens.each do |token|
    shunt_internal(token: token)
  end

  # Drain any remaining operators, then flip the output into RPN order.
  @stack_rpn << @stack_operations.pop until @stack_operations.empty?
  @stack_rpn.reverse!
end

#shunt_closeObject



153
154
155
156
157
158
159
# File 'lib/rast/rules/rule_evaluator.rb', line 153

# Handles a closing bracket: moves pending operators to the RPN stack
# until the matching open bracket is found, then discards the bracket.
def shunt_close
  until @stack_operations.empty? ||
        open_bracket?(token: @stack_operations.last.strip)
    @stack_rpn << @stack_operations.pop
  end

  # Drop the open bracket itself — it never reaches the RPN output.
  @stack_operations.pop
end

#shunt_internal(token: '') ⇒ Object

/** @param token token. */



131
132
133
134
135
136
137
138
139
140
141
# File 'lib/rast/rules/rule_evaluator.rb', line 131

# Routes a single token through the shunting-yard algorithm.
# @param token [String] token: bracket, operator, or operand.
def shunt_internal(token: '')
  case
  when open_bracket?(token: token)
    @stack_operations << token
  when close_bracket?(token: token)
    shunt_close
  when operator?(token: token)
    shunt_operator(token)
  else
    # Operands go straight to the RPN output.
    @stack_rpn << token
  end
end

#shunt_operator(token) ⇒ Object



143
144
145
146
147
148
149
150
151
# File 'lib/rast/rules/rule_evaluator.rb', line 143

# Handles an operator token: pops operators of greater or equal
# precedence to the RPN stack, then pushes the incoming operator.
# @param token [String] operator token, e.g. '&'.
def shunt_operator(token)
  loop do
    top = @stack_operations.last
    break if top.nil? || !operator?(token: top.strip)
    break if precedence(symbol_char: token[0]) >
             precedence(symbol_char: top.strip[0])

    @stack_rpn << @stack_operations.pop
  end

  @stack_operations << token
end