Class: Redis::TimeSeries::Info

Inherits: Struct (which inherits from Object)
Defined in:
lib/redis/time_series/info.rb

Overview

The Info struct wraps the result of the TS.INFO command with method access. It also applies some limited parsing to the result values, mainly snakifying the property keys, and instantiating Rule objects if necessary.

All properties of the struct are also available on a TimeSeries object itself via delegation.

Instance Attribute Summary collapse

Class Method Summary collapse

Instance Method Summary collapse

Instance Attribute Details

#chunk_countInteger (readonly)

Returns number of memory chunks used for the time-series.

Returns:

  • (Integer)

    number of memory chunks used for the time-series



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#chunk_sizeInteger (readonly)

Returns amount of allocated memory in bytes.

Returns:

  • (Integer)

    amount of allocated memory in bytes



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#chunk_typeString (readonly)

Returns whether the chunk is “compressed” or “uncompressed”.

Returns:

  • (String)

    whether the chunk is “compressed” or “uncompressed”



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#duplicate_policyObject

Returns the value of the duplicate_policy attribute.

Returns:

  • (Object)

    the current value of duplicate_policy



42
43
44
# File 'lib/redis/time_series/info.rb', line 42

# Struct-generated reader for the +duplicate_policy+ attribute.
# When the struct was built via .parse, this holds a DuplicatePolicy
# object; otherwise it is whatever value was assigned (possibly nil).
def duplicate_policy
  @duplicate_policy
end

#first_timestampInteger (readonly)

Returns first timestamp present in the time-series (milliseconds since epoch).

Returns:

  • (Integer)

    first timestamp present in the time-series (milliseconds since epoch)



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#labelsHash (readonly)

Returns a hash of label-value pairs that represent metadata labels of the time-series.

Returns:

  • (Hash)

    a hash of label-value pairs that represent metadata labels of the time-series



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#last_timestampInteger (readonly)

Returns last timestamp present in the time-series (milliseconds since epoch).

Returns:

  • (Integer)

    last timestamp present in the time-series (milliseconds since epoch)



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#max_samples_per_chunkInteger (readonly)

Returns maximum number of samples per memory chunk.

Returns:

  • (Integer)

    maximum number of samples per memory chunk



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#memory_usageInteger (readonly)

Returns total number of bytes allocated for the time-series.

Returns:

  • (Integer)

    total number of bytes allocated for the time-series



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#retention_timeInteger (readonly)

Returns retention time, in milliseconds, for the time-series. A zero value means unlimited retention.

Returns:

  • (Integer)

    retention time, in milliseconds, for the time-series. A zero value means unlimited retention.



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#rulesArray<Rule> (readonly)

Returns an array of configured compaction Rules.

Returns:

  • (Array<Rule>)

    an array of configured compaction Rules



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#seriesTimeSeries (readonly)

Returns the series this info is from.

Returns:



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#source_keyString? (readonly)

Returns the key of the source series, if this series is the destination of a compaction rule.

Returns:

  • (String, nil)

    the key of the source series, if this series is the destination of a compaction rule



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

#total_samplesInteger (readonly) Also known as: count, length, size

Returns the total number of samples in the series.

Returns:

  • (Integer)

    the total number of samples in the series



42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
# File 'lib/redis/time_series/info.rb', line 42

# Wraps the result of the TS.INFO command with method access. Property
# keys are snakified, and rule/duplicate-policy values are parsed into
# Rule / DuplicatePolicy objects.
Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @param series [TimeSeries] the series the info was fetched from
    # @param data [Array] flat key/value array returned by TS.INFO
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    # Folds the flat response into a hash keyed by known struct members.
    # Uses each_with_object to build one hash in place (the previous
    # reduce + merge form allocated a new hash per key/value pair).
    def build_hash(data)
      data.each_slice(2).each_with_object({}) do |(key, value), hash|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/, '\1_\2').downcase.to_sym
        # Skip unknown properties
        hash[key] = value if members.include?(key)
      end
    end

    # Coerces integer-looking label values into Integers.
    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    # Wraps the raw policy string in a DuplicatePolicy, when present.
    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    # Builds Rule objects; Array() tolerates a missing rules entry.
    def parse_rules(hash)
      hash[:rules] = Array(hash[:rules]).map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

Class Method Details

.parse(series:, data:) ⇒ Info

This method is part of a private API. You should avoid using this method if possible, as it may be removed or be changed in the future.

Returns:



62
63
64
65
66
67
68
69
# File 'lib/redis/time_series/info.rb', line 62

# Builds an Info from a raw TS.INFO reply. Pipeline: snakify keys and
# drop unknown properties (build_hash), attach the owning series, then
# parse labels, duplicate policy, and compaction rules before
# constructing the keyword-init struct.
# @api private
# @param series [TimeSeries] the series the info was fetched from
# @param data [Array] flat key/value array returned by TS.INFO
# @return [Info]
def parse(series:, data:)
  build_hash(data)
    .merge(series: series)
    .then(&method(:parse_labels))
    .then(&method(:parse_policies))
    .then(&method(:parse_rules))
    .then(&method(:new))
end

Instance Method Details

#sourceTimeSeries?

If this series is the destination of a compaction rule, returns the source series of the data.

Returns:



105
106
107
108
# File 'lib/redis/time_series/info.rb', line 105

# If this series is the destination of a compaction rule, returns the
# source series of the data; nil when there is no source_key.
# Memoized in @source so the TimeSeries wrapper is built only once.
# @return [TimeSeries, nil] the series referred to by {source_key}
def source
  return unless source_key
  @source ||= TimeSeries.new(source_key, redis: series.redis)
end