Class: Redis::TimeSeries

Inherits:
Object
Extended by:
Forwardable, Client
Defined in:
lib/redis/time_series.rb,
lib/redis/time_series/info.rb,
lib/redis/time_series/rule.rb,
lib/redis/time_series/multi.rb,
lib/redis/time_series/client.rb,
lib/redis/time_series/errors.rb,
lib/redis/time_series/sample.rb,
lib/redis/time_series/filters.rb,
lib/redis/time_series/version.rb,
lib/redis/time_series/aggregation.rb,
lib/redis/time_series/duplicate_policy.rb

Overview

The Redis::TimeSeries class is an interface for working with time-series data in Redis, using the RedisTimeSeries module.

You can't use this gem with vanilla Redis; the time series module must be compiled and loaded. The easiest way to do this is to run the provided Docker container. Refer to the setup guide for more info.

docker run -p 6379:6379 -it --rm redislabs/redistimeseries

Once you’re up and running, you can create a new time series and start recording data. Many commands are documented below, but you should refer to the command documentation for the most authoritative and up-to-date reference.

Examples:

ts = Redis::TimeSeries.create('time_series_example')
ts.add(12345)
ts.get
#=> #<Redis::TimeSeries::Sample:0x00007ff00d942e60 @time=2020-07-19 16:52:48 -0700, @value=0.12345e5>

Defined Under Namespace

Modules: Client
Classes: Aggregation, AggregationError, DuplicatePolicy, Error, FilterError, Filters, Info, Multi, Rule, Sample, UnknownPolicyError

Constant Summary

VERSION =
'0.8.1'

Instance Attribute Summary

Class Method Summary

Instance Method Summary

Methods included from Client

debug, debug=, extended, redis, redis=

Constructor Details

#initialize(key, redis: self.class.redis) ⇒ TimeSeries



# File 'lib/redis/time_series.rb', line 227

def initialize(key, redis: self.class.redis)
  @key = key
  @redis = redis
end

Instance Attribute Details

#chunk_count ⇒ Integer (readonly)



# File 'lib/redis/time_series/info.rb', line 42

Info = Struct.new(
  :chunk_count,
  :chunk_size,
  :chunk_type,
  :duplicate_policy,
  :first_timestamp,
  :labels,
  :last_timestamp,
  :max_samples_per_chunk,
  :memory_usage,
  :retention_time,
  :rules,
  :series,
  :source_key,
  :total_samples,
  keyword_init: true
) do
  class << self
    # @api private
    # @return [Info]
    def parse(series:, data:)
      build_hash(data)
        .merge(series: series)
        .then(&method(:parse_labels))
        .then(&method(:parse_policies))
        .then(&method(:parse_rules))
        .then(&method(:new))
    end

    private

    def build_hash(data)
      data.each_slice(2).reduce({}) do |h, (key, value)|
        # Convert camelCase info keys to snake_case
        key = key.gsub(/(.)([A-Z])/,'\1_\2').downcase.to_sym
        # Skip unknown properties
        next h unless members.include?(key)
        h.merge(key => value)
      end
    end

    def parse_labels(hash)
      hash[:labels] = hash[:labels].to_h.transform_values { |v| v.to_i.to_s == v ? v.to_i : v }
      hash
    end

    def parse_policies(hash)
      hash[:duplicate_policy] = DuplicatePolicy.new(hash[:duplicate_policy]) if hash[:duplicate_policy]
      hash
    end

    def parse_rules(hash)
      hash[:rules] = hash[:rules].map { |d| Rule.new(source: hash[:series], data: d) }
      hash
    end
  end

  alias count total_samples
  alias length total_samples
  alias size total_samples

  # If this series is the destination of a compaction rule, returns the source series of the data.
  # @return [TimeSeries, nil] the series referred to by {source_key}
  def source
    return unless source_key
    @source ||= TimeSeries.new(source_key, redis: series.redis)
  end
end

The remaining Info-backed attributes below are members of the same struct; their definitions are identical to the source shown above.

#chunk_size ⇒ Integer (readonly)




#chunk_type ⇒ String (readonly)




#first_timestamp ⇒ Integer (readonly)




#key ⇒ String (readonly)



# File 'lib/redis/time_series.rb', line 223

def key
  @key
end

#labels ⇒ Hash (readonly)




#last_timestamp ⇒ Integer (readonly)




#max_samples_per_chunk ⇒ Integer (readonly)




#memory_usage ⇒ Integer (readonly)




#retention_time ⇒ Integer (readonly)




#rules ⇒ Array<Rule> (readonly)




#series ⇒ TimeSeries (readonly)




#source_key ⇒ String? (readonly)




#total_samples ⇒ Integer (readonly)




Class Method Details

.create(key, **options) ⇒ Redis::TimeSeries

Create a new time series.

Options Hash (**options):

  • :labels (Hash)

    A hash of label-value pairs to apply to this series.

  • :redis (Redis) — default: self.class.redis

    a different Redis client to use

  • :retention (Integer)

    Maximum age for samples compared to last event time (in milliseconds). With no value, the series will not be trimmed.

  • :uncompressed (Boolean)

    When true, series data will be stored in an uncompressed format.

  • :duplicate_policy (String, Symbol)

A duplicate policy to resolve conflicts when adding values to the series. Valid values are listed in Redis::TimeSeries::DuplicatePolicy::VALID_POLICIES.

  • :chunk_size (Integer)

    Amount of memory, in bytes, to allocate for each chunk of data. Must be a multiple of 8. Default for a series is 4096.




# File 'lib/redis/time_series.rb', line 49

def create(key, **options)
  new(key, redis: options.fetch(:redis, redis)).create(**options)
end
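
For example, creating a series with the documented options (the key and label values are illustrative):

ts = Redis::TimeSeries.create(
  'sensor_1_temperature',
  labels: { region: 'us-east' },  # queryable later via .where(region: 'us-east')
  retention: 86_400_000,          # trim samples older than one day (milliseconds)
  duplicate_policy: :last         # keep the newest value on timestamp conflicts
)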

.create_rule(source:, dest:, aggregation:) ⇒ String

Create a compaction rule for a series. Note that both source and destination series must exist before the rule can be created.




# File 'lib/redis/time_series.rb', line 68

def create_rule(source:, dest:, aggregation:)
  cmd 'TS.CREATERULE', key_for(source), key_for(dest), Aggregation.parse(aggregation).to_a
end
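
For example, to downsample raw data into hourly averages. Both series must already exist; this sketch assumes Aggregation.parse accepts an [aggregation_type, duration_in_ms] pair:

Redis::TimeSeries.create_rule(
  source: 'sensor_1_temperature',
  dest: 'sensor_1_temperature_hourly',
  aggregation: [:avg, 3_600_000]  # hourly average (duration in milliseconds)
)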

.delete_rule(source:, dest:) ⇒ String

Delete an existing compaction rule.

Raises:

  • (Redis::CommandError)

    if the compaction rule does not exist



# File 'lib/redis/time_series.rb', line 79

def delete_rule(source:, dest:)
  cmd 'TS.DELETERULE', key_for(source), key_for(dest)
end

.destroy(key) ⇒ 1, 0

Delete all data and remove a time series from Redis.



# File 'lib/redis/time_series.rb', line 88

def destroy(key)
  redis.del key
end
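
Since this issues a DEL, the return value is the number of keys removed:

Redis::TimeSeries.destroy('time_series_example')
#=> 1 if the series existed, 0 otherwise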

.madd(data) ⇒ Array<Sample, Redis::CommandError> Also known as: add_multiple, multi_add

Add multiple values to multiple series.

Examples:

Adding multiple values with timestamps

Redis::TimeSeries.madd(
  foo: { 2.minutes.ago => 123, 1.minute.ago => 456, Time.current => 789 },
  bar: { 2.minutes.ago => 987, 1.minute.ago => 654, Time.current => 321 }
)

Adding multiple values without timestamps

Redis::TimeSeries.madd(foo: 1, bar: 2, baz: 3)


# File 'lib/redis/time_series.rb', line 109

def madd(data)
  data.reduce([]) do |memo, (key, value)|
    memo += parse_madd_values(key, value)
    memo
  end.then do |args|
    cmd('TS.MADD', args).each_with_index.map do |result, idx|
      result.is_a?(Redis::CommandError) ? result : Sample.new(result, args[idx][2])
    end
  end
end

.mrange(range, filter:, count: nil, aggregation: nil, with_labels: false) ⇒ Multi

Query across multiple series, returning values from oldest to newest.



# File 'lib/redis/time_series.rb', line 137

def mrange(range, filter:, count: nil, aggregation: nil, with_labels: false)
  multi_cmd('TS.MRANGE', range, filter, count, aggregation, with_labels)
end
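
A sketch of a multi-series query, assuming the range argument accepts a Ruby Range of Times and that the filter accepts the same hash syntax as .query_index below:

Redis::TimeSeries.mrange(
  10.minutes.ago..Time.current,
  filter: { region: 'us-east' },
  with_labels: true
)
#=> #<Redis::TimeSeries::Multi ...>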

.mrevrange(range, filter:, count: nil, aggregation: nil, with_labels: false) ⇒ Multi

Query across multiple series, returning values from newest to oldest.



# File 'lib/redis/time_series.rb', line 156

def mrevrange(range, filter:, count: nil, aggregation: nil, with_labels: false)
  multi_cmd('TS.MREVRANGE', range, filter, count, aggregation, with_labels)
end

.query_index(filter_value) ⇒ Array<TimeSeries> Also known as: where

Search for time series matching the provided filters. Refer to the Filters documentation for details on filter syntax.

Examples:

Using a filter string

Redis::TimeSeries.query_index('foo=bar')
#=> [#<Redis::TimeSeries:0x00007ff00e222788 @key="ts3", @redis=#<Redis...>>]

Using the .where alias with hash DSL

Redis::TimeSeries.where(foo: 'bar')
#=> [#<Redis::TimeSeries:0x00007ff00e2a1d30 @key="ts3", @redis=#<Redis...>>]




# File 'lib/redis/time_series.rb', line 176

def query_index(filter_value)
  filters = Filters.new(filter_value)
  filters.validate!
  cmd('TS.QUERYINDEX', filters.to_a).map { |key| new(key) }
end

Instance Method Details

#==(other) ⇒ Boolean

Compare series based on Redis key and configured client.



# File 'lib/redis/time_series.rb', line 444

def ==(other)
  return false unless other.is_a?(self.class)
  key == other.key && redis == other.redis
end

#add(value, timestamp = '*', uncompressed: nil, on_duplicate: nil, chunk_size: nil) ⇒ Sample

Add a value to the series.

Raises:

  • (Redis::CommandError)

    if the value being added is older than the latest timestamp in the series




# File 'lib/redis/time_series.rb', line 244

def add(value, timestamp = '*', uncompressed: nil, on_duplicate: nil, chunk_size: nil)
  ts = cmd 'TS.ADD',
           key,
           timestamp,
           value,
           ('UNCOMPRESSED' if uncompressed),
           (['CHUNK_SIZE', chunk_size] if chunk_size),
           (DuplicatePolicy.new(on_duplicate).to_a('ON_DUPLICATE') if on_duplicate)
  Sample.new(ts, value)
end
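
For example (the timestamp defaults to '*', the current server time; passing an explicit Time is assumed to work, as in the #madd examples):

ts = Redis::TimeSeries.new('time_series_example')
ts.add(23.7)                # record a value at the current server time
ts.add(23.9, Time.current)  # or pass a timestamp explicitly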

#create(retention: nil, uncompressed: nil, labels: nil, duplicate_policy: nil, chunk_size: nil) ⇒ Object

This method is part of a private API. You should avoid using this method if possible, as it may be removed or be changed in the future.

Issues a TS.CREATE command for the current series. You should use the class method create instead.



# File 'lib/redis/time_series.rb', line 258

def create(retention: nil, uncompressed: nil, labels: nil, duplicate_policy: nil, chunk_size: nil)
  cmd 'TS.CREATE', key,
      (['RETENTION', retention] if retention),
      ('UNCOMPRESSED' if uncompressed),
      (['CHUNK_SIZE', chunk_size] if chunk_size),
      (DuplicatePolicy.new(duplicate_policy).to_a if duplicate_policy),
      (['LABELS', labels.to_a] if labels&.any?)
  self
end

#create_rule(dest:, aggregation:) ⇒ String

Create a compaction rule for this series.




# File 'lib/redis/time_series.rb', line 280

def create_rule(dest:, aggregation:)
  self.class.create_rule(source: self, dest: dest, aggregation: aggregation)
end

#decrby(value = 1, timestamp = nil, uncompressed: nil, chunk_size: nil) ⇒ Integer Also known as: decrement

Decrement the current value of the series.



# File 'lib/redis/time_series.rb', line 305

def decrby(value = 1, timestamp = nil, uncompressed: nil, chunk_size: nil)
  cmd 'TS.DECRBY',
      key,
      value,
      (timestamp if timestamp),
      ('UNCOMPRESSED' if uncompressed),
      (['CHUNK_SIZE', chunk_size] if chunk_size)
end

#delete_rule(dest:) ⇒ String

Delete an existing compaction rule.

Raises:

  • (Redis::CommandError)

    if the compaction rule does not exist




# File 'lib/redis/time_series.rb', line 292

def delete_rule(dest:)
  self.class.delete_rule(source: self, dest: dest)
end

#destroy ⇒ 1, 0

Delete all data and remove this time series from Redis.



# File 'lib/redis/time_series.rb', line 320

def destroy
  redis.del key
end

#get ⇒ Sample?

Get the most recent sample for this series.



# File 'lib/redis/time_series.rb', line 330

def get
  cmd('TS.GET', key).then do |timestamp, value|
    return unless value
    Sample.new(timestamp, value)
  end
end

#incrby(value = 1, timestamp = nil, uncompressed: nil, chunk_size: nil) ⇒ Integer Also known as: increment

Increment the current value of the series.



# File 'lib/redis/time_series.rb', line 346

def incrby(value = 1, timestamp = nil, uncompressed: nil, chunk_size: nil)
  cmd 'TS.INCRBY',
      key,
      value,
      (timestamp if timestamp),
      ('UNCOMPRESSED' if uncompressed),
      (['CHUNK_SIZE', chunk_size] if chunk_size)
end
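
A counter-style sketch (the key is illustrative; #decrby and its decrement alias mirror this):

views = Redis::TimeSeries.create('page_views')
views.increment  # alias for incrby; adds 1 at the current time
views.incrby(5)  # or increment by an arbitrary amount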

#info ⇒ Info

Get information about the series. Note that all properties of Info are also available on the series itself via delegation.



# File 'lib/redis/time_series.rb', line 364

def info
  Info.parse series: self, data: cmd('TS.INFO', key)
end
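
Because of that delegation, these two calls are equivalent:

ts.info.total_samples
ts.total_samples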

#labels=(val) ⇒ Hash

Assign labels to the series using TS.ALTER.



# File 'lib/redis/time_series.rb', line 375

def labels=(val)
  cmd 'TS.ALTER', key, 'LABELS', val.to_a
end
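
For example, replacing the current labels with a new hash:

ts.labels = { region: 'us-west', sensor: 2 }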

#madd(data) ⇒ Array<Sample, Redis::CommandError> Also known as: multi_add, add_multiple

Add multiple values to the series.

Examples:

Adding multiple values with timestamps

ts.madd(2.minutes.ago => 987, 1.minute.ago => 654, Time.current => 321)


# File 'lib/redis/time_series.rb', line 390

def madd(data)
  args = self.class.send(:parse_madd_values, key, data)
  cmd('TS.MADD', args).each_with_index.map do |result, idx|
    result.is_a?(Redis::CommandError) ? result : Sample.new(result, args[idx][2])
  end
end

#range(range, count: nil, aggregation: nil) ⇒ Array<Sample>

Get a range of values from the series, from earliest to most recent.



# File 'lib/redis/time_series.rb', line 411

def range(range, count: nil, aggregation: nil)
  range_cmd('TS.RANGE', range, count, aggregation)
end
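
A sketch, assuming the range argument accepts a Ruby Range of Times and that aggregations are given as an [aggregation_type, duration_in_ms] pair:

ts.range(10.minutes.ago..Time.current, aggregation: [:avg, 60_000])
#=> [#<Redis::TimeSeries::Sample ...>, ...]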

#retention=(val) ⇒ Integer

Set the data retention time for the series using TS.ALTER.



# File 'lib/redis/time_series.rb', line 437

def retention=(val)
  # TODO: this should also accept an ActiveSupport::Duration
  cmd 'TS.ALTER', key, 'RETENTION', val.to_i
end
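
For example, trimming samples older than one day (the value is in milliseconds, matching the :retention option on .create):

ts.retention = 86_400_000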

#revrange(range, count: nil, aggregation: nil) ⇒ Array<Sample>

Get a range of values from the series, from most recent to earliest.



# File 'lib/redis/time_series.rb', line 427

def revrange(range, count: nil, aggregation: nil)
  range_cmd('TS.REVRANGE', range, count, aggregation)
end