Class: SolidCache::Entry

Inherits:
Record
  • Object
Includes:
Encryption, Expiration, Size
Defined in:
app/models/solid_cache/entry.rb,
app/models/solid_cache/entry/size.rb,
app/models/solid_cache/entry/encryption.rb,
app/models/solid_cache/entry/expiration.rb,
app/models/solid_cache/entry/size/estimate.rb,
app/models/solid_cache/entry/size/moving_average_estimate.rb

Defined Under Namespace

Modules: Encryption, Expiration, Size

Constant Summary

ESTIMATED_ROW_OVERHEAD = 140

The estimated cost of an extra row in bytes, including fixed-size columns, overhead, indexes and free space. Based on experimentation on SQLite, MySQL and PostgreSQL. A bit high for SQLite (more like 90 bytes), but about right for MySQL/PostgreSQL.

ESTIMATED_ENCRYPTION_OVERHEAD = 170

Assuming MessagePack serialization.

KEY_HASH_ID_RANGE = -(2**63)..(2**63 - 1)

MULTI_BATCH_SIZE = 1000
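
A hedged sketch of how these constants might factor into sizing: the byte cost attributed to an entry is roughly its key and value sizes plus the fixed per-row overhead. This arithmetic is purely illustrative and is not the exact formula used by the Size module.

key   = "users/42/profile"        # illustrative key
value = "serialized payload"      # illustrative value

# Illustrative estimate only: per-entry cost ≈ key bytes + value bytes + fixed row overhead.
estimated_bytes = key.bytesize + value.bytesize + SolidCache::Entry::ESTIMATED_ROW_OVERHEAD

# With encryption enabled, assume extra space per row for the MessagePack-serialized envelope.
estimated_bytes += SolidCache::Entry::ESTIMATED_ENCRYPTION_OVERHEAD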

Constants inherited from Record

Record::NULL_INSTRUMENTER

Class Method Summary

.clear_delete ⇒ Object
.clear_truncate ⇒ Object
.delete_by_key(*keys) ⇒ Object
.id_range ⇒ Object
.lock_and_write(key, &block) ⇒ Object
.read(key) ⇒ Object
.read_multi(keys) ⇒ Object
.write(key, value) ⇒ Object
.write_multi(payloads) ⇒ Object

Methods inherited from Record

disable_instrumentation, each_shard, with_instrumenter, with_shard

Class Method Details

.clear_delete ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 60

def clear_delete
  without_query_cache do
    in_batches.delete_all
  end
end

.clear_truncate ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 56

def clear_truncate
  connection.truncate(table_name)
end
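
A minimal usage sketch for the two clearing strategies above; which one Solid Cache uses is configured elsewhere, and calling them directly like this is purely illustrative.

# Drop every entry at once by truncating the table.
SolidCache::Entry.clear_truncate

# Or remove entries in batches with DELETE, which is slower on large tables
# but does not rely on TRUNCATE.
SolidCache::Entry.clear_delete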

.delete_by_key(*keys) ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 50

def delete_by_key(*keys)
  without_query_cache do
    where(key_hash: key_hashes_for(keys)).delete_all
  end
end
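
Keys are passed as a splat, so one call can delete several entries at once. The keys below are illustrative.

# Delete a single entry.
SolidCache::Entry.delete_by_key("users/42/profile")

# Delete several entries in one statement.
SolidCache::Entry.delete_by_key("users/42/profile", "users/43/profile")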

.id_range ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 77

def id_range
  without_query_cache do
    pick(Arel.sql("max(id) - min(id) + 1")) || 0
  end
end
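
The return value is the span between the smallest and largest id (or 0 for an empty table), which serves as an upper bound on the row count; a short illustration:

# With ids 100..149 present, id_range returns 50 even if some ids in between
# have been deleted, so it can over-count but never under-count rows.
SolidCache::Entry.id_range # => 50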

.lock_and_write(key, &block) ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 66

def lock_and_write(key, &block)
  transaction do
    without_query_cache do
      result = lock.where(key_hash: key_hash_for(key)).pick(:key, :value)
      new_value = block.call(result&.first == key ? result[1] : nil)
      write(key, new_value) if new_value
      new_value
    end
  end
end
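
lock_and_write takes a row lock on the matching entry (looked up by key hash, then double-checked against the key itself to guard against hash collisions), yields the current value (or nil), and writes whatever truthy value the block returns. A hedged sketch of using it as a compare-and-set style counter; the key and string encoding are illustrative.

SolidCache::Entry.lock_and_write("page_views:home") do |current|
  # current is the existing value, or nil if the key is not present yet.
  ((current&.to_i || 0) + 1).to_s
end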

.read(key) ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 34

def read(key)
  read_multi([key])[key]
end

.read_multi(keys) ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 38

def read_multi(keys)
  without_query_cache do
    {}.tap do |results|
      keys.each_slice(MULTI_BATCH_SIZE).each do |keys_batch|
        query = Arel.sql(select_sql(keys_batch), *key_hashes_for(keys_batch))

        results.merge!(connection.select_all(query, "SolidCache::Entry Load").cast_values(attribute_types).to_h)
      end
    end
  end
end
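
A minimal usage sketch for the readers; the keys and values are illustrative, and in practice the stored values are serialized cache payloads.

# Single read; returns nil when the key is absent.
SolidCache::Entry.read("a")              # => "1"

# Batched read; keys are looked up in slices of MULTI_BATCH_SIZE and the
# result hash contains only the keys that were found.
SolidCache::Entry.read_multi(["a", "b"]) # => { "a" => "1", "b" => "2" }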

.write(key, value) ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 20

def write(key, value)
  write_multi([ { key: key, value: value } ])
end

.write_multi(payloads) ⇒ Object



# File 'app/models/solid_cache/entry.rb', line 24

def write_multi(payloads)
  without_query_cache do
    payloads.each_slice(MULTI_BATCH_SIZE).each do |payload_batch|
      upsert_all \
        add_key_hash_and_byte_size(payload_batch),
        unique_by: upsert_unique_by, on_duplicate: :update, update_only: [ :key, :value, :byte_size ]
    end
  end
end
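
A minimal usage sketch for the writers; write wraps its arguments into a single-element payload for write_multi, which upserts in slices of MULTI_BATCH_SIZE. The values below are illustrative strings.

# Upsert one entry.
SolidCache::Entry.write("a", "1")

# Upsert several entries in one batch.
SolidCache::Entry.write_multi([
  { key: "a", value: "1" },
  { key: "b", value: "2" }
])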