@@ -258,85 +258,75 @@ def mute
258258 # end
259259 # cache.fetch('city') # => "Duckburgh"
260260 #
261- # You may also specify additional options via the +options+ argument.
262- # Setting <tt>force: true</tt> forces a cache "miss," meaning we treat
263- # the cache value as missing even if it's present. Passing a block is
264- # required when +force+ is true so this always results in a cache write.
261+ # ==== Options
265262 #
266- # cache.write('today', 'Monday')
267- # cache.fetch('today', force: true) { 'Tuesday' } # => 'Tuesday'
268- # cache.fetch('today', force: true) # => ArgumentError
263+ # Internally, +fetch+ calls #read_entry, and calls #write_entry on a cache
264+ # miss. Thus, +fetch+ supports the same options as #read and #write.
265+ # Additionally, +fetch+ supports the following options:
269266 #
270- # The +:force+ option is useful when you're calling some other method to
271- # ask whether you should force a cache write. Otherwise, it's clearer to
272- # just call <tt>Cache#write</tt>.
267+ # * <tt>force: true</tt> - Forces a cache "miss," meaning we treat the
268+ # cache value as missing even if it's present. Passing a block is
269+ # required when +force+ is true so this always results in a cache write.
273270 #
274- # Setting <tt>skip_nil: true</tt> will not cache nil result:
271+ # cache.write('today', 'Monday')
272+ # cache.fetch('today', force: true) { 'Tuesday' } # => 'Tuesday'
273+ # cache.fetch('today', force: true) # => ArgumentError
275274 #
276- # cache.fetch('foo') { nil }
277- # cache.fetch('bar', skip_nil: true) { nil }
278- # cache.exist?('foo') # => true
279- # cache.exist?('bar') # => false
275+ # The +:force+ option is useful when you're calling some other method to
276+ # ask whether you should force a cache write. Otherwise, it's clearer to
277+ # just call +write+.
280278 #
281- # Setting <tt>:race_condition_ttl</tt> is very useful in situations where
282- # a cache entry is used very frequently and is under heavy load. If a
283- # cache expires and due to heavy load several different processes will try
284- # to read data natively and then they all will try to write to cache. To
285- # avoid that case the first process to find an expired cache entry will
286- # bump the cache expiration time by the value set in <tt>:race_condition_ttl</tt>.
287- # Yes, this process is extending the time for a stale value by another few
288- # seconds. Because of extended life of the previous cache, other processes
289- # will continue to use slightly stale data for a just a bit longer. In the
290- # meantime that first process will go ahead and will write into cache the
291- # new value. After that all the processes will start getting the new value.
292- # The key is to keep <tt>:race_condition_ttl</tt> small.
279+ # * <tt>skip_nil: true</tt> - Prevents caching a nil result:
293280 #
294- # If the process regenerating the entry errors out, the entry will be
295- # regenerated after the specified number of seconds. Also note that the
296- # life of stale cache is extended only if it expired recently. Otherwise
297- # a new value is generated and <tt>:race_condition_ttl</tt> does not play
298- # any role.
281+ # cache.fetch('foo') { nil }
282+ # cache.fetch('bar', skip_nil: true) { nil }
283+ # cache.exist?('foo') # => true
284+ # cache.exist?('bar') # => false
299285 #
300- # # Set all values to expire after one minute.
301- # cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 1.minute)
286+ # * +:race_condition_ttl+ - Specifies the number of seconds during which
287+ # an expired value can be reused while a new value is being generated.
288+ # This can be used to prevent race conditions when cache entries expire,
289+ # by preventing multiple processes from simultaneously regenerating the
290+ # same entry (also known as the dog pile effect).
302291 #
303- # cache.write('foo', 'original value')
304- # val_1 = nil
305- # val_2 = nil
306- # sleep 60
292+ # When a process encounters a cache entry that has expired less than
293+ # +:race_condition_ttl+ seconds ago, it will bump the expiration time by
294+ # +:race_condition_ttl+ seconds before generating a new value. During
295+ # this extended time window, while the process generates a new value,
296+ # other processes will continue to use the old value. After the first
297+ # process writes the new value, other processes will then use it.
307298 #
308- # Thread.new do
309- # val_1 = cache.fetch('foo', race_condition_ttl: 10.seconds) do
310- # sleep 1
311- # 'new value 1'
312- # end
313- # end
299+ # If the first process errors out while generating a new value, another
300+ # process can try to generate a new value after the extended time window
301+ # has elapsed.
314302 #
315- # Thread.new do
316- # val_2 = cache.fetch('foo', race_condition_ttl: 10.seconds) do
317- # 'new value 2'
318- # end
319- # end
303+ # # Set all values to expire after one minute.
304+ # cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 1.minute)
320305 #
321- # cache.fetch('foo') # => "original value"
322- # sleep 10 # First thread extended the life of cache by another 10 seconds
323- # cache.fetch('foo') # => "new value 1"
324- # val_1 # => "new value 1"
325- # val_2 # => "original value"
306+ # cache.write('foo', 'original value')
307+ # val_1 = nil
308+ # val_2 = nil
309+ # sleep 60
326310 #
327- # Other options will be handled by the specific cache store implementation.
328- # Internally, #fetch calls #read_entry, and calls #write_entry on a cache
329- # miss. +options+ will be passed to the #read and #write calls.
311+ # Thread.new do
312+ # val_1 = cache.fetch('foo', race_condition_ttl: 10.seconds) do
313+ # sleep 1
314+ # 'new value 1'
315+ # end
316+ # end
330317 #
331- # For example, MemCacheStore's #write method supports the +:raw+
332- # option, which tells the memcached server to store all values as strings.
333- # We can use this option with #fetch too:
318+ # Thread.new do
319+ # val_2 = cache.fetch('foo', race_condition_ttl: 10.seconds) do
320+ # 'new value 2'
321+ # end
322+ # end
323+ #
324+ # cache.fetch('foo') # => "original value"
325+ # sleep 10 # First thread extended the life of cache by another 10 seconds
326+ # cache.fetch('foo') # => "new value 1"
327+ # val_1 # => "new value 1"
328+ # val_2 # => "original value"
334329 #
335- # cache = ActiveSupport::Cache::MemCacheStore.new
336- # cache.fetch("foo", force: true, raw: true) do
337- # :bar
338- # end
339- # cache.fetch('foo') # => "bar"
340330 def fetch(name, options = nil, &block)
341331 if block_given?
342332 options = merged_options(options)
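
As a quick reference for the options documented in this hunk, here is a small, self-contained sketch. It is not part of the commit; it uses the real ActiveSupport::Cache::MemoryStore, and the keys and values are illustrative only:

    require "active_support"
    require "active_support/cache"

    # Illustrative only -- not part of the diff above.
    cache = ActiveSupport::Cache::MemoryStore.new(expires_in: 60)

    # Plain fetch: writes the block's value on a miss, returns the cached value on a hit.
    cache.fetch("city") { "Duckburgh" }   # => "Duckburgh"
    cache.fetch("city") { "unused" }      # => "Duckburgh" (hit; block not called)

    # force: true treats the entry as missing and always rewrites it.
    cache.fetch("city", force: true) { "Calisota" } # => "Calisota"

    # skip_nil: true avoids storing a nil result.
    cache.fetch("maybe", skip_nil: true) { nil }
    cache.exist?("maybe")                 # => false

    # Options such as :expires_in pass through to the underlying write, and
    # :race_condition_ttl lets other readers briefly reuse a stale value
    # while one process regenerates the entry.
    cache.fetch("report", expires_in: 300, race_condition_ttl: 10) do
      "expensive result"
    end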