Use item proxies better for items#index?format=html :D
We used get_multi when preparing the proxies to decide which to load from the database, but then sent individual get requests to Memcache to re-fetch the same data that get_multi had already returned. Silly! Use the data that's already stored on the proxy instead.
parent 6b340f906e
commit 1ce32e5867
3 changed files with 22 additions and 6 deletions
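As a hedged sketch of the flow the commit message describes, and not code from this repository: the collection asks Memcache once for every item's fragment, remembers each hit on its proxy, and the view then reads the remembered fragment instead of issuing per-item gets. The prepare_partial and store_known_output names and the key format below are assumptions for illustration; only the known-outputs idea comes from this commit.

# Illustrative sketch only; prepare_partial, store_known_output, and the
# "items/<id>#<name>" key format are assumed names, not code from this commit.
class Item
  class ProxyCollection < Array
    def prepare_partial(name)
      keyed = index_by { |proxy| "items/#{proxy.id}##{name}" }
      Rails.cache.read_multi(*keyed.keys).each do |key, html|  # one get_multi round trip
        keyed[key].store_known_output(:partial, name, html)
      end
    end
  end
end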
@@ -126,7 +126,14 @@ module ItemsHelper
     # helper, we have to do some indirection. Fake that the render is in a
     # template, then capture the resulting buffer output.
     capture do
-      localized_cache("items/#{item.id}#item_link_partial") do
+      # Try to read from the prepared proxy's known partial output, if it's
+      # even a proxy at all.
+      if item.respond_to?(:known_partial_output)
+        prepared_output = item.known_partial_output(:item_link_partial).html_safe
+      else
+        prepared_output = nil
+      end
+      prepared_output || localized_cache("items/#{item.id}#item_link_partial") do
         safe_concat render(partial: 'items/item_link', locals: {item: item})
       end
     end
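A note on the helper change, with an illustrative snippet (the Item::Proxy constant path is assumed from the class Item hunks below): respond_to?(:known_partial_output) lets the same helper take either a real record, which falls through to localized_cache as before, or a prepared proxy, whose stored fragment needs .html_safe because it comes back from the cache as a plain String.

# Illustrative only: how a prepared proxy flows through the new branch.
proxy = Item::Proxy.new(42)                      # assumed constant path
proxy.respond_to?(:known_partial_output)         # => true, so the branch is taken
proxy.known_partial_output(:item_link_partial)   # => the prepared HTML String, or nil
                                                 #    if that partial was never prepared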
@@ -11,8 +11,13 @@ class Item
       Item.model_name
     end
 
-    def initialize(id)
-      @id = id
+    def initialize(item_or_id)
+      if item_or_id.is_a? Item
+        @item = item_or_id
+        @id = @item.id
+      else
+        @id = item_or_id.to_i
+      end
       @known_outputs = {method: {}, partial: {}}
     end
 
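For illustration, assuming the Proxy class is reachable as Item::Proxy (the hunk header only shows the enclosing class Item) and an ActiveRecord-style finder on Item, the constructor now accepts either a loaded record or an id:

item = Item.find(42)     # assumed finder, for the record form below
Item::Proxy.new(item)    # record form: keeps @item and takes @id from it
Item::Proxy.new(42)      # id form: just remembers @id = 42
Item::Proxy.new("42")    # strings still coerce through to_i, as before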
@@ -46,12 +51,16 @@ class Item
       @known_outputs[type][name] = value
     end
 
+    def known_partial_output(name)
+      @known_outputs[:partial][name]
+    end
+
     private
 
     def cache_method(method_name, *args, &block)
       # Two layers of cache: a local copy, in case the method is called again,
       # and then the Rails cache, before we hit the actual method call.
-      @known_outputs[method_name] ||= begin
+      @known_outputs[:method][method_name] ||= begin
         key = fragment_key(:method, method_name)
         Rails.cache.fetch(key) { item.send(method_name, *args) }
       end
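The cache_method change in this hunk fixes a mismatched key: @known_outputs is initialized as {method: {}, partial: {}}, so memoizing under @known_outputs[method_name] wrote sibling keys next to those two buckets instead of into the :method one. A sketch of the intended shape, with made-up example values:

# Hypothetical contents after memoizing one method and preparing one partial.
@known_outputs
# => { method:  { title: "Blue Mug" },
#      partial: { item_link_partial: "<a href=\"/items/42\">Blue Mug</a>" } }
@known_outputs[:method][:title]               # what cache_method now memoizes into
@known_outputs[:partial][:item_link_partial]  # what known_partial_output(name) reads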
@@ -12,8 +12,8 @@ class Item
       }
     })
 
-    def initialize(ids)
-      self.replace(ids.map { |id| Proxy.new(id.to_i) })
+    def initialize(items_or_ids)
+      self.replace(items_or_ids.map { |item_or_id| Proxy.new(item_or_id) })
     end
 
     def prepare_method(name)
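An illustrative use of the collection change (the ProxyCollection name and Array parentage are guesses from self.replace; only initialize and prepare_method appear in this hunk): since to_i is no longer forced here, already-loaded records pass straight through to the Proxy constructor, which now knows how to wrap them.

# Hypothetical usage: ids, numeric strings, and loaded records can be mixed.
loaded  = Item.find(7)                                  # assumed finder
proxies = Item::ProxyCollection.new([3, "5", loaded])
proxies.map(&:id)                                       # => [3, 5, 7]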