2010-05-15 08:38:45 -07:00
|
|
|
class Item < ActiveRecord::Base
|
2011-07-26 15:49:52 -07:00
|
|
|
include PrettyParam

# PHP Impress used "type" to describe category, so move single-table
# inheritance off the conventional "type" column.
set_inheritance_column 'inheritance_type'

# The SwfAsset `type` value that marks an asset as an item (object) asset.
SwfAssetType = 'object'

# Globalize-style translated attributes, stored in Item::Translation rows.
translates :name, :description, :rarity

has_many :closet_hangers
has_one :contribution, :as => :contributed
has_many :parent_swf_asset_relationships, :as => :parent
has_many :swf_assets, :through => :parent_swf_asset_relationships

# Per-request state: the body ID being modeled, and the current user's
# owned/wanted flags for this item.
attr_writer :current_body_id, :owned, :wanted

# Rarity values that mark an item as an NC Mall item.
NCRarities = [0, 500]

# Description text that flags deluxe paint brush set items.
PAINTBRUSH_SET_DESCRIPTION = 'This item is part of a deluxe paint brush set!'

# Matches the two known "only wearable by color X" description formats;
# the color name lands in capture 1 or capture 2.
SPECIAL_COLOR_DESCRIPTION_REGEX =
  /This item is only wearable by Neopets painted ([a-zA-Z]+)\.|WARNING: This [a-zA-Z]+ can be worn by ([a-zA-Z]+) [a-zA-Z]+ ONLY!/

# Colors with unique bodies, loaded once at boot from a config file.
# NOTE(review): Color.find_by_name returns nil for unknown names, so this
# list may contain nils if the config file disagrees with the DB — verify.
SPECIAL_PAINTBRUSH_COLORS_PATH = Rails.root.join('config', 'colors_with_unique_bodies.txt')
SPECIAL_PAINTBRUSH_COLORS = File.read(SPECIAL_PAINTBRUSH_COLORS_PATH).split("\n").map { |name| Color.find_by_name(name) }

# will_paginate page size.
cattr_reader :per_page
@@per_page = 30

scope :alphabetize, order(arel_table[:name])
scope :alphabetize_by_translations, lambda {
  it = Item::Translation.arel_table
  order(it[:name])
}

scope :join_swf_assets, joins(:swf_assets).group(arel_table[:id])

# NOTE(review): arel_table[:created_at] returns an attribute object and is
# always truthy, so this `if` guard never skips the scope — confirm intent.
scope :newest, order(arel_table[:created_at].desc) if arel_table[:created_at]

# Never-spidered items first, then stalest spidered items.
scope :spidered_longest_ago, order(["(last_spidered IS NULL) DESC", "last_spidered DESC"])

scope :sold_in_mall, where(:sold_in_mall => true)
scope :not_sold_in_mall, where(:sold_in_mall => false)

# Sitemap page: capped below the 50,000-URL sitemap limit.
scope :sitemap, select([arel_table[:id], arel_table[:name]]).
  order(arel_table[:id]).limit(49999)

scope :with_closet_hangers, joins(:closet_hangers)
|
|
|
|
|
2011-07-12 22:21:48 -07:00
|
|
|
# Whether the current user has this item in a closet list (owned or
# wanted). Returns the underlying flag value, not a strict boolean.
def closeted?
  @owned || @wanted
end
|
2012-10-24 20:09:05 -07:00
|
|
|
|
|
|
|
# Return an OrderedHash mapping users to the number of times they
# contributed to this item's assets, from most contributions to least.
def contributors_with_counts
  # IDs of the users who contributed this item's assets.
  asset_ids = swf_assets.select(SwfAsset.arel_table[:id]).map(&:id)
  asset_contributions = Contribution.select('user_id').
    where(:contributed_type => 'SwfAsset', :contributed_id => asset_ids)
  contributor_ids = asset_contributions.map(&:user_id)

  # Load the contributing users, indexed by ID.
  users_by_id = User.find(contributor_ids).each_with_object({}) do |user, by_id|
    by_id[user.id] = user
  end

  # Tally contribution rows per user ID.
  counts_by_id = Hash.new(0)
  contributor_ids.each { |id| counts_by_id[id] += 1 }

  # Emit users in descending order of contribution count.
  counts_by_id.sort_by { |_id, count| count }.reverse.
      each_with_object(ActiveSupport::OrderedHash.new) do |(id, count), ordered|
    ordered[users_by_id[id]] = count
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-09-08 19:49:39 -07:00
|
|
|
# Whether this is an NC Mall item, judged by its rarity value.
def nc?
  NCRarities.member?(rarity_index)
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2011-07-22 13:18:15 -07:00
|
|
|
# Whether the current user owns this item (set via attr_writer :owned).
# Returns the raw flag, which is nil when never set.
def owned?
  @owned
end

# Whether the current user wants this item (set via attr_writer :wanted).
# Returns the raw flag, which is nil when never set.
def wanted?
  @wanted
end
|
|
|
|
|
2010-09-08 19:49:39 -07:00
|
|
|
# Zones this item restricts, decoded from the zones_restrict bitstring:
# when character N is '1', the zone with ID N + 1 is restricted.
# Memoized after the first call.
def restricted_zones
  @restricted_zones ||= begin
    zones = []
    zones_restrict.each_char.with_index do |flag, index|
      zones << Zone.find(index + 1) if flag == '1'
    end
    zones
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-09-08 19:49:39 -07:00
|
|
|
# Zones this item's assets occupy. A zone whose assets cover only some of
# the item's body types is flagged with zone.sometimes = true.
# (Fixes: removed the unused `selected_assets` local; builds the result
# with Hash#map instead of manual accumulation.)
def occupied_zones
  # Map each zone ID to the unique body IDs whose assets occupy it, and
  # track every body ID seen so we can tell full from partial coverage.
  all_body_ids = []
  zone_body_ids = {}
  swf_assets.select('body_id, zone_id').each do |swf_asset|
    body_ids = (zone_body_ids[swf_asset.zone_id] ||= [])
    body_ids << swf_asset.body_id unless body_ids.include?(swf_asset.body_id)
    all_body_ids << swf_asset.body_id unless all_body_ids.include?(swf_asset.body_id)
  end

  total_body_ids = all_body_ids.size
  zone_body_ids.map do |zone_id, body_ids|
    zone = Zone.find(zone_id)
    # Occupied for only some body types => "sometimes" occupied.
    zone.sometimes = true if body_ids.size < total_body_ids
    zone
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2011-01-27 13:35:46 -08:00
|
|
|
# Every zone this item touches: restricted zones first, then occupied.
def affected_zones
  [*restricted_zones, *occupied_zones]
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
|
|
|
# The special color this item is tied to, if any, derived from its name
# and description. Memoized; note that a nil result is recomputed on
# every call (||= does not cache nil).
def special_color
  @special_color ||= determine_special_color
end
|
|
|
|
|
|
|
|
protected
|
|
|
|
# Work out which special color this item belongs to, or nil.
# Deluxe paint brush sets name the color in the item's own name; other
# items advertise it in a "wearable by ... only" description warning.
def determine_special_color
  if description.include?(PAINTBRUSH_SET_DESCRIPTION)
    name_downcased = name.downcase
    set_color = SPECIAL_PAINTBRUSH_COLORS.find do |color|
      name_downcased.include?(color.name)
    end
    return set_color if set_color
  end

  match = description.match(SPECIAL_COLOR_DESCRIPTION_REGEX)
  if match
    # The color name lands in capture 1 or 2 depending on which
    # description format matched.
    color_name = match[1] || match[2]
    return Color.find_by_name(color_name.downcase)
  end
end
|
|
|
|
public
|
|
|
|
|
2010-05-15 08:38:45 -07:00
|
|
|
# The supported species IDs, parsed from the comma-separated attribute
# string and cached. Returns nil when the attribute is unset — matching
# the old `... rescue nil` behavior, but without the rescue modifier that
# silently swallowed every other error too.
def species_support_ids
  @species_support_ids_array ||= begin
    raw = read_attribute('species_support_ids')
    raw && raw.split(',').map(&:to_i)
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-05-15 08:38:45 -07:00
|
|
|
# Store the supported species IDs, accepting either an array of IDs or a
# pre-serialized comma string.
def species_support_ids=(replacement)
  # Invalidate the cached parse in #species_support_ids.
  @species_support_ids_array = nil
  serialized = replacement.is_a?(Array) ? replacement.join(',') : replacement
  write_attribute('species_support_ids', serialized)
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-05-16 13:15:21 -07:00
|
|
|
# The species this item can be worn by, derived from its assets' body
# IDs. Body ID 0 marks an asset that fits every species.
def supported_species
  body_ids = swf_assets.select([:body_id]).map(&:body_id)
  return Species.all if body_ids.include?(0)

  compatible_pet_types = PetType.where(:body_id => body_ids).select([:species_id])
  Species.find(compatible_pet_types.map(&:species_id).uniq)
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2013-01-11 15:56:33 -08:00
|
|
|
# Parse +query+ into conditions and narrow a scope on the given locale's
# translations accordingly. Raises SearchError for missing or too-short
# queries, or when a limited filter is used more than once.
def self.search(query, user, locale)
  raise SearchError, "Please provide a search query" unless query
  query = query.strip
  raise SearchError, "Search queries should be at least 3 characters" if query.length < 3

  # Tokenize character by character: spaces split conditions (unless
  # inside a "quoted phrase"), ':' turns the current token into a named
  # filter, and '-' on a still-empty condition negates it.
  query_conditions = [Condition.new]
  in_phrase = false
  query.each_char do |c|
    if c == ' ' && !in_phrase
      query_conditions << Condition.new
    elsif c == '"'
      in_phrase = !in_phrase
    elsif c == ':' && !in_phrase
      query_conditions.last.to_filter!
    elsif c == '-' && !in_phrase && query_conditions.last.empty?
      query_conditions.last.negate!
    else
      query_conditions.last << c
    end
  end

  # Some filters are expensive, so each may appear only once per search.
  limited_filters_used = []
  query_conditions.inject(self.with_translations(locale)) do |scope, condition|
    if condition.filter? && LimitedSearchFilters.include?(condition.filter)
      if limited_filters_used.include?(condition.filter)
        raise SearchError, "The #{condition.filter} filter is complex; please only use one per search. Thanks!"
      else
        limited_filters_used << condition.filter
      end
    end
    condition.narrow(scope, user)
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-06-22 10:00:55 -07:00
|
|
|
# Serialize the fields the item JSON consumers expect. +options+ is
# accepted for compatibility with the standard as_json signature but is
# not used here.
def as_json(options = {})
  json = {}
  json[:description] = description
  json[:id] = id
  json[:name] = name
  json[:thumbnail_url] = thumbnail_url
  json[:zones_restrict] = zones_restrict
  json[:rarity_index] = rarity_index
  json[:owned] = owned?
  json[:wanted] = wanted?
  json[:nc] = nc?
  json
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-10-10 11:43:01 -07:00
|
|
|
# Ensure sold_in_mall is never NULL on first save; return true explicitly
# so the callback chain continues even when the flag was already false.
before_create do
  self.sold_in_mall ||= false
  true
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-06 15:08:42 -07:00
|
|
|
# Persist the asset relationships staged via
# #parent_swf_asset_relationships_to_update= for the body type in
# @current_body_id: prune relationships no longer present in the sample,
# then save the staged relationships and their assets.
def handle_assets!
  if @parent_swf_asset_relationships_to_update && @current_body_id
    new_swf_asset_ids = @parent_swf_asset_relationships_to_update.map(&:swf_asset_id)
    rels = ParentSwfAssetRelationship.arel_table
    swf_assets = SwfAsset.arel_table

    # If a relationship used to bind an item and asset for this body type,
    # but doesn't in this sample, the two have been unbound. Delete the
    # relationship. (body_id 0 assets fit all bodies, so include them.)
    ids_to_delete = self.parent_swf_asset_relationships.
      select(rels[:id]).
      joins(:swf_asset).
      where(rels[:swf_asset_id].not_in(new_swf_asset_ids)).
      where(swf_assets[:body_id].in([@current_body_id, 0])).
      map(&:id)

    unless ids_to_delete.empty?
      ParentSwfAssetRelationship.where(:id => ids_to_delete).delete_all
    end

    # Save each staged relationship and its asset.
    @parent_swf_asset_relationships_to_update.each do |rel|
      rel.save!
      rel.swf_asset.save!
    end
  end
end
|
2013-01-22 22:25:09 -08:00
|
|
|
|
|
|
|
# If there are species support IDs (it's not empty), the item is
# body-specific. If it's empty, it fits everyone the same.
# NOTE(review): #species_support_ids can return nil when the attribute is
# unset, which would raise here — confirm the attribute is always present.
def body_specific?
  species_support_ids.empty? ? false : true
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-10-07 07:46:23 -07:00
|
|
|
# Populate attributes from a parsed customization-registry hash.
# bear in mind that numbers from registries are floats
def origin_registry_info=(info)
  self.species_support_ids = info[:species_support].map(&:to_i)
  attribute_names.each do |attr_name|
    raw = info[attr_name.to_sym]
    next unless raw
    # Coerce registry floats back to integers before assignment.
    self[attr_name] = raw.is_a?(Float) ? raw.to_i : raw
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-06 15:08:42 -07:00
|
|
|
# The SWF assets attached to the relationships staged for update.
# (Was a manual inject into an array; map is the idiomatic equivalent.)
def pending_swf_assets
  @parent_swf_asset_relationships_to_update.map(&:swf_asset)
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-10-09 09:22:40 -07:00
|
|
|
# Stage relationships to be persisted later by #handle_assets!.
def parent_swf_asset_relationships_to_update=(rels)
  @parent_swf_asset_relationships_to_update = rels
end
|
2013-01-11 17:16:16 -08:00
|
|
|
|
|
|
|
# Locales we can translate into that have no stored translation yet.
def needed_translations
  translatable = Set.new(I18n.locales_with_neopets_language_code)
  translated = Set.new(translations.map(&:locale))
  translatable - translated
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-06 08:52:58 -07:00
|
|
|
# Load the items with the given IDs, plus the parent items of the given
# loose SwfAssets, attaching each asset back onto its item.
# NOTE(review): mutates the +ids+ argument by appending parent item IDs.
def self.all_by_ids_or_children(ids, swf_assets)
  swf_asset_ids = []
  swf_assets_by_id = {}
  swf_assets_by_parent_id = {}
  swf_assets.each do |swf_asset|
    id = swf_asset.id
    swf_assets_by_id[id] = swf_asset
    swf_asset_ids << id
  end
  # Look up which item owns each asset, and index the assets by item ID.
  SwfAsset.select([
    SwfAsset.arel_table[:id],
    ParentSwfAssetRelationship.arel_table[:parent_id]
  ]).object_assets.joins(:parent_swf_asset_relationships).
    where(SwfAsset.arel_table[:id].in(swf_asset_ids)).each do |row|
    item_id = row.parent_id.to_i
    swf_assets_by_parent_id[item_id] ||= []
    swf_assets_by_parent_id[item_id] << swf_assets_by_id[row.id.to_i]
    ids << item_id
  end
  # Load all items, then attach each asset to its owning item.
  find(ids).tap do |items|
    items.each do |item|
      swf_assets = swf_assets_by_parent_id[item.id]
      if swf_assets
        swf_assets.each do |swf_asset|
          swf_asset.item = item
        end
      end
    end
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2013-01-13 18:10:01 -08:00
|
|
|
# Build Item records (with staged asset relationships) from a pet's
# parsed customization registries. Nothing is saved here; relationships
# are staged for #handle_assets! via
# parent_swf_asset_relationships_to_update=. Returns the items.
def self.collection_from_pet_type_and_registries(pet_type, info_registry, asset_registry, scope=Item.scoped)
  # bear in mind that registries are arrays with many nil elements,
  # due to how the parser works

  # Collect existing items
  items = {}
  item_ids = []
  info_registry.each do |item_id, info|
    if info && info[:is_compatible]
      item_ids << item_id.to_i
    end
  end

  # Collect existing relationships
  existing_relationships_by_item_id_and_swf_asset_id = {}
  existing_items = scope.find_all_by_id(item_ids, :include => :parent_swf_asset_relationships)
  existing_items.each do |item|
    items[item.id] = item
    relationships_by_swf_asset_id = {}
    item.parent_swf_asset_relationships.each do |relationship|
      relationships_by_swf_asset_id[relationship.swf_asset_id] = relationship
    end
    existing_relationships_by_item_id_and_swf_asset_id[item.id] =
      relationships_by_swf_asset_id
  end

  # Collect existing assets
  swf_asset_ids = []
  asset_registry.each do |asset_id, asset_data|
    swf_asset_ids << asset_id.to_i if asset_data
  end
  existing_swf_assets = SwfAsset.object_assets.find_all_by_remote_id swf_asset_ids
  existing_swf_assets_by_remote_id = {}
  existing_swf_assets.each do |swf_asset|
    existing_swf_assets_by_remote_id[swf_asset.remote_id] = swf_asset
  end

  # With each asset in the registry,
  relationships_by_item_id = {}
  asset_registry.each do |asset_id, asset_data|
    if asset_data
      # Build and update the item
      item_id = asset_data[:obj_info_id].to_i
      next unless item_ids.include?(item_id) # skip incompatible (Uni Bug)
      item = items[item_id]
      unless item
        item = Item.new
        item.id = item_id
        items[item_id] = item
      end
      item.origin_registry_info = info_registry[item.id.to_s]
      item.current_body_id = pet_type.body_id

      # Build and update the SWF
      swf_asset_remote_id = asset_data[:asset_id].to_i
      swf_asset = existing_swf_assets_by_remote_id[swf_asset_remote_id]
      unless swf_asset
        swf_asset = SwfAsset.new
        swf_asset.remote_id = swf_asset_remote_id
      end
      swf_asset.origin_object_data = asset_data
      swf_asset.origin_pet_type = pet_type
      swf_asset.item = item

      # Build and update the relationship
      # (rescue nil covers items with no recorded relationships yet)
      relationship = existing_relationships_by_item_id_and_swf_asset_id[item.id][swf_asset.id] rescue nil
      unless relationship
        relationship = ParentSwfAssetRelationship.new
        relationship.parent = item
      end
      relationship.swf_asset = swf_asset
      relationships_by_item_id[item_id] ||= []
      relationships_by_item_id[item_id] << relationship
    end
  end

  # Set up the relationships to be updated on item save
  relationships_by_item_id.each do |item_id, relationships|
    items[item_id].parent_swf_asset_relationships_to_update = relationships
  end

  items.values
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-25 08:10:21 -08:00
|
|
|
class << self
|
|
|
|
# NC Mall spidering endpoints and templates.
MALL_HOST = 'ncmall.neopets.com'
MALL_MAIN_PATH = '/mall/shop.phtml'
MALL_CATEGORY_PATH = '/mall/ajax/load_page.phtml'
MALL_CATEGORY_QUERY = 'type=browse&cat={cat}&lang=en'
# onclick handler in the mall HTML that reveals each category's ID.
MALL_CATEGORY_TRIGGER = /load_items_pane\("browse", ([0-9]+)\);/
# Key under which the category JSON lists its items.
MALL_JSON_ITEM_DATA_KEY = 'object_data'
MALL_ITEM_URL_TEMPLATE = 'http://images.neopets.com/items/%s.gif'

MALL_MAIN_URI = Addressable::URI.new :scheme => 'http',
  :host => MALL_HOST, :path => MALL_MAIN_PATH
MALL_CATEGORY_URI = Addressable::URI.new :scheme => 'http',
  :host => MALL_HOST, :path => MALL_CATEGORY_PATH,
  :query => MALL_CATEGORY_QUERY
MALL_CATEGORY_TEMPLATE = Addressable::Template.new MALL_CATEGORY_URI
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-25 08:10:21 -08:00
|
|
|
# Spider the NC Mall for wearable items: discover category IDs from the
# main page, parse each category's JSON, reconcile each item's
# sold_in_mall flag, and save any newly discovered items.
# Returns the hash of new items (id => item).
def spider_mall!
  # Load the mall HTML, scan it for category onclicks
  items = {}
  spider_request(MALL_MAIN_URI).scan(MALL_CATEGORY_TRIGGER) do |match|
    # Plug the category ID into the URI for that category's JSON document
    uri = MALL_CATEGORY_TEMPLATE.expand :cat => match[0]
    begin
      # Load up that JSON and send it off to be parsed
      puts "Loading #{uri}..."
      category_items = spider_mall_category(spider_request(uri))
      puts "...found #{category_items.size} items"
      items.merge!(category_items)
    rescue SpiderJSONError => e
      # If there was a parsing error, add where it came from
      Rails.logger.warn "Error parsing JSON at #{uri}, skipping: #{e.message}"
    end
  end
  puts "#{items.size} items found"
  all_item_ids = items.keys
  # Find which of these already exist but aren't marked as sold_in_mall so
  # we can update them as being sold
  Item.not_sold_in_mall.where(:id => items.keys).select([:id, :name]).each do |item|
    items.delete(item.id)
    item.sold_in_mall = true
    item.save
    puts "#{item.name} (#{item.id}) now in mall, updated"
  end
  # Find items marked as sold_in_mall so we can skip those we just found
  # if they already are properly marked, and mark those that we didn't just
  # find as no longer sold_in_mall
  Item.sold_in_mall.select([:id, :name]).each do |item|
    if all_item_ids.include?(item.id)
      items.delete(item.id)
    else
      item.sold_in_mall = false
      item.save
      puts "#{item.name} (#{item.id}) no longer in mall, removed sold_in_mall status"
    end
  end
  # Whatever remains in `items` was never seen before: save it fresh.
  puts "#{items.size} new items"
  items.each do |item_id, item|
    item.save
    puts "Saved #{item.name} (#{item_id})"
  end
  items
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-27 15:41:06 -08:00
|
|
|
# Re-spider assets for up to +limit+ mall items, stalest-data first.
def spider_mall_assets!(limit)
  items = self.select([arel_table[:id], arel_table[:name]]).sold_in_mall.spidered_longest_ago.limit(limit).all
  puts "- #{items.size} items need asset spidering"
  AssetStrategy.build_strategies
  items.each do |item|
    AssetStrategy.spider item
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-27 15:41:06 -08:00
|
|
|
# GET +uri+ and return the response body, wrapping both socket failures
# and non-200 responses in SpiderHTTPError.
def spider_request(uri)
  begin
    response = Net::HTTP.get_response uri
  rescue SocketError => e
    raise SpiderHTTPError, "Error loading #{uri}: #{e.message}"
  end
  return response.body if response.is_a?(Net::HTTPOK)
  raise SpiderHTTPError, "Error loading #{uri}: Response was a #{response.class}"
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-25 08:10:21 -08:00
|
|
|
private
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-27 15:41:06 -08:00
|
|
|
# Coordinates asset spidering for a single item across pet types.
# Strategies are registered by name in Strategies and chain into each
# other: when a pet type yields body-specific assets, control passes to
# the :pass strategy; when every pet type yields only universal assets
# (or none), the :complete strategy runs instead.
class AssetStrategy
  # Registry of named strategies, populated by build_strategies.
  Strategies = {}

  MALL_ASSET_PATH = '/mall/ajax/get_item_assets.phtml'
  MALL_ASSET_QUERY = 'pet={pet_name}&oii={item_id}'
  MALL_ASSET_URI = Addressable::URI.new :scheme => 'http',
    :host => MALL_HOST, :path => MALL_ASSET_PATH,
    :query => MALL_ASSET_QUERY
  MALL_ASSET_TEMPLATE = Addressable::Template.new MALL_ASSET_URI

  # name: registry key, used in log output. options: :pass and :complete
  # name follow-up strategies (or :exit); :pet_types is the list of pet
  # types to model the item against.
  def initialize(name, options)
    @name = name
    @pass = options[:pass]
    @complete = options[:complete]
    @pet_types = options[:pet_types]
  end

  # Model the item on each pet type until we can tell whether its assets
  # are body-specific, then hand off to the configured next strategy.
  # (The `exit` local shadows Kernel#exit; it is just a flag here.)
  def spider(item)
    puts " - Using #{@name} strategy"
    exit = false
    @pet_types.each do |pet_type|
      swf_assets = load_for_pet_type(item, pet_type)
      if swf_assets
        contains_body_specific_assets = false
        swf_assets.each do |swf_asset|
          if swf_asset.body_specific?
            contains_body_specific_assets = true
            break
          end
        end
        if contains_body_specific_assets
          if @pass
            Strategies[@pass].spider(item) unless @pass == :exit
            exit = true
            break
          end
        else
          # if all are universal, no need to spider more
          puts " - No body specific assets; moving on"
          exit = true
          break
        end
      end
    end
    if !exit && @complete && @complete != :exit
      Strategies[@complete].spider(item)
    end
  end

  private

  # Model the item on this pet type's sample pet, recovering when the
  # sample pet has been repainted or deleted by recursing with another
  # pet of the same type (tracked via banned_pet_ids).
  def load_for_pet_type(item, pet_type, banned_pet_ids=[])
    pet_id = pet_type.pet_id
    pet_name = pet_type.pet_name
    pet_valid = nil
    begin
      pet = Pet.load(pet_name)
      if pet.pet_type_id == pet_type.id
        pet_valid = true
      else
        pet_valid = false
        puts " - Pet #{pet_name} is pet type \##{pet.pet_type_id}, not \##{pet_type.id}; saving it and loading new pet"
        pet.save!
      end
    rescue Pet::PetNotFound
      pet_valid = false
      puts " - Pet #{pet_name} no longer exists; destroying and loading new pet"
      Pet.find_by_name(pet_name).destroy
    end
    if pet_valid
      swf_assets = load_for_pet_name(item, pet_type, pet_name)
      if swf_assets
        puts " - Modeled with #{pet_name}, saved assets (#{swf_assets.map(&:id).join(', ')})"
      else
        puts " - Item #{item.name} does not fit #{pet_name}"
      end
      return swf_assets
    else
      # Pick another pet of the same type, skipping known-bad pets.
      banned_pet_ids << pet_id
      new_pet = pet_type.pets.select([:id, :name]).where(Pet.arel_table[:id].not_in(banned_pet_ids)).first
      if new_pet
        pet_type.pet_id = new_pet.id
        pet_type.pet_name = new_pet.name
        load_for_pet_type(item, pet_type, banned_pet_ids)
      else
        puts " - We have no more pets of type \##{pet_type.id}. Skipping"
        return nil
      end
    end
  end

  # Fetch the mall's asset JSON for (item, pet) and persist the assets it
  # describes. Returns the saved assets, or nil when the item doesn't fit
  # this pet.
  def load_for_pet_name(item, pet_type, pet_name)
    uri = MALL_ASSET_TEMPLATE.
      expand(
        :item_id => item.id,
        :pet_name => pet_name
      )
    raw_data = Item.spider_request(uri)
    data = JSON.parse(raw_data)
    item_id_key = item.id.to_s
    if !data.empty? && data[item_id_key] && data[item_id_key]['asset_data']
      data[item_id_key]['asset_data'].map do |asset_id_str, asset_data|
        item.zones_restrict = asset_data['restrict']
        item.save
        swf_asset = SwfAsset.find_or_initialize_by_type_and_remote_id(SwfAssetType, asset_id_str.to_i)
        swf_asset.type = SwfAssetType
        swf_asset.body_id = pet_type.body_id
        swf_asset.mall_data = asset_data
        item.swf_assets << swf_asset unless item.swf_assets.include? swf_asset
        swf_asset.save
        swf_asset
      end
    else
      nil
    end
  end

  class << self
    # Register a strategy instance under +name+.
    def add_strategy(name, options)
      Strategies[name] = new(name, options)
    end

    # Register a chain of strategies, one per pet type group: each group
    # tries its first pet type, cascades through the rest of the group on
    # a body-specific hit, and falls through to the next group otherwise.
    def add_cascading_strategy(name, options)
      pet_type_groups = options[:pet_types]
      pet_type_group_names = pet_type_groups.keys
      pet_type_group_names.each_with_index do |pet_type_group_name, i|
        remaining_pet_types = pet_type_groups[pet_type_group_name]
        first_pet_type = [remaining_pet_types.slice!(0)]
        cascade_name = "#{name}_cascade"
        next_name = pet_type_group_names[i + 1]
        next_name = next_name ? "group_#{next_name}" : options[:complete]
        first_strategy_options = {:complete => next_name, :pass => :exit,
                                  :pet_types => first_pet_type}
        unless remaining_pet_types.empty?
          first_strategy_options[:pass] = cascade_name
          add_strategy cascade_name, :complete => :exit,
                       :pet_types => remaining_pet_types
        end
        add_strategy name, first_strategy_options
        name = next_name
      end
    end

    # Entry point: run the :start strategy for the item, then record the
    # spidering timestamp.
    def spider(item)
      puts "- Spidering for #{item.name}"
      Strategies[:start].spider(item)
      item.last_spidered = Time.now
      item.save
      puts "- #{item.name} done spidering, saved last spidered timestamp"
    end

    # Lazily build the strategy graph: one standard pet type first, then
    # nonstandard colors grouped by color, then remaining standard types.
    def build_strategies
      if Strategies.empty?
        pet_type_t = PetType.arel_table
        require 'pet' # FIXME: console is whining when i don't do this
        pet_t = Pet.arel_table
        pet_types = PetType.select([pet_type_t[:id], pet_type_t[:body_id], "#{Pet.table_name}.id as pet_id, #{Pet.table_name}.name as pet_name"]).
          joins(:pets).group(pet_type_t[:id])
        remaining_standard_pet_types = pet_types.single_standard_color.order(:species_id)
        first_standard_pet_type = [remaining_standard_pet_types.slice!(0)]

        add_strategy :start, :pass => :remaining_standard, :complete => :first_nonstandard_color,
                     :pet_types => first_standard_pet_type

        add_strategy :remaining_standard, :complete => :exit,
                     :pet_types => remaining_standard_pet_types

        add_cascading_strategy :first_nonstandard_color, :complete => :remaining_standard,
                               :pet_types => pet_types.select(pet_type_t[:color_id]).nonstandard_colors.all.group_by(&:color_id)
      end
    end
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-25 08:10:21 -08:00
|
|
|
# Parse one mall category's JSON and return the wearable items it lists,
# keyed by item ID. Raises SpiderJSONError on malformed JSON or when the
# expected item-data key is missing.
def spider_mall_category(json)
  begin
    items_data = JSON.parse(json)[MALL_JSON_ITEM_DATA_KEY]
    unless items_data
      raise SpiderJSONError, "Missing key #{MALL_JSON_ITEM_DATA_KEY}"
    end
  rescue StandardError => e
    # Catch both errors parsing JSON and the missing key.
    # (Was `rescue Exception`, which would also swallow signals, exits,
    # and memory errors — never rescue that broadly.)
    raise SpiderJSONError, e.message
  end
  items = {}
  items_data.each do |item_id, item_data|
    if item_data['isWearable'] == 1
      relevant_item_data = item_data.slice('name', 'description', 'price')
      item = Item.new relevant_item_data
      item.id = item_data['id']
      item.thumbnail_url = sprintf(MALL_ITEM_URL_TEMPLATE, item_data['imageFile'])
      item.sold_in_mall = true
      items[item.id] = item
    end
  end
  items
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-25 08:10:21 -08:00
|
|
|
# Raised for any failure while spidering the NC Mall.
class SpiderError < RuntimeError;end
# HTTP-level failure: socket error or non-200 response.
class SpiderHTTPError < SpiderError;end
# JSON was malformed or missing the expected item-data key.
class SpiderJSONError < SpiderError;end
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-05-15 08:38:45 -07:00
|
|
|
private
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-06-22 09:42:25 -07:00
|
|
|
# Names of all registered search filters.
SearchFilterScopes = []
# Filters expensive enough that only one may appear per search.
LimitedSearchFilters = []
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-18 20:21:03 -08:00
|
|
|
def self.search_filter(name, options={}, &block)
|
|
|
|
assume_complement = options.delete(:assume_complement) || true
|
|
|
|
name = name.to_s
|
|
|
|
SearchFilterScopes << name
|
|
|
|
LimitedSearchFilters << name if options[:limit]
|
2011-07-17 14:24:29 -07:00
|
|
|
|
2010-11-18 20:21:03 -08:00
|
|
|
(class << self; self; end).instance_eval do
|
|
|
|
if options[:full]
|
|
|
|
define_method "search_filter_#{name}", &options[:full]
|
|
|
|
else
|
|
|
|
if assume_complement
|
|
|
|
define_method "search_filter_not_#{name}", &Item.search_filter_block(options, false, &block)
|
|
|
|
end
|
|
|
|
define_method "search_filter_#{name}", &Item.search_filter_block(options, true, &block)
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-18 20:21:03 -08:00
|
|
|
def self.single_search_filter(name, options={}, &block)
|
|
|
|
options[:assume_complement] = false
|
|
|
|
search_filter name, options, &block
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2011-07-17 14:24:29 -07:00
|
|
|
  # Wraps a filter's condition block in a Proc with the common
  # (str, user, scope) signature used by all generated search_filter_*
  # methods. When positive is false, the condition is negated by rendering
  # it to SQL and wrapping it in "!(...)" (MySQL-style negation).
  def self.search_filter_block(options, positive, &block)
    Proc.new { |str, user, scope|
      # Condition blocks may accept just the search string, or the string
      # plus the current user — dispatch on the block's arity.
      condition = block.arity == 1 ? block.call(str) : block.call(str, user)
      unless positive
        # Arel nodes must be rendered to SQL before the string-level wrap.
        condition = condition.to_sql if condition.respond_to?(:to_sql)
        condition = "!(#{condition})"
      end
      # Optionally narrow through a named scope (e.g. :join_swf_assets)
      # before applying the condition itself.
      scope = scope.send(options[:scope]) if options[:scope]
      scope.where(condition)
    }
  end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-06-22 09:42:25 -07:00
|
|
|
search_filter :name do |name|
|
2013-01-11 15:56:33 -08:00
|
|
|
Item::Translation.arel_table[:name].matches("%#{name}%")
|
2010-06-22 09:42:25 -07:00
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-06-22 09:42:25 -07:00
|
|
|
search_filter :description do |description|
|
2013-01-11 15:56:33 -08:00
|
|
|
Item::Translation.arel_table[:description].matches("%#{description}%")
|
2010-06-22 09:42:25 -07:00
|
|
|
end
|
2011-09-06 09:15:09 -07:00
|
|
|
|
|
|
|
def self.adjective_filters
|
|
|
|
@adjective_filters ||= {
|
|
|
|
'nc' => arel_table[:rarity_index].in(NCRarities),
|
|
|
|
'pb' => arel_table[:description].eq(PAINTBRUSH_SET_DESCRIPTION)
|
|
|
|
}
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-10-03 17:14:48 -07:00
|
|
|
search_filter :is do |adjective|
|
2011-09-06 09:15:09 -07:00
|
|
|
filter = adjective_filters[adjective]
|
2010-10-03 17:14:48 -07:00
|
|
|
unless filter
|
2010-11-16 20:32:53 -08:00
|
|
|
raise SearchError,
|
2010-10-03 17:14:48 -07:00
|
|
|
"We don't know how an item can be \"#{adjective}\". " +
|
|
|
|
"Did you mean is:nc or is:pb?"
|
|
|
|
end
|
|
|
|
filter
|
2010-07-10 09:42:18 -07:00
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2011-07-22 13:18:15 -07:00
|
|
|
  # Maps a "user:<adjective>" token to the closet-hanger `owned` value to
  # filter by: true => items the user owns, false => items the user wants,
  # nil => both ("all"/"items"). See parse_user_adjective below.
  USER_ADJECTIVES = {
    'own' => true,
    'owns' => true,
    'owned' => true,
    'want' => false,
    'wants' => false,
    'wanted' => false,
    'all' => nil,
    'items' => nil
  }
|
|
|
|
def self.parse_user_adjective(adjective, user)
|
|
|
|
unless USER_ADJECTIVES.has_key?(adjective)
|
2011-07-17 14:24:29 -07:00
|
|
|
raise SearchError, "We don't understand user:#{adjective}. " +
|
2011-07-22 13:18:15 -07:00
|
|
|
"Find items you own with user:owns, items you want with user:wants, or " +
|
|
|
|
"both with user:all"
|
2011-07-17 14:24:29 -07:00
|
|
|
end
|
|
|
|
|
|
|
|
unless user
|
|
|
|
raise SearchError, "It looks like you're not logged in, so you don't own any items."
|
|
|
|
end
|
2011-07-22 13:18:15 -07:00
|
|
|
|
|
|
|
USER_ADJECTIVES[adjective]
|
2011-07-17 14:24:29 -07:00
|
|
|
end
|
|
|
|
|
2011-07-22 13:18:15 -07:00
|
|
|
search_filter :user do |adjective, user|
|
|
|
|
# Though joins may seem more efficient here for the positive case, we need
|
|
|
|
# to be able to handle cases like "user:owns user:wants", which breaks on
|
|
|
|
# the JOIN approach. Just have to look up the IDs in advance.
|
2011-07-17 14:24:29 -07:00
|
|
|
|
2011-07-22 13:18:15 -07:00
|
|
|
owned_value = parse_user_adjective(adjective, user)
|
|
|
|
hangers = ClosetHanger.arel_table
|
|
|
|
items = user.closeted_items
|
|
|
|
items = items.where(ClosetHanger.arel_table[:owned].eq(owned_value)) unless owned_value.nil?
|
|
|
|
item_ids = items.map(&:id)
|
|
|
|
# Though it's best to do arel_table[:id].in(item_ids), it breaks in this
|
|
|
|
# version of Arel, and other conditions will overwrite this one. Since IDs
|
|
|
|
# are guaranteed to be integers, let's just build our own string condition
|
|
|
|
# and be done with it.
|
2011-07-31 20:50:33 -07:00
|
|
|
|
|
|
|
if item_ids.empty?
|
|
|
|
raise SearchError, "You don't #{ClosetHanger.verb :you, owned_value} " +
|
|
|
|
"any items yet. Head to Your Items to add some!"
|
|
|
|
end
|
|
|
|
|
2012-01-13 13:20:47 -08:00
|
|
|
arel_table[:id].in(item_ids)
|
2011-07-17 14:24:29 -07:00
|
|
|
end
|
|
|
|
|
2010-06-22 09:42:25 -07:00
|
|
|
search_filter :only do |species_name|
|
2010-11-18 20:25:34 -08:00
|
|
|
begin
|
|
|
|
id = Species.require_by_name(species_name).id
|
|
|
|
rescue Species::NotFound => e
|
|
|
|
raise SearchError, e.message
|
|
|
|
end
|
2010-06-22 09:42:25 -07:00
|
|
|
arel_table[:species_support_ids].eq(id.to_s)
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-06-22 09:42:25 -07:00
|
|
|
search_filter :species do |species_name|
|
2010-11-18 20:25:34 -08:00
|
|
|
begin
|
|
|
|
id = Species.require_by_name(species_name).id
|
|
|
|
rescue Species::NotFound => e
|
|
|
|
raise SearchError, e.message
|
|
|
|
end
|
2010-06-22 09:42:25 -07:00
|
|
|
ids = arel_table[:species_support_ids]
|
2010-11-16 20:32:53 -08:00
|
|
|
ids.eq('').or(ids.matches_any([
|
2010-06-22 09:42:25 -07:00
|
|
|
id,
|
|
|
|
"#{id},%",
|
|
|
|
"%,#{id},%",
|
|
|
|
"%,#{id}"
|
2010-11-16 20:32:53 -08:00
|
|
|
]))
|
2010-06-22 09:42:25 -07:00
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-18 20:21:03 -08:00
|
|
|
single_search_filter :type, {:limit => true, :scope => :join_swf_assets} do |zone_set_name|
|
2012-10-08 19:20:18 -07:00
|
|
|
zone_set = Zone.find_set(zone_set_name)
|
2010-11-16 20:32:53 -08:00
|
|
|
raise SearchError, "Type \"#{zone_set_name}\" does not exist" unless zone_set
|
2010-06-22 09:42:25 -07:00
|
|
|
SwfAsset.arel_table[:zone_id].in(zone_set.map(&:id))
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2011-07-17 14:24:29 -07:00
|
|
|
  # Matches items that occupy NO zone in the named zone set. Defined with
  # :full because the negation can't be expressed by negating :type's
  # condition — an item with several assets could match both.
  #
  # NOTE(review): this looks up zone sets via Zone::ItemZoneSets, while the
  # positive :type filter uses Zone.find_set — confirm whether this one
  # should use Zone.find_set too (the :type filter was updated later).
  single_search_filter :not_type, :full => lambda { |zone_set_name, user, scope|
    zone_set = Zone::ItemZoneSets[zone_set_name]
    raise SearchError, "Type \"#{zone_set_name}\" does not exist" unless zone_set
    psa = ParentSwfAssetRelationship.arel_table.alias
    sa = SwfAsset.arel_table.alias
    # Join to SWF assets, including the zone condition in the join so that
    # SWFs that don't match end up being NULL rows. Then we take the max SWF
    # asset ID, which is NULL if and only if there are no rows that matched
    # the zone requirement. If that max was NULL, return the object.
    item_ids = select(arel_table[:id]).joins(
      "LEFT JOIN #{ParentSwfAssetRelationship.table_name} #{psa.name} ON " +
        psa[:parent_type].eq(self.name).
        and(psa[:parent_id].eq(arel_table[:id])).
        to_sql
    ).
    joins(
      "LEFT JOIN #{SwfAsset.table_name} #{sa.name} ON " +
        sa[:type].eq(SwfAssetType).
        and(sa[:id].eq(psa[:swf_asset_id])).
        and(sa[:zone_id].in(zone_set.map(&:id))).
        to_sql
    ).
    group("#{table_name}.id").
    having("MAX(#{sa.name}.id) IS NULL"). # SwfAsset.arel_table[:id].maximum has no #eq
    map(&:id)
    scope.where(arel_table[:id].in(item_ids))
  }
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-05-15 08:38:45 -07:00
|
|
|
class Condition < String
|
2010-11-18 20:21:03 -08:00
|
|
|
attr_accessor :filter
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-18 20:21:03 -08:00
|
|
|
def initialize
|
|
|
|
@positive = true
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-18 20:21:03 -08:00
|
|
|
def filter?
|
|
|
|
!@filter.nil?
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-06-22 09:42:25 -07:00
|
|
|
def to_filter!
|
|
|
|
@filter = self.clone
|
2010-05-15 08:38:45 -07:00
|
|
|
self.replace ''
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-05-15 09:43:54 -07:00
|
|
|
def negate!
|
2010-11-18 20:21:03 -08:00
|
|
|
@positive = !@positive
|
2010-05-15 09:43:54 -07:00
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2011-07-17 14:24:29 -07:00
|
|
|
def narrow(scope, user)
|
2010-06-22 09:42:25 -07:00
|
|
|
if SearchFilterScopes.include?(filter)
|
2010-11-18 20:21:03 -08:00
|
|
|
polarized_filter = @positive ? filter : "not_#{filter}"
|
2011-07-17 14:24:29 -07:00
|
|
|
Item.send("search_filter_#{polarized_filter}", self, user, scope)
|
2010-05-15 12:00:53 -07:00
|
|
|
else
|
2010-11-16 20:32:53 -08:00
|
|
|
raise SearchError, "Filter #{filter} does not exist"
|
2010-05-15 08:38:45 -07:00
|
|
|
end
|
2010-06-22 09:42:25 -07:00
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-06-22 09:42:25 -07:00
|
|
|
def filter
|
|
|
|
@filter || 'name'
|
2010-05-15 08:38:45 -07:00
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-05-15 08:38:45 -07:00
|
|
|
def inspect
|
2010-06-22 09:42:25 -07:00
|
|
|
@filter ? "#{@filter}:#{super}" : super
|
2010-05-15 08:38:45 -07:00
|
|
|
end
|
|
|
|
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-11-16 20:32:53 -08:00
|
|
|
class SearchError < ArgumentError;end
|
2010-05-14 15:41:40 -07:00
|
|
|
end
|