class Item < ActiveRecord::Base
  include Flex::Model
  include PrettyParam

  set_inheritance_column 'inheritance_type' # PHP Impress used "type" to describe category

  SwfAssetType = 'object'

  translates :name, :description, :rarity

  has_many :closet_hangers
  has_one :contribution, :as => :contributed
  has_many :parent_swf_asset_relationships, :as => :parent
  has_many :swf_assets, :through => :parent_swf_asset_relationships

  attr_writer :current_body_id, :owned, :wanted

  NCRarities = [0, 500]
  PAINTBRUSH_SET_DESCRIPTION = 'This item is part of a deluxe paint brush set!'
  SPECIAL_COLOR_DESCRIPTION_REGEX =
    /This item is only wearable by Neopets painted ([a-zA-Z]+)\.|WARNING: This [a-zA-Z]+ can be worn by ([a-zA-Z]+) [a-zA-Z]+ ONLY!/

  cattr_reader :per_page
  @@per_page = 30

  scope :alphabetize_by_translations, lambda {
    it = Item::Translation.arel_table
    order(it[:name])
  }

  scope :join_swf_assets, joins(:swf_assets).group(arel_table[:id])

  scope :newest, order(arel_table[:created_at].desc) if arel_table[:created_at]

  scope :spidered_longest_ago, order(["(last_spidered IS NULL) DESC", "last_spidered DESC"])

  scope :sold_in_mall, where(:sold_in_mall => true)
  scope :not_sold_in_mall, where(:sold_in_mall => false)

  scope :sitemap, order([:id]).limit(49999)

  scope :with_closet_hangers, joins(:closet_hangers)
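
  # The scopes above compose like any Rails 3 relations; e.g. a hypothetical
  # query for recently added mall items might chain them as
  #   Item.sold_in_mall.newest.limit(10)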

  flex.sync self

  def flex_source
    indexed_attributes = {
      :is_nc => self.nc?,
      :is_pb => self.pb?,
      :species_support_id => self.supported_species_ids,
      :occupied_zone_id => self.occupied_zone_ids,
      :restricted_zone_id => self.restricted_zone_ids,
      :name => {}
    }

    I18n.usable_locales_with_neopets_language_code.each do |locale|
      I18n.with_locale(locale) do
        indexed_attributes[:name][locale] = self.name
      end
    end

    indexed_attributes.to_json
  end
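
  # Illustrative only: for a hypothetical NC item that occupies zone 3 and fits
  # every species, flex_source would produce JSON shaped roughly like
  #   {"is_nc":true,"is_pb":false,"species_support_id":[1,2,3],
  #    "occupied_zone_id":[3],"restricted_zone_id":[],
  #    "name":{"en":"Some Hat","es":"..."}}
  # (values are made up; the keys mirror the hash built above).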

  def closeted?
    @owned || @wanted
  end

  # Return an OrderedHash mapping users to the number of times they
  # contributed to this item's assets, from most contributions to least.
  def contributors_with_counts
    # Get contributing users' IDs
    swf_asset_ids = swf_assets.select(SwfAsset.arel_table[:id]).map(&:id)
    swf_asset_contributions = Contribution.select('user_id').
      where(:contributed_type => 'SwfAsset', :contributed_id => swf_asset_ids)
    contributor_ids = swf_asset_contributions.map(&:user_id)

    # Get the users, mapped by ID
    contributors_by_id = {}
    User.find(contributor_ids).each { |u| contributors_by_id[u.id] = u }

    # Count each user's contributions
    contributor_counts_by_id = Hash.new(0)
    contributor_ids.each { |id| contributor_counts_by_id[id] += 1 }

    # Build an OrderedHash mapping users to counts in descending order
    contributors_with_counts = ActiveSupport::OrderedHash.new
    contributor_counts_by_id.sort_by { |k, v| v }.reverse.each do |id, count|
      contributor = contributors_by_id[id]
      contributors_with_counts[contributor] = count
    end
    contributors_with_counts
  end
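
  # Illustrative only, with hypothetical users: if alice contributed two of
  # this item's assets and bob one, this returns an OrderedHash like
  #   { #<User alice> => 2, #<User bob> => 1 }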

  def nc?
    NCRarities.include?(rarity_index)
  end

  def pb?
    I18n.with_locale(:en) { self.description == PAINTBRUSH_SET_DESCRIPTION }
  end

  def owned?
    @owned
  end

  def wanted?
    @wanted
  end

  def restricted_zones(options={})
    options[:scope] ||= Zone.scoped
    options[:scope].find(restricted_zone_ids)
  end

  def restricted_zone_ids
    unless @restricted_zone_ids
      @restricted_zone_ids = []
      zones_restrict.split(//).each_with_index do |switch, id|
        @restricted_zone_ids << (id.to_i + 1) if switch == '1'
      end
    end
    @restricted_zone_ids
  end
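
  # Illustrative only: zones_restrict is a string of '0'/'1' flags indexed by
  # zone, so a hypothetical value of "0110" yields [2, 3] here (position i maps
  # to zone ID i + 1, as parsed above).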

  def occupied_zone_ids
    occupied_zones.map(&:id)
  end

  def occupied_zones(options={})
    options[:scope] ||= Zone.scoped
    all_body_ids = []
    zone_body_ids = {}
    selected_assets = swf_assets.select('body_id, zone_id').each do |swf_asset|
      zone_body_ids[swf_asset.zone_id] ||= []
      body_ids = zone_body_ids[swf_asset.zone_id]
      body_ids << swf_asset.body_id unless body_ids.include?(swf_asset.body_id)
      all_body_ids << swf_asset.body_id unless all_body_ids.include?(swf_asset.body_id)
    end
    zones = options[:scope].find(zone_body_ids.keys)
    zones_by_id = zones.inject({}) { |h, z| h[z.id] = z; h }
    total_body_ids = all_body_ids.size
    zone_body_ids.each do |zone_id, body_ids|
      zones_by_id[zone_id].sometimes = true if body_ids.size < total_body_ids
    end
    zones
  end
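
  # Note: the `sometimes` flag set above marks zones this item occupies for
  # only some of its body types (fewer body IDs than the item has overall),
  # presumably so callers can label those zones as conditional.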

  def affected_zones
    restricted_zones + occupied_zones
  end

  def special_color
    @special_color ||= determine_special_color
  end

  protected

  def determine_special_color
    I18n.with_locale(I18n.default_locale) do
      # Rather than go find the special description in all locales, let's just
      # run this logic in English.
      if description.include?(PAINTBRUSH_SET_DESCRIPTION)
        downcased_name = name.downcase
        Color.nonstandard.each do |color|
          return color if downcased_name.include?(color.name)
        end
      end

      match = description.match(SPECIAL_COLOR_DESCRIPTION_REGEX)
      if match
        color = match[1] || match[2]
        return Color.find_by_name(color.downcase)
      end
    end
  end
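
  # Illustrative only: a hypothetical description like
  #   "This item is only wearable by Neopets painted Maraquan."
  # matches SPECIAL_COLOR_DESCRIPTION_REGEX with match[1] == "Maraquan", while
  # the "WARNING: ... ONLY!" variant fills match[2] instead; either capture is
  # downcased and looked up via Color.find_by_name above.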

  public

  def species_support_ids
    @species_support_ids_array ||= read_attribute('species_support_ids').split(',').map(&:to_i) rescue nil
  end

  def species_support_ids=(replacement)
    @species_support_ids_array = nil
    replacement = replacement.join(',') if replacement.is_a?(Array)
    write_attribute('species_support_ids', replacement)
  end
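
  # Illustrative only: the species_support_ids column holds a comma-separated
  # string, so a stored value of "1,2,3" reads back as [1, 2, 3], and assigning
  # [4, 5] writes the string "4,5".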

  def supported_species
    body_ids = swf_assets.select([:body_id]).map(&:body_id)
    return Species.all if body_ids.include?(0)

    pet_types = PetType.where(:body_id => body_ids).select([:species_id])
    species_ids = pet_types.map(&:species_id).uniq

    # If there are multiple known supported species, it probably supports them
    # all. (I've never heard of only a handful of species being supported :P)
    species_ids.size > 1 ? Species.all : Species.find(species_ids)
  end

  def supported_species_ids
    supported_species.map(&:id)
  end

  def support_species?(species)
    species_support_ids.blank? || species_support_ids.include?(species.id)
  end

  def as_json(options = {})
    {
      :description => description,
      :id => id,
      :name => name,
      :thumbnail_url => thumbnail_url,
      :zones_restrict => zones_restrict,
      :rarity_index => rarity_index,
      :owned => owned?,
      :wanted => wanted?,
      :nc => nc?
    }
  end
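
  # Illustrative only: a hypothetical item would serialize as a hash like
  #   { :description => "...", :id => 1234, :name => "Some Hat",
  #     :thumbnail_url => "http://images.neopets.com/items/somehat.gif",
  #     :zones_restrict => "0000...", :rarity_index => 101,
  #     :owned => false, :wanted => true, :nc => false }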

  before_create do
    self.sold_in_mall ||= false
    true
  end
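
  # The trailing `true` above matters: in this era of Rails, a before_* callback
  # that returns false halts the save, and `self.sold_in_mall ||= false` returns
  # false whenever it assigns the default.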

  def handle_assets!
    if @parent_swf_asset_relationships_to_update && @current_body_id
      new_swf_asset_ids = @parent_swf_asset_relationships_to_update.map(&:swf_asset_id)
      rels = ParentSwfAssetRelationship.arel_table
      swf_assets = SwfAsset.arel_table

      # If a relationship used to bind an item and asset for this body type,
      # but doesn't in this sample, the two have been unbound. Delete the
      # relationship.
      ids_to_delete = self.parent_swf_asset_relationships.
        select(rels[:id]).
        joins(:swf_asset).
        where(rels[:swf_asset_id].not_in(new_swf_asset_ids)).
        where(swf_assets[:body_id].in([@current_body_id, 0])).
        map(&:id)

      unless ids_to_delete.empty?
        ParentSwfAssetRelationship.where(:id => ids_to_delete).delete_all
      end

      @parent_swf_asset_relationships_to_update.each do |rel|
        rel.save!
        rel.swf_asset.save!
      end
    end
  end
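
  # Presumably meant to run after saving an item built by
  # collection_from_pet_type_and_registries below: it only acts once
  # @parent_swf_asset_relationships_to_update and @current_body_id have been
  # assigned, and it prunes stale relationships for that body before saving
  # the new ones.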

  def body_specific?
    # If there are species support IDs (it's not empty), the item is
    # body-specific. If it's empty, it fits everyone the same.
    !species_support_ids.empty?
  end

  def origin_registry_info=(info)
    # bear in mind that numbers from registries are floats
    self.species_support_ids = info[:species_support].map(&:to_i)
    attribute_names.each do |attribute|
      value = info[attribute.to_sym]
      if value
        value = value.to_i if value.is_a? Float
        self[attribute] = value
      end
    end
  end

  def pending_swf_assets
    @parent_swf_asset_relationships_to_update.inject([]) do |all_swf_assets, relationship|
      all_swf_assets << relationship.swf_asset
    end
  end

  def parent_swf_asset_relationships_to_update=(rels)
    @parent_swf_asset_relationships_to_update = rels
  end

  def needed_translations
    translatable_locales = Set.new(I18n.locales_with_neopets_language_code)
    translated_locales = Set.new(translations.map(&:locale))
    translatable_locales - translated_locales
  end
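
  # Illustrative only: if the app's translatable locales were [:en, :es, :pt]
  # and this item already had :en and :pt translation rows, this would return
  # a Set containing just :es.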

  def self.all_by_ids_or_children(ids, swf_assets)
    swf_asset_ids = []
    swf_assets_by_id = {}
    swf_assets_by_parent_id = {}
    swf_assets.each do |swf_asset|
      id = swf_asset.id
      swf_assets_by_id[id] = swf_asset
      swf_asset_ids << id
    end
    SwfAsset.select([
      SwfAsset.arel_table[:id],
      ParentSwfAssetRelationship.arel_table[:parent_id]
    ]).object_assets.joins(:parent_swf_asset_relationships).
      where(SwfAsset.arel_table[:id].in(swf_asset_ids)).each do |row|
      item_id = row.parent_id.to_i
      swf_assets_by_parent_id[item_id] ||= []
      swf_assets_by_parent_id[item_id] << swf_assets_by_id[row.id.to_i]
      ids << item_id
    end
    find(ids).tap do |items|
      items.each do |item|
        swf_assets = swf_assets_by_parent_id[item.id]
        if swf_assets
          swf_assets.each do |swf_asset|
            swf_asset.item = item
          end
        end
      end
    end
  end
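
  # In short: given item IDs plus loose SWF assets, this finds the items that
  # own those assets, folds their IDs into the list, loads everything with
  # find(ids), and attaches each asset back onto its parent item.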

  def self.collection_from_pet_type_and_registries(pet_type, info_registry, asset_registry, scope=Item.scoped)
    # bear in mind that registries are arrays with many nil elements,
    # due to how the parser works

    # Collect existing items
    items = {}
    item_ids = []
    info_registry.each do |item_id, info|
      if info && info[:is_compatible]
        item_ids << item_id.to_i
      end
    end

    # Collect existing relationships
    existing_relationships_by_item_id_and_swf_asset_id = {}
    existing_items = scope.find_all_by_id(item_ids, :include => :parent_swf_asset_relationships)
    existing_items.each do |item|
      items[item.id] = item
      relationships_by_swf_asset_id = {}
      item.parent_swf_asset_relationships.each do |relationship|
        relationships_by_swf_asset_id[relationship.swf_asset_id] = relationship
      end
      existing_relationships_by_item_id_and_swf_asset_id[item.id] =
        relationships_by_swf_asset_id
    end

    # Collect existing assets
    swf_asset_ids = []
    asset_registry.each do |asset_id, asset_data|
      swf_asset_ids << asset_id.to_i if asset_data
    end
    existing_swf_assets = SwfAsset.object_assets.includes(:zone).
      find_all_by_remote_id swf_asset_ids
    existing_swf_assets_by_remote_id = {}
    existing_swf_assets.each do |swf_asset|
      existing_swf_assets_by_remote_id[swf_asset.remote_id] = swf_asset
    end

    # With each asset in the registry, build and update the item, its SWF
    # asset, and the relationship between them
    relationships_by_item_id = {}
    asset_registry.each do |asset_id, asset_data|
      if asset_data
        # Build and update the item
        item_id = asset_data[:obj_info_id].to_i
        next unless item_ids.include?(item_id) # skip incompatible (Uni Bug)
        item = items[item_id]
        unless item
          item = Item.new
          item.id = item_id
          items[item_id] = item
        end
        item.origin_registry_info = info_registry[item.id.to_s]
        item.current_body_id = pet_type.body_id

        # Build and update the SWF
        swf_asset_remote_id = asset_data[:asset_id].to_i
        swf_asset = existing_swf_assets_by_remote_id[swf_asset_remote_id]
        unless swf_asset
          swf_asset = SwfAsset.new
          swf_asset.remote_id = swf_asset_remote_id
        end
        swf_asset.origin_object_data = asset_data
        swf_asset.origin_pet_type = pet_type
        swf_asset.item = item

        # Build and update the relationship
        relationship = existing_relationships_by_item_id_and_swf_asset_id[item.id][swf_asset.id] rescue nil
        unless relationship
          relationship = ParentSwfAssetRelationship.new
          relationship.parent = item
        end
        relationship.swf_asset = swf_asset
        relationships_by_item_id[item_id] ||= []
        relationships_by_item_id[item_id] << relationship
      end
    end

    # Set up the relationships to be updated on item save
    relationships_by_item_id.each do |item_id, relationships|
      items[item_id].parent_swf_asset_relationships_to_update = relationships
    end

    items.values
  end
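
  # Sketch of intended use (hypothetical caller): after parsing a pet's
  # customization data into info/asset registries, something like
  #   items = Item.collection_from_pet_type_and_registries(pet_type, info, assets)
  #   items.each { |i| i.save; i.handle_assets! }
  # would persist the items and then sync their asset relationships.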

  class << self
    MALL_HOST = 'ncmall.neopets.com'
    MALL_MAIN_PATH = '/mall/shop.phtml'
    MALL_CATEGORY_PATH = '/mall/ajax/load_page.phtml'
    MALL_CATEGORY_QUERY = 'type=browse&cat={cat}&lang=en'
    MALL_CATEGORY_TRIGGER = /load_items_pane\("browse", ([0-9]+)\);/
    MALL_JSON_ITEM_DATA_KEY = 'object_data'
    MALL_ITEM_URL_TEMPLATE = 'http://images.neopets.com/items/%s.gif'

    MALL_MAIN_URI = Addressable::URI.new :scheme => 'http',
      :host => MALL_HOST, :path => MALL_MAIN_PATH
    MALL_CATEGORY_URI = Addressable::URI.new :scheme => 'http',
      :host => MALL_HOST, :path => MALL_CATEGORY_PATH,
      :query => MALL_CATEGORY_QUERY
    MALL_CATEGORY_TEMPLATE = Addressable::Template.new MALL_CATEGORY_URI
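
    # Illustrative only: expanding the category template with a hypothetical
    # category ID, e.g. MALL_CATEGORY_TEMPLATE.expand(:cat => '7'), yields
    #   http://ncmall.neopets.com/mall/ajax/load_page.phtml?type=browse&cat=7&lang=en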

    def spider_mall!
      # Load the mall HTML, scan it for category onclicks
      items = {}
      spider_request(MALL_MAIN_URI).scan(MALL_CATEGORY_TRIGGER) do |match|
        # Plug the category ID into the URI for that category's JSON document
        uri = MALL_CATEGORY_TEMPLATE.expand :cat => match[0]
        begin
          # Load up that JSON and send it off to be parsed
          puts "Loading #{uri}..."
          category_items = spider_mall_category(spider_request(uri))
          puts "...found #{category_items.size} items"
          items.merge!(category_items)
        rescue SpiderJSONError => e
          # If there was a parsing error, add where it came from
          Rails.logger.warn "Error parsing JSON at #{uri}, skipping: #{e.message}"
        end
      end
      puts "#{items.size} items found"
      all_item_ids = items.keys
      # Find which of these already exist but aren't marked as sold_in_mall so
      # we can update them as being sold
      Item.not_sold_in_mall.where(:id => items.keys).select([:id]).each do |item|
        items.delete(item.id)
        item.sold_in_mall = true
        item.save
        puts "#{item.name} (#{item.id}) now in mall, updated"
      end
      # Find items marked as sold_in_mall so we can skip those we just found
      # if they already are properly marked, and mark those that we didn't just
      # find as no longer sold_in_mall
      Item.sold_in_mall.select([:id, :name]).each do |item|
        if all_item_ids.include?(item.id)
          items.delete(item.id)
        else
          item.sold_in_mall = false
          item.save
          puts "#{item.name} (#{item.id}) no longer in mall, removed sold_in_mall status"
        end
      end
      puts "#{items.size} new items"
      items.each do |item_id, item|
        item.save
        puts "Saved #{item.name} (#{item_id})"
      end
      items
    end

    def spider_mall_assets!(limit)
      items = self.select([arel_table[:id]]).sold_in_mall.spidered_longest_ago.limit(limit).all
      puts "- #{items.size} items need asset spidering"
      AssetStrategy.build_strategies
      items.each do |item|
        AssetStrategy.spider item
      end
    end

    def spider_request(uri)
      begin
        response = Net::HTTP.get_response uri
      rescue SocketError => e
        raise SpiderHTTPError, "Error loading #{uri}: #{e.message}"
      end
      unless response.is_a? Net::HTTPOK
        raise SpiderHTTPError, "Error loading #{uri}: Response was a #{response.class}"
      end
      response.body
    end

    private

    class AssetStrategy
      Strategies = {}

      MALL_ASSET_PATH = '/mall/ajax/get_item_assets.phtml'
      MALL_ASSET_QUERY = 'pet={pet_name}&oii={item_id}'
      MALL_ASSET_URI = Addressable::URI.new :scheme => 'http',
        :host => MALL_HOST, :path => MALL_ASSET_PATH,
        :query => MALL_ASSET_QUERY
      MALL_ASSET_TEMPLATE = Addressable::Template.new MALL_ASSET_URI
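
      # Illustrative only: expanding MALL_ASSET_TEMPLATE with a hypothetical
      # pet name and item ID, e.g. :pet_name => 'somepet', :item_id => 123,
      # yields
      #   http://ncmall.neopets.com/mall/ajax/get_item_assets.phtml?pet=somepet&oii=123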

      def initialize(name, options)
        @name = name
        @pass = options[:pass]
        @complete = options[:complete]
        @pet_types = options[:pet_types]
      end

      def spider(item)
        puts " - Using #{@name} strategy"
        exit = false
        @pet_types.each do |pet_type|
          swf_assets = load_for_pet_type(item, pet_type)
          if swf_assets
            contains_body_specific_assets = false
            swf_assets.each do |swf_asset|
              if swf_asset.body_specific?
                contains_body_specific_assets = true
                break
              end
            end
            if contains_body_specific_assets
              if @pass
                Strategies[@pass].spider(item) unless @pass == :exit
                exit = true
                break
              end
            else
              # if all are universal, no need to spider more
              puts " - No body specific assets; moving on"
              exit = true
              break
            end
          end
        end
        if !exit && @complete && @complete != :exit
          Strategies[@complete].spider(item)
        end
      end
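
      # Flow of the method above: the first pet type the item fits decides the
      # outcome. Body-specific assets hand control to the @pass strategy;
      # universal assets stop spidering entirely; and if the item fits none of
      # this strategy's pet types, it falls through to the @complete strategy.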

      private

      def load_for_pet_type(item, pet_type, banned_pet_ids=[])
        pet_id = pet_type.pet_id
        pet_name = pet_type.pet_name
        pet_valid = nil
        begin
          pet = Pet.load(pet_name)
          if pet.pet_type_id == pet_type.id
            pet_valid = true
          else
            pet_valid = false
            puts " - Pet #{pet_name} is pet type \##{pet.pet_type_id}, not \##{pet_type.id}; saving it and loading new pet"
            pet.save!
          end
        rescue Pet::PetNotFound
          pet_valid = false
          puts " - Pet #{pet_name} no longer exists; destroying and loading new pet"
          Pet.find_by_name(pet_name).destroy
        end
        if pet_valid
          swf_assets = load_for_pet_name(item, pet_type, pet_name)
          if swf_assets
            puts " - Modeled with #{pet_name}, saved assets (#{swf_assets.map(&:id).join(', ')})"
          else
            puts " - Item #{item.name} does not fit #{pet_name}"
          end
          return swf_assets
        else
          banned_pet_ids << pet_id
          new_pet = pet_type.pets.select([:id, :name]).where(Pet.arel_table[:id].not_in(banned_pet_ids)).first
          if new_pet
            pet_type.pet_id = new_pet.id
            pet_type.pet_name = new_pet.name
            load_for_pet_type(item, pet_type, banned_pet_ids)
          else
            puts " - We have no more pets of type \##{pet_type.id}. Skipping"
            return nil
          end
        end
      end

      def load_for_pet_name(item, pet_type, pet_name)
        uri = MALL_ASSET_TEMPLATE.
          expand(
            :item_id => item.id,
            :pet_name => pet_name
          )
        raw_data = Item.spider_request(uri)
        data = JSON.parse(raw_data)
        item_id_key = item.id.to_s
        if !data.empty? && data[item_id_key] && data[item_id_key]['asset_data']
          data[item_id_key]['asset_data'].map do |asset_id_str, asset_data|
            item.zones_restrict = asset_data['restrict']
            item.save
            swf_asset = SwfAsset.find_or_initialize_by_type_and_remote_id(SwfAssetType, asset_id_str.to_i)
            swf_asset.type = SwfAssetType
            swf_asset.body_id = pet_type.body_id
            swf_asset.mall_data = asset_data
            item.swf_assets << swf_asset unless item.swf_assets.include? swf_asset
            swf_asset.save
            swf_asset
          end
        else
          nil
        end
      end
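
      # Illustrative only: the endpoint is expected to answer with JSON roughly
      # shaped like
      #   { "<item id>": { "asset_data": { "<asset id>": { "restrict": "000..." } } } }
      # and each asset_data entry above becomes (or updates) a SwfAsset record.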

      class << self
        def add_strategy(name, options)
          Strategies[name] = new(name, options)
        end

        def add_cascading_strategy(name, options)
          pet_type_groups = options[:pet_types]
          pet_type_group_names = pet_type_groups.keys
          pet_type_group_names.each_with_index do |pet_type_group_name, i|
            remaining_pet_types = pet_type_groups[pet_type_group_name]
            first_pet_type = [remaining_pet_types.slice!(0)]
            cascade_name = "#{name}_cascade"
            next_name = pet_type_group_names[i + 1]
            next_name = next_name ? "group_#{next_name}" : options[:complete]
            first_strategy_options = {:complete => next_name, :pass => :exit,
                                      :pet_types => first_pet_type}
            unless remaining_pet_types.empty?
              first_strategy_options[:pass] = cascade_name
              add_strategy cascade_name, :complete => :exit,
                :pet_types => remaining_pet_types
            end
            add_strategy name, first_strategy_options
            name = next_name
          end
        end
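
        # In effect, each group of pet types gets a strategy that tries only
        # the group's first pet type: on body-specific assets it passes to a
        # "<name>_cascade" strategy covering the rest of the group, and when
        # the item doesn't fit it moves on to the next group (or to
        # options[:complete] after the last one).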

        def spider(item)
          puts "- Spidering for #{item.name}"
          Strategies[:start].spider(item)
          item.last_spidered = Time.now
          item.save
          puts "- #{item.name} done spidering, saved last spidered timestamp"
        end

        def build_strategies
          if Strategies.empty?
            pet_type_t = PetType.arel_table
            require 'pet' # FIXME: console is whining when i don't do this
            pet_t = Pet.arel_table
            pet_types = PetType.select([pet_type_t[:id], pet_type_t[:body_id], "#{Pet.table_name}.id as pet_id, #{Pet.table_name}.name as pet_name"]).
              joins(:pets).group(pet_type_t[:id])
            remaining_standard_pet_types = pet_types.single_standard_color.order(:species_id)
            first_standard_pet_type = [remaining_standard_pet_types.slice!(0)]

            add_strategy :start, :pass => :remaining_standard, :complete => :first_nonstandard_color,
              :pet_types => first_standard_pet_type

            add_strategy :remaining_standard, :complete => :exit,
              :pet_types => remaining_standard_pet_types

            add_cascading_strategy :first_nonstandard_color, :complete => :remaining_standard,
              :pet_types => pet_types.select(pet_type_t[:color_id]).nonstandard_colors.all.group_by(&:color_id)
          end
        end
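
        # Net effect of the registrations above (as I read it): model with one
        # standard-color pet first; if its assets are body-specific, work
        # through the remaining standard pet types; if the item doesn't fit
        # standard pets at all, fall through to the nonstandard colors, one
        # color group at a time, before coming back to the remaining standard
        # pets.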
      end
    end

    def spider_mall_category(json)
      begin
        items_data = JSON.parse(json)[MALL_JSON_ITEM_DATA_KEY]
        unless items_data
          raise SpiderJSONError, "Missing key #{MALL_JSON_ITEM_DATA_KEY}"
        end
      rescue Exception => e
        # Catch both errors parsing JSON and the missing key
        raise SpiderJSONError, e.message
      end
      items = {}
      items_data.each do |item_id, item_data|
        if item_data['isWearable'] == 1
          relevant_item_data = item_data.slice('name', 'description', 'price')
          item = Item.new relevant_item_data
          item.id = item_data['id']
          item.thumbnail_url = sprintf(MALL_ITEM_URL_TEMPLATE, item_data['imageFile'])
          item.sold_in_mall = true
          items[item.id] = item
        end
      end
      items
    end
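
    # Illustrative only: the category JSON is expected to look roughly like
    #   { "object_data": { "1234": { "id": 1234, "name": "Some Hat",
    #       "description": "...", "price": 100, "isWearable": 1,
    #       "imageFile": "somehat" } } }
    # with only the isWearable entries turned into Item records above.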

    class SpiderError < RuntimeError; end
    class SpiderHTTPError < SpiderError; end
    class SpiderJSONError < SpiderError; end
  end
end