2023-08-02 16:05:02 -07:00
|
|
|
class Item < ApplicationRecord
include PrettyParam
include Item::Dyeworks

# We use the `type` column to mean something other than what Rails means!
# (This disables Rails single-table inheritance, which claims `type` by default.)
self.inheritance_column = nil

# The `type` value used for SWF assets that belong to an item.
SwfAssetType = 'object'

# Integer-set columns, (de)serialized by a custom coder.
serialize :cached_compatible_body_ids, coder: Serializers::IntegerSet
serialize :cached_occupied_zone_ids, coder: Serializers::IntegerSet

has_many :closet_hangers
has_one :contribution, as: :contributed, inverse_of: :contributed
has_one :nc_mall_record
has_many :parent_swf_asset_relationships, as: :parent
has_many :swf_assets, through: :parent_swf_asset_relationships

# A Dyeworks item points back at the base item it's dyed from; when not set
# explicitly, the base item is inferred (see Item::Dyeworks).
belongs_to :dyeworks_base_item, class_name: "Item",
default: -> { inferred_dyeworks_base_item }, optional: true
has_many :dyeworks_variants, class_name: "Item",
inverse_of: :dyeworks_base_item
|
2024-06-07 20:10:06 -07:00
|
|
|
|
2024-11-19 17:00:47 -08:00
|
|
|
# We require a name field. A number of other fields must be *specified*: they
# can't be nil, to help ensure we aren't forgetting any fields when importing
# items. But sometimes they happen to be blank (e.g. when TNT leaves an item
# description empty, oops), in which case we want to accept that reality!
validates_presence_of :name
validates :description, :thumbnail_url, :rarity, :price, :zones_restrict,
exclusion: {in: [nil], message: "must be specified"}
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2024-11-20 11:22:33 -08:00
|
|
|
# Recompute the cached_* columns whenever the modeling status hint changes,
# since it feeds into the fully-modeled prediction.
after_save :update_cached_fields,
if: :modeling_status_hint_previously_changed?

# Per-request state set externally (not persisted); read back via
# `owned?` / `wanted?` below.
attr_writer :current_body_id, :owned, :wanted
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-09-08 19:49:39 -07:00
|
|
|
# Rarity index values that mark an item as NC (Neocash). Frozen: shared
# constant, must not be mutated.
NCRarities = [0, 500].freeze

# The exact description text used by paint brush (PB) set items; `pb?` and
# the is_pb/is_not_pb scopes match against it. Frozen for the same reason.
PAINTBRUSH_SET_DESCRIPTION = 'This item is part of a deluxe paint brush set!'.freeze
|
|
|
|
|
2023-07-22 14:04:01 -07:00
|
|
|
# Newest items first. (The `if` guard is defensive; `arel_table[:created_at]`
# returns an Arel attribute object, which is always truthy, so the ordering
# always applies.)
scope :newest, -> {
order(arel_table[:created_at].desc) if arel_table[:created_at]
}

# Ordered, capped listing — presumably sized to stay under the sitemap
# protocol's 50,000-URL limit.
scope :sitemap, -> { order([:id]).limit(49999) }
|
2011-05-20 17:49:48 -07:00
|
|
|
|
2024-02-25 15:00:22 -08:00
|
|
|
# Substring match on name. `sanitize_sql_like` escapes `%`/`_` in the
# user-provided value before it's embedded in the LIKE pattern.
scope :name_includes, ->(value) {
Item.where("name LIKE ?", "%" + sanitize_sql_like(value) + "%")
}
# Negated substring match on name; same escaping as `name_includes`.
scope :name_excludes, ->(value) {
Item.where("name NOT LIKE ?", "%" + sanitize_sql_like(value) + "%")
}
|
2023-07-22 18:13:11 -07:00
|
|
|
# NC items: NC rarity, or manually flagged as NC.
# NOTE: We deliberately write `.eq(true)` instead of passing the bare Arel
# attribute as a condition — some Rails versions crash with
# "undefined method `fetch_attribute'" when merging scopes that use a bare
# attribute as a predicate.
scope :is_nc, -> {
i = Item.arel_table
where(i[:rarity_index].in(Item::NCRarities).or(i[:is_manually_nc].eq(true)))
}
|
2024-02-25 12:57:04 -08:00
|
|
|
# Non-NC items: the negation of the same condition `is_nc` builds. The
# explicit `.eq(true)` (rather than a bare Arel attribute) avoids an Arel
# crash when merging scopes.
scope :is_not_nc, -> {
i = Item.arel_table
where(i[:rarity_index].in(Item::NCRarities).or(i[:is_manually_nc].eq(true)).not)
}
|
2024-02-25 12:57:04 -08:00
|
|
|
# NP items are the leftovers: neither NC nor PB.
scope :is_np, -> {
self.is_not_nc.is_not_pb
}

# Non-NP items: NC or PB.
scope :is_not_np, -> {
self.merge Item.is_nc.or(Item.is_pb)
}
|
2023-07-26 11:51:52 -07:00
|
|
|
# PB (paint brush set) items, identified by their shared description text.
# `sanitize_sql_like` escapes any LIKE wildcards in the constant.
scope :is_pb, -> {
where('description LIKE ?',
'%' + sanitize_sql_like(PAINTBRUSH_SET_DESCRIPTION) + '%')
}

scope :is_not_pb, -> {
where('description NOT LIKE ?',
'%' + sanitize_sql_like(PAINTBRUSH_SET_DESCRIPTION) + '%')
}
|
2024-11-19 15:52:52 -08:00
|
|
|
# Modeling status, read from the cached prediction column (maintained by
# `update_cached_fields`).
scope :is_modeled, -> {
where(cached_predicted_fully_modeled: true)
}

scope :is_not_modeled, -> {
where(cached_predicted_fully_modeled: false)
}
|
2024-01-23 05:43:00 -08:00
|
|
|
# Items occupying any zone matching `zone_label`: OR together the per-zone
# scopes, starting from `none`.
scope :occupies, ->(zone_label) {
Zone.matching_label(zone_label).
map { |z| occupies_zone_id(z.id) }.reduce(none, &:or)
}

# Items occupying no zone matching `zone_label`: AND together the per-zone
# negations, starting from `all`.
scope :not_occupies, ->(zone_label) {
Zone.matching_label(zone_label).
map { |z| not_occupies_zone_id(z.id) }.reduce(all, &:and)
}

# `cached_occupied_zone_ids` is stored as a comma-separated list, so zone
# membership can be tested with MySQL's FIND_IN_SET.
scope :occupies_zone_id, ->(zone_id) {
where("FIND_IN_SET(?, cached_occupied_zone_ids) > 0", zone_id)
}

scope :not_occupies_zone_id, ->(zone_id) {
where.not("FIND_IN_SET(?, cached_occupied_zone_ids) > 0", zone_id)
}
|
2024-01-23 05:43:00 -08:00
|
|
|
# Items that restrict any zone matching `zone_label`. `zones_restrict` is a
# string of "0"/"1" flags positioned by zone ID (see `restricted_zone_ids`),
# so we test the character at each matching zone's 1-indexed position.
scope :restricts, ->(zone_label) {
zone_ids = Zone.matching_label(zone_label).map(&:id)
# Guard: with no matching zones, the joined condition would be the empty
# string, which is invalid SQL. No zones to restrict means no matches.
return none if zone_ids.empty?
condition = zone_ids.map { '(SUBSTR(items.zones_restrict, ?, 1) = "1")' }.join(' OR ')
where(condition, *zone_ids)
}

# Items that restrict no zone matching `zone_label` (negation of the above).
scope :not_restricts, ->(zone_label) {
zone_ids = Zone.matching_label(zone_label).map(&:id)
# Guard: an empty condition would render as "NOT ()", a SQL syntax error.
# Nothing to exclude means every item qualifies.
return all if zone_ids.empty?
condition = zone_ids.map { '(SUBSTR(items.zones_restrict, ?, 1) = "1")' }.join(' OR ')
where("NOT (#{condition})", *zone_ids)
}
|
2023-07-28 14:45:10 -07:00
|
|
|
# Items compatible with `body_id`. Body ID 0 is the fits-everyone sentinel,
# so we match either the specific body or 0 in the cached set.
scope :fits, ->(body_id) {
where("FIND_IN_SET(?, cached_compatible_body_ids) > 0", body_id).
or(where("FIND_IN_SET('0', cached_compatible_body_ids) > 0"))
}

scope :not_fits, ->(body_id) {
where.not("FIND_IN_SET(?, cached_compatible_body_ids) > 0", body_id).
and(where.not("FIND_IN_SET('0', cached_compatible_body_ids) > 0"))
}
|
2023-07-22 18:13:11 -07:00
|
|
|
|
2023-11-03 16:20:02 -07:00
|
|
|
# Look up this item's NC trade value from the Owls value guide. Returns nil
# for non-NC items, when Owls has no listing for the name, or on network
# failure (logged, not raised).
def nc_trade_value
return nil unless nc?

# Load the trade value, if we haven't already. Note that, because the trade
# value may be nil, we also save an explicit boolean for whether we've
# already looked it up, rather than checking if the saved value is empty.
return @nc_trade_value if @nc_trade_value_loaded

@nc_trade_value = begin
Rails.logger.debug "Item #{id} (#{name}) <lookup>"
OwlsValueGuide.find_by_name(name)
rescue OwlsValueGuide::NotFound => error
Rails.logger.debug("No NC trade value listed for #{name} (#{id})")
nil
rescue OwlsValueGuide::NetworkError => error
Rails.logger.error("Couldn't load nc_trade_value: #{error.full_message}")
nil
end

@nc_trade_value_loaded = true
@nc_trade_value
end
|
2012-10-24 20:09:05 -07:00
|
|
|
|
|
|
|
# Return a Hash mapping users to the number of times they contributed to
# this item's assets, from most contributions to least. (Plain Hashes
# preserve insertion order, so the descending order survives in the result;
# ActiveSupport::OrderedHash is deprecated and no longer needed.)
def contributors_with_counts
# Get contributing users' IDs, one entry per contributed asset.
swf_asset_ids = swf_assets.select(SwfAsset.arel_table[:id]).map(&:id)
contributor_ids = Contribution.select('user_id').
where(:contributed_type => 'SwfAsset', :contributed_id => swf_asset_ids).
map(&:user_id)

# Get the users, mapped by ID. (Dedupe first: a user may appear once per
# contributed asset.)
contributors_by_id = {}
User.find(contributor_ids.uniq).each { |u| contributors_by_id[u.id] = u }

# Count each user's contributions, then emit users in descending count order.
contributor_ids.tally.
sort_by { |_id, count| -count }.
to_h { |id, count| [contributors_by_id[id], count] }
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-09-08 19:49:39 -07:00
|
|
|
# An item is NC if it's been manually flagged as such, or if its rarity
# index is one of the NC rarities.
def nc?
return true if is_manually_nc?
NCRarities.include?(rarity_index)
end
|
globalized search first draft
Confirmed features:
* Output (retrieval, sorting, etc.)
* Name (positive and negative, but new behavior)
* Flags (positive and negative)
Planned features:
* users:owns, user:wants
Known issues:
* Sets are broken
* Don't render properly
* Shouldn't actually be done as joined sets, anyway, since
we actually want (set1_zone1 OR set1_zone2) AND
(set2_zone1 OR set2_zone2), which will require breaking
it into multiple terms queries.
* Name has regressed: ignores phrases, doesn't require *all*
words. While we're breaking sets into multiple queries,
maybe we'll do something similar for name. In fact, we
really kinda have to if we're gonna keep sorting by name,
since "straw hat" returns all hats. Eww.
2013-01-18 21:23:37 -08:00
|
|
|
|
|
|
|
# PB items are identified by their exact description text. Descriptions are
# localized, so force the comparison to run against the English copy.
def pb?
I18n.with_locale(:en) { self.description == PAINTBRUSH_SET_DESCRIPTION }
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
Add bare-bones Item Getting Guide page
TNT requested that we figure out ways to connect the dots between
people's intentions on DTI to their purchases in the NC Mall.
But rather than just slam ad links everywhere, our plan is to design an
actually useful feature about it: the "Item Getting Guide". It'll break
down items by how you can actually get them (NP economy, NC Mall,
retired NC, Dyeworks, etc), and we're planning some cute actions you
can take, like shortcuts for getting them onto trade wishlists or into
your NC Mall cart.
This is just a little demo version of the page, just breaking down
items specified in the URL into NC/NP/PB! Later we'll do more granular
breakdown than this, with more info and actions—and we'll also like,
link to it at all, which isn't the case yet! (The main way we expect
people to get here is by a "Get these items" button we'll add to the
outfit editor, but there might be other paths, too.)
2024-05-06 20:37:59 -07:00
|
|
|
# NP items are whatever's left over: neither NC nor PB.
def np?
return false if nc?
!pb?
end
|
|
|
|
|
2024-05-14 00:09:27 -07:00
|
|
|
# Whether this item currently has an NC Mall listing record.
def currently_in_mall?
!nc_mall_record.blank?
end
|
|
|
|
|
2024-06-16 12:37:53 -07:00
|
|
|
# How a user can obtain this item: :dyeworks, :nc_mall, :other_nc, :np, or
# :pb — checked in priority order (a Dyeworks-buyable item reports :dyeworks
# even though it's also in the mall, and so on down the chain).
def source
return :dyeworks if dyeworks_buyable?
return :nc_mall if currently_in_mall?
return :other_nc if nc?
return :np if np?
return :pb if pb?
raise "Item has no matching source (should not happen?)"
end
|
|
|
|
|
2011-07-22 13:18:15 -07:00
|
|
|
# Whether the current user owns this item. Set via attr_writer :owned;
# defaults to false when never assigned.
def owned?
@owned.nil? ? false : @owned
end

# Whether the current user wants this item. Set via attr_writer :wanted;
# defaults to false when never assigned.
def wanted?
@wanted.nil? ? false : @wanted
end
|
|
|
|
|
2024-05-14 16:03:35 -07:00
|
|
|
# The item's current NC Mall price, or nil when it's not in the mall.
def current_nc_price
record = nc_mall_record
record && record.current_price
end
|
|
|
|
|
2024-05-22 15:41:46 -07:00
|
|
|
# If this is a PB item, return the corresponding Color, inferred from the
# item name. If it's not a PB item, or we fail to infer a specific color,
# return nil. (This is expected to be nil for some PB items, like the "Aisha
# Collar", which belong to many colors. It can also be nil for PB items for
# new colors we haven't manually added to the database yet, or if a PB item
# is named strangely in the future.)
def pb_color
return nil unless pb?

# NOTE: To handle colors like "Royalboy", where the items aren't consistent
# with the color name regarding whether or not there's spaces, we remove
# all spaces from the item name and color name when matching. We also
# hackily handle the fact that "Elderlyboy" color has items named "Elderly
# Male" (and same for Girl/Female) by replacing those words, too. These
# hacks could cause false matches in theory, but I'm not aware of any rn!
normalized_name = name.downcase.gsub("female", "girl").gsub("male", "boy").
gsub(/\s/, "")

# For each color, normalize its name, look for it in the item name, and
# return the matching color that appears earliest. (This is important for
# items that contain multiple color names, like the "Royal Girl Elephante
# Gold Bracelets".) `index` yields nil for colors that don't appear; note
# that an index of 0 still passes `present?`, since 0 isn't blank.
Color.all.to_h { |c| [c, c.name.downcase.gsub(/\s/, "")] }.
transform_values { |n| normalized_name.index(n) }.
filter { |c, n| n.present? }.
min_by { |c, i| i }&.first
end
|
|
|
|
|
2024-05-22 17:53:52 -07:00
|
|
|
# If this is a PB item, return the corresponding Species, inferred from the
# item name. If it's not a PB item, or we fail to infer a specific species,
# return nil. (This is not expected to be nil in general, but could be for PB
# items for new species we haven't manually added to the database yet, or if
# a PB item is named strangely in the future.)
def pb_species
return nil unless pb?
lowercased_name = name.downcase
Species.order(:name).find do |species|
lowercased_name.include?(species.name.downcase)
end
end
|
|
|
|
|
2024-05-22 15:41:46 -07:00
|
|
|
# The canonical PB item name for this item's inferred color, or nil when no
# color could be inferred.
def pb_item_name
color = pb_color
color && color.pb_item_name
end
|
|
|
|
|
2013-01-21 17:34:39 -08:00
|
|
|
# The Zone records this item restricts, decoded from `zones_restrict`.
# Pass options[:scope] to narrow or preload the zone lookup; note the
# default is assigned *into* the caller's options hash (intentional ||=).
def restricted_zones(options={})
options[:scope] ||= Zone.all
options[:scope].find(restricted_zone_ids)
end
|
globalized search first draft
Confirmed features:
* Output (retrieval, sorting, etc.)
* Name (positive and negative, but new behavior)
* Flags (positive and negative)
Planned features:
* users:owns, user:wants
Known issues:
* Sets are broken
* Don't render properly
* Shouldn't actually be done as joined sets, anyway, since
we actually want (set1_zone1 OR set1_zone2) AND
(set2_zone1 OR set2_zone2), which will require breaking
it into multiple terms queries.
* Name has regressed: ignores phrases, doesn't require *all*
words. While we're breaking sets into multiple queries,
maybe we'll do something similar for name. In fact, we
really kinda have to if we're gonna keep sorting by name,
since "straw hat" returns all hats. Eww.
2013-01-18 21:23:37 -08:00
|
|
|
|
|
|
|
# Decode `zones_restrict` into zone IDs: it's a string of "0"/"1" flags,
# where a "1" at (1-indexed) position N means zone ID N is restricted.
# Memoized in @restricted_zone_ids.
def restricted_zone_ids
@restricted_zone_ids ||=
zones_restrict.each_char.with_index(1).
select { |switch, _zone_id| switch == '1' }.
map { |_switch, zone_id| zone_id }
end
|
2024-09-03 12:55:10 -07:00
|
|
|
|
globalized search first draft
Confirmed features:
* Output (retrieval, sorting, etc.)
* Name (positive and negative, but new behavior)
* Flags (positive and negative)
Planned features:
* users:owns, user:wants
Known issues:
* Sets are broken
* Don't render properly
* Shouldn't actually be done as joined sets, anyway, since
we actually want (set1_zone1 OR set1_zone2) AND
(set2_zone1 OR set2_zone2), which will require breaking
it into multiple terms queries.
* Name has regressed: ignores phrases, doesn't require *all*
words. While we're breaking sets into multiple queries,
maybe we'll do something similar for name. In fact, we
really kinda have to if we're gonna keep sorting by name,
since "straw hat" returns all hats. Eww.
2013-01-18 21:23:37 -08:00
|
|
|
# IDs of the zones this item's assets occupy.
def occupied_zone_ids
occupied_zones.map { |zone| zone.id }
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2024-08-31 13:14:22 -07:00
|
|
|
# The distinct Zone records this item's assets occupy.
def occupied_zones
Zone.find(swf_assets.map(&:zone_id).uniq)
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2011-01-27 13:35:46 -08:00
|
|
|
# All zones this item touches: both restricted and occupied.
def affected_zones
[*restricted_zones, *occupied_zones]
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2024-09-30 23:10:37 -07:00
|
|
|
# Recompute and persist the cached_* columns (occupied zone IDs, compatible
# body IDs, fully-modeled prediction). Wired up as an after_save callback
# when the modeling status hint changes.
def update_cached_fields
# First, clear out some cached instance variables we use for performance,
# to ensure we recompute the latest values.
@predicted_body_ids = nil
@predicted_missing_body_ids = nil

# We also need to reload our associations, so they include any new records.
swf_assets.reload

# Finally, compute and save our cached fields.
self.cached_occupied_zone_ids = occupied_zone_ids
self.cached_compatible_body_ids = compatible_body_ids(use_cached: false)
self.cached_predicted_fully_modeled =
predicted_fully_modeled?(use_cached: false)
self.save!
end
|
|
|
|
|
2010-05-15 08:38:45 -07:00
|
|
|
# The species IDs this item supports, parsed from the comma-separated
# `species_support_ids` column; nil when the column is nil. Memoized; the
# writer resets the memo. (Replaces a blanket inline `rescue nil`, which
# silently swallowed *all* errors, with safe navigation that handles the
# actual nil-column case.)
def species_support_ids
@species_support_ids_array ||=
read_attribute('species_support_ids')&.split(',')&.map(&:to_i)
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2010-05-15 08:38:45 -07:00
|
|
|
# Set the species support list. Accepts an Array of IDs (joined with commas
# for storage) or an already-formatted string. Resets the reader's memo.
def species_support_ids=(replacement)
@species_support_ids_array = nil
value = replacement.is_a?(Array) ? replacement.join(',') : replacement
write_attribute('species_support_ids', value)
end
|
2024-09-30 23:10:37 -07:00
|
|
|
|
2024-11-20 11:22:33 -08:00
|
|
|
# Whether the manual modeling-status hint marks this item as effectively
# finished ("done" or "glitchy").
def modeling_hinted_done?
%w[done glitchy].include?(modeling_status_hint)
end
|
|
|
|
|
2013-12-14 15:19:27 -08:00
|
|
|
# Predict the full set of body IDs this item should eventually fit, based on
# the bodies it's already been modeled on. Memoized in @predicted_body_ids
# (cleared by `update_cached_fields`).
def predicted_body_ids
@predicted_body_ids ||= if modeling_hinted_done?
# If we've manually set this item to no longer report as needing modeling,
# predict that the current bodies are all of the compatible bodies.
compatible_body_ids
elsif compatible_body_ids.include?(0)
# Oh, look, it's already known to fit everybody! Sweet. We're done. (This
# isn't folded into the case below, in case this item somehow got a
# body-specific and non-body-specific asset. In all the cases I've seen
# it, that indicates a glitched item, but this method chooses to reflect
# behavior elsewhere in the app by saying that we can put this item on
# anybody. (Heh. Any body.))
compatible_body_ids
elsif compatible_body_ids.size == 1
# This might just be a species-specific item. Let's be conservative in
# our prediction, though we'll revise it if we see another body ID.
compatible_body_ids
elsif compatible_body_ids.size == 0
# If somehow we have this item, but not any modeling data for it (weird!),
# consider it to fit all standard pet types until shown otherwise.
PetType.basic.released_before(released_at_estimate).
distinct.pluck(:body_id).sort
else
# First, find our compatible pet types, then pair each body ID with its
# color. (As an optimization, we omit standard colors, other than the
# basic colors. We also flatten the basic colors into the single color
# ID "basic", so we can treat them specially.)
compatible_pairs = compatible_pet_types.joins(:color).
merge(Color.nonstandard.or(Color.basic)).
distinct.pluck(
Arel.sql("IF(colors.basic, 'basic', colors.id)"), :body_id)

# Group colors by body, to help us find bodies unique to certain colors.
compatible_color_ids_by_body_id = {}.tap do |h|
compatible_pairs.each do |(color_id, body_id)|
h[body_id] ||= []
h[body_id] << color_id
end
end

# Find non-basic colors with at least one unique compatible body. (This
# means we'll ignore e.g. a Maraquan pet that shares its body with the
# basic version, as not indicating Maraquan compatibility in general.)
modelable_color_ids =
compatible_color_ids_by_body_id.
filter { |k, v| v.size == 1 && v.first != "basic" }.
values.map(&:first).uniq

# We can model on basic pets (perhaps in addition to the above) if we
# find at least one compatible basic body that doesn't *also* fit any of
# the modelable colors we identified above.
basic_is_modelable =
compatible_color_ids_by_body_id.values.
any? { |v| v.include?("basic") && (v & modelable_color_ids).empty? }

# Filter to pet types that match the colors that seem compatible.
predicted_pet_types =
(basic_is_modelable ? PetType.basic : PetType.none).
or(PetType.where(color_id: modelable_color_ids))

# Only include species that were released when this item was. If we don't
# know our creation date (we don't have it for some old records), assume
# it's pretty old.
predicted_pet_types.merge! PetType.released_before(released_at_estimate)

# Get all body IDs for the pet types we decided are modelable.
predicted_pet_types.distinct.pluck(:body_id).sort
end
end
|
|
|
|
|
|
|
|
# Body IDs we predict this item should fit, but that it hasn't been modeled
# on yet. Memoized (cleared by `update_cached_fields`).
def predicted_missing_body_ids
@predicted_missing_body_ids ||=
predicted_body_ids.reject { |body_id| compatible_body_ids.include?(body_id) }
end
|
|
|
|
|
2014-01-01 07:15:58 -08:00
|
|
|
# Map species ID => standard body ID, limited to bodies we predict this item
# should fit but that haven't been modeled yet. Memoized.
def predicted_missing_standard_body_ids_by_species_id
@predicted_missing_standard_body_ids_by_species_id ||=
PetType.select('DISTINCT body_id, species_id').
joins(:color).
where(body_id: predicted_missing_body_ids,
colors: {standard: true}).
each_with_object({}) { |pt, h| h[pt.species_id] = pt.body_id }
end
|
|
|
|
|
2024-01-23 05:23:57 -08:00
|
|
|
# Like `predicted_missing_standard_body_ids_by_species_id`, but keyed by the
# Species records themselves rather than by their IDs.
def predicted_missing_standard_body_ids_by_species
  body_ids_by_species_id = predicted_missing_standard_body_ids_by_species_id
  species_by_id = Species.where(id: body_ids_by_species_id.keys).
    to_h { |species| [species.id, species] }
  body_ids_by_species_id.transform_keys { |species_id| species_by_id[species_id] }
end
|
|
|
|
|
|
|
|
# Pet types with nonstandard colors whose bodies we predict are missing,
# excluding basic bodies.
def predicted_missing_nonstandard_body_pet_types
  nonbasic_body_ids = predicted_missing_body_ids - PetType.basic_body_ids
  PetType.joins(:color).
    where(body_id: nonbasic_body_ids, colors: {standard: false})
end
|
|
|
|
|
2024-01-23 05:23:57 -08:00
|
|
|
# For the nonstandard bodies we predict are missing, build a nested hash:
# Color → (Species → body ID).
def predicted_missing_nonstandard_body_ids_by_species_by_color
  pet_types = predicted_missing_nonstandard_body_pet_types

  # Preload the species and colors these pet types reference, keyed by ID.
  species_by_id = Species.find(pet_types.map(&:species_id)).
    to_h { |species| [species.id, species] }
  colors_by_id = Color.find(pet_types.map(&:color_id)).
    to_h { |color| [color.id, color] }

  pet_types.each_with_object({}) do |pet_type, by_color|
    color = colors_by_id[pet_type.color_id]
    species = species_by_id[pet_type.species_id]
    (by_color[color] ||= {})[species] = pet_type.body_id
  end
end
|
|
|
|
|
2024-11-19 15:52:52 -08:00
|
|
|
# Do we predict this item already fits every body it should? Uses the cached
# column by default; pass `use_cached: false` to recompute from predictions.
def predicted_fully_modeled?(use_cached: true)
  if use_cached
    cached_predicted_fully_modeled?
  else
    predicted_missing_body_ids.empty?
  end
end
|
|
|
|
|
|
|
|
# Fraction of predicted-compatible bodies we actually have modeling data
# for, as a Float.
def predicted_modeled_ratio
  compatible_body_ids.size.fdiv(predicted_body_ids.size)
end
|
|
|
|
|
2024-11-19 16:41:50 -08:00
|
|
|
# We estimate the item's release time as when we first saw it
# (`created_at`), or 2010 if it's so old that we don't have a record of
# first seeing it.
def released_at_estimate
  created_at || Time.new(2010)
end
|
|
|
|
|
2013-06-27 00:10:55 -07:00
|
|
|
# Serialize only the fields clients expect. Caller-provided `options` keys
# win over these defaults.
def as_json(options={})
  defaults = {
    only: [:id, :name, :description, :thumbnail_url, :rarity_index],
    methods: [:zones_restrict],
  }
  super(defaults.merge(options))
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2024-10-02 17:55:20 -07:00
|
|
|
# Body IDs this item's assets are known to fit. Uses the cached column by
# default; pass `use_cached: false` to recompute from `swf_assets`.
def compatible_body_ids(use_cached: true)
  if use_cached
    cached_compatible_body_ids
  else
    swf_assets.map(&:body_id).uniq
  end
end
|
|
|
|
|
|
|
|
# Pet types this item is known to fit. A compatible body ID of 0 is the
# "fits everyone" sentinel, in which case every pet type matches.
def compatible_pet_types
  body_ids = compatible_body_ids
  if body_ids.include?(0)
    PetType.all
  else
    PetType.where(body_id: body_ids)
  end
end
|
2024-11-06 14:31:16 -08:00
|
|
|
|
|
|
|
# Persist the asset relationships queued via
# `parent_swf_asset_relationships_to_update=`. Does nothing unless both the
# queued relationships and `@current_body_id` are set (both come from the
# import path, e.g. `collection_from_pet_type_and_registries`).
def handle_assets!
  if @parent_swf_asset_relationships_to_update && @current_body_id
    new_swf_asset_ids = @parent_swf_asset_relationships_to_update.map(&:swf_asset_id)
    rels = ParentSwfAssetRelationship.arel_table
    swf_assets = SwfAsset.arel_table

    # If a relationship used to bind an item and asset for this body type,
    # but doesn't in this sample, the two have been unbound. Delete the
    # relationship. (Assets with body_id 0, which fit every body, are
    # considered for deletion too.)
    ids_to_delete = self.parent_swf_asset_relationships.
      select(rels[:id]).
      joins(:swf_asset).
      where(rels[:swf_asset_id].not_in(new_swf_asset_ids)).
      where(swf_assets[:body_id].in([@current_body_id, 0])).
      map(&:id)

    unless ids_to_delete.empty?
      ParentSwfAssetRelationship.where(:id => ids_to_delete).delete_all
    end

    # Save every queued relationship and its asset. `save!` raises on
    # failure rather than skipping silently.
    @parent_swf_asset_relationships_to_update.each do |rel|
      rel.save!
      rel.swf_asset.save!
    end
  end
end
|
2013-01-22 22:25:09 -08:00
|
|
|
|
|
|
|
# Whether this item renders differently depending on the pet's body. An item
# is body-specific if explicitly flagged as such, or if it has any species
# support IDs; with neither, it fits everyone the same.
def body_specific?
  return true if explicitly_body_specific?
  !species_support_ids.empty?
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2024-11-06 14:31:16 -08:00
|
|
|
# Copy fields from an origin-registry `info` hash onto this record.
# (`locale` is part of the signature for callers, but is not used in the
# body here.) Bear in mind that numbers from registries are floats, so we
# coerce with `to_i` where the column is an integer.
def add_origin_registry_info(info, locale)
  self.species_support_ids = (info['species_support'] || []).map(&:to_i)

  # NOTE: If some of these fields are missing, it could cause saving the item
  # to fail, because many of these columns are non-nullable.
  assign_attributes(
    name: info['name'],
    description: info['description'],
    thumbnail_url: info['thumbnail_url'],
    category: info['category'],
    type: info['type'],
    rarity: info['rarity'],
    rarity_index: info['rarity_index'].to_i,
    price: info['price'].to_i,
    weight_lbs: info['weight_lbs'].to_i,
    zones_restrict: info['zones_restrict'],
  )
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2024-11-06 14:31:16 -08:00
|
|
|
# The SWF assets attached to the relationships queued for update.
def pending_swf_assets
  @parent_swf_asset_relationships_to_update.map(&:swf_asset)
end
|
|
|
|
|
|
|
|
def parent_swf_asset_relationships_to_update=(rels)
|
|
|
|
@parent_swf_asset_relationships_to_update = rels
|
|
|
|
end
|
|
|
|
|
2024-02-25 12:06:20 -08:00
|
|
|
# How an item looks on one body: the item, the body, and the assets that
# render it there.
#
# NOTE: Adding the JSON serializer makes `as_json` treat this like a model
# instead of like a hash, so you can target its children with things like
# the `include` option. This feels clunky though, I wish I had something a
# bit more suited to it!
Appearance = Struct.new(:item, :body, :swf_assets) do
  include ActiveModel::Serializers::JSON

  delegate :present?, :empty?, to: :swf_assets
  delegate :species, :fits?, :fits_all?, to: :body

  # The fields `as_json` serializes.
  def attributes
    {item: item, body: body, swf_assets: swf_assets}
  end

  # True iff every asset has been converted to HTML5.
  def html5?
    swf_assets.all?(&:html5?)
  end

  # Sorted, de-duplicated IDs of the zones these assets occupy.
  def occupied_zone_ids
    swf_assets.map(&:zone_id).uniq.sort
  end

  # Sorted, de-duplicated IDs of the zones restricted by the item or by any
  # of its assets. An appearance with no assets restricts nothing.
  def restricted_zone_ids
    return [] if empty?
    swf_assets.flat_map(&:restricted_zone_ids).
      concat(item.restricted_zone_ids).uniq.sort
  end
end
|
|
|
|
# The body half of an Appearance: a body ID, plus its Species. (`species` is
# nil for the fits-everyone body.)
Appearance::Body = Struct.new(:id, :species) do
  include ActiveModel::Serializers::JSON

  # The fields `as_json` serializes.
  def attributes
    {id: id, species: species}
  end

  # Body ID 0 is the sentinel for "fits every pet".
  def fits_all?
    id == 0
  end

  # Whether this body fits `target` (anything with a `body_id`).
  def fits?(target)
    return true if fits_all?
    target.body_id == id
  end
end
|
|
|
|
|
2023-11-11 07:14:48 -08:00
|
|
|
# This item's appearances, one per distinct body, memoized per instance.
def appearances
  @appearances = build_appearances if @appearances.nil?
  @appearances
end
|
|
|
|
|
|
|
|
# Build the list of Appearances from this item's assets: one per distinct
# real body ID, or a single fits-everyone appearance when every asset has
# body_id 0.
def build_appearances
  assets = swf_assets.to_a

  # No assets yet means no appearances.
  return [] if assets.empty?

  # Split out the assets that fit everyone (body_id = 0) from the
  # body-specific ones.
  assets_by_body_id = assets.group_by(&:body_id)
  universal_assets = assets_by_body_id.delete(0) || []

  # If everything is universal, one fits-everyone appearance covers it all.
  if assets_by_body_id.empty?
    universal_body = Appearance::Body.new(0, nil)
    return [Appearance.new(self, universal_body, universal_assets)]
  end

  # Otherwise, create an appearance for each real (nonzero) body ID. We
  # don't generally expect body_id = 0 and body_id != 0 to mix, but if they
  # do, merge the universal assets into each body's appearance.
  species_by_body_id = Species.with_body_ids(assets_by_body_id.keys)
  assets_by_body_id.map do |body_id, body_assets|
    body = Appearance::Body.new(body_id, species_by_body_id[body_id])
    Appearance.new(self, body, body_assets + universal_assets)
  end
end
|
|
|
|
|
2024-06-30 23:09:28 -07:00
|
|
|
# How this item looks on `target` (a pet type or an alt style). Any extra
# arguments are forwarded to `Item.appearances_for`.
def appearance_for(target, ...)
  Item.appearances_for([self], target, ...)[id]
end
|
|
|
|
|
2024-09-03 12:55:10 -07:00
|
|
|
# Group this item's appearances by each zone ID they occupy. An appearance
# that occupies several zones appears under each of them.
def appearances_by_occupied_zone_id
  appearances.each_with_object({}) do |appearance, by_zone_id|
    appearance.occupied_zone_ids.each do |zone_id|
      (by_zone_id[zone_id] ||= []) << appearance
    end
  end
end
|
|
|
|
|
|
|
|
# Like `appearances_by_occupied_zone_id`, but keyed by the Zone records
# themselves.
def appearances_by_occupied_zone
  zone_lookup = {}
  occupied_zones.each { |zone| zone_lookup[zone.id] = zone }
  appearances_by_occupied_zone_id.
    to_h { |zone_id, apps| [zone_lookup[zone_id], apps] }
end
|
|
|
|
|
2024-07-01 16:54:39 -07:00
|
|
|
# Given a list of items, return how they look on the given target (either a
# pet type or an alt style), as a hash from item ID to Appearance.
def self.appearances_for(items, target, swf_asset_includes: [])
  item_ids = items.map(&:id)

  # First, load every relationship for these items whose asset fits this
  # body—or fits everyone (body_id 0).
  relationships = ParentSwfAssetRelationship.
    includes(swf_asset: swf_asset_includes).
    where(parent_type: "Item", parent_id: item_ids).
    where(swf_asset: {body_id: [target.body_id, 0]})

  target_body = Appearance::Body.new(target.body_id, target.species)
  universal_body = Appearance::Body.new(0, nil)

  # Then, gather each item's matching assets, keyed by item ID.
  assets_by_item_id = {}
  relationships.each do |relationship|
    (assets_by_item_id[relationship.parent_id] ||= []) << relationship.swf_asset
  end

  # Finally, for each item, return an appearance—even if it's empty!
  items.to_h do |item|
    assets = assets_by_item_id.fetch(item.id, [])

    # When every matching asset fits all bodies, report the fits-everyone
    # body rather than the target's.
    fits_all_pets = !assets.empty? && assets.all? { |a| a.body_id == 0 }
    body = fits_all_pets ? universal_body : target_body

    [item.id, Appearance.new(item, body, assets)]
  end
end
|
|
|
|
|
2010-11-06 08:52:58 -07:00
|
|
|
# Load items by ID, plus the parent items of the given SWF assets, and link
# each given asset to its loaded parent item (via `swf_asset.item=`).
#
# NOTE(review): this appends the discovered parent IDs directly onto the
# caller's `ids` array (mutating the argument), and an item with several
# matching assets gets appended once per asset—verify that callers and
# `find` tolerate the mutation and the possible duplicate IDs.
def self.all_by_ids_or_children(ids, swf_assets)
  # Index the given assets by ID, so we can look them up from query rows.
  swf_asset_ids = []
  swf_assets_by_id = {}
  swf_assets_by_parent_id = {}
  swf_assets.each do |swf_asset|
    id = swf_asset.id
    swf_assets_by_id[id] = swf_asset
    swf_asset_ids << id
  end
  # Find each asset's parent item ID, grouping the given assets by parent
  # and adding each parent to the list of item IDs to load.
  SwfAsset.object_assets.joins(:parent_swf_asset_relationships).
    where(SwfAsset.arel_table[:id].in(swf_asset_ids)).select([
    SwfAsset.arel_table[:id],
    ParentSwfAssetRelationship.arel_table[:parent_id]
  ]).each do |row|
    item_id = row.parent_id.to_i
    swf_assets_by_parent_id[item_id] ||= []
    swf_assets_by_parent_id[item_id] << swf_assets_by_id[row.id.to_i]
    ids << item_id
  end
  # Load all the items, and point each given asset back at its item.
  find(ids).tap do |items|
    items.each do |item|
      swf_assets = swf_assets_by_parent_id[item.id]
      if swf_assets
        swf_assets.each do |swf_asset|
          swf_asset.item = item
        end
      end
    end
  end
end
|
2011-05-02 15:07:56 -07:00
|
|
|
|
2024-05-27 16:21:22 -07:00
|
|
|
# Warm each item's NC trade value, fetching up to 10 concurrently. Returns
# `items`, for chaining. (`nc_trade_value` caches its result on the Item
# object, so later calls are free.)
def self.preload_nc_trade_values(items)
  DTIRequests.load_many(max_at_once: 10) do |task|
    items.each do |item|
      task.async { item.nc_trade_value }
    end
  end

  items
end
|
2024-11-06 14:31:16 -08:00
|
|
|
|
|
|
|
# Build or refresh Item records (and their SWF asset relationships) for one
# pet type, from TNT's customization "registries". Returns the item list;
# nothing is saved here—relationships are queued via
# `parent_swf_asset_relationships_to_update=` for `handle_assets!` to save.
#
# Bear in mind that registries are arrays with many nil elements, due to how
# the parser works.
def self.collection_from_pet_type_and_registries(pet_type, info_registry, asset_registry, scope=Item.all)
  # Collect the IDs of items marked compatible with this pet type.
  items = {}
  item_ids = []
  info_registry.each do |item_id, info|
    if info && info[:is_compatible]
      item_ids << item_id.to_i
    end
  end

  # Collect existing items and their existing relationships, the latter
  # keyed by item ID, then by SWF asset ID.
  existing_relationships_by_item_id_and_swf_asset_id = {}
  existing_items = scope.where(id: item_ids).
    includes(:parent_swf_asset_relationships)
  existing_items.each do |item|
    items[item.id] = item
    relationships_by_swf_asset_id = {}
    item.parent_swf_asset_relationships.each do |relationship|
      relationships_by_swf_asset_id[relationship.swf_asset_id] = relationship
    end
    existing_relationships_by_item_id_and_swf_asset_id[item.id] =
      relationships_by_swf_asset_id
  end

  # Collect existing assets, keyed by remote ID.
  swf_asset_ids = []
  asset_registry.each do |asset_id, asset_data|
    swf_asset_ids << asset_id.to_i if asset_data
  end
  existing_swf_assets = SwfAsset.object_assets.includes(:zone).
    where(remote_id: swf_asset_ids)
  existing_swf_assets_by_remote_id = {}
  existing_swf_assets.each do |swf_asset|
    existing_swf_assets_by_remote_id[swf_asset.remote_id] = swf_asset
  end

  # With each asset in the registry, build or update the item, the asset,
  # and the relationship between them.
  relationships_by_item_id = {}
  asset_registry.each do |asset_id, asset_data|
    if asset_data
      # Build and update the item
      item_id = asset_data[:obj_info_id].to_i
      next unless item_ids.include?(item_id) # skip incompatible (Uni Bug)
      item = items[item_id]
      unless item
        item = Item.new
        item.id = item_id
        items[item_id] = item
      end
      item.add_origin_registry_info info_registry[item.id.to_s], I18n.default_locale
      item.current_body_id = pet_type.body_id

      # Build and update the SWF
      swf_asset_remote_id = asset_data[:asset_id].to_i
      swf_asset = existing_swf_assets_by_remote_id[swf_asset_remote_id]
      unless swf_asset
        swf_asset = SwfAsset.new
        swf_asset.remote_id = swf_asset_remote_id
      end
      swf_asset.origin_object_data = asset_data
      swf_asset.origin_pet_type = pet_type
      swf_asset.item = item

      # Build and update the relationship. `dig` returns nil when this item
      # has no existing relationships, replacing the old inline
      # `rescue nil`, which silently swallowed *all* errors here—not just
      # the missing-key case it was aimed at.
      relationship = existing_relationships_by_item_id_and_swf_asset_id.
        dig(item.id, swf_asset.id)
      unless relationship
        relationship = ParentSwfAssetRelationship.new
        relationship.parent = item
      end
      relationship.swf_asset = swf_asset
      relationships_by_item_id[item_id] ||= []
      relationships_by_item_id[item_id] << relationship
    end
  end

  # Queue the relationships to be saved when each item saves.
  relationships_by_item_id.each do |item_id, relationships|
    items[item_id].parent_swf_asset_relationships_to_update = relationships
  end

  items.values
end
|
2010-05-14 15:41:40 -07:00
|
|
|
end
|