2023-08-02 16:05:02 -07:00
|
|
|
class User < ApplicationRecord
  include PrettyParam

  # How many top contributors to show in preview listings.
  PreviewTopContributorsCount = 3

  # Authentication lives in a separate AuthUser record; `remote_id` links us
  # to it.
  belongs_to :auth_user, foreign_key: :remote_id, inverse_of: :user

  delegate :neopass?, :disconnect_neopass, to: :auth_user

  has_many :closet_hangers
  has_many :closet_lists
  has_many :closeted_items, through: :closet_hangers, source: :item
  has_many :contributions
  has_many :neopets_connections
  has_many :outfits

  # TODO: When `owned_items` and `wanted_items` are merged, they override one
  # another instead of correctly returning an empty set. Is this a Rails bug
  # that gets fixed down the line once we finish upgrading, or...?
  has_many :owned_items, -> { where(ClosetHanger.arel_table[:owned].eq(true)) },
    through: :closet_hangers, source: :item
  has_many :wanted_items, -> { where(ClosetHanger.arel_table[:owned].eq(false)) },
    through: :closet_hangers, source: :item

  # Which of the user's Neopets usernames to show publicly (optional).
  belongs_to :contact_neopets_connection, class_name: 'NeopetsConnection', optional: true

  scope :top_contributors, -> { order('points DESC').where('points > 0') }

  # Mirror name changes onto the linked AuthUser record.
  after_update :sync_name_with_auth_user!, if: :saved_change_to_name?

  # Stamp trade activity whenever either hanger list becomes (or stays)
  # visible at trading level or above.
  after_update :log_trade_activity, if: -> user {
    (user.saved_change_to_owned_closet_hangers_visibility? &&
      user.owned_closet_hangers_visibility >= ClosetVisibility[:trading].id) ||
    (user.saved_change_to_wanted_closet_hangers_visibility? &&
      user.wanted_closet_hangers_visibility >= ClosetVisibility[:trading].id)
  }
|
2023-08-06 16:23:22 -07:00
|
|
|
|
2023-08-06 18:35:52 -07:00
|
|
|
# Callback: copy our (just-changed) name onto the linked AuthUser record,
# raising if that save fails.
def sync_name_with_auth_user!
  auth_user.update!(name: name)
end
|
|
|
|
|
2013-03-05 19:09:08 -08:00
|
|
|
# Admin status is hard-coded to the site creator's username.
def admin?
  'matchu' == name # you know that's right.
end
|
|
|
|
|
2023-11-02 13:50:33 -07:00
|
|
|
# Serialize only the safe-to-expose fields.
#
# Accepts (and ignores) the standard serialization options hash: Rails
# invokes `as_json(options)` from `to_json` / `render json:`, so a
# zero-arity override would raise ArgumentError there. The default keeps
# existing no-argument callers working.
def as_json(_options = nil)
  serializable_hash only: [:id, :name]
end
|
|
|
|
|
2023-07-29 12:48:45 -07:00
|
|
|
# Items this user does NOT own: LEFT JOIN every item against this user's
# *owned* closet hangers, group by item, and keep only items with zero
# matching hangers.
#
# TODO: It'd be nice to replace this with a `left_outer_joins` call in
# Rails 5+, but these conditions really do need to be part of the join:
# if we do them as a `where`, they prevent unmatching items from being
# returned in the first place.
#
# TODO: This crashes the query when combined with `unwanted_items`.
def unowned_items
  hangers = ClosetHanger.arel_table.alias("owned_hangers")
  # The user-ID condition is rendered via Arel, so the value is safely quoted.
  join_sql =
    "LEFT JOIN closet_hangers owned_hangers ON owned_hangers.item_id = items.id " \
    "AND #{hangers[:user_id].eq(self.id).to_sql} AND owned_hangers.owned = true"
  Item.joins(join_sql).group("items.id").having("COUNT(owned_hangers.id) = 0")
end
|
|
|
|
|
|
|
|
# Items this user does NOT want. See `unowned_items` above! We just change
# the `true` to `false`.
# TODO: This crashes the query when combined with `unowned_items`.
def unwanted_items
  hangers = ClosetHanger.arel_table.alias("wanted_hangers")
  join_sql =
    "LEFT JOIN closet_hangers wanted_hangers ON wanted_hangers.item_id = items.id " \
    "AND #{hangers[:user_id].eq(self.id).to_sql} AND wanted_hangers.owned = false"
  Item.joins(join_sql).group("items.id").having("COUNT(wanted_hangers.id) = 0")
end
|
|
|
|
|
2010-11-06 16:07:15 -07:00
|
|
|
# Saves `pet` and credits this user with a Contribution for each
# contributable record the pet is introducing for the first time. The pet
# save, contribution saves, and the user's point update all happen in one
# transaction. Returns the number of points just earned.
def contribute!(pet)
  # Collect contributions for records that don't exist yet; existing
  # records earn nothing.
  new_contributions = []
  pet.contributables.each do |contributable|
    if contributable.new_record?
      contribution = Contribution.new
      contribution.contributed = contributable
      contribution.user = self
      new_contributions << contribution
    end
  end

  new_points = 0 # temp assignment for scoping
  Pet.transaction do
    pet.save!
    new_contributions.each do |contribution|
      Rails.logger.debug("Saving contribution of #{contribution.contributed.inspect}: #{contribution.contributed_type.inspect}, #{contribution.contributed_id.inspect}")
      begin
        contribution.save!
      rescue ActiveRecord::RecordNotSaved => e
        # Re-raise with the record's state attached; the bare message alone
        # has proven hard to debug.
        raise ActiveRecord::RecordNotSaved, "#{e.message}, #{contribution.inspect}, #{contribution.valid?.inspect}, #{contribution.errors.inspect}"
      end
    end

    new_points = new_contributions.sum(&:point_value)
    self.points += new_points
    begin
      save!
    rescue ActiveRecord::RecordNotSaved => e
      raise ActiveRecord::RecordNotSaved, "#{e.message}, #{self.inspect}, #{self.valid?.inspect}, #{self.errors.inspect}"
    end
  end

  new_points
end
|
2011-07-12 16:37:16 -07:00
|
|
|
|
2011-07-12 22:21:48 -07:00
|
|
|
# Sets `owned` / `wanted` flags on each of the given item objects, based on
# this user's closet hangers. Mutates the items in place.
def assign_closeted_to_items!(items)
  # Assigning these items to a hash by ID means that we don't have to go
  # N^2 searching the items list for items that match the given IDs or vice
  # versa, and everything stays a lovely O(n)
  items_by_id = items.group_by(&:id)
  closet_hangers.where(item_id: items_by_id.keys).each do |hanger|
    # NOTE: renamed from `items` — the original shadowed the method
    # parameter, which made the loop easy to misread.
    matching_items = items_by_id[hanger.item_id]
    matching_items.each do |item|
      if hanger.owned?
        item.owned = true
      else
        item.wanted = true
      end
    end
  end
end
|
|
|
|
|
2011-07-30 16:45:28 -07:00
|
|
|
# Which hanger groups (true = owned, false = wanted) `user` may see: the
# owner sees both; everyone else sees only the public ones.
def closet_hangers_groups_visible_to(user)
  return [true, false] if user == self
  public_closet_hangers_groups
end
|
|
|
|
|
|
|
|
# Hanger groups (true = owned, false = wanted) whose visibility is at least
# public level.
def public_closet_hangers_groups
  groups = []
  groups << true if owned_closet_hangers_visibility >= ClosetVisibility[:public].id
  groups << false if wanted_closet_hangers_visibility >= ClosetVisibility[:public].id
  groups
end
|
|
|
|
|
2013-12-27 11:49:46 -08:00
|
|
|
# The placeholder "not in a list" list for the given side: truthy `owned`
# maps to the owned null-list, otherwise the wanted null-list.
def null_closet_list(owned)
  if owned
    null_owned_list
  else
    null_wanted_list
  end
end
|
|
|
|
|
|
|
|
# Placeholder list object for owned hangers that aren't in any real list.
def null_owned_list
  ClosetList::NullOwned.new(self)
end
|
|
|
|
|
|
|
|
# Placeholder list object for wanted hangers that aren't in any real list.
def null_wanted_list
  ClosetList::NullWanted.new(self)
end
|
|
|
|
|
|
|
|
# Looks up one of this user's closet lists. The argument may be a real list
# ID, or the strings/booleans "true"/"false" meaning the null owned/wanted
# list. Raises ActiveRecord::RecordNotFound for an unknown ID.
def find_closet_list_by_id_or_null_owned(id_or_owned)
  case id_or_owned.to_s
  when 'true'
    null_owned_list
  when 'false'
    null_wanted_list
  else
    self.closet_lists.find id_or_owned
  end
end
|
|
|
|
|
2014-01-17 09:12:56 -08:00
|
|
|
# All Neopets usernames this user has connected.
def neopets_usernames
  neopets_connections.map { |connection| connection.neopets_username }
end
|
|
|
|
|
2014-01-18 20:50:14 -08:00
|
|
|
# Whether the user has chosen a contact Neopets connection.
def contact_neopets_username?
  !contact_neopets_connection.nil?
end
|
|
|
|
|
|
|
|
# The username of the chosen contact connection, or nil if none is set.
def contact_neopets_username
  contact_neopets_connection&.neopets_username
end
|
|
|
|
|
2024-01-21 06:42:24 -08:00
|
|
|
# Quantities of the given item across this user's hangers, as a hash.
# Keys are the hanger's list ID when it belongs to a list, or the boolean
# `true`/`false` (owned/wanted) when it doesn't. Missing keys default to 0.
def item_quantities_for(item_id)
  quantities = Hash.new(0)

  # NOTE: `select` takes the columns directly; the array-argument form is
  # deprecated in modern Rails.
  hangers = closet_hangers.where(item_id: item_id).
    select(:owned, :list_id, :quantity)
  hangers.each do |hanger|
    quantities[hanger.list_id || hanger.owned?] = hanger.quantity
  end

  quantities
end
|
|
|
|
|
2024-01-19 00:00:46 -08:00
|
|
|
# Callback: stamp the user's last trade activity time (see the
# `after_update` hook that triggers this when hanger visibility reaches
# trading level).
def log_trade_activity
  touch(:last_trade_activity_at)
end
|
|
|
|
|
2010-12-11 06:37:39 -08:00
|
|
|
# The point total of the top contributor at the given rank offset, or 0 if
# there aren't that many contributors yet — i.e. how many points you'd need
# to pass them.
def self.points_required_to_pass_top_contributor(offset)
  contributor = User.top_contributors.select(:points).limit(1).offset(offset).first
  if contributor
    contributor.points
  else
    0
  end
end
|
2010-10-18 14:58:45 -07:00
|
|
|
end
|
2011-07-12 16:37:16 -07:00
|
|
|
|