2010-11-10 13:59:54 -08:00
|
|
|
# An outfit: a pet state (a pose of a colored species) plus a set of items,
# some worn and some merely "in the closet", optionally named/starred and
# owned by a user. The rendered outfit image is handled via carrierwave
# (mount_uploader) and regenerated by background jobs.
class Outfit < ActiveRecord::Base
  has_many :item_outfit_relationships, :dependent => :destroy
  # Subset of the relationships where the item is actually worn (is_worn),
  # as opposed to just sitting in the outfit's closet.
  has_many :worn_item_outfit_relationships, :class_name => 'ItemOutfitRelationship',
    :conditions => {:is_worn => true}
  has_many :worn_items, :through => :worn_item_outfit_relationships, :source => :item

  belongs_to :pet_state
  belongs_to :user

  # Name is required and unique per user, but only when the outfit is owned
  # (user_id present); anonymous outfits are unnamed.
  validates :name, :presence => {:if => :user_id}, :uniqueness => {:scope => :user_id, :if => :user_id}
  validates :pet_state, :presence => true

  # Rails 3 mass-assignment whitelist (see self.build_for_user and
  # worn_and_unworn_item_ids=).
  attr_accessible :name, :pet_state_id, :starred, :worn_and_unworn_item_ids

  # Starred outfits first, then alphabetical by name.
  scope :wardrobe_order, order('starred DESC', :name)

  # Carrierwave uploader for the rendered outfit image.
  mount_uploader :image, OutfitImageUploader

  # Keep the image_enqueued flag in sync on every save, then kick off a
  # background image render once the transaction commits.
  before_save :update_enqueued_image
  after_commit :enqueue_image!
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2010-11-11 10:43:22 -08:00
|
|
|
# JSON representation for the wardrobe frontend: the basic attributes plus
# computed fields (color/species, grouped item ids, image info).
#
# NOTE(review): more_options is accepted for signature compatibility with
# the standard as_json(options) but is currently ignored — confirm that's
# intentional.
def as_json(more_options={})
  computed = [:color_id, :species_id, :worn_and_unworn_item_ids,
              :image_versions, :image_enqueued, :image_layers_hash]
  serializable_hash(:only => [:id, :name, :pet_state_id, :starred],
                    :methods => computed)
end
|
|
|
|
|
|
|
|
# URLs for every size of the outfit image: the full-size upload under
# :large, plus one entry per carrierwave version (e.g. thumbnails).
def image_versions
  versions = { :large => image.url }
  image.versions.each do |name, version|
    versions[name] = version.url
  end
  versions
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2010-11-13 14:26:14 -08:00
|
|
|
# Ids of every item attached to this outfit, worn or not.
def closet_item_ids
  item_outfit_relationships.map { |relationship| relationship.item_id }
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2010-11-11 10:43:22 -08:00
|
|
|
# Color of the outfit's pet, delegated through the pet state's pet type.
def color_id
  pet_type = pet_state.pet_type
  pet_type.color_id
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2010-11-11 10:43:22 -08:00
|
|
|
# Species of the outfit's pet, delegated through the pet state's pet type.
def species_id
  pet_type = pet_state.pet_type
  pet_type.species_id
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2010-11-13 14:26:14 -08:00
|
|
|
# Serializes the outfit as a wardrobe-style query string: closeted and worn
# item ids, color, species, and pet state. Relies on ActiveSupport's
# Hash#to_query for the final encoding.
def to_query
  params = {
    :closet  => closet_item_ids,
    :color   => color_id,
    :objects => worn_item_ids,
    :species => species_id,
    :state   => pet_state_id
  }
  params.to_query
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2010-11-11 10:43:22 -08:00
|
|
|
# Item ids grouped by wear status: {:worn => [...], :unworn => [...]}.
# Inverse of worn_and_unworn_item_ids=.
def worn_and_unworn_item_ids
  worn_rels, unworn_rels = item_outfit_relationships.partition(&:is_worn?)
  {
    :worn => worn_rels.map(&:item_id),
    :unworn => unworn_rels.map(&:item_id)
  }
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2010-11-13 05:50:37 -08:00
|
|
|
# Replaces the outfit's item relationships from a hash shaped like
# {'worn' => [item ids...], 'unworn' => [item ids...]}, as submitted by the
# wardrobe frontend (see #worn_and_unworn_item_ids). Items under any key
# other than 'worn' are stored as unworn. Empty/missing id lists are
# skipped.
#
# Fix: compare the key via to_s so symbol keys (:worn) behave the same as
# string keys ('worn'); previously a symbol-keyed hash silently marked
# every item as unworn.
def worn_and_unworn_item_ids=(all_item_ids)
  new_rels = []
  all_item_ids.each do |key, item_ids|
    worn = key.to_s == 'worn'
    unless item_ids.blank?
      item_ids.each do |item_id|
        rel = ItemOutfitRelationship.new
        rel.item_id = item_id
        rel.is_worn = worn
        new_rels << rel
      end
    end
  end
  self.item_outfit_relationships = new_rels
end
|
2012-03-06 19:20:40 -08:00
|
|
|
|
|
|
|
# Returns the array of SwfAssets representing each layer of the output image,
# ordered from bottom to top (ascending zone depth). Careful: this method is
# memoized, so if the image layers change after its first call we'll get bad
# results.
#
# Fix: sort_by computes each asset's zone depth once (Schwartzian transform)
# instead of re-reading it on every comparison; the resulting order is the
# same.
def image_layers
  @image_layers ||= visible_assets_with_images.sort_by { |asset| asset.zone.depth }
end
|
2012-03-15 15:00:29 -07:00
|
|
|
|
2012-07-16 13:47:28 -07:00
|
|
|
# Creates and writes the thumbnail images for this outfit iff the new image
# would be different than the current one. (Writes to file in development,
# S3 in production.) If the image is updated, updates the image layers hash
# and runs #save! on the record, so any other changes will also be saved.
#
# Returns the carrierwave image uploader (updated or not).
def write_image!
  if image_layers_dirty?
    # Local `image` (a Tempfile) deliberately shadows the carrierwave
    # `image` attribute reader until it's assigned below via self.image=.
    image = Tempfile.open(['outfit_image', '.png'])
    begin
      create_image! image
      # Record the hash of the layers this render was generated from, so
      # image_layers_dirty? stays false until the layers change again.
      self.image_layers_hash = generate_image_layers_hash
      self.image = image
      # The queued job has now done its work; clear the pending flag.
      self.image_enqueued = false
      save!
    ensure
      # close! both closes and unlinks the tempfile, even if the render or
      # save raised.
      image.close!
    end
  end

  self.image
end
|
|
|
|
|
2012-07-16 13:47:28 -07:00
|
|
|
# Enqueue an image write iff the new image would be different than the
# current one.
#
# NOTE(review): the "iff different" guard is not checked here — this
# after_commit hook always enqueues an OutfitImageUpdate job; presumably
# the job (or write_image!, which checks image_layers_dirty?) skips the
# work when nothing changed. Confirm against the worker implementation.
def enqueue_image!
  Resque.enqueue(OutfitImageUpdate, id)
end
|
|
|
|
|
|
|
|
# before_save hook: flag the record as awaiting a fresh image whenever the
# visible layers have changed since the last render. Always returns true so
# the save is never halted by this callback.
def update_enqueued_image
  self.image_enqueued = image_layers_dirty?
  true
end
|
|
|
|
|
2012-03-15 15:00:29 -07:00
|
|
|
# S3 object key for this outfit's rendered image at the given
# [width, height] size, e.g. "123/600x600.png".
#
# NOTE(review): URI.encode (alias of URI.escape) was deprecated and removed
# in Ruby 3.0. For the id/size values used here nothing actually needs
# escaping, but this should migrate to a supported API before a Ruby
# upgrade — and note CGI.escape would also escape '/', which URI.encode
# does not, so confirm the desired key format before switching.
def s3_key(size)
  URI.encode("#{id}/#{size.join 'x'}.png")
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|
2011-02-10 14:50:47 -08:00
|
|
|
# Builds (but does not save) an outfit from request params. The name and
# starred values are kept only when a user is present and the request isn't
# anonymous; the remaining params are mass-assigned via attributes=.
def self.build_for_user(user, params)
  name = params.delete(:name)
  starred = params.delete(:starred)
  anonymous = params.delete(:anonymous) == "true"

  outfit = Outfit.new
  if user && !anonymous
    outfit.user = user
    outfit.name = name
    outfit.starred = starred
  end
  outfit.attributes = params
  outfit
end
|
2012-03-06 19:20:40 -08:00
|
|
|
|
|
|
|
# Image-generation internals below; not part of the public API.
protected
|
|
|
|
|
2012-03-15 15:00:29 -07:00
|
|
|
# Creates a 600x600 PNG image of this outfit, writing to the given output
# file. If there are no visible layers, the output file is simply closed
# (left empty).
def create_image!(output)
  unless image_layers.empty?
    # Download each layer's pre-rendered PNG into its own tempfile, up to
    # 8 downloads at a time.
    temp_image_files = Parallel.map(image_layers, :in_threads => 8) do |swf_asset|
      image_file = Tempfile.open(['outfit_layer', '.png'])
      begin
        write_temp_swf_asset_image!(swf_asset, image_file)
      rescue RightAws::AwsError
        nil # skip broken images
      else
        image_file # begin-block value on success: keep this layer's file
      ensure
        image_file.close
      end
    end.compact # remove nils for broken images
    # NOTE(review): tempfiles for rescued (broken) layers are closed but not
    # unlinked here — they rely on Tempfile's finalizer for cleanup. Confirm
    # that's acceptable.

    # Here we do some awkwardness to get the exact ImageMagick command we
    # want, though it's still less awkward than handling the command
    # ourselves. Give all of the temporary images as input, flatten them and
    # write them to the output path.
    command = MiniMagick::CommandBuilder.new('convert')
    temp_image_files.each { |image_file| command.push image_file.path }
    command.layers 'flatten'
    command.push output.path

    # Though the above command really is sufficient, we still need a dummy
    # image to handle execution.
    output_image = MiniMagick::Image.new(output.path)
    output_image.run(command)

    temp_image_files.each(&:unlink)
  else
    output.close
  end
end
|
|
|
|
|
2012-03-06 19:20:40 -08:00
|
|
|
# All assets for this outfit — the pet state's biology assets plus the worn
# items' object assets for this body — excluding any asset whose zone is
# restricted by another biology asset or worn item.
def visible_assets
  biology_assets = pet_state.swf_assets
  object_assets = SwfAsset.object_assets.
    fitting_body_id(pet_state.pet_type.body_id).for_item_ids(worn_item_ids)

  # Fun with bitmasks! Rather than building integer arrays of restricted
  # zone ids, parse each restrictor's zones_restrict binary string
  # (reversed, so lower zone numbers land on the right) and OR them all
  # together into a single mask of every restricted zone. (Note to self:
  # why not just store in this format in the first place?)
  restrictors = biology_assets + worn_items
  restricted_zones_mask = restrictors.reduce(0) do |mask, restrictor|
    mask | restrictor.zones_restrict.reverse.to_i(2)
  end

  # An asset is hidden when the bit for its zone — 1 shifted into position
  # zone_id - 1 — is set in the restriction mask; everything else stays.
  (biology_assets + object_assets).reject do |asset|
    (1 << (asset.zone_id - 1)) & restricted_zones_mask != 0
  end
end
|
2012-03-15 15:00:29 -07:00
|
|
|
|
2012-07-16 13:47:28 -07:00
|
|
|
# Visible assets that actually have a rendered PNG available to composite.
def visible_assets_with_images
  visible_assets.select { |asset| asset.has_image? }
end
|
|
|
|
|
|
|
|
# Generate an 8-char hex digest representing the visible image layers for
# this outfit, memoized per instance. MD5 is decently collision-resistant
# for this cache-key use.
#
# Fix: slice with stdlib String#[] instead of ActiveSupport's String#first;
# the result is byte-identical and this helper no longer needs
# ActiveSupport loaded.
def generate_image_layers_hash
  @generated_image_layers_hash ||=
    Digest::MD5.hexdigest(image_layers.map(&:id).join(','))[0, 8]
end
|
|
|
|
|
|
|
|
# True when the currently-visible layers differ from the layers hash stored
# at the last image write — i.e. the rendered image is stale.
def image_layers_dirty?
  self.image_layers_hash != generate_image_layers_hash
end
|
|
|
|
|
2012-03-15 15:00:29 -07:00
|
|
|
# Outfit images are composited from layers rendered at 600x600.
IMAGE_BASE_SIZE = [600, 600]

# Downloads the pre-rendered PNG for swf_asset from the S3 image bucket and
# writes it into the given open file, replacing any existing content.
def write_temp_swf_asset_image!(swf_asset, file)
  data = SwfAsset::IMAGE_BUCKET.get(swf_asset.s3_key(IMAGE_BASE_SIZE))
  file.binmode     # binary-safe writes
  file.truncate(0) # discard any previous content
  file.write(data)
end
|
2010-11-10 13:59:54 -08:00
|
|
|
end
|
2011-03-23 15:23:01 -07:00
|
|
|
|