Compare commits
4 commits: 81a58f8656 ... 57c08b5646

Commits (SHA1):
- 57c08b5646
- 97e6c39402
- 5b2062754d
- 5ad320fa18

15 changed files with 1385 additions and 27 deletions
app/assets/javascripts/lib/easeljs.min.js (vendored, new file, +15 lines)
File diff suppressed because one or more lines are too long
app/assets/javascripts/lib/idiomorph.js (new file, +850 lines)
@@ -0,0 +1,850 @@
// https://raw.githubusercontent.com/bigskysoftware/idiomorph/v0.3.0/dist/idiomorph.js

// base IIFE to define idiomorph
var Idiomorph = (function () {
    'use strict';

    //=============================================================================
    // AND NOW IT BEGINS...
    //=============================================================================
    let EMPTY_SET = new Set();

    // default configuration values, updatable by users now
    let defaults = {
        morphStyle: "outerHTML",
        callbacks : {
            beforeNodeAdded: noOp,
            afterNodeAdded: noOp,
            beforeNodeMorphed: noOp,
            afterNodeMorphed: noOp,
            beforeNodeRemoved: noOp,
            afterNodeRemoved: noOp,
            beforeAttributeUpdated: noOp,

        },
        head: {
            style: 'merge',
            shouldPreserve: function (elt) {
                return elt.getAttribute("im-preserve") === "true";
            },
            shouldReAppend: function (elt) {
                return elt.getAttribute("im-re-append") === "true";
            },
            shouldRemove: noOp,
            afterHeadMorphed: noOp,
        }
    };

    //=============================================================================
    // Core Morphing Algorithm - morph, morphNormalizedContent, morphOldNodeTo, morphChildren
    //=============================================================================
    function morph(oldNode, newContent, config = {}) {

        if (oldNode instanceof Document) {
            oldNode = oldNode.documentElement;
        }

        if (typeof newContent === 'string') {
            newContent = parseContent(newContent);
        }

        let normalizedContent = normalizeContent(newContent);

        let ctx = createMorphContext(oldNode, normalizedContent, config);

        return morphNormalizedContent(oldNode, normalizedContent, ctx);
    }

    function morphNormalizedContent(oldNode, normalizedNewContent, ctx) {
        if (ctx.head.block) {
            let oldHead = oldNode.querySelector('head');
            let newHead = normalizedNewContent.querySelector('head');
            if (oldHead && newHead) {
                let promises = handleHeadElement(newHead, oldHead, ctx);
                // when head promises resolve, call morph again, ignoring the head tag
                Promise.all(promises).then(function () {
                    morphNormalizedContent(oldNode, normalizedNewContent, Object.assign(ctx, {
                        head: {
                            block: false,
                            ignore: true
                        }
                    }));
                });
                return;
            }
        }

        if (ctx.morphStyle === "innerHTML") {

            // innerHTML, so we are only updating the children
            morphChildren(normalizedNewContent, oldNode, ctx);
            return oldNode.children;

        } else if (ctx.morphStyle === "outerHTML" || ctx.morphStyle == null) {
            // otherwise find the best element match in the new content, morph that, and merge its siblings
            // into either side of the best match
            let bestMatch = findBestNodeMatch(normalizedNewContent, oldNode, ctx);

            // stash the siblings that will need to be inserted on either side of the best match
            let previousSibling = bestMatch?.previousSibling;
            let nextSibling = bestMatch?.nextSibling;

            // morph it
            let morphedNode = morphOldNodeTo(oldNode, bestMatch, ctx);

            if (bestMatch) {
                // if there was a best match, merge the siblings in too and return the
                // whole bunch
                return insertSiblings(previousSibling, morphedNode, nextSibling);
            } else {
                // otherwise nothing was added to the DOM
                return []
            }
        } else {
            throw "Do not understand how to morph style " + ctx.morphStyle;
        }
    }


    /**
     * @param possibleActiveElement
     * @param ctx
     * @returns {boolean}
     */
    function ignoreValueOfActiveElement(possibleActiveElement, ctx) {
        return ctx.ignoreActiveValue && possibleActiveElement === document.activeElement;
    }

    /**
     * @param oldNode root node to merge content into
     * @param newContent new content to merge
     * @param ctx the merge context
     * @returns {Element} the element that ended up in the DOM
     */
    function morphOldNodeTo(oldNode, newContent, ctx) {
        if (ctx.ignoreActive && oldNode === document.activeElement) {
            // don't morph focused element
        } else if (newContent == null) {
            if (ctx.callbacks.beforeNodeRemoved(oldNode) === false) return oldNode;

            oldNode.remove();
            ctx.callbacks.afterNodeRemoved(oldNode);
            return null;
        } else if (!isSoftMatch(oldNode, newContent)) {
            if (ctx.callbacks.beforeNodeRemoved(oldNode) === false) return oldNode;
            if (ctx.callbacks.beforeNodeAdded(newContent) === false) return oldNode;

            oldNode.parentElement.replaceChild(newContent, oldNode);
            ctx.callbacks.afterNodeAdded(newContent);
            ctx.callbacks.afterNodeRemoved(oldNode);
            return newContent;
        } else {
            if (ctx.callbacks.beforeNodeMorphed(oldNode, newContent) === false) return oldNode;

            if (oldNode instanceof HTMLHeadElement && ctx.head.ignore) {
                // ignore the head element
            } else if (oldNode instanceof HTMLHeadElement && ctx.head.style !== "morph") {
                handleHeadElement(newContent, oldNode, ctx);
            } else {
                syncNodeFrom(newContent, oldNode, ctx);
                if (!ignoreValueOfActiveElement(oldNode, ctx)) {
                    morphChildren(newContent, oldNode, ctx);
                }
            }
            ctx.callbacks.afterNodeMorphed(oldNode, newContent);
            return oldNode;
        }
    }

    /**
     * This is the core algorithm for matching up children. The idea is to use id sets to try to match up
     * nodes as faithfully as possible. We greedily match, which allows us to keep the algorithm fast, but
     * by using id sets, we are able to better match up with content deeper in the DOM.
     *
     * Basic algorithm is, for each node in the new content:
     *
     * - if we have reached the end of the old parent, append the new content
     * - if the new content has an id set match with the current insertion point, morph
     * - search for an id set match
     * - if id set match found, morph
     * - otherwise search for a "soft" match
     * - if a soft match is found, morph
     * - otherwise, prepend the new node before the current insertion point
     *
     * The two search algorithms terminate if competing node matches appear to outweigh what can be achieved
     * with the current node. See findIdSetMatch() and findSoftMatch() for details.
     *
     * @param {Element} newParent the parent element of the new content
     * @param {Element } oldParent the old content that we are merging the new content into
     * @param ctx the merge context
     */
    function morphChildren(newParent, oldParent, ctx) {

        let nextNewChild = newParent.firstChild;
        let insertionPoint = oldParent.firstChild;
        let newChild;

        // run through all the new content
        while (nextNewChild) {

            newChild = nextNewChild;
            nextNewChild = newChild.nextSibling;

            // if we are at the end of the exiting parent's children, just append
            if (insertionPoint == null) {
                if (ctx.callbacks.beforeNodeAdded(newChild) === false) return;

                oldParent.appendChild(newChild);
                ctx.callbacks.afterNodeAdded(newChild);
                removeIdsFromConsideration(ctx, newChild);
                continue;
            }

            // if the current node has an id set match then morph
            if (isIdSetMatch(newChild, insertionPoint, ctx)) {
                morphOldNodeTo(insertionPoint, newChild, ctx);
                insertionPoint = insertionPoint.nextSibling;
                removeIdsFromConsideration(ctx, newChild);
                continue;
            }

            // otherwise search forward in the existing old children for an id set match
            let idSetMatch = findIdSetMatch(newParent, oldParent, newChild, insertionPoint, ctx);

            // if we found a potential match, remove the nodes until that point and morph
            if (idSetMatch) {
                insertionPoint = removeNodesBetween(insertionPoint, idSetMatch, ctx);
                morphOldNodeTo(idSetMatch, newChild, ctx);
                removeIdsFromConsideration(ctx, newChild);
                continue;
            }

            // no id set match found, so scan forward for a soft match for the current node
            let softMatch = findSoftMatch(newParent, oldParent, newChild, insertionPoint, ctx);

            // if we found a soft match for the current node, morph
            if (softMatch) {
                insertionPoint = removeNodesBetween(insertionPoint, softMatch, ctx);
                morphOldNodeTo(softMatch, newChild, ctx);
                removeIdsFromConsideration(ctx, newChild);
                continue;
            }

            // abandon all hope of morphing, just insert the new child before the insertion point
            // and move on
            if (ctx.callbacks.beforeNodeAdded(newChild) === false) return;

            oldParent.insertBefore(newChild, insertionPoint);
            ctx.callbacks.afterNodeAdded(newChild);
            removeIdsFromConsideration(ctx, newChild);
        }

        // remove any remaining old nodes that didn't match up with new content
        while (insertionPoint !== null) {

            let tempNode = insertionPoint;
            insertionPoint = insertionPoint.nextSibling;
            removeNode(tempNode, ctx);
        }
    }

    //=============================================================================
    // Attribute Syncing Code
    //=============================================================================

    /**
     * @param attr {String} the attribute to be mutated
     * @param to {Element} the element that is going to be updated
     * @param updateType {("update"|"remove")}
     * @param ctx the merge context
     * @returns {boolean} true if the attribute should be ignored, false otherwise
     */
    function ignoreAttribute(attr, to, updateType, ctx) {
        if(attr === 'value' && ctx.ignoreActiveValue && to === document.activeElement){
            return true;
        }
        return ctx.callbacks.beforeAttributeUpdated(attr, to, updateType) === false;
    }

    /**
     * syncs a given node with another node, copying over all attributes and
     * inner element state from the 'from' node to the 'to' node
     *
     * @param {Element} from the element to copy attributes & state from
     * @param {Element} to the element to copy attributes & state to
     * @param ctx the merge context
     */
    function syncNodeFrom(from, to, ctx) {
        let type = from.nodeType

        // if is an element type, sync the attributes from the
        // new node into the new node
        if (type === 1 /* element type */) {
            const fromAttributes = from.attributes;
            const toAttributes = to.attributes;
            for (const fromAttribute of fromAttributes) {
                if (ignoreAttribute(fromAttribute.name, to, 'update', ctx)) {
                    continue;
                }
                if (to.getAttribute(fromAttribute.name) !== fromAttribute.value) {
                    to.setAttribute(fromAttribute.name, fromAttribute.value);
                }
            }
            // iterate backwards to avoid skipping over items when a delete occurs
            for (let i = toAttributes.length - 1; 0 <= i; i--) {
                const toAttribute = toAttributes[i];
                if (ignoreAttribute(toAttribute.name, to, 'remove', ctx)) {
                    continue;
                }
                if (!from.hasAttribute(toAttribute.name)) {
                    to.removeAttribute(toAttribute.name);
                }
            }
        }

        // sync text nodes
        if (type === 8 /* comment */ || type === 3 /* text */) {
            if (to.nodeValue !== from.nodeValue) {
                to.nodeValue = from.nodeValue;
            }
        }

        if (!ignoreValueOfActiveElement(to, ctx)) {
            // sync input values
            syncInputValue(from, to, ctx);
        }
    }

    /**
     * @param from {Element} element to sync the value from
     * @param to {Element} element to sync the value to
     * @param attributeName {String} the attribute name
     * @param ctx the merge context
     */
    function syncBooleanAttribute(from, to, attributeName, ctx) {
        if (from[attributeName] !== to[attributeName]) {
            let ignoreUpdate = ignoreAttribute(attributeName, to, 'update', ctx);
            if (!ignoreUpdate) {
                to[attributeName] = from[attributeName];
            }
            if (from[attributeName]) {
                if (!ignoreUpdate) {
                    to.setAttribute(attributeName, from[attributeName]);
                }
            } else {
                if (!ignoreAttribute(attributeName, to, 'remove', ctx)) {
                    to.removeAttribute(attributeName);
                }
            }
        }
    }

    /**
     * NB: many bothans died to bring us information:
     *
     * https://github.com/patrick-steele-idem/morphdom/blob/master/src/specialElHandlers.js
     * https://github.com/choojs/nanomorph/blob/master/lib/morph.js#L113
     *
     * @param from {Element} the element to sync the input value from
     * @param to {Element} the element to sync the input value to
     * @param ctx the merge context
     */
    function syncInputValue(from, to, ctx) {
        if (from instanceof HTMLInputElement &&
            to instanceof HTMLInputElement &&
            from.type !== 'file') {

            let fromValue = from.value;
            let toValue = to.value;

            // sync boolean attributes
            syncBooleanAttribute(from, to, 'checked', ctx);
            syncBooleanAttribute(from, to, 'disabled', ctx);

            if (!from.hasAttribute('value')) {
                if (!ignoreAttribute('value', to, 'remove', ctx)) {
                    to.value = '';
                    to.removeAttribute('value');
                }
            } else if (fromValue !== toValue) {
                if (!ignoreAttribute('value', to, 'update', ctx)) {
                    to.setAttribute('value', fromValue);
                    to.value = fromValue;
                }
            }
        } else if (from instanceof HTMLOptionElement) {
            syncBooleanAttribute(from, to, 'selected', ctx)
        } else if (from instanceof HTMLTextAreaElement && to instanceof HTMLTextAreaElement) {
            let fromValue = from.value;
            let toValue = to.value;
            if (ignoreAttribute('value', to, 'update', ctx)) {
                return;
            }
            if (fromValue !== toValue) {
                to.value = fromValue;
            }
            if (to.firstChild && to.firstChild.nodeValue !== fromValue) {
                to.firstChild.nodeValue = fromValue
            }
        }
    }

    //=============================================================================
    // the HEAD tag can be handled specially, either w/ a 'merge' or 'append' style
    //=============================================================================
    function handleHeadElement(newHeadTag, currentHead, ctx) {

        let added = []
        let removed = []
        let preserved = []
        let nodesToAppend = []

        let headMergeStyle = ctx.head.style;

        // put all new head elements into a Map, by their outerHTML
        let srcToNewHeadNodes = new Map();
        for (const newHeadChild of newHeadTag.children) {
            srcToNewHeadNodes.set(newHeadChild.outerHTML, newHeadChild);
        }

        // for each elt in the current head
        for (const currentHeadElt of currentHead.children) {

            // If the current head element is in the map
            let inNewContent = srcToNewHeadNodes.has(currentHeadElt.outerHTML);
            let isReAppended = ctx.head.shouldReAppend(currentHeadElt);
            let isPreserved = ctx.head.shouldPreserve(currentHeadElt);
            if (inNewContent || isPreserved) {
                if (isReAppended) {
                    // remove the current version and let the new version replace it and re-execute
                    removed.push(currentHeadElt);
                } else {
                    // this element already exists and should not be re-appended, so remove it from
                    // the new content map, preserving it in the DOM
                    srcToNewHeadNodes.delete(currentHeadElt.outerHTML);
                    preserved.push(currentHeadElt);
                }
            } else {
                if (headMergeStyle === "append") {
                    // we are appending and this existing element is not new content
                    // so if and only if it is marked for re-append do we do anything
                    if (isReAppended) {
                        removed.push(currentHeadElt);
                        nodesToAppend.push(currentHeadElt);
                    }
                } else {
                    // if this is a merge, we remove this content since it is not in the new head
                    if (ctx.head.shouldRemove(currentHeadElt) !== false) {
                        removed.push(currentHeadElt);
                    }
                }
            }
        }

        // Push the remaining new head elements in the Map into the
        // nodes to append to the head tag
        nodesToAppend.push(...srcToNewHeadNodes.values());
        log("to append: ", nodesToAppend);

        let promises = [];
        for (const newNode of nodesToAppend) {
            log("adding: ", newNode);
            let newElt = document.createRange().createContextualFragment(newNode.outerHTML).firstChild;
            log(newElt);
            if (ctx.callbacks.beforeNodeAdded(newElt) !== false) {
                if (newElt.href || newElt.src) {
                    let resolve = null;
                    let promise = new Promise(function (_resolve) {
                        resolve = _resolve;
                    });
                    newElt.addEventListener('load', function () {
                        resolve();
                    });
                    promises.push(promise);
                }
                currentHead.appendChild(newElt);
                ctx.callbacks.afterNodeAdded(newElt);
                added.push(newElt);
            }
        }

        // remove all removed elements, after we have appended the new elements to avoid
        // additional network requests for things like style sheets
        for (const removedElement of removed) {
            if (ctx.callbacks.beforeNodeRemoved(removedElement) !== false) {
                currentHead.removeChild(removedElement);
                ctx.callbacks.afterNodeRemoved(removedElement);
            }
        }

        ctx.head.afterHeadMorphed(currentHead, {added: added, kept: preserved, removed: removed});
        return promises;
    }

    //=============================================================================
    // Misc
    //=============================================================================

    function log() {
        //console.log(arguments);
    }

    function noOp() {
    }

    /*
      Deep merges the config object and the Idiomoroph.defaults object to
      produce a final configuration object
     */
    function mergeDefaults(config) {
        let finalConfig = {};
        // copy top level stuff into final config
        Object.assign(finalConfig, defaults);
        Object.assign(finalConfig, config);

        // copy callbacks into final config (do this to deep merge the callbacks)
        finalConfig.callbacks = {};
        Object.assign(finalConfig.callbacks, defaults.callbacks);
        Object.assign(finalConfig.callbacks, config.callbacks);

        // copy head config into final config (do this to deep merge the head)
        finalConfig.head = {};
        Object.assign(finalConfig.head, defaults.head);
        Object.assign(finalConfig.head, config.head);
        return finalConfig;
    }

    function createMorphContext(oldNode, newContent, config) {
        config = mergeDefaults(config);
        return {
            target: oldNode,
            newContent: newContent,
            config: config,
            morphStyle: config.morphStyle,
            ignoreActive: config.ignoreActive,
            ignoreActiveValue: config.ignoreActiveValue,
            idMap: createIdMap(oldNode, newContent),
            deadIds: new Set(),
            callbacks: config.callbacks,
            head: config.head
        }
    }

    function isIdSetMatch(node1, node2, ctx) {
        if (node1 == null || node2 == null) {
            return false;
        }
        if (node1.nodeType === node2.nodeType && node1.tagName === node2.tagName) {
            if (node1.id !== "" && node1.id === node2.id) {
                return true;
            } else {
                return getIdIntersectionCount(ctx, node1, node2) > 0;
            }
        }
        return false;
    }

    function isSoftMatch(node1, node2) {
        if (node1 == null || node2 == null) {
            return false;
        }
        return node1.nodeType === node2.nodeType && node1.tagName === node2.tagName
    }

    function removeNodesBetween(startInclusive, endExclusive, ctx) {
        while (startInclusive !== endExclusive) {
            let tempNode = startInclusive;
            startInclusive = startInclusive.nextSibling;
            removeNode(tempNode, ctx);
        }
        removeIdsFromConsideration(ctx, endExclusive);
        return endExclusive.nextSibling;
    }

    //=============================================================================
    // Scans forward from the insertionPoint in the old parent looking for a potential id match
    // for the newChild.  We stop if we find a potential id match for the new child OR
    // if the number of potential id matches we are discarding is greater than the
    // potential id matches for the new child
    //=============================================================================
    function findIdSetMatch(newContent, oldParent, newChild, insertionPoint, ctx) {

        // max id matches we are willing to discard in our search
        let newChildPotentialIdCount = getIdIntersectionCount(ctx, newChild, oldParent);

        let potentialMatch = null;

        // only search forward if there is a possibility of an id match
        if (newChildPotentialIdCount > 0) {
            let potentialMatch = insertionPoint;
            // if there is a possibility of an id match, scan forward
            // keep track of the potential id match count we are discarding (the
            // newChildPotentialIdCount must be greater than this to make it likely
            // worth it)
            let otherMatchCount = 0;
            while (potentialMatch != null) {

                // If we have an id match, return the current potential match
                if (isIdSetMatch(newChild, potentialMatch, ctx)) {
                    return potentialMatch;
                }

                // computer the other potential matches of this new content
                otherMatchCount += getIdIntersectionCount(ctx, potentialMatch, newContent);
                if (otherMatchCount > newChildPotentialIdCount) {
                    // if we have more potential id matches in _other_ content, we
                    // do not have a good candidate for an id match, so return null
                    return null;
                }

                // advanced to the next old content child
                potentialMatch = potentialMatch.nextSibling;
            }
        }
        return potentialMatch;
    }

    //=============================================================================
    // Scans forward from the insertionPoint in the old parent looking for a potential soft match
    // for the newChild.  We stop if we find a potential soft match for the new child OR
    // if we find a potential id match in the old parents children OR if we find two
    // potential soft matches for the next two pieces of new content
    //=============================================================================
    function findSoftMatch(newContent, oldParent, newChild, insertionPoint, ctx) {

        let potentialSoftMatch = insertionPoint;
        let nextSibling = newChild.nextSibling;
        let siblingSoftMatchCount = 0;

        while (potentialSoftMatch != null) {

            if (getIdIntersectionCount(ctx, potentialSoftMatch, newContent) > 0) {
                // the current potential soft match has a potential id set match with the remaining new
                // content so bail out of looking
                return null;
            }

            // if we have a soft match with the current node, return it
            if (isSoftMatch(newChild, potentialSoftMatch)) {
                return potentialSoftMatch;
            }

            if (isSoftMatch(nextSibling, potentialSoftMatch)) {
                // the next new node has a soft match with this node, so
                // increment the count of future soft matches
                siblingSoftMatchCount++;
                nextSibling = nextSibling.nextSibling;

                // If there are two future soft matches, bail to allow the siblings to soft match
                // so that we don't consume future soft matches for the sake of the current node
                if (siblingSoftMatchCount >= 2) {
                    return null;
                }
            }

            // advanced to the next old content child
            potentialSoftMatch = potentialSoftMatch.nextSibling;
        }

        return potentialSoftMatch;
    }

    function parseContent(newContent) {
        let parser = new DOMParser();

        // remove svgs to avoid false-positive matches on head, etc.
        let contentWithSvgsRemoved = newContent.replace(/<svg(\s[^>]*>|>)([\s\S]*?)<\/svg>/gim, '');

        // if the newContent contains a html, head or body tag, we can simply parse it w/o wrapping
        if (contentWithSvgsRemoved.match(/<\/html>/) || contentWithSvgsRemoved.match(/<\/head>/) || contentWithSvgsRemoved.match(/<\/body>/)) {
            let content = parser.parseFromString(newContent, "text/html");
            // if it is a full HTML document, return the document itself as the parent container
            if (contentWithSvgsRemoved.match(/<\/html>/)) {
                content.generatedByIdiomorph = true;
                return content;
            } else {
                // otherwise return the html element as the parent container
                let htmlElement = content.firstChild;
                if (htmlElement) {
                    htmlElement.generatedByIdiomorph = true;
                    return htmlElement;
                } else {
                    return null;
                }
            }
        } else {
            // if it is partial HTML, wrap it in a template tag to provide a parent element and also to help
            // deal with touchy tags like tr, tbody, etc.
            let responseDoc = parser.parseFromString("<body><template>" + newContent + "</template></body>", "text/html");
            let content = responseDoc.body.querySelector('template').content;
            content.generatedByIdiomorph = true;
            return content
        }
    }

    function normalizeContent(newContent) {
        if (newContent == null) {
            // noinspection UnnecessaryLocalVariableJS
            const dummyParent = document.createElement('div');
            return dummyParent;
        } else if (newContent.generatedByIdiomorph) {
            // the template tag created by idiomorph parsing can serve as a dummy parent
            return newContent;
        } else if (newContent instanceof Node) {
            // a single node is added as a child to a dummy parent
            const dummyParent = document.createElement('div');
            dummyParent.append(newContent);
            return dummyParent;
        } else {
            // all nodes in the array or HTMLElement collection are consolidated under
            // a single dummy parent element
            const dummyParent = document.createElement('div');
            for (const elt of [...newContent]) {
                dummyParent.append(elt);
            }
            return dummyParent;
        }
    }

    function insertSiblings(previousSibling, morphedNode, nextSibling) {
        let stack = []
        let added = []
        while (previousSibling != null) {
            stack.push(previousSibling);
            previousSibling = previousSibling.previousSibling;
        }
        while (stack.length > 0) {
            let node = stack.pop();
            added.push(node); // push added preceding siblings on in order and insert
            morphedNode.parentElement.insertBefore(node, morphedNode);
        }
        added.push(morphedNode);
        while (nextSibling != null) {
            stack.push(nextSibling);
            added.push(nextSibling); // here we are going in order, so push on as we scan, rather than add
            nextSibling = nextSibling.nextSibling;
        }
        while (stack.length > 0) {
            morphedNode.parentElement.insertBefore(stack.pop(), morphedNode.nextSibling);
        }
        return added;
    }

    function findBestNodeMatch(newContent, oldNode, ctx) {
        let currentElement;
        currentElement = newContent.firstChild;
        let bestElement = currentElement;
        let score = 0;
        while (currentElement) {
            let newScore = scoreElement(currentElement, oldNode, ctx);
            if (newScore > score) {
                bestElement = currentElement;
                score = newScore;
            }
            currentElement = currentElement.nextSibling;
        }
        return bestElement;
    }

    function scoreElement(node1, node2, ctx) {
        if (isSoftMatch(node1, node2)) {
            return .5 + getIdIntersectionCount(ctx, node1, node2);
        }
        return 0;
    }

    function removeNode(tempNode, ctx) {
        removeIdsFromConsideration(ctx, tempNode)
        if (ctx.callbacks.beforeNodeRemoved(tempNode) === false) return;

        tempNode.remove();
        ctx.callbacks.afterNodeRemoved(tempNode);
    }

    //=============================================================================
    // ID Set Functions
    //=============================================================================

    function isIdInConsideration(ctx, id) {
        return !ctx.deadIds.has(id);
    }

    function idIsWithinNode(ctx, id, targetNode) {
        let idSet = ctx.idMap.get(targetNode) || EMPTY_SET;
        return idSet.has(id);
    }

    function removeIdsFromConsideration(ctx, node) {
        let idSet = ctx.idMap.get(node) || EMPTY_SET;
        for (const id of idSet) {
            ctx.deadIds.add(id);
        }
    }

    function getIdIntersectionCount(ctx, node1, node2) {
        let sourceSet = ctx.idMap.get(node1) || EMPTY_SET;
        let matchCount = 0;
        for (const id of sourceSet) {
            // a potential match is an id in the source and potentialIdsSet, but
            // that has not already been merged into the DOM
            if (isIdInConsideration(ctx, id) && idIsWithinNode(ctx, id, node2)) {
                ++matchCount;
            }
        }
        return matchCount;
    }

    /**
     * A bottom up algorithm that finds all elements with ids inside of the node
     * argument and populates id sets for those nodes and all their parents, generating
     * a set of ids contained within all nodes for the entire hierarchy in the DOM
     *
     * @param node {Element}
     * @param {Map<Node, Set<String>>} idMap
     */
    function populateIdMapForNode(node, idMap) {
        let nodeParent = node.parentElement;
        // find all elements with an id property
        let idElements = node.querySelectorAll('[id]');
        for (const elt of idElements) {
            let current = elt;
            // walk up the parent hierarchy of that element, adding the id
            // of element to the parent's id set
            while (current !== nodeParent && current != null) {
                let idSet = idMap.get(current);
                // if the id set doesn't exist, create it and insert it in the map
                if (idSet == null) {
                    idSet = new Set();
                    idMap.set(current, idSet);
                }
                idSet.add(elt.id);
                current = current.parentElement;
            }
        }
    }

    /**
     * This function computes a map of nodes to all ids contained within that node (inclusive of the
     * node).  This map can be used to ask if two nodes have intersecting sets of ids, which allows
     * for a looser definition of "matching" than tradition id matching, and allows child nodes
     * to contribute to a parent nodes matching.
     *
     * @param {Element} oldContent  the old content that will be morphed
     * @param {Element} newContent  the new content to morph to
     * @returns {Map<Node, Set<String>>} a map of nodes to id sets for the
     */
    function createIdMap(oldContent, newContent) {
        let idMap = new Map();
        populateIdMapForNode(oldContent, idMap);
        populateIdMapForNode(newContent, idMap);
        return idMap;
    }

    //=============================================================================
    // This is what ends up becoming the Idiomorph global object
    //=============================================================================
    return {
        morph,
        defaults
    }
})();
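For orientation only (not part of the committed file above): a minimal, hypothetical sketch of how the Idiomorph API vendored above can be called. The element ID, HTML snippet, and callback condition here are illustrative assumptions, not code from this commit.

// Hypothetical usage sketch of the vendored Idiomorph.morph() API above.
const current = document.getElementById("outfit-viewer"); // illustrative ID
const incomingHtml =
  '<outfit-layer data-asset-id="123"><img src="layer.png"></outfit-layer>'; // illustrative markup
Idiomorph.morph(current, incomingHtml, {
  morphStyle: "innerHTML", // only morph the children of `current`
  callbacks: {
    // Returning false from beforeNodeRemoved vetoes that removal
    // (see morphOldNodeTo above). "keep-me" is an illustrative ID.
    beforeNodeRemoved: (node) => node.id !== "keep-me",
  },
});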
app/assets/javascripts/lib/tweenjs.min.js (vendored, new file, +12 lines)
File diff suppressed because one or more lines are too long
@@ -7,19 +7,47 @@ class OutfitLayer extends HTMLElement {
   }
 
   connectedCallback() {
-    setTimeout(() => this.#initializeImage(), 0);
+    setTimeout(() => this.#connectToChildren(), 0);
   }
 
-  #initializeImage() {
-    this.image = this.querySelector("img");
-    if (!this.image) {
-      throw new Error(`<outfit-layer> must contain an <img> tag`);
-    }
+  disconnectedCallback() {
+    window.removeEventListener("message", this.#onMessage);
+  }
 
-    this.image.addEventListener("load", () => this.#setStatus("loaded"));
-    this.image.addEventListener("error", () => this.#setStatus("error"));
+  #connectToChildren() {
+    const image = this.querySelector("img");
+    const iframe = this.querySelector("iframe");
 
-    this.#setStatus(this.image.complete ? "loaded" : "loading");
+    if (image) {
+      image.addEventListener("load", () => this.#setStatus("loaded"));
+      image.addEventListener("error", () => this.#setStatus("error"));
+      this.#setStatus(image.complete ? "loaded" : "loading");
+    } else if (iframe) {
+      this.iframe = iframe;
+      window.addEventListener("message", (m) => this.#onMessage(m));
+      this.#setStatus("loading");
+    } else {
+      throw new Error(
+        `<outfit-layer> must contain an <img> or <iframe> tag`,
+      );
+    }
+  }
+
+  #onMessage({ source, data }) {
+    if (source !== this.iframe.contentWindow) {
+      return;
+    }
+
+    if (
+      data.type === "status" &&
+      ["loaded", "error"].includes(data.status)
+    ) {
+      this.#setStatus(data.status);
+    } else {
+      throw new Error(
+        `<outfit-layer> got unexpected message: ${JSON.stringify(data)}`,
+      );
+    }
   }
 
   #setStatus(newStatus) {
@@ -29,3 +57,33 @@ class OutfitLayer extends HTMLElement {
 }
 
 customElements.define("outfit-layer", OutfitLayer);
+
+// Morph turbo-frames on this page, to reuse asset nodes when we want to—very
+// important for movies!—but ensure that it *doesn't* do its usual behavior of
+// aggressively reusing existing <outfit-layer> nodes for entirely different
+// assets. (It's a lot clearer for managing the loading state, and not showing
+// old incorrect layers!) (We also tried using `id` to enforce this… no luck.)
+addEventListener("turbo:before-frame-render", (event) => {
+  if (typeof Idiomorph !== "undefined") {
+    event.detail.render = (currentElement, newElement) => {
+      Idiomorph.morph(currentElement, newElement.innerHTML, {
+        morphStyle: "innerHTML",
+        callbacks: {
+          beforeNodeMorphed: (currentNode, newNode) => {
+            // If Idiomorph wants to transform an <outfit-layer> to
+            // have a different data-asset-id attribute, we replace
+            // the node ourselves and abort the morph.
+            if (
+              newNode.tagName === "OUTFIT-LAYER" &&
+              newNode.getAttribute("data-asset-id") !==
+                currentNode.getAttribute("data-asset-id")
+            ) {
+              currentNode.replaceWith(newNode);
+              return false;
+            }
+          },
+        },
+      });
+    };
+  }
+});
app/assets/javascripts/swf_assets/show.js (new file, +324 lines)
@@ -0,0 +1,324 @@
const canvas = document.getElementById("asset-canvas");
const libraryScript = document.getElementById("canvas-movie-library");
const libraryUrl = libraryScript.getAttribute("src");

// Read the asset ID from the URL, as an extra hint of what asset we're
// logging for. (This is helpful when there's a lot of assets animating!)
const assetId = document.location.pathname.split("/").at(-1);
const logPrefix = `[${assetId}] `.padEnd(9);

// State for controlling the movie.
let loadingStatus = "loading";
let playingStatus = getInitialPlayingStatus();

// State for loading the movie.
let library = null;
let movieClip = null;
let stage = null;

// State for animating the movie.
let frameRequestId = null;
let lastFrameTime = null;
let lastLogTime = null;
let numFramesSinceLastLog = 0;

// State for error reporting.
let hasLoggedRenderError = false;

function loadImage(src) {
  const image = new Image();
  image.crossOrigin = "anonymous";

  const promise = new Promise((resolve, reject) => {
    image.onload = () => {
      resolve(image);
    };
    image.onerror = () => {
      reject(new Error(`Failed to load image: ${JSON.stringify(src)}`));
    };
    image.src = src;
  });

  return promise;
}

async function getLibrary() {
  if (Object.keys(window.AdobeAn?.compositions || {}).length === 0) {
    throw new Error(
      `Movie library ${libraryUrl} did not add a composition to window.AdobeAn.compositions.`,
    );
  }
  const [compositionId, composition] = Object.entries(
    window.AdobeAn.compositions,
  )[0];
  if (Object.keys(window.AdobeAn.compositions).length > 1) {
    console.warn(
      `Grabbing composition ${compositionId}, but there are >1 here: `,
      Object.keys(window.AdobeAn.compositions).length,
    );
  }
  delete window.AdobeAn.compositions[compositionId];

  const library = composition.getLibrary();

  // One more loading step as part of loading this library is loading the
  // images it uses for sprites.
  //
  // TODO: I guess the manifest has these too, so we could put them in preload
  // meta tags to get them here faster?
  const librarySrcDir = libraryUrl.split("/").slice(0, -1).join("/");
  const manifestImages = new Map(
    library.properties.manifest.map(({ id, src }) => [
      id,
      loadImage(librarySrcDir + "/" + src),
    ]),
  );

  await Promise.all(manifestImages.values());

  // Finally, once we have the images loaded, the library object expects us to
  // mutate it (!) to give it the actual image and sprite sheet objects from
  // the loaded images. That's how the MovieClip's internal JS objects will
  // access the loaded data!
  const images = composition.getImages();
  for (const [id, image] of manifestImages.entries()) {
    images[id] = await image;
  }
  const spriteSheets = composition.getSpriteSheet();
  for (const { name, frames } of library.ssMetadata) {
    const image = await manifestImages.get(name);
    spriteSheets[name] = new window.createjs.SpriteSheet({
      images: [image],
      frames,
    });
  }

  return library;
}

function buildMovieClip(library) {
  let constructorName;
  try {
    const fileName = decodeURI(libraryUrl).split("/").pop();
    const fileNameWithoutExtension = fileName.split(".")[0];
    constructorName = fileNameWithoutExtension.replace(/[ -]/g, "");
    if (constructorName.match(/^[0-9]/)) {
      constructorName = "_" + constructorName;
    }
  } catch (e) {
    throw new Error(
      `Movie libraryUrl ${JSON.stringify(libraryUrl)} did not match expected ` +
        `format: ${e.message}`,
    );
  }

  const LibraryMovieClipConstructor = library[constructorName];
  if (!LibraryMovieClipConstructor) {
    throw new Error(
      `Expected JS movie library ${libraryUrl} to contain a constructor ` +
        `named ${constructorName}, but it did not: ${Object.keys(library)}`,
    );
  }
  const movieClip = new LibraryMovieClipConstructor();

  return movieClip;
}

function updateStage() {
  try {
    stage.update();
  } catch (e) {
    // If rendering the frame fails, log it and proceed. If it's an
    // animation, then maybe the next frame will work? Also alert the user,
    // just as an FYI. (This is pretty uncommon, so I'm not worried about
    // being noisy!)
    if (!hasLoggedRenderError) {
      console.error(`Error rendering movie clip ${libraryUrl}`, e);
      // TODO: Inform user about the failure
      hasLoggedRenderError = true;
    }
  }
}

function updateCanvasDimensions() {
  // Set the canvas's internal dimensions to be higher, if the device has high
  // DPI. Scale the movie clip to match, too.
  const internalWidth = canvas.offsetWidth * window.devicePixelRatio;
  const internalHeight = canvas.offsetHeight * window.devicePixelRatio;
  canvas.width = internalWidth;
  canvas.height = internalHeight;
  movieClip.scaleX = internalWidth / library.properties.width;
  movieClip.scaleY = internalHeight / library.properties.height;
}

async function startMovie() {
  // Load the movie's library (from the JS file already run), and use it to
  // build a movie clip.
  library = await getLibrary();
  movieClip = buildMovieClip(library);

  updateCanvasDimensions();

  if (canvas.getContext("2d") == null) {
    console.warn(`Out of memory, can't use canvas for ${libraryUrl}.`);
    // TODO: "Too many animations!"
    return;
  }

  stage = new window.createjs.Stage(canvas);
  stage.addChild(movieClip);
  updateStage();

  loadingStatus = "loaded";
  canvas.setAttribute("data-status", "loaded");

  updateAnimationState();
}

function updateAnimationState() {
  const shouldRunAnimations =
    loadingStatus === "loaded" && playingStatus === "playing";

  if (shouldRunAnimations && frameRequestId == null) {
    lastFrameTime = document.timeline.currentTime;
    lastLogTime = document.timeline.currentTime;
    numFramesSinceLastLog = 0;
    documentHiddenSinceLastFrame = document.hidden;
    frameRequestId = requestAnimationFrame(onAnimationFrame);
  } else if (!shouldRunAnimations && frameRequestId != null) {
    cancelAnimationFrame(frameRequestId);
    lastFrameTime = null;
    lastLogTime = null;
    numFramesSinceLastLog = 0;
    documentHiddenSinceLastFrame = false;
    frameRequestId = null;
  }
}

function onAnimationFrame() {
  const targetFps = library.properties.fps;
  const msPerFrame = 1000 / targetFps;
  const msSinceLastFrame = document.timeline.currentTime - lastFrameTime;
  const msSinceLastLog = document.timeline.currentTime - lastLogTime;

  // If it takes too long to render a frame, cancel the movie, on the
  // assumption that we're riding the CPU too hard. (Some movies do this!)
  //
  // But note that, if the page is hidden (e.g. the window is not visible),
  // it's normal for the browser to pause animations. So, if we detected that
  // the document became hidden between this frame and the last, no
  // intervention is necesary.
  if (msSinceLastFrame >= 2000 && !documentHiddenSinceLastFrame) {
    pause();
    console.warn(`Paused movie for taking too long: ${msSinceLastFrame}ms`);
    // TODO: Display message about low FPS, and sync up to the parent.
    return;
  }

  if (msSinceLastFrame >= msPerFrame) {
    updateStage();
    lastFrameTime = document.timeline.currentTime;

    // If we're a little bit late to this frame, probably because the frame
    // rate isn't an even divisor of 60 FPS, backdate it to what the ideal time
    // for this frame *would* have been. (For example, without this tweak, a
    // 24 FPS animation like the Floating Negg Faerie actually runs at 20 FPS,
    // because it wants to run every 41.66ms, but a 60 FPS browser checks in
    // every 16.66ms, so the best it can do is 50ms. With this tweak, we can
    // *pretend* we ran at 41.66ms, so that the next frame timing correctly
    // takes the extra 9.33ms into account.)
    const msFrameDelay = msSinceLastFrame - msPerFrame;
    if (msFrameDelay < msPerFrame) {
      lastFrameTime -= msFrameDelay;
    }

    numFramesSinceLastLog++;
  }

  if (msSinceLastLog >= 5000) {
    const fps = numFramesSinceLastLog / (msSinceLastLog / 1000);
    console.debug(
      `${logPrefix} FPS: ${fps.toFixed(2)} (Target: ${targetFps})`,
    );
    lastLogTime = document.timeline.currentTime;
    numFramesSinceLastLog = 0;
  }

  frameRequestId = requestAnimationFrame(onAnimationFrame);
  documentHiddenSinceLastFrame = document.hidden;
}

// If `document.hidden` becomes true at any point, log it for the next
// animation frame. (The next frame will reset the state, as will starting or
// stopping the animation.)
document.addEventListener("visibilitychange", () => {
  if (document.hidden) {
    documentHiddenSinceLastFrame = true;
  }
});

function play() {
  playingStatus = "playing";
  updateAnimationState();
}

function pause() {
  playingStatus = "paused";
  updateAnimationState();
}

function getInitialPlayingStatus() {
  const params = new URLSearchParams(document.location.search);
  if (params.has("playing")) {
    return "playing";
  } else {
    return "paused";
  }
}

window.addEventListener("resize", () => {
  updateCanvasDimensions();

  // Redraw the stage with the new dimensions - but with `tickOnUpdate` set
  // to `false`, so that we don't advance by a frame. This keeps us
  // really-paused if we're paused, and avoids skipping ahead by a frame if
  // we're playing.
  stage.tickOnUpdate = false;
  updateStage();
  stage.tickOnUpdate = true;
});

window.addEventListener("message", ({ data }) => {
  // NOTE: For more sensitive messages, it's important for security to also
  // check the `origin` property of the incoming event. But in this case, I'm
  // okay with whatever site is embedding us being able to send play/pause!
  if (data.type === "play") {
    play();
  } else if (data.type === "pause") {
    pause();
  } else {
    throw new Error(`unexpected message: ${JSON.stringify(data)}`);
  }
});

startMovie()
  .then(() => {
    parent.postMessage(
      { type: "status", status: "loaded" },
      document.location.origin,
    );
  })
  .catch((error) => {
    console.error(logPrefix, error);

    loadingStatus = "error";
    parent.postMessage(
      { type: "status", status: "error" },
      document.location.origin,
    );

    // If loading the movie fails, show the fallback image instead, by moving
    // it out of the canvas content and into the body.
    document.body.appendChild(document.getElementById("fallback"));
    console.warn("Showing fallback image instead.");
  });
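For orientation only (not part of the committed file above): a hypothetical embedder-side sketch of the postMessage protocol this page script implements. The embed accepts { type: "play" } and { type: "pause" }, and reports { type: "status", status: "loaded" | "error" } back to its parent. The selector and target origin below are illustrative assumptions.

// Hypothetical parent-page sketch of driving the embed above via postMessage.
const frame = document.querySelector("outfit-layer iframe"); // illustrative selector
window.addEventListener("message", ({ source, data }) => {
  // Only trust messages coming from this particular embed's window.
  if (source === frame.contentWindow && data.type === "status") {
    console.log("embed status:", data.status); // "loaded" or "error"
  }
});
// Drive playback; the embed deliberately accepts play/pause from any embedder.
frame.contentWindow.postMessage({ type: "pause" }, "*");
frame.contentWindow.postMessage({ type: "play" }, "*");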
@@ -20,5 +20,4 @@
 @import outfits/index
 @import outfits/new
 @import pets/bulk
-@import swf_assets/links
 @import users/top_contributors
@@ -54,13 +54,16 @@ body.items-show
     position: absolute
     inset: 0
 
-    img
+    img, iframe
       width: 100%
       height: 100%
 
   &:has(outfit-layer:state(loading))
     background: gray
 
+  &:has(outfit-layer:state(error))
+    border-color: $error-border-color
+
   .species-color-picker
     .error-icon
       cursor: help
@@ -70,4 +73,3 @@ body.items-show
     select
       border-color: $error-border-color
       color: $error-color
-
@@ -1,10 +0,0 @@
-@import "../partials/assets-list"
-
-body.swf_assets-links
-  #swf-assets
-    +assets-list
-
-    li
-      span
-        font-size: 75%
-        word-wrap: break-word
app/assets/stylesheets/swf_assets/show.css (new file, +8 lines)

@@ -0,0 +1,8 @@
#asset-canvas,
#fallback {
  position: absolute;
  left: 0;
  top: 0;
  width: min(100vw, 100vh);
  height: min(100vw, 100vh);
}
app/controllers/swf_assets_controller.rb (new file, +44 lines)

@@ -0,0 +1,44 @@
class SwfAssetsController < ApplicationController
  # We're very careful with what content is allowed to load. This is because
  # asset movies run arbitrary JS, and, while we generally trust content from
  # Neopets.com, let's not be *allowing* movie JS to do whatever it wants! This
  # is a good default security stance, even if we don't foresee an attack.
  content_security_policy do |policy|
    policy.sandbox "allow-scripts"
    policy.default_src "none"

    policy.img_src -> {
      src_list(
        helpers.image_url("favicon.png"),
        @swf_asset.image_url,
        *@swf_asset.canvas_movie_sprite_urls,
      )
    }

    policy.script_src_elem -> {
      src_list(
        helpers.javascript_url("lib/easeljs.min"),
        helpers.javascript_url("lib/tweenjs.min"),
        helpers.javascript_url("swf_assets/show"),
        @swf_asset.canvas_movie_library_url,
      )
    }

    policy.style_src_elem -> {
      src_list(
        helpers.stylesheet_url("swf_assets/show"),
      )
    }
  end

  def show
    @swf_asset = SwfAsset.find params[:id]
    render layout: nil
  end

  private

  def src_list(*urls)
    urls.filter(&:present?).map { |url| url.sub(/\?.*\z/, "") }.join(" ")
  end
end
@@ -140,7 +140,10 @@ class SwfAsset < ApplicationRecord
       # assets in the same manifest, and earlier ones are broken and later
       # ones are fixed. I don't know the logic exactly, but that's what we've
       # seen!
-      { js: assets_by_ext[:js].last }
+      {
+        js: assets_by_ext[:js].last,
+        sprites: assets_by_ext.fetch(:png, []),
+      }
     else
       # Otherwise, return the first PNG and the first SVG. (Unlike the JS
       # case, it's important to choose the *first* PNG, because sometimes
@@ -185,8 +188,21 @@ class SwfAsset < ApplicationRecord
     nil
   end
 
+  def canvas_movie?
+    canvas_movie_library_url.present?
+  end
+
+  def canvas_movie_library_url
+    manifest_asset_urls[:js]
+  end
+
+  def canvas_movie_sprite_urls
+    return [] unless canvas_movie?
+    manifest_asset_urls[:sprites]
+  end
+
   def canvas_movie_image_url
-    return nil unless manifest_asset_urls[:js]
+    return nil unless canvas_movie?
 
     CANVAS_MOVIE_IMAGE_URL_TEMPLATE.expand(
       libraryUrl: manifest_asset_urls[:js],
@@ -6,4 +6,7 @@
     "zone": swf_asset.zone.label,
   },
 }
+- if swf_asset.canvas_movie?
+  %iframe{src: swf_asset_path(swf_asset) + "?playing"}
+- else
 = image_tag swf_asset.image_url, alt: ""
@@ -40,5 +40,5 @@
 %footer= t '.contributors.footer'
 
 - content_for :javascripts do
-  = javascript_include_tag 'outfit-viewer', async: true
+  = javascript_include_tag "lib/idiomorph", async: true
+  = javascript_include_tag "outfit-viewer", async: true
app/views/swf_assets/show.html.haml (new file, +36 lines)

@@ -0,0 +1,36 @@
!!! 5
%html
  %head
    %meta{charset: "utf-8"}
    %meta{name: "viewport", content: "width=device-width, initial-scale=1"}
    %title
      Embed for Asset ##{@swf_asset.id} | #{t "app_name"}
    %link{href: image_path("favicon.png"), rel: "icon"}

    -# NOTE: For all these assets, the Content-Security-Policy doesn't account
    -# for asset debug mode, so let's just opt out of it with `debug: false`!
    - if @swf_asset.canvas_movie?
      -# Load the stylesheet first, because displaying things correctly is the
      -# actual most essential thing.
      = stylesheet_link_tag "swf_assets/show", debug: false

      -# This is optional, but preloading the sprites can help us from having
      -# to wait on all the other JS to load and set up before we start!
      - @swf_asset.canvas_movie_sprite_urls.each do |sprite_url|
        %link{rel: "preload", href: sprite_url, as: "image", crossorigin: "anonymous"}

      -# Load the scripts: EaselJS libs first, then the asset's "library" file,
      -# then our page script that starts the movie.
      = javascript_include_tag "lib/easeljs.min", defer: true, debug: false
      = javascript_include_tag "lib/tweenjs.min", defer: true, debug: false
      = javascript_include_tag @swf_asset.canvas_movie_library_url, defer: true,
        id: "canvas-movie-library"
      = javascript_include_tag "swf_assets/show", defer: true, debug: false
  %body
    - if @swf_asset.canvas_movie?
      %canvas#asset-canvas
      -# Show a fallback image, for users with JS disabled. Lazy-load it, so
      -# the browser won't bother to load it if it's not used.
      = image_tag @swf_asset.image_url, id: "fallback", alt: "", loading: "lazy"
    - else
      = image_tag @swf_asset.image_url, alt: ""
@@ -37,6 +37,7 @@ OpenneoImpressItems::Application.routes.draw do
     resources :alt_styles, path: 'alt-styles', only: [:index]
   end
   resources :alt_styles, path: 'alt-styles', only: [:index]
+  resources :swf_assets, path: 'swf-assets', only: [:show]
 
   # Loading and modeling pets!
   post '/pets/load' => 'pets#load', :as => :load_pet