replace /api/assetProxy with a CDN proxy
When we decided to start out with /api/assetProxy, we didn't know how much the load would be in practice, so we just went ahead and tried it! Turns out, it was too high, and Vercel shut down our deployment 😅
Now, we've off-loaded this to a Fastly CDN proxy, which should run even faster and more efficiently, without adding pressure to Vercel servers and pushing our usage numbers! And I suspect we're gonna stay comfortably in Fastly's free tier :) but we'll see!
(Though, as always, if Neopets can finally upgrade their own stuff to HTTPS, we'll get to tear down this whole proxy altogether!)
This commit is contained in:
parent
59ae70d417
commit
456a098df9
4 changed files with 19 additions and 218 deletions
|
@ -1,60 +0,0 @@
|
|||
import util from "util";
|
||||
import stream from "stream";
|
||||
import fetch from "node-fetch";
|
||||
|
||||
const streamPipeline = util.promisify(stream.pipeline);
|
||||
|
||||
// Allowlist of upstream URL patterns we're willing to proxy. Anything that
// doesn't match one of these is rejected, so this endpoint can't be abused
// as an open proxy. (Fix: the pets.neopets.com patterns previously used the
// typo'd character class `[a-zA-Z90-9]`; the stray `9` was redundant and
// clearly meant to be `[a-zA-Z0-9]`. Matching behavior is unchanged.)
const VALID_URL_PATTERNS = [
  /^http:\/\/images\.neopets\.com\/items\/[a-zA-Z0-9_ -]+\.gif$/,
  /^http:\/\/images\.neopets\.com\/cp\/(bio|items)\/data\/[0-9]{3}\/[0-9]{3}\/[0-9]{3}\/[a-f0-9_]+\/[a-zA-Z0-9_ \-\/]+\.(svg|png|js)(\?[0-9]*)?$/,
  /^http:\/\/images\.neopets\.com\/cp\/(bio|items)\/swf\/[0-9]{3}\/[0-9]{3}\/[0-9]{3}\/[a-f0-9_]+\.swf$/,

  // These ones aren't actually used by Impress 2020 - we added a cheap hack to
  // old Impress to upgrade images to use the Impress 2020 asset proxy 😅
  /^http:\/\/pets\.neopets\.com\/cp\/[a-zA-Z0-9]+\/[0-9]+\/[0-9]+\.png$/,
  /^http:\/\/pets\.neopets\.com\/cpn\/[a-zA-Z0-9_]+\/[0-9]+\/[0-9]+\.png$/,
];
|
||||
|
||||
/**
 * Vercel serverless handler: fetches an allowlisted Neopets asset over
 * plain HTTP, and re-serves it to the client (over HTTPS, in production).
 *
 * Responds 400 for a missing or non-allowlisted `?url`, 502 if the upstream
 * fetch itself fails, and otherwise mirrors the upstream status and the
 * cache-relevant headers.
 */
export default async (req, res) => {
  const urlToProxy = req.query.url;
  if (!urlToProxy) {
    return res
      .status(400)
      .send("Bad request: Must provide `?url` in the query string");
  }

  if (!VALID_URL_PATTERNS.some((p) => urlToProxy.match(p))) {
    return res
      .status(400)
      .send("Bad request: URL did not match any valid patterns");
  }

  console.debug("[assetProxy] 💌 Sending: %s", urlToProxy);

  // Fix: previously this `await fetch` was unguarded, so a DNS/network
  // failure became an unhandled rejection and an opaque platform error.
  let proxyRes;
  try {
    proxyRes = await fetch(urlToProxy);
  } catch (error) {
    console.error("[assetProxy] 💥 Fetch failed: %s", urlToProxy, error);
    return res.status(502).send("Bad gateway: could not fetch upstream asset");
  }

  console.debug(
    `[assetProxy] %s %s: %s`,
    proxyRes.ok ? "✅" : "🛑",
    `${proxyRes.status} ${proxyRes.statusText}`.padStart(7, " "),
    urlToProxy
  );

  res.status(proxyRes.status);

  // Copy through the headers clients and caches care about. Fix: skip
  // headers the upstream response didn't include — `headers.get` returns
  // null for those, and `res.setHeader(name, null)` throws, which used to
  // crash the proxy for responses without e.g. an ETag.
  for (const name of ["Content-Type", "Cache-Control", "ETag", "Last-Modified"]) {
    const value = proxyRes.headers.get(name);
    if (value != null) {
      res.setHeader(name, value);
    }
  }

  if (!proxyRes.headers.get("Content-Encoding")) {
    // If the content is not encoded (I think their images generally aren't?),
    // stream the body directly, to speed things up a bit.
    const contentLength = proxyRes.headers.get("Content-Length");
    if (contentLength != null) {
      res.setHeader("Content-Length", contentLength);
    }
    await streamPipeline(proxyRes.body, res);
  } else {
    // Otherwise, I don't immediately know how to stream encoded content, so,
    // let's just await the full body and send it the normal way.
    const buffer = await proxyRes.buffer();
    res.send(buffer);
  }
};
|
|
@ -1,150 +0,0 @@
|
|||
<!DOCTYPE html>
<html>
  <head>
    <script src="https://code.createjs.com/1.0.0/easeljs.min.js"></script>
    <script src="https://code.createjs.com/1.0.0/tweenjs.min.js"></script>
    <script src="/api/assetProxy?url=http://images.neopets.com/cp/items/data/000/000/564/564507_fc3216b9b8/all-item_foreground_lower.js"></script>
  </head>
  <body>
    <div style="display: flex; justify-content: center;">
      <div
        style="
          width: 100%;
          max-width: 600px;
          border: 1px solid #aaa;
          position: relative;
        "
      >
        <!-- Padding-bottom trick: keeps this container square. -->
        <div style="padding-bottom: 100%;"></div>
        <canvas
          id="stage-canvas"
          style="
            position: absolute;
            left: 0;
            right: 0;
            top: 0;
            bottom: 0;
            width: 100%;
            height: 100%;
          "
        ></canvas>
      </div>
    </div>
    <div style="margin-top: 1em; text-align: center;">
      <button id="show-hide">Show/hide</button>
      <button id="pause-play">Pause/play</button>
    </div>
    <div style="margin-top: 1em; text-align: center;">
      FPS: <span id="fps-count">…</span>
    </div>
    <script>
      // Load an image, as a Promise.
      function loadImage(src) {
        return new Promise((resolve, reject) => {
          const image = new Image();
          image.onload = () => resolve(image);
          image.onerror = (e) => reject(e);
          image.src = src;
        });
      }

      // Route an insecure Neopets asset URL through our HTTPS asset proxy.
      function proxyUrl(url) {
        return "/api/assetProxy?url=" + encodeURIComponent(url);
      }

      async function main() {
        const composition = Object.values(AdobeAn.compositions)[0];
        const library = composition.getLibrary();

        // Kick off all manifest image loads in parallel, keyed by sprite id.
        const manifestImages = new Map(
          library.properties.manifest.map(({ id, src }) => [
            id,
            loadImage(
              proxyUrl(
                "http://images.neopets.com/cp/items/data/000/000/564/564507_fc3216b9b8/" +
                  src
              )
            ),
          ])
        );

        try {
          await Promise.all(manifestImages.values());
        } catch (e) {
          console.error("Error loading images", e);
          return;
        }

        const spriteSheets = composition.getSpriteSheet();
        for (const { name, frames } of library.ssMetadata) {
          const image = await manifestImages.get(name);
          spriteSheets[name] = new createjs.SpriteSheet({
            images: [image],
            frames,
          });
        }

        const movieClip = new library.allitem_foreground_lower();

        // Render at the device's native resolution, scaled to fit the canvas.
        const canvas = document.getElementById("stage-canvas");
        const stage = new library.Stage(canvas);
        canvas.width = canvas.offsetWidth * window.devicePixelRatio;
        canvas.height = canvas.offsetHeight * window.devicePixelRatio;
        stage.scaleX =
          (canvas.offsetWidth * window.devicePixelRatio) /
          library.properties.width;
        stage.scaleY =
          (canvas.offsetHeight * window.devicePixelRatio) /
          library.properties.height;

        // Fade-in experiment, currently disabled. The show/hide button below
        // guards for this being off.
        // movieClip.alpha = 0;
        // const tween = createjs.Tween.get(movieClip, { paused: true }).to(
        //   { alpha: 1 },
        //   200
        // );
        // stage.on(
        //   "drawend",
        //   () => {
        //     tween.paused = false;
        //   },
        //   null,
        //   true
        // );

        // TODO: I'm not 100% clear on why, but manually caching the movie and
        //       manually updating the cache at a 60FPS rate (that's how often
        //       the tick fires, regardless of movie framerate) seems to
        //       substantially improve performance of things like fade-in. I
        //       think it might just be perceived performance, because the
        //       alpha applies to a cached raster instead of the individual
        //       layers, so it looks better? Although hell, maybe applying
        //       alpha to a cached raster just _is_ faster than applying it to
        //       like 200 overlapping layers, that would just make sense...
        // movieClip.cache(
        //   0,
        //   0,
        //   library.properties.width,
        //   library.properties.height
        // );
        // movieClip.on("tick", () => movieClip.updateCache());

        stage.addChild(movieClip);

        movieClip.framerate = library.properties.fps;
        createjs.Ticker.timingMode = createjs.Ticker.RAF;
        createjs.Ticker.on("tick", (e) => stage.update(e));

        // Fix: the FPS counter in the page was never updated before.
        const fpsCount = document.getElementById("fps-count");
        createjs.Ticker.on("tick", () => {
          fpsCount.textContent = Math.round(createjs.Ticker.getMeasuredFPS());
        });

        document.getElementById("show-hide").addEventListener("click", () => {
          // Fix: `tween` is only created in the commented-out fade-in
          // experiment above, so referencing it unguarded threw a
          // ReferenceError on every click.
          if (typeof tween === "undefined") {
            console.warn("show-hide: fade tween is disabled; see commented-out code");
            return;
          }
          tween.reversed = !tween.reversed;
          tween.setPosition(0);
          tween.paused = false;
        });

        document.getElementById("pause-play").addEventListener("click", () => {
          movieClip.tickEnabled = !movieClip.tickEnabled;
        });
      }

      // Surface any unexpected failure instead of leaving a floating Promise.
      main().catch((e) => console.error("Error playing movie", e));
    </script>
  </body>
</html>
|
|
@ -15,6 +15,7 @@ import {
|
|||
} from "@chakra-ui/core";
|
||||
import { ExternalLinkIcon } from "@chakra-ui/icons";
|
||||
|
||||
import { safeImageUrl } from "../../util";
|
||||
import useSupport from "./useSupport";
|
||||
|
||||
/**
|
||||
|
@ -412,7 +413,7 @@ function ItemLayerSupportFlashPlayer({ swfUrl, backgroundColor }) {
|
|||
>
|
||||
<object
|
||||
type="application/x-shockwave-flash"
|
||||
data={`/api/assetProxy?url=${encodeURIComponent(swfUrl)}`}
|
||||
data={safeImageUrl(swfUrl)}
|
||||
width="100%"
|
||||
height="100%"
|
||||
>
|
||||
|
|
|
@ -64,16 +64,26 @@ export function Heading2({ children, ...props }) {
|
|||
/**
|
||||
* safeImageUrl returns an HTTPS-safe image URL for Neopets assets!
|
||||
*/
|
||||
export function safeImageUrl(url) {
|
||||
let safeUrl = `/api/assetProxy?url=${encodeURIComponent(url)}`;
|
||||
/**
 * safeImageUrl returns an HTTPS-safe image URL for Neopets assets!
 *
 * Known insecure Neopets hosts are rewritten to our HTTPS CDN proxy; any
 * other non-HTTPS URL is returned as-is, with a console warning.
 */
export function safeImageUrl(urlString) {
  const url = new URL(urlString);

  // Map each insecure Neopets origin to its HTTPS proxy hostname.
  const proxyHostsByOrigin = {
    "http://images.neopets.com": "images.neopets-asset-proxy.openneo.net",
    "http://pets.neopets.com": "pets.neopets-asset-proxy.openneo.net",
  };
  const proxyHost = proxyHostsByOrigin[url.origin];
  if (proxyHost != null) {
    url.protocol = "https:";
    url.host = proxyHost;
  }

  if (url.protocol !== "https:") {
    console.warn(
      "safeImageUrl was provided an unsafe URL, but we don't know how to " +
        "upgrade it to HTTPS. Returning as-is: " +
        urlString
    );
  }

  return url.toString();
}
|
||||
|
||||
/**
|
||||
|
|
Loading…
Reference in a new issue