/**
 * /api/assetImage renders a canvas movie to PNG! To do this, we use a headless
 * Chromium browser, which renders a special page in the webapp and screenshots
 * the displayed canvas.
 *
 * This is, of course, a relatively heavyweight operation: it's always gonna be
 * a bit slow, and consume significant RAM. So, caching is going to be
 * important, so that we're not calling this all the time and overloading the
 * endpoint!
 *
 * Parameters:
 *   - libraryUrl: An https://images.neopets.com/ URL to a JS movie library
 *   - size: 600, 300, or 150. Determines the output image size.
 */
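
// For illustration, a client might call this endpoint roughly like this. (Just
// a sketch: the libraryUrl below is a made-up placeholder, not a real asset,
// and the host assumes the production deployment.)
//
//   const imageUrl = new URL("https://impress-2020.openneo.net/api/assetImage");
//   imageUrl.search = new URLSearchParams({
//     libraryUrl: "https://images.neopets.com/cp/items/data/.../example_movie.js",
//     size: "600",
//   }).toString();
//   const res = await fetch(imageUrl.toString());
//   const imageBlob = await res.blob(); // a PNG of the rendered canvas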

const beeline = require("honeycomb-beeline")({
  writeKey: process.env["HONEYCOMB_WRITE_KEY"],
  dataset:
    process.env["NODE_ENV"] === "production"
      ? "Dress to Impress (2020)"
      : "Dress to Impress (2020, dev)",
  serviceName: "impress-2020-gql-server",
  disableInstrumentationOnLoad: true,
});

const playwright = require("playwright");

// To render the image, we load the /internal/assetImage page in the web app,
// a simple page specifically designed for this API endpoint!
const ASSET_IMAGE_PAGE_BASE_URL = process.env.VERCEL_URL
  ? `https://${process.env.VERCEL_URL}/internal/assetImage`
  : process.env.NODE_ENV === "development"
  ? "http://localhost:3000/internal/assetImage"
  : "https://impress-2020.openneo.net/internal/assetImage";

// We share one browser instance, but create a new independent "context" for
// each request, as a security hedge. (The intent is for the user to request
// very little from the browser, so it shouldn't matter, but it's just an extra
// layer to reduce the risk of what an attack could do!)
//
// TODO: We're probably going to need to limit the number of concurrent browser
//       sessions here, right? I don't actually know how the Next.js server
//       handles concurrency though, let's pressure-test and find out before
//       building a solution.
let SHARED_BROWSER = null;
async function getBrowserContext() {
  if (SHARED_BROWSER == null) {
    SHARED_BROWSER = await playwright.chromium.launch({ headless: true });
  }
  return await SHARED_BROWSER.newContext();
}
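
// If that pressure-testing does show we need a cap, one hypothetical shape for
// it is a tiny semaphore around context usage, sketched below. (Not wired in;
// `MAX_CONCURRENT_CONTEXTS` and `withBrowserContext` are made-up names for
// illustration, not part of the current code.)
//
//   const MAX_CONCURRENT_CONTEXTS = 4;
//   let activeContexts = 0;
//   const contextWaiters = [];
//   async function withBrowserContext(callback) {
//     // Wait for a free slot. Re-check after waking, in case another caller
//     // claimed the slot first.
//     while (activeContexts >= MAX_CONCURRENT_CONTEXTS) {
//       await new Promise((resolve) => contextWaiters.push(resolve));
//     }
//     activeContexts++;
//     const context = await getBrowserContext();
//     try {
//       return await callback(context);
//     } finally {
//       await context.close();
//       activeContexts--;
//       const wakeNext = contextWaiters.shift();
//       if (wakeNext) wakeNext();
//     }
//   }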

async function handle(req, res) {
  const { libraryUrl, size } = req.query;
  if (!libraryUrl) {
    return reject(res, "libraryUrl is required");
  }

  if (!isNeopetsUrl(libraryUrl)) {
    return reject(
      res,
      `libraryUrl must be an HTTPS Neopets URL, but was: ${libraryUrl}`
    );
  }

  if (size !== "600" && size !== "300" && size !== "150") {
    return reject(res, `size must be 600, 300, or 150, but was: ${size}`);
  }

  let imageBuffer;
  try {
    imageBuffer = await loadAndScreenshotImage(libraryUrl, size);
  } catch (e) {
    console.error(e);
    return reject(res, `Could not load image: ${e.message}`, 500);
  }

  // TODO: Compress the image?

  // Send a long-term cache header, to avoid running this any more than we have
  // to! If we make a big change, we'll flush the cache or add a version param.
  res.setHeader("Cache-Control", "public, max-age=31536000, immutable");
  res.setHeader("Content-Type", "image/png");
  return res.send(imageBuffer);
}

async function loadAndScreenshotImage(libraryUrl, size) {
  const assetImagePageUrl = new URL(ASSET_IMAGE_PAGE_BASE_URL);
  assetImagePageUrl.search = new URLSearchParams({
    libraryUrl,
    size,
  }).toString();

  console.debug("Opening browser page");
  const context = await getBrowserContext();
  const page = await context.newPage();
  console.debug("Page opened, navigating to: " + assetImagePageUrl.toString());

  try {
    await page.goto(assetImagePageUrl.toString());
    console.debug("Page loaded, awaiting image");

    // Start looking for the loaded canvas, *and* for an error message.
    // When either one displays, we proceed, either by returning the image if
    // present, or raising the error if present.
    const imageBufferPromise = screenshotImageFromPage(page);
    const errorMessagePromise = readErrorMessageFromPage(page);
    const firstResultFromPage = await Promise.any([
      imageBufferPromise.then((imageBuffer) => ({ imageBuffer })),
      errorMessagePromise.then((errorMessage) => ({ errorMessage })),
    ]);

    if (firstResultFromPage.errorMessage) {
      throw new Error(firstResultFromPage.errorMessage);
    } else if (firstResultFromPage.imageBuffer) {
      return firstResultFromPage.imageBuffer;
    } else {
      throw new Error(
        `Assertion error: Promise.any did not return an errorMessage or an imageBuffer: ` +
          `${JSON.stringify(Object.keys(firstResultFromPage))}`
      );
    }
  } finally {
    // Tear down our resources when we're done! If it fails, log the error, but
    // don't block the success of the image. (We deliberately *await* these
    // closes before finishing the request: if the request ends while Playwright
    // is still mid-teardown, the browser might not get cleaned up, which looked
    // like a memory leak in practice.)
    try {
      await page.close();
    } catch (e) {
      console.warn("Error closing page after image finished", e);
    }
    try {
      await context.close();
    } catch (e) {
      console.warn("Error closing browser after image finished", e);
    }
  }
}

async function screenshotImageFromPage(page) {
  await page.waitForSelector("#asset-image-canvas[data-is-loaded=true]", {
    timeout: 10000,
  });
  const canvas = await page.$("#asset-image-canvas[data-is-loaded=true]");
  console.debug("Image loaded, taking screenshot");

  const imageBuffer = await canvas.screenshot({
    omitBackground: true,
  });
  console.debug(`Screenshot captured, size: ${imageBuffer.length}`);

  return imageBuffer;
}

async function readErrorMessageFromPage(page) {
  await page.waitForSelector("#asset-image-error-message", {
    timeout: 10000,
  });
  const errorMessageContainer = await page.$("#asset-image-error-message");
  const errorMessage = await errorMessageContainer.innerText();
  return errorMessage;
}
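
// (For reference, these two helpers assume the /internal/assetImage page
// exposes roughly this contract; the page's actual markup lives in the web
// app, so this is just an illustrative sketch:
//
//   <canvas id="asset-image-canvas" data-is-loaded="true"></canvas>  on success
//   <div id="asset-image-error-message">...</div>                    on failure
// )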

function isNeopetsUrl(urlString) {
  let url;
  try {
    url = new URL(urlString);
  } catch (e) {
    return false;
  }

  return url.origin === "https://images.neopets.com";
}

function reject(res, message, status = 400) {
  res.setHeader("Content-Type", "text/plain; charset=utf-8");
  return res.status(status).send(message);
}

// Polyfill Promise.any for older Node: https://github.com/ungap/promise-any
// (Minified: resolves with the first fulfilled value, or rejects with
// `{ errors }` once every input promise has rejected.)
Promise.any =
  Promise.any ||
  function ($) {
    return new Promise(function (D, E, A, L) {
      A = [];
      L = $.map(function ($, i) {
        return Promise.resolve($).then(D, function (O) {
          return ((A[i] = O), --L) || E({ errors: A });
        });
      }).length;
    });
  };

async function handleWithBeeline(req, res) {
  await beeline.withTrace(
    { name: "api/assetImage", operation_name: "api/assetImage" },
    () => handle(req, res)
  );
}

export default handleWithBeeline;