From 5470a49651af8c90a5ced885caff76ebf30b08dd Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 21:17:20 -0800 Subject: [PATCH 01/17] Use utf8 in API error messages I noticed this when Playwright was trying to draw cute ASCII art and it wasn't showing up right! Not a big deal, but it's a bit more correct to do this, so let's do it! --- pages/api/allWakaValues.js | 2 +- pages/api/assetImage.js | 2 +- pages/api/outfitImage.js | 2 +- pages/api/outfitPageSSR.js | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/pages/api/allWakaValues.js b/pages/api/allWakaValues.js index 1b4dcc6..677d61a 100644 --- a/pages/api/allWakaValues.js +++ b/pages/api/allWakaValues.js @@ -20,7 +20,7 @@ async function handle(req, res) { itemValuesByIdOrName = await loadWakaValuesByIdOrName(); } catch (e) { console.error(e); - res.setHeader("Content-Type", "text/plain"); + res.setHeader("Content-Type", "text/plain; charset=utf8"); res.status(500).send("Error loading Waka data from Google Sheets API"); return; } diff --git a/pages/api/assetImage.js b/pages/api/assetImage.js index ab05e94..1a13c08 100644 --- a/pages/api/assetImage.js +++ b/pages/api/assetImage.js @@ -173,7 +173,7 @@ function isNeopetsUrl(urlString) { } function reject(res, message, status = 400) { - res.setHeader("Content-Type", "text/plain"); + res.setHeader("Content-Type", "text/plain; charset=utf8"); return res.status(status).send(message); } diff --git a/pages/api/outfitImage.js b/pages/api/outfitImage.js index 4904757..5e67f00 100644 --- a/pages/api/outfitImage.js +++ b/pages/api/outfitImage.js @@ -225,7 +225,7 @@ async function loadUpdatedAtForSavedOutfit(outfitId) { } function reject(res, message, status = 400) { - res.setHeader("Content-Type", "text/plain"); + res.setHeader("Content-Type", "text/plain; charset=utf8"); return res.status(status).send(message); } diff --git a/pages/api/outfitPageSSR.js b/pages/api/outfitPageSSR.js index d34bd74..712c2f7 100644 --- a/pages/api/outfitPageSSR.js +++ b/pages/api/outfitPageSSR.js @@ -132,7 +132,7 @@ async function loadIndexPageHtml() { } function reject(res, message, status = 400) { - res.setHeader("Content-Type", "text/plain"); + res.setHeader("Content-Type", "text/plain; charset=utf8"); return res.status(status).send(message); } From 9753cbe1735ff2d815ac1b899ed9419ebafbc8ed Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 21:20:48 -0800 Subject: [PATCH 02/17] /api/assetImage fixes in production Now that we're not on Vercel's AWS Lambda deployment, we can switch to something a bit more standard! I also tweaked up our version of Playwright, because, hey, why not? Getting the package list was a bit tricky, but we got there! Left a comment to explain where it's from. --- deploy/playbooks/setup.yml | 49 ++++++++++++++++++++++++++++++ package.json | 3 +- pages/api/assetImage.js | 40 ++++++++++++------------- yarn.lock | 61 ++++++++++++++++++++++---------------- 4 files changed, 105 insertions(+), 48 deletions(-) diff --git a/deploy/playbooks/setup.yml b/deploy/playbooks/setup.yml index 9279d5b..e2c8196 100644 --- a/deploy/playbooks/setup.yml +++ b/deploy/playbooks/setup.yml @@ -267,6 +267,55 @@ - libgif-dev - librsvg2-dev + - name: Install Playwright system dependencies + # NOTE: I copied the package list from the source list for + # `npx playwright install-deps`, which I couldn't get running in + # Ansible as root, and besides, I prefer manually managing the + # package list over running an npm script as root! 
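+      #       (This list targets the Ubuntu release we deploy on, and matches
+      #       Playwright ~1.16; worth re-checking it whenever we bump the
+      #       Playwright version, since upstream adds dependencies over time.)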
+ become: yes + apt: + update_cache: yes + name: + # Tools + - xvfb + - fonts-noto-color-emoji + - ttf-unifont + - libfontconfig + - libfreetype6 + - xfonts-cyrillic + - xfonts-scalable + - fonts-liberation + - fonts-ipafont-gothic + - fonts-wqy-zenhei + - fonts-tlwg-loma-otf + - ttf-ubuntu-font-family + # Chromium + - fonts-liberation + - libasound2 + - libatk-bridge2.0-0 + - libatk1.0-0 + - libatspi2.0-0 + - libcairo2 + - libcups2 + - libdbus-1-3 + - libdrm2 + - libegl1 + - libgbm1 + - libglib2.0-0 + - libgtk-3-0 + - libnspr4 + - libnss3 + - libpango-1.0-0 + - libx11-6 + - libx11-xcb1 + - libxcb1 + - libxcomposite1 + - libxdamage1 + - libxext6 + - libxfixes3 + - libxrandr2 + - libxshmfence1 + handlers: - name: Restart nginx become: yes diff --git a/package.json b/package.json index b0dea43..28078d6 100644 --- a/package.json +++ b/package.json @@ -43,7 +43,7 @@ "mysql2": "^2.1.0", "next": "12.0.2", "node-fetch": "^2.6.0", - "playwright-core": "^1.14.0", + "playwright": "^1.16.3", "react": "^17.0.1", "react-autosuggest": "^10.0.2", "react-dom": "^17.0.1", @@ -112,7 +112,6 @@ "inquirer": "^7.3.3", "jest-image-snapshot": "^4.3.0", "lint-staged": "^10.5.4", - "playwright": "^1.14.0", "prettier": "^2.0.5", "react-is": "^16.13.1", "ts-node": "^9.1.1", diff --git a/pages/api/assetImage.js b/pages/api/assetImage.js index 1a13c08..0c285ea 100644 --- a/pages/api/assetImage.js +++ b/pages/api/assetImage.js @@ -22,6 +22,8 @@ const beeline = require("honeycomb-beeline")({ disableInstrumentationOnLoad: true, }); +const playwright = require("playwright"); + // To render the image, we load the /internal/assetImage page in the web app, // a simple page specifically designed for this API endpoint! const ASSET_IMAGE_PAGE_BASE_URL = process.env.VERCEL_URL @@ -30,25 +32,21 @@ const ASSET_IMAGE_PAGE_BASE_URL = process.env.VERCEL_URL ? "http://localhost:3000/internal/assetImage" : "https://impress-2020.openneo.net/internal/assetImage"; -// TODO: We used to share a browser instamce, but we couldn't get it to reload -// correctly after accidental closes, so we're just gonna always load a -// new one now. What are the perf implications of this? Does it slow down -// response time substantially? -async function getBrowser() { - if (process.env["NODE_ENV"] === "production") { - // In production, we use a special chrome-aws-lambda Chromium. - const chromium = require("chrome-aws-lambda"); - const playwright = require("playwright-core"); - return await playwright.chromium.launch({ - args: chromium.args, - executablePath: await chromium.executablePath, - headless: true, - }); - } else { - // In development, we use the standard playwright Chromium. - const playwright = require("playwright"); - return await playwright.chromium.launch({ headless: true }); +// We share one browser instance, but create a new independent "context" for +// each request, as a security hedge. (The intent is for the user to request +// very little from the browser, so it shouldn't matter, but it's just an extra +// layer to reduce the risk of what an attack could do!) +// +// TODO: We're probably going to need to limit the number of concurrent browser +// sessions here, right? I don't actually know how the Next.js server +// handles concurrency though, let's pressure-test and find out before +// building a solution. 
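+// (In Playwright, a "context" is an isolated, incognito-style session: it
+// shares the browser process, but not cookies, cache, or storage. That's
+// what makes it a cheap isolation layer per request.)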
+let SHARED_BROWSER = null; +async function getBrowserContext() { + if (SHARED_BROWSER == null) { + SHARED_BROWSER = await playwright.chromium.launch({ headless: true }); } + return await SHARED_BROWSER.newContext(); } async function handle(req, res) { @@ -93,8 +91,8 @@ async function loadAndScreenshotImage(libraryUrl, size) { }).toString(); console.debug("Opening browser page"); - const browser = await getBrowser(); - const page = await browser.newPage(); + const context = await getBrowserContext(); + const page = await context.newPage(); console.debug("Page opened, navigating to: " + assetImagePageUrl.toString()); try { @@ -130,7 +128,7 @@ async function loadAndScreenshotImage(libraryUrl, size) { console.warn("Error closing page after image finished", e); } try { - await browser.close(); + await context.close(); } catch (e) { console.warn("Error closing browser after image finished", e); } diff --git a/yarn.lock b/yarn.lock index 5277daf..ae5a791 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3523,7 +3523,7 @@ agent-base@4, agent-base@^4.2.0, agent-base@^4.3.0: dependencies: es6-promisify "^5.0.0" -agent-base@6: +agent-base@6, agent-base@^6.0.2: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== @@ -4961,11 +4961,16 @@ commander@^5.1.0: resolved "https://registry.yarnpkg.com/commander/-/commander-5.1.0.tgz#46abbd1652f8e059bddaef99bbdcb2ad9cf179ae" integrity sha512-P0CysNDQ7rtVw4QIQtm+MRxV66vKFSvlsQvGYXZWR3qFU0jlMKHZZZgw8e+8DSah4UDKMqnknRDQz+xuQXQ/Zg== -commander@^6.1.0, commander@^6.2.0: +commander@^6.2.0: version "6.2.1" resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== +commander@^8.2.0: + version "8.3.0" + resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" + integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== + common-tags@^1.8.0: version "1.8.0" resolved "https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" @@ -9675,12 +9680,12 @@ platform@1.3.6: resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7" integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg== -playwright-core@^1.14.0: - version "1.14.0" - resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.14.0.tgz#af51da7b201c11eeda780e2db3f05c8bca74c8be" - integrity sha512-n6NdknezSfRgB6LkLwcrbm5orRQZSpbd8LZmlc4YrIXV0VEvJr5tzP3xlHXpiFBfTr3yoFuagldI3T7bD/8H3w== +playwright-core@=1.16.3: + version "1.16.3" + resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.16.3.tgz#f466be9acaffb698654adfb0a17a4906ba936895" + integrity sha512-16hF27IvQheJee+DbhC941AUZLjbJgfZFWi9YPS4LKEk/lKFhZI+9TiFD0sboYqb9eaEWvul47uR5xxTVbE4iw== dependencies: - commander "^6.1.0" + commander "^8.2.0" debug "^4.1.1" extract-zip "^2.0.1" https-proxy-agent "^5.0.0" @@ -9691,29 +9696,18 @@ playwright-core@^1.14.0: proper-lockfile "^4.1.1" proxy-from-env "^1.1.0" rimraf "^3.0.2" + socks-proxy-agent "^6.1.0" stack-utils "^2.0.3" ws "^7.4.6" + yauzl "^2.10.0" yazl "^2.5.1" -playwright@^1.14.0: - version "1.14.0" - 
resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.14.0.tgz#18301b11f5278a446d36b5cf96f67db36ce2cd20" - integrity sha512-aR5oZ1iVsjQkGfYCjgYAmyMAVu0MQ0i8MgdnfdqDu9EVLfbnpuuFmTv/Rb7/Yjno1kOrDUP9+RyNC+zfG3wozA== +playwright@^1.16.3: + version "1.16.3" + resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.16.3.tgz#27a292d9fa54fbac923998d3af58cd2b691f5ebe" + integrity sha512-nfJx/OpIb/8OexL3rYGxNN687hGyaM3XNpfuMzoPlrekURItyuiHHsNhC9oQCx3JDmCn5O3EyyyFCnrZjH6MpA== dependencies: - commander "^6.1.0" - debug "^4.1.1" - extract-zip "^2.0.1" - https-proxy-agent "^5.0.0" - jpeg-js "^0.4.2" - mime "^2.4.6" - pngjs "^5.0.0" - progress "^2.0.3" - proper-lockfile "^4.1.1" - proxy-from-env "^1.1.0" - rimraf "^3.0.2" - stack-utils "^2.0.3" - ws "^7.4.6" - yazl "^2.5.1" + playwright-core "=1.16.3" please-upgrade-node@^3.2.0: version "3.2.0" @@ -10928,6 +10922,23 @@ socks-proxy-agent@^4.0.1: agent-base "~4.2.1" socks "~2.3.2" +socks-proxy-agent@^6.1.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.1.0.tgz#869cf2d7bd10fea96c7ad3111e81726855e285c3" + integrity sha512-57e7lwCN4Tzt3mXz25VxOErJKXlPfXmkMLnk310v/jwW20jWRVcgsOit+xNkN3eIEdB47GwnfAEBLacZ/wVIKg== + dependencies: + agent-base "^6.0.2" + debug "^4.3.1" + socks "^2.6.1" + +socks@^2.6.1: + version "2.6.1" + resolved "https://registry.yarnpkg.com/socks/-/socks-2.6.1.tgz#989e6534a07cf337deb1b1c94aaa44296520d30e" + integrity sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA== + dependencies: + ip "^1.1.5" + smart-buffer "^4.1.0" + socks@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/socks/-/socks-2.3.3.tgz#01129f0a5d534d2b897712ed8aceab7ee65d78e3" From eaadfd09efecec121eb804d932c43d9fe6b6360f Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 21:41:17 -0800 Subject: [PATCH 03/17] Delete outfitPageSSR Oh right, we implemented this with Next.js SSR in `/pages/outfits/[id].js`, so we don't need this anymore! --- pages/api/outfitPageSSR.js | 151 ------------------------------------- 1 file changed, 151 deletions(-) delete mode 100644 pages/api/outfitPageSSR.js diff --git a/pages/api/outfitPageSSR.js b/pages/api/outfitPageSSR.js deleted file mode 100644 index 712c2f7..0000000 --- a/pages/api/outfitPageSSR.js +++ /dev/null @@ -1,151 +0,0 @@ -/** - * /api/outfitPageSSR also serves the initial request for /outfits/:id, to - * add title and meta tags. This primarily for sharing, like on Discord or - * Twitter or Facebook! - * - * The route is configured in vercel.json, at the project root. - * - * To be honest, we probably should have built Impress 2020 on Next.js, and - * then we'd be getting realistic server-side rendering across practically the - * whole app very cheaply. But this is a good hack for what we have! - * - * TODO: We could add the basic outfit page layout and image preview, to use - * SSR to decrease time-to-first-content for the end-user, too… - */ -const beeline = require("honeycomb-beeline")({ - writeKey: process.env["HONEYCOMB_WRITE_KEY"], - dataset: - process.env["NODE_ENV"] === "production" - ? "Dress to Impress (2020)" - : "Dress to Impress (2020, dev)", - serviceName: "impress-2020-gql-server", - disableInstrumentationOnLoad: true, -}); - -import escapeHtml from "escape-html"; -import fetch from "node-fetch"; - -import connectToDb from "../../src/server/db"; -import { normalizeRow } from "../../src/server/util"; - -async function handle(req, res) { - // Load index.html as our initial page content. 
If this fails, it probably - // means something is misconfigured in a big way; we don't have a great way - // to recover, and we'll just show an error message. - let initialHtml; - try { - initialHtml = await loadIndexPageHtml(); - } catch (e) { - console.error("Error loading index.html:", e); - return reject(res, "Sorry, there was an error loading this outfit page!"); - } - - // Load the given outfit by ID. If this fails, it's possible that it's just a - // problem with the SSR, and the client will be able to handle it better - // anyway, so just show the standard index.html and let the app load - // normally, as if there was no error. (We'll just log it.) - let outfit; - try { - outfit = await loadOutfitData(req.query.id); - } catch (e) { - console.error("Error loading outfit data:", e); - return sendHtml(res, initialHtml, 200); - } - - // Similarly, if the outfit isn't found, we just show index.html - but with a - // 404 and a gentler log message. - if (outfit == null) { - console.info(`Outfit not found: ${req.query.id}`); - return sendHtml(res, initialHtml, 404); - } - - const outfitName = outfit.name || "Untitled outfit"; - - // Okay, now let's rewrite the HTML to include some outfit data! - // - // WARNING!!! - // Be sure to always use `escapeHtml` when inserting user data!! - // WARNING!!! - // - let html = initialHtml; - - // Add the outfit name to the title. - html = html.replace( - /(.*)<\/title>/, - `<title>${escapeHtml(outfitName)} | Dress to Impress` - ); - - // Add sharing meta tags just before the tag. - const updatedAtTimestamp = Math.floor( - new Date(outfit.updatedAt).getTime() / 1000 - ); - const outfitUrl = - `https://impress-2020.openneo.net/outfits` + - `/${encodeURIComponent(outfit.id)}`; - const imageUrl = - `https://impress-outfit-images.openneo.net/outfits` + - `/${encodeURIComponent(outfit.id)}` + - `/v/${encodeURIComponent(updatedAtTimestamp)}` + - `/600.png`; - const metaTags = ` - - - - - - - `; - html = html.replace(/<\/head>/, `${metaTags}`); - - console.info(`Successfully SSR'd outfit ${outfit.id}`); - - return sendHtml(res, html); -} - -async function loadOutfitData(id) { - const db = await connectToDb(); - const [rows] = await db.query(`SELECT * FROM outfits WHERE id = ?;`, [id]); - if (rows.length === 0) { - return null; - } - - return normalizeRow(rows[0]); -} - -let cachedIndexPageHtml = null; -async function loadIndexPageHtml() { - if (cachedIndexPageHtml == null) { - // Request the same built copy of index.html that we're already serving at - // our homepage. - const homepageUrl = process.env.VERCEL_URL - ? `https://${process.env.VERCEL_URL}/` - : process.env.NODE_ENV === "development" - ? 
"http://localhost:3000/" - : "https://impress-2020.openneo.net/"; - const liveIndexPageHtml = await fetch(homepageUrl).then((res) => - res.text() - ); - cachedIndexPageHtml = liveIndexPageHtml; - } - - return cachedIndexPageHtml; -} - -function reject(res, message, status = 400) { - res.setHeader("Content-Type", "text/plain; charset=utf8"); - return res.status(status).send(message); -} - -function sendHtml(res, html, status = 200) { - res.setHeader("Content-Type", "text/html"); - return res.status(status).send(html); -} - -async function handleWithBeeline(req, res) { - beeline.withTrace( - { name: "api/outfitPageSSR", operation_name: "api/outfitPageSSR" }, - () => handle(req, res) - ); -} - -export default handleWithBeeline; From afd23fb4dd2182f767a5225876fa42ad2e21ac32 Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 21:52:14 -0800 Subject: [PATCH 04/17] Bump version of graphql This was actually kinda accidental, I thought I could uninstall it but then realized I couldn't. Anyway, it's updated now! --- yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn.lock b/yarn.lock index ae5a791..4f3013e 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7060,9 +7060,9 @@ graphql@^14.5.3: iterall "^1.2.2" graphql@^15.5.0: - version "15.5.0" - resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.5.0.tgz#39d19494dbe69d1ea719915b578bf920344a69d5" - integrity sha512-OmaM7y0kaK31NKG31q4YbD2beNYa6jBBKtMFT6gLYJljHLJr42IqJ8KX08u3Li/0ifzTU5HjmoOOrwa5BRLeDA== + version "15.7.2" + resolved "https://registry.yarnpkg.com/graphql/-/graphql-15.7.2.tgz#85ab0eeb83722977151b3feb4d631b5f2ab287ef" + integrity sha512-AnnKk7hFQFmU/2I9YSQf3xw44ctnSFCfp3zE0N6W174gqe9fWG/2rKaKxROK7CcI3XtERpjEKFqts8o319Kf7A== gud@^1.0.0: version "1.0.0" From 991defffa1996dfd74d8b8241a7124e7f060985a Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 21:53:07 -0800 Subject: [PATCH 05/17] /api/outfitImage makes direct GQL queries Previously we were using HTTP queries to keep individual function bundle sizes small, but that doesn't matter in a server where all the code is shared! The immediate motivation is that I want /api/outfitImage requesting against the same server, not impress-2020.openneo.net. For other stuff I'm probably gonna fix this by replacing VERCEL_URL with something else, but here I figured this was a change worth making anyway. --- pages/api/outfitImage.js | 74 ++++++++++++++++------------------------ 1 file changed, 30 insertions(+), 44 deletions(-) diff --git a/pages/api/outfitImage.js b/pages/api/outfitImage.js index 5e67f00..d1386b1 100644 --- a/pages/api/outfitImage.js +++ b/pages/api/outfitImage.js @@ -35,11 +35,12 @@ const beeline = require("honeycomb-beeline")({ sampleRate: 10, }); -import fetch from "node-fetch"; import gql from "graphql-tag"; -import { print as graphqlPrint } from "graphql/language/printer"; +import { ApolloServer } from "apollo-server"; +import { createTestClient } from "apollo-server-testing"; import connectToDb from "../../src/server/db"; +import { config as graphqlConfig } from "../../src/server"; import { renderOutfitImage } from "../../src/server/outfit-images"; import getVisibleLayers, { petAppearanceFragmentForGetVisibleLayers, @@ -143,50 +144,35 @@ async function handle(req, res) { return res.send(image); } -const GRAPHQL_ENDPOINT = process.env.VERCEL_URL - ? `https://${process.env.VERCEL_URL}/api/graphql` - : process.env.NODE_ENV === "development" - ? 
"http://localhost:3000/api/graphql" - : "https://impress-2020.openneo.net/api/graphql"; - -// NOTE: Unlike in-app views, we only load PNGs here. We expect this to -// generally perform better, and be pretty reliable now that TNT is -// generating canonical PNGs for every layer! -const GRAPHQL_QUERY = gql` - query ApiOutfitImage($outfitId: ID!, $size: LayerImageSize) { - outfit(id: $outfitId) { - petAppearance { - layers { - id - imageUrl(size: $size) - } - ...PetAppearanceForGetVisibleLayers - } - itemAppearances { - layers { - id - imageUrl(size: $size) - } - ...ItemAppearanceForGetVisibleLayers - } - } - } - ${petAppearanceFragmentForGetVisibleLayers} - ${itemAppearanceFragmentForGetVisibleLayers} -`; -const GRAPHQL_QUERY_STRING = graphqlPrint(GRAPHQL_QUERY); +// Check out this scrappy way of making a query against server code ^_^` +const graphqlClient = createTestClient(new ApolloServer(graphqlConfig)); async function loadLayerUrlsForSavedOutfit(outfitId, size) { - const { errors, data } = await fetch(GRAPHQL_ENDPOINT, { - method: "POST", - headers: { - "Content-Type": "application/json", - }, - body: JSON.stringify({ - query: GRAPHQL_QUERY_STRING, - variables: { outfitId, size: `SIZE_${size}` }, - }), - }).then((res) => res.json()); + const { errors, data } = await graphqlClient.query({ + query: gql` + query ApiOutfitImage($outfitId: ID!, $size: LayerImageSize) { + outfit(id: $outfitId) { + petAppearance { + layers { + id + imageUrl(size: $size) + } + ...PetAppearanceForGetVisibleLayers + } + itemAppearances { + layers { + id + imageUrl(size: $size) + } + ...ItemAppearanceForGetVisibleLayers + } + } + } + ${petAppearanceFragmentForGetVisibleLayers} + ${itemAppearanceFragmentForGetVisibleLayers} + `, + variables: { outfitId, size: `SIZE_${size}` }, + }); if (errors && errors.length > 0) { throw new Error( From 0a81f078490f1e3bf9b9e97cc57b5242e585947b Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 22:06:50 -0800 Subject: [PATCH 06/17] Remove Waka values The motivation is that I want VERCEL_URL and local net requests outta here :p and we were doing some cutesiness with leveraging the CDN cache to back the GQL fields. No more of that, folks! lol --- pages/api/allWakaValues.js | 128 +++---------------------------------- src/server/loaders.js | 26 -------- src/server/types/Item.js | 19 ++---- 3 files changed, 13 insertions(+), 160 deletions(-) diff --git a/pages/api/allWakaValues.js b/pages/api/allWakaValues.js index 677d61a..5894eeb 100644 --- a/pages/api/allWakaValues.js +++ b/pages/api/allWakaValues.js @@ -8,127 +8,15 @@ const beeline = require("honeycomb-beeline")({ disableInstrumentationOnLoad: true, }); -import fetch from "node-fetch"; - -import connectToDb from "../../src/server/db"; - async function handle(req, res) { - const allNcItemNamesAndIdsPromise = loadAllNcItemNamesAndIds(); - - let itemValuesByIdOrName; - try { - itemValuesByIdOrName = await loadWakaValuesByIdOrName(); - } catch (e) { - console.error(e); - res.setHeader("Content-Type", "text/plain; charset=utf8"); - res.status(500).send("Error loading Waka data from Google Sheets API"); - return; - } - - // Restructure the value data to use IDs as keys, instead of names. 
- const allNcItemNamesAndIds = await allNcItemNamesAndIdsPromise; - const itemValues = {}; - for (const { name, id } of allNcItemNamesAndIds) { - if (id in itemValuesByIdOrName) { - itemValues[id] = itemValuesByIdOrName[id]; - } else if (name in itemValuesByIdOrName) { - itemValues[id] = itemValuesByIdOrName[name]; - } - } - - // Cache for 1 minute, and immediately serve stale data for a day after. - // This should keep it fast and responsive, and stay well within our API key - // limits. (This will cause the client to send more requests than necessary, - // but the CDN cache should generally respond quickly with a small 304 Not - // Modified, unless the data really did change.) - res.setHeader( - "Cache-Control", - "public, max-age=3600, stale-while-revalidate=86400" - ); - return res.send(itemValues); -} - -async function loadAllNcItemNamesAndIds() { - const db = await connectToDb(); - - const [rows] = await db.query(` - SELECT items.id, item_translations.name FROM items - INNER JOIN item_translations ON item_translations.item_id = items.id - WHERE - (items.rarity_index IN (0, 500) OR is_manually_nc = 1) - AND item_translations.locale = "en" - `); - - return rows.map(({ id, name }) => ({ id, name: normalizeItemName(name) })); -} - -/** - * Load all Waka values from the spreadsheet. Returns an object keyed by ID or - * name - that is, if the item ID is provided in the sheet, we use that as the - * key; or if not, we use the name as the key. - */ -async function loadWakaValuesByIdOrName() { - if (!process.env["GOOGLE_API_KEY"]) { - throw new Error(`GOOGLE_API_KEY environment variable must be provided`); - } - - const res = await fetch( - `https://sheets.googleapis.com/v4/spreadsheets/` + - `1DRMrniTSZP0sgZK6OAFFYqpmbT6xY_Ve_i480zghOX0/values/NC%20Values` + - `?fields=values&key=${encodeURIComponent(process.env["GOOGLE_API_KEY"])}` - ); - const json = await res.json(); - - if (!res.ok) { - if (json.error) { - const { code, status, message } = json.error; - throw new Error( - `Google Sheets API returned error ${code} ${status}: ${message}` - ); - } else { - throw new Error( - `Google Sheets API returned unexpected error: ${res.status} ${res.statusText}` - ); - } - } - - // Get the rows from the JSON response - skipping the first-row headers. - const rows = json.values.slice(1); - - // Reformat the rows as a map from item name to value. We offer the item data - // as an object with a single field `value` for extensibility, but we omit - // the spreadsheet columns that we don't use on DTI, like Notes. - // - // NOTE: The Sheets API only returns the first non-empty cells of the row. - // That's why we set `""` as the defaults, in case the value/notes/etc - // aren't provided. - const itemValuesByIdOrName = {}; - for (const [ - itemName, - value = "", - unusedNotes = "", - unusedMarks = "", - itemId = "", - ] of rows) { - const normalizedItemName = normalizeItemName(itemName); - itemValuesByIdOrName[itemId || normalizedItemName] = { value }; - } - - return itemValuesByIdOrName; -} - -function normalizeItemName(name) { - return ( - name - // Remove all spaces, they're a common source of inconsistency - .replace(/\s+/g, "") - // Lower case, because capitalization is another common source - .toLowerCase() - // Remove diacritics: https://stackoverflow.com/a/37511463/107415 - // Waka has some stray ones in item names, not sure why! 
- .normalize("NFD") - .replace(/[\u0300-\u036f]/g, "") - ); + res.setHeader("Content-Type", "text/plain; charset=utf8"); + res + .status(410) + .send( + "WakaGuide.com is no longer updating its values, so we no longer " + + "serve them from this endpoint. The most recent set of values is " + + "archived here: https://docs.google.com/spreadsheets/d/1DRMrniTSZP0sgZK6OAFFYqpmbT6xY_Ve_i480zghOX0" + ); } async function handleWithBeeline(req, res) { diff --git a/src/server/loaders.js b/src/server/loaders.js index 1477c05..6c61e68 100644 --- a/src/server/loaders.js +++ b/src/server/loaders.js @@ -1,5 +1,4 @@ import DataLoader from "dataloader"; -import fetch from "node-fetch"; import { normalizeRow } from "./util"; const buildClosetListLoader = (db) => @@ -795,30 +794,6 @@ const buildItemTradesLoader = (db, loaders) => { cacheKeyFn: ({ itemId, isOwned }) => `${itemId}-${isOwned}` } ); -const buildItemWakaValueLoader = () => - new DataLoader(async (itemIds) => { - // This loader calls our /api/allWakaValues endpoint, to take advantage of - // the CDN caching. This helps us respond a bit faster than Google Sheets - // API would, and avoid putting pressure on our Google Sheets API quotas. - // (Some kind of internal memcache or process-level cache would be a more - // idiomatic solution in a monolith server environment!) - const url = process.env.VERCEL_URL - ? `https://${process.env.VERCEL_URL}/api/allWakaValues` - : process.env.NODE_ENV === "production" - ? "https://impress-2020.openneo.net/api/allWakaValues" - : "http://localhost:3000/api/allWakaValues"; - const res = await fetch(url); - if (!res.ok) { - throw new Error( - `Error loading /api/allWakaValues: ${res.status} ${res.statusText}` - ); - } - - const allWakaValues = await res.json(); - - return itemIds.map((itemId) => allWakaValues[itemId]); - }); - const buildPetTypeLoader = (db, loaders) => new DataLoader(async (petTypeIds) => { const qs = petTypeIds.map((_) => "?").join(","); @@ -1470,7 +1445,6 @@ function buildLoaders(db) { db ); loaders.itemTradesLoader = buildItemTradesLoader(db, loaders); - loaders.itemWakaValueLoader = buildItemWakaValueLoader(); loaders.petTypeLoader = buildPetTypeLoader(db, loaders); loaders.petTypeBySpeciesAndColorLoader = buildPetTypeBySpeciesAndColorLoader( db, diff --git a/src/server/types/Item.js b/src/server/types/Item.js index 1722dc9..ef6c700 100644 --- a/src/server/types/Item.js +++ b/src/server/types/Item.js @@ -28,9 +28,8 @@ const typeDefs = gql` createdAt: String """ - This item's capsule trade value as text, according to wakaguide.com, as a - human-readable string. Will be null if the value is not known, or if - there's an error connecting to the data source. + Deprecated: This item's capsule trade value as text, according to + wakaguide.com, as a human-readable string. **This now always returns null.** """ wakaValueText: String @cacheControl(maxAge: ${oneHour}) @@ -315,17 +314,9 @@ const resolvers = { const item = await itemLoader.load(id); return item.createdAt && item.createdAt.toISOString(); }, - wakaValueText: async ({ id }, _, { itemWakaValueLoader }) => { - let wakaValue; - try { - wakaValue = await itemWakaValueLoader.load(id); - } catch (e) { - console.error(`Error loading wakaValueText for item ${id}, skipping:`); - console.error(e); - wakaValue = null; - } - - return wakaValue ? wakaValue.value : null; + wakaValueText: () => { + // This feature is deprecated, so now we just always return unknown value. 
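+      // (We keep the field in the schema instead of deleting it, so existing
+      // clients that still select wakaValueText get a null back, rather than
+      // a GraphQL validation error.)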
+ return null; }, currentUserOwnsThis: async ( From 3ec0ae7557a81d31b8447b4e6e78b2d0cbc40b6a Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 22:08:06 -0800 Subject: [PATCH 07/17] Use localhost in /api/assetImage Just another VERCEL_URL removal! --- pages/api/assetImage.js | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/pages/api/assetImage.js b/pages/api/assetImage.js index 0c285ea..bc77c5e 100644 --- a/pages/api/assetImage.js +++ b/pages/api/assetImage.js @@ -24,14 +24,6 @@ const beeline = require("honeycomb-beeline")({ const playwright = require("playwright"); -// To render the image, we load the /internal/assetImage page in the web app, -// a simple page specifically designed for this API endpoint! -const ASSET_IMAGE_PAGE_BASE_URL = process.env.VERCEL_URL - ? `https://${process.env.VERCEL_URL}/internal/assetImage` - : process.env.NODE_ENV === "development" - ? "http://localhost:3000/internal/assetImage" - : "https://impress-2020.openneo.net/internal/assetImage"; - // We share one browser instance, but create a new independent "context" for // each request, as a security hedge. (The intent is for the user to request // very little from the browser, so it shouldn't matter, but it's just an extra @@ -84,7 +76,14 @@ async function handle(req, res) { } async function loadAndScreenshotImage(libraryUrl, size) { - const assetImagePageUrl = new URL(ASSET_IMAGE_PAGE_BASE_URL); + // To render the image, we load the /internal/assetImage page in the web app, + // a simple page specifically designed for this API endpoint! + // + // NOTE: If we deploy to a host where localhost:3000 won't work, make this + // configurable with an env var, e.g. process.env.LOCAL_APP_HOST + const assetImagePageUrl = new URL( + "http://localhost:3000/internal/assetImage" + ); assetImagePageUrl.search = new URLSearchParams({ libraryUrl, size, From 18bc3df6f4b5eebe5bad5f65fa22568e6f8e9bf4 Mon Sep 17 00:00:00 2001 From: Matchu Date: Fri, 12 Nov 2021 23:35:05 -0800 Subject: [PATCH 08/17] Use browser pooling for /api/assetImage I tried running a pressure test against assetImage on prod with the open-source tool `wrk`: ``` wrk -t12 -c20 -d20s --timeout 20s 'https://impress-2020-box.openneo.net/api/assetImage?libraryUrl=https%3A%2F%2Fimages.neopets.com%2Fcp%2Fitems%2Fdata%2F000%2F000%2F522%2F522756_2bde0443ae%2F522756.js&size=600' ``` I found that, unsurprisingly, we run a lot of concurrent requests, which fill up memory with a lot of Chromium instances! In this change, we declare a small pool of 2 browser contexts, to allow a bit of concurrency but still very strictly limit how many browser instances can actually get created. We might tune this number depending on the actual performance characteristics! 
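
For reference, the acquire/release pattern that generic-pool gives us
looks roughly like this. (A simplified sketch with stand-in names, like
`withResource` and the fake resource object, not the exact code in the
diff!)

```
const genericPool = require("generic-pool");

// A strictly-capped pool of expensive resources. The factory here is a
// stand-in for our real one, which creates browser contexts.
const pool = genericPool.createPool(
  {
    create: async () => ({ createdAt: Date.now() }),
    destroy: async (resource) => {},
  },
  { min: 1, max: 2, acquireTimeoutMillis: 15000 }
);

async function withResource(fn) {
  // Waits if both slots are busy, and rejects with a TimeoutError if
  // nothing frees up within 15 seconds.
  const resource = await pool.acquire();
  try {
    return await fn(resource);
  } finally {
    // Hand the slot back, so the next waiting request can proceed.
    await pool.release(resource);
  }
}
```

The key property under pressure is the hard `max`: no matter how many
requests `wrk` throws at us, we never hold more than two contexts at
once; everyone else waits in line.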
--- package.json | 1 + pages/api/assetImage.js | 74 +++++++++++++++++++++++++++++++++-------- yarn.lock | 5 +++ 3 files changed, 66 insertions(+), 14 deletions(-) diff --git a/package.json b/package.json index 28078d6..36510fd 100644 --- a/package.json +++ b/package.json @@ -33,6 +33,7 @@ "easeljs": "^1.0.2", "escape-html": "^1.0.3", "framer-motion": "^4.1.11", + "generic-pool": "^3.8.2", "graphql": "^15.5.0", "honeycomb-beeline": "^2.7.4", "immer": "^9.0.6", diff --git a/pages/api/assetImage.js b/pages/api/assetImage.js index bc77c5e..ee9e2a0 100644 --- a/pages/api/assetImage.js +++ b/pages/api/assetImage.js @@ -23,22 +23,68 @@ const beeline = require("honeycomb-beeline")({ }); const playwright = require("playwright"); +const genericPool = require("generic-pool"); -// We share one browser instance, but create a new independent "context" for -// each request, as a security hedge. (The intent is for the user to request -// very little from the browser, so it shouldn't matter, but it's just an extra -// layer to reduce the risk of what an attack could do!) -// -// TODO: We're probably going to need to limit the number of concurrent browser -// sessions here, right? I don't actually know how the Next.js server -// handles concurrency though, let's pressure-test and find out before -// building a solution. -let SHARED_BROWSER = null; +// Share a single browser instance for all requests, to help perf a lot. +// We implement it as a "pool" of 1, because the pool is better than we are at +// lifecycle management and timeouts! +const browserPool = genericPool.createPool( + { + create: async () => { + console.info(`Starting shared browser instance`); + return await playwright.chromium.launch({ headless: true }); + }, + destroy: (browser) => { + console.info(`Closing shared browser instance`); + browser.close(); + }, + validate: (browser) => browser.isConnected(), + }, + { min: 1, max: 1, testOnBorrow: true, acquireTimeoutMillis: 15000 } +); +browserPool.on("factoryCreateError", (error) => console.error(error)); +browserPool.on("factoryDestroyError", (error) => console.error(error)); +async function getBrowser() { + // HACK: We have the pool *managing* our browser's lifecycle, but we don't + // actually need to *lock* it. So, we "acquire" the browser, then + // immediately release the lock for other `getBrowser` calls. + const browser = await browserPool.acquire(); + browserPool.release(browser); + browser.on("disconnected", () => browserPool.destroy(browser)); + return browser; +} + +// We maintain a small pool of shared browser sessions ("contexts"), to manage +// memory usage. If all the sessions are already in use, a request will wait +// for one of them to become available. 
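+//
+// (If nothing frees up within acquireTimeoutMillis, acquire() rejects with a
+// TimeoutError instead of queueing forever, so the request fails fast.)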
+const contextPool = genericPool.createPool( + { + create: async () => { + console.info(`Creating a browser context`); + const browser = await getBrowser(); + return await browser.newContext(); + }, + destroy: (context) => { + console.info(`Closing a browser context`); + context.close(); + }, + validate: (context) => context.browser().isConnected(), + }, + { min: 1, max: 2, testOnBorrow: true, acquireTimeoutMillis: 15000 } +); +contextPool.on("factoryCreateError", (error) => console.error(error)); +contextPool.on("factoryDestroyError", (error) => console.error(error)); async function getBrowserContext() { - if (SHARED_BROWSER == null) { - SHARED_BROWSER = await playwright.chromium.launch({ headless: true }); - } - return await SHARED_BROWSER.newContext(); + const context = await contextPool.acquire(); + + // When the caller closes the context, we don't just release it back to the + // pool; we actually destroy it altogether, to help further isolate requests + // as a safe default for security purposes. (I'm not aware of an attack + // vector, but it feels like a good default, esp when contexts seem fast to + // create!) + context.on("close", () => contextPool.destroy(context)); + + return context; } async function handle(req, res) { diff --git a/yarn.lock b/yarn.lock index 4f3013e..d75fcff 100644 --- a/yarn.lock +++ b/yarn.lock @@ -6776,6 +6776,11 @@ generate-function@^2.3.1: dependencies: is-property "^1.0.2" +generic-pool@^3.8.2: + version "3.8.2" + resolved "https://registry.yarnpkg.com/generic-pool/-/generic-pool-3.8.2.tgz#aab4f280adb522fdfbdc5e5b64d718d3683f04e9" + integrity sha512-nGToKy6p3PAbYQ7p1UlWl6vSPwfwU6TMSWK7TTu+WUY4ZjyZQGniGGt2oNVvyNSpyZYSB43zMXVLcBm08MTMkg== + gensync@^1.0.0-beta.1: version "1.0.0-beta.1" resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269" From 20fff261ef0e4f07d9921370a81781dac89670c2 Mon Sep 17 00:00:00 2001 From: Matchu Date: Sat, 13 Nov 2021 01:45:27 -0800 Subject: [PATCH 09/17] Switch to a small page pool MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Hmm I am really not managing to keep the processes under control… maybe I'll try Puppeteer and see if it's just a bit more reliable?? --- pages/api/assetImage.js | 161 +++++++++++++++++++++------------------- 1 file changed, 86 insertions(+), 75 deletions(-) diff --git a/pages/api/assetImage.js b/pages/api/assetImage.js index ee9e2a0..25d2975 100644 --- a/pages/api/assetImage.js +++ b/pages/api/assetImage.js @@ -25,67 +25,76 @@ const beeline = require("honeycomb-beeline")({ const playwright = require("playwright"); const genericPool = require("generic-pool"); -// Share a single browser instance for all requests, to help perf a lot. -// We implement it as a "pool" of 1, because the pool is better than we are at -// lifecycle management and timeouts! -const browserPool = genericPool.createPool( - { - create: async () => { - console.info(`Starting shared browser instance`); - return await playwright.chromium.launch({ headless: true }); +// We maintain a small pool of browser pages, to manage memory usage. If all +// the pages are already in use, a request will wait for one of them to become +// available. +// +// NOTE: I picked 4 because that seemed to be a good number for avoiding maxing +// out our CPU. I also noticed that maxing CPU seemed to be a weird +// threshold where Chromium processes started behaving poorly after? 
I'm +// not sure I'm diagnosing that correctly though, and I'm worried about +// the sysadmin implications of not having that locked down, y'know? +function createPagePool() { + console.info(`Creating new browser instance`); + const browserPromise = playwright.chromium.launch({ headless: true }); + + const pagePool = genericPool.createPool( + { + create: async () => { + console.debug(`Creating a browser page`); + const browser = await browserPromise; + return await browser.newPage(); + }, + destroy: (page) => { + console.debug(`Closing a browser page`); + page.close(); + }, + validate: (page) => page.context().browser().isConnected(), }, - destroy: (browser) => { - console.info(`Closing shared browser instance`); - browser.close(); - }, - validate: (browser) => browser.isConnected(), - }, - { min: 1, max: 1, testOnBorrow: true, acquireTimeoutMillis: 15000 } -); -browserPool.on("factoryCreateError", (error) => console.error(error)); -browserPool.on("factoryDestroyError", (error) => console.error(error)); -async function getBrowser() { - // HACK: We have the pool *managing* our browser's lifecycle, but we don't - // actually need to *lock* it. So, we "acquire" the browser, then - // immediately release the lock for other `getBrowser` calls. - const browser = await browserPool.acquire(); - browserPool.release(browser); - browser.on("disconnected", () => browserPool.destroy(browser)); - return browser; + { min: 4, max: 4, testOnBorrow: true, acquireTimeoutMillis: 15000 } + ); + pagePool.on("factoryCreateError", (error) => console.error(error)); + pagePool.on("factoryDestroyError", (error) => console.error(error)); + pagePool.browserPromise = browserPromise; // we use this during reset + + // If the browser terminates unexpectedly, and this is still the current + // page pool, I guess something went wrong! Reset! + browserPromise.then((browser) => + browser.on("disconnected", () => { + if (PAGE_POOL === pagePool) { + resetPagePool(); + } + }) + ); + + return pagePool; } -// We maintain a small pool of shared browser sessions ("contexts"), to manage -// memory usage. If all the sessions are already in use, a request will wait -// for one of them to become available. -const contextPool = genericPool.createPool( - { - create: async () => { - console.info(`Creating a browser context`); - const browser = await getBrowser(); - return await browser.newContext(); - }, - destroy: (context) => { - console.info(`Closing a browser context`); - context.close(); - }, - validate: (context) => context.browser().isConnected(), - }, - { min: 1, max: 2, testOnBorrow: true, acquireTimeoutMillis: 15000 } -); -contextPool.on("factoryCreateError", (error) => console.error(error)); -contextPool.on("factoryDestroyError", (error) => console.error(error)); -async function getBrowserContext() { - const context = await contextPool.acquire(); +let PAGE_POOL = createPagePool(); - // When the caller closes the context, we don't just release it back to the - // pool; we actually destroy it altogether, to help further isolate requests - // as a safe default for security purposes. (I'm not aware of an attack - // vector, but it feels like a good default, esp when contexts seem fast to - // create!) - context.on("close", () => contextPool.destroy(context)); +// Every minute, we stop the current browser instance, to clear memory leaks. +// (I don't think this endpoint leaks pages, though maybe it does? But I +// definitely saw weird trailing memory and CPU usage after lots of requests...) 
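+// (In-flight requests should be safe across a reset: each request captures a
+// reference to the current pool, and we only close the old browser after that
+// pool has fully drained.)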
+async function resetPagePool() { + console.info(`Resetting page pool`); + const prevPagePool = PAGE_POOL; + if (prevPagePool) { + // First, wait for the previous pages to finish. This is called + // "draining" the pool: waiting for it to empty. Cute! + console.debug(`Draining previous page pool`); + prevPagePool.drain().then(async () => { + // Then, terminate the browser instance. + console.debug(`Previous page pool drained, closing browser`); + const browser = await prevPagePool.browserPromise; + await browser.close(); + console.info(`Previous browser closed`); + }); + } - return context; + const newPagePool = createPagePool(); + PAGE_POOL = newPagePool; } +setInterval(resetPagePool, 60000); async function handle(req, res) { const { libraryUrl, size } = req.query; @@ -109,6 +118,9 @@ async function handle(req, res) { imageBuffer = await loadAndScreenshotImage(libraryUrl, size); } catch (e) { console.error(e); + if (e.name === "TimeoutError") { + return reject(res, `Could not load image: Server under heavy load`, 503); + } return reject(res, `Could not load image: ${e.message}`, 500); } @@ -135,12 +147,12 @@ async function loadAndScreenshotImage(libraryUrl, size) { size, }).toString(); - console.debug("Opening browser page"); - const context = await getBrowserContext(); - const page = await context.newPage(); - console.debug("Page opened, navigating to: " + assetImagePageUrl.toString()); + console.debug("Getting browser page"); + const currentPagePool = PAGE_POOL; + const page = await currentPagePool.acquire(); try { + console.debug("Page ready, navigating to: " + assetImagePageUrl.toString()); await page.goto(assetImagePageUrl.toString()); console.debug("Page loaded, awaiting image"); @@ -149,10 +161,20 @@ async function loadAndScreenshotImage(libraryUrl, size) { // present, or raising the error if present. const imageBufferPromise = screenshotImageFromPage(page); const errorMessagePromise = readErrorMessageFromPage(page); - const firstResultFromPage = await Promise.any([ - imageBufferPromise.then((imageBuffer) => ({ imageBuffer })), - errorMessagePromise.then((errorMessage) => ({ errorMessage })), - ]); + let firstResultFromPage; + try { + firstResultFromPage = await Promise.any([ + imageBufferPromise.then((imageBuffer) => ({ imageBuffer })), + errorMessagePromise.then((errorMessage) => ({ errorMessage })), + ]); + } catch (error) { + if (error.errors) { + // If both promises failed, show all error messages. + throw new Error(error.errors.map((e) => e.message).join(", ")); + } else { + throw error; + } + } if (firstResultFromPage.errorMessage) { throw new Error(firstResultFromPage.errorMessage); @@ -165,18 +187,7 @@ async function loadAndScreenshotImage(libraryUrl, size) { ); } } finally { - // Tear down our resources when we're done! If it fails, log the error, but - // don't block the success of the image. - try { - await page.close(); - } catch (e) { - console.warn("Error closing page after image finished", e); - } - try { - await context.close(); - } catch (e) { - console.warn("Error closing browser after image finished", e); - } + currentPagePool.release(page); } } From 587aa09efcc6ecb05551454714feb2de1ceeef5c Mon Sep 17 00:00:00 2001 From: Matchu Date: Sat, 13 Nov 2021 02:12:05 -0800 Subject: [PATCH 10/17] Oops, fix bug in pm2 setup MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Oh, I made a typo that caused pm2 to be running our processes as `root` instead of `matchu`! Let's very fix that!! 
😳 I noticed this because I'm trying Puppeteer, and it got upset about running in sandboxed mode as root, and I'm like "as root??" So yeah, good, fixed, lol 😬 --- deploy/playbooks/setup.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/deploy/playbooks/setup.yml b/deploy/playbooks/setup.yml index e2c8196..e6eab19 100644 --- a/deploy/playbooks/setup.yml +++ b/deploy/playbooks/setup.yml @@ -160,7 +160,7 @@ # process. They'll be able to manage it without `sudo`, including during # normal deploys, and run `pm2 monit` from their shell to see status. become: yes - command: "pm2 startup systemd {{ ansible_user_id }} --hp /home/{{ ansible_user_id }}" + command: "pm2 startup systemd -u {{ ansible_user_id }} --hp /home/{{ ansible_user_id }}" - name: Create pm2 ecosystem file copy: From 0c2939dfe4ff7a091ead5326d539c8f934126816 Mon Sep 17 00:00:00 2001 From: Matchu Date: Sat, 13 Nov 2021 02:16:58 -0800 Subject: [PATCH 11/17] Use Puppeteer instead of Playwright We used Playwright in the first place to try to work around a Vercel deploy issue, and I'm not sure it really ended up mattering lol :p But yeah, I'm putting the new Puppeteer code through the same prod stress test, and it just doesn't seem to be getting into the same broken state that Playwright was. I'm guessing it's just that Puppeteer has more investment in edge-case handling? (There's also the fact that we're no longer running things as root, which could have been a fucky problem, too?) --- deploy/playbooks/setup.yml | 7 ++ package.json | 2 +- pages/api/assetImage.js | 34 +----- yarn.lock | 236 ++++++++++++++++--------------------- 4 files changed, 112 insertions(+), 167 deletions(-) diff --git a/deploy/playbooks/setup.yml b/deploy/playbooks/setup.yml index e6eab19..4c5e977 100644 --- a/deploy/playbooks/setup.yml +++ b/deploy/playbooks/setup.yml @@ -272,6 +272,7 @@ # `npx playwright install-deps`, which I couldn't get running in # Ansible as root, and besides, I prefer manually managing the # package list over running an npm script as root! + # TODO: We're using Puppeteer now, should this list change in some way? become: yes apt: update_cache: yes @@ -316,6 +317,12 @@ - libxrandr2 - libxshmfence1 + - name: Enable user namespace cloning for Chromium sandboxing + become: yes + ansible.posix.sysctl: + name: kernel.unprivileged_userns_clone + value: "1" + handlers: - name: Restart nginx become: yes diff --git a/package.json b/package.json index 36510fd..f8cd806 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "mysql2": "^2.1.0", "next": "12.0.2", "node-fetch": "^2.6.0", - "playwright": "^1.16.3", + "puppeteer": "^11.0.0", "react": "^17.0.1", "react-autosuggest": "^10.0.2", "react-dom": "^17.0.1", diff --git a/pages/api/assetImage.js b/pages/api/assetImage.js index 25d2975..af113fe 100644 --- a/pages/api/assetImage.js +++ b/pages/api/assetImage.js @@ -22,7 +22,7 @@ const beeline = require("honeycomb-beeline")({ disableInstrumentationOnLoad: true, }); -const playwright = require("playwright"); +const puppeteer = require("puppeteer"); const genericPool = require("generic-pool"); // We maintain a small pool of browser pages, to manage memory usage. If all @@ -36,7 +36,7 @@ const genericPool = require("generic-pool"); // the sysadmin implications of not having that locked down, y'know? 
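+// (NOTE: the pool size of 4 below was tuned while we were on Playwright;
+// worth re-checking whether the same number still holds under Puppeteer.)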
function createPagePool() { console.info(`Creating new browser instance`); - const browserPromise = playwright.chromium.launch({ headless: true }); + const browserPromise = puppeteer.launch({ headless: true }); const pagePool = genericPool.createPool( { @@ -49,7 +49,7 @@ function createPagePool() { console.debug(`Closing a browser page`); page.close(); }, - validate: (page) => page.context().browser().isConnected(), + validate: (page) => page.browser().isConnected(), }, { min: 4, max: 4, testOnBorrow: true, acquireTimeoutMillis: 15000 } ); @@ -70,31 +70,7 @@ function createPagePool() { return pagePool; } -let PAGE_POOL = createPagePool(); - -// Every minute, we stop the current browser instance, to clear memory leaks. -// (I don't think this endpoint leaks pages, though maybe it does? But I -// definitely saw weird trailing memory and CPU usage after lots of requests...) -async function resetPagePool() { - console.info(`Resetting page pool`); - const prevPagePool = PAGE_POOL; - if (prevPagePool) { - // First, wait for the previous pages to finish. This is called - // "draining" the pool: waiting for it to empty. Cute! - console.debug(`Draining previous page pool`); - prevPagePool.drain().then(async () => { - // Then, terminate the browser instance. - console.debug(`Previous page pool drained, closing browser`); - const browser = await prevPagePool.browserPromise; - await browser.close(); - console.info(`Previous browser closed`); - }); - } - - const newPagePool = createPagePool(); - PAGE_POOL = newPagePool; -} -setInterval(resetPagePool, 60000); +let PAGE_POOL = createPagePool(); // TODO: simplify.. async function handle(req, res) { const { libraryUrl, size } = req.query; @@ -119,7 +95,7 @@ async function handle(req, res) { } catch (e) { console.error(e); if (e.name === "TimeoutError") { - return reject(res, `Could not load image: Server under heavy load`, 503); + return reject(res, `Server under heavy load: ${e.message}`, 503); } return reject(res, `Could not load image: ${e.message}`, 500); } diff --git a/yarn.lock b/yarn.lock index d75fcff..e9f17e0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3523,7 +3523,7 @@ agent-base@4, agent-base@^4.2.0, agent-base@^4.3.0: dependencies: es6-promisify "^5.0.0" -agent-base@6, agent-base@^6.0.2: +agent-base@6: version "6.0.2" resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-6.0.2.tgz#49fff58577cfee3f37176feab4c22e00f86d7f77" integrity sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ== @@ -4564,7 +4564,7 @@ buffer@5.6.0, buffer@^5.2.0: base64-js "^1.0.2" ieee754 "^1.1.4" -buffer@^5.5.0: +buffer@^5.2.1, buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" integrity sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ== @@ -4966,11 +4966,6 @@ commander@^6.2.0: resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== -commander@^8.2.0: - version "8.3.0" - resolved "https://registry.yarnpkg.com/commander/-/commander-8.3.0.tgz#4837ea1b2da67b9c616a67afbb0fafee567bca66" - integrity sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww== - common-tags@^1.8.0: version "1.8.0" resolved 
"https://registry.yarnpkg.com/common-tags/-/common-tags-1.8.0.tgz#8e3153e542d4a39e9b10554434afaaf98956a937" @@ -5565,6 +5560,11 @@ detect-node@^2.0.4: resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.0.4.tgz#014ee8f8f669c5c58023da64b8179c083a28c46c" integrity sha512-ZIzRpLJrOj7jjP2miAtgqIfmzbxa4ZOr5jJc601zklsfEx9oTzmmj2nVpIPRpNlRTIh8lc1kyViIY7BWSGNmKw== +devtools-protocol@0.0.901419: + version "0.0.901419" + resolved "https://registry.yarnpkg.com/devtools-protocol/-/devtools-protocol-0.0.901419.tgz#79b5459c48fe7e1c5563c02bd72f8fec3e0cebcd" + integrity sha512-4INMPwNm9XRpBukhNbF7OB6fNTTCaI8pzy/fXg0xQzAy5h3zL1P8xT3QazgKqBrb/hAYwIBizqDBZ7GtJE74QQ== + dicer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.3.0.tgz#eacd98b3bfbf92e8ab5c2fdb71aaac44bb06b872" @@ -5935,11 +5935,6 @@ escape-string-regexp@^1.0.2, escape-string-regexp@^1.0.5: resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= -escape-string-regexp@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344" - integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w== - escape-string-regexp@^4.0.0: version "4.0.0" resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34" @@ -6366,17 +6361,7 @@ extglob@^2.0.4: snapdragon "^0.8.1" to-regex "^3.0.1" -extract-zip@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.7.0.tgz#556cc3ae9df7f452c493a0cfb51cc30277940927" - integrity sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA== - dependencies: - concat-stream "^1.6.2" - debug "^2.6.9" - mkdirp "^0.5.4" - yauzl "^2.10.0" - -extract-zip@^2.0.1: +extract-zip@2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-2.0.1.tgz#663dca56fe46df890d5f131ef4a06d22bb8ba13a" integrity sha512-GDhU9ntwuKyGXdZBUgTIe+vXnWj0fppUEtMDL0+idd5Sta8TGpHssn/eusA9mrPr9qNDym6SxAYZjNvCn/9RBg== @@ -6387,6 +6372,16 @@ extract-zip@^2.0.1: optionalDependencies: "@types/yauzl" "^2.9.1" +extract-zip@^1.7.0: + version "1.7.0" + resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.7.0.tgz#556cc3ae9df7f452c493a0cfb51cc30277940927" + integrity sha512-xoh5G1W/PB0/27lXgMQyIhP5DSY/LhoCsOyZgb+6iMmRtCwVBo55uKaMoEYrDCKQhWvqEip5ZPKAc6eFNyf/MA== + dependencies: + concat-stream "^1.6.2" + debug "^2.6.9" + mkdirp "^0.5.4" + yauzl "^2.10.0" + extsprintf@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" @@ -6989,11 +6984,6 @@ graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0: resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== -graceful-fs@^4.2.4: - version "4.2.4" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" - integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== - graphql-extensions@^0.11.1: version "0.11.1" resolved 
"https://registry.yarnpkg.com/graphql-extensions/-/graphql-extensions-0.11.1.tgz#f543f544a047a7a4dd930123f662dfcc01527416" @@ -7300,6 +7290,14 @@ https-browserify@1.0.0: resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73" integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM= +https-proxy-agent@5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" + integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== + dependencies: + agent-base "6" + debug "4" + https-proxy-agent@^3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-3.0.1.tgz#b8c286433e87602311b01c8ea34413d856a4af81" @@ -7308,14 +7306,6 @@ https-proxy-agent@^3.0.0: agent-base "^4.3.0" debug "^3.1.0" -https-proxy-agent@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.0.tgz#e2a90542abb68a762e0a0850f6c9edadfd8506b2" - integrity sha512-EkYm5BcKUGiduxzSt3Eppko+PiNWNEpa4ySk9vTC6wDsQJW9rHSa+UhGNJoRYp7bz6Ht1eaRIa6QaJqO5rCFbA== - dependencies: - agent-base "6" - debug "4" - human-signals@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" @@ -7998,11 +7988,6 @@ jpeg-js@^0.4.0: resolved "https://registry.yarnpkg.com/jpeg-js/-/jpeg-js-0.4.1.tgz#937a3ae911eb6427f151760f8123f04c8bfe6ef7" integrity sha512-jA55yJiB5tCXEddos8JBbvW+IMrqY0y1tjjx9KNVtA+QPmu7ND5j0zkKopClpUTsaETL135uOM2XfcYG4XRjmw== -jpeg-js@^0.4.2: - version "0.4.3" - resolved "https://registry.yarnpkg.com/jpeg-js/-/jpeg-js-0.4.3.tgz#6158e09f1983ad773813704be80680550eff977b" - integrity sha512-ru1HWKek8octvUHFHvE5ZzQ1yAsJmIvRdGWvSoKV52XKyuyYA437QWDttXT8eZXDSbuMpHlLzPDZUPd6idIz+Q== - js-base64@^2.5.1: version "2.6.4" resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.6.4.tgz#f4e686c5de1ea1f867dbcad3d46d969428df98c4" @@ -9017,6 +9002,13 @@ node-fetch@2.6.1: resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052" integrity sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw== +node-fetch@2.6.5: + version "2.6.5" + resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.5.tgz#42735537d7f080a7e5f78b6c549b7146be1742fd" + integrity sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ== + dependencies: + whatwg-url "^5.0.0" + node-fetch@^2.1.2, node-fetch@^2.2.0, node-fetch@^2.6.0: version "2.6.0" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd" @@ -9659,6 +9651,13 @@ pixelmatch@^5.1.0: dependencies: pngjs "^4.0.1" +pkg-dir@4.2.0, pkg-dir@^4.1.0: + version "4.2.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" + integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== + dependencies: + find-up "^4.0.0" + pkg-dir@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" @@ -9673,47 +9672,11 @@ pkg-dir@^3.0.0: dependencies: find-up "^3.0.0" -pkg-dir@^4.1.0: - version "4.2.0" - resolved 
"https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - platform@1.3.6: version "1.3.6" resolved "https://registry.yarnpkg.com/platform/-/platform-1.3.6.tgz#48b4ce983164b209c2d45a107adb31f473a6e7a7" integrity sha512-fnWVljUchTro6RiCFvCXBbNhJc2NijN7oIQxbwsyL0buWJPG85v81ehlHI9fXrJsMNgTofEoWIQeClKpgxFLrg== -playwright-core@=1.16.3: - version "1.16.3" - resolved "https://registry.yarnpkg.com/playwright-core/-/playwright-core-1.16.3.tgz#f466be9acaffb698654adfb0a17a4906ba936895" - integrity sha512-16hF27IvQheJee+DbhC941AUZLjbJgfZFWi9YPS4LKEk/lKFhZI+9TiFD0sboYqb9eaEWvul47uR5xxTVbE4iw== - dependencies: - commander "^8.2.0" - debug "^4.1.1" - extract-zip "^2.0.1" - https-proxy-agent "^5.0.0" - jpeg-js "^0.4.2" - mime "^2.4.6" - pngjs "^5.0.0" - progress "^2.0.3" - proper-lockfile "^4.1.1" - proxy-from-env "^1.1.0" - rimraf "^3.0.2" - socks-proxy-agent "^6.1.0" - stack-utils "^2.0.3" - ws "^7.4.6" - yauzl "^2.10.0" - yazl "^2.5.1" - -playwright@^1.16.3: - version "1.16.3" - resolved "https://registry.yarnpkg.com/playwright/-/playwright-1.16.3.tgz#27a292d9fa54fbac923998d3af58cd2b691f5ebe" - integrity sha512-nfJx/OpIb/8OexL3rYGxNN687hGyaM3XNpfuMzoPlrekURItyuiHHsNhC9oQCx3JDmCn5O3EyyyFCnrZjH6MpA== - dependencies: - playwright-core "=1.16.3" - please-upgrade-node@^3.2.0: version "3.2.0" resolved "https://registry.yarnpkg.com/please-upgrade-node/-/please-upgrade-node-3.2.0.tgz#aeddd3f994c933e4ad98b99d9a556efa0e2fe942" @@ -9731,11 +9694,6 @@ pngjs@^4.0.1: resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-4.0.1.tgz#f803869bb2fc1bfe1bf99aa4ec21c108117cfdbe" integrity sha512-rf5+2/ioHeQxR6IxuYNYGFytUyG3lma/WW1nsmjeHlWwtb2aByla6dkVc8pmJ9nplzkTA0q2xx7mMWrOTqT4Gg== -pngjs@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/pngjs/-/pngjs-5.0.0.tgz#e79dd2b215767fd9c04561c01236df960bce7fbb" - integrity sha512-40QW5YalBNfQo5yRYmiw7Yz6TKKVr3h6970B2YE+3fQpsWcrbj1PzJgxeJ19DRQjhMbKPIuMY8rFaXc8moolVw== - popmotion@9.3.5: version "9.3.5" resolved "https://registry.yarnpkg.com/popmotion/-/popmotion-9.3.5.tgz#e821aff3424a021b0f2c93922db31c55cfe64149" @@ -9844,7 +9802,7 @@ process@~0.5.1: resolved "https://registry.yarnpkg.com/process/-/process-0.5.2.tgz#1638d8a8e34c2f440a91db95ab9aeb677fc185cf" integrity sha1-FjjYqONML0QKkduVq5rrZ3/Bhc8= -progress@^2.0.0, progress@^2.0.3: +progress@2.0.3, progress@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== @@ -9863,15 +9821,6 @@ prop-types@^15.5.8, prop-types@^15.6.2, prop-types@^15.7.2: object-assign "^4.1.1" react-is "^16.8.1" -proper-lockfile@^4.1.1: - version "4.1.2" - resolved "https://registry.yarnpkg.com/proper-lockfile/-/proper-lockfile-4.1.2.tgz#c8b9de2af6b2f1601067f98e01ac66baa223141f" - integrity sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA== - dependencies: - graceful-fs "^4.2.4" - retry "^0.12.0" - signal-exit "^3.0.2" - proxy-addr@~2.0.5: version "2.0.6" resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.6.tgz#fdc2336505447d3f2f2c638ed272caf614bbb2bf" @@ -9894,7 +9843,7 @@ proxy-agent@3: proxy-from-env "^1.0.0" socks-proxy-agent "^4.0.1" -proxy-from-env@^1.0.0, proxy-from-env@^1.1.0: +proxy-from-env@1.1.0, 
proxy-from-env@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/proxy-from-env/-/proxy-from-env-1.1.0.tgz#e102f16ca355424865755d2c9e8ea4f24d58c3e2" integrity sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg== @@ -9939,6 +9888,24 @@ punycode@^2.1.0, punycode@^2.1.1: resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== +puppeteer@^11.0.0: + version "11.0.0" + resolved "https://registry.yarnpkg.com/puppeteer/-/puppeteer-11.0.0.tgz#0808719c38e15315ecc1b1c28911f1c9054d201f" + integrity sha512-6rPFqN1ABjn4shgOICGDBITTRV09EjXVqhDERBDKwCLz0UyBxeeBH6Ay0vQUJ84VACmlxwzOIzVEJXThcF3aNg== + dependencies: + debug "4.3.2" + devtools-protocol "0.0.901419" + extract-zip "2.0.1" + https-proxy-agent "5.0.0" + node-fetch "2.6.5" + pkg-dir "4.2.0" + progress "2.0.3" + proxy-from-env "1.1.0" + rimraf "3.0.2" + tar-fs "2.1.1" + unbzip2-stream "1.4.3" + ws "8.2.3" + qs@6.7.0: version "6.7.0" resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" @@ -10483,6 +10450,13 @@ reusify@^1.0.4: resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== +rimraf@3.0.2, rimraf@^3.0.0, rimraf@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" + integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== + dependencies: + glob "^7.1.3" + rimraf@^2.6.1, rimraf@^2.6.2, rimraf@^2.6.3: version "2.7.1" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec" @@ -10490,13 +10464,6 @@ rimraf@^2.6.1, rimraf@^2.6.2, rimraf@^2.6.3: dependencies: glob "^7.1.3" -rimraf@^3.0.0, rimraf@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - rimraf@~2.6.2: version "2.6.3" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" @@ -10927,23 +10894,6 @@ socks-proxy-agent@^4.0.1: agent-base "~4.2.1" socks "~2.3.2" -socks-proxy-agent@^6.1.0: - version "6.1.0" - resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-6.1.0.tgz#869cf2d7bd10fea96c7ad3111e81726855e285c3" - integrity sha512-57e7lwCN4Tzt3mXz25VxOErJKXlPfXmkMLnk310v/jwW20jWRVcgsOit+xNkN3eIEdB47GwnfAEBLacZ/wVIKg== - dependencies: - agent-base "^6.0.2" - debug "^4.3.1" - socks "^2.6.1" - -socks@^2.6.1: - version "2.6.1" - resolved "https://registry.yarnpkg.com/socks/-/socks-2.6.1.tgz#989e6534a07cf337deb1b1c94aaa44296520d30e" - integrity sha512-kLQ9N5ucj8uIcxrDwjm0Jsqk06xdpBjGNQtpXy4Q8/QY2k+fY7nZH8CARy+hkbG+SGAovmzzuauCpBlb8FrnBA== - dependencies: - ip "^1.1.5" - smart-buffer "^4.1.0" - socks@~2.3.2: version "2.3.3" resolved "https://registry.yarnpkg.com/socks/-/socks-2.3.3.tgz#01129f0a5d534d2b897712ed8aceab7ee65d78e3" @@ -11035,13 +10985,6 @@ ssim.js@^3.1.1: resolved "https://registry.yarnpkg.com/ssim.js/-/ssim.js-3.5.0.tgz#d7276b9ee99b57a5ff0db34035f02f35197e62df" integrity 
sha512-Aj6Jl2z6oDmgYFFbQqK7fght19bXdOxY7Tj03nF+03M9gCBAjeIiO8/PlEGMfKDwYpw4q6iBqVq2YuREorGg/g== -stack-utils@^2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.3.tgz#cd5f030126ff116b78ccb3c027fe302713b61277" - integrity sha512-gL//fkxfWUsIlFL2Tl42Cl6+HFALEaB1FU76I/Fy+oZjRreP7OPMXFlGbxM7NQsI0ZpUfw76sHnv0WNYuTb7Iw== - dependencies: - escape-string-regexp "^2.0.0" - stacktrace-parser@0.1.10: version "0.1.10" resolved "https://registry.yarnpkg.com/stacktrace-parser/-/stacktrace-parser-0.1.10.tgz#29fb0cae4e0d0b85155879402857a1639eb6051a" @@ -11416,7 +11359,7 @@ table@^6.0.9: string-width "^4.2.3" strip-ansi "^6.0.1" -tar-fs@^2.0.0, tar-fs@^2.1.1: +tar-fs@2.1.1, tar-fs@^2.0.0, tar-fs@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== @@ -11598,6 +11541,11 @@ tr46@^1.0.1: dependencies: punycode "^2.1.0" +tr46@~0.0.3: + version "0.0.3" + resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" + integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= + truncate-utf8-bytes@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz#405923909592d56f78a5818434b0b78489ca5f2b" @@ -11761,6 +11709,14 @@ unbox-primitive@^1.0.1: has-symbols "^1.0.2" which-boxed-primitive "^1.0.2" +unbzip2-stream@1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz#b0da04c4371311df771cdc215e87f2130991ace7" + integrity sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg== + dependencies: + buffer "^5.2.1" + through "^2.3.8" + unfetch@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/unfetch/-/unfetch-4.1.0.tgz#6ec2dd0de887e58a4dee83a050ded80ffc4137db" @@ -12024,6 +11980,11 @@ watchpack@2.1.1: glob-to-regexp "^0.4.1" graceful-fs "^4.1.2" +webidl-conversions@^3.0.0: + version "3.0.1" + resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" + integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= + webidl-conversions@^4.0.2: version "4.0.2" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" @@ -12034,6 +11995,14 @@ whatwg-fetch@^3.0.0: resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== +whatwg-url@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" + integrity sha1-lmRU6HZUYuN2RNNib2dCzotwll0= + dependencies: + tr46 "~0.0.3" + webidl-conversions "^3.0.0" + whatwg-url@^7.0.0: version "7.1.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" @@ -12125,6 +12094,11 @@ write-file-atomic@^2.3.0: imurmurhash "^0.1.4" signal-exit "^3.0.2" +ws@8.2.3: + version "8.2.3" + resolved "https://registry.yarnpkg.com/ws/-/ws-8.2.3.tgz#63a56456db1b04367d0b721a0b80cae6d8becbba" + integrity sha512-wBuoj1BDpC6ZQ1B7DWQBYVLphPWkm8i9Y0/3YdHjHKHiohOJ1ws+3OccDWtH+PoC9DZD5WOTrJvNbWvjS6JWaA== + ws@^5.2.0: version "5.2.2" resolved 
"https://registry.yarnpkg.com/ws/-/ws-5.2.2.tgz#dffef14866b8e8dc9133582514d1befaf96e980f" @@ -12139,11 +12113,6 @@ ws@^6.0.0: dependencies: async-limiter "~1.0.0" -ws@^7.4.6: - version "7.5.3" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.3.tgz#160835b63c7d97bfab418fc1b8a9fced2ac01a74" - integrity sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg== - ws@~7.4.2: version "7.4.4" resolved "https://registry.yarnpkg.com/ws/-/ws-7.4.4.tgz#383bc9742cb202292c9077ceab6f6047b17f2d59" @@ -12261,13 +12230,6 @@ yauzl@^2.10.0: buffer-crc32 "~0.2.3" fd-slicer "~1.1.0" -yazl@^2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/yazl/-/yazl-2.5.1.tgz#a3d65d3dd659a5b0937850e8609f22fffa2b5c35" - integrity sha512-phENi2PLiHnHb6QBVot+dJnaAZ0xosj7p3fWl+znIjBDlnMI2PsZCJZ306BPTFOaHf5qdDEI8x5qFrSOBN5vrw== - dependencies: - buffer-crc32 "~0.2.3" - yeast@0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/yeast/-/yeast-0.1.2.tgz#008e06d8094320c372dbc2f8ed76a0ca6c8ac419" From 5039371a1db25ef1f8aca8be0db16eb9ef25a5dd Mon Sep 17 00:00:00 2001 From: Matchu Date: Sat, 13 Nov 2021 02:27:24 -0800 Subject: [PATCH 12/17] Simplify the page pool Yeah ok, let's just run one browser instance and one pool. I feel like I observed that, when I killed chromium in prod, pm2 noticed the abrupt loss of a child process and restarted the whole app process? which is rad? so maybe let's just trying relying on that and see how it goes --- pages/api/assetImage.js | 69 ++++++++++++++++------------------------- 1 file changed, 26 insertions(+), 43 deletions(-) diff --git a/pages/api/assetImage.js b/pages/api/assetImage.js index af113fe..6c7d4d9 100644 --- a/pages/api/assetImage.js +++ b/pages/api/assetImage.js @@ -25,52 +25,34 @@ const beeline = require("honeycomb-beeline")({ const puppeteer = require("puppeteer"); const genericPool = require("generic-pool"); +console.info(`Creating new browser instance`); +const browserPromise = puppeteer.launch({ headless: true }); + // We maintain a small pool of browser pages, to manage memory usage. If all // the pages are already in use, a request will wait for one of them to become // available. // -// NOTE: I picked 4 because that seemed to be a good number for avoiding maxing -// out our CPU. I also noticed that maxing CPU seemed to be a weird -// threshold where Chromium processes started behaving poorly after? I'm -// not sure I'm diagnosing that correctly though, and I'm worried about -// the sysadmin implications of not having that locked down, y'know? -function createPagePool() { - console.info(`Creating new browser instance`); - const browserPromise = puppeteer.launch({ headless: true }); +// NOTE: 4 pages is about where our 1-cpu prod environment maxes out. We might +// want to upgrade to the 2-cpu box as we add more pressure though, and +// then maybe we can afford more pages in the pool? 
- const pagePool = genericPool.createPool( - { - create: async () => { - console.debug(`Creating a browser page`); - const browser = await browserPromise; - return await browser.newPage(); - }, - destroy: (page) => { - console.debug(`Closing a browser page`); - page.close(); - }, - validate: (page) => page.browser().isConnected(), +const PAGE_POOL = genericPool.createPool( + { + create: async () => { + console.debug(`Creating a browser page`); + const browser = await browserPromise; + return await browser.newPage(); }, - { min: 4, max: 4, testOnBorrow: true, acquireTimeoutMillis: 15000 } - ); - pagePool.on("factoryCreateError", (error) => console.error(error)); - pagePool.on("factoryDestroyError", (error) => console.error(error)); - pagePool.browserPromise = browserPromise; // we use this during reset - - // If the browser terminates unexpectedly, and this is still the current - // page pool, I guess something went wrong! Reset! - browserPromise.then((browser) => - browser.on("disconnected", () => { - if (PAGE_POOL === pagePool) { - resetPagePool(); - } - }) - ); - - return pagePool; -} - -let PAGE_POOL = createPagePool(); // TODO: simplify.. + destroy: (page) => { + console.debug(`Closing a browser page`); + page.close(); + }, + validate: (page) => page.browser().isConnected(), + }, + { min: 4, max: 4, testOnBorrow: true, acquireTimeoutMillis: 15000 } +); +PAGE_POOL.on("factoryCreateError", (error) => console.error(error)); +PAGE_POOL.on("factoryDestroyError", (error) => console.error(error)); async function handle(req, res) { const { libraryUrl, size } = req.query; @@ -124,8 +106,7 @@ async function loadAndScreenshotImage(libraryUrl, size) { }).toString(); console.debug("Getting browser page"); - const currentPagePool = PAGE_POOL; - const page = await currentPagePool.acquire(); + const page = await PAGE_POOL.acquire(); try { console.debug("Page ready, navigating to: " + assetImagePageUrl.toString()); @@ -163,7 +144,9 @@ async function loadAndScreenshotImage(libraryUrl, size) { ); } } finally { - currentPagePool.release(page); + // To avoid memory leaks, we destroy the page when we're done with it. + // The pool will replace it with a fresh one! + PAGE_POOL.destroy(page); } } From bf8fd8130514f199b376cedd1c717182b4f97591 Mon Sep 17 00:00:00 2001 From: Matchu Date: Mon, 15 Nov 2021 17:42:02 -0800 Subject: [PATCH 13/17] Post a message linking to Metaverse revocation --- src/app/HomePage.js | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/app/HomePage.js b/src/app/HomePage.js index 0f6e60d..1b23406 100644 --- a/src/app/HomePage.js +++ b/src/app/HomePage.js @@ -2,6 +2,7 @@ import React from "react"; import { ClassNames } from "@emotion/react"; import gql from "graphql-tag"; import { + Alert, Box, Button, Center, @@ -57,6 +58,23 @@ function HomePage() { return ( + + + + The Neopets Metaverse team is no longer licensed to use this + software. + {" "} + + More information available here. + {" "} + Thanks for understanding! + + + Date: Tue, 16 Nov 2021 12:04:16 -0800 Subject: [PATCH 14/17] Don't HTTP cache currentUserOwnsThis/wants Hmm, I see, Vercel chews on Cache-Control headers a bit more than I'm used to, so anything marked `scope: PRIVATE` would not be cached at all. But on a more standard server, this was coming out as privately cacheable, but for an actual amount of time (1 hour in the homepage case), because of the `maxAge` on other fields. That meant the device browser cache would hold onto the result, and not always reflect Own/Want changes upon page reload. 
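To make the mechanics concrete, here's a rough sketch of how
Apollo-style per-field hints combine into one header: the overall
policy takes the lowest maxAge across fields and the most restrictive
scope. This is a hypothetical helper, not our actual middleware (our
real logic lives in the apollo-cache-control fork), and the numbers are
just the homepage example:

    // Hypothetical sketch: lowest maxAge wins, and any PRIVATE hint makes
    // the whole response private.
    function combineHints(hints) {
      const maxAge = Math.min(...hints.map((h) => h.maxAge ?? Infinity));
      const scope = hints.some((h) => h.scope === "PRIVATE")
        ? "private"
        : "public";
      return Number.isFinite(maxAge) ? `max-age=${maxAge}, ${scope}` : null;
    }

    // Before: the homepage mixed hour-cacheable fields with PRIVATE own/want
    // fields that had no maxAge of their own:
    combineHints([{ maxAge: 3600 }, { scope: "PRIVATE" }]);
    //   => "max-age=3600, private" (the browser may keep it for an hour!)

    // With a maxAge: 0 hint on the private fields, the lowest maxAge wins:
    combineHints([{ maxAge: 3600 }, { maxAge: 0, scope: "PRIVATE" }]);
    //   => "max-age=0, private" (treated as uncacheable)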
In this change, we set `maxAge: 0`, because we want this field to be
very responsive. I also left `scope: PRIVATE`, even though I think it
doesn't really matter if we're saying the field isn't cacheable anyway,
because I want to set the precedent that `currentUser` fields need it,
to avoid a potential gotcha if someone creates a cacheable
`currentUser` field in the future. (That's important to be careful with
though, because is it even okay for logouts to not clear it? TODO: Can
we clear the private HTTP cache somehow? I guess we would need to
include the current user ID in the URL?)

---
 src/server/types/Item.js | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/server/types/Item.js b/src/server/types/Item.js
index ef6c700..129aea4 100644
--- a/src/server/types/Item.js
+++ b/src/server/types/Item.js
@@ -33,8 +33,8 @@ const typeDefs = gql`
     """
     wakaValueText: String @cacheControl(maxAge: ${oneHour})

-    currentUserOwnsThis: Boolean! @cacheControl(scope: PRIVATE)
-    currentUserWantsThis: Boolean! @cacheControl(scope: PRIVATE)
+    currentUserOwnsThis: Boolean! @cacheControl(maxAge: 0, scope: PRIVATE)
+    currentUserWantsThis: Boolean! @cacheControl(maxAge: 0, scope: PRIVATE)

     """
     How many users are offering/seeking this in their public trade lists.

From cadf7487afa0f107aee2a28d029dd8583a9e1c50 Mon Sep 17 00:00:00 2001
From: Matchu
Date: Tue, 16 Nov 2021 12:12:51 -0800
Subject: [PATCH 15/17] Mark currentUser GQL as non-cacheable

Comments explain most of this! Vercel changed around the Cache-Control
headers a bit to always essentially apply max-age:0 when scope:PRIVATE
was true.

I'm noticing this isn't *fully* working yet though, because we're not
getting a `Cache-Control: private` header, we're just getting no header
at all. Fastly might aggressively choose to cache it anyway with etag
stuff! I bet that's the fault of our caching middleware plugin thing,
so I'll check on that!

---
 src/server/types/User.js | 15 ++++++++++++++-
 1 file changed, 14 insertions(+), 1 deletion(-)

diff --git a/src/server/types/User.js b/src/server/types/User.js
index ac81383..9d40f1e 100644
--- a/src/server/types/User.js
+++ b/src/server/types/User.js
@@ -47,7 +47,20 @@ const typeDefs = gql`
     user(id: ID!): User
     userByName(name: String!): User
     userByEmail(email: String!, supportSecret: String!): User
-    currentUser: User
+
+    """
+    The currently logged-in user.
+    """
+    # Don't allow caching of *anything* nested inside currentUser, because we
+    # want logins/logouts to always reset user data properly.
+    #
+    # TODO: If we wanted to privately cache a currentUser field, we could
+    #       remove the maxAge condition here, and attach user ID to the GraphQL
+    #       request URL when sending auth headers. That way, changing user
+    #       would send different requests and avoid the old cache hits. (But we
+    #       should leave the scope, to emphasize that the CDN cache shouldn't
+    #       cache it.)
+    currentUser: User @cacheControl(maxAge: 0, scope: PRIVATE)
   }
 `;

From b73e2e1123f560c8e92b0a7029c86a5a15d5caa6 Mon Sep 17 00:00:00 2001
From: Matchu
Date: Tue, 16 Nov 2021 12:34:11 -0800
Subject: [PATCH 16/17] Send cache-control header for max-age=0, private

Some queries, like on `/your-outfits`, had the cache hint `max-age=0,
private` set. In this case, our cache code sent no cache header, on the
assumption that no header would result in no caching. This was true on
Vercel, but isn't true on our new Fastly setup! (Which makes sense,
Vercel was a bit more aggressive here I think.)
This was causing an arbitrary user's data to be cached by Fastly as the
result for `/your-outfits`. (We found this bug before launching the
Fastly cache though, don't worry! No actual user data leaked!)

To fix this, we made a change to our HTTP header code, which is forked
from Apollo's stuff. Now, as of this change, the `/your-outfits` query
correctly sends a header of `Cache-Control: max-age=0, private`, which
directs Fastly not to cache the result.

---
 src/server/lib/apollo-cache-control-fork.ts | 25 +++++++++++++++------
 1 file changed, 18 insertions(+), 7 deletions(-)

diff --git a/src/server/lib/apollo-cache-control-fork.ts b/src/server/lib/apollo-cache-control-fork.ts
index 33329c3..e9e6747 100644
--- a/src/server/lib/apollo-cache-control-fork.ts
+++ b/src/server/lib/apollo-cache-control-fork.ts
@@ -299,13 +299,24 @@ function computeOverallCachePolicy(

   // If maxAge is 0, then we consider it uncacheable so it doesn't matter what
   // the scope was.
-  return lowestMaxAge && lowestMaxAgePlusSWR // FORK
-    ? {
-        maxAge: lowestMaxAge,
-        staleWhileRevalidate: lowestMaxAgePlusSWR - lowestMaxAge, // FORK
-        scope,
-      }
-    : undefined;
+  if (lowestMaxAge && lowestMaxAgePlusSWR) {
+    return {
+      maxAge: lowestMaxAge,
+      staleWhileRevalidate: lowestMaxAgePlusSWR - lowestMaxAge, // FORK
+      scope,
+    };
+  } else if (scope !== CacheScope.Public) {
+    // TODO: It'd probably be a bit better to leave the ages unspecified if
+    //       the hints didn't specify them, but I don't wanna mess with the
+    //       header-writing code right now.
+    return {
+      maxAge: 0,
+      staleWhileRevalidate: 0,
+      scope,
+    };
+  } else {
+    return undefined;
+  }
 }

 function addHint(

From b941dce9fa5ca2160de8d8718c442b66132eef87 Mon Sep 17 00:00:00 2001
From: Matchu
Date: Tue, 16 Nov 2021 13:09:45 -0800
Subject: [PATCH 17/17] Private cache headers in item search

If the user is searching for things they own or want, make sure we
don't CDN cache it!

For many queries, this is taken care of in practice, because the search
result includes `currentUserOwnsThis` and `currentUserWantsThis`. But I
noticed in testing that, if the search result has no items (so those
fields aren't actually part of the _response_), the private header
doesn't get set.

So this mainly makes sure we don't accidentally cache an empty result
from a user who didn't have anything they owned/wanted yet!
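As a quick sanity check, something like this should show a private
header even for a zero-item result. (Hypothetical snippet: the endpoint
URL and the search query/argument shapes here are illustrative, not our
exact schema.)

    // Hypothetical spot check: search with an owns/wants filter that matches
    // nothing, then confirm the response still carries a private
    // Cache-Control header.
    const fetch = require("node-fetch");

    async function checkSearchCacheHeader() {
      const res = await fetch("http://localhost:3000/api/graphql", {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify({
          query: `
            query {
              itemSearch(query: "zzz-no-match", currentUserOwnsOrWants: OWNS) {
                items { id }
              }
            }
          `,
        }),
      });
      console.log(res.headers.get("cache-control"));
      // Expected after this change: a private, max-age=0 policy, even though
      // no currentUserOwnsThis/currentUserWantsThis fields appear in the
      // response body.
    }

    checkSearchCacheHeader();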
--- src/server/types/Item.js | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/src/server/types/Item.js b/src/server/types/Item.js index 129aea4..5656d14 100644 --- a/src/server/types/Item.js +++ b/src/server/types/Item.js @@ -656,8 +656,13 @@ const resolvers = { itemSearchItemsLoader, petTypeBySpeciesAndColorLoader, currentUserId, - } + }, + { cacheControl } ) => { + if (currentUserOwnsOrWants != null) { + cacheControl.setCacheHint({ scope: "PRIVATE" }); + } + let bodyId = null; if (fitsPet) { const petType = await petTypeBySpeciesAndColorLoader.load({ @@ -790,8 +795,12 @@ const resolvers = { numTotalItems: async ( { query, bodyId, itemKind, currentUserOwnsOrWants, zoneIds }, { offset, limit }, - { currentUserId, itemSearchNumTotalItemsLoader } + { currentUserId, itemSearchNumTotalItemsLoader }, + { cacheControl } ) => { + if (currentUserOwnsOrWants != null) { + cacheControl.setCacheHint({ scope: "PRIVATE" }); + } const numTotalItems = await itemSearchNumTotalItemsLoader.load({ query: query.trim(), bodyId, @@ -807,8 +816,12 @@ const resolvers = { items: async ( { query, bodyId, itemKind, currentUserOwnsOrWants, zoneIds }, { offset, limit }, - { currentUserId, itemSearchItemsLoader } + { currentUserId, itemSearchItemsLoader }, + { cacheControl } ) => { + if (currentUserOwnsOrWants != null) { + cacheControl.setCacheHint({ scope: "PRIVATE" }); + } const items = await itemSearchItemsLoader.load({ query: query.trim(), bodyId,