From 4977f1ee54b1d5bd50128bb5f8d74ad12cce6b7b Mon Sep 17 00:00:00 2001
From: Matchu
Date: Mon, 17 Aug 2020 18:49:37 -0700
Subject: [PATCH] Revert "cache zone data"
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This reverts commit 0f7ab9d10e5f805b284188d1a7c093f9a21dfbe8.

The Production Vercel deploys don't seem to like how I did this build
trick, even though the Preview deploys seem fine with it 🤔 Reverting
for now, sent a message to Vercel support.
---
 package.json                                 |  5 +-
 scripts/build-cached-data.js                 | 50 --------------------
 src/server/index.js                          | 29 ++++++------
 src/server/loaders.js                        | 38 +++++++++++++++
 src/server/query-tests/Item.test.js          | 16 +++++++
 src/server/query-tests/PetAppearance.test.js | 22 +++++++++
 6 files changed, 93 insertions(+), 67 deletions(-)
 delete mode 100644 scripts/build-cached-data.js

diff --git a/package.json b/package.json
index cd0974a..ae81ea5 100644
--- a/package.json
+++ b/package.json
@@ -32,9 +32,8 @@
     "react-transition-group": "^4.3.0"
   },
   "scripts": {
-    "start": "yarn build-cached-data && react-app-rewired start",
-    "build-cached-data": "node -r dotenv/config scripts/build-cached-data.js",
-    "build": "react-app-rewired build && yarn build-cached-data",
+    "start": "react-app-rewired start",
+    "build": "react-app-rewired build",
     "test": "react-app-rewired test --env=jsdom",
     "eject": "react-scripts eject",
     "setup-mysql-user": "mysql -h impress.openneo.net -u matchu -p < setup-mysql-user.sql",
diff --git a/scripts/build-cached-data.js b/scripts/build-cached-data.js
deleted file mode 100644
index 6d78c56..0000000
--- a/scripts/build-cached-data.js
+++ /dev/null
@@ -1,50 +0,0 @@
-// We run this on build to cache some stable database tables on the server!
-const fs = require("fs").promises;
-const path = require("path");
-
-const connectToDb = require("../src/server/db");
-const { normalizeRow } = require("../src/server/util");
-
-const cachedDataPath = path.join(__dirname, "..", "build", "cached-data");
-
-async function buildZonesCache(db) {
-  const [rows] = await db.query(`SELECT * FROM zones;`);
-  const entities = rows.map(normalizeRow);
-
-  const filePath = path.join(cachedDataPath, "zones.json");
-  fs.writeFile(filePath, JSON.stringify(entities, null, 4), "utf8");
-
-  console.log(`📚 Wrote zones to ${path.relative(process.cwd(), filePath)}`);
-}
-
-async function buildZoneTranslationsCache(db) {
-  const [rows] = await db.query(
-    `SELECT * FROM zone_translations WHERE locale = "en";`
-  );
-  const entities = rows.map(normalizeRow);
-
-  const filePath = path.join(cachedDataPath, "zone_translations.json");
-  fs.writeFile(filePath, JSON.stringify(entities, null, 4), "utf8");
-
-  console.log(
-    `📚 Wrote zone translations to ${path.relative(process.cwd(), filePath)}`
-  );
-}
-
-async function main() {
-  const db = await connectToDb();
-  await fs.mkdir(cachedDataPath, { recursive: true });
-
-  try {
-    await Promise.all([buildZonesCache(db), buildZoneTranslationsCache(db)]);
-  } catch (e) {
-    db.close();
-    throw e;
-  }
-  db.close();
-}
-
-main().catch((e) => {
-  console.error(e);
-  process.exitCode = 1;
-});
diff --git a/src/server/index.js b/src/server/index.js
index 2b1de77..e11a15c 100644
--- a/src/server/index.js
+++ b/src/server/index.js
@@ -1,5 +1,5 @@
 const { gql, makeExecutableSchema } = require("apollo-server");
-const { addBeelineToSchema, beelinePlugin } = require("./lib/beeline-graphql");
+import { addBeelineToSchema, beelinePlugin } from "./lib/beeline-graphql";
 const connectToDb = require("./db");
 const buildLoaders = require("./loaders");
 
@@ -12,15 +12,6 @@ const {
   getGenderPresentation,
 } = require("./util");
 
-// These are caches of stable database tables. They're built in the
-// `build-cached-data` script, at build time and dev-start time.
-const zoneRows = require("../../build/cached-data/zones.json");
-const zones = new Map(zoneRows.map((z) => [z.id, z]));
-const zoneTranslationRows = require("../../build/cached-data/zone_translations.json");
-const zoneTranslations = new Map(
-  zoneTranslationRows.map((zt) => [`${zt.zoneId}-${zt.locale}`, zt])
-);
-
 const typeDefs = gql`
   directive @cacheControl(maxAge: Int!) on FIELD_DEFINITION | OBJECT
 
@@ -353,9 +344,10 @@ const resolvers = {
       const layer = await swfAssetLoader.load(id);
       return layer.bodyId;
     },
-    zone: async ({ id }, _, { swfAssetLoader }) => {
+    zone: async ({ id }, _, { swfAssetLoader, zoneLoader }) => {
       const layer = await swfAssetLoader.load(id);
-      return { id: layer.zoneId };
+      const zone = await zoneLoader.load(layer.zoneId);
+      return zone;
     },
     swfUrl: async ({ id }, _, { swfAssetLoader }) => {
       const layer = await swfAssetLoader.load(id);
@@ -461,8 +453,17 @@ const resolvers = {
     },
   },
   Zone: {
-    depth: ({ id }) => zones.get(id).depth,
-    label: ({ id }) => zoneTranslations.get(`${id}-en`).label,
+    depth: async ({ id }, _, { zoneLoader }) => {
+      // TODO: Should we extend this loader-in-field pattern elsewhere? I like
+      // that we avoid the fetch in cases where we only want the zone ID,
+      // but it adds complexity 🤔
+      const zone = await zoneLoader.load(id);
+      return zone.depth;
+    },
+    label: async ({ id }, _, { zoneTranslationLoader }) => {
+      const zoneTranslation = await zoneTranslationLoader.load(id);
+      return zoneTranslation.label;
+    },
   },
   Color: {
     name: async ({ id }, _, { colorTranslationLoader }) => {
diff --git a/src/server/loaders.js b/src/server/loaders.js
index 03da280..482dcb7 100644
--- a/src/server/loaders.js
+++ b/src/server/loaders.js
@@ -370,6 +370,42 @@ const buildPetStatesForPetTypeLoader = (db, loaders) =>
     );
   });
 
+const buildZoneLoader = (db) =>
+  new DataLoader(async (ids) => {
+    const qs = ids.map((_) => "?").join(",");
+    const [rows, _] = await db.execute(
+      `SELECT * FROM zones WHERE id IN (${qs})`,
+      ids
+    );
+
+    const entities = rows.map(normalizeRow);
+    const entitiesById = new Map(entities.map((e) => [e.id, e]));
+
+    return ids.map(
+      (id) =>
+        entitiesById.get(String(id)) ||
+        new Error(`could not find zone with ID: ${id}`)
+    );
+  });
+
+const buildZoneTranslationLoader = (db) =>
+  new DataLoader(async (zoneIds) => {
+    const qs = zoneIds.map((_) => "?").join(",");
+    const [rows, _] = await db.execute(
+      `SELECT * FROM zone_translations WHERE zone_id IN (${qs}) AND locale = "en"`,
+      zoneIds
+    );
+
+    const entities = rows.map(normalizeRow);
+    const entitiesByZoneId = new Map(entities.map((e) => [e.zoneId, e]));
+
+    return zoneIds.map(
+      (zoneId) =>
+        entitiesByZoneId.get(String(zoneId)) ||
+        new Error(`could not find translation for zone ${zoneId}`)
+    );
+  });
+
 function buildLoaders(db) {
   const loaders = {};
   loaders.loadAllSpecies = loadAllSpecies(db);
@@ -399,6 +435,8 @@ function buildLoaders(db) {
     loaders
   );
   loaders.speciesTranslationLoader = buildSpeciesTranslationLoader(db);
+  loaders.zoneLoader = buildZoneLoader(db);
+  loaders.zoneTranslationLoader = buildZoneTranslationLoader(db);
 
   return loaders;
 }
diff --git a/src/server/query-tests/Item.test.js b/src/server/query-tests/Item.test.js
index e51d6ff..774b0b9 100644
--- a/src/server/query-tests/Item.test.js
+++ b/src/server/query-tests/Item.test.js
@@ -132,6 +132,22 @@ describe("Item", () => {
            "180",
          ],
        ],
+       Array [
+         "SELECT * FROM zones WHERE id IN (?,?,?)",
+         Array [
+           "26",
+           "40",
+           "3",
+         ],
+       ],
+       Array [
+         "SELECT * FROM zone_translations WHERE zone_id IN (?,?,?) AND locale = \\"en\\"",
+         Array [
+           "26",
+           "40",
+           "3",
+         ],
+       ],
      ]
    `);
  });
diff --git a/src/server/query-tests/PetAppearance.test.js b/src/server/query-tests/PetAppearance.test.js
index 32d3cb6..85ec4f0 100644
--- a/src/server/query-tests/PetAppearance.test.js
+++ b/src/server/query-tests/PetAppearance.test.js
@@ -82,6 +82,17 @@ describe("PetAppearance", () => {
            "75",
          ],
        ],
+       Array [
+         "SELECT * FROM zones WHERE id IN (?,?,?,?,?,?)",
+         Array [
+           "15",
+           "5",
+           "37",
+           "30",
+           "33",
+           "34",
+         ],
+       ],
      ]
    `);
  });
@@ -168,6 +179,17 @@ describe("PetAppearance", () => {
            "75",
          ],
        ],
+       Array [
+         "SELECT * FROM zones WHERE id IN (?,?,?,?,?,?)",
+         Array [
+           "15",
+           "5",
+           "37",
+           "30",
+           "33",
+           "34",
+         ],
+       ],
      ]
    `);
  });