Merge branch 'modeling' into main

Emi Matchu 2020-10-06 07:06:30 -07:00
commit 900d102594
16 changed files with 2643 additions and 406 deletions


@ -45,7 +45,7 @@
"mysql-dev": "mysql --host=localhost --user=impress_2020_dev --password=impress_2020_dev --database=impress_2020_dev",
"mysql-admin": "mysql --host=impress.openneo.net --user=matchu --password --database=openneo_impress",
"mysqldump": "mysqldump --host=impress.openneo.net --user=$(dotenv -p IMPRESS_MYSQL_USER) --password=$(dotenv -p IMPRESS_MYSQL_PASSWORD) --column-statistics=0",
"download-mysql-schema": "yarn --silent mysqldump openneo_impress species species_translations colors color_translations > scripts/setup-mysql-dev-constants.sql && yarn --silent mysqldump --no-data openneo_impress items item_translations > scripts/setup-mysql-dev-schema.sql",
"download-mysql-schema": "yarn --silent mysqldump --no-data openneo_impress items item_translations modeling_logs parents_swf_assets pet_types pet_states swf_assets | sed 's/ AUTO_INCREMENT=[0-9]*//g' > scripts/setup-mysql-dev-schema.sql && yarn --silent mysqldump openneo_impress species species_translations colors color_translations zones zone_translations > scripts/setup-mysql-dev-constants.sql",
"setup-mysql": "yarn mysql-admin < scripts/setup-mysql.sql",
"setup-mysql-dev": "yarn mysql-dev < scripts/setup-mysql-dev-constants.sql && yarn mysql-dev < scripts/setup-mysql-dev-schema.sql",
"build-cached-data": "node -r dotenv/config scripts/build-cached-data.js",

File diff suppressed because one or more lines are too long


@ -41,7 +41,7 @@ CREATE TABLE `items` (
`modeling_status_hint` enum('done','glitchy') COLLATE utf8_unicode_ci DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `objects_last_spidered` (`last_spidered`)
) ENGINE=InnoDB AUTO_INCREMENT=81718 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
--
@ -65,7 +65,113 @@ CREATE TABLE `item_translations` (
KEY `index_item_translations_on_locale` (`locale`),
KEY `index_item_translations_name` (`name`),
KEY `index_item_translations_on_item_id_and_locale` (`item_id`,`locale`)
) ENGINE=InnoDB AUTO_INCREMENT=215758 DEFAULT CHARSET=latin1;
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `modeling_logs`
--
DROP TABLE IF EXISTS `modeling_logs`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `modeling_logs` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`created_at` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
`log_json` text NOT NULL,
`pet_name` varchar(128) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=latin1;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `parents_swf_assets`
--
DROP TABLE IF EXISTS `parents_swf_assets`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `parents_swf_assets` (
`parent_id` mediumint(9) NOT NULL,
`swf_asset_id` mediumint(9) NOT NULL,
`id` int(11) NOT NULL AUTO_INCREMENT,
`parent_type` varchar(8) COLLATE utf8_unicode_ci NOT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `unique_parents_swf_assets` (`parent_id`,`swf_asset_id`),
KEY `parents_swf_assets_swf_asset_id` (`swf_asset_id`),
KEY `index_parents_swf_assets_on_parent_id_and_parent_type` (`parent_id`,`parent_type`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `pet_types`
--
DROP TABLE IF EXISTS `pet_types`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `pet_types` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`color_id` tinyint(4) NOT NULL,
`species_id` tinyint(4) NOT NULL,
`created_at` datetime NOT NULL,
`body_id` smallint(6) NOT NULL,
`image_hash` varchar(8) COLLATE utf8_unicode_ci DEFAULT NULL,
`basic_image_hash` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
PRIMARY KEY (`id`),
UNIQUE KEY `pet_types_species_color` (`species_id`,`color_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `pet_states`
--
DROP TABLE IF EXISTS `pet_states`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `pet_states` (
`id` int(11) NOT NULL AUTO_INCREMENT,
`pet_type_id` mediumint(9) NOT NULL,
`swf_asset_ids` text COLLATE utf8_unicode_ci NOT NULL,
`female` tinyint(1) DEFAULT NULL,
`mood_id` int(11) DEFAULT NULL,
`unconverted` tinyint(1) DEFAULT NULL,
`labeled` tinyint(1) NOT NULL DEFAULT '0',
`glitched` tinyint(1) NOT NULL DEFAULT '0',
`artist_neopets_username` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL,
PRIMARY KEY (`id`),
KEY `pet_states_pet_type_id` (`pet_type_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
--
-- Table structure for table `swf_assets`
--
DROP TABLE IF EXISTS `swf_assets`;
/*!40101 SET @saved_cs_client = @@character_set_client */;
/*!50503 SET character_set_client = utf8mb4 */;
CREATE TABLE `swf_assets` (
`type` varchar(7) COLLATE utf8_unicode_ci NOT NULL,
`remote_id` mediumint(9) NOT NULL,
`url` mediumtext COLLATE utf8_unicode_ci NOT NULL,
`zone_id` tinyint(4) NOT NULL,
`zones_restrict` text COLLATE utf8_unicode_ci NOT NULL,
`created_at` datetime NOT NULL,
`body_id` smallint(6) NOT NULL,
`has_image` tinyint(1) NOT NULL DEFAULT '0',
`image_requested` tinyint(1) NOT NULL DEFAULT '0',
`reported_broken_at` datetime DEFAULT NULL,
`converted_at` datetime DEFAULT NULL,
`id` int(11) NOT NULL AUTO_INCREMENT,
`image_manual` tinyint(1) NOT NULL DEFAULT '0',
`manifest` text COLLATE utf8_unicode_ci,
PRIMARY KEY (`id`),
KEY `swf_assets_body_id_and_object_id` (`body_id`),
KEY `idx_swf_assets_zone_id` (`zone_id`),
KEY `swf_assets_type_and_id` (`type`,`remote_id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci;
/*!40101 SET character_set_client = @saved_cs_client */;
/*!40103 SET TIME_ZONE=@OLD_TIME_ZONE */;
@ -77,4 +183,4 @@ CREATE TABLE `item_translations` (
/*!40101 SET COLLATION_CONNECTION=@OLD_COLLATION_CONNECTION */;
/*!40111 SET SQL_NOTES=@OLD_SQL_NOTES */;
-- Dump completed on 2020-09-18 6:27:15
-- Dump completed on 2020-10-06 6:59:49


@ -5,6 +5,7 @@ GRANT SELECT ON colors TO impress2020;
GRANT SELECT ON color_translations TO impress2020;
GRANT SELECT ON items TO impress2020;
GRANT SELECT ON item_translations TO impress2020;
GRANT SELECT ON modeling_logs TO impress2020;
GRANT SELECT ON parents_swf_assets TO impress2020;
GRANT SELECT ON pet_types TO impress2020;
GRANT SELECT ON pet_states TO impress2020;
@ -14,11 +15,14 @@ GRANT SELECT ON swf_assets TO impress2020;
GRANT SELECT ON zones TO impress2020;
GRANT SELECT ON zone_translations TO impress2020;
-- Public data tables: write
GRANT UPDATE ON items TO impress2020;
GRANT DELETE ON parents_swf_assets TO impress2020;
GRANT UPDATE ON pet_states TO impress2020;
GRANT UPDATE ON swf_assets TO impress2020;
-- Public data tables: write. Used in modeling and support tools.
GRANT INSERT, UPDATE ON items TO impress2020;
GRANT INSERT, UPDATE ON item_translations TO impress2020;
GRANT INSERT, UPDATE, DELETE ON parents_swf_assets TO impress2020;
GRANT INSERT, UPDATE ON pet_types TO impress2020;
GRANT INSERT, UPDATE ON pet_states TO impress2020;
GRANT INSERT, UPDATE ON swf_assets TO impress2020;
GRANT INSERT ON modeling_logs TO impress2020;
-- User data tables
GRANT SELECT ON closet_hangers TO impress2020;


@ -171,10 +171,15 @@ function SpeciesColorPicker({
// think this matches users' mental hierarchy of species -> color: showing
// supported colors for a species makes sense, but the other way around feels
// confusing and restrictive.)
//
// Also, if a color is provided that wouldn't normally be visible, we still
// show it. This can happen when someone models a new species/color combo for
// the first time - the boxes will still be red as if it were invalid, but
// this still smooths out the experience a lot.
let visibleColors = allColors;
if (stateMustAlwaysBeValid && valids && speciesId) {
visibleColors = visibleColors.filter(
(c) => getValidPoses(valids, speciesId, c.id).size > 0
(c) => getValidPoses(valids, speciesId, c.id).size > 0 || c.id === colorId
);
}


@ -1,18 +1,40 @@
const mysql = require("mysql2");
let globalDb;
let globalDbs = new Map();
// We usually run against the production database, even in local testing,
// to easily test against real data. (Not a wise general practice, but fine
// for this low-stakes project and small dev team with mostly read-only
// operations!)
//
// But you can also specify `DB_ENV=development` to use a local database,
// which is especially helpful for end-to-end modeling testing.
const defaultOptions =
process.env["DB_ENV"] === "development"
? {
host: "localhost",
user: "impress_2020_dev",
password: "impress_2020_dev",
database: "impress_2020_dev",
}
: {
host: "impress.openneo.net",
user: process.env["IMPRESS_MYSQL_USER"],
password: process.env["IMPRESS_MYSQL_PASSWORD"],
database: "openneo_impress",
};
async function connectToDb({
host = "impress.openneo.net",
user = process.env["IMPRESS_MYSQL_USER"],
password = process.env["IMPRESS_MYSQL_PASSWORD"],
database = "openneo_impress",
host = defaultOptions.host,
user = defaultOptions.user,
password = defaultOptions.password,
database = defaultOptions.database,
} = {}) {
if (globalDb) {
return globalDb;
if (globalDbs.has(host)) {
return globalDbs.get(host);
}
globalDb = mysql
const db = mysql
.createConnection({
host,
user,
@ -24,7 +46,9 @@ async function connectToDb({
// for compatibility with Honeycomb's automatic tracing.
.promise();
return globalDb;
globalDbs.set(host, db);
return db;
}
module.exports = connectToDb;
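
For context, a minimal usage sketch of the connector above (a hypothetical caller module alongside db.js, not part of this commit): with DB_ENV=development set, connectToDb() resolves against the local dev database; otherwise it uses the production defaults, and repeat calls for the same host reuse the connection cached in globalDbs.

const connectToDb = require("./db");

async function countItems() {
  const db = await connectToDb();
  // mysql2's promise API resolves to [rows, fields].
  const [rows] = await db.execute("SELECT COUNT(*) AS count FROM items");
  return rows[0].count;
}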


@ -40,6 +40,7 @@ const schema = makeExecutableSchema(
require("./types/Item"),
require("./types/MutationsForSupport"),
require("./types/Outfit"),
require("./types/Pet"),
require("./types/PetAppearance"),
require("./types/User"),
require("./types/Zone"),


@ -324,7 +324,7 @@ const buildItemBodiesWithAppearanceDataLoader = (db) =>
return itemIds.map((itemId) => entities.filter((e) => e.itemId === itemId));
});
const buildPetTypeLoader = (db) =>
const buildPetTypeLoader = (db, loaders) =>
new DataLoader(async (petTypeIds) => {
const qs = petTypeIds.map((_) => "?").join(",");
const [rows, _] = await db.execute(
@ -334,6 +334,13 @@ const buildPetTypeLoader = (db) =>
const entities = rows.map(normalizeRow);
for (const petType of entities) {
loaders.petTypeBySpeciesAndColorLoader.prime(
{ speciesId: petType.speciesId, colorId: petType.colorId },
petType
);
}
return petTypeIds.map((petTypeId) =>
entities.find((e) => e.id === petTypeId)
);
@ -385,6 +392,29 @@ const buildSwfAssetLoader = (db) =>
);
});
const buildSwfAssetByRemoteIdLoader = (db) =>
new DataLoader(
async (typeAndRemoteIdPairs) => {
const qs = typeAndRemoteIdPairs
.map((_) => "(type = ? AND remote_id = ?)")
.join(" OR ");
const values = typeAndRemoteIdPairs
.map(({ type, remoteId }) => [type, remoteId])
.flat();
const [rows, _] = await db.execute(
`SELECT * FROM swf_assets WHERE ${qs}`,
values
);
const entities = rows.map(normalizeRow);
return typeAndRemoteIdPairs.map(({ type, remoteId }) =>
entities.find((e) => e.type === type && e.remoteId === remoteId)
);
},
{ cacheKeyFn: ({ type, remoteId }) => `${type},${remoteId}` }
);
const buildItemSwfAssetLoader = (db, loaders) =>
new DataLoader(
async (itemAndBodyPairs) => {
@ -554,6 +584,37 @@ const buildCanonicalPetStateForBodyLoader = (db, loaders) =>
);
});
const buildPetStateByPetTypeAndAssetsLoader = (db, loaders) =>
new DataLoader(
async (petTypeIdAndAssetIdsPairs) => {
const qs = petTypeIdAndAssetIdsPairs
.map((_) => "(pet_type_id = ? AND swf_asset_ids = ?)")
.join(" OR ");
const values = petTypeIdAndAssetIdsPairs
.map(({ petTypeId, swfAssetIds }) => [petTypeId, swfAssetIds])
.flat();
const [rows, _] = await db.execute(
`SELECT * FROM pet_states WHERE ${qs}`,
values
);
const entities = rows.map(normalizeRow);
for (const petState of entities) {
loaders.petStateLoader.prime(petState.id, petState);
}
return petTypeIdAndAssetIdsPairs.map(({ petTypeId, swfAssetIds }) =>
entities.find(
(e) => e.petTypeId === petTypeId && e.swfAssetIds === swfAssetIds
)
);
},
{
cacheKeyFn: ({ petTypeId, swfAssetIds }) => `${petTypeId}-${swfAssetIds}`,
}
);
const buildUserLoader = (db) =>
new DataLoader(async (ids) => {
const qs = ids.map((_) => "?").join(",");
@ -671,12 +732,13 @@ function buildLoaders(db) {
loaders.itemBodiesWithAppearanceDataLoader = buildItemBodiesWithAppearanceDataLoader(
db
);
loaders.petTypeLoader = buildPetTypeLoader(db);
loaders.petTypeLoader = buildPetTypeLoader(db, loaders);
loaders.petTypeBySpeciesAndColorLoader = buildPetTypeBySpeciesAndColorLoader(
db,
loaders
);
loaders.swfAssetLoader = buildSwfAssetLoader(db);
loaders.swfAssetByRemoteIdLoader = buildSwfAssetByRemoteIdLoader(db);
loaders.itemSwfAssetLoader = buildItemSwfAssetLoader(db, loaders);
loaders.petSwfAssetLoader = buildPetSwfAssetLoader(db, loaders);
loaders.outfitLoader = buildOutfitLoader(db);
@ -692,6 +754,10 @@ function buildLoaders(db) {
db,
loaders
);
loaders.petStateByPetTypeAndAssetsLoader = buildPetStateByPetTypeAndAssetsLoader(
db,
loaders
);
loaders.speciesLoader = buildSpeciesLoader(db);
loaders.speciesTranslationLoader = buildSpeciesTranslationLoader(db);
loaders.userLoader = buildUserLoader(db);

src/server/modeling.js (new file, 476 lines)

@ -0,0 +1,476 @@
/**
* saveModelingData takes data about a pet (generated by `loadCustomPetData`
* and `loadPetMetaData`), and a GQL-y context object with a `db` and some
* loaders; and updates the database to match.
*
* These days, most calls to this function are a no-op: we detect that the
* database already contains this data, and end up doing no writes. But when a
* pet contains data we haven't seen before, we write!
*/
async function saveModelingData(customPetData, petMetaData, context) {
const modelingLogs = [];
const addToModelingLogs = (entry) => {
console.log("[Modeling] " + JSON.stringify(entry, null, 4));
modelingLogs.push(entry);
};
context = { ...context, addToModelingLogs };
await Promise.all([
savePetTypeAndStateModelingData(customPetData, petMetaData, context),
saveItemModelingData(customPetData, context),
saveSwfAssetModelingData(customPetData, context),
]);
if (modelingLogs.length > 0) {
const { db } = context;
await db.execute(
`INSERT INTO modeling_logs (log_json, pet_name) VALUES (?, ?)`,
[JSON.stringify(modelingLogs, null, 4), petMetaData.name]
);
}
}
async function savePetTypeAndStateModelingData(
customPetData,
petMetaData,
context
) {
const {
db,
petTypeBySpeciesAndColorLoader,
petStateByPetTypeAndAssetsLoader,
swfAssetByRemoteIdLoader,
addToModelingLogs,
} = context;
const incomingPetType = {
colorId: String(customPetData.custom_pet.color_id),
speciesId: String(customPetData.custom_pet.species_id),
bodyId: String(customPetData.custom_pet.body_id),
// NOTE: I skip the image_hash stuff here... on Rails, we set a hash on
// creation, and may or may not bother to update it, I forget? But
// here I don't want to bother with an update. We could maybe do
// a merge function to make it on create only, but eh, I don't
// care enough ^_^`
};
await syncToDb(db, [incomingPetType], {
loader: petTypeBySpeciesAndColorLoader,
tableName: "pet_types",
buildLoaderKey: (row) => ({
speciesId: row.speciesId,
colorId: row.colorId,
}),
buildUpdateCondition: (row) => [
`species_id = ? AND color_id = ?`,
row.speciesId,
row.colorId,
],
includeUpdatedAt: false,
addToModelingLogs,
});
// NOTE: This pet type should have been looked up when syncing pet type, so
// this should be cached.
const petType = await petTypeBySpeciesAndColorLoader.load({
colorId: String(customPetData.custom_pet.color_id),
speciesId: String(customPetData.custom_pet.species_id),
});
const biologyAssets = Object.values(customPetData.custom_pet.biology_by_zone);
const incomingPetState = {
petTypeId: petType.id,
swfAssetIds: biologyAssets
.map((row) => row.part_id)
.sort((a, b) => Number(a) - Number(b))
.join(","),
female: petMetaData.gender === 2 ? 1 : 0, // sorry for this column name :/
moodId: String(petMetaData.mood),
unconverted: biologyAssets.length === 1 ? 1 : 0,
labeled: 1,
};
await syncToDb(db, [incomingPetState], {
loader: petStateByPetTypeAndAssetsLoader,
tableName: "pet_states",
buildLoaderKey: (row) => ({
petTypeId: row.petTypeId,
swfAssetIds: row.swfAssetIds,
}),
buildUpdateCondition: (row) => [
`pet_type_id = ? AND swf_asset_ids = ?`,
row.petTypeId,
row.swfAssetIds,
],
includeCreatedAt: false,
includeUpdatedAt: false,
// For pet states, syncing assets is easy: a new set of assets counts as a
// new state, so, whatever! Just insert the relationships when inserting
// the pet state, and ignore them any other time.
afterInsert: async () => {
// We need to load from the db to get the actual inserted IDs. Not lovely
// for perf, but this only happens when we're modeling genuinely new data,
// so that's fine!
let [petState, swfAssets] = await Promise.all([
petStateByPetTypeAndAssetsLoader.load({
petTypeId: incomingPetState.petTypeId,
swfAssetIds: incomingPetState.swfAssetIds,
}),
swfAssetByRemoteIdLoader.loadMany(
biologyAssets.map((asset) => ({
type: "biology",
remoteId: String(asset.part_id),
}))
),
]);
swfAssets = swfAssets.filter((sa) => sa != null);
if (swfAssets.length === 0) {
throw new Error(`pet state ${petState.id} has no saved assets?`);
}
const relationshipInserts = swfAssets.map((sa) => ({
parentType: "PetState",
parentId: petState.id,
swfAssetId: sa.id,
}));
const qs = swfAssets.map((_) => `(?, ?, ?)`).join(", ");
const values = relationshipInserts
.map(({ parentType, parentId, swfAssetId }) => [
parentType,
parentId,
swfAssetId,
])
.flat();
await db.execute(
`INSERT INTO parents_swf_assets (parent_type, parent_id, swf_asset_id)
VALUES ${qs};`,
values
);
addToModelingLogs({
tableName: "parents_swf_assets",
inserts: relationshipInserts,
updates: [],
});
},
addToModelingLogs,
});
}
async function saveItemModelingData(customPetData, context) {
const { db, itemLoader, itemTranslationLoader, addToModelingLogs } = context;
const objectInfos = Object.values(customPetData.object_info_registry);
const incomingItems = objectInfos.map((objectInfo) => ({
id: String(objectInfo.obj_info_id),
zonesRestrict: objectInfo.zones_restrict,
thumbnailUrl: objectInfo.thumbnail_url,
category: objectInfo.category,
type: objectInfo.type,
rarityIndex: objectInfo.rarity_index,
price: objectInfo.price,
weightLbs: objectInfo.weight_lbs,
}));
const incomingItemTranslations = objectInfos.map((objectInfo) => ({
itemId: String(objectInfo.obj_info_id),
locale: "en",
name: objectInfo.name,
description: objectInfo.description,
rarity: objectInfo.rarity,
}));
await Promise.all([
syncToDb(db, incomingItems, {
loader: itemLoader,
tableName: "items",
buildLoaderKey: (row) => row.id,
buildUpdateCondition: (row) => [`id = ?`, row.id],
addToModelingLogs,
}),
syncToDb(db, incomingItemTranslations, {
loader: itemTranslationLoader,
tableName: "item_translations",
buildLoaderKey: (row) => row.itemId,
buildUpdateCondition: (row) => [
`item_id = ? AND locale = "en"`,
row.itemId,
],
addToModelingLogs,
}),
]);
}
async function saveSwfAssetModelingData(customPetData, context) {
const { db, swfAssetByRemoteIdLoader, addToModelingLogs } = context;
const objectAssets = Object.values(customPetData.object_asset_registry);
const incomingItemSwfAssets = objectAssets.map((objectAsset) => ({
type: "object",
remoteId: String(objectAsset.asset_id),
url: objectAsset.asset_url,
zoneId: String(objectAsset.zone_id),
zonesRestrict: "",
bodyId: (currentBodyId) => {
const incomingBodyId = String(customPetData.custom_pet.body_id);
if (currentBodyId == null) {
// If this is a new asset, use the incoming body ID. This might not be
// totally true, the real ID might be 0, but we're conservative to
// start and will update it to 0 if we see a contradiction later!
//
// NOTE: There's an explicitly_body_specific column on Item. We don't
// need to consider it here, because it's specifically to
// override the heuristics in the old app that sometimes set
// bodyId=0 for incoming items depending on their zone. We don't
// do that here!
return incomingBodyId;
} else if (currentBodyId === "0") {
// If this is already an all-bodies asset, keep it that way.
return "0";
} else if (currentBodyId !== incomingBodyId) {
// If this isn't an all-bodies asset yet, but we've now seen it on two
// different body types, then make it an all-bodies asset!
return "0";
} else {
// Okay, the row already exists, and its body ID matches this one.
// No change!
return currentBodyId;
}
},
}));
const biologyAssets = Object.values(customPetData.custom_pet.biology_by_zone);
const incomingPetSwfAssets = biologyAssets.map((biologyAsset) => ({
type: "biology",
remoteId: String(biologyAsset.part_id),
url: biologyAsset.asset_url,
zoneId: String(biologyAsset.zone_id),
zonesRestrict: biologyAsset.zones_restrict,
bodyId: "0",
}));
const incomingSwfAssets = [...incomingItemSwfAssets, ...incomingPetSwfAssets];
// Build a map from asset ID to item ID. We'll use this later to build the
// new parents_swf_assets rows.
const assetIdToItemIdMap = new Map();
const objectInfos = Object.values(customPetData.object_info_registry);
for (const objectInfo of objectInfos) {
const itemId = String(objectInfo.obj_info_id);
const assetIds = Object.values(objectInfo.assets_by_zone).map(String);
for (const assetId of assetIds) {
assetIdToItemIdMap.set(assetId, itemId);
}
}
await syncToDb(db, incomingSwfAssets, {
loader: swfAssetByRemoteIdLoader,
tableName: "swf_assets",
buildLoaderKey: (row) => ({ type: row.type, remoteId: row.remoteId }),
buildUpdateCondition: (row) => [
`type = ? AND remote_id = ?`,
row.type,
row.remoteId,
],
includeUpdatedAt: false,
afterInsert: async (inserts) => {
// After inserting the assets, insert corresponding rows in
// parents_swf_assets for item assets, to mark the asset as belonging to
// the item. (We do this separately for pet states, so that we can get
// the pet state ID first.)
const itemAssetInserts = inserts.filter((i) => i.type === "object");
if (itemAssetInserts.length === 0) {
return;
}
const relationshipInserts = itemAssetInserts.map(({ remoteId }) => ({
parentType: "Item",
parentId: assetIdToItemIdMap.get(remoteId),
remoteId,
}));
const qs = itemAssetInserts
.map(
(_) =>
// A bit cheesy: we use a subquery here to insert _our_ ID for the
// asset, despite only having remote_id available here. This saves
// us from another round-trip to SELECT the inserted IDs.
`(?, ?, ` +
`(SELECT id FROM swf_assets WHERE type = "object" AND remote_id = ?))`
)
.join(", ");
const values = relationshipInserts
.map(({ parentType, parentId, remoteId }) => [
parentType,
parentId,
remoteId,
])
.flat();
await db.execute(
`INSERT INTO parents_swf_assets (parent_type, parent_id, swf_asset_id)
VALUES ${qs}`,
values
);
addToModelingLogs({
tableName: "parents_swf_assets",
inserts: relationshipInserts,
updates: [],
});
},
addToModelingLogs,
});
}
/**
* Syncs the given data to the database: for each incoming row, if there's no
* matching row in the loader, we insert a new row; or, if there's a matching
* row in the loader but its data is different, we update it; or, if there's
* no change, we do nothing.
*
* Automatically sets the `createdAt` and `updatedAt` timestamps for inserted
* or updated rows.
*
* Will perform one call to the loader and at most one bulk INSERT, regardless
* of how many rows we're syncing; changed rows are updated in parallel, with
* one UPDATE each.
*/
async function syncToDb(
db,
incomingRows,
{
loader,
tableName,
buildLoaderKey,
buildUpdateCondition,
includeCreatedAt = true,
includeUpdatedAt = true,
afterInsert = null,
addToModelingLogs,
}
) {
const loaderKeys = incomingRows.map(buildLoaderKey);
const currentRows = await loader.loadMany(loaderKeys);
const inserts = [];
const updates = [];
for (const index in incomingRows) {
const incomingRow = incomingRows[index];
const currentRow = currentRows[index];
// If there is no corresponding row in the database, prepare an insert.
// TODO: Should probably converge on whether not-found is null or an error
if (currentRow == null || currentRow instanceof Error) {
const insert = {};
for (const key in incomingRow) {
let incomingValue = incomingRow[key];
// If you pass a function as a value, we treat it as a merge function:
// we'll pass it the current value, and you'll use it to determine and
// return the incoming value. In this case, the row doesn't exist yet,
// so the current value is `null`.
if (typeof incomingValue === "function") {
incomingValue = incomingValue(null);
}
insert[key] = incomingValue;
}
if (includeCreatedAt) {
insert.createdAt = new Date();
}
if (includeUpdatedAt) {
insert.updatedAt = new Date();
}
inserts.push(insert);
// Remove this from the loader cache, so that loading again will fetch
// the inserted row.
loader.clear(buildLoaderKey(incomingRow));
continue;
}
// If there's a row in the database, and some of the values don't match,
// prepare an update with the updated fields only.
const update = {};
for (const key in incomingRow) {
const currentValue = currentRow[key];
let incomingValue = incomingRow[key];
// If you pass a function as a value, we treat it as a merge function:
// we'll pass it the current value, and you'll use it to determine and
// return the incoming value.
if (typeof incomingValue === "function") {
incomingValue = incomingValue(currentValue);
}
if (currentValue !== incomingValue) {
update[key] = incomingValue;
}
}
if (Object.keys(update).length > 0) {
if (includeUpdatedAt) {
update.updatedAt = new Date();
}
updates.push({ incomingRow, update });
// Remove this from the loader cache, so that loading again will fetch
// the updated row.
loader.clear(buildLoaderKey(incomingRow));
}
}
// Do a bulk insert of anything that needs to be added.
if (inserts.length > 0) {
// Get the column names from the first row, and convert them to
// underscore-case instead of camel-case.
const rowKeys = Object.keys(inserts[0]).sort();
const columnNames = rowKeys.map((key) =>
key.replace(/[A-Z]/g, (m) => "_" + m[0].toLowerCase())
);
const columnsStr = columnNames.join(", ");
const qs = columnNames.map((_) => "?").join(", ");
const rowQs = inserts.map((_) => "(" + qs + ")").join(", ");
const rowValues = inserts.map((row) => rowKeys.map((key) => row[key]));
await db.execute(
`INSERT INTO ${tableName} (${columnsStr}) VALUES ${rowQs};`,
rowValues.flat()
);
if (afterInsert) {
await afterInsert(inserts);
}
}
// Do parallel updates of anything that needs to be updated.
// NOTE: I feel like it's not possible to do bulk updates, even in a
// multi-statement mysql2 request? I might be wrong, but whatever; it's
// very uncommon, and any perf hit would be nbd.
const updatePromises = [];
for (const { incomingRow, update } of updates) {
const rowKeys = Object.keys(update).sort();
const rowValues = rowKeys.map((k) => update[k]);
const columnNames = rowKeys.map((key) =>
key.replace(/[A-Z]/g, (m) => "_" + m[0].toLowerCase())
);
const qs = columnNames.map((c) => `${c} = ?`).join(", ");
const [conditionQs, ...conditionValues] = buildUpdateCondition(incomingRow);
updatePromises.push(
db.execute(
`UPDATE ${tableName} SET ${qs} WHERE ${conditionQs} LIMIT 1;`,
[...rowValues, ...conditionValues]
)
);
}
await Promise.all(updatePromises);
if (inserts.length > 0 || updates.length > 0) {
addToModelingLogs({
tableName,
inserts,
updates,
});
}
}
module.exports = { saveModelingData };
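
To make syncToDb's calling convention concrete, here's a minimal hypothetical call (data drawn from the tests below; syncToDb isn't exported, so this sketch assumes it runs inside modeling.js with a loader-equipped context in scope). Plain values are compared and written as-is; a function value acts as a merge function that receives the current database value and returns the value to sync.

async function syncOneAssetExample(db, swfAssetByRemoteIdLoader, addToModelingLogs) {
  await syncToDb(
    db,
    [
      {
        type: "object",
        remoteId: "6829",
        url: "http://images.neopets.com/cp/items/swf/000/000/006/6829_1707e50385.swf",
        zoneId: "3",
        zonesRestrict: "",
        // Merge function: a brand-new row gets body 93; an existing row with
        // a different body ID is generalized to 0 (fits all bodies).
        bodyId: (currentBodyId) =>
          currentBodyId == null || currentBodyId === "93" ? "93" : "0",
      },
    ],
    {
      loader: swfAssetByRemoteIdLoader,
      tableName: "swf_assets",
      buildLoaderKey: (row) => ({ type: row.type, remoteId: row.remoteId }),
      buildUpdateCondition: (row) => [
        `type = ? AND remote_id = ?`,
        row.type,
        row.remoteId,
      ],
      includeUpdatedAt: false,
      addToModelingLogs,
    }
  );
}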


@ -1,5 +1,11 @@
const gql = require("graphql-tag");
const { query, getDbCalls, clearDbCalls, useTestDb } = require("./setup.js");
const {
query,
getDbCalls,
clearDbCalls,
useTestDb,
connectToDb,
} = require("./setup.js");
describe("Pet", () => {
it("looks up a pet", async () => {
@ -33,6 +39,79 @@ describe("Pet", () => {
expect(res.data).toMatchSnapshot();
expect(getDbCalls()).toMatchInlineSnapshot(`
Array [
Array [
"SELECT * FROM pet_types WHERE (species_id = ? AND color_id = ?)",
Array [
"54",
"75",
],
],
Array [
"SELECT * FROM items WHERE id IN (?,?,?,?,?,?,?,?)",
Array [
"37229",
"37375",
"38911",
"38912",
"38913",
"43014",
"43397",
"48313",
],
],
Array [
"SELECT * FROM item_translations WHERE item_id IN (?,?,?,?,?,?,?,?) AND locale = \\"en\\"",
Array [
"37229",
"37375",
"38911",
"38912",
"38913",
"43014",
"43397",
"48313",
],
],
Array [
"SELECT * FROM swf_assets WHERE (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?) OR (type = ? AND remote_id = ?)",
Array [
"object",
"6829",
"object",
"14855",
"object",
"14856",
"object",
"14857",
"object",
"36414",
"object",
"39646",
"object",
"51959",
"object",
"56478",
"biology",
"7942",
"biology",
"7941",
"biology",
"24008",
"biology",
"21060",
"biology",
"21057",
"biology",
"7946",
],
],
Array [
"SELECT * FROM pet_states WHERE (pet_type_id = ? AND swf_asset_ids = ?)",
Array [
"2",
"7941,7942,7946,21057,21060,24008",
],
],
Array [
"SELECT * FROM species_translations
WHERE species_id IN (?) AND locale = \\"en\\"",
@ -51,7 +130,7 @@ describe("Pet", () => {
`);
});
it("models new item data", async () => {
it("models new pet and item data", async () => {
useTestDb();
const res = await query({
@ -80,6 +159,20 @@ describe("Pet", () => {
const res2 = await query({
query: gql`
query {
petAppearance(colorId: "75", speciesId: "54", pose: SAD_MASC) {
id
pose
bodyId
restrictedZones {
id
}
layers {
id
swfUrl
}
}
items(
ids: [
"37229"
@ -99,6 +192,13 @@ describe("Pet", () => {
rarityIndex
isNc
createdAt
appearanceOn(colorId: "75", speciesId: "54") {
layers {
id
swfUrl
}
}
}
}
`,
@ -106,35 +206,199 @@ describe("Pet", () => {
expect(res2).toHaveNoErrors();
expect(res2.data).toMatchSnapshot();
expect(getDbCalls()).toMatchInlineSnapshot(`
Array [
Array [
"SELECT * FROM item_translations WHERE item_id IN (?,?,?,?,?,?,?,?) AND locale = \\"en\\"",
Array [
"37229",
"37375",
"38911",
"38912",
"38913",
"43014",
"43397",
"48313",
],
],
Array [
"SELECT * FROM items WHERE id IN (?,?,?,?,?,?,?,?)",
Array [
"37229",
"37375",
"38911",
"38912",
"38913",
"43014",
"43397",
"48313",
],
],
]
`);
expect(getDbCalls()).toMatchSnapshot();
clearDbCalls();
// If we load the pet again, it should only make SELECT queries, not
// INSERT or UPDATE.
await query({
query: gql`
query {
petOnNeopetsDotCom(petName: "roopal27") {
items {
id
}
}
}
`,
});
const dbCalls = getDbCalls();
for (const [query, _] of dbCalls) {
expect(query).toMatch(/SELECT/);
expect(query).not.toMatch(/INSERT/);
expect(query).not.toMatch(/UPDATE/);
}
});
it("models updated item data", async () => {
useTestDb();
// First, write a fake version of the Jewelled Staff to the database.
// It's mostly the real data, except we changed rarity_index,
// thumbnail_url, translated name, and translated description.
const db = await connectToDb();
await Promise.all([
db.query(
`INSERT INTO items (id, zones_restrict, thumbnail_url, category,
type, rarity_index, price, weight_lbs)
VALUES (43397, "00000000000000000000000000000000000000000000000",
"http://example.com/favicon.ico", "Clothes", "Clothes", 101,
0, 1);`
),
db.query(
`INSERT INTO item_translations (item_id, locale, name, description,
rarity)
VALUES (43397, "en", "Bejewelled Staffo",
"This staff is really neat and good!", "Artifact")`
),
]);
clearDbCalls();
// Then, load a pet wearing this. It should trigger an UPDATE for the item
// and its translation, and return the new names in the query.
const res = await query({
query: gql`
query {
petOnNeopetsDotCom(petName: "roopal27") {
items {
id
name
description
thumbnailUrl
rarityIndex
}
}
}
`,
});
expect(res).toHaveNoErrors();
const itemData = res.data.petOnNeopetsDotCom.items.find(
(item) => item.id === "43397"
);
expect(itemData).toEqual({
id: "43397",
name: "Jewelled Staff",
description: "This jewelled staff shines with a magical light.",
thumbnailUrl: "http://images.neopets.com/items/mall_staff_jewelled.gif",
rarityIndex: 500,
});
expect(getDbCalls()).toMatchSnapshot();
clearDbCalls();
// Finally, load the item. It should have the updated values.
const res2 = await query({
query: gql`
query {
item(id: "43397") {
id
name
description
thumbnailUrl
rarityIndex
}
}
`,
});
expect(res2).toHaveNoErrors();
expect(res2.data.item).toEqual({
id: "43397",
name: "Jewelled Staff",
description: "This jewelled staff shines with a magical light.",
thumbnailUrl: "http://images.neopets.com/items/mall_staff_jewelled.gif",
rarityIndex: 500,
});
expect(getDbCalls()).toMatchSnapshot();
});
it("sets bodyId=0 after seeing it on two body types", async () => {
useTestDb();
// First, write the Moon and Stars Background SWF to the database, but with
// the Standard Acara body ID set.
const db = await connectToDb();
await db.query(
`INSERT INTO swf_assets (type, remote_id, url, zone_id, zones_restrict,
created_at, body_id)
VALUES ("object", 6829, "http://images.neopets.com/cp/items/swf/000/000/006/6829_1707e50385.swf",
3, "", CURRENT_TIMESTAMP(), 93);`
);
clearDbCalls();
// Then, model a Zafara wearing it.
await query({
query: gql`
query {
petOnNeopetsDotCom(petName: "roopal27") {
id
}
}
`,
});
expect(getDbCalls()).toMatchSnapshot("db");
// The body ID should be 0 now.
const [rows, _] = await db.query(
`SELECT body_id FROM swf_assets
WHERE type = "object" AND remote_id = 6829;`
);
expect(rows[0].body_id).toEqual(0);
});
it("models unconverted pets", async () => {
useTestDb();
// First, model an unconverted pet, and check its pose and layers.
const res = await query({
query: gql`
query {
petOnNeopetsDotCom(petName: "Marishka82") {
pose
petAppearance {
id
pose
layers {
id
}
}
}
}
`,
});
expect(res).toHaveNoErrors();
const modeledPet = res.data.petOnNeopetsDotCom;
expect(modeledPet.pose).toEqual("UNCONVERTED");
expect(modeledPet.petAppearance.pose).toEqual("UNCONVERTED");
expect(modeledPet.petAppearance.layers).toHaveLength(1);
// Then, request the corresponding appearance fresh from the db, and
// confirm we get the same back as when we modeled the pet.
const res2 = await query({
query: gql`
query {
petAppearance(speciesId: "31", colorId: "36", pose: UNCONVERTED) {
id
layers {
id
}
}
}
`,
});
expect(res2).toHaveNoErrors();
const petAppearance = res2.data.petAppearance;
expect(petAppearance.id).toEqual(modeledPet.petAppearance.id);
expect(petAppearance.layers.map((l) => l.id)).toEqual(
modeledPet.petAppearance.layers.map((l) => l.id)
);
});
});

File diff suppressed because it is too large


@ -83,6 +83,7 @@ beforeEach(() => {
}
dbEnvironment = "production";
dbSetupDone = false;
db = null;
});
afterAll(() => {
if (db) {
@ -138,7 +139,7 @@ module.exports = {
query,
getDbCalls,
clearDbCalls,
getDb: () => db,
connectToDb,
useTestDb,
logInAsTestUser,
};


@ -71,7 +71,7 @@ const typeDefs = gql`
const resolvers = {
AppearanceLayer: {
bodyId: async ({ id }, _, { swfAssetLoader }) => {
remoteId: async ({ id }, _, { swfAssetLoader }) => {
const layer = await swfAssetLoader.load(id);
return layer.remoteId;
},
@ -79,7 +79,7 @@ const resolvers = {
const layer = await swfAssetLoader.load(id);
return layer.bodyId;
},
zone: async ({ id }, _, { swfAssetLoader, zoneLoader }) => {
zone: async ({ id }, _, { swfAssetLoader }) => {
const layer = await swfAssetLoader.load(id);
return { id: layer.zoneId };
},
@ -94,7 +94,12 @@ const resolvers = {
imageUrl: async ({ id }, { size }, { swfAssetLoader }) => {
const layer = await swfAssetLoader.load(id);
if (!layer.hasImage) {
// If there's no image, return null. (In the development db, which isn't
// aware which assets we have images for on the DTI CDN, assume we _do_
// have the image - it's usually true, and better for testing.)
const hasImage =
layer.hasImage || process.env["DB_ENV"] === "development";
if (!hasImage) {
return null;
}
@ -234,12 +239,13 @@ async function loadAndCacheAssetManifest(db, layer) {
//
// TODO: Someday the manifests will all exist, right? So we'll want to
// reload all the missing ones at that time.
manifest = manifest || "";
const manifestJson = manifest ? JSON.stringify(manifest) : "";
const [
result,
] = await db.execute(
`UPDATE swf_assets SET manifest = ? WHERE id = ? LIMIT 1;`,
[manifest, layer.id]
[manifestJson, layer.id]
);
if (result.affectedRows !== 1) {
throw new Error(


@ -1,4 +1,3 @@
const fetch = require("node-fetch");
const { gql } = require("apollo-server");
const typeDefs = gql`
@ -8,16 +7,10 @@ const typeDefs = gql`
petAppearance: PetAppearance!
wornItems: [Item!]!
closetedItems: [Item!]!
species: Species! # to be deprecated? can use petAppearance? 🤔
color: Color! # to be deprecated? can use petAppearance? 🤔
pose: Pose! # to be deprecated? can use petAppearance? 🤔
items: [Item!]! # deprecated alias for wornItems
}
extend type Query {
outfit(id: ID!): Outfit
petOnNeopetsDotCom(petName: String!): Outfit
}
`;
@ -46,242 +39,7 @@ const resolvers = {
},
Query: {
outfit: (_, { id }) => ({ id }),
petOnNeopetsDotCom: async (
_,
{ petName },
{ db, itemLoader, itemTranslationLoader }
) => {
// Start all these requests as soon as possible...
const petMetaDataPromise = loadPetMetaData(petName);
const customPetDataPromise = loadCustomPetData(petName);
const modelingPromise = customPetDataPromise.then((customPetData) =>
saveModelingData(customPetData, {
db,
itemLoader,
itemTranslationLoader,
})
);
// ...then wait on all of them before finishing. It's important to wait
// on modeling, so that it doesn't get cut off when the request ends!
const [petMetaData, customPetData, __] = await Promise.all([
petMetaDataPromise,
customPetDataPromise,
modelingPromise,
]);
const outfit = {
// TODO: This isn't a fully-working Outfit object. It works for the
// client as currently implemented, but we'll probably want to
// move the client and this onto our more generic fields!
species: { id: customPetData.custom_pet.species_id },
color: { id: customPetData.custom_pet.color_id },
pose: getPoseFromPetData(petMetaData, customPetData),
items: Object.values(customPetData.object_info_registry).map((o) => ({
id: o.obj_info_id,
name: o.name,
description: o.description,
thumbnailUrl: o.thumbnail_url,
rarityIndex: o.rarity_index,
})),
};
return outfit;
},
},
};
async function loadPetMetaData(petName) {
const url =
`http://www.neopets.com/amfphp/json.php/PetService.getPet` + `/${petName}`;
const res = await fetch(url);
if (!res.ok) {
throw new Error(
`for pet meta data, neopets.com returned: ` +
`${res.status} ${res.statusText}. (${url})`
);
}
const json = await res.json();
return json;
}
async function loadCustomPetData(petName) {
const url =
`http://www.neopets.com/amfphp/json.php/CustomPetService.getViewerData` +
`/${petName}`;
const res = await fetch(url);
if (!res.ok) {
throw new Error(
`for custom pet data, neopets.com returned: ` +
`${res.status} ${res.statusText}. (${url})`
);
}
const json = await res.json();
if (!json.custom_pet) {
throw new Error(`missing custom_pet data`);
}
return json;
}
function getPoseFromPetData(petMetaData, petCustomData) {
// TODO: Use custom data to decide if Unconverted.
const moodId = petMetaData.mood;
const genderId = petMetaData.gender;
if (String(moodId) === "1" && String(genderId) === "1") {
return "HAPPY_MASC";
} else if (String(moodId) === "1" && String(genderId) === "2") {
return "HAPPY_FEM";
} else if (String(moodId) === "2" && String(genderId) === "1") {
return "SAD_MASC";
} else if (String(moodId) === "2" && String(genderId) === "2") {
return "SAD_FEM";
} else if (String(moodId) === "4" && String(genderId) === "1") {
return "SICK_MASC";
} else if (String(moodId) === "4" && String(genderId) === "2") {
return "SICK_FEM";
} else {
throw new Error(
`could not identify pose: ` +
`moodId=${moodId}, ` +
`genderId=${genderId}`
);
}
}
async function saveModelingData(
customPetData,
{ db, itemLoader, itemTranslationLoader }
) {
const itemIds = Object.keys(customPetData.object_info_registry);
const [items, itemTranslations] = await Promise.all([
itemLoader.loadMany(itemIds),
itemTranslationLoader.loadMany(itemIds),
]);
const rowsToInsert = [];
const rowsToUpdate = [];
for (const index in itemIds) {
const itemId = itemIds[index];
const item = items[index];
const itemTranslation = itemTranslations[index];
const objectInfo = customPetData.object_info_registry[itemId];
const objectInfoFields = {
id: itemId,
zonesRestrict: objectInfo.zones_restrict,
thumbnailUrl: objectInfo.thumbnail_url,
category: objectInfo.category,
type: objectInfo.type,
rarityIndex: objectInfo.rarity_index,
price: objectInfo.price,
weightLbs: objectInfo.weight_lbs,
name: objectInfo.name,
description: objectInfo.description,
rarity: objectInfo.rarity,
};
if (item instanceof Error) {
// New item, we'll just insert it!
rowsToInsert.push({
...objectInfoFields,
createdAt: new Date(),
updatedAt: new Date(),
});
continue;
}
const itemFields = {
id: item.id,
zonesRestrict: item.zonesRestrict,
thumbnailUrl: item.thumbnailUrl,
category: item.category,
type: item.type,
rarityIndex: item.rarityIndex,
price: item.price,
weightLbs: item.weightLbs,
name: itemTranslation.name,
description: itemTranslation.description,
rarity: itemTranslation.rarity,
};
if (objectsShallowEqual(objectInfoFields, itemFields)) {
// Existing item, no change!
continue;
}
// Updated item, so we'll update it!
rowsToUpdate.push({
...objectInfoFields,
updatedAt: new Date(),
});
}
if (rowsToInsert.length > 0) {
const itemQs = rowsToInsert
.map((_) => "(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)")
.join(", ");
const itemTranslationQs = rowsToInsert
.map((_) => "(?, ?, ?, ?, ?, ?, ?)")
.join(", ");
const itemValues = rowsToInsert.map((row) => [
row.id,
row.zonesRestrict,
row.thumbnailUrl,
row.category,
row.type,
row.rarityIndex,
row.price,
row.weightLbs,
row.createdAt,
row.updatedAt,
]);
const itemTranslationValues = rowsToInsert.map((row) => [
row.id,
"en",
row.name,
row.description,
row.rarity,
row.createdAt,
row.updatedAt,
]);
// NOTE: Hmm, I tried to use multiple statements to combine these, but I
// guess it doesn't work for prepared statements?
await Promise.all([
db.execute(
`INSERT INTO items
(
id, zones_restrict, thumbnail_url, category, type, rarity_index,
price, weight_lbs, created_at, updated_at
)
VALUES ${itemQs};
`,
itemValues.flat()
),
db.execute(
`INSERT INTO item_translations
(item_id, locale, name, description, rarity, created_at, updated_at)
VALUES ${itemTranslationQs};`,
itemTranslationValues.flat()
),
]);
}
// TODO: Update the items that need updating!
}
/** Given two objects with the same keys, return whether their values match. */
function objectsShallowEqual(a, b) {
for (const key in a) {
if (a[key] !== b[key]) {
return false;
}
}
return true;
}
module.exports = { typeDefs, resolvers };

src/server/types/Pet.js (new file, 150 lines)

@ -0,0 +1,150 @@
const fetch = require("node-fetch");
const { gql } = require("apollo-server");
const { getPoseFromPetState } = require("../util");
const { saveModelingData } = require("../modeling");
const typeDefs = gql`
type Pet {
id: ID!
name: String!
petAppearance: PetAppearance!
wornItems: [Item!]!
species: Species! # to be deprecated? can use petAppearance? 🤔
color: Color! # to be deprecated? can use petAppearance? 🤔
pose: Pose! # to be deprecated? can use petAppearance? 🤔
items: [Item!]! # deprecated alias for wornItems
}
extend type Query {
petOnNeopetsDotCom(petName: String!): Pet
}
`;
const resolvers = {
Pet: {
species: ({ customPetData }) => ({
id: customPetData.custom_pet.species_id,
}),
color: ({ customPetData }) => ({ id: customPetData.custom_pet.color_id }),
pose: ({ customPetData, petMetaData }) =>
getPoseFromPetData(petMetaData, customPetData),
petAppearance: async (
{ customPetData, petMetaData },
_,
{ petTypeBySpeciesAndColorLoader, petStatesForPetTypeLoader }
) => {
const petType = await petTypeBySpeciesAndColorLoader.load({
speciesId: customPetData.custom_pet.species_id,
colorId: customPetData.custom_pet.color_id,
});
const petStates = await petStatesForPetTypeLoader.load(petType.id);
const pose = getPoseFromPetData(petMetaData, customPetData);
const petState = petStates.find((ps) => getPoseFromPetState(ps) === pose);
return { id: petState.id };
},
wornItems: ({ customPetData }) =>
Object.values(customPetData.object_info_registry).map((o) => ({
id: o.obj_info_id,
name: o.name,
description: o.description,
thumbnailUrl: o.thumbnail_url,
rarityIndex: o.rarity_index,
})),
items: (...args) => resolvers.Pet.wornItems(...args),
},
Query: {
outfit: (_, { id }) => ({ id }),
petOnNeopetsDotCom: async (
_,
{ petName },
{
db,
petTypeBySpeciesAndColorLoader,
petStateByPetTypeAndAssetsLoader,
itemLoader,
itemTranslationLoader,
swfAssetByRemoteIdLoader,
}
) => {
const [customPetData, petMetaData, __] = await Promise.all([
loadCustomPetData(petName),
loadPetMetaData(petName),
]);
await saveModelingData(customPetData, petMetaData, {
db,
petTypeBySpeciesAndColorLoader,
petStateByPetTypeAndAssetsLoader,
itemLoader,
itemTranslationLoader,
swfAssetByRemoteIdLoader,
});
return { name: petName, customPetData, petMetaData };
},
},
};
async function loadPetMetaData(petName) {
const url = `http://www.neopets.com/amfphp/json.php/PetService.getPet/${petName}`;
const res = await fetch(url);
if (!res.ok) {
throw new Error(
`for pet meta data, neopets.com returned: ` +
`${res.status} ${res.statusText}. (${url})`
);
}
const json = await res.json();
return json;
}
async function loadCustomPetData(petName) {
const url =
`http://www.neopets.com/amfphp/json.php/CustomPetService.getViewerData` +
`/${petName}`;
const res = await fetch(url);
if (!res.ok) {
throw new Error(
`for custom pet data, neopets.com returned: ` +
`${res.status} ${res.statusText}. (${url})`
);
}
const json = await res.json();
if (!json.custom_pet) {
throw new Error(`missing custom_pet data`);
}
return json;
}
function getPoseFromPetData(petMetaData, petCustomData) {
const moodId = petMetaData.mood;
const genderId = petMetaData.gender;
if (Object.keys(petCustomData.custom_pet.biology_by_zone).length === 1) {
return "UNCONVERTED";
} else if (String(moodId) === "1" && String(genderId) === "1") {
return "HAPPY_MASC";
} else if (String(moodId) === "1" && String(genderId) === "2") {
return "HAPPY_FEM";
} else if (String(moodId) === "2" && String(genderId) === "1") {
return "SAD_MASC";
} else if (String(moodId) === "2" && String(genderId) === "2") {
return "SAD_FEM";
} else if (String(moodId) === "4" && String(genderId) === "1") {
return "SICK_MASC";
} else if (String(moodId) === "4" && String(genderId) === "2") {
return "SICK_FEM";
} else {
throw new Error(
`could not identify pose: ` +
`moodId=${moodId}, ` +
`genderId=${genderId}`
);
}
}
module.exports = { typeDefs, resolvers };


@ -106,6 +106,17 @@ const resolvers = {
speciesId: id,
colorId: "8", // Blue
});
// In production, this should ~never happen, because all species have a
// Blue version, or at least if a new one is added it will be modeled
// quickly! But in development, before modeling happens, it's possible
// for this to be empty, so we return a fake body ID. (This seems better
// than making it nullable, which adds downstream complexity for a
// particularly edge-y case that generally isn't worth considering.)
if (!petType) {
return `<ERROR-BLUE-PET-NOT-MODELED-FOR-SPECIES-${id}>`;
}
return petType.bodyId;
},
},
@ -208,6 +219,9 @@ const resolvers = {
speciesId,
colorId,
});
if (!petType) {
return null;
}
// TODO: We could query for this more directly, instead of loading all
// appearances 🤔