Auto-modeling script??
It seems to be working!! How exciting!! I'm just letting it run on stuff now :3 One important issue is that Classic DTI doesn't show images for items modeled this way, because we don't download the SWFs for it. But I wanna update it to stop using AWS anyway and do the same stuff DTI 2020 does; I think we can do that pretty sneakily!
parent 052cc242e4
commit e8d7f6678d
6 changed files with 323 additions and 39 deletions
@@ -83,6 +83,7 @@
     "cache-asset-manifests": "yarn run-script scripts/cache-asset-manifests.js",
     "delete-user": "yarn run-script scripts/delete-user.js",
     "export-users-to-auth0": "yarn run-script scripts/export-users-to-auth0.js",
+    "model-needed-items": "yarn run-script scripts/model-needed-items.js",
     "validate-owls-data": "yarn run-script scripts/validate-owls-data.js",
     "archive:create": "yarn archive:create:list-urls && yarn archive:create:download-urls && yarn archive:create:upload",
     "archive:create:list-urls": "yarn run-script scripts/archive/create/list-urls.js",
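(For reference: with the new script entry above, this runs as yarn model-needed-items. The script saves modeling data unconditionally; the USE_NEW_MODELING check is removed from saveModelingData below and moved into the pet lookup resolver instead, so only the on-demand modeling path stays behind that flag. HONEYCOMB_WRITE_KEY is only used for the Honeycomb trace the script starts.)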
scripts/model-needed-items.js (new file, 206 lines)
@@ -0,0 +1,206 @@
const beeline = require("honeycomb-beeline")({
  writeKey: process.env["HONEYCOMB_WRITE_KEY"],
  dataset:
    process.env["NODE_ENV"] === "production"
      ? "Dress to Impress (2020)"
      : "Dress to Impress (2020, dev)",
  serviceName: "impress-2020-gql-server",
});

import connectToDb from "../src/server/db";
import buildLoaders from "../src/server/loaders";
import {
  loadCustomPetData,
  loadNCMallPreviewImageHash,
} from "../src/server/load-pet-data";
import { gql, loadGraphqlQuery } from "../src/server/ssr-graphql";
import { saveModelingData } from "../src/server/modeling";

async function main() {
  const db = await connectToDb();
  const loaders = buildLoaders(db);
  const context = { db, ...loaders };

  const { data, errors } = await loadGraphqlQuery({
    query: gql`
      query ScriptModelNeededItems_GetNeededItems {
        standardItems: itemsThatNeedModels {
          id
          name
          speciesThatNeedModels {
            id
            name
            withColor(colorId: "8") {
              neopetsImageHash
            }
          }
        }

        babyItems: itemsThatNeedModels(colorId: "6") {
          id
          name
          speciesThatNeedModels(colorId: "6") {
            id
            name
            withColor(colorId: "6") {
              neopetsImageHash
            }
          }
        }

        maraquanItems: itemsThatNeedModels(colorId: "44") {
          id
          name
          speciesThatNeedModels(colorId: "44") {
            id
            name
            withColor(colorId: "44") {
              neopetsImageHash
            }
          }
        }

        mutantItems: itemsThatNeedModels(colorId: "46") {
          id
          name
          speciesThatNeedModels(colorId: "46") {
            id
            name
            withColor(colorId: "46") {
              neopetsImageHash
            }
          }
        }
      }
    `,
  });

  if (errors) {
    console.error(`Couldn't load items that need modeling:`);
    for (const error of errors) {
      console.error(error);
    }
    return 1;
  }

  await modelItems(data.standardItems, context);
  await modelItems(data.babyItems, context);
  await modelItems(data.maraquanItems, context);
  await modelItems(data.mutantItems, context);
}

async function modelItems(items, context) {
  for (const item of items) {
    for (const species of item.speciesThatNeedModels) {
      try {
        await modelItem(item, species, context);
      } catch (error) {
        console.error(
          `❌ [${item.name} (${item.id}) on ${species.name} (${species.id}))] ` +
            `Modeling failed, skipping:\n`,
          error
        );
        continue;
      }
      console.info(
        `✅ [${item.name} (${item.id}) on ${species.name} (${species.id}))] ` +
          `Modeling data saved!`
      );
    }
  }
}

async function modelItem(item, species, context) {
  // First, use the NC Mall try-on feature to get the image hash for this
  // species wearing this item.
  const imageHash = await loadImageHash(item, species);

  // Next, load the detailed customization data, using the special feature
  // where "@imageHash" can be looked up as if it were a pet name.
  const petName = "@" + imageHash;
  const customPetData = await loadCustomPetData(petName);

  // We don't have real pet metadata, but that's okay, that's only relevant for
  // tagging pet appearances, and that's not what we're here to do, so the
  // modeling function will skip that step. (But we do provide the pet "name"
  // to save in our modeling logs!)
  const petMetaData = { name: petName, mood: null, gender: null };

  // Check whether we actually *got* modeling data back. It's possible this
  // item just isn't compatible with this species! (In this case, it would be
  // wise for someone to manually set the `modeling_status_hint` field on this
  // item, so we skip it in the future!)
  //
  // NOTE: It seems like sometimes customPetData.object_asset_registry is
  // an object keyed by asset ID, and sometimes it's an array? Uhhh hm. Well,
  // Object.values does what we want in both cases!
  const itemAssets = Object.values(customPetData.object_asset_registry);
  const hasAssetsForThisItem = itemAssets.some(
    (a) => String(a.obj_info_id) === item.id
  );
  if (!hasAssetsForThisItem) {
    throw new Error(`custom pet data did not have assets for item ${item.id}`);
  }

  // Finally, model this data into the database!
  await saveModelingData(customPetData, petMetaData, context);
}

async function loadImageHash(item, species) {
  const basicImageHash = species.withColor.neopetsImageHash;
  try {
    return await loadWithRetries(
      () => loadNCMallPreviewImageHash(basicImageHash, [item.id]),
      {
        numAttempts: 3,
        delay: 5000,
        contextString: `${item.name} (${item.id}) on ${species.name} (${species.id}))`,
      }
    );
  } catch (error) {
    console.error(
      `[${item.name} (${item.id}) on ${species.name} (${species.id}))] ` +
        `Loading failed too many times, giving up`
    );
    throw error;
  }
}

async function loadWithRetries(fn, { numAttempts, delay, contextString }) {
  if (numAttempts <= 0) {
    return;
  }

  try {
    return await fn();
  } catch (error) {
    console.error(
      `[${contextString}] Error loading, will retry in ${delay}ms:\n`,
      error
    );
    await new Promise((resolve) => setTimeout(() => resolve(), delay));
    // Exponential backoff: retry with one fewer attempt left and double the
    // delay, keeping the same contextString so retry logs stay labeled.
    return await loadWithRetries(fn, {
      numAttempts: numAttempts - 1,
      delay: delay * 2,
      contextString,
    });
  }
}

async function mainWithBeeline() {
  const trace = beeline.startTrace({
    name: "scripts/model-needed-items",
    operation_name: "scripts/model-needed-items",
  });

  try {
    await main();
  } finally {
    beeline.finishTrace(trace);
  }
}

mainWithBeeline()
  .catch((e) => {
    console.error(e);
    process.exit(1);
  })
  .then((code = 0) => process.exit(code));
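For a sense of the data the script walks over: each entry in data.standardItems (and the other three groups) comes back shaped like the sketch below. Only the field names come from the query above; the IDs, name, and hash are made up.

// Hypothetical example of one entry from the query above (made-up values):
const exampleItem = {
  id: "12345",
  name: "Some Wearable Item",
  speciesThatNeedModels: [
    {
      id: "1",
      name: "Acara",
      withColor: { neopetsImageHash: "abc123xyz" },
    },
  ],
};
// modelItems() visits each (item, species) pair, and loadImageHash() reads
// species.withColor.neopetsImageHash as the base pet to try the item on.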
src/server/load-pet-data.js (new file, 83 lines)
@@ -0,0 +1,83 @@
import util from "util";
import fetch from "node-fetch";
import xmlrpc from "xmlrpc";

const neopetsXmlrpcClient = xmlrpc.createSecureClient({
  host: "www.neopets.com",
  path: "/amfphp/xmlrpc.php",
});
const neopetsXmlrpcCall = util
  .promisify(neopetsXmlrpcClient.methodCall)
  .bind(neopetsXmlrpcClient);

export async function loadPetMetaData(petName) {
  const response = await neopetsXmlrpcCall("PetService.getPet", [petName]);
  return response;
}

export async function loadCustomPetData(petName) {
  try {
    const response = await neopetsXmlrpcCall("CustomPetService.getViewerData", [
      petName,
    ]);
    return response;
  } catch (error) {
    // If Neopets.com fails to find valid customization data, we return null.
    if (
      error.code === "AMFPHP_RUNTIME_ERROR" &&
      error.faultString === "Unable to find body artwork for this combination."
    ) {
      return null;
    } else {
      throw error;
    }
  }
}

export async function loadNCMallPreviewImageHash(basicImageHash, itemIds) {
  const query = new URLSearchParams();
  query.append("selPetsci", basicImageHash);
  for (const itemId of itemIds) {
    query.append("itemsList[]", itemId);
  }

  // When we get rate limited, subsequent requests to the *exact* same URL
  // fail. For our use case, it makes sense to cache-bust that, I think!
  query.append("dti-rand", Math.random());

  const url = `http://ncmall.neopets.com/mall/ajax/petview/getPetData.php?${query}`;
  const res = await fetch(url);
  if (!res.ok) {
    try {
      console.error(
        `[loadNCMallPreviewImageHash] ${res.status} ${res.statusText}:\n` +
          (await res.text())
      );
    } catch (error) {
      console.error(
        `[loadNCMallPreviewImageHash] could not load response text for ` +
          `NC Mall preview failed request: ${error.message}`
      );
    }
    throw new Error(
      `could not load NC Mall preview image hash: ${res.status} ${res.statusText}`
    );
  }

  const dataText = await res.text();
  if (dataText.includes("trying to reload the page too quickly")) {
    throw new Error(`hit the NC Mall rate limit`);
  }
  const data = JSON.parse(dataText);
  if (data.success !== true) {
    throw new Error(
      `NC Mall preview returned non-success data: ${JSON.stringify(data)}`
    );
  }
  if (!data.newsci) {
    throw new Error(
      `NC Mall preview returned no newsci field: ${JSON.stringify(data)}`
    );
  }
  return data.newsci;
}
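A minimal sketch of how the two customization helpers here chain together in the auto-modeling flow. The helper name and values are illustrative; it mirrors what modelItem does in the script above.

import {
  loadCustomPetData,
  loadNCMallPreviewImageHash,
} from "../src/server/load-pet-data";

// Hypothetical helper: try `itemId` on a pet whose basic image hash we know.
async function previewItemOnBasicPet(basicImageHash, itemId) {
  // Ask the NC Mall try-on endpoint for the combined pet-plus-item image hash...
  const newImageHash = await loadNCMallPreviewImageHash(basicImageHash, [itemId]);
  // ...then load full customization data via the "@imageHash" pet name trick.
  return await loadCustomPetData("@" + newImageHash);
}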
@@ -16,10 +16,6 @@
  * HTML5.
  */
 async function saveModelingData(customPetData, petMetaData, context) {
-  if (process.env["USE_NEW_MODELING"] !== "1") {
-    return;
-  }
-
   const modelingLogs = [];
   const addToModelingLogs = (entry) => {
     console.info("[Modeling] " + JSON.stringify(entry, null, 4));
@@ -47,6 +43,13 @@ async function savePetTypeAndStateModelingData(
   petMetaData,
   context
 ) {
+  // NOTE: When we automatically model items with "@imageHash" pet names, we
+  // can't load corresponding metadata. That's fine, the script is just looking
+  // for new item data anyway, we can skip this step altogether in that case!
+  if (petMetaData.mood == null || petMetaData.gender == null) {
+    return;
+  }
+
   const {
     db,
     petTypeBySpeciesAndColorLoader,
@@ -214,6 +217,9 @@ async function saveItemModelingData(customPetData, context) {
 async function saveSwfAssetModelingData(customPetData, context) {
   const { db, swfAssetByRemoteIdLoader, addToModelingLogs } = context;

+  // NOTE: It seems like sometimes customPetData.object_asset_registry is
+  // an object keyed by asset ID, and sometimes it's an array? Uhhh hm. Well,
+  // Object.values does what we want in both cases!
   const objectAssets = Object.values(customPetData.object_asset_registry);
   const incomingItemSwfAssets = objectAssets.map((objectAsset) => ({
     type: "object",
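To tie the second hunk above back to the script: the automated path passes placeholder pet metadata like this (hash made up), so the new early return is what skips the pose-tagging step for script-modeled pets.

const petMetaData = { name: "@abc123xyz", mood: null, gender: null };
// mood and gender are null, so savePetTypeAndStateModelingData returns early;
// the item and SWF-asset modeling steps still run.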
@@ -1,8 +1,7 @@
-import util from "util";
 import { gql } from "apollo-server";
-import xmlrpc from "xmlrpc";
 import { getPoseFromPetState } from "../util";
 import { saveModelingData } from "../modeling";
+import { loadCustomPetData, loadPetMetaData } from "../load-pet-data";

 const typeDefs = gql`
   type Pet {
@@ -123,7 +122,7 @@ const resolvers = {
         loadPetMetaData(petName),
       ]);

-      if (customPetData != null) {
+      if (customPetData != null && process.env["USE_NEW_MODELING"] === "1") {
         await saveModelingData(customPetData, petMetaData, {
           db,
           petTypeBySpeciesAndColorLoader,
@@ -139,38 +138,6 @@ const resolvers = {
   },
 };

-const neopetsXmlrpcClient = xmlrpc.createSecureClient({
-  host: "www.neopets.com",
-  path: "/amfphp/xmlrpc.php",
-});
-const neopetsXmlrpcCall = util
-  .promisify(neopetsXmlrpcClient.methodCall)
-  .bind(neopetsXmlrpcClient);
-
-async function loadPetMetaData(petName) {
-  const response = await neopetsXmlrpcCall("PetService.getPet", [petName]);
-  return response;
-}
-
-async function loadCustomPetData(petName) {
-  try {
-    const response = await neopetsXmlrpcCall("CustomPetService.getViewerData", [
-      petName,
-    ]);
-    return response;
-  } catch (error) {
-    // If Neopets.com fails to find valid customization data, we return null.
-    if (
-      error.code === "AMFPHP_RUNTIME_ERROR" &&
-      error.faultString === "Unable to find body artwork for this combination."
-    ) {
-      return null;
-    } else {
-      throw error;
-    }
-  }
-}
-
 function getPoseFromPetData(petMetaData, petCustomData) {
   const moodId = petMetaData.mood;
   const genderId = petMetaData.gender;
@@ -36,6 +36,12 @@ const typeDefs = gql`
     switching between standard colors.
     """
     standardBodyId: ID!
+
+    """
+    A SpeciesColorPair of this species and the given color. Null if we don't
+    have a record of it as a valid species-color pair on Neopets.com.
+    """
+    withColor(colorId: ID!): SpeciesColorPair
   }

   """
@@ -211,6 +217,21 @@ const resolvers = {

       return petType.bodyId;
     },
+
+    withColor: async (
+      { id },
+      { colorId },
+      { petTypeBySpeciesAndColorLoader }
+    ) => {
+      const petType = await petTypeBySpeciesAndColorLoader.load({
+        speciesId: id,
+        colorId,
+      });
+      if (petType == null) {
+        return null;
+      }
+      return { id: petType.id };
+    },
   },

   Body: {