import DataLoader from "dataloader";

import { normalizeRow } from "./util";

const buildClosetListLoader = (db) =>
  new DataLoader(async (ids) => {
    const qs = ids.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM closet_lists WHERE id IN (${qs})`,
      ids,
    );

    const entities = rows.map(normalizeRow);

    return ids.map((id) => entities.find((e) => e.id === id));
  });
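
// The loaders in this file all follow the same DataLoader contract: the batch
// function receives every key `.load()`ed during one tick, runs a single
// parameterized query, and must return results in the same order as the keys
// (hence the `find`/`filter` realignment, since SQL row order isn't
// guaranteed). A minimal usage sketch (hypothetical caller, not part of this
// module):
//
//   const closetListLoader = buildClosetListLoader(db);
//   const [listA, listB] = await Promise.all([
//     closetListLoader.load("1"),
//     closetListLoader.load("2"),
//   ]);
//   // Both loads above are served by one `SELECT ... WHERE id IN (?,?)`.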

const buildClosetHangersForListLoader = (db) =>
  new DataLoader(async (closetListIds) => {
    const qs = closetListIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM closet_hangers WHERE list_id IN (${qs})`,
      closetListIds,
    );

    const entities = rows.map(normalizeRow);

    return closetListIds.map((closetListId) =>
      entities.filter((e) => e.listId === closetListId),
    );
  });

const buildClosetHangersForDefaultListLoader = (db) =>
  new DataLoader(async (userIdAndOwnsOrWantsItemsPairs) => {
    const conditions = userIdAndOwnsOrWantsItemsPairs
      .map((_) => `(user_id = ? AND owned = ? AND list_id IS NULL)`)
      .join(" OR ");
    const values = userIdAndOwnsOrWantsItemsPairs
      .map(({ userId, ownsOrWantsItems }) => [
        userId,
        ownsOrWantsItems === "OWNS",
      ])
      .flat();
    const [rows] = await db.execute(
      `SELECT * FROM closet_hangers WHERE ${conditions}`,
      values,
    );

    const entities = rows.map(normalizeRow);

    return userIdAndOwnsOrWantsItemsPairs.map(({ userId, ownsOrWantsItems }) =>
      entities.filter(
        (e) =>
          e.userId === userId &&
          Boolean(e.owned) === (ownsOrWantsItems === "OWNS"),
      ),
    );
  });

const buildColorLoader = (db) => {
  const colorLoader = new DataLoader(async (colorIds) => {
    const qs = colorIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM colors WHERE id IN (${qs}) AND prank = 0`,
      colorIds,
    );

    const entities = rows.map(normalizeRow);
    const entitiesByColorId = new Map(entities.map((e) => [e.id, e]));

    return colorIds.map(
      (colorId) =>
        entitiesByColorId.get(String(colorId)) ||
        new Error(`could not find color ${colorId}`),
    );
  });

  colorLoader.loadAll = async () => {
    const [rows] = await db.execute(`SELECT * FROM colors WHERE prank = 0`);
    const entities = rows.map(normalizeRow);

    for (const color of entities) {
      colorLoader.prime(color.id, color);
    }

    return entities;
  };

  return colorLoader;
};
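
// `loadAll` is a convenience helper hung off the loader: it fetches the whole
// (small) table once and primes the per-id cache, so later `.load(id)` calls
// in the same request resolve from cache without another query. A sketch of
// the intended flow (hypothetical caller):
//
//   const colors = await colorLoader.loadAll();
//   const first = await colorLoader.load(colors[0].id); // cache hit, no query
//
// The species loader below uses the same pattern.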

const buildSpeciesLoader = (db) => {
  const speciesLoader = new DataLoader(async (speciesIds) => {
    const qs = speciesIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM species WHERE id IN (${qs})`,
      speciesIds,
    );

    const entities = rows.map(normalizeRow);
    const entitiesBySpeciesId = new Map(entities.map((e) => [e.id, e]));

    return speciesIds.map(
      (speciesId) =>
        entitiesBySpeciesId.get(String(speciesId)) ||
        new Error(`could not find species ${speciesId}`),
    );
  });

  speciesLoader.loadAll = async () => {
    const [rows] = await db.execute(`SELECT * FROM species`);
    const entities = rows.map(normalizeRow);

    for (const species of entities) {
      speciesLoader.prime(species.id, species);
    }

    return entities;
  };

  return speciesLoader;
};

const buildTradeMatchesLoader = (db) =>
  new DataLoader(
    async (userPairs) => {
      const conditions = userPairs
        .map(
          (_) =>
            `(public_user_hangers.user_id = ? AND current_user_hangers.user_id = ? AND public_user_hangers.owned = ? AND current_user_hangers.owned = ?)`,
        )
        .join(" OR ");
      const conditionValues = userPairs
        .map(({ publicUserId, currentUserId, direction }) => {
          if (direction === "public-owns-current-wants") {
            return [publicUserId, currentUserId, true, false];
          } else if (direction === "public-wants-current-owns") {
            return [publicUserId, currentUserId, false, true];
          } else {
            throw new Error(
              `unexpected user pair direction: ${JSON.stringify(direction)}`,
            );
          }
        })
        .flat();

      const [rows] = await db.query(
        `
          SET SESSION group_concat_max_len = 4096;
          SELECT
            public_user_hangers.user_id AS public_user_id,
            current_user_hangers.user_id AS current_user_id,
            IF(
              public_user_hangers.owned,
              "public-owns-current-wants",
              "public-wants-current-owns"
            ) AS direction,
            GROUP_CONCAT(public_user_hangers.item_id) AS item_ids
          FROM closet_hangers AS public_user_hangers
          INNER JOIN users AS public_users ON public_users.id = public_user_hangers.user_id
          LEFT JOIN closet_lists AS public_user_lists
            ON public_user_lists.id = public_user_hangers.list_id
          INNER JOIN closet_hangers AS current_user_hangers
            ON public_user_hangers.item_id = current_user_hangers.item_id
          WHERE (
            (${conditions})
            AND (
              -- For the public user (but not the current), the hanger must be
              -- marked Trading.
              (public_user_hangers.list_id IS NOT NULL AND public_user_lists.visibility >= 2)
              OR (
                public_user_hangers.list_id IS NULL AND public_user_hangers.owned = 1
                AND public_users.owned_closet_hangers_visibility >= 2
              )
              OR (
                public_user_hangers.list_id IS NULL AND public_user_hangers.owned = 0
                AND public_users.wanted_closet_hangers_visibility >= 2
              )
            )
          )
          GROUP BY public_user_id, current_user_id;
        `,
        conditionValues,
      );

      const entities = rows.map(normalizeRow);

      return userPairs.map(({ publicUserId, currentUserId, direction }) => {
        const entity = entities.find(
          (e) =>
            e.publicUserId === publicUserId &&
            e.currentUserId === currentUserId &&
            e.direction === direction,
        );
        return entity ? entity.itemIds.split(",") : [];
      });
    },
    {
      cacheKeyFn: ({ publicUserId, currentUserId, direction }) =>
        `${publicUserId}-${currentUserId}-${direction}`,
    },
  );
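
// When a loader's keys are objects rather than scalars, DataLoader needs a
// `cacheKeyFn` like the one above: by default it caches by key identity, so
// two `{ publicUserId, currentUserId, direction }` objects with the same
// contents would never share a cache entry or dedupe into one batch slot.
// A sketch of the effect (hypothetical values):
//
//   tradeMatchesLoader.load({
//     publicUserId: "1",
//     currentUserId: "2",
//     direction: "public-owns-current-wants",
//   });
//   // cache key: "1-2-public-owns-current-wants", so a second load with an
//   // equivalent object reuses the first result instead of querying again.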

const loadAllPetTypes = (db) => async () => {
  const [rows] = await db.execute(`SELECT species_id, color_id FROM pet_types`);
  const entities = rows.map(normalizeRow);
  return entities;
};

const buildItemLoader = (db) =>
  new DataLoader(async (ids) => {
    const qs = ids.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM items WHERE id IN (${qs})`,
      ids,
    );

    const entities = rows.map(normalizeRow);
    const entitiesById = new Map(entities.map((e) => [e.id, e]));

    return ids.map(
      (id) =>
        entitiesById.get(String(id)) ||
        new Error(`could not find item with ID: ${id}`),
    );
  });

const buildItemByNameLoader = (db, loaders) =>
  new DataLoader(
    async (names) => {
      const qs = names.map((_) => "?").join(", ");
      const normalizedNames = names.map((name) => name.trim().toLowerCase());
      const [rows] = await db.execute(
        // NOTE: In our MySQL schema, this is a case-insensitive exact search.
        `SELECT * FROM items WHERE name IN (${qs})`,
        normalizedNames,
      );

      const entitiesByName = new Map();
      for (const row of rows) {
        const item = normalizeRow(row);
        loaders.itemLoader.prime(item.id, item);

        const normalizedName = item.name.trim().toLowerCase();
        entitiesByName.set(normalizedName, item);
      }

      return normalizedNames.map((name) => entitiesByName.get(name) || null);
    },
    { cacheKeyFn: (name) => name.trim().toLowerCase() },
  );
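
// Note the cross-loader priming above: when we fetch an item by name, we also
// `prime` the id-keyed `itemLoader`, so a later `itemLoader.load(item.id)` in
// the same request is a cache hit instead of a second query. Several loaders
// in this file (search, trades, pet types) use the same trick. Hypothetical
// sequence:
//
//   const item = await itemByNameLoader.load("Blue Hat"); // hypothetical name
//   const again = await itemLoader.load(item.id); // served from the primed cache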

const itemSearchKindConditions = {
  // NOTE: We assume that items cannot have NC rarity and the PB description,
  // so we don't bother to filter out PB items in the NC filter, for perf.
  NC: `rarity_index IN (0, 500) OR is_manually_nc = 1`,
  NP: `rarity_index NOT IN (0, 500) AND is_manually_nc = 0 AND description NOT LIKE "%This item is part of a deluxe paint brush set!%"`,
  PB: `description LIKE "%This item is part of a deluxe paint brush set!%"`,
};

function buildItemSearchConditions({
  query,
  bodyId,
  itemKind,
  currentUserOwnsOrWants,
  currentUserId,
  zoneIds,
}) {
  // Split the query into words, and search for each word as a substring
  // of the name.
  const words = query.split(/\s+/);
  const wordMatchConditions = [];
  const wordMatchValues = [];
  for (let word of words) {
    // If the word starts with `-`, remove `-` and treat the filter as negative.
    const isNegative = word.startsWith("-");
    if (isNegative) {
      word = word.substr(1);
    }
    if (!word) {
      continue;
    }

    const condition = isNegative
      ? "items.name NOT LIKE ?"
      : "items.name LIKE ?";
    // Escape the LIKE wildcards `_` and `%` in the word, then wrap it in `%`
    // so it matches as a substring.
    const matcher = "%" + word.replace(/[_%]/g, "\\$&") + "%";

    wordMatchConditions.push(condition);
    wordMatchValues.push(matcher);
  }
  const wordMatchCondition = wordMatchConditions.join(" AND ") || "1";

  const itemKindCondition = itemSearchKindConditions[itemKind] || "1";
  const bodyIdCondition = bodyId
    ? "(swf_assets.body_id = ? OR swf_assets.body_id = 0)"
    : "1";
  const bodyIdValues = bodyId ? [bodyId] : [];
  const zoneIdsCondition =
    zoneIds.length > 0
      ? `swf_assets.zone_id IN (${zoneIds.map((_) => "?").join(", ")})`
      : "1";
  const currentUserJoin = currentUserOwnsOrWants
    ? `INNER JOIN closet_hangers ch ON ch.item_id = items.id`
    : "";
  const currentUserCondition = currentUserOwnsOrWants
    ? `ch.user_id = ? AND ch.owned = ?`
    : "1";
  const currentUserValues = currentUserOwnsOrWants
    ? [currentUserId, currentUserOwnsOrWants === "OWNS" ? "1" : "0"]
    : [];

  const queryJoins = `
    INNER JOIN parents_swf_assets rel
      ON rel.parent_type = "Item" AND rel.parent_id = items.id
    INNER JOIN swf_assets ON rel.swf_asset_id = swf_assets.id
    ${currentUserJoin}
  `;

  const queryConditions = `
    (${wordMatchCondition}) AND (${bodyIdCondition}) AND
    (${zoneIdsCondition}) AND (${itemKindCondition}) AND
    (${currentUserCondition})
  `;
  const queryConditionValues = [
    ...wordMatchValues,
    ...bodyIdValues,
    ...zoneIds,
    ...currentUserValues,
  ];

  return { queryJoins, queryConditions, queryConditionValues };
}
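
// A worked example of the fragments this helper produces, for a hypothetical
// call like buildItemSearchConditions({ query: "hat -wig", bodyId: "180",
// itemKind: "NC", currentUserOwnsOrWants: null, currentUserId: null,
// zoneIds: [] }):
//
//   queryConditions (roughly):
//     (items.name LIKE ? AND items.name NOT LIKE ?) AND
//     ((swf_assets.body_id = ? OR swf_assets.body_id = 0)) AND
//     (1) AND (rarity_index IN (0, 500) OR is_manually_nc = 1) AND (1)
//   queryConditionValues: ["%hat%", "%wig%", "180"]
//
// The `1` placeholders keep the WHERE clause valid when a filter isn't used.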

const buildItemSearchNumTotalItemsLoader = (db) =>
  new DataLoader(async (queries) => {
    // This isn't actually optimized as a batch query; we're just using the
    // DataLoader API for consistency with our other loaders!
    const queryPromises = queries.map(
      async ({
        query,
        bodyId,
        itemKind,
        currentUserOwnsOrWants,
        currentUserId,
        zoneIds = [],
      }) => {
        const { queryJoins, queryConditions, queryConditionValues } =
          buildItemSearchConditions({
            query,
            bodyId,
            itemKind,
            currentUserOwnsOrWants,
            currentUserId,
            zoneIds,
          });

        const [totalRows] = await db.execute(
          `
            SELECT count(DISTINCT items.id) AS numTotalItems FROM items
            ${queryJoins}
            WHERE ${queryConditions}
          `,
          queryConditionValues,
        );

        const { numTotalItems } = totalRows[0];
        return numTotalItems;
      },
    );

    const responses = await Promise.all(queryPromises);

    return responses;
  });

const buildItemSearchItemsLoader = (db, loaders) =>
  new DataLoader(async (queries) => {
    // This isn't actually optimized as a batch query; we're just using the
    // DataLoader API for consistency with our other loaders!
    const queryPromises = queries.map(
      async ({
        query,
        bodyId,
        itemKind,
        currentUserOwnsOrWants,
        currentUserId,
        zoneIds = [],
        offset,
        limit,
      }) => {
        const actualOffset = offset || 0;
        const actualLimit = Math.min(limit || 30, 30);

        const { queryJoins, queryConditions, queryConditionValues } =
          buildItemSearchConditions({
            query,
            bodyId,
            itemKind,
            currentUserOwnsOrWants,
            currentUserId,
            zoneIds,
          });

        const [rows] = await db.execute(
          `
            SELECT DISTINCT items.* FROM items
            ${queryJoins}
            WHERE ${queryConditions}
            ORDER BY items.name
            LIMIT ? OFFSET ?
          `,
          [...queryConditionValues, actualLimit, actualOffset],
        );

        const entities = rows.map(normalizeRow);

        for (const item of entities) {
          loaders.itemLoader.prime(item.id, item);
        }

        return entities;
      },
    );

    const responses = await Promise.all(queryPromises);

    return responses;
  });

const buildNewestItemsLoader = (db, loaders) =>
  new DataLoader(async (keys) => {
    // Essentially, I want to provide the loader-like API, and populate other
    // loaders, even though there's only one query to run.
    if (keys.length !== 1 || keys[0] !== "all-newest") {
      throw new Error(
        `this loader can only be loaded with the key "all-newest"`,
      );
    }

    const [rows] = await db.execute(
      `SELECT * FROM items ORDER BY created_at DESC LIMIT 20;`,
    );

    const entities = rows.map(normalizeRow);

    for (const entity of entities) {
      loaders.itemLoader.prime(entity.id, entity);
    }

    return [entities];
  });

async function runItemModelingQuery(db, filterToItemIds) {
  let itemIdsCondition;
  let itemIdsValues;
  if (filterToItemIds === "all") {
    // For all items, we use the condition `1`, which matches everything.
    itemIdsCondition = "1";
    itemIdsValues = [];
  } else {
    // Or, to filter to certain items, we add their IDs to the WHERE clause.
    const qs = filterToItemIds.map((_) => "?").join(", ");
    itemIdsCondition = `(item_id IN (${qs}))`;
    itemIdsValues = filterToItemIds;
  }

  return await db.execute(
    `
      SELECT T_ITEMS.item_id,
        T_BODIES.color_id,
        T_ITEMS.supports_vandagyre,
        COUNT(*) AS modeled_species_count,
        GROUP_CONCAT(
          T_BODIES.species_id
          ORDER BY T_BODIES.species_id
        ) AS modeled_species_ids
      FROM (
        -- NOTE: I found that extracting this as a separate query that runs
        -- first made things WAAAY faster. Less to join/group, I guess?
        SELECT DISTINCT items.id AS item_id,
          swf_assets.body_id AS body_id,
          -- Vandagyre was added on 2014-11-14, so we add some buffer here.
          -- TODO: Some later Dyeworks items don't support Vandagyre.
          -- Add a manual db flag?
          items.created_at >= "2014-12-01" AS supports_vandagyre
        FROM items
        INNER JOIN parents_swf_assets psa ON psa.parent_type = "Item"
          AND psa.parent_id = items.id
        INNER JOIN swf_assets ON swf_assets.id = psa.swf_asset_id
        WHERE items.modeling_status_hint IS NULL AND items.name NOT LIKE "%MME%"
          AND ${itemIdsCondition}
        ORDER BY item_id
      ) T_ITEMS
      INNER JOIN (
        SELECT DISTINCT body_id, species_id, color_id
        FROM pet_types T_PT1
        WHERE color_id IN (6, 8, 44, 46)
          AND (
            -- For non-standard colors, ignore the species that have the same
            -- body ID as standard pets. Otherwise, a lot of items will be
            -- like "oh, we fit the Maraquan Koi and Maraquan Mynci, where
            -- are all the other Maraquans??", which is incorrect!
            color_id = 8
            OR
            (
              SELECT count(*) FROM pet_types T_PT2
              WHERE T_PT1.body_id = T_PT2.body_id
              AND T_PT1.color_id != T_PT2.color_id
            ) = 0
          )
        ORDER BY body_id, species_id
      ) T_BODIES ON T_ITEMS.body_id = T_BODIES.body_id
      GROUP BY T_ITEMS.item_id, T_BODIES.color_id
      HAVING NOT (
        -- No species (either an All Bodies item, or a Capsule type thing)
        modeled_species_count = 0
        -- Single species (probably just their item)
        OR modeled_species_count = 1
        -- All species modeled (that are compatible with this color)
        OR modeled_species_ids = (
          SELECT GROUP_CONCAT(DISTINCT species_id ORDER BY species_id)
          FROM pet_types T_PT1
          WHERE color_id = T_BODIES.color_id
          AND (
            -- For non-standard colors, ignore the species that have the same
            -- body ID as standard pets. Otherwise, a lot of items will be
            -- like "oh, we fit the Maraquan Koi and Maraquan Mynci, where
            -- are all the other Maraquans??", which is incorrect!
            color_id = 8
            OR
            (
              SELECT count(*) FROM pet_types T_PT2
              WHERE T_PT1.body_id = T_PT2.body_id
              AND T_PT1.color_id != T_PT2.color_id
            ) = 0
          )
        )
        -- All species modeled except Vandagyre, for items that don't support it
        OR (NOT T_ITEMS.supports_vandagyre AND modeled_species_count = 54 AND modeled_species_ids = "1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50,51,52,53,54")
      )
      ORDER BY T_ITEMS.item_id
      -- We limit the result set a bit, because if there's a bug or something
      -- that causes too many records to return, it seems to have a tendency to
      -- take up a bunch of resources and crash the site?
      LIMIT 200;
    `,
    [...itemIdsValues],
  );
}
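
// Shape of the rows this query returns (one per item/color pair that still
// needs models, per the HAVING clause above): { item_id, color_id,
// supports_vandagyre, modeled_species_count, modeled_species_ids }, where
// modeled_species_ids is a comma-separated, sorted list like "1,2,5"
// (illustrative value). After normalizeRow, callers read these as itemId,
// colorId, etc.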

const buildSpeciesThatNeedModelsForItemLoader = (db) =>
  new DataLoader(
    async (colorIdAndItemIdPairs) => {
      // Get the requested item IDs, ignoring color for now. Remove duplicates.
      let itemIds = colorIdAndItemIdPairs.map(({ itemId }) => itemId);
      itemIds = [...new Set(itemIds)];

      // Run the big modeling query, but filtered to specifically these items.
      // The filter happens very early in the query, so it runs way faster than
      // the full modeling query.
      const [rows] = await runItemModelingQuery(db, itemIds);

      const entities = rows.map(normalizeRow);

      // Finally, the query returned a row for each item combined with each
      // color built into the query (well, no row when no models needed!). So,
      // find the right row for each color/item pair, or possibly null!
      return colorIdAndItemIdPairs.map(({ colorId, itemId }) =>
        entities.find((e) => e.itemId === itemId && e.colorId === colorId),
      );
    },
    { cacheKeyFn: ({ colorId, itemId }) => `${colorId}-${itemId}` },
  );

const buildItemsThatNeedModelsLoader = (db, loaders) =>
  new DataLoader(async (keys) => {
    // Essentially, I want to take easy advantage of DataLoader's caching, for
    // this query that can only run one way ^_^` There might be a better way to
    // do this!
    if (keys.length !== 1 || keys[0] !== "all") {
      throw new Error(`this loader can only be loaded with the key "all"`);
    }

    const [rows] = await runItemModelingQuery(db, "all");

    const entities = rows.map(normalizeRow);

    const result = new Map();
    for (const { colorId, itemId, ...entity } of entities) {
      loaders.speciesThatNeedModelsForItemLoader.prime(
        { colorId, itemId },
        entity,
      );

      if (!result.has(colorId)) {
        result.set(colorId, new Map());
      }
      result.get(colorId).set(itemId, entity);
    }

    return [result];
  });
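
// The single value this loader resolves to is a nested map:
// Map<colorId, Map<itemId, modelingData>>. A hypothetical read:
//
//   const byColor = await itemsThatNeedModelsLoader.load("all");
//   const itemsForColor = byColor.get("44"); // Map of itemId -> modeling data
//
// (The color IDs come from the modeling query's `color_id IN (6, 8, 44, 46)`
// list; "44" here is just an illustrative key.)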

const buildAllSpeciesIdsForColorLoader = (db) =>
  new DataLoader(async (colorIds) => {
    const qs = colorIds.map((_) => "?").join(", ");
    const [rows] = await db.execute(
      `
        SELECT color_id,
          GROUP_CONCAT(DISTINCT species_id ORDER BY species_id) AS species_ids
        FROM pet_types T_PT1
        WHERE color_id IN (${qs})
        AND (
          -- For non-standard colors, ignore the species that have the same
          -- body ID as standard pets. Otherwise, a lot of items will be
          -- like "oh, we fit the Maraquan Koi and Maraquan Mynci, where
          -- are all the other Maraquans??", which is incorrect!
          color_id = 8
          OR
          (
            SELECT count(*) FROM pet_types T_PT2
            WHERE T_PT1.body_id = T_PT2.body_id
            AND T_PT1.color_id != T_PT2.color_id
          ) = 0
        )
        GROUP BY color_id;
      `,
      colorIds,
    );

    const entities = rows.map(normalizeRow);

    return colorIds.map(
      (colorId) =>
        entities.find((e) => e.colorId === colorId)?.speciesIds?.split(",") ||
        [],
    );
  });

const buildItemBodiesWithAppearanceDataLoader = (db) =>
  new DataLoader(async (itemIds) => {
    const qs = itemIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      // TODO: I'm not sure this ORDER BY clause will reliably get standard
      // bodies to the top, it seems like it depends how DISTINCT works?
      `SELECT pet_types.body_id, pet_types.species_id, items.id AS item_id
       FROM items
       INNER JOIN parents_swf_assets ON
         items.id = parents_swf_assets.parent_id AND
         parents_swf_assets.parent_type = "Item"
       INNER JOIN swf_assets ON
         parents_swf_assets.swf_asset_id = swf_assets.id
       INNER JOIN pet_types ON
         pet_types.body_id = swf_assets.body_id OR swf_assets.body_id = 0
       INNER JOIN colors ON
         pet_types.color_id = colors.id
       WHERE items.id IN (${qs})
       GROUP BY pet_types.body_id
       ORDER BY
         pet_types.species_id,
         colors.standard DESC`,
      itemIds,
    );

    const entities = rows.map(normalizeRow);

    return itemIds.map((itemId) => entities.filter((e) => e.itemId === itemId));
  });

const buildItemAllOccupiedZonesLoader = (db) =>
  new DataLoader(async (itemIds) => {
    const qs = itemIds.map((_) => "?").join(", ");
    const [rows] = await db.execute(
      `SELECT items.id, GROUP_CONCAT(DISTINCT sa.zone_id) AS zone_ids FROM items
       INNER JOIN parents_swf_assets psa
         ON psa.parent_type = "Item" AND psa.parent_id = items.id
       INNER JOIN swf_assets sa ON sa.id = psa.swf_asset_id
       WHERE items.id IN (${qs})
       GROUP BY items.id;`,
      itemIds,
    );

    const entities = rows.map(normalizeRow);

    return itemIds.map((itemId) => {
      const item = entities.find((e) => e.id === itemId);
      if (!item) {
        return [];
      }

      return item.zoneIds.split(",");
    });
  });

const buildItemCompatibleBodiesAndTheirZonesLoader = (db) =>
  new DataLoader(async (itemIds) => {
    const qs = itemIds.map((_) => "?").join(", ");
    const [rows] = await db.query(
      `
        SELECT
          items.id as itemId,
          swf_assets.body_id AS bodyId,
          (SELECT species_id FROM pet_types WHERE body_id = bodyId LIMIT 1)
            AS speciesId,
          GROUP_CONCAT(DISTINCT swf_assets.zone_id) AS zoneIds
        FROM items
        INNER JOIN parents_swf_assets ON
          items.id = parents_swf_assets.parent_id AND
          parents_swf_assets.parent_type = "Item"
        INNER JOIN swf_assets ON
          parents_swf_assets.swf_asset_id = swf_assets.id
        WHERE items.id IN (${qs})
        GROUP BY items.id, swf_assets.body_id
        -- We have some invalid data where the asset has a body ID that
        -- matches no pet type. Huh! Well, ignore those bodies!
        HAVING speciesId IS NOT NULL OR bodyId = 0;
      `,
      itemIds,
    );

    const entities = rows.map(normalizeRow);

    return itemIds.map((itemId) => entities.filter((e) => e.itemId === itemId));
  });

const buildItemTradesLoader = (db, loaders) =>
  new DataLoader(
    async (itemIdOwnedPairs) => {
      const qs = itemIdOwnedPairs
        .map((_) => "(closet_hangers.item_id = ? AND closet_hangers.owned = ?)")
        .join(" OR ");
      const values = itemIdOwnedPairs
        .map(({ itemId, isOwned }) => [itemId, isOwned])
        .flat();
      const [rows] = await db.execute(
        {
          sql: `
            SELECT
              closet_hangers.*, closet_lists.*, users.*
            FROM closet_hangers
            INNER JOIN users ON users.id = closet_hangers.user_id
            LEFT JOIN closet_lists ON closet_lists.id = closet_hangers.list_id
            WHERE (
              (${qs})
              AND (
                (closet_hangers.list_id IS NOT NULL AND closet_lists.visibility >= 2)
                OR (
                  closet_hangers.list_id IS NULL AND closet_hangers.owned = 1
                  AND users.owned_closet_hangers_visibility >= 2
                )
                OR (
                  closet_hangers.list_id IS NULL AND closet_hangers.owned = 0
                  AND users.wanted_closet_hangers_visibility >= 2
                )
              )
            );
          `,
          nestTables: true,
        },
        values,
      );

      const entities = rows.map((row) => ({
        closetHanger: normalizeRow(row.closet_hangers),
        closetList: normalizeRow(row.closet_lists),
        user: normalizeRow(row.users),
      }));

      for (const entity of entities) {
        loaders.userLoader.prime(entity.user.id, entity.user);
        loaders.closetListLoader.prime(entity.closetList.id, entity.closetList);
      }

      return itemIdOwnedPairs.map(({ itemId, isOwned }) =>
        entities
          .filter(
            (e) =>
              e.closetHanger.itemId === itemId &&
              Boolean(e.closetHanger.owned) === isOwned,
          )
          .map((e) => ({
            id: e.closetHanger.id,
            closetList: e.closetList.id ? e.closetList : null,
            user: e.user,
          })),
      );
    },
    { cacheKeyFn: ({ itemId, isOwned }) => `${itemId}-${isOwned}` },
  );
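
// With mysql2's `nestTables: true` option (used above), each result row is
// grouped by table name rather than flattened, roughly:
//
//   { closet_hangers: { id, item_id, owned, ... },
//     closet_lists: { id, ... },
//     users: { id, ... } }
//
// which is why the mapping step normalizes row.closet_hangers,
// row.closet_lists, and row.users separately. (Column lists here are
// illustrative, not exhaustive.)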

const buildPetTypeLoader = (db, loaders) =>
  new DataLoader(async (petTypeIds) => {
    const qs = petTypeIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM pet_types WHERE id IN (${qs})`,
      petTypeIds,
    );

    const entities = rows.map(normalizeRow);

    for (const petType of entities) {
      loaders.petTypeBySpeciesAndColorLoader.prime(
        { speciesId: petType.speciesId, colorId: petType.colorId },
        petType,
      );
    }

    return petTypeIds.map((petTypeId) =>
      entities.find((e) => e.id === petTypeId),
    );
  });

const buildPetTypeBySpeciesAndColorLoader = (db, loaders) =>
  new DataLoader(
    async (speciesAndColorPairs) => {
      const conditions = [];
      const values = [];
      for (const { speciesId, colorId } of speciesAndColorPairs) {
        conditions.push("(species_id = ? AND color_id = ?)");
        values.push(speciesId, colorId);
      }

      const [rows] = await db.execute(
        `SELECT * FROM pet_types WHERE ${conditions.join(" OR ")}`,
        values,
      );

      const entities = rows.map(normalizeRow);
      const entitiesBySpeciesAndColorPair = new Map(
        entities.map((e) => [`${e.speciesId},${e.colorId}`, e]),
      );

      for (const petType of entities) {
        loaders.petTypeLoader.prime(petType.id, petType);
      }

      return speciesAndColorPairs.map(({ speciesId, colorId }) =>
        entitiesBySpeciesAndColorPair.get(`${speciesId},${colorId}`),
      );
    },
    { cacheKeyFn: ({ speciesId, colorId }) => `${speciesId},${colorId}` },
  );

const buildPetTypesForColorLoader = (db, loaders) =>
  new DataLoader(async (colorIds) => {
    const qs = colorIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM pet_types WHERE color_id IN (${qs})`,
      colorIds,
    );

    const entities = rows.map(normalizeRow);

    for (const petType of entities) {
      loaders.petTypeLoader.prime(petType.id, petType);
      loaders.petTypeBySpeciesAndColorLoader.prime(
        { speciesId: petType.speciesId, colorId: petType.colorId },
        petType,
      );
    }

    return colorIds.map((colorId) =>
      entities.filter((e) => e.colorId === colorId),
    );
  });

const buildAltStyleLoader = (db) =>
  new DataLoader(async (altStyleIds) => {
    const qs = altStyleIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM alt_styles WHERE id IN (${qs})`,
      altStyleIds,
    );

    const entities = rows.map(normalizeRow);

    return altStyleIds.map((altStyleId) =>
      entities.find((e) => e.id === altStyleId),
    );
  });

const buildSwfAssetLoader = (db) =>
  new DataLoader(async (swfAssetIds) => {
    const qs = swfAssetIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM swf_assets WHERE id IN (${qs})`,
      swfAssetIds,
    );

    const entities = rows.map(normalizeRow);

    return swfAssetIds.map((swfAssetId) =>
      entities.find((e) => e.id === swfAssetId),
    );
  });

const buildSwfAssetCountLoader = (db) =>
  new DataLoader(
    async (requests) => {
      const [rows] = await db.execute(
        `
          SELECT count(*) AS count, type,
            (manifest IS NOT NULL AND manifest != "") AS is_converted
          FROM swf_assets
          GROUP BY type, is_converted;
        `,
      );
      const entities = rows.map(normalizeRow);

      return requests.map(({ type, isConverted }) => {
        // Find the returned rows that match this count request.
        let matchingEntities = entities;
        if (type != null) {
          matchingEntities = matchingEntities.filter((e) => e.type === type);
        }
        if (isConverted != null) {
          matchingEntities = matchingEntities.filter(
            (e) => Boolean(e.isConverted) === isConverted,
          );
        }

        // Add their counts together, and return the total.
        return matchingEntities.map((e) => e.count).reduce((a, b) => a + b, 0);
      });
    },
    {
      cacheKeyFn: ({ type, isConverted }) => `${type},${isConverted}`,
    },
  );
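
// The count loader runs one GROUP BY query for the whole batch, then answers
// each request by filtering and summing the grouped rows. A hypothetical
// illustration: if the query returned
//   [{ type: "object", isConverted: 1, count: 10 },
//    { type: "object", isConverted: 0, count: 4 },
//    { type: "biology", isConverted: 1, count: 7 }]
// then load({ type: "object" }) resolves to 14, and
// load({ isConverted: true }) resolves to 17.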

const buildSwfAssetByRemoteIdLoader = (db) =>
  new DataLoader(
    async (typeAndRemoteIdPairs) => {
      const qs = typeAndRemoteIdPairs
        .map((_) => "(type = ? AND remote_id = ?)")
        .join(" OR ");
      const values = typeAndRemoteIdPairs
        .map(({ type, remoteId }) => [type, remoteId])
        .flat();
      const [rows] = await db.execute(
        `SELECT * FROM swf_assets WHERE ${qs}`,
        values,
      );

      const entities = rows.map(normalizeRow);

      return typeAndRemoteIdPairs.map(({ type, remoteId }) =>
        entities.find((e) => e.type === type && e.remoteId === remoteId),
      );
    },
    { cacheKeyFn: ({ type, remoteId }) => `${type},${remoteId}` },
  );

const buildItemSwfAssetLoader = (db, loaders) =>
  new DataLoader(
    async (itemAndBodyPairs) => {
      const conditions = [];
      const values = [];
      for (const { itemId, bodyId } of itemAndBodyPairs) {
        conditions.push(
          "(rel.parent_id = ? AND (sa.body_id = ? OR sa.body_id = 0))",
        );
        values.push(itemId, bodyId);
      }

      const [rows] = await db.execute(
        `SELECT sa.*, rel.parent_id FROM swf_assets sa
         INNER JOIN parents_swf_assets rel ON
           rel.parent_type = "Item" AND
           rel.swf_asset_id = sa.id
         WHERE ${conditions.join(" OR ")}`,
        values,
      );

      const entities = rows.map(normalizeRow);

      for (const swfAsset of entities) {
        loaders.swfAssetLoader.prime(swfAsset.id, swfAsset);
      }

      return itemAndBodyPairs.map(({ itemId, bodyId }) =>
        entities.filter(
          (e) =>
            e.parentId === itemId && (e.bodyId === bodyId || e.bodyId === "0"),
        ),
      );
    },
    { cacheKeyFn: ({ itemId, bodyId }) => `${itemId},${bodyId}` },
  );
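
// Note on body_id 0: throughout these queries, an asset row with body_id = 0
// is treated as fitting every body (the "all bodies" case), which is why both
// the SQL condition and the post-filter above accept body 0 alongside the
// requested body. This reading is inferred from how the queries in this file
// use the column, rather than from schema documentation.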
|
2020-04-23 01:08:00 -07:00
|
|
|
|
2020-08-01 15:30:26 -07:00
|
|
|
const buildPetSwfAssetLoader = (db, loaders) =>
|
2020-04-23 14:23:46 -07:00
|
|
|
new DataLoader(async (petStateIds) => {
|
|
|
|
const qs = petStateIds.map((_) => "?").join(",");
|
2021-05-03 15:06:07 -07:00
|
|
|
const [rows] = await db.execute(
|
2020-04-23 14:23:46 -07:00
|
|
|
`SELECT sa.*, rel.parent_id FROM swf_assets sa
|
|
|
|
INNER JOIN parents_swf_assets rel ON
|
|
|
|
rel.parent_type = "PetState" AND
|
|
|
|
rel.swf_asset_id = sa.id
|
|
|
|
WHERE rel.parent_id IN (${qs})`,
|
2024-02-01 04:59:09 -08:00
|
|
|
petStateIds,
|
2020-04-23 14:23:46 -07:00
|
|
|
);
|
|
|
|
|
|
|
|
const entities = rows.map(normalizeRow);
|
|
|
|
|
2020-08-01 15:30:26 -07:00
|
|
|
for (const swfAsset of entities) {
|
|
|
|
loaders.swfAssetLoader.prime(swfAsset.id, swfAsset);
|
|
|
|
}
|
|
|
|
|
2020-04-23 14:23:46 -07:00
|
|
|
return petStateIds.map((petStateId) =>
|
2024-02-01 04:59:09 -08:00
|
|
|
entities.filter((e) => e.parentId === petStateId),
|
2020-04-23 14:23:46 -07:00
|
|
|
);
|
|
|
|
});
|
|
|
|
|
Support alt styles in outfit thumbnails
Oops, before this change, outfits with alt styles would still show the
outfit as if no alt style were applied!
Now, we have the `Outfit` GraphQL type be internally aware of alt
styles, and set its `petAppearance` and `body` and `itemAppearances`
fields accordingly. No change was required to the actual
`/api/outfitImage` endpoint, once the GraphQL started returning the
right thing!
…because of that, I'm honestly kinda surprised that there's no obvious
issues arising with the Impress 2020 outfit interface itself? But it
seems to be correctly just, not showing alt styles at all, in the way I
intended because I never added support to it. So, okay, cool!
2024-02-08 10:51:52 -08:00
|
|
|
const buildAltStyleSwfAssetLoader = (db, loaders) =>
|
|
|
|
new DataLoader(async (altStyleIds) => {
|
|
|
|
const qs = altStyleIds.map((_) => "?").join(",");
|
|
|
|
const [rows] = await db.execute(
|
|
|
|
`SELECT sa.*, rel.parent_id FROM swf_assets sa
|
|
|
|
INNER JOIN parents_swf_assets rel ON
|
|
|
|
rel.parent_type = "AltStyle" AND
|
|
|
|
rel.swf_asset_id = sa.id
|
|
|
|
WHERE rel.parent_id IN (${qs})`,
|
|
|
|
altStyleIds,
|
|
|
|
);
|
|
|
|
|
|
|
|
const entities = rows.map(normalizeRow);
|
|
|
|
|
|
|
|
for (const swfAsset of entities) {
|
|
|
|
loaders.swfAssetLoader.prime(swfAsset.id, swfAsset);
|
|
|
|
}
|
|
|
|
|
|
|
|
return altStyleIds.map((altStyleId) =>
|
|
|
|
entities.filter((e) => e.parentId === altStyleId),
|
|
|
|
);
|
|
|
|
});
|
|
|
|
|
const buildNeopetsConnectionLoader = (db) =>
  new DataLoader(async (ids) => {
    const qs = ids.map((_) => "?").join(", ");
    const [rows] = await db.execute(
      `SELECT * FROM neopets_connections WHERE id IN (${qs})`,
      ids,
    );

    const entities = rows.map(normalizeRow);

    return ids.map((id) => entities.find((e) => e.id === id));
  });

const buildOutfitLoader = (db) =>
  new DataLoader(async (outfitIds) => {
    const qs = outfitIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM outfits WHERE id IN (${qs})`,
      outfitIds,
    );

    const entities = rows.map(normalizeRow);

    return outfitIds.map((outfitId) => entities.find((e) => e.id === outfitId));
  });

const buildItemOutfitRelationshipsLoader = (db) =>
  new DataLoader(async (outfitIds) => {
    const qs = outfitIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM item_outfit_relationships WHERE outfit_id IN (${qs})`,
      outfitIds,
    );

    const entities = rows.map(normalizeRow);

    return outfitIds.map((outfitId) =>
      entities.filter((e) => e.outfitId === outfitId),
    );
  });

const buildPetStateLoader = (db) =>
  new DataLoader(async (petStateIds) => {
    const qs = petStateIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM pet_states WHERE id IN (${qs})`,
      petStateIds,
    );

    const entities = rows.map(normalizeRow);

    return petStateIds.map((petStateId) =>
      entities.find((e) => e.id === petStateId),
    );
  });

const buildPetStatesForPetTypeLoader = (db, loaders) =>
  new DataLoader(async (petTypeIds) => {
    const qs = petTypeIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM pet_states
       WHERE pet_type_id IN (${qs})
       ORDER BY (mood_id IS NULL) ASC, mood_id ASC, female DESC,
         unconverted DESC, glitched ASC, id DESC`,
      petTypeIds,
    );

    const entities = rows.map(normalizeRow);

    for (const petState of entities) {
      loaders.petStateLoader.prime(petState.id, petState);
    }

    return petTypeIds.map((petTypeId) =>
      entities.filter((e) => e.petTypeId === petTypeId),
    );
  });

/** Given a bodyId, loads the canonical PetState to show as an example. */
const buildCanonicalPetStateForBodyLoader = (db, loaders) =>
  new DataLoader(
    async (requests) => {
      // I don't know how to do this query in bulk, so we'll just do it in
      // parallel!
      return await Promise.all(
        requests.map(async ({ bodyId, preferredColorId, fallbackColorId }) => {
          // Randomly-ish choose which gender presentation to prefer, based on
          // body ID. This makes the outcome stable, which is nice for caching
          // and testing and just generally not being surprised, but still
          // creates an even distribution.
          const gender = bodyId % 2 === 0 ? "masc" : "fem";

          const bodyCondition = bodyId !== "0" ? `pet_types.body_id = ?` : `1`;
          const bodyValues = bodyId !== "0" ? [bodyId] : [];

          const [rows] = await db.execute(
            {
              sql: `
                SELECT pet_states.*, pet_types.* FROM pet_states
                INNER JOIN pet_types ON pet_types.id = pet_states.pet_type_id
                WHERE ${bodyCondition}
                ORDER BY
                  pet_types.color_id = ? DESC, -- Prefer preferredColorId
                  pet_types.color_id = ? DESC, -- Prefer fallbackColorId
                  pet_states.mood_id = 1 DESC, -- Prefer Happy
                  pet_states.female = ? DESC, -- Prefer given gender
                  pet_states.id DESC, -- Prefer recent models (like in the app)
                  pet_states.glitched ASC -- Prefer not glitched (like in the app)
                LIMIT 1`,
              nestTables: true,
            },
            [
              ...bodyValues,
              preferredColorId || "<ignore>",
              fallbackColorId,
              gender === "fem",
            ],
          );
          if (rows.length === 0) {
            // No matching pet states at all, so there's no canonical one.
            return null;
          }

          const petState = normalizeRow(rows[0].pet_states);
          const petType = normalizeRow(rows[0].pet_types);
          if (!petState || !petType) {
            return null;
          }

          loaders.petStateLoader.prime(petState.id, petState);
          loaders.petTypeLoader.prime(petType.id, petType);

          return petState;
        }),
      );
    },
    {
      cacheKeyFn: ({ bodyId, preferredColorId, fallbackColorId }) =>
        `${bodyId}-${preferredColorId}-${fallbackColorId}`,
    },
  );

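For reference, a hedged call-site sketch for this loader; the IDs below are placeholders rather than real data, and real callers derive them from pet type and color records.

// Placeholder values for illustration only.
const exampleCanonicalPetState = (loaders) =>
  loaders.canonicalPetStateForBodyLoader.load({
    bodyId: "180", // hypothetical body ID
    preferredColorId: "8", // hypothetical color the caller prefers, if any
    fallbackColorId: "8", // hypothetical standard color to fall back to
  });
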
const buildPetStateByPetTypeAndAssetsLoader = (db, loaders) =>
  new DataLoader(
    async (petTypeIdAndAssetIdsPairs) => {
      const qs = petTypeIdAndAssetIdsPairs
        .map((_) => "(pet_type_id = ? AND swf_asset_ids = ?)")
        .join(" OR ");
      const values = petTypeIdAndAssetIdsPairs
        .map(({ petTypeId, swfAssetIds }) => [petTypeId, swfAssetIds])
        .flat();
      const [rows] = await db.execute(
        `SELECT * FROM pet_states WHERE ${qs}`,
        values,
      );

      const entities = rows.map(normalizeRow);

      for (const petState of entities) {
        loaders.petStateLoader.prime(petState.id, petState);
      }

      return petTypeIdAndAssetIdsPairs.map(({ petTypeId, swfAssetIds }) =>
        entities.find(
          (e) => e.petTypeId === petTypeId && e.swfAssetIds === swfAssetIds,
        ),
      );
    },
    {
      cacheKeyFn: ({ petTypeId, swfAssetIds }) => `${petTypeId}-${swfAssetIds}`,
    },
  );

const buildUserLoader = (db) =>
  new DataLoader(async (ids) => {
    const qs = ids.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM users WHERE id IN (${qs})`,
      ids,
    );

    const entities = rows.map(normalizeRow);
    const entitiesById = new Map(entities.map((e) => [e.id, e]));

    return ids.map(
      (id) =>
        entitiesById.get(String(id)) ||
        new Error(`could not find user with ID: ${id}`),
    );
  });

const buildUserByNameLoader = (db) =>
  new DataLoader(async (names) => {
    const qs = names.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM users WHERE name IN (${qs})`,
      names,
    );

    const entities = rows.map(normalizeRow);

    return names.map((name) =>
      entities.find((e) => e.name.toLowerCase() === name.toLowerCase()),
    );
  });

const buildUserByEmailLoader = (db) =>
  new DataLoader(async (emails) => {
    const qs = emails.map((_) => "?").join(",");
    const [rows] = await db.execute(
      {
        sql: `
          SELECT users.*, id_users.email FROM users
          INNER JOIN openneo_id.users id_users ON id_users.id = users.remote_id
          WHERE id_users.email IN (${qs})
        `,
        nestTables: true,
      },
      emails,
    );

    const entities = rows.map((row) => ({
      user: normalizeRow(row.users),
      email: row.id_users.email,
    }));

    return emails.map((email) => entities.find((e) => e.email === email).user);
  });

const buildUserClosetHangersLoader = (db) =>
  new DataLoader(async (userIds) => {
    const qs = userIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT closet_hangers.*, items.name as item_name FROM closet_hangers
       INNER JOIN items ON items.id = closet_hangers.item_id
       WHERE user_id IN (${qs})
       ORDER BY item_name`,
      userIds,
    );
    const entities = rows.map(normalizeRow);

    return userIds.map((userId) =>
      entities.filter((e) => e.userId === String(userId)),
    );
  });

const buildUserItemClosetHangersLoader = (db) =>
  new DataLoader(async (userIdAndItemIdPairs) => {
    const conditions = userIdAndItemIdPairs
      .map((_) => `(user_id = ? AND item_id = ?)`)
      .join(` OR `);
    const params = userIdAndItemIdPairs
      .map(({ userId, itemId }) => [userId, itemId])
      .flat();
    const [rows] = await db.execute(
      `SELECT * FROM closet_hangers WHERE ${conditions};`,
      params,
    );
    const entities = rows.map(normalizeRow);

    return userIdAndItemIdPairs.map(({ userId, itemId }) =>
      entities.filter((e) => e.userId === userId && e.itemId === itemId),
    );
  });

const buildUserClosetListsLoader = (db, loaders) =>
  new DataLoader(async (userIds) => {
    const qs = userIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM closet_lists
       WHERE user_id IN (${qs})
       ORDER BY name`,
      userIds,
    );

    const entities = rows.map(normalizeRow);

    for (const entity of entities) {
      loaders.closetListLoader.prime(entity.id, entity);
    }

    return userIds.map((userId) =>
      entities.filter((e) => e.userId === String(userId)),
    );
  });

Paginate the user outfits page

My main inspiration for doing this is actually our potentially-huge upcoming Vercel bill lol

From inspecting my Honeycomb dashboard, it looks like the main offender for backend CPU time usage is outfit images. And they come in big spikes: long stretches of low usage, then suddenly 1,000 requests in one minute.

My suspicion is that this comes from users with many saved outfits loading their outfit page, which previously would show all of them at once. We do have `loading="lazy"` set, but not all browsers support that yet, and I've had trouble pinning down the exact behavior anyway!

Paginating makes for a better experience for those huge-list users anyway. We've been meaning to do it, so here we go! My hope is that this drastically decreases backend CPU hours immediately 🤞 If not, we'll need to investigate in more detail where these outfit image requests are actually coming from!

Note that I added the pagination to the existing `outfits` GraphQL endpoint, rather than creating a new one. I felt comfortable doing this because it requires login anyway, so I'm confident that other clients aren't using it; and because, while this kind of thing often creates a risk of frontend and backend code getting out of sync, I think someone running old frontend code will just see only their first 30 outfits (but no pagination toolbar), get confused, and refresh the page, at which point they'll see all of them. (And I actually _prefer_ that slightly confusing UX, to avoid getting more giant spikes of outfit image requests, lol :p)

const buildUserOutfitsLoader = (db, loaders) =>
  new DataLoader(async (queries) => {
    // This isn't actually optimized as a batch query; we're just using a
    // DataLoader for API consistency with our other loaders!
    return queries.map(async ({ userId, limit, offset }) => {
      const actualLimit = Math.min(limit || 30, 30);
      const actualOffset = offset || 0;

      const [rows] = await db.execute(
        `SELECT * FROM outfits
         WHERE user_id = ?
         ORDER BY name
         LIMIT ? OFFSET ?`,
        [userId, actualLimit, actualOffset],
      );

      const entities = rows.map(normalizeRow);
      for (const entity of entities) {
        loaders.outfitLoader.prime(entity.id, entity);
      }

      return entities;
    });
  });

const buildUserNumTotalOutfitsLoader = (db) =>
  new DataLoader(async (userIds) => {
    const qs = userIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT user_id, COUNT(*) as num_total_outfits FROM outfits
       WHERE user_id IN (${qs})
       GROUP BY user_id`,
      userIds,
    );

    const entities = rows.map(normalizeRow);

    return userIds
      .map((userId) => entities.find((e) => e.userId === String(userId)))
      .map((e) => (e ? e.numTotalOutfits : 0));
  });

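Tying the two loaders above together, here's a hedged sketch of how a paginated resolver might use them; the resolver signature and context fields are assumptions, and the 30-per-page cap is enforced inside the loader regardless of what the client asks for.

// Hypothetical resolver sketch (the real `outfits` resolver is defined
// elsewhere). Assumes GraphQL-style args and a `currentUserId` in context,
// since this endpoint requires login.
const exampleUserOutfitsResolver = async (_parent, { limit, offset }, context) => {
  const { loaders, currentUserId } = context;
  const [outfits, numTotalOutfits] = await Promise.all([
    loaders.userOutfitsLoader.load({ userId: currentUserId, limit, offset }),
    loaders.userNumTotalOutfitsLoader.load(currentUserId),
  ]);
  // numTotalOutfits drives the pagination toolbar; outfits holds at most 30 rows.
  return { outfits, numTotalOutfits };
};
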
const buildUserLastTradeActivityLoader = (db) =>
  new DataLoader(async (userIds) => {
    const qs = userIds.map((_) => "?").join(",");
    const [rows] = await db.execute(
      // This query has a custom index: index_closet_hangers_for_last_trade_activity.
      // It's on (user_id, owned, list_id, updated_at). The intent is that this
      // will enable the query planner to find the max updated_at for each
      // user/owned/list_id tuple, and then use the filter conditions later to
      // remove non-Trading lists and choose the overall _Trading_ max for the
      // user.
      //
      // I'm not 100% sure that this is exactly what the query planner does,
      // but it seems _very_ happy when it has this index: the Butterfly Shower
      // item had ~850 users offering it, and this brought the query from
      // 10-15sec to 1-2sec. An earlier version of the index, without the
      // `owned` field, and forced with `USE INDEX`, was more like 4-5 sec - so
      // I'm guessing what happened there is that forcing the index forced a
      // better query plan, but that it still held all the hangers, instead of
      // deriving intermediate maxes. (With this better index, the query
      // planner jumps at it without a hint!)
      `
        SELECT
          closet_hangers.user_id AS user_id,
          MAX(closet_hangers.updated_at) AS last_trade_activity
        FROM closet_hangers
        INNER JOIN users ON users.id = closet_hangers.user_id
        LEFT JOIN closet_lists ON closet_lists.id = closet_hangers.list_id
        WHERE (
          closet_hangers.user_id IN (${qs})
          AND (
            (closet_hangers.list_id IS NOT NULL AND closet_lists.visibility >= 2)
            OR (
              closet_hangers.list_id IS NULL AND closet_hangers.owned = 1
              AND users.owned_closet_hangers_visibility >= 2
            )
            OR (
              closet_hangers.list_id IS NULL AND closet_hangers.owned = 0
              AND users.wanted_closet_hangers_visibility >= 2
            )
          )
        )
        GROUP BY closet_hangers.user_id
      `,
      userIds,
    );

    const entities = rows.map(normalizeRow);

    return userIds.map((userId) => {
      const entity = entities.find((e) => e.userId === String(userId));
      return entity ? entity.lastTradeActivity : null;
    });
  });

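For concreteness, here's a hedged sketch of the index described in the comment above, inferred from the column list it names; the real index is created by a migration outside this file and may differ in its exact definition.

// Hypothetical helper for illustration only; not called anywhere in this
// module. DDL is inferred from the comment above.
const ensureLastTradeActivityIndex = (db) =>
  db.execute(
    `CREATE INDEX index_closet_hangers_for_last_trade_activity
       ON closet_hangers (user_id, owned, list_id, updated_at)`,
  );
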
const buildZoneLoader = (db) => {
  const zoneLoader = new DataLoader(async (ids) => {
    const qs = ids.map((_) => "?").join(",");
    const [rows] = await db.execute(
      `SELECT * FROM zones WHERE id IN (${qs})`,
      ids,
    );

    const entities = rows.map(normalizeRow);
    const entitiesById = new Map(entities.map((e) => [e.id, e]));

    return ids.map(
      (id) =>
        entitiesById.get(String(id)) ||
        new Error(`could not find zone with ID: ${id}`),
    );
  });

  zoneLoader.loadAll = async () => {
    const [rows] = await db.execute(`SELECT * FROM zones`);
    const entities = rows.map(normalizeRow);

    for (const zone of entities) {
      zoneLoader.prime(zone.id, zone);
    }

    return entities;
  };

  return zoneLoader;
};

function buildLoaders(db) {
  const loaders = {};
  loaders.loadAllPetTypes = loadAllPetTypes(db);

  loaders.closetListLoader = buildClosetListLoader(db);
  loaders.closetHangersForListLoader = buildClosetHangersForListLoader(db);
  loaders.closetHangersForDefaultListLoader =
    buildClosetHangersForDefaultListLoader(db);
  loaders.colorLoader = buildColorLoader(db);
  loaders.itemLoader = buildItemLoader(db);
  loaders.itemByNameLoader = buildItemByNameLoader(db, loaders);
  loaders.itemSearchNumTotalItemsLoader =
    buildItemSearchNumTotalItemsLoader(db);
  loaders.itemSearchItemsLoader = buildItemSearchItemsLoader(db, loaders);
  loaders.newestItemsLoader = buildNewestItemsLoader(db, loaders);
  loaders.speciesThatNeedModelsForItemLoader =
    buildSpeciesThatNeedModelsForItemLoader(db);
  loaders.itemsThatNeedModelsLoader = buildItemsThatNeedModelsLoader(
    db,
    loaders,
  );

Modeling page performance fix

Ok so, I kinda assumed that the query engine would only compute `all_species_ids_for_this_color` on the rows we actually returned, and it's a fast subquery so it's fine. But that was wrong! I think the query engine was computing that for _every_ item, and _then_ filtering out stuff with `HAVING`. Which makes sense, because the `HAVING` clause references it, so computing it makes sense!

In this change, we inline the subquery, so it only gets called if the other conditions in the `HAVING` clause don't fail first. That way, it only gets run when needed, and the query runs about 2x faster (~30sec instead of ~60sec), which gets us back inside some timeouts that were triggering around 1 minute and making the page fail.

However, this meant we no longer return `all_species_ids_for_this_color`, which we actually use to determine which species are _left_ to model for! So now, we have a loader that basically runs the same query as that condition subquery.

A reasonable question at this point: is the `HAVING` clause a good idea? Would it be simpler to do the filtering in JS? I think it might be simpler, but I'd guess noticeably worse performance, because we really do filter out a _lot_ of results with that `HAVING` clause (basically all items, right?). So to filter on the JS side, we'd be transferring data for all items over the wire, which… like, that's not even the worst dealbreaker, but it would certainly be noticed. This hypothesis could be wrong, but it's enough of a reason for me to not bother pursuing the refactor!

  loaders.allSpeciesIdsForColorLoader = buildAllSpeciesIdsForColorLoader(db);
  loaders.itemBodiesWithAppearanceDataLoader =
    buildItemBodiesWithAppearanceDataLoader(db);
  loaders.itemAllOccupiedZonesLoader = buildItemAllOccupiedZonesLoader(db);
  loaders.itemCompatibleBodiesAndTheirZonesLoader =
    buildItemCompatibleBodiesAndTheirZonesLoader(db);
  loaders.itemTradesLoader = buildItemTradesLoader(db, loaders);
  loaders.petTypeLoader = buildPetTypeLoader(db, loaders);
  loaders.petTypeBySpeciesAndColorLoader = buildPetTypeBySpeciesAndColorLoader(
    db,
    loaders,
  );
  loaders.petTypesForColorLoader = buildPetTypesForColorLoader(db, loaders);
  loaders.altStyleLoader = buildAltStyleLoader(db);
  loaders.swfAssetLoader = buildSwfAssetLoader(db);
  loaders.swfAssetCountLoader = buildSwfAssetCountLoader(db);
  loaders.swfAssetByRemoteIdLoader = buildSwfAssetByRemoteIdLoader(db);
  loaders.itemSwfAssetLoader = buildItemSwfAssetLoader(db, loaders);
  loaders.petSwfAssetLoader = buildPetSwfAssetLoader(db, loaders);
  loaders.altStyleSwfAssetLoader = buildAltStyleSwfAssetLoader(db, loaders);
  loaders.neopetsConnectionLoader = buildNeopetsConnectionLoader(db);
  loaders.outfitLoader = buildOutfitLoader(db);
  loaders.itemOutfitRelationshipsLoader =
    buildItemOutfitRelationshipsLoader(db);
  loaders.petStateLoader = buildPetStateLoader(db);
  loaders.petStatesForPetTypeLoader = buildPetStatesForPetTypeLoader(
    db,
    loaders,
  );
  loaders.canonicalPetStateForBodyLoader = buildCanonicalPetStateForBodyLoader(
    db,
    loaders,
  );
  loaders.petStateByPetTypeAndAssetsLoader =
    buildPetStateByPetTypeAndAssetsLoader(db, loaders);
  loaders.speciesLoader = buildSpeciesLoader(db);
  loaders.tradeMatchesLoader = buildTradeMatchesLoader(db);
  loaders.userLoader = buildUserLoader(db);
  loaders.userByNameLoader = buildUserByNameLoader(db);
  loaders.userByEmailLoader = buildUserByEmailLoader(db);
  loaders.userClosetHangersLoader = buildUserClosetHangersLoader(db);
  loaders.userItemClosetHangersLoader = buildUserItemClosetHangersLoader(db);
  loaders.userClosetListsLoader = buildUserClosetListsLoader(db, loaders);
  loaders.userNumTotalOutfitsLoader = buildUserNumTotalOutfitsLoader(db);
  loaders.userOutfitsLoader = buildUserOutfitsLoader(db, loaders);
  loaders.userLastTradeActivityLoader = buildUserLastTradeActivityLoader(db);
  loaders.zoneLoader = buildZoneLoader(db);

  return loaders;
}
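
As a final usage sketch: roughly how a caller might build a per-request loader set from this module. The `connectToDb` helper and the context shape are assumptions here; the real server wiring lives outside this file.

// Usage sketch only. Assumes a mysql2-style connection whose execute()
// resolves to [rows, fields], which is what the loaders above expect.
async function exampleBuildContext(connectToDb) {
  const db = await connectToDb();
  const loaders = buildLoaders(db);
  // e.g. inside a resolver: batched and cached per request by DataLoader.
  // const color = await loaders.colorLoader.load(colorId);
  return { db, loaders };
}
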
module.exports = buildLoaders;