Build itemSearchV2 in GQL
The main change is that we restructure the query so that only the parts actually affected by pagination depend on the offset/limit variables! This will let the Apollo Cache trivially cache and show `numTotalItems` while waiting for other pages to load.
parent 367a527a6f
commit c38678cf1a
2 changed files with 236 additions and 85 deletions
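For context, here's a rough sketch (not part of this commit) of the kind of client query this restructuring is meant to serve. The operation and variable names are made up for illustration, but the field names match the `itemSearchV2` schema added below; the point is that `numTotalItems` takes no pagination arguments, so the Apollo cache can keep showing it while only `items(offset, limit)` refetches per page.

```js
import gql from "graphql-tag";

// Hypothetical client query (operation and variable names are illustrative).
// Because `numTotalItems` has no arguments, its cached value keeps rendering
// while `items(offset, limit)` is re-requested for each new page.
export const SEARCH_PANEL_QUERY = gql`
  query SearchPanel($query: String!, $offset: Int, $limit: Int) {
    itemSearchV2(query: $query) {
      numTotalItems
      items(offset: $offset, limit: $limit) {
        id
        name # assumes Item exposes a name field
      }
    }
  }
`;
```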
@@ -297,33 +297,21 @@ const itemSearchKindConditions = {
PB: `description LIKE "%This item is part of a deluxe paint brush set!%"`,
};

const buildItemSearchLoader = (db, loaders) =>
new DataLoader(async (queries) => {
// This isn't actually optimized as a batch query, we're just using a
// DataLoader for API consistency with our other loaders!
const queryPromises = queries.map(
async ({
function buildItemSearchConditions({
query,
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds = [],
offset,
limit,
}) => {
const actualOffset = offset || 0;
const actualLimit = Math.min(limit || 30, 30);

zoneIds,
}) {
// Split the query into words, and search for each word as a substring
// of the name.
const words = query.split(/\s+/);
const wordMatchersForMysql = words.map(
(word) => "%" + word.replace(/_%/g, "\\$0") + "%"
);
const matcherPlaceholders = words
.map((_) => "t.name LIKE ?")
.join(" AND ");
const matcherPlaceholders = words.map((_) => "t.name LIKE ?").join(" AND ");

const itemKindCondition = itemSearchKindConditions[itemKind] || "1";
const bodyIdCondition = bodyId
@@ -364,8 +352,86 @@ const buildItemSearchLoader = (db, loaders) =>
...currentUserValues,
];

const [[rows], [totalRows]] = await Promise.all([
db.execute(
return { queryJoins, queryConditions, queryConditionValues };
}

const buildItemSearchNumTotalItemsLoader = (db) =>
new DataLoader(async (queries) => {
// This isn't actually optimized as a batch query, we're just using a
// DataLoader for API consistency with our other loaders!
const queryPromises = queries.map(
async ({
query,
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds = [],
}) => {
const {
queryJoins,
queryConditions,
queryConditionValues,
} = buildItemSearchConditions({
query,
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds,
});

const [totalRows] = await db.execute(
`
SELECT count(DISTINCT items.id) AS numTotalItems FROM items
${queryJoins}
WHERE ${queryConditions}
`,
queryConditionValues
);

const { numTotalItems } = totalRows[0];
return numTotalItems;
}
);

const responses = await Promise.all(queryPromises);

return responses;
});

const buildItemSearchItemsLoader = (db, loaders) =>
new DataLoader(async (queries) => {
// This isn't actually optimized as a batch query, we're just using a
// DataLoader for API consistency with our other loaders!
const queryPromises = queries.map(
async ({
query,
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds = [],
offset,
limit,
}) => {
const actualOffset = offset || 0;
const actualLimit = Math.min(limit || 30, 30);

const {
queryJoins,
queryConditions,
queryConditionValues,
} = buildItemSearchConditions({
query,
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds,
});

const [rows] = await db.execute(
`
SELECT DISTINCT items.*, t.name FROM items
${queryJoins}
@@ -374,25 +440,15 @@ const buildItemSearchLoader = (db, loaders) =>
LIMIT ? OFFSET ?
`,
[...queryConditionValues, actualLimit, actualOffset]
),
db.execute(
`
SELECT count(DISTINCT items.id) AS numTotalItems FROM items
${queryJoins}
WHERE ${queryConditions}
`,
queryConditionValues
),
]);
);

const entities = rows.map(normalizeRow);
const { numTotalItems } = totalRows[0];

for (const item of entities) {
loaders.itemLoader.prime(item.id, item);
}

return [entities, numTotalItems];
return entities;
}
);
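A note on the `loaders.itemLoader.prime(item.id, item)` call above: it's what lets the resolvers hand back bare `{ id }` objects later, because any follow-up `itemLoader.load(id)` for a primed item is served from DataLoader's per-request cache instead of another MySQL round trip. A minimal standalone sketch of that pattern (the `fetchItemsByIds` batch function is hypothetical, not from this codebase):

```js
const DataLoader = require("dataloader");

// Hypothetical batch function standing in for the real
// `SELECT * FROM items WHERE id IN (...)` query.
async function fetchItemsByIds(ids) {
  return ids.map((id) => ({ id, name: `Item ${id}` }));
}

const itemLoader = new DataLoader(fetchItemsByIds);

async function demo() {
  // Pretend these rows came back from the search query:
  const rows = [
    { id: "1", name: "Blue Hat" },
    { id: "2", name: "Red Scarf" },
  ];

  // Prime the per-request cache so later loads don't hit the database again.
  for (const item of rows) {
    itemLoader.prime(item.id, item);
  }

  // Served from the primed cache; fetchItemsByIds is never called for "1".
  console.log((await itemLoader.load("1")).name); // "Blue Hat"
}

demo();
```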
@@ -1248,7 +1304,10 @@ function buildLoaders(db) {
loaders.itemLoader = buildItemLoader(db);
loaders.itemTranslationLoader = buildItemTranslationLoader(db);
loaders.itemByNameLoader = buildItemByNameLoader(db, loaders);
loaders.itemSearchLoader = buildItemSearchLoader(db, loaders);
loaders.itemSearchNumTotalItemsLoader = buildItemSearchNumTotalItemsLoader(
db
);
loaders.itemSearchItemsLoader = buildItemSearchItemsLoader(db, loaders);
loaders.newestItemsLoader = buildNewestItemsLoader(db, loaders);
loaders.itemsThatNeedModelsLoader = buildItemsThatNeedModelsLoader(db);
loaders.itemBodiesWithAppearanceDataLoader = buildItemBodiesWithAppearanceDataLoader(
@@ -167,8 +167,7 @@ const typeDefs = gql`
}

# TODO: I guess I didn't add the NC/NP/PB filter to this. Does that cause
# bugs in comparing results on the client? (Also, should we just throw
# this out for a better merge function?)
# bugs in comparing results on the client?
type ItemSearchResult {
query: String!
zones: [Zone!]!
@@ -176,6 +175,15 @@ const typeDefs = gql`
numTotalItems: Int!
}

# TODO: I guess I didn't add the NC/NP/PB filter to this. Does that cause
# bugs in comparing results on the client?
type ItemSearchResultV2 {
query: String!
zones: [Zone!]!
items(offset: Int, limit: Int): [Item!]!
numTotalItems: Int!
}

type ItemTrade {
id: ID!
user: User!
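One of the removed TODO lines above wonders about "a better merge function"; with the V2 shape, pagination lives on the `items` field, so a client could opt into an offset-based merge policy for it via Apollo Client 3's `InMemoryCache`. This is purely a client-side sketch under that assumption, not something this commit adds:

```js
import { InMemoryCache } from "@apollo/client";

// Sketch: merge each page of `items(offset, limit)` into one growing list,
// while `numTotalItems` (no arguments) stays cached once per search as usual.
const cache = new InMemoryCache({
  typePolicies: {
    ItemSearchResultV2: {
      fields: {
        items: {
          // Don't cache a separate list per offset/limit combination.
          keyArgs: false,
          merge(existing = [], incoming, { args }) {
            const offset = args?.offset ?? 0;
            const merged = existing.slice(0);
            incoming.forEach((item, index) => {
              merged[offset + index] = item;
            });
            return merged;
          },
        },
      },
    },
  },
});
```

Whether merging pages like this beats the default behavior depends on how the search UI paginates; the commit itself only changes the server-side shape.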
@@ -195,6 +203,7 @@ const typeDefs = gql`
itemsByName(names: [String!]!): [Item]!

# Search for items with fuzzy matching.
# Deprecated: Prefer itemSearchV2 instead! (A lot is not yet ported tho!)
itemSearch(
query: String!
fitsPet: FitsPetSearchFilter
@@ -205,6 +214,15 @@ const typeDefs = gql`
limit: Int
): ItemSearchResult!

# Search for items with fuzzy matching.
itemSearchV2(
query: String!
fitsPet: FitsPetSearchFilter
itemKind: ItemKindSearchFilter
currentUserOwnsOrWants: OwnsOrWants
zoneIds: [ID!]
): ItemSearchResultV2!

# Deprecated: an alias for itemSearch, but with speciesId and colorId
# required, serving the same purpose as fitsPet in itemSearch.
itemSearchToFit(
@@ -642,7 +660,12 @@ const resolvers = {
offset,
limit,
},
{ itemSearchLoader, petTypeBySpeciesAndColorLoader, currentUserId }
{
itemSearchNumTotalItemsLoader,
itemSearchItemsLoader,
petTypeBySpeciesAndColorLoader,
currentUserId,
}
) => {
let bodyId = null;
if (fitsPet) {
@@ -658,7 +681,8 @@ const resolvers = {
}
bodyId = petType.bodyId;
}
const [items, numTotalItems] = await itemSearchLoader.load({
const [items, numTotalItems] = await Promise.all([
itemSearchItemsLoader.load({
query: query.trim(),
bodyId,
itemKind,
@@ -667,10 +691,40 @@ const resolvers = {
zoneIds,
offset,
limit,
});
}),
itemSearchNumTotalItemsLoader.load({
query: query.trim(),
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds,
}),
]);
const zones = zoneIds.map((id) => ({ id }));
return { query, zones, items, numTotalItems };
},
itemSearchV2: async (
_,
{ query, fitsPet, itemKind, currentUserOwnsOrWants, zoneIds = [] },
{ petTypeBySpeciesAndColorLoader }
) => {
let bodyId = null;
if (fitsPet) {
const petType = await petTypeBySpeciesAndColorLoader.load({
speciesId: fitsPet.speciesId,
colorId: fitsPet.colorId,
});
if (!petType) {
throw new Error(
`pet type not found: speciesId=${fitsPet.speciesId}, ` +
`colorId: ${fitsPet.colorId}`
);
}
bodyId = petType.bodyId;
}
return { query, bodyId, itemKind, currentUserOwnsOrWants, zoneIds };
},
itemSearchToFit: async (
_,
{
@@ -726,6 +780,44 @@ const resolvers = {
},
},

ItemSearchResultV2: {
numTotalItems: async (
{ query, bodyId, itemKind, currentUserOwnsOrWants, zoneIds },
{ offset, limit },
{ currentUserId, itemSearchNumTotalItemsLoader }
) => {
const numTotalItems = await itemSearchNumTotalItemsLoader.load({
query: query.trim(),
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds,
offset,
limit,
});
return numTotalItems;
},
items: async (
{ query, bodyId, itemKind, currentUserOwnsOrWants, zoneIds },
{ offset, limit },
{ currentUserId, itemSearchItemsLoader }
) => {
const items = await itemSearchItemsLoader.load({
query: query.trim(),
bodyId,
itemKind,
currentUserOwnsOrWants,
currentUserId,
zoneIds,
offset,
limit,
});
return items.map(({ id }) => ({ id }));
},
zones: ({ zoneIds }) => zoneIds.map((id) => ({ id })),
},

Mutation: {
addToItemsCurrentUserOwns: async (
_,