Improve item page perf by caching valids in client

Okay, so the initial render time for these faces is annoyingly slow, and getting it down is something I might come back to later…

But actually, the _worst_ part isn't the _initial_ render, which just kinda gets processed as part of the page navigation, right?

The _worst_ part is that we render it slowly _twice_: once on page load, as we send the `useAllValidPetPoses` fetch request; and then again when the fetch request ~instantly comes back from the network cache.

Because we double-render, instead of just rendering with the cached valids data in the first place (like our GraphQL client does), the user sees a second, highly visible pass over a UI that's already slow to render!

So, here we update `useAllValidPetPoses` to cache its response in JS memory, similar in principle to how Apollo Client caches its queries. That way, we can return the valids instantly on the first render, if you already loaded them from the homepage or the wardrobe page or another item page!
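
Roughly, the shape of the change is a module-level variable sitting next to the hook: check it before fetching, and fill it in once a fetch resolves. Here's a simplified sketch of that pattern (the real code is in the diff below; `useCachedFetch` is just an illustrative name, and error handling is elided):

    import React from "react";
    import { useFetch } from "./util"; // path assumed for this sketch

    // Module-level cache: survives across mounts and client-side navigations,
    // but resets on a full page reload.
    let cachedResponse = null;

    function useCachedFetch(url) {
      // Once we have a cached response, skip the network request entirely.
      const networkResponse = useFetch(url, {
        responseType: "arrayBuffer",
        skip: cachedResponse != null,
      });
      const response = cachedResponse || networkResponse;

      // When a real network response finishes loading, stash it for future mounts.
      React.useEffect(() => {
        if (!networkResponse.loading && networkResponse.data && !cachedResponse) {
          cachedResponse = networkResponse;
        }
      }, [networkResponse]);

      return response;
    }
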
Emi Matchu 2021-06-11 07:37:49 -07:00
parent caf0a8b815
commit eaa4fbb575
2 changed files with 40 additions and 8 deletions


@@ -310,17 +310,45 @@ const SpeciesColorSelect = ({
   );
 };
 
-export function useAllValidPetPoses(fetchOptions) {
-  const { loading, error, data: validsBuffer } = useFetch(
-    "/api/validPetPoses",
-    { ...fetchOptions, responseType: "arrayBuffer" }
-  );
+let cachedResponseForAllValidPetPoses = null;
+
+/**
+ * useAllValidPetPoses fetches the valid pet poses, as a `valids` object ready to
+ * pass into the various validity-checker utility functions!
+ *
+ * In addition to the network caching, we globally cache this response in the
+ * client code as `cachedResponseForAllValidPetPoses`. This helps prevent extra
+ * re-renders when client-side navigating between pages, similar to how cached
+ * data from GraphQL serves on the first render, without a loading state.
+ */
+export function useAllValidPetPoses() {
+  const networkResponse = useFetch("/api/validPetPoses", {
+    responseType: "arrayBuffer",
+    // If we already have globally-cached valids, skip the request.
+    skip: cachedResponseForAllValidPetPoses != null,
+  });
+
+  // Use the globally-cached response if we have one, or await the network
+  // response if not.
+  const response = cachedResponseForAllValidPetPoses || networkResponse;
+  const { loading, error, data: validsBuffer } = response;
+
   const valids = React.useMemo(
     () => validsBuffer && new DataView(validsBuffer),
     [validsBuffer]
   );
 
+  // Once a network response comes in, save it as the globally-cached response.
+  React.useEffect(() => {
+    if (
+      networkResponse &&
+      !networkResponse.loading &&
+      !cachedResponseForAllValidPetPoses
+    ) {
+      cachedResponseForAllValidPetPoses = networkResponse;
+    }
+  }, [networkResponse]);
+
   return { loading, error, valids };
 }
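
For illustration, here's roughly what a consumer gains; the component, props, and child elements below are made up for this sketch and aren't part of the commit:

    import { Spinner, Text } from "@chakra-ui/react"; // illustrative; any UI kit works
    import { useAllValidPetPoses } from "./SpeciesColorPicker"; // path assumed

    function SpeciesFacesSection({ speciesId, colorId }) {
      const { loading, error, valids } = useAllValidPetPoses();

      if (loading) {
        // Only the very first visit in a session hits this branch now.
        return <Spinner />;
      }
      if (error) {
        return <Text>Couldn't load valid poses.</Text>;
      }

      // On later item pages, `valids` is already cached, so we render this
      // branch on the first pass, with no loading state and no second render.
      // <FacesGrid> is a stand-in for whatever actually renders the faces.
      return <FacesGrid valids={valids} speciesId={speciesId} colorId={colorId} />;
    }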


@@ -241,14 +241,14 @@ export function usePageTitle(title, { skip = false } = {}) {
  *
  * Our limited API is designed to match the `use-http` library!
  */
-export function useFetch(url, { responseType, ...fetchOptions }) {
+export function useFetch(url, { responseType, skip, ...fetchOptions }) {
   // Just trying to be clear about what you'll get back ^_^` If we want to
   // fetch non-binary data later, extend this and get something else from res!
   if (responseType !== "arrayBuffer") {
     throw new Error(`unsupported responseType ${responseType}`);
   }
 
-  const [loading, setLoading] = React.useState(true);
+  const [loading, setLoading] = React.useState(skip ? false : true);
   const [error, setError] = React.useState(null);
   const [data, setData] = React.useState(null);
 
@@ -258,6 +258,10 @@ export function useFetch(url, { responseType, ...fetchOptions }) {
   const fetchOptionsAsJson = JSON.stringify(fetchOptions);
 
   React.useEffect(() => {
+    if (skip) {
+      return;
+    }
+
     let canceled = false;
 
     fetch(url, JSON.parse(fetchOptionsAsJson))
@@ -284,7 +288,7 @@ export function useFetch(url, { responseType, ...fetchOptions }) {
     return () => {
       canceled = true;
     };
-  }, [url, fetchOptionsAsJson]);
+  }, [skip, url, fetchOptionsAsJson]);
 
   return { loading, error, data };
 }