2020-04-23 13:31:39 -07:00
|
|
|
import React from "react";
|
2021-01-04 01:17:30 -08:00
|
|
|
import { Box, Heading, useColorModeValue } from "@chakra-ui/react";
|
2021-01-21 14:27:05 -08:00
|
|
|
import loadableLibrary from "@loadable/component";
|
2021-01-22 14:12:07 -08:00
|
|
|
import * as Sentry from "@sentry/react";
|
2020-04-23 13:31:39 -07:00
|
|
|
|
2020-04-26 00:46:05 -07:00
|
|
|
/**
|
2021-01-18 15:56:24 -08:00
|
|
|
* Delay hides its content at first, then shows it after the given delay.
|
2020-04-26 00:46:05 -07:00
|
|
|
*
|
|
|
|
* This is useful for loading states: it can be disruptive to see a spinner or
|
|
|
|
* skeleton element for only a brief flash, we'd rather just show them if
|
|
|
|
* loading is genuinely taking a while!
|
|
|
|
*
|
|
|
|
* 300ms is a pretty good default: that's about when perception shifts from "it
|
|
|
|
* wasn't instant" to "the process took time".
|
|
|
|
* https://developers.google.com/web/fundamentals/performance/rail
|
|
|
|
*/
|
2020-04-23 23:43:39 -07:00
|
|
|
export function Delay({ children, ms = 300 }) {
|
2020-04-23 13:31:39 -07:00
|
|
|
const [isVisible, setIsVisible] = React.useState(false);
|
|
|
|
|
|
|
|
React.useEffect(() => {
|
|
|
|
const id = setTimeout(() => setIsVisible(true), ms);
|
|
|
|
return () => clearTimeout(id);
|
|
|
|
}, [ms, setIsVisible]);
|
|
|
|
|
|
|
|
return (
|
|
|
|
<Box opacity={isVisible ? 1 : 0} transition="opacity 0.5s">
|
|
|
|
{children}
|
|
|
|
</Box>
|
|
|
|
);
|
|
|
|
}
|
2020-04-24 21:17:03 -07:00
|
|
|
|
2020-04-26 00:46:05 -07:00
|
|
|
/**
|
|
|
|
* Heading1 is a large, page-title-ish heading, with our DTI-brand-y Delicious
|
|
|
|
* font and some special typographical styles!
|
|
|
|
*/
|
2020-04-24 21:17:03 -07:00
|
|
|
export function Heading1({ children, ...props }) {
|
|
|
|
return (
|
2020-05-18 00:56:46 -07:00
|
|
|
<Heading
|
2020-10-27 23:09:42 -07:00
|
|
|
as="h1"
|
2020-08-12 00:37:31 -07:00
|
|
|
size="2xl"
|
2020-05-18 00:56:46 -07:00
|
|
|
fontFamily="Delicious, sans-serif"
|
|
|
|
fontWeight="800"
|
|
|
|
{...props}
|
|
|
|
>
|
2020-04-24 21:17:03 -07:00
|
|
|
{children}
|
|
|
|
</Heading>
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
2020-04-26 00:46:05 -07:00
|
|
|
/**
|
|
|
|
* Heading2 is a major subheading, with our DTI-brand-y Delicious font and some
|
|
|
|
* special typographical styles!!
|
|
|
|
*/
|
2020-04-24 21:17:03 -07:00
|
|
|
export function Heading2({ children, ...props }) {
|
|
|
|
return (
|
2020-05-18 00:56:46 -07:00
|
|
|
<Heading
|
2020-10-27 23:09:42 -07:00
|
|
|
as="h2"
|
2020-05-18 00:56:46 -07:00
|
|
|
size="xl"
|
|
|
|
fontFamily="Delicious, sans-serif"
|
|
|
|
fontWeight="700"
|
|
|
|
{...props}
|
|
|
|
>
|
2020-04-24 21:17:03 -07:00
|
|
|
{children}
|
|
|
|
</Heading>
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
2020-10-27 23:09:42 -07:00
|
|
|
/**
|
|
|
|
* Heading2 is a minor subheading, with our DTI-brand-y Delicious font and some
|
|
|
|
* special typographical styles!!
|
|
|
|
*/
|
|
|
|
export function Heading3({ children, ...props }) {
|
|
|
|
return (
|
|
|
|
<Heading
|
|
|
|
as="h3"
|
|
|
|
size="lg"
|
|
|
|
fontFamily="Delicious, sans-serif"
|
|
|
|
fontWeight="700"
|
|
|
|
{...props}
|
|
|
|
>
|
|
|
|
{children}
|
|
|
|
</Heading>
|
|
|
|
);
|
|
|
|
}
|
|
|
|
|
2021-01-03 23:31:02 -08:00
|
|
|
/**
|
|
|
|
* ErrorMessage is a simple error message for simple errors!
|
|
|
|
*/
|
2021-01-20 10:36:46 -08:00
|
|
|
export function ErrorMessage({ children, ...props }) {
|
|
|
|
return (
|
|
|
|
<Box color="red.400" {...props}>
|
|
|
|
{children}
|
|
|
|
</Box>
|
|
|
|
);
|
2021-01-03 23:31:02 -08:00
|
|
|
}
|
|
|
|
|
2021-01-04 01:17:30 -08:00
|
|
|
/**
 * useCommonStyles returns color values for common page surfaces, resolved for
 * the current Chakra color mode (light vs. dark).
 */
export function useCommonStyles() {
  // NOTE: these are hooks, so they must run unconditionally and in this order.
  const brightBackground = useColorModeValue("white", "gray.700");
  const bodyBackground = useColorModeValue("gray.50", "gray.800");

  return { brightBackground, bodyBackground };
}
|
|
|
|
|
2020-05-02 15:41:02 -07:00
|
|
|
/**
 * safeImageUrl returns an HTTPS-safe image URL for Neopets assets!
 *
 * Known HTTP Neopets image hosts are rewritten to our HTTPS proxy hosts.
 * Unparseable URLs, and non-HTTPS URLs we don't know how to upgrade, are
 * logged and replaced with a placeholder URL. Null/undefined pass through
 * unchanged.
 */
export function safeImageUrl(urlString) {
  if (urlString == null) {
    return urlString;
  }

  let url;
  try {
    // A few item thumbnail images incorrectly start with "/". When that
    // happens, the correct URL is at images.neopets.com.
    //
    // So, we provide "http://images.neopets.com" as the base URL when
    // parsing. Most URLs are absolute and will ignore it, but relative URLs
    // will resolve relative to that base.
    url = new URL(urlString, "http://images.neopets.com");
  } catch (e) {
    logAndCapture(
      new Error(
        `safeImageUrl could not parse URL: ${urlString}. Returning a placeholder.`
      )
    );
    return "https://impress-2020.openneo.net/__error__URL-was-not-parseable__";
  }

  // Map each known HTTP Neopets origin to its HTTPS proxy host.
  const proxyHostsByOrigin = {
    "http://images.neopets.com": "images.neopets-asset-proxy.openneo.net",
    "http://pets.neopets.com": "pets.neopets-asset-proxy.openneo.net",
  };
  const proxyHost = proxyHostsByOrigin[url.origin];
  if (proxyHost != null) {
    url.protocol = "https:";
    url.host = proxyHost;
  }

  if (url.protocol !== "https:") {
    logAndCapture(
      new Error(
        `safeImageUrl was provided an unsafe URL, but we don't know how to ` +
          `upgrade it to HTTPS: ${urlString}. Returning a placeholder.`
      )
    );
    return "https://impress-2020.openneo.net/__error__URL-was-not-HTTPS__";
  }

  return url.toString();
}
|
|
|
|
|
2020-04-26 00:46:05 -07:00
|
|
|
/**
 * useDebounce helps make a rapidly-changing value change less! It waits for a
 * pause in the incoming data before outputting the latest value.
 *
 * We use it in search: when the user types rapidly, we don't want to update
 * our query and send a new request every keystroke. We want to wait for it to
 * seem like they might be done, while still feeling responsive!
 *
 * Options:
 *   - waitForFirstPause: start from `initialValue` instead of `value`
 *   - initialValue: the starting output when waitForFirstPause is set
 *   - forceReset: when true, output the latest `value` immediately
 *
 * Adapted from https://usehooks.com/useDebounce/
 */
export function useDebounce(
  value,
  delay,
  { waitForFirstPause = false, initialValue = null, forceReset = false } = {}
) {
  const [debouncedValue, setDebouncedValue] = React.useState(
    waitForFirstPause ? initialValue : value
  );

  React.useEffect(() => {
    // Schedule the debounced update; if `value` (or `delay`) changes before
    // the timer fires, the cleanup cancels it and we start over. That's the
    // whole debouncing trick: only a full `delay` of quiet lets it through.
    const timeoutId = setTimeout(() => {
      setDebouncedValue(value);
    }, delay);

    return () => {
      clearTimeout(timeoutId);
    };
  }, [value, delay]);

  // The `forceReset` option sets the value immediately!
  React.useEffect(() => {
    if (forceReset) {
      setDebouncedValue(value);
    }
  }, [value, forceReset]);

  return debouncedValue;
}
|
2020-05-17 23:26:00 -07:00
|
|
|
|
2020-05-17 23:44:33 -07:00
|
|
|
/**
 * usePageTitle sets the page title!
 *
 * Pass `skip: true` to leave the title untouched (e.g. while data loads).
 */
export function usePageTitle(title, { skip = false } = {}) {
  React.useEffect(() => {
    if (skip) {
      return;
    }
    try {
      document.title = title
        ? `${title} | Dress to Impress`
        : "Dress to Impress";
    } catch (e) {
      // I've been seeing Sentry errors that we can't read `title` of
      // undefined, with no traceback. This is the only `.title` I see in our
      // codebase, aside from unpacking props that I'm pretty sure aren't
      // null... so I'm adding this to help confirm!
      logAndCapture(
        new Error(
          `Could not set page title: ${e.message}. Document is: ${document}.`
        )
      );
    }
  }, [title, skip]);
}
|
2020-05-17 23:44:33 -07:00
|
|
|
|
|
|
|
/**
 * useFetch uses `fetch` to fetch the given URL, and returns the request state.
 *
 * Our limited API is designed to match the `use-http` library! Currently only
 * `responseType: "arrayBuffer"` is supported; anything else throws.
 */
export function useFetch(url, { responseType, ...fetchOptions }) {
  // Just trying to be clear about what you'll get back ^_^` If we want to
  // fetch non-binary data later, extend this and get something else from res!
  if (responseType !== "arrayBuffer") {
    throw new Error(`unsupported responseType ${responseType}`);
  }

  const [loading, setLoading] = React.useState(true);
  const [error, setError] = React.useState(null);
  const [data, setData] = React.useState(null);

  // We expect this to be a simple object, so this helps us only re-send the
  // fetch when the options have actually changed, rather than e.g. a new copy
  // of an identical object!
  const fetchOptionsAsJson = JSON.stringify(fetchOptions);

  React.useEffect(() => {
    let canceled = false;

    const sendRequest = async () => {
      try {
        const res = await fetch(url, JSON.parse(fetchOptionsAsJson));
        if (canceled) {
          return;
        }

        const arrayBuffer = await res.arrayBuffer();
        setLoading(false);
        setError(null);
        setData(arrayBuffer);
      } catch (error) {
        if (canceled) {
          return;
        }

        setLoading(false);
        setError(error);
        setData(null);
      }
    };
    // Deliberately not awaited: the effect just kicks the request off, and
    // the `canceled` flag handles teardown.
    sendRequest();

    return () => {
      canceled = true;
    };
  }, [url, fetchOptionsAsJson]);

  return { loading, error, data };
}
|
2020-08-28 22:58:39 -07:00
|
|
|
|
|
|
|
/**
 * useLocalStorage is like React.useState, but it persists the value in the
 * device's `localStorage`, so it comes back even after reloading the page.
 *
 * Adapted from https://usehooks.com/useLocalStorage/.
 */
// Same-page subscribers, so multiple hook instances sharing a key stay in
// sync (the browser's "storage" event only fires for *other* tabs).
let storageListeners = [];
export function useLocalStorage(key, initialValue) {
  const loadValue = React.useCallback(() => {
    try {
      const item = window.localStorage.getItem(key);
      return item ? JSON.parse(item) : initialValue;
    } catch (error) {
      console.log(error);
      return initialValue;
    }
  }, [key, initialValue]);

  // Lazy initializer: only reads localStorage on first render.
  const [storedValue, setStoredValue] = React.useState(loadValue);

  const setValue = (value) => {
    try {
      setStoredValue(value);
      window.localStorage.setItem(key, JSON.stringify(value));
      // Tell other hook instances on this page to re-read the value.
      for (const listener of storageListeners) {
        listener();
      }
    } catch (error) {
      console.log(error);
    }
  };

  const reloadValue = React.useCallback(() => {
    setStoredValue(loadValue());
  }, [loadValue, setStoredValue]);

  // Listen for changes elsewhere on the page, and update here too!
  React.useEffect(() => {
    storageListeners.push(reloadValue);
    return () => {
      storageListeners = storageListeners.filter(
        (listener) => listener !== reloadValue
      );
    };
  }, [reloadValue]);

  // Listen for changes in other tabs, and update here too! (This does not
  // catch same-page updates!)
  React.useEffect(() => {
    window.addEventListener("storage", reloadValue);
    return () => window.removeEventListener("storage", reloadValue);
  }, [reloadValue]);

  return [storedValue, setValue];
}
|
fix Download button to use better caching
So I broke the Download button when we switched to impress-2020.openneo.net, and I forgot to update the Amazon S3 config.
But in addition to that, I'm making some code changes here, to make downloads faster: we now use exactly the same URL and crossOrigin configuration between the <img> tag on the page, and the image that the Download button requests, which ensures that it can use the cached copy instead of loading new stuff. (There were two main cases: 1. it always loaded the PNGs instead of the SVG, which doesn't matter for quality if we're rendering a 600x600 bitmap anyway, but is good caching, and 2. send `crossOrigin` on the <img> tag, which isn't necessary there, but is necessary for Download, and having them match means we can use the cached copy.)
2020-10-10 01:19:59 -07:00
|
|
|
|
|
|
|
/**
 * loadImage loads `src` into an <img> element, and returns a Promise for the
 * loaded element. The Promise gets an extra `cancel()` method, which clears
 * the image's `src` to abandon the load.
 *
 * `crossOrigin`, when provided, is set on the element *before* `src`, which
 * matters for CORS-enabled caching.
 */
export function loadImage({ src, crossOrigin = null }) {
  const image = new Image();
  const promise = new Promise((resolve, reject) => {
    const handleLoad = () => resolve(image);
    const handleError = () =>
      reject(new Error(`Failed to load image: ${JSON.stringify(src)}`));

    image.onload = handleLoad;
    image.onerror = handleError;
    if (crossOrigin) {
      image.crossOrigin = crossOrigin;
    }
    image.src = src;
  });
  promise.cancel = () => {
    image.src = "";
  };
  return promise;
}
|
2021-01-21 14:27:05 -08:00
|
|
|
|
|
|
|
/**
 * loadable is a wrapper for `@loadable/component`, with extra error handling.
 * Loading the page will often fail if you keep a session open during a deploy,
 * because Vercel doesn't keep old JS chunks on the CDN. Recover by reloading!
 */
export function loadable(load, options) {
  const loadWithReloadFallback = () =>
    load().catch((e) => {
      console.error("Error loading page, reloading:", e);
      window.location.reload();
      // Return a component that renders nothing, while we reload!
      return () => null;
    });
  return loadableLibrary(loadWithReloadFallback, options);
}
|
2021-01-22 14:12:07 -08:00
|
|
|
|
|
|
|
/**
 * logAndCapture will print an error to the console, and send it to Sentry.
 *
 * This is useful when there's a graceful recovery path, but it's still a
 * genuinely unexpected error worth logging.
 *
 * @param {Error} e - the error to report (pass a real Error so Sentry gets a
 *   stack trace).
 */
export function logAndCapture(e) {
  console.error(e);
  Sentry.captureException(e);
}
|