impress-2020/api/graphql.js
Matchu e5081dab7e Disable honeycomb auto instrumentation
Huh, well, I can't figure out what in our production env stopped working with Honeycomb's automatic instrumentation… so, oh well! Let's try disabling it for now and see if it works.

This means our Honeycomb logs will no longer include _super helpful_ visualizations of how HTTP requests and MySQL queries create a request dependency waterfall… but I haven't opened Honeycomb in a while, and this bug is blocking all of prod, so if this fixes the site then I'm okay with that as a stopgap!
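For reference, the "disable" itself is just the `enabledInstrumentations` option in the beeline setup, as you can see in the full file below:

```js
const beeline = require("honeycomb-beeline")({
  // ...other options unchanged...
  enabledInstrumentations: [], // an empty list turns off all the automatic
  // instrumentation (the HTTP/MySQL hooks mentioned above).
});
```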

Btw the error message was:
```
Unhandled rejection: TypeError: Cannot read property 'id' of undefined
    at exports.instrumentLoad (/var/task/node_modules/honeycomb-beeline/lib/instrumentation.js:80:14)
    at Function._load (/var/task/node_modules/honeycomb-beeline/lib/instrumentation.js:164:16)
    at ModuleWrap.<anonymous> (internal/modules/esm/translators.js:199:29)
    at ModuleJob.run (internal/modules/esm/module_job.js:169:25)
    at Loader.import (internal/modules/esm/loader.js:177:24)
```

Oh also, this is the first time eslint has looked at scripts/build-cached-data.js, I guess, so I fixed some lint errors in there.
2021-08-08 00:14:55 -07:00

70 lines · 3 KiB · JavaScript

```js
const beeline = require("honeycomb-beeline")({
  writeKey: process.env["HONEYCOMB_WRITE_KEY"],
  dataset:
    process.env["NODE_ENV"] === "production"
      ? "Dress to Impress (2020)"
      : "Dress to Impress (2020, dev)",
  serviceName: "impress-2020-gql-server",
  enabledInstrumentations: [],
  samplerHook,
});
const { ApolloServer } = require("../src/server/lib/apollo-server-vercel");
const { config } = require("../src/server");
const crypto = require("crypto");

const server = new ApolloServer(config);
const serverHandler = server.createHandler();

// We apply different sampling rates for different GraphQL operations
// (according to the client-defined query name), depending on how much load
// we're getting on them. For most operations, we just save all the events, but
// especially heavy-load operations get a lower sampling rate!
const OPERATION_SAMPLE_RATES = {
  ApiOutfitImage: 10, // save 1 out of every 10, ignore the others
  SearchPanel: 5, // save 1 out of every 5, ignore the others
};

function samplerHook(data) {
  // Use the sample rate from the table above.
  // Defaults to 1 (all) for most operations.
  let sampleRate = OPERATION_SAMPLE_RATES[data["app.operation_name"]] || 1;

  // Use the `deterministicSampler` to decide whether this event should be
  // sampled. This might be a child event of a higher-level trace, and we want
  // to make sure that we always return all child events of traces we've
  // sampled, and no child events of traces we haven't. Deterministically
  // sampling by trace ID does this for us!
  //
  // This strategy is outlined in: https://docs.honeycomb.io/getting-data-in/javascript/beeline-nodejs/#sampling-events.
  const shouldSample = deterministicSampler(data["trace.trace_id"], sampleRate);

  return { shouldSample, sampleRate };
}

function deterministicSampler(traceId, sampleRate) {
  // Copied from https://docs.honeycomb.io/getting-data-in/javascript/beeline-nodejs/#sampling-events
  const MAX_UINT32 = Math.pow(2, 32) - 1;
  const sum = crypto.createHash("sha1").update(traceId).digest();
  const upperBound = (MAX_UINT32 / sampleRate) >>> 0;
  return sum.readUInt32BE(0) <= upperBound;
}

async function handle(req, res) {
  // CAREFUL! We here allow any website to use our GraphQL API, so our data can
  // be more useful to the public. Using the * wildcard means that, in modern
  // browsers, requests should be sent without credentials. Additionally, we
  // don't store credentials in cookies; the client is responsible for setting
  // an Authorization header. So, I don't think there's any CSRF danger here.
  // But, let's be careful and make sure this continues to be true!
  res.setHeader("Access-Control-Allow-Origin", "*");

  await serverHandler(req, res);

  // As a sneaky trick, we require the Honeycomb trace to finish before the
  // request formally finishes. This... is technically a slowdown, I'm not sure
  // how much of one. Hopefully not too much?
  // https://vercel.com/docs/platform/limits#streaming-responses
  await beeline.flush();
  res.end();
}

export default handle;
```
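As a quick illustration of how the sampler in this file behaves (the operation name `SomeOtherQuery` below is hypothetical; `ApiOutfitImage` and the `app.operation_name` / `trace.trace_id` fields come straight from the file above):

```js
// A heavy operation listed in OPERATION_SAMPLE_RATES keeps ~1 in 10 traces;
// whether a given trace is kept is deterministic per trace ID, so all child
// events of a sampled trace are kept together.
samplerHook({ "app.operation_name": "ApiOutfitImage", "trace.trace_id": "abc123" });
// => { shouldSample: <true or false, but always the same for "abc123">, sampleRate: 10 }

// Any operation not listed in the table defaults to a rate of 1, so it's always kept.
samplerHook({ "app.operation_name": "SomeOtherQuery", "trace.trace_id": "def456" });
// => { shouldSample: true, sampleRate: 1 }
```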