// This exports users from the MySQL database to Auth0.
//
// If you use the --since flag, we'll only include users whose OpenNeo ID
// records were updated (or created) since then. Or, the --username flag will
// filter for a single specific username. Otherwise, we'll include all users.
// (It's safe to re-run against users already imported; Auth0 will reject any
// duplicates!)
//
// This sorta creates a second copy of everyone's account, copied onto Auth0.
// We should be thoughtful about how we do the actual migration process!
//
// For now, we can run this whenever we want to make it _possible_ to log in
// with Auth0, even if things will be potentially out of sync, because traffic
// to Impress 2020 is just testers now anyway!
//
// The --upsert flag will additionally *update* Auth0's copy of users, not
// just insert. I think I tried to do this early on and it used to reject
// upserts with custom password hashes? But now it seems to work! But I have it
// as an opt-in flag for now, in case I'm forgetting something 😅
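//
// Example invocations (assuming this script lives at
// scripts/export-users-to-auth0.js; adjust the path to wherever it actually
// lives in the repo):
//
//   node scripts/export-users-to-auth0.js                          # all users
//   node scripts/export-users-to-auth0.js --since "2021-01-01 00:00:00"
//   node scripts/export-users-to-auth0.js --username somebody
//   node scripts/export-users-to-auth0.js --upsert                 # also update existing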
const { argv } = require("yargs");
const { ManagementClient } = require("auth0");
const PromisePool = require("es6-promise-pool");

const connectToDb = require("../src/server/db");
const { normalizeRow } = require("../src/server/util");

const auth0 = new ManagementClient({
  domain: "openneo.us.auth0.com",
  clientId: process.env.AUTH0_SUPPORT_CLIENT_ID,
  clientSecret: process.env.AUTH0_SUPPORT_CLIENT_SECRET,
  scope: "read:users update:users",
});

async function main() {
  const connectionsPromise = auth0.getConnections();
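  // We connect with the dedicated `impress2020-util` MySQL account, which has
  // just a few permissions (notably `openneo_id.users`, which the app itself
  // doesn't get). Its username and password live in the secret .env file, both
  // locally and in prod (the Linode VPS, where our cron runs), so the user
  // sync cron job can run without prompting for a password.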
  const db = await connectToDb({
    user: process.env.IMPRESS_MYSQL_SCRIPT_USER,
    password: process.env.IMPRESS_MYSQL_SCRIPT_PASSWORD,
  });

  const connections = await connectionsPromise;
  if (connections.length === 0) {
    throw new Error(`no connections found on the Auth0 account`);
  } else if (connections.length > 1) {
    throw new Error(
      `Not yet implemented: when there is more than one Auth0 connection, specify which one to use.`
    );
  }
  const connectionId = connections[0].id;

  let conditionSQL = "1";
  let conditionValues = [];
  if (argv.username) {
    conditionSQL = "oid.name = ?";
    conditionValues = [argv.username];
  } else if (argv.since) {
    conditionSQL = "oid.created_at >= ?";
    conditionValues = [argv.since];
  }

  let users;
  try {
    const [rows] = await db.query(
      `SELECT dti.id, oid.name, email, encrypted_password, password_salt
       FROM openneo_id.users oid
       INNER JOIN openneo_impress.users dti ON dti.remote_id = oid.id
       WHERE ${conditionSQL}
       ORDER BY dti.id`,
      conditionValues
    );
    users = rows.map(normalizeRow);
  } finally {
    db.close();
  }

  let i = 0;
  function importNextBatch() {
    if (i < users.length) {
      const batchStart = i;
      i += 1000;
      console.info(`Starting batch ${batchStart + 1}-${batchStart + 1000}`);

      const usersBatch = users.slice(batchStart, batchStart + 1000);
      const usersBatchJson = JSON.stringify(usersBatch.map(formatUserForAuth0));
      return runAuth0ImportJob(usersBatchJson, connectionId, batchStart);
    } else {
      return null;
    }
  }

  // Process two import jobs at a time, which is the max allowed by Auth0.
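  // (es6-promise-pool keeps calling importNextBatch for the next promise
  // whenever one of the two slots frees up, and stops once it returns null.)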
  const pool = new PromisePool(importNextBatch, 2);
  try {
    await pool.start();
  } catch (e) {
    console.error(e);
  }

  console.info(`Sent ${users.length} users for import.`);
}

async function runAuth0ImportJob(usersBatchJson, connectionId, batchStart) {
  let job = await auth0.jobs.importUsersJob({
    connection_id: connectionId,
    users_json: usersBatchJson,
    send_completion_email: false, // we're watching the script!
    upsert: Boolean(argv.upsert),
  });
  console.info(
    `[Batch ${batchStart + 1}] Created import job ${job.id}. Waiting...`
  );

  while (job.status === "pending") {
    await pause(5000);
    job = await auth0.jobs.get({ id: job.id });
  }

  if (job.status !== "completed") {
    console.info(
      `[Batch ${batchStart + 1}] Unexpected job status: ${job.status}`
    );
    return;
  }

  const errorGroups = await auth0.jobs.errors({ id: job.id });
  console.info(
    `[Batch ${batchStart + 1}] Import job completed, ` +
      `${errorGroups.length} failed`
  );

  for (const { user, errors } of errorGroups) {
    for (const error of errors) {
      console.info(
        `[Batch ${batchStart + 1}] User ${user.user_id} (${user.email}): ` +
          `${error.message}`
      );
    }
  }
}

function pause(delayMs) {
  return new Promise((resolve) => {
    setTimeout(() => resolve(), delayMs);
  });
}

function formatUserForAuth0(user) {
  const normalizedUsername = user.name.replace(
    /[^a-zA-Z0-9_+\-.!#$^`~@']/g,
    ""
  );
  if (normalizedUsername !== user.name) {
    console.warn(
      `WARN: Username ${user.name} (${user.email}) was not valid, changing to ${normalizedUsername}`
    );
  }

  return {
    user_id: `impress-${user.id}`,
    username: normalizedUsername,
    email: user.email,
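    // The user's legacy password hash: an HMAC-SHA256 digest (hex-encoded),
    // keyed by their per-account salt, expressed in Auth0's
    // custom_password_hash import format so Auth0 can keep verifying their
    // existing password.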
    custom_password_hash: {
      algorithm: "hmac",
      hash: {
        value: user.encryptedPassword,
        encoding: "hex",
        digest: "sha256",
        key: {
          encoding: "utf8",
          value: user.passwordSalt,
        },
      },
    },
  };
}

main()
  .catch((e) => {
    console.error(e);
    process.exit(1);
  })
  .then(() => process.exit());