From 87eca7a97eeb1b986f9a15565566057c9f626fef Mon Sep 17 00:00:00 2001
From: Louis Capitanchik <contact@louiscap.co>
Date: Tue, 20 Apr 2021 22:08:27 +0100
Subject: [PATCH] Reformat with prettier

---
 .prettierrc.js | 9 +
 .../20000101000000-enable-extensions.js | 29 +++-
 .../20191117234148-create-users-table.js | 2 +-
 .../20191118001739-create-metrics-table.js | 105 ++++++------
 ...004631-create-oauth-access-tokens-table.js | 2 +-
 ...05011-create-oauth-refresh-tokens-table.js | 2 +-
 ...8011656-create-files-search-index-table.js | 50 ++++--
 ...20200112223826-create-files-likes-table.js | 2 +-
 ...0210015221-add-files-featured-timestamp.js | 2 +-
 ...0200323000437-create-bundle-codes-table.js | 2 +-
 ...23000553-create-user-bundle-codes-table.js | 2 +-
 .../20210000000000-create-analytics-table.js | 91 ++++++++++
 package-lock.json | 14 ++
 package.json | 5 +-
 run.js | 13 +-
 server.js | 45 +++--
 src/app.js | 33 ++--
 src/bootstrap.js | 38 +++--
 src/config/app.js | 7 +-
 src/config/client.js | 2 +-
 src/config/database.js | 9 +-
 src/config/mail.js | 29 +++-
 src/config/pusher.js | 2 +-
 src/config/queue.js | 5 +-
 src/config/sequelize.js | 12 +-
 src/config/totp.js | 6 +-
 src/console/CleanTestDatabases.js | 18 +-
 src/console/CreateNewDatabase.js | 48 +++---
 src/console/CreateSystemUser.js | 4 +-
 src/core/errors/HttpError.js | 2 +-
 src/core/errors/InputValidationError.js | 2 +-
 src/core/errors/UnauthorizedError.js | 4 +-
 src/core/events/bus.js | 2 +-
 src/core/injection/ContextualModule.js | 12 +-
 src/core/injection/ServiceProvider.js | 10 +-
 src/core/injection/ThreadContext.js | 2 +-
 src/core/services/dataloaders.js | 91 ++++++----
 src/core/services/pusher.js | 6 +-
 src/core/utils/crypto.js | 96 ++++++++++-
 src/core/utils/jwt.js | 68 +++++---
 src/core/utils/process.js | 4 +-
 src/core/utils/queue.js | 7 +-
 src/core/utils/urls.js | 9 +-
 src/core/utils/validation.js | 103 ++++++------
 src/database/models/AccessToken.js | 62 +++----
 src/database/models/AuthorizationCode.js | 69 ++++----
 src/database/models/BaseModel.js | 10 +-
 src/database/models/BundleCode.js | 59 ++++---
 src/database/models/File.js | 114 +++++++------
 src/database/models/Metric.js | 44 +++--
 src/database/models/OAuthClient.js | 62 ++++---
 src/database/models/RefreshToken.js | 59 ++++---
 src/database/models/User.js | 71 ++++----
 src/database/models/index.js | 14 +-
 src/domain/auth/AuthServer.js | 62 +++++--
 src/domain/auth/AuthenticationService.js | 19 ++-
 src/domain/auth/OAuthFlow.js | 47 ++++--
 .../auth/handlers/SendUserPasswordReset.js | 63 +++++--
 src/domain/data/MetricsService.js | 41 +++--
 src/domain/users/UserService.js | 12 +-
 src/http/controllers/api/app.js | 4 +-
 src/http/controllers/api/auth.js | 37 ++--
 src/http/controllers/api/content.js | 40 +++--
 src/http/controllers/api/feedback.js | 52 +++---
 src/http/controllers/api/oauth.js | 19 ++-
 src/http/controllers/api/storage.js | 159 +++++++++++-------
 src/http/controllers/api/user.js | 8 +-
 src/http/controllers/auth.js | 27 ++-
 src/http/controllers/fs_local.js | 5 +-
 src/http/middleware/DeviceProperties.js | 12 +-
 src/http/middleware/ParseIncludes.js | 5 +-
 src/http/middleware/Profiler.js | 18 +-
 src/http/middleware/RequiresAuth.js | 2 +-
 src/http/middleware/SentryReporter.js | 32 ++--
 src/http/params/event.js | 2 +-
 src/http/params/file.js | 2 +-
 src/http/params/oauth_client.js | 2 +-
 src/http/routes.js | 96 +++++++----
 src/http/validators.js | 26 ++-
 src/services/cache/interface.js | 27 ++-
 src/services/cache/memory.js | 20 ++-
 src/services/cache/null.js | 16 +-
src/services/cache/redis.js | 11 +- src/services/fs/gcs.js | 38 +++-- src/services/fs/interface.js | 32 +++- src/services/fs/local.js | 32 ++-- src/services/index.js | 12 +- src/services/mail/interface.js | 19 ++- src/services/mail/log.js | 23 ++- src/services/mail/postmark.js | 82 +++++++++ src/services/mail/sendgrid.js | 11 +- src/services/mail/smtp.js | 37 ++-- src/services/queue/amqp.js | 89 ++++++---- src/services/queue/async.js | 8 +- src/services/queue/interface.js | 16 +- src/services/totp/vault.js | 29 +++- src/services/utils.js | 22 ++- src/vendor/koa-handlebars.js | 77 ++++++--- src/vendor/sentry.js | 21 +-- worker.js | 28 ++- 100 files changed, 1916 insertions(+), 1096 deletions(-) create mode 100644 .prettierrc.js create mode 100644 database/migrations/20210000000000-create-analytics-table.js create mode 100644 src/services/mail/postmark.js diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 0000000..e3e0097 --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,9 @@ +module.exports = { + bracketSpacing: true, + jsxBracketSameLine: true, + singleQuote: true, + trailingComma: 'all', + arrowParens: 'avoid', + useTabs: true, + semi: false, +} diff --git a/database/migrations/20000101000000-enable-extensions.js b/database/migrations/20000101000000-enable-extensions.js index 9ddb56a..c152cf2 100644 --- a/database/migrations/20000101000000-enable-extensions.js +++ b/database/migrations/20000101000000-enable-extensions.js @@ -1,17 +1,32 @@ module.exports = { up: (migration, Types) => { return migration.sequelize.transaction(async t => { - await migration.sequelize.query('CREATE EXTENSION IF NOT EXISTS postgis;', { transaction: t }) - await migration.sequelize.query('CREATE EXTENSION IF NOT EXISTS pg_trgm;', { transaction: t }) - await migration.sequelize.query('CREATE EXTENSION IF NOT EXISTS timescaledb;', { transaction: t }) + await migration.sequelize.query( + 'CREATE EXTENSION IF NOT EXISTS postgis;', + { transaction: t }, + ) + await migration.sequelize.query( + 'CREATE EXTENSION IF NOT EXISTS pg_trgm;', + { transaction: t }, + ) + await migration.sequelize.query( + 'CREATE EXTENSION IF NOT EXISTS timescaledb;', + { transaction: t }, + ) }) }, down: (migration, Types) => { return migration.sequelize.transaction(async t => { - await migration.sequelize.query('DROP EXTENSION IF EXISTS postgis;', { transaction: t }) - await migration.sequelize.query('DROP EXTENSION IF EXISTS pg_trgm;', { transaction: t }) - await migration.sequelize.query('DROP EXTENSION IF EXISTS timescaledb;', { transaction: t }) + await migration.sequelize.query('DROP EXTENSION IF EXISTS postgis;', { + transaction: t, + }) + await migration.sequelize.query('DROP EXTENSION IF EXISTS pg_trgm;', { + transaction: t, + }) + await migration.sequelize.query('DROP EXTENSION IF EXISTS timescaledb;', { + transaction: t, + }) }) }, -} \ No newline at end of file +} diff --git a/database/migrations/20191117234148-create-users-table.js b/database/migrations/20191117234148-create-users-table.js index 04ca011..a99e6b3 100644 --- a/database/migrations/20191117234148-create-users-table.js +++ b/database/migrations/20191117234148-create-users-table.js @@ -49,4 +49,4 @@ module.exports = { down: (migration, Types) => { return migration.dropTable('users') }, -} \ No newline at end of file +} diff --git a/database/migrations/20191118001739-create-metrics-table.js b/database/migrations/20191118001739-create-metrics-table.js index acfb2aa..7cb5e42 100644 --- a/database/migrations/20191118001739-create-metrics-table.js +++ 
b/database/migrations/20191118001739-create-metrics-table.js @@ -1,59 +1,70 @@ module.exports = { up: (migration, Types) => { return migration.sequelize.transaction(async t => { - await migration.createTable('metrics', { - id: { - type: Types.UUID, - defaultValue: Types.UUIDV4, - allowNull: false, - }, - value: { - type: Types.TEXT, - allowNull: false, - }, - type: { - type: Types.TEXT, - allowNull: false, - }, - location: { - type: Types.GEOGRAPHY('POINT', 4326), - allowNull: false, - }, - author_id: { - type: Types.UUID, - allowNull: true, - references: { - model: 'users', - key: 'id', - }, - onDelete: 'SET NULL', - onUpdate: 'CASCADE', - }, - meta: { - type: Types.JSONB, - defaultValue: {}, - allowNull: false, - }, - recorded_at: { - type: Types.DATE, - defaultValue: Types.fn('now'), - allowNull: false, - }, - deleted_at: { - type: Types.DATE, - defaultValue: null, - allowNull: true, + await migration.createTable( + 'metrics', + { + id: { + type: Types.UUID, + defaultValue: Types.UUIDV4, + allowNull: false, + }, + value: { + type: Types.TEXT, + allowNull: false, + }, + type: { + type: Types.TEXT, + allowNull: false, + }, + location: { + type: Types.GEOGRAPHY('POINT', 4326), + allowNull: false, + }, + author_id: { + type: Types.UUID, + allowNull: true, + references: { + model: 'users', + key: 'id', + }, + onDelete: 'SET NULL', + onUpdate: 'CASCADE', + }, + meta: { + type: Types.JSONB, + defaultValue: {}, + allowNull: false, + }, + recorded_at: { + type: Types.DATE, + defaultValue: Types.fn('now'), + allowNull: false, + }, + deleted_at: { + type: Types.DATE, + defaultValue: null, + allowNull: true, + }, }, - }, { transaction: t }) - await migration.sequelize.query('CREATE INDEX metrics_id_idx ON metrics(id)', { transaction: t }) - await migration.sequelize.query('SELECT create_hypertable(\'metrics\', \'recorded_at\')', { transaction: t }) + { transaction: t }, + ) + await migration.sequelize.query( + 'CREATE INDEX metrics_id_idx ON metrics(id)', + { transaction: t }, + ) + await migration.sequelize.query( + "SELECT create_hypertable('metrics', 'recorded_at')", + { transaction: t }, + ) }) - }, down: (migration, Types) => { return migration.sequelize.transaction(async t => { - await migration.removeIndex('metrics', 'metrics_id_idx', { transaction: t }) + await migration.removeIndex('metrics', 'metrics_id_idx', { + transaction: t, + }) await migration.dropTable('metrics', { transaction: t }) }) }, diff --git a/database/migrations/20191118004631-create-oauth-access-tokens-table.js b/database/migrations/20191118004631-create-oauth-access-tokens-table.js index 619aa0e..94bb963 100644 --- a/database/migrations/20191118004631-create-oauth-access-tokens-table.js +++ b/database/migrations/20191118004631-create-oauth-access-tokens-table.js @@ -66,4 +66,4 @@ module.exports = { down: (migration, Types) => { return migration.dropTable('oauth_access_tokens') }, -} \ No newline at end of file +} diff --git a/database/migrations/20191118005011-create-oauth-refresh-tokens-table.js b/database/migrations/20191118005011-create-oauth-refresh-tokens-table.js index 16265ef..364331a 100644 --- a/database/migrations/20191118005011-create-oauth-refresh-tokens-table.js +++ b/database/migrations/20191118005011-create-oauth-refresh-tokens-table.js @@ -66,4 +66,4 @@ module.exports = { down: (migration, Types) => { return migration.dropTable('oauth_refresh_tokens') }, -} \ No newline at end of file +} diff --git a/database/migrations/20191118011656-create-files-search-index-table.js 
b/database/migrations/20191118011656-create-files-search-index-table.js index 6f0feb3..020eb63 100644 --- a/database/migrations/20191118011656-create-files-search-index-table.js +++ b/database/migrations/20191118011656-create-files-search-index-table.js @@ -1,15 +1,22 @@ module.exports = { up: (migration, Types) => { return migration.sequelize.transaction(async t => { - await migration.sequelize.query(` + await migration.sequelize.query( + ` CREATE TABLE IF NOT EXISTS files_search_index ( file_id UUID UNIQUE PRIMARY KEY NOT NULL REFERENCES files(id) ON DELETE CASCADE ON UPDATE CASCADE, search_index TSVECTOR NOT NULL -);`, { transaction: t }) +);`, + { transaction: t }, + ) - await migration.sequelize.query(`CREATE INDEX IF NOT EXISTS files_search_index_idx ON files_search_index USING GIN(search_index);`, { transaction: t }) + await migration.sequelize.query( + `CREATE INDEX IF NOT EXISTS files_search_index_idx ON files_search_index USING GIN(search_index);`, + { transaction: t }, + ) - await migration.sequelize.query(` + await migration.sequelize.query( + ` CREATE OR REPLACE FUNCTION update_files_search_index() RETURNS TRIGGER AS $FUNC$ DECLARE indexed_text TSVECTOR := to_tsvector(concat(NEW.comment, array_to_string(NEW.tags, ' ', ''), array_to_string(NEW.labels, ' ', ''))); @@ -20,20 +27,37 @@ CREATE OR REPLACE FUNCTION update_files_search_index() RETURNS TRIGGER AS $FUNC$ RETURN NEW; END; -$FUNC$ LANGUAGE plpgsql;`, { transaction: t }) +$FUNC$ LANGUAGE plpgsql;`, + { transaction: t }, + ) - await migration.sequelize.query(` + await migration.sequelize.query( + ` CREATE TRIGGER update_files_search_index_trigger AFTER INSERT OR UPDATE ON files - FOR EACH ROW EXECUTE FUNCTION update_files_search_index();`, { transaction: t }) + FOR EACH ROW EXECUTE FUNCTION update_files_search_index();`, + { transaction: t }, + ) }) }, down: (migration, Types) => { return migration.sequelize.transaction(async t => { - await migration.sequelize.query(`DROP TRIGGER IF EXISTS update_files_search_index_trigger ON files`, { transaction: t }) - await migration.sequelize.query(`DROP FUNCTION IF EXISTS update_files_search_index()`, { transaction: t }) - await migration.sequelize.query(`DROP INDEX IF EXISTS files_search_index_idx`, { transaction: t }) - await migration.sequelize.query(`DROP TABLE IF EXISTS files_search_index`, { transaction: t }) + await migration.sequelize.query( + `DROP TRIGGER IF EXISTS update_files_search_index_trigger ON files`, + { transaction: t }, + ) + await migration.sequelize.query( + `DROP FUNCTION IF EXISTS update_files_search_index()`, + { transaction: t }, + ) + await migration.sequelize.query( + `DROP INDEX IF EXISTS files_search_index_idx`, + { transaction: t }, + ) + await migration.sequelize.query( + `DROP TABLE IF EXISTS files_search_index`, + { transaction: t }, + ) }) - } -} \ No newline at end of file + }, +} diff --git a/database/migrations/20200112223826-create-files-likes-table.js b/database/migrations/20200112223826-create-files-likes-table.js index f812203..196a7ae 100644 --- a/database/migrations/20200112223826-create-files-likes-table.js +++ b/database/migrations/20200112223826-create-files-likes-table.js @@ -36,4 +36,4 @@ module.exports = { down: (migration, Types) => { return migration.dropTable('user_file_likes') }, -} \ No newline at end of file +} diff --git a/database/migrations/20200210015221-add-files-featured-timestamp.js b/database/migrations/20200210015221-add-files-featured-timestamp.js index 15d4fe0..bc81b39 100644 --- 
a/database/migrations/20200210015221-add-files-featured-timestamp.js +++ b/database/migrations/20200210015221-add-files-featured-timestamp.js @@ -9,4 +9,4 @@ module.exports = { down: (migration, Types) => { return migration.removeColumn('files', 'featured_at') }, -} \ No newline at end of file +} diff --git a/database/migrations/20200323000437-create-bundle-codes-table.js b/database/migrations/20200323000437-create-bundle-codes-table.js index 056e525..d5037b3 100644 --- a/database/migrations/20200323000437-create-bundle-codes-table.js +++ b/database/migrations/20200323000437-create-bundle-codes-table.js @@ -46,4 +46,4 @@ module.exports = { down: (migration, Types) => { return migration.dropTable('') }, -} \ No newline at end of file +} diff --git a/database/migrations/20200323000553-create-user-bundle-codes-table.js b/database/migrations/20200323000553-create-user-bundle-codes-table.js index 84b0443..0609c16 100644 --- a/database/migrations/20200323000553-create-user-bundle-codes-table.js +++ b/database/migrations/20200323000553-create-user-bundle-codes-table.js @@ -27,4 +27,4 @@ module.exports = { down: (migration, Types) => { return migration.dropTable('user_bundle_codes') }, -} \ No newline at end of file +} diff --git a/database/migrations/20210000000000-create-analytics-table.js b/database/migrations/20210000000000-create-analytics-table.js new file mode 100644 index 0000000..cf5b5a1 --- /dev/null +++ b/database/migrations/20210000000000-create-analytics-table.js @@ -0,0 +1,91 @@ +module.exports = { + up: (migration, Types) => { + return migration.sequelize.transaction(async t => { + await migration.createTable( + 'analytics', + { + id: { + type: Types.UUID, + primaryKey: true, + defaultValue: Types.UUIDV4, + allowNull: false, + }, + session_id: { + type: Types.UUID, + allowNull: true, + }, + type: { + type: Types.TEXT, + allowNull: false, + }, + start_time: { + type: Types.DATE, + defaultValue: Types.fn('now'), + allowNull: false, + }, + end_time: { + type: Types.DATE, + allowNull: true, + }, + parent_id: { + type: Types.UUID, + allowNull: true, + references: { + model: 'analytics', + key: 'id', + }, + onDelete: 'CASCADE', + onUpdate: 'CASCADE', + }, + properties: { + type: Types.JSONB, + defaultValue: '{}', + allowNull: false, + }, + location: { + type: Types.GEOGRAPHY('POINT', 4326), + allowNull: true, + }, + device: { + type: Types.JSONB, + allowNull: true, + }, + meta: { + type: Types.JSONB, + defaultValue: {}, + allowNull: false, + }, + }, + { transaction: t }, + ) + // await migration.addColumn( + // 'analytics', + // 'parent_id', + // { + // type: Types.UUID, + // allowNull: true, + // references: { + // model: 'analytics', + // key: 'id', + // }, + // onDelete: 'CASCADE', + // onUpdate: 'CASCADE', + // }, + // { transaction: t }, + // ) + await migration.sequelize.query( + 'CREATE INDEX analytics_session_id_idx ON analytics(session_id)', + { transaction: t }, + ) + }) + }, + + down: (migration, Types) => { + return migration.sequelize.transaction(async t => { + await migration.removeIndex('analytics', 'analytics_session_id_idx', { + transaction: t, + }) + await migration.dropTable('analytics', { transaction: t }) + }) + }, +} diff --git a/package-lock.json b/package-lock.json index 2adb6e0..87b04fe 100644 --- a/package-lock.json +++ b/package-lock.json @@ -7393,6 +7393,14 @@ "xtend": "^4.0.0" } }, + "postmark": { + "version": "2.7.1", + "resolved": "https://registry.npmjs.org/postmark/-/postmark-2.7.1.tgz", + "integrity": 
"sha512-mevTZY8mZ2+DqBQweoUVsjlcsY2wu/WotDKplsFMiOy4mG7euOzmD4pqkWFymMVjeKbU52NZWEkO2Et1X0tdFw==", + "requires": { + "axios": "^0.21.1" + } + }, "prelude-ls": { "version": "1.1.2", "resolved": "https://npm.lcr.gr/prelude-ls/-/prelude-ls-1.1.2.tgz", @@ -7405,6 +7413,12 @@ "integrity": "sha1-6SQ0v6XqjBn0HN/UAddBo8gZ2Jc=", "dev": true }, + "prettier": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz", + "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==", + "dev": true + }, "pretty-format": { "version": "26.6.2", "resolved": "https://npm.lcr.gr/pretty-format/-/pretty-format-26.6.2.tgz", diff --git a/package.json b/package.json index 13d1e65..6859207 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,8 @@ "start": "NODE_PATH=src node server", "cmd": "NODE_PATH=src node run", "sql": "NODE_PATH=src node scripts/npx-boot.js sequelize", - "repl": "NODE_PATH=src node -e 'Object.entries(require(\"bootstrap\")).forEach(([key, value]) => Object.defineProperty(global, key, { value })); boot().then(() => console.log(\"Booted\"))' -i" + "repl": "NODE_PATH=src node -e 'Object.entries(require(\"bootstrap\")).forEach(([key, value]) => Object.defineProperty(global, key, { value })); boot().then(() => console.log(\"Booted\"))' -i", + "prettier": "prettier server.js worker.js run.js src database --write" }, "author": "Louis Capitanchik <louis@microhacks.co.uk>", "license": "GPL-3.0+", @@ -56,6 +57,7 @@ "pg": "^8.3.0", "pg-hstore": "^2.3.3", "pluralize": "^8.0.0", + "postmark": "^2.7.1", "redbird": "^0.10.0", "remarkable": "^2.0.1", "scrypt-kdf": "^2.0.1", @@ -67,6 +69,7 @@ "devDependencies": { "jest": "^26.6.3", "nodemon": "^2.0.4", + "prettier": "^2.2.1", "supertest": "^6.1.3" } } diff --git a/run.js b/run.js index c2033ea..13f1ee6 100644 --- a/run.js +++ b/run.js @@ -11,8 +11,7 @@ async function runWorker() { .commandDir(bootstrap.fs.path('src', 'console')) .demandCommand() .recommendCommands() - .help() - .argv + .help().argv } async function runMaster() { @@ -44,9 +43,7 @@ async function run() { } } - -run() - .catch(e => { - console.error(e) - process.exit(1) - }) \ No newline at end of file +run().catch(e => { + console.error(e) + process.exit(1) +}) diff --git a/server.js b/server.js index 339c090..62e9563 100644 --- a/server.js +++ b/server.js @@ -9,7 +9,7 @@ const debug = require('debug')('server:boot') const pkg = require('./package.json') const Sentry = require('@sentry/node') -const Tracing = require("@sentry/tracing"); +const Tracing = require('@sentry/tracing') let server = null let worker = null @@ -20,18 +20,21 @@ function bindSentry(app) { debug('Binding sentry to app level errors') - app.on("error", (err, ctx) => { + app.on('error', (err, ctx) => { console.error(err) - Sentry.withScope(function(scope) { - scope.addEventProcessor(function(event) { - return Sentry.Handlers.parseRequest(event, ctx.request); - }); - Sentry.captureException(err); - }); - }); + Sentry.withScope(function (scope) { + scope.addEventProcessor(function (event) { + return Sentry.Handlers.parseRequest(event, ctx.request) + }) + Sentry.captureException(err) + }) + }) } -async function launch(port = 0, host = config('app.host.web', `http://localhost:${ port }`)) { +async function launch( + port = 0, + host = config('app.host.web', `http://localhost:${port}`), +) { const koa = new Koa() const appserver = await app(koa) @@ -41,7 +44,7 @@ async function launch(port = 0, host = config('app.host.web', 
`http://localhost: const httpServer = http.createServer(appserver.callback()) httpServer.listen(port) - debug(`Listening on ${ host }`) + debug(`Listening on ${host}`) server = httpServer @@ -62,9 +65,9 @@ async function runProxy() { throw new Error('Failed to start') } - debug(`Binding hosts [${ hosts.join(', ') }] to server port ${ address.port}`) + debug(`Binding hosts [${hosts.join(', ')}] to server port ${address.port}`) for (const host of hosts) { - proxy.register(host, `http://127.0.0.1:${ address.port }`) + proxy.register(host, `http://127.0.0.1:${address.port}`) } } @@ -89,18 +92,12 @@ async function main() { } } -main() - .catch(e => { - console.error(e) - process.exit(1) - }) - +main().catch(e => { + console.error(e) + process.exit(1) +}) -const cleanupsigs = [ - 'SIGINT', - 'SIGTERM', - 'SIGUSR2', -] +const cleanupsigs = ['SIGINT', 'SIGTERM', 'SIGUSR2'] cleanupsigs.forEach(signal => { process.on(signal, () => { diff --git a/src/app.js b/src/app.js index 293f583..b717c76 100644 --- a/src/app.js +++ b/src/app.js @@ -20,20 +20,27 @@ module.exports = async function createApp(app = new Koa()) { app.keys = [config('app.key')] - app.use(hbs(fs.path('views'), { - debug: debughbs, - })) + app.use( + hbs(fs.path('views'), { + debug: debughbs, + }), + ) app.use(etag({ weak: true })) app.use(bodyparser()) app.use(logger(s => requestLog(s))) - app.use(session({ - key: config('app.session_key'), - renew: true, - secure: config('app.secure_sessions'), - signed: true, - httpOnly: true, - }, app)) + app.use( + session( + { + key: config('app.session_key'), + renew: true, + secure: config('app.secure_sessions'), + signed: true, + httpOnly: true, + }, + app, + ), + ) if (!config('app.secure_sessions')) { debug('Sessions are not restricted to HTTPS') @@ -44,7 +51,11 @@ module.exports = async function createApp(app = new Koa()) { app.use(serviceProvider.attach) Object.values(routers).forEach(router => { - debug('[Prefix "%s"] Mounting %d layers', router.opts?.prefix ?? '/', router.stack?.length ?? 0) + debug( + '[Prefix "%s"] Mounting %d layers', + router.opts?.prefix ?? '/', + router.stack?.length ?? 0, + ) app.use(router.routes()) app.use(router.allowedMethods()) }) diff --git a/src/bootstrap.js b/src/bootstrap.js index 09bf9da..3d82b02 100644 --- a/src/bootstrap.js +++ b/src/bootstrap.js @@ -13,7 +13,7 @@ require('core/events/register') exports.unset = Symbol('unset') -exports.boot = async function() { +exports.boot = async function () { const dotenv = require('dotenv') const expand = require('dotenv-expand') const { loadKeys } = require('core/utils/jwt') @@ -23,7 +23,9 @@ exports.boot = async function() { expand(conf) } - const envEnv = dotenv.config({ path: `.env.${ process.env.NODE_ENV ?? 'development' }`}) + const envEnv = dotenv.config({ + path: `.env.${process.env.NODE_ENV ?? 
'development'}`, + }) expand(envEnv) const conf = dotenv.config() @@ -32,8 +34,7 @@ exports.boot = async function() { if (exports.env('GCS_CREDENTIALS_B64', '').trim() !== '') { await exports.fs.writeAsync( 'google-storage.json', - Buffer.from(exports.env('GCS_CREDENTIALS_B64'), 'base64') - .toString() + Buffer.from(exports.env('GCS_CREDENTIALS_B64'), 'base64').toString(), ) } @@ -105,14 +106,13 @@ exports.config = function getConfigValue(path, fallback = null) { const [file, ...innerPath] = path.split('.') let conf = null try { - conf = require(`./config/${ file }`) + conf = require(`./config/${file}`) } catch (e) { console.error(e) // require('services').log.error(e) return fallback } - if (conf.hasOwnProperty('driver')) { if (innerPath.length === 1 && innerPath[0] === 'driver') { return conf.driver @@ -131,8 +131,8 @@ exports.patchConfig = function setConfigValue(path, value) { const [file, ...innerPath] = path.split('.') let conf = null try { - conf = require(`./config/${ file }`) - } catch(e) { + conf = require(`./config/${file}`) + } catch (e) { console.error(e) return null } @@ -155,21 +155,21 @@ exports.fs = jetpack.cwd(pathUtil.join(__dirname, '..')) exports.url = { web(name, params) { const routes = require('./http/routes') - return (new URL( - routes.web.url(name, params), - exports.config('app.host.web')) + return new URL( + routes.web.url(name, params), + exports.config('app.host.web'), ).toString() }, api(name, params) { const routes = require('./http/routes') - return (new URL( - pathUtil.join('api', routes.api.url(name, params)), - exports.config('app.host.api')) + return new URL( + pathUtil.join('api', routes.api.url(name, params)), + exports.config('app.host.api'), ).toString() }, } -exports.route = function(type, name, params) { +exports.route = function (type, name, params) { const routers = require('http/routes') if (!routers.hasOwnProperty(type)) { return null @@ -180,7 +180,9 @@ exports.route = function(type, name, params) { } exports.routes = { - api(name, params) { return exports.route('api', name, params) }, + api(name, params) { + return exports.route('api', name, params) + }, } exports.invoke = async function invokeCommand(command, args = [], sync = true) { @@ -193,6 +195,6 @@ exports.invoke = async function invokeCommand(command, args = [], sync = true) { ...process.env, NODE_PATH: __dirname, }, - stdio: 'inherit' + stdio: 'inherit', }) -} \ No newline at end of file +} diff --git a/src/config/app.js b/src/config/app.js index 333b95c..f10841c 100644 --- a/src/config/app.js +++ b/src/config/app.js @@ -17,7 +17,10 @@ module.exports = { public_key_b64: env('RSA_PUBLIC_KEY_B64', null), private_key: null, private_key_b64: env('RSA_PRIVATE_KEY_B64', null), - } + }, } -module.exports.secure_session = env('SECURE_SESSIONS') != null ? env('SECURE_SESSIONS') !== 'FALSE' : !module.exports.dev +module.exports.secure_session = + env('SECURE_SESSIONS') != null + ? env('SECURE_SESSIONS') !== 'FALSE' + : !module.exports.dev diff --git a/src/config/client.js b/src/config/client.js index 88205e6..4bcd6e0 100644 --- a/src/config/client.js +++ b/src/config/client.js @@ -8,5 +8,5 @@ module.exports = { red: '#ff8080', yellow: '#ffdd67', orange: '#ffba92', - } + }, } diff --git a/src/config/database.js b/src/config/database.js index a200bb9..3e95b1f 100644 --- a/src/config/database.js +++ b/src/config/database.js @@ -26,12 +26,15 @@ if (url) { dialectOptions: { ssl: { rejectUnauthorized: false, - ca: Buffer.from(env('DATABASE_CA_CERT', null) ?? 
'', 'base64').toString(), - } + ca: Buffer.from( + env('DATABASE_CA_CERT', null) ?? '', + 'base64', + ).toString(), + }, }, log_queries: env('LOG_SQL_QUERIES', 'true') === 'true', } } // module.exports.logging = require('debug')('server:database') -module.exports.logging = false \ No newline at end of file +module.exports.logging = false diff --git a/src/config/mail.js b/src/config/mail.js index d9735e8..5dc0d69 100644 --- a/src/config/mail.js +++ b/src/config/mail.js @@ -1,7 +1,9 @@ const { env, config } = require('bootstrap') const fromTo = { - from: env('MAIL_FROM', () => { throw new Error('Missing mail from address')}), + from: env('MAIL_FROM', () => { + throw new Error('Missing mail from address') + }), name: env('MAIL_FROM_NAME', 'Autobot'), replyto: env('MAIL_REPLY_TO', () => config('mail.from', null)), } @@ -9,7 +11,28 @@ const fromTo = { module.exports = { driver: env('MAIL_DRIVER', 'log'), sendgrid: { - key: env('SENDGRID_KEY', () => { throw new Error('Missing Sendgrid Key') }), + key: env('SENDGRID_KEY', () => { + throw new Error('Missing Sendgrid Key') + }), + ...fromTo, + opts: { + from: { + email: env('MAIL_FROM'), + name: env('MAIL_FROM_NAME'), + }, + replyTo: { + email: env('MAIL_REPLY_TO'), + name: env('MAIL_FROM_NAME'), + }, + }, + templates: { + 'reset-password': 'd-dd89d66ad75f40f5b3b0ed6849753cf7', + }, + }, + postmark: { + key: env('POSTMARK_KEY', () => { + throw new Error('Missing Postmark Key') + }), ...fromTo, opts: { from: { @@ -34,5 +57,5 @@ module.exports = { port: env('SMTP_PORT'), user: env('SMTP_USERNAME'), password: env('SMTP_PASSWORD'), - } + }, } diff --git a/src/config/pusher.js b/src/config/pusher.js index 99a172b..fc61e50 100644 --- a/src/config/pusher.js +++ b/src/config/pusher.js @@ -4,5 +4,5 @@ module.exports = { id: env('PUSHER_APP_ID'), key: env('PUSHER_APP_KEY'), secret: env('PUSHER_APP_SECRET'), - region: env('PUSHER_APP_REGION') + region: env('PUSHER_APP_REGION'), } diff --git a/src/config/queue.js b/src/config/queue.js index c7437c8..2931422 100644 --- a/src/config/queue.js +++ b/src/config/queue.js @@ -8,5 +8,6 @@ module.exports = { user: env('AMQP_USER', 'guest'), pass: env('AMQP_PASSWORD', 'guest'), secure: env('AMQP_SECURE', 'false') === 'true', - } -} \ No newline at end of file + retries: parseInt(env('QUEUE_RETRIES', '5'), 10), + }, +} diff --git a/src/config/sequelize.js b/src/config/sequelize.js index 585e1b1..99516bf 100644 --- a/src/config/sequelize.js +++ b/src/config/sequelize.js @@ -7,13 +7,13 @@ module.exports = { password: 'hackerfest', database: 'hackerfest', host: '127.0.0.1', - ...(config('database', {})), + ...config('database', {}), dialect: 'postgres', pool: { max: 5, min: 1, acquire: 30000, - idle: 10000 + idle: 10000, }, define: { timestamps: true, @@ -26,13 +26,13 @@ module.exports = { password: 'hackerfest', database: 'hackerfest', host: '127.0.0.1', - ...(config('database', {})), + ...config('database', {}), dialect: 'postgres', pool: { max: 10, min: 1, acquire: 30000, - idle: 10000 + idle: 10000, }, define: { timestamps: true, @@ -45,13 +45,13 @@ module.exports = { password: 'hackerfest', database: 'hackerfest', host: '127.0.0.1', - ...(config('database', {})), + ...config('database', {}), dialect: 'postgres', pool: { max: 10, min: 1, acquire: 30000, - idle: 10000 + idle: 10000, }, define: { timestamps: true, diff --git a/src/config/totp.js b/src/config/totp.js index e0d77cd..5c518ff 100644 --- a/src/config/totp.js +++ b/src/config/totp.js @@ -2,7 +2,5 @@ const { env } = require('bootstrap') module.exports = { 
driver: env('TOTP_DRIVER', 'vault'), - vault: { - - } -} \ No newline at end of file + vault: {}, +} diff --git a/src/console/CleanTestDatabases.js b/src/console/CleanTestDatabases.js index 3be7e6e..4477a8a 100644 --- a/src/console/CleanTestDatabases.js +++ b/src/console/CleanTestDatabases.js @@ -4,9 +4,11 @@ module.exports = { async handler(args) { const { config } = require('bootstrap') const { sequelize } = require('database/models') - const [databases] = await sequelize.query(`SELECT datname as name FROM pg_database WHERE datistemplate = false;`) + const [databases] = await sequelize.query( + `SELECT datname as name FROM pg_database WHERE datistemplate = false;`, + ) - const prefix = `${ config('database.database') }_` + const prefix = `${config('database.database')}_` const toDelete = [] for (const entry of databases) { @@ -16,16 +18,16 @@ module.exports = { } if (toDelete.length === 0) { - console.log("No databases with the prefix %s", prefix) + console.log('No databases with the prefix %s', prefix) } else { - console.log("Deleting Databases:") + console.log('Deleting Databases:') for (const name of toDelete) { - console.log(` ${ name }`) - await sequelize.query(`DROP DATABASE ${ name };`) + console.log(` ${name}`) + await sequelize.query(`DROP DATABASE ${name};`) } } process.exit(0) - } -} \ No newline at end of file + }, +} diff --git a/src/console/CreateNewDatabase.js b/src/console/CreateNewDatabase.js index d9ee348..7f6a913 100644 --- a/src/console/CreateNewDatabase.js +++ b/src/console/CreateNewDatabase.js @@ -2,47 +2,55 @@ module.exports = { command: 'db:fresh [id]', description: 'Create a fresh database instance with a random name', builder(yargs) { - yargs.positional('id', { - describe: 'A preset ID to use for the database name', - type: 'string', - }).option('and-migrate', { - demandOption: false, - alias: 'migrate', - describe: 'Run migrations against the newly created database', - }) + yargs + .positional('id', { + describe: 'A preset ID to use for the database name', + type: 'string', + }) + .option('and-migrate', { + demandOption: false, + alias: 'migrate', + describe: 'Run migrations against the newly created database', + }) }, async handler(args) { const { v4: uuid } = require('uuid') const { config } = require('bootstrap') const dbid = args.id ?? 
uuid().replace(/-/g, '') - const newDatabaseName = `${ config('database.database') }_${ dbid }` + const newDatabaseName = `${config('database.database')}_${dbid}` const { sequelize } = require('database/models') - const [databases] = await sequelize.query(`SELECT datname as name FROM pg_database WHERE datistemplate = false;`) + const [databases] = await sequelize.query( + `SELECT datname as name FROM pg_database WHERE datistemplate = false;`, + ) for (const entry of databases) { if (entry.name === newDatabaseName) { - throw new Error(`Database with name ${ newDatabaseName } already exists`) + throw new Error(`Database with name ${newDatabaseName} already exists`) } } console.log('Creating new database with name', newDatabaseName) - await sequelize.query(`CREATE DATABASE ${ newDatabaseName };`) + await sequelize.query(`CREATE DATABASE ${newDatabaseName};`) if (args.migrate) { console.log('Running migrations on database', newDatabaseName) const { exec } = require('core/utils/process') - await exec('npm run sql db:migrate', { - env: { - ...process.env, - DATABASE_NAME: newDatabaseName, + await exec( + 'npm run sql db:migrate', + { + env: { + ...process.env, + DATABASE_NAME: newDatabaseName, + }, + stdio: 'inherit', }, - stdio: 'inherit', - }, true) + true, + ) } process.exit(0) - } -} \ No newline at end of file + }, +} diff --git a/src/console/CreateSystemUser.js b/src/console/CreateSystemUser.js index c1b4982..ac5f406 100644 --- a/src/console/CreateSystemUser.js +++ b/src/console/CreateSystemUser.js @@ -13,5 +13,5 @@ module.exports = { } process.exit(0) - } -} \ No newline at end of file + }, +} diff --git a/src/core/errors/HttpError.js b/src/core/errors/HttpError.js index b30a566..bf54766 100644 --- a/src/core/errors/HttpError.js +++ b/src/core/errors/HttpError.js @@ -32,4 +32,4 @@ module.exports = class HttpError extends Error { ctx.body = { errors: { general: [this._message] } } } } -} \ No newline at end of file +} diff --git a/src/core/errors/InputValidationError.js b/src/core/errors/InputValidationError.js index a6234da..1a97aef 100644 --- a/src/core/errors/InputValidationError.js +++ b/src/core/errors/InputValidationError.js @@ -4,4 +4,4 @@ module.exports = class InputValidationError extends HttpError { constructor(fields) { super(422, 'The supplied input was not valid', { fields }) } -} \ No newline at end of file +} diff --git a/src/core/errors/UnauthorizedError.js b/src/core/errors/UnauthorizedError.js index 34a684e..81323dd 100644 --- a/src/core/errors/UnauthorizedError.js +++ b/src/core/errors/UnauthorizedError.js @@ -1,7 +1,7 @@ const HttpError = require('./HttpError') -module.exports = class UnauthorizedError extends HttpError { +module.exports = class UnauthorizedError extends HttpError { constructor() { super(401, 'You must be logged in to access this resource') } -} \ No newline at end of file +} diff --git a/src/core/events/bus.js b/src/core/events/bus.js index cf3958e..d41d55d 100644 --- a/src/core/events/bus.js +++ b/src/core/events/bus.js @@ -1,7 +1,7 @@ const EventEmitter = require('events') const bus = new EventEmitter() -bus.on('error', function(error) { +bus.on('error', function (error) { console.log(error) }) bus.setMaxListeners(250) diff --git a/src/core/injection/ContextualModule.js b/src/core/injection/ContextualModule.js index 8af8b53..0fb0279 100644 --- a/src/core/injection/ContextualModule.js +++ b/src/core/injection/ContextualModule.js @@ -1,8 +1,14 @@ module.exports = class ContextualModule { - static withContext(ctx) { return new this(ctx) } - static 
withoutContext() { return new this({}) } + static withContext(ctx) { + return new this(ctx) + } + static withoutContext() { + return new this({}) + } - static getServiceName() { throw new Error(`getServiceName Not Implemented for ${ this.name }`) } + static getServiceName() { + throw new Error(`getServiceName Not Implemented for ${this.name}`) + } constructor(context = null) { if (context == null) { diff --git a/src/core/injection/ServiceProvider.js b/src/core/injection/ServiceProvider.js index 5bbee41..f0d72e7 100644 --- a/src/core/injection/ServiceProvider.js +++ b/src/core/injection/ServiceProvider.js @@ -32,8 +32,8 @@ function createFakeContext() { res.statusCode = 200 const context = Object.create(baseCtx) - const request = context.request = Object.create(baseReq) - const response = context.response = Object.create(baseRes) + const request = (context.request = Object.create(baseReq)) + const response = (context.response = Object.create(baseRes)) context.app = request.app = response.app = this context.req = request.req = response.req = req @@ -63,7 +63,9 @@ module.exports = class ServiceProvider { services.forEach(service => { const name = service.getServiceName() if (ctx.services.hasOwnProperty(name)) { - console.warn(`Multiple services found for name: ${name}. Using implementation provided by ${service.name}`) + console.warn( + `Multiple services found for name: ${name}. Using implementation provided by ${service.name}`, + ) } ctx.services[name] = createServiceWithProfiling(service, ctx) }) @@ -88,4 +90,4 @@ module.exports = class ServiceProvider { return ServiceProvider.attach(newContext, () => newContext) } -} \ No newline at end of file +} diff --git a/src/core/injection/ThreadContext.js b/src/core/injection/ThreadContext.js index be53ee5..86b3402 100644 --- a/src/core/injection/ThreadContext.js +++ b/src/core/injection/ThreadContext.js @@ -53,7 +53,7 @@ class ThreadContext extends AsyncLocalStorage { if (existing) { return existing } - const Sentry = require("@sentry/node") + const Sentry = require('@sentry/node') const t = Sentry.startTransaction(ctx) this.set('profiling', t) return t diff --git a/src/core/services/dataloaders.js b/src/core/services/dataloaders.js index db2f697..373e012 100644 --- a/src/core/services/dataloaders.js +++ b/src/core/services/dataloaders.js @@ -1,40 +1,58 @@ const Dataloader = require('dataloader') -const createLoaderForModel = exports.createForModel = function (model, singleProperties = ['id'], multipleProperties = []) { - const loaders = singleProperties.reduce((set, property) => ({ - ...set, - [property]: new Dataloader(keys => Promise.all(keys.map(async key => { - const data = await model.findOne({where: {[property]: key}}) - if (data) { - singleProperties.forEach(prop => { - if (prop !== property && data[prop] != null) { - loaders[prop].prime(data[prop], data) - } - }) - } - return data - }))), - }), {}) +const createLoaderForModel = (exports.createForModel = function ( + model, + singleProperties = ['id'], + multipleProperties = [], +) { + const loaders = singleProperties.reduce( + (set, property) => ({ + ...set, + [property]: new Dataloader(keys => + Promise.all( + keys.map(async key => { + const data = await model.findOne({ where: { [property]: key } }) + if (data) { + singleProperties.forEach(prop => { + if (prop !== property && data[prop] != null) { + loaders[prop].prime(data[prop], data) + } + }) + } + return data + }), + ), + ), + }), + {}, + ) - multipleProperties.reduce((set, property) => ({ - ...set, - [property]: new Dataloader(keys 
=> Promise.all(keys.map(async key => { - const data = await model.find({where: {[property]: key}}) - if (data && data.length) { - singleProperties.forEach(prop => { - data.forEach(datum => { - if (prop !== property && datum[prop] != null) { - loaders[prop].prime(datum[prop], datum) + multipleProperties.reduce( + (set, property) => ({ + ...set, + [property]: new Dataloader(keys => + Promise.all( + keys.map(async key => { + const data = await model.find({ where: { [property]: key } }) + if (data && data.length) { + singleProperties.forEach(prop => { + data.forEach(datum => { + if (prop !== property && datum[prop] != null) { + loaders[prop].prime(datum[prop], datum) + } + }) + }) } - }) - }) - } - return data - }))) - }), loaders) + return data + }), + ), + ), + }), + loaders, + ) return loaders -} +}) exports.generateDataloaders = function () { const { User } = require('database/models') // , Ticket, TicketType, Event, Episode, Venue, Address } = require('database/models') @@ -49,11 +67,15 @@ exports.generateDataloaders = function () { } } -exports.DataloaderService = class extends require('core/injection/ContextualModule') { - static getServiceName() { return 'dataloaderService' } +exports.DataloaderService = class extends ( + require('core/injection/ContextualModule') +) { + static getServiceName() { + return 'dataloaderService' + } constructor(...args) { - super(...args); + super(...args) const dataloader = exports.generateDataloaders() Object.entries(dataloader).forEach(([name, loadermap]) => { @@ -65,5 +87,4 @@ exports.DataloaderService = class extends require('core/injection/ContextualModu }) }) } - } diff --git a/src/core/services/pusher.js b/src/core/services/pusher.js index 5bf6794..c639ec4 100644 --- a/src/core/services/pusher.js +++ b/src/core/services/pusher.js @@ -1,4 +1,4 @@ -const Pusher = require('pusher'); +const Pusher = require('pusher') const { config } = require('bootstrap') const pusher = new Pusher({ @@ -6,7 +6,7 @@ const pusher = new Pusher({ key: config('pusher.key'), secret: config('pusher.secret'), cluster: config('pusher.region'), - encrypted: true -}); + encrypted: true, +}) module.exports = pusher diff --git a/src/core/utils/crypto.js b/src/core/utils/crypto.js index a891390..942c263 100644 --- a/src/core/utils/crypto.js +++ b/src/core/utils/crypto.js @@ -36,7 +36,24 @@ exports.secureHexString = function generateSecureHexString(bytes) { return exports.secureBuffer(bytes).then(b => b.toString('hex')) } -const hex = ['1', '2', '3', '4', '5', '6', '7', '8', '9', '0', 'a', 'b', 'c', 'd', 'e', 'f'] +const hex = [ + '1', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + '0', + 'a', + 'b', + 'c', + 'd', + 'e', + 'f', +] /** * Generate a hexadecimal string of the given length in a synchronous and insecure manner. 
@@ -68,9 +85,63 @@ exports.insecureHexString = function generateInsecureHexStringSync(length) { * @type {string[]} */ const friendlyAlphabet = [ - 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'j', 'k', 'm', 'n', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', - 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'J', 'K', 'L', 'M', 'N', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', - '1', '2', '3', '4', '5', '6', '7', '8', '9', '0', + 'a', + 'b', + 'c', + 'd', + 'e', + 'f', + 'g', + 'h', + 'j', + 'k', + 'm', + 'n', + 'p', + 'q', + 'r', + 's', + 't', + 'u', + 'v', + 'w', + 'x', + 'y', + 'z', + 'A', + 'B', + 'C', + 'D', + 'E', + 'F', + 'G', + 'H', + 'J', + 'K', + 'L', + 'M', + 'N', + 'P', + 'Q', + 'R', + 'S', + 'T', + 'U', + 'V', + 'W', + 'X', + 'Y', + 'Z', + '1', + '2', + '3', + '4', + '5', + '6', + '7', + '8', + '9', + '0', ] exports.friendlyRefString = function generateFriendlyRefString(length) { const buffer = [] @@ -140,8 +211,7 @@ exports.hash = function hashPayload(payload) { const source = stringifyPayload(payload) const inputBuffer = Buffer.from(source, 'utf-8') const params = getScryptParams() - return scrypt.kdf(inputBuffer, params) - .then(hash => hash.toString('base64')) + return scrypt.kdf(inputBuffer, params).then(hash => hash.toString('base64')) } /** @@ -256,13 +326,21 @@ exports.decryptWith = function decryptWithKey(key, encrypted) { return buffer } -exports.hmac = async function createHmacSignature(payload, secret, algorithm = 'sha256') { +exports.hmac = async function createHmacSignature( + payload, + secret, + algorithm = 'sha256', +) { const hmac = crypto.createHmac(algorithm, secret) hmac.update(payload) return hmac.digest('base64') } -exports.hashFile = function createFileHash(path, algorithm = 'sha384', output = 'base64') { +exports.hashFile = function createFileHash( + path, + algorithm = 'sha384', + output = 'base64', +) { const hash = crypto.createHash(algorithm) const stream = fs.createReadStream(path) @@ -281,4 +359,4 @@ exports.hashFile = function createFileHash(path, algorithm = 'sha384', output = } exports.toBase64 = string => Buffer.from(string).toString('base64') -exports.fromBase64 = string => Buffer.from(string, 'base64').toString() \ No newline at end of file +exports.fromBase64 = string => Buffer.from(string, 'base64').toString() diff --git a/src/core/utils/jwt.js b/src/core/utils/jwt.js index c3038df..8c4ba34 100644 --- a/src/core/utils/jwt.js +++ b/src/core/utils/jwt.js @@ -2,25 +2,29 @@ const { generateKeyPair, createPublicKey, createPrivateKey } = require('crypto') async function generateRsaKeys() { const { config } = require('bootstrap') return new Promise((resolve, reject) => { - generateKeyPair('rsa', { - modulusLength: 4096, - publicKeyEncoding: { - type: 'spki', - format: 'pem', + generateKeyPair( + 'rsa', + { + modulusLength: 4096, + publicKeyEncoding: { + type: 'spki', + format: 'pem', + }, + privateKeyEncoding: { + type: 'pkcs8', + format: 'pem', + cipher: 'aes-256-cbc', + passphrase: config('app.key'), + }, }, - privateKeyEncoding: { - type: 'pkcs8', - format: 'pem', - cipher: 'aes-256-cbc', - passphrase: config('app.key'), - } - }, (err, pub, priv) => { - if (err) { - reject(err) - } else { - resolve({ pub: pub.toString(), priv: priv.toString() }) - } - }) + (err, pub, priv) => { + if (err) { + reject(err) + } else { + resolve({ pub: pub.toString(), priv: priv.toString() }) + } + }, + ) }) } @@ -42,7 +46,7 @@ exports.loadKeys = async () => { if (pub != null && priv != null) { return { pub, - priv + priv, } } @@ -56,26 +60,34 @@ 
exports.loadKeys = async () => { } pub = createPublicKey({ key: pub }) - priv = createPrivateKey({ key: priv, passphrase: config('app.security.private_key_passphrase', env('RSA_PRIVATE_PASSPHRASE', config('app.key'))) }) + priv = createPrivateKey({ + key: priv, + passphrase: config( + 'app.security.private_key_passphrase', + env('RSA_PRIVATE_PASSPHRASE', config('app.key')), + ), + }) patchConfig('app.security.public_key', pub) patchConfig('app.security.private_key', priv) return { pub, priv } } -exports.sign = async (payload) => { +exports.sign = async payload => { const threadContext = require('core/injection/ThreadContext') const { default: SignJWT } = require('jose/jwt/sign') const { priv } = exports.getKeys() - return await threadContext.profile('jwt.sign', JSON.stringify(payload), () => new SignJWT(payload) - .setIssuer(exports.jwtOptions.issuer) - .setIssuedAt() - .setProtectedHeader({ alg: 'RS256' }) - .sign(priv)) + return await threadContext.profile('jwt.sign', JSON.stringify(payload), () => + new SignJWT(payload) + .setIssuer(exports.jwtOptions.issuer) + .setIssuedAt() + .setProtectedHeader({ alg: 'RS256' }) + .sign(priv), + ) } -exports.verify = async(token) => { +exports.verify = async token => { const threadContext = require('core/injection/ThreadContext') const { default: jwtVerify } = require('jose/jwt/verify') const { getKeys, jwtOptions } = exports @@ -89,4 +101,4 @@ exports.verify = async(token) => { exports.jwtOptions = { issuer: 'urn:jetsam:systems:auth', -} \ No newline at end of file +} diff --git a/src/core/utils/process.js b/src/core/utils/process.js index d92c4cd..6927d1b 100644 --- a/src/core/utils/process.js +++ b/src/core/utils/process.js @@ -1,4 +1,4 @@ -exports.exec = async function(cmd, opts, linkStdio) { +exports.exec = async function (cmd, opts, linkStdio) { const proc = require('child_process') let resolve, reject @@ -18,4 +18,4 @@ exports.exec = async function(cmd, opts, linkStdio) { child.once('exit', resolve) return await defer -} \ No newline at end of file +} diff --git a/src/core/utils/queue.js b/src/core/utils/queue.js index 31b1623..c96201a 100644 --- a/src/core/utils/queue.js +++ b/src/core/utils/queue.js @@ -1,8 +1,11 @@ const HANDLERS = [ - ['send-user-password-reset', require('domain/auth/handlers/SendUserPasswordReset')] + [ + 'send-user-password-reset', + require('domain/auth/handlers/SendUserPasswordReset'), + ], ] module.exports = function bindJobHandlers() { const { queue } = require('services') HANDLERS.forEach(([job, handler]) => queue.bind(job, handler)) -} \ No newline at end of file +} diff --git a/src/core/utils/urls.js b/src/core/utils/urls.js index 8fe489a..ff3b832 100644 --- a/src/core/utils/urls.js +++ b/src/core/utils/urls.js @@ -10,6 +10,11 @@ exports.createUrl = (host, path, query = unset) => { return url.toString() } -exports.appUrl = (path, query = unset) => exports.createUrl(config('app.host.web'), path, query) +exports.appUrl = (path, query = unset) => + exports.createUrl(config('app.host.web'), path, query) -exports.queryValueToArray = (value = '') => (value || '').split(',').map(s => s.trim()).filter(Boolean) +exports.queryValueToArray = (value = '') => + (value || '') + .split(',') + .map(s => s.trim()) + .filter(Boolean) diff --git a/src/core/utils/validation.js b/src/core/utils/validation.js index a4bd295..3ba8c21 100644 --- a/src/core/utils/validation.js +++ b/src/core/utils/validation.js @@ -1,66 +1,57 @@ const Joi = require('joi') exports['survey'] = Joi.object({ - questions: Joi.array().items(Joi.object({ - 
question: Joi.string().min(1).required(), - type: Joi.any().valid( - 'text', - 'number', - 'checkbox', - 'choice', - 'range', - ).id('survey-type'), - required: Joi.boolean().required(), - constraints: Joi.alternatives().conditional(Joi.ref('#survey-type'), { - switch: [ - { - is: 'text', - then: Joi.object({ - email: Joi.boolean(), - }), - }, - { - is: 'number', - then: Joi.object({ - min: Joi.number(), - max: Joi.number(), - }), - }, - { - is: 'checkbox', - then: Joi.object({ - checkRequired: Joi.boolean(), - }), - }, - { - is: 'choice', - then: Joi.object({ - options: Joi.array().items(Joi.string()) - }), - }, - { - is: 'range', - then: Joi.object({ - - }), - }, - { - is: Joi.invalid('text', 'number', 'checkbox', 'choice', 'range'), - then: Joi.forbidden(), - }, - ], + questions: Joi.array().items( + Joi.object({ + question: Joi.string().min(1).required(), + type: Joi.any() + .valid('text', 'number', 'checkbox', 'choice', 'range') + .id('survey-type'), + required: Joi.boolean().required(), + constraints: Joi.alternatives().conditional(Joi.ref('#survey-type'), { + switch: [ + { + is: 'text', + then: Joi.object({ + email: Joi.boolean(), + }), + }, + { + is: 'number', + then: Joi.object({ + min: Joi.number(), + max: Joi.number(), + }), + }, + { + is: 'checkbox', + then: Joi.object({ + checkRequired: Joi.boolean(), + }), + }, + { + is: 'choice', + then: Joi.object({ + options: Joi.array().items(Joi.string()), + }), + }, + { + is: 'range', + then: Joi.object({}), + }, + { + is: Joi.invalid('text', 'number', 'checkbox', 'choice', 'range'), + then: Joi.forbidden(), + }, + ], + }), }), - })), + ), }) exports['events.ticket_types.new'] = Joi.object({ - name: Joi.string() - .min(1) - .required(), + name: Joi.string().min(1).required(), description: Joi.string(), - quantity: Joi.number() - .integer() - .required() - .min(1), + quantity: Joi.number().integer().required().min(1), survey: exports['survey'], }) diff --git a/src/database/models/AccessToken.js b/src/database/models/AccessToken.js index 16d060d..dcf414f 100644 --- a/src/database/models/AccessToken.js +++ b/src/database/models/AccessToken.js @@ -3,8 +3,8 @@ const BaseModel = require('./BaseModel') class AccessToken extends BaseModel { static associate(models) { - this.belongsTo(models.User, {foreignKey: 'user_id'}) - this.belongsTo(models.OAuthClient, {foreignKey: 'client_id'}) + this.belongsTo(models.User, { foreignKey: 'user_id' }) + this.belongsTo(models.OAuthClient, { foreignKey: 'client_id' }) } get scopes() { @@ -27,13 +27,13 @@ class AccessToken extends BaseModel { accessToken: this.token, accessTokenExpiresAt: this.expires_at, scope: this.scope, - client: client.toOAuthInterface(), + client: client.toOAuthInterface(), user, } } toJSON() { - const user = this.user ? {user: this.user} : {} + const user = this.user ? 
{ user: this.user } : {} return { id: this.id, token: this.token, @@ -48,34 +48,38 @@ class AccessToken extends BaseModel { } module.exports = (sequelize, DataTypes) => { - AccessToken.init(Object.assign({ - id: { - type: DataTypes.UUID, - primaryKey: true, - defaultValue: DataTypes.UUIDV4, - validate: { - isUUID: 4, + AccessToken.init( + Object.assign( + { + id: { + type: DataTypes.UUID, + primaryKey: true, + defaultValue: DataTypes.UUIDV4, + validate: { + isUUID: 4, + }, + }, + token: { + type: DataTypes.TEXT, + }, + scope: { + type: DataTypes.TEXT, + }, + expires_at: { + type: DataTypes.DATE, + }, + meta: { + type: DataTypes.JSONB, }, }, - token: { - type: DataTypes.TEXT, - }, - scope: { - type: DataTypes.TEXT, - }, - expires_at: { - type: DataTypes.DATE, - }, - meta: { - type: DataTypes.JSONB, - }, + timestamps(DataTypes), + ), + { + sequelize, + paranoid: true, + tableName: 'oauth_access_tokens', }, - timestamps(DataTypes), - ), { - sequelize, - paranoid: true, - tableName: 'oauth_access_tokens', - }) + ) return AccessToken } diff --git a/src/database/models/AuthorizationCode.js b/src/database/models/AuthorizationCode.js index e6b03a4..abccacd 100644 --- a/src/database/models/AuthorizationCode.js +++ b/src/database/models/AuthorizationCode.js @@ -13,12 +13,16 @@ class AuthorizationCode extends BaseModel { expiresAt: this.expires_at, redirectUri: this.redirect_uri, scope: this.scope, - client: this.client ? this.client : { - id: this.client_id, - }, - user: this.user ? this.user : { - id: this.user_id, - } + client: this.client + ? this.client + : { + id: this.client_id, + }, + user: this.user + ? this.user + : { + id: this.user_id, + }, } } @@ -35,8 +39,8 @@ class AuthorizationCode extends BaseModel { } toJSON() { - const user = this.user ? { user: this.user } : { } - const client = this.user ? { user: this.user } : { } + const user = this.user ? { user: this.user } : {} + const client = this.user ? 
{ user: this.user } : {} return { id: this.id, scopes: this.scopes, @@ -50,31 +54,34 @@ class AuthorizationCode extends BaseModel { } module.exports = (sequelize, DataTypes) => { - AuthorizationCode.init(Object.assign( - { - auth_code: { - type: DataTypes.TEXT, - primaryKey: true, - }, - scope: { - type: DataTypes.TEXT, + AuthorizationCode.init( + Object.assign( + { + auth_code: { + type: DataTypes.TEXT, + primaryKey: true, + }, + scope: { + type: DataTypes.TEXT, + }, + redirect_uri: { + type: DataTypes.TEXT, + }, + expires_at: { + type: DataTypes.DATE, + }, + meta: { + type: DataTypes.JSONB, + }, }, - redirect_uri: { - type: DataTypes.TEXT, - }, - expires_at: { - type: DataTypes.DATE, - }, - meta: { - type: DataTypes.JSONB, - } + timestamps(DataTypes), + ), + { + sequelize, + paranoid: true, + tableName: 'oauth_authorization_codes', }, - timestamps(DataTypes), - ), { - sequelize, - paranoid: true, - tableName: 'oauth_authorization_codes', - }) + ) return AuthorizationCode } diff --git a/src/database/models/BaseModel.js b/src/database/models/BaseModel.js index a1109b4..6d3840a 100644 --- a/src/database/models/BaseModel.js +++ b/src/database/models/BaseModel.js @@ -14,13 +14,9 @@ module.exports = class BaseModel extends Model { return this.name } - static associate(models) { + static associate(models) {} - } - - async handleIncludes(includes, loaders) { - - } + async handleIncludes(includes, loaders) {} toJSON() { return { @@ -30,4 +26,4 @@ module.exports = class BaseModel extends Model { updated_at: this.updated_at, } } -} \ No newline at end of file +} diff --git a/src/database/models/BundleCode.js b/src/database/models/BundleCode.js index ef315d2..ed9ad07 100644 --- a/src/database/models/BundleCode.js +++ b/src/database/models/BundleCode.js @@ -7,7 +7,7 @@ class BundleCode extends BaseModel { through: 'user_bundle_codes', foreignKey: 'bundle_code_id', otherKey: 'user_id', - timestamps: false + timestamps: false, }) } toJSON() { @@ -24,35 +24,38 @@ class BundleCode extends BaseModel { } module.exports = (sequelize, DataTypes) => { - BundleCode.init(Object.assign( - { - id: { - type: DataTypes.UUID, - primaryKey: true, - defaultValue: DataTypes.UUIDV4, - validate: { - isUUID: 4, + BundleCode.init( + Object.assign( + { + id: { + type: DataTypes.UUID, + primaryKey: true, + defaultValue: DataTypes.UUIDV4, + validate: { + isUUID: 4, + }, + }, + name: { + type: DataTypes.TEXT, + }, + description: { + type: DataTypes.TEXT, + }, + platforms: { + type: DataTypes.JSONB, + }, + meta: { + type: DataTypes.JSONB, }, }, - name: { - type: DataTypes.TEXT, - }, - description: { - type: DataTypes.TEXT, - }, - platforms: { - type: DataTypes.JSONB, - }, - meta: { - type: DataTypes.JSONB, - }, + timestamps(DataTypes), + ), + { + sequelize, + paranoid: true, + tableName: 'bundle_codes', }, - timestamps(DataTypes), - ), { - sequelize, - paranoid: true, - tableName: 'bundle_codes', - }) + ) return BundleCode -} \ No newline at end of file +} diff --git a/src/database/models/File.js b/src/database/models/File.js index 8ef0be1..02fc4ba 100644 --- a/src/database/models/File.js +++ b/src/database/models/File.js @@ -25,64 +25,72 @@ class File extends BaseModel { } function generatePublicUri(provider, file) { - return ['https://storage.googleapis.com', file.meta.bucket, file.file_root, file.file_name].join('/') + return [ + 'https://storage.googleapis.com', + file.meta.bucket, + file.file_root, + file.file_name, + ].join('/') } module.exports = (sequelize, DataTypes) => { - File.init(Object.assign( - { - id: { - type: 
DataTypes.UUID, - defaultValue: DataTypes.UUIDV4, - primaryKey: true, - }, - provider: { - type: DataTypes.TEXT, - }, - file_root: { - type: DataTypes.TEXT, - }, - file_name: { - type: DataTypes.TEXT, - }, - comment: { - type: DataTypes.TEXT, - }, - tags: { - type: DataTypes.ARRAY(DataTypes.TEXT), - }, - labels: { - type: DataTypes.ARRAY(DataTypes.TEXT), - }, - public_uri: { - type: DataTypes.VIRTUAL, - get() { - const provider = this.provider - return generatePublicUri(provider, this) - } - }, - stream: { - type: DataTypes.TEXT, - }, - requires_approval: { - type: DataTypes.BOOLEAN, - }, - approved: { - type: DataTypes.BOOLEAN, - }, - featured: { - type: DataTypes.BOOLEAN, - }, - meta: { - type: DataTypes.JSONB, + File.init( + Object.assign( + { + id: { + type: DataTypes.UUID, + defaultValue: DataTypes.UUIDV4, + primaryKey: true, + }, + provider: { + type: DataTypes.TEXT, + }, + file_root: { + type: DataTypes.TEXT, + }, + file_name: { + type: DataTypes.TEXT, + }, + comment: { + type: DataTypes.TEXT, + }, + tags: { + type: DataTypes.ARRAY(DataTypes.TEXT), + }, + labels: { + type: DataTypes.ARRAY(DataTypes.TEXT), + }, + public_uri: { + type: DataTypes.VIRTUAL, + get() { + const provider = this.provider + return generatePublicUri(provider, this) + }, + }, + stream: { + type: DataTypes.TEXT, + }, + requires_approval: { + type: DataTypes.BOOLEAN, + }, + approved: { + type: DataTypes.BOOLEAN, + }, + featured: { + type: DataTypes.BOOLEAN, + }, + meta: { + type: DataTypes.JSONB, + }, }, + timestamps(DataTypes), + ), + { + sequelize, + paranoid: true, + tableName: 'files', }, - timestamps(DataTypes), - ), { - sequelize, - paranoid: true, - tableName: 'files', - }) + ) return File } diff --git a/src/database/models/Metric.js b/src/database/models/Metric.js index e975ef2..a47f28a 100644 --- a/src/database/models/Metric.js +++ b/src/database/models/Metric.js @@ -16,11 +16,14 @@ class Metric extends BaseModel { } static async findUnassociatedForDevice(deviceId) { - return this.findAll({ where: { author_id: null, meta: { device: { id: deviceId } } } }) + return this.findAll({ + where: { author_id: null, meta: { device: { id: deviceId } } }, + }) } static async claimNewUnassociatedForDEvice(authorId, deviceId) { - return this.sequelize.query(` + return this.sequelize.query( + ` update metrics set author_id = :authorId where meta -> 'device' ->> 'id' = :deviceId::text @@ -28,10 +31,12 @@ class Metric extends BaseModel { and not exists ( select * from metrics where meta -> 'device' ->> 'id' = :deviceId::text and author_id is not null limit 1 ); - `, { - replacements: { authorId, deviceId }, - type: sequelize.QueryTypes.UPDATE, - }) + `, + { + replacements: { authorId, deviceId }, + type: sequelize.QueryTypes.UPDATE, + }, + ) } getNativeValue() { @@ -57,8 +62,8 @@ class Metric extends BaseModel { } module.exports = (sequelize, DataTypes) => { - Metric.init(Object.assign( - { + Metric.init( + Object.assign({ fake_pk: { type: DataTypes.VIRTUAL, primaryKey: true, @@ -86,9 +91,15 @@ module.exports = (sequelize, DataTypes) => { }, set(location) { if (Array.isArray(location)) { - this.setDataValue('location', { type: 'Point', coordinates: location }) + this.setDataValue('location', { + type: 'Point', + coordinates: location, + }) } else if (location.latitude && location.longitude) { - this.setDataValue('location', { type: 'Point', coordinated: [location.longitude, location.latitude] }) + this.setDataValue('location', { + type: 'Point', + coordinated: [location.longitude, location.latitude], + }) } else { 
this.setDataValue('location', location) } @@ -103,13 +114,14 @@ module.exports = (sequelize, DataTypes) => { deleted_at: { type: DataTypes.DATE, }, + }), + { + sequelize, + paranoid: true, + timestamps: false, + tableName: 'metrics', }, - ), { - sequelize, - paranoid: true, - timestamps: false, - tableName: 'metrics', - }) + ) return Metric } diff --git a/src/database/models/OAuthClient.js b/src/database/models/OAuthClient.js index 9f35ae2..2cfb54a 100644 --- a/src/database/models/OAuthClient.js +++ b/src/database/models/OAuthClient.js @@ -4,7 +4,12 @@ const BaseModel = require('./BaseModel') class OAuthClient extends BaseModel { static associate(models) { this.belongsTo(models.User, { as: 'owner', foreignKey: 'owner_id' }) - this.belongsToMany(models.User, { as: 'users', through: models.AccessToken, otherKey: 'user_id', foreignKey: 'client_id' }) + this.belongsToMany(models.User, { + as: 'users', + through: models.AccessToken, + otherKey: 'user_id', + foreignKey: 'client_id', + }) this.hasMany(models.AccessToken, { foreignKey: 'client_id' }) this.hasMany(models.RefreshToken, { foreignKey: 'client_id' }) } @@ -43,35 +48,38 @@ class OAuthClient extends BaseModel { } module.exports = (sequelize, DataTypes) => { - OAuthClient.init(Object.assign( - { - id: { - type: DataTypes.UUID, - primaryKey: true, - defaultValue: DataTypes.UUIDV4, - validate: { - isUUID: 4, + OAuthClient.init( + Object.assign( + { + id: { + type: DataTypes.UUID, + primaryKey: true, + defaultValue: DataTypes.UUIDV4, + validate: { + isUUID: 4, + }, + }, + secret: { + type: DataTypes.TEXT, + }, + redirect_uris: { + type: DataTypes.ARRAY(DataTypes.TEXT), + }, + grant_types: { + type: DataTypes.ARRAY(DataTypes.TEXT), + }, + meta: { + type: DataTypes.JSONB, }, }, - secret: { - type: DataTypes.TEXT, - }, - redirect_uris: { - type: DataTypes.ARRAY(DataTypes.TEXT), - }, - grant_types: { - type: DataTypes.ARRAY(DataTypes.TEXT), - }, - meta: { - type: DataTypes.JSONB, - } + timestamps(DataTypes), + ), + { + sequelize, + paranoid: true, + tableName: 'oauth_clients', }, - timestamps(DataTypes), - ), { - sequelize, - paranoid: true, - tableName: 'oauth_clients', - }) + ) return OAuthClient } diff --git a/src/database/models/RefreshToken.js b/src/database/models/RefreshToken.js index 214edc4..ba315d6 100644 --- a/src/database/models/RefreshToken.js +++ b/src/database/models/RefreshToken.js @@ -27,13 +27,13 @@ class RefreshToken extends BaseModel { refreshToken: this.token, refreshTokenExpiresAt: this.expires_at, scope: this.scope, - client: client.toOAuthInterface(), + client: client.toOAuthInterface(), user, } } toJSON() { - const user = this.user ? { user: this.user } : { } + const user = this.user ? 
{ user: this.user } : {} return { id: this.id, token: this.token, @@ -48,35 +48,38 @@ class RefreshToken extends BaseModel { } module.exports = (sequelize, DataTypes) => { - RefreshToken.init(Object.assign( - { - id: { - type: DataTypes.UUID, - primaryKey: true, - defaultValue: DataTypes.UUIDV4, - validate: { - isUUID: 4, + RefreshToken.init( + Object.assign( + { + id: { + type: DataTypes.UUID, + primaryKey: true, + defaultValue: DataTypes.UUIDV4, + validate: { + isUUID: 4, + }, + }, + token: { + type: DataTypes.TEXT, + }, + scope: { + type: DataTypes.TEXT, + }, + expires_at: { + type: DataTypes.DATE, + }, + meta: { + type: DataTypes.JSONB, }, }, - token: { - type: DataTypes.TEXT, - }, - scope: { - type: DataTypes.TEXT, - }, - expires_at: { - type: DataTypes.DATE, - }, - meta: { - type: DataTypes.JSONB, - } + timestamps(DataTypes), + ), + { + sequelize, + paranoid: true, + tableName: 'oauth_refresh_tokens', }, - timestamps(DataTypes), - ), { - sequelize, - paranoid: true, - tableName: 'oauth_refresh_tokens', - }) + ) return RefreshToken } diff --git a/src/database/models/User.js b/src/database/models/User.js index ea4037e..3a23fb1 100644 --- a/src/database/models/User.js +++ b/src/database/models/User.js @@ -12,13 +12,13 @@ class User extends BaseModel { as: 'likes', through: 'user_file_likes', foreignKey: 'user_id', - otherKey: 'file_id' + otherKey: 'file_id', }) this.belongsToMany(models.BundleCode, { through: 'user_bundle_codes', foreignKey: 'user_id', otherKey: 'bundle_code_id', - timestamps: false + timestamps: false, }) } @@ -38,11 +38,11 @@ class User extends BaseModel { } else { try { id = await this.idFromOpaque(token) - } catch(e) {} + } catch (e) {} if (id == null) { try { id = await this.idFromJwt(token) - } catch(e) {} + } catch (e) {} } } @@ -139,15 +139,12 @@ class User extends BaseModel { return b } - async generateResetToken() { const crypto = require('core/utils/crypto') const moment = require('moment') const id = this.id - const expires = moment.utc() - .add(1, 'hour') - .toISOString() + const expires = moment.utc().add(1, 'hour').toISOString() const token = await crypto.encrypt(JSON.stringify({ id, expires })) this.reset_token = token @@ -156,7 +153,6 @@ class User extends BaseModel { return token } - toJSON() { const meta = { ...this.meta } delete meta.dob @@ -173,35 +169,38 @@ class User extends BaseModel { } module.exports = (sequelize, DataTypes) => { - User.init(Object.assign( - { - id: { - type: DataTypes.UUID, - primaryKey: true, - defaultValue: DataTypes.UUIDV4, - }, - name: { - type: DataTypes.TEXT, - }, - email: { - type: DataTypes.TEXT, + User.init( + Object.assign( + { + id: { + type: DataTypes.UUID, + primaryKey: true, + defaultValue: DataTypes.UUIDV4, + }, + name: { + type: DataTypes.TEXT, + }, + email: { + type: DataTypes.TEXT, + }, + password: { + type: DataTypes.TEXT, + }, + reset_token: { + type: DataTypes.TEXT, + }, + meta: { + type: DataTypes.JSONB, + }, }, - password: { - type: DataTypes.TEXT, - }, - reset_token: { - type: DataTypes.TEXT, - }, - meta: { - type: DataTypes.JSONB, - } + timestamps(DataTypes), + ), + { + sequelize, + paranoid: true, + tableName: 'users', }, - timestamps(DataTypes), - ), { - sequelize, - paranoid: true, - tableName: 'users', - }) + ) return User } diff --git a/src/database/models/index.js b/src/database/models/index.js index 3e66754..470e9db 100644 --- a/src/database/models/index.js +++ b/src/database/models/index.js @@ -18,11 +18,17 @@ const sequelize = new Sequelize( config('database.database'), 
config('database.username'), config('database.password'), - conf + conf, ) fs.list(__dirname) - .filter(file => (file[0] !== '.') && file !== 'BaseModel.js' && (file !== basename) && (file.slice(-3) === '.js')) + .filter( + file => + file[0] !== '.' && + file !== 'BaseModel.js' && + file !== basename && + file.slice(-3) === '.js', + ) .forEach(file => { const initialiser = require(path.join(__dirname, file)) const model = initialiser(sequelize, DataTypes) @@ -49,14 +55,14 @@ if (config('database.log_queries')) { const span = trace.startChild({ op: 'sql.query', }) - threadContext.set(`span_${ traceId }`, span) + threadContext.set(`span_${traceId}`, span) query.options.trace = traceId } }) sequelize.addHook('afterQuery', (model, query) => { if (query.options.trace) { - const trace = threadContext.get(`span_${ query.options.trace }`) + const trace = threadContext.get(`span_${query.options.trace}`) if (trace) { trace.description = query.sql trace.setData('sql.params', query.options.bind) diff --git a/src/domain/auth/AuthServer.js b/src/domain/auth/AuthServer.js index 39573c8..dfb7c1a 100644 --- a/src/domain/auth/AuthServer.js +++ b/src/domain/auth/AuthServer.js @@ -1,5 +1,12 @@ const OAuthServer = require('oauth2-server') -const { Sequelize: { Op }, User, OAuthClient, AccessToken, RefreshToken, AuthorizationCode } = require('database/models') +const { + Sequelize: { Op }, + User, + OAuthClient, + AccessToken, + RefreshToken, + AuthorizationCode, +} = require('database/models') const crypto = require('core/utils/crypto') const HttpError = require('core/errors/HttpError') @@ -35,22 +42,31 @@ const model = { } const client = await OAuthClient.findOne({ where }) - const asInterface = client.toOAuthInterface() + const asInterface = client.toOAuthInterface() return asInterface }, getAccessToken(token) { - return AccessToken.findOne({ include: [ { model: User }, { model: OAuthClient } ], where: { token: { [Op.eq]: token } } }) + return AccessToken.findOne({ + include: [{ model: User }, { model: OAuthClient }], + where: { token: { [Op.eq]: token } }, + }) }, async getRefreshToken(token) { - const t = await RefreshToken.findOne({ include: [ { model: User }, { model: OAuthClient } ], where: { token: { [Op.eq]: token } } }) + const t = await RefreshToken.findOne({ + include: [{ model: User }, { model: OAuthClient }], + where: { token: { [Op.eq]: token } }, + }) return await t?.toOAuthInterface() }, getUser: async function getAuthUser(email, password) { - const user = await User.findOne({ - where: { email: { [Op.eq]: email } }, - }, { }) + const user = await User.findOne( + { + where: { email: { [Op.eq]: email } }, + }, + {}, + ) if (user != null) { const valid = await user.checkPassword(password) @@ -93,7 +109,11 @@ const model = { return createTokenPair(user, client, accessTokenModel, refreshTokenModel) }, - saveAuthorizationCode: async function ({ authorizationCode, expiresAt, redirectUri, scope }, client, user) { + saveAuthorizationCode: async function ( + { authorizationCode, expiresAt, redirectUri, scope }, + client, + user, + ) { const authCode = { auth_code: authorizationCode, client_id: client.id, @@ -108,7 +128,11 @@ const model = { return code.toOAuthInterface() }, - revokeToken: async function revokeRefreshToken({ refreshToken, client, user }) { + revokeToken: async function revokeRefreshToken({ + refreshToken, + client, + user, + }) { const deletions = await RefreshToken.destroy({ where: { token: { [Op.eq]: refreshToken }, @@ -120,7 +144,11 @@ const model = { return deletions > 0 }, - 
revokeAuthorizationCode: async function({ authorizationCode: code, client, user }) { + revokeAuthorizationCode: async function ({ + authorizationCode: code, + client, + user, + }) { const deletions = await AuthorizationCode.destroy({ where: { auth_code: code, @@ -132,7 +160,7 @@ const model = { return deletions > 0 }, - getAuthorizationCode: async function(auth_code) { + getAuthorizationCode: async function (auth_code) { const code = await AuthorizationCode.findOne({ where: { auth_code } }) if (code) { return code.toOAuthInterface() @@ -172,13 +200,10 @@ class KoaOAuthServer { this.authorize = async ctx => { const OAuthFlow = require('./OAuthFlow') const flow = await OAuthFlow.initialiseFlow(ctx) - const { - user, - redirect, - } = flow + const { user, redirect } = flow if (!user) { - return ctx.redirect(`/login?auth_state=${ redirect }`) + return ctx.redirect(`/login?auth_state=${redirect}`) } else if (ctx.method === 'GET') { return await OAuthFlow.showOAuthConsent(ctx, flow) } else { @@ -199,7 +224,10 @@ class KoaOAuthServer { this.token = async ctx => { const { req, res } = this.transformContext(ctx) - await authServer.token(req, res, { allowExtendedTokenAttributes: true, accessTokenLifetime: 3600 * 24 * 7 }) + await authServer.token(req, res, { + allowExtendedTokenAttributes: true, + accessTokenLifetime: 3600 * 24 * 7, + }) for (const [name, value] of Object.entries(res.headers)) { ctx.response.set(name, value) } diff --git a/src/domain/auth/AuthenticationService.js b/src/domain/auth/AuthenticationService.js index 07ae334..7ea548c 100644 --- a/src/domain/auth/AuthenticationService.js +++ b/src/domain/auth/AuthenticationService.js @@ -11,9 +11,7 @@ module.exports = class AuthenticationService extends ContextualModule { } static get profileMethods() { - return [ - 'getUser', 'attemptLogin', 'saveToSession' - ] + return ['getUser', 'attemptLogin', 'saveToSession'] } init() { @@ -22,7 +20,10 @@ module.exports = class AuthenticationService extends ContextualModule { async attemptLogin(email, password) { if (this._user != null) { - if (this._user.email === email && await this._user.checkPassword(password)) { + if ( + this._user.email === email && + (await this._user.checkPassword(password)) + ) { return this._user } else { throw new HttpError(401, 'Forbidden') @@ -55,9 +56,10 @@ module.exports = class AuthenticationService extends ContextualModule { } } else if (this.ctx.get('Authorization')) { const token = this.ctx.get('Authorization').substr(HEADER_PREFIX.length) + const accessToken = await AccessToken.findOne({ where: { token }, - include: [{ model: User }] + include: [{ model: User }], }) if (accessToken.User) { @@ -66,13 +68,14 @@ module.exports = class AuthenticationService extends ContextualModule { } } else if (this.ctx.get('x-api-token')) { const token = this.ctx.get('x-api-token') + console.log(token, this.ctx.get('x-token-type')) try { const user = await User.fromToken(token, this.ctx.get('x-token-type')) if (user) { this.authenticateAs(user) return this._user } - } catch(e) { + } catch (e) { console.error(e) } } @@ -87,7 +90,9 @@ module.exports = class AuthenticationService extends ContextualModule { async saveToSession(logoutIfEmpty = true) { const user = await this.getUser() if (user) { - this.ctx.session.user = await crypto.encrypt(JSON.stringify({ id: user.id })) + this.ctx.session.user = await crypto.encrypt( + JSON.stringify({ id: user.id }), + ) } else if (logoutIfEmpty) { this.ctx.session.user = null } diff --git a/src/domain/auth/OAuthFlow.js 
b/src/domain/auth/OAuthFlow.js index b2d369e..6ae94fa 100644 --- a/src/domain/auth/OAuthFlow.js +++ b/src/domain/auth/OAuthFlow.js @@ -24,7 +24,9 @@ exports.initialiseFlow = async ctx => { const action = baseQuery.action - const redirectState = await crypto.encrypt(JSON.stringify({ redirect: 'authorize', query: baseQuery })) + const redirectState = await crypto.encrypt( + JSON.stringify({ redirect: 'authorize', query: baseQuery }), + ) return { user, @@ -36,13 +38,9 @@ exports.initialiseFlow = async ctx => { } exports.showOAuthConsent = async (ctx, queryState) => { - const { - user, - query, - redirect, - } = queryState + const { user, query, redirect } = queryState - const client = await OAuthClient.findOne({ where: { id: query.client_id }}) + const client = await OAuthClient.findOne({ where: { id: query.client_id } }) if (client == null) { throw new HttpError(400, 'Invalid client id specified') } @@ -63,7 +61,10 @@ exports.handleConsentRejection = async (ctx, flow) => { const search = new URLSearchParams(redirect.searchParams) search.set('error', 'access_denied') - search.set('error_description', 'The user has denied the requested permissions') + search.set( + 'error_description', + 'The user has denied the requested permissions', + ) redirect.search = search.toString() ctx.set('Location', redirect.toString()) @@ -78,7 +79,13 @@ exports.handleConsentAcceptance = async (ctx, flow, server) => { } const { req, res } = server.transformContext(ctx, { query: queryState }) - await server.getAuthServer().authorize(req, res, { authenticateHandler: { handle() { return flow.user } }}) + await server.getAuthServer().authorize(req, res, { + authenticateHandler: { + handle() { + return flow.user + }, + }, + }) for (const [name, value] of Object.entries(res.headers)) { ctx.response.set(name, value) } @@ -89,11 +96,13 @@ const scopeDescriptionMap = { '*': { icon: 'admin', name: 'Full Access', - description: 'Full access to your account, including the ability to create, update and delete any user information, metrics and files.' + description: + 'Full access to your account, including the ability to create, update and delete any user information, metrics and files.', }, 'metrics:create': { name: 'Create Metrics', - description: 'The ability to add data metrics linked to your account. Remember that connected apps that create metrics will know your location!', + description: + 'The ability to add data metrics linked to your account. Remember that connected apps that create metrics will know your location!', }, 'files:upload': { name: 'Upload Files', @@ -101,24 +110,26 @@ const scopeDescriptionMap = { }, 'files:read': { name: 'Read Files', - description: 'The ability to see and download images that you\'ve uploaded to your account', + description: + "The ability to see and download images that you've uploaded to your account", }, 'profile:read': { name: 'Read Profile', - description: 'The ability to see any information associated with your user profile. This includes your name and email address', + description: + 'The ability to see any information associated with your user profile. This includes your name and email address', }, 'profile:write': { name: 'Modify Profile', - description: 'The ability to edit any information associated with your user profile. This includes your name', + description: + 'The ability to edit any information associated with your user profile. 
This includes your name', }, 'profile:stats': { name: 'Profile Stats', - description: 'The ability to see information about your account stats, including your points and citizen scientist level', + description: + 'The ability to see information about your account stats, including your points and citizen scientist level', }, - } function describeScopeRequest(scope = '*') { const scopes = scope.split(' ') - return scopes.map(s => scopeDescriptionMap[s]) - .filter(Boolean) + return scopes.map(s => scopeDescriptionMap[s]).filter(Boolean) } diff --git a/src/domain/auth/handlers/SendUserPasswordReset.js b/src/domain/auth/handlers/SendUserPasswordReset.js index fd93380..d7dc21b 100644 --- a/src/domain/auth/handlers/SendUserPasswordReset.js +++ b/src/domain/auth/handlers/SendUserPasswordReset.js @@ -2,38 +2,67 @@ const { config } = require('bootstrap') const HttpError = require('core/errors/HttpError') module.exports = async (body, ctx) => { - const { email } = body + const { email, device } = body const user = await ctx.services['core.users'].findByEmail(email) - const token = await ctx.profile('user.generateResetToken', 'Create reset token', () => user.generateResetToken()) + const token = await ctx.profile( + 'user.generateResetToken', + 'Create reset token', + () => user.generateResetToken(), + ) - const name = user.name || 'Jetsam User (You haven\'t told us your name!)' - const reset_link = new URL(`/reset-password?token=${ token }`, config('app.host.web')) + const name = user.name || "Jetsam User (You haven't told us your name!)" + const reset_link = new URL( + `/reset-password?token=${token}`, + config('app.host.web'), + ) const { mail } = require('services') + const data = { name, reset_link, action_url: reset_link, path: reset_link } + + if (device) { + data.security = { + operating_system: device.platform ?? '', + browser_name: device.info?.app_version + ? `Jetsam ${device.info.app_version}` + : device.browser ?? 'Jetsam', + } + } + try { await ctx.profile( 'services.mail.sendTemplate', - `template ${ config('mail.templates.reset-password')}`, - () => mail.sendTemplate(email, 'Reset Your Jetsam password', config('mail.templates.reset-password'), { - name, - reset_link, - }) + `template ${config('mail.templates.reset-password')}`, + () => + mail.sendTemplate( + email, + 'Reset Your Jetsam password', + '22427615', + data, + ), ) } catch (e) { // reporter.report(e) - console.log(e.response.body.errors) + console.error(e) + console.log(e?.response?.body?.errors) throw new HttpError( 500, 'Failed To Send Reset Email', - { status: 500, title: 'Failed to send reset email', description: 'Could not send the password reset email' }, { - sendgrid: (e.response?.body?.errors ?? []).reduce((acc, e, i) => ({ - ...acc, - [`err-${ i }`]: JSON.stringify(e, null, 2), - }), {}), - } + status: 500, + title: 'Failed to send reset email', + description: 'Could not send the password reset email', + }, + { + sendgrid: (e?.response?.body?.errors ?? 
[]).reduce( + (acc, e, i) => ({ + ...acc, + [`err-${i}`]: JSON.stringify(e, null, 2), + }), + {}, + ), + }, ) } -} \ No newline at end of file +} diff --git a/src/domain/data/MetricsService.js b/src/domain/data/MetricsService.js index b9a737f..996ccf0 100644 --- a/src/domain/data/MetricsService.js +++ b/src/domain/data/MetricsService.js @@ -12,17 +12,21 @@ module.exports = class MetricsService extends ContextualModule { } static get profileMethods() { - return [ - 'queryAggregate', - 'queryAll', - 'recordMetric', - ] + return ['queryAggregate', 'queryAll', 'recordMetric'] } async recordMetric(value, type, location) { const user = await this.ctx.services['core.auth'].getUser() - const point = { type: 'Point', coordinates: [location.longitude, location.latitude] } - const payload = { value, type, location: point, author_id: user ? user.id : null } + const point = { + type: 'Point', + coordinates: [location.longitude, location.latitude], + } + const payload = { + value, + type, + location: point, + author_id: user ? user.id : null, + } if (this.ctx.request.device) { payload.meta = { device: this.ctx.request.device } } @@ -35,24 +39,23 @@ module.exports = class MetricsService extends ContextualModule { return await Metric.findAll({ where: { [Op.and]: [ - Sequelize.literal(`ST_COVEREDBY(${ snapClause }, ST_POLYGONFROMTEXT('POLYGON((${ pointBuffer }))')::geography::geometry)`), + Sequelize.literal( + `ST_COVEREDBY(${snapClause}, ST_POLYGONFROMTEXT('POLYGON((${pointBuffer}))')::geography::geometry)`, + ), ], recorded_at: { - [Op.between]: [from, to] + [Op.between]: [from, to], }, type: { [Op.in]: types, - } + }, }, attributes: [ 'type', [Sequelize.fn('COUNT', Sequelize.col('value')), 'value'], - [Sequelize.literal(snapClause), 'location'] + [Sequelize.literal(snapClause), 'location'], ], - group: [ - Sequelize.literal(snapClause), - 'type', - ] + group: [Sequelize.literal(snapClause), 'type'], }) } @@ -60,14 +63,16 @@ module.exports = class MetricsService extends ContextualModule { return await Metric.findAll({ where: { [Op.and]: [ - Sequelize.literal(`ST_COVEREDBY("Metric"."location", ST_POLYGONFROMTEXT('POLYGON((${ pointBuffer }))'))`), + Sequelize.literal( + `ST_COVEREDBY("Metric"."location", ST_POLYGONFROMTEXT('POLYGON((${pointBuffer}))'))`, + ), ], recorded_at: { - [Op.between]: [from, to] + [Op.between]: [from, to], }, type: { [Op.in]: types, - } + }, }, }) } diff --git a/src/domain/users/UserService.js b/src/domain/users/UserService.js index 2b3ce9e..167c187 100644 --- a/src/domain/users/UserService.js +++ b/src/domain/users/UserService.js @@ -10,12 +10,7 @@ module.exports = class UserService extends ContextualModule { } static get profileMethods() { - return [ - 'register', - 'findUser', - 'findByEmail', - 'findByAccessToken', - ] + return ['register', 'findUser', 'findByEmail', 'findByAccessToken'] } findUser(id) { @@ -30,7 +25,10 @@ module.exports = class UserService extends ContextualModule { } async findByAccessToken(token) { - const accessToken = await AccessToken.findOne({ where: { token }, include: [{ model: User }] }) + const accessToken = await AccessToken.findOne({ + where: { token }, + include: [{ model: User }], + }) if (accessToken) { return accessToken.User } diff --git a/src/http/controllers/api/app.js b/src/http/controllers/api/app.js index b4fcd9b..cbfe351 100644 --- a/src/http/controllers/api/app.js +++ b/src/http/controllers/api/app.js @@ -8,10 +8,10 @@ exports.getBundles = async ctx => { const bundles = await ctx.profile( 'user.getBundleCodes', 'Get codepush bundles 
for authed user', - () => user.getBundleCodes() + () => user.getBundleCodes(), ) ctx.body = { bundles, } } -} \ No newline at end of file +} diff --git a/src/http/controllers/api/auth.js b/src/http/controllers/api/auth.js index 10290b3..126913a 100644 --- a/src/http/controllers/api/auth.js +++ b/src/http/controllers/api/auth.js @@ -14,11 +14,19 @@ exports.register = async ctx => { const user = await User.findOne({ where: { email } }) if (user != null) { - throw new HttpError(409, 'Email Already Exists', { status: 409, title: 'Email Already Exists', description: 'That email address already exists. Please try another email address.' }) + throw new HttpError(409, 'Email Already Exists', { + status: 409, + title: 'Email Already Exists', + description: + 'That email address already exists. Please try another email address.', + }) } - const newUser = await ctx.services['core.users'] - .register(name ?? null, email, password) + const newUser = await ctx.services['core.users'].register( + name ?? null, + email, + password, + ) await newUser.handleIncludes(ctx.includes) const token = await newUser.asToken(ctx.get('x-token-type')) @@ -27,8 +35,7 @@ exports.register = async ctx => { exports.login = async ctx => { const { email, password } = ctx.request.body - const user = await ctx.services['core.auth'] - .attemptLogin(email, password) + const user = await ctx.services['core.auth'].attemptLogin(email, password) const token = await user.asToken(ctx.get('x-token-type')) @@ -40,10 +47,20 @@ exports.triggerPasswordReset = async ctx => { const { email } = ctx.request.body const user = await ctx.services['core.users'].findByEmail(email) if (!user) { - throw new HttpError(404, 'No Such Email', { status: 404, title: 'No Such Email', description: 'The provided email address is not associated with an account' }) + throw new HttpError(404, 'No Such Email', { + status: 404, + title: 'No Such Email', + description: + 'The provided email address is not associated with an account', + }) } - await queue.dispatch('send-user-password-reset', { email }) + console.log(ctx.request.device) + + await queue.dispatch('send-user-password-reset', { + email, + device: ctx.request.device, + }) ctx.body = { reset_token: null, @@ -116,7 +133,7 @@ exports.handlePasswordReset = async ctx => { if (expires.isSameOrBefore(moment.utc())) { ctx.body = { error: { - message: 'The reset token was invalid or expired' + message: 'The reset token was invalid or expired', }, params: { new_password, @@ -131,7 +148,7 @@ exports.handlePasswordReset = async ctx => { if (!user) { ctx.body = { error: { - message: 'The reset token was invalid or expired' + message: 'The reset token was invalid or expired', }, params: { new_password, @@ -148,4 +165,4 @@ exports.handlePasswordReset = async ctx => { ctx.status = 200 ctx.body = { user } -} \ No newline at end of file +} diff --git a/src/http/controllers/api/content.js b/src/http/controllers/api/content.js index 7b28757..b654872 100644 --- a/src/http/controllers/api/content.js +++ b/src/http/controllers/api/content.js @@ -7,7 +7,11 @@ exports.postMetric = async ctx => { const allowedTypes = new Set(Metric.getSupportedMetricTypes()) const { value, type, location } = ctx.request.body - if (location == null || location.longitude == null || location.latitude == null) { + if ( + location == null || + location.longitude == null || + location.latitude == null + ) { throw new HttpError({ status: 400, code: 'MTR-001', @@ -17,22 +21,31 @@ exports.postMetric = async ctx => { } if (allowedTypes.has(type)) { - const 
metric = await ctx.services['data.metrics'].recordMetric(value, type, location) + const metric = await ctx.services['data.metrics'].recordMetric( + value, + type, + location, + ) ctx.body = { metric: metric.toJSON() } } else { - throw new HttpError({ status: 400, code: 'MTR-002', title: 'Invalid Metric', description: `${ type } is not a supported type`}) + throw new HttpError({ + status: 400, + code: 'MTR-002', + title: 'Invalid Metric', + description: `${type} is not a supported type`, + }) } } function splitString(str) { - return str.split(',') + return str + .split(',') .map(s => s.trim()) .filter(Boolean) } function pointFromString(str) { - const parts = splitString(str) - .map(Number) + const parts = splitString(str).map(Number) return { latitude: parts[0], @@ -47,7 +60,7 @@ exports.getWithin = async ctx => { date_from, date_to = moment.utc(), types = '', - format = 'full' + format = 'full', } = ctx.request.query const fromPoint = pointFromString(point_from) @@ -69,18 +82,21 @@ exports.getWithin = async ctx => { [maxFromLong, maxFromLat], [maxFromLong, minFromLat], [minFromLong, minFromLat], - ].map(pb => pb.map(Number).join(' ')).join(',') + ] + .map(pb => pb.map(Number).join(' ')) + .join(',') - const query = format === 'marker' ? - ctx.services['data.metrics'].queryAggregate - : ctx.services['data.metrics'].queryAll + const query = + format === 'marker' + ? ctx.services['data.metrics'].queryAggregate + : ctx.services['data.metrics'].queryAll const metrics = await query( pointBuffer, metricTypes, // fromDate.toISOString(), moment().subtract(12, 'months').toISOString(), - toDate.toISOString() + toDate.toISOString(), ) ctx.body = { metrics } diff --git a/src/http/controllers/api/feedback.js b/src/http/controllers/api/feedback.js index 4a12d69..a70e05a 100644 --- a/src/http/controllers/api/feedback.js +++ b/src/http/controllers/api/feedback.js @@ -4,46 +4,46 @@ const SLACK_WEBHOOK = process.env.SLACK_WEBHOOK function createSlackPayload({ name, email, message = '' }) { return { - "blocks": [ + blocks: [ { - "type": "divider" + type: 'divider', }, { - "type": "section", - "text": { - "type": "mrkdwn", - "text": "Incoming message from an app user!" 
- } + type: 'section', + text: { + type: 'mrkdwn', + text: 'Incoming message from an app user!', + }, }, { - "type": "section", - "text": { - "type": "plain_text", + type: 'section', + text: { + type: 'plain_text', text: message, emoji: true, - } + }, }, { - "type": "divider" + type: 'divider', }, { - "type": "context", - "elements": [ + type: 'context', + elements: [ { - "type": "mrkdwn", - "text": (new Date()).toDateString(), + type: 'mrkdwn', + text: new Date().toDateString(), }, name && { - "type": "plain_text", - "text": `From: ${ name }` + type: 'plain_text', + text: `From: ${name}`, }, email && { - "type": "plain_text", - "text": `Email: ${ email }` - } - ].filter(Boolean) - } - ] + type: 'plain_text', + text: `Email: ${email}`, + }, + ].filter(Boolean), + }, + ], } } @@ -77,10 +77,10 @@ exports.send = async ctx => { console.error(response) ctx.status = 500 ctx.body = { - message: 'Something went wrong' + message: 'Something went wrong', } } else { ctx.status = response.status - ctx.body = { message: 'Something Happened'} + ctx.body = { message: 'Something Happened' } } } diff --git a/src/http/controllers/api/oauth.js b/src/http/controllers/api/oauth.js index a82997e..643b2cd 100644 --- a/src/http/controllers/api/oauth.js +++ b/src/http/controllers/api/oauth.js @@ -22,7 +22,7 @@ exports.listClients = async ctx => { const clients = await user.getOAuthClients() ctx.body = { - clients: clients.map(c => c.toOAuthInterface()) + clients: clients.map(c => c.toOAuthInterface()), } } @@ -31,7 +31,10 @@ exports.addClientRedirect = async ctx => { const user = await ctx.services['core.auth'].getUser() if (client?.owner_id !== user.id) { - throw new HttpError(403, 'You do not have permission to modify this oauth client') + throw new HttpError( + 403, + 'You do not have permission to modify this oauth client', + ) } const uri = ctx.request.body?.uri @@ -42,10 +45,7 @@ exports.addClientRedirect = async ctx => { const uris = new Set(client.redirect_uris) if (!uris.has(uri)) { - client.redirect_uris = [ - ...client.redirect_uris, - uri, - ] + client.redirect_uris = [...client.redirect_uris, uri] await client.save() } @@ -58,7 +58,10 @@ exports.removeClientRedirect = async ctx => { const user = await ctx.services['core.auth'].getUser() if (client.owner_id !== user.id) { - throw new HttpError(403, 'You do not have permission to modify this oauth client') + throw new HttpError( + 403, + 'You do not have permission to modify this oauth client', + ) } const uri = ctx.request.body?.uri @@ -75,4 +78,4 @@ exports.removeClientRedirect = async ctx => { await client.save() } ctx.body = { client: client.toOAuthInterface() } -} \ No newline at end of file +} diff --git a/src/http/controllers/api/storage.js b/src/http/controllers/api/storage.js index e888623..544e81a 100644 --- a/src/http/controllers/api/storage.js +++ b/src/http/controllers/api/storage.js @@ -5,7 +5,7 @@ const mimeType = require('mime-types') const { v4: uuid } = require('uuid') const { fs, env } = require('bootstrap') const moment = require('moment') -const {unset} = require('../../../bootstrap') +const { unset } = require('../../../bootstrap') // const reporter = require('services/Reporter') const { Op } = Sequelize @@ -20,11 +20,15 @@ exports.saveFile = async ctx => { const user = await ctx.profile( 'core.auth.getUser', 'Get logged in user', - async () => ctx.services['core.auth'].getUser() + async () => ctx.services['core.auth'].getUser(), ) if (!user) { - throw new HttpError({ status: 403, title: 'Must be signed in to feature an image', 
description: 'You must be signed in to submit images to the content feed' }) + throw new HttpError({ + status: 403, + title: 'Must be signed in to feature an image', + description: 'You must be signed in to submit images to the content feed', + }) } const { file } = ctx @@ -37,13 +41,13 @@ exports.saveFile = async ctx => { bucket: env('GCS_BUCKET'), device: ctx.request.device, }, - file_name: `${ uuid() }.${ mimeType.extension(file.mimetype) }`, - file_root: `${ user.id }`, + file_name: `${uuid()}.${mimeType.extension(file.mimetype)}`, + file_root: `${user.id}`, user_id: user.id, stream: 'plastics-public', provider: 'google', comment: '', - tags : [], + tags: [], approved: true, featured: false, } @@ -56,9 +60,11 @@ exports.saveFile = async ctx => { fileInfo.meta.location = location } - const persistedFile = await File.create(fileInfo).catch(e => console.error(e) || null) + const persistedFile = await File.create(fileInfo).catch( + e => console.error(e) || null, + ) - const storagePath = `${ fileInfo.file_root }/${ fileInfo.file_name }` + const storagePath = `${fileInfo.file_root}/${fileInfo.file_name}` const bucket = storage.bucket(env('GCS_BUCKET')) const uploadOpts = { @@ -69,8 +75,8 @@ exports.saveFile = async ctx => { metadata: { mimetype: file.mimetype, uploader: user.id, - } - } + }, + }, } await bucket.upload(file.path, uploadOpts) @@ -88,11 +94,15 @@ exports.saveFile2 = async ctx => { const user = await ctx.profile( 'core.auth.getUser', 'Get logged in user', - async () => ctx.services['core.auth'].getUser() + async () => ctx.services['core.auth'].getUser(), ) if (!user) { - throw new HttpError({ status: 403, title: 'Must be signed in to feature an image', description: 'You must be signed in to submit images to the content feed' }) + throw new HttpError({ + status: 403, + title: 'Must be signed in to feature an image', + description: 'You must be signed in to submit images to the content feed', + }) } const { file } = ctx @@ -104,13 +114,13 @@ exports.saveFile2 = async ctx => { size: file.size, bucket: env('GCS_BUCKET'), }, - file_name: `${ uuid() }.${ mimeType.extension(file.mimetype) }`, - file_root: `${ user.id }`, + file_name: `${uuid()}.${mimeType.extension(file.mimetype)}`, + file_root: `${user.id}`, user_id: user.id, stream: 'plastics-public', provider: 'google', comment: title, - tags : Array.isArray(tags) ? tags : [tags], + tags: Array.isArray(tags) ? 
tags : [tags], approved: true, featured: false, } @@ -119,9 +129,11 @@ exports.saveFile2 = async ctx => { fileinfo.meta.remote_id = remote_id } - const persistedFile = await File.create(fileinfo).catch(e => console.error(e) || null) + const persistedFile = await File.create(fileinfo).catch( + e => console.error(e) || null, + ) - const storagePath = `${ fileinfo.file_root }/${ fileinfo.file_name }` + const storagePath = `${fileinfo.file_root}/${fileinfo.file_name}` const bucket = storage.bucket(env('GCS_BUCKET')) const uploadOpts = { @@ -133,8 +145,8 @@ exports.saveFile2 = async ctx => { tags, mimetype: file.mimetype, uploader: user.id, - } - } + }, + }, } await bucket.upload(file.path, uploadOpts) @@ -148,14 +160,18 @@ exports.getFiles = async ctx => { const user = await ctx.profile( 'core.auth.getUser', 'Get logged in user', - async () => ctx.services['core.auth'].getUser() + async () => ctx.services['core.auth'].getUser(), ) if (!user) { - throw new HttpError({ status: 403, title: 'Must be signed in to feature an image', description: 'You must be signed in to submit images to the content feed' }) + throw new HttpError({ + status: 403, + title: 'Must be signed in to feature an image', + description: 'You must be signed in to submit images to the content feed', + }) } - const where = { } + const where = {} const { since } = ctx.query if (since && moment.utc(since).isValid()) { @@ -171,13 +187,21 @@ exports.featureImage = async ctx => { const user = await ctx.services['core.auth'].getUser() if (!user) { - throw new HttpError({ status: 403, title: 'Must be signed in to feature an image', description: 'You must be signed in to submit images to the content feed' }) + throw new HttpError({ + status: 403, + title: 'Must be signed in to feature an image', + description: 'You must be signed in to submit images to the content feed', + }) } const { imageId } = ctx.params const file = await File.findOne({ where: { id: imageId } }) if (file.user_id !== user.id) { - throw new HttpError({ status: 403, title: 'Cannot feature non-owned image', description: 'You can\'t feature an image that doesn\'t belong to you' }) + throw new HttpError({ + status: 403, + title: 'Cannot feature non-owned image', + description: "You can't feature an image that doesn't belong to you", + }) } if (file.featured && file.featured_at) { @@ -205,43 +229,42 @@ exports.feed = async ctx => { if (user != null) { includes.push([ - Sequelize.literal(`exists (select * from user_file_likes where user_file_likes.user_id='${ user.id }' and user_file_likes.file_id="File".id)`), + Sequelize.literal( + `exists (select * from user_file_likes where user_file_likes.user_id='${user.id}' and user_file_likes.file_id="File".id)`, + ), 'liked', ]) } else { - includes.push([ - Sequelize.literal('false'), - 'liked', - ]) + includes.push([Sequelize.literal('false'), 'liked']) } await ctx.profile('feed.list', 'Fetch Jetsam community feed', async () => { ctx.body = { - feed: (await File.findAll({ - where: { - approved: true, - featured: true, - created_at: { - [Sequelize.Op.gt]: moment.utc().subtract(1, 'month').toISOString(), + feed: ( + await File.findAll({ + where: { + approved: true, + featured: true, + created_at: { + [Sequelize.Op.gt]: moment + .utc() + .subtract(1, 'month') + .toISOString(), + }, }, - }, - order: [ - ['featured_at', 'desc'], - ['created_at', 'desc'], - ], - attributes: { - include: includes, - exclude: [ - 'labels', - 'updated_at', - 'requires_approval', - 'labels', - ] - } - })).map(inst => ({ + order: [ + ['featured_at', 
'desc'], + ['created_at', 'desc'], + ], + attributes: { + include: includes, + exclude: ['labels', 'updated_at', 'requires_approval', 'labels'], + }, + }) + ).map(inst => ({ ...inst.toJSON(), liked: inst.get('liked'), - })) + })), } }) } @@ -251,28 +274,44 @@ exports.like = async ctx => { const user = await ctx.services['core.auth'].getUser() if (!user) { - throw new HttpError({ status: 403, title: 'Must be signed in to like an image', description: 'You must be signed in to like images' }) + throw new HttpError({ + status: 403, + title: 'Must be signed in to like an image', + description: 'You must be signed in to like images', + }) } - const file = await File.findOne({ where: { id: fileId }}) + const file = await File.findOne({ where: { id: fileId } }) if (!file) { - throw new HttpError({ status: 404, title: 'Could not find image', description: 'Could not find image to like' }) + throw new HttpError({ + status: 404, + title: 'Could not find image', + description: 'Could not find image to like', + }) } await user.addLike(file) - ctx.body = { file: {...file.toJSON(), liked: true }} + ctx.body = { file: { ...file.toJSON(), liked: true } } } exports.unlike = async ctx => { const { fileId } = ctx.params const user = await ctx.services['core.auth'].getUser() if (!user) { - throw new HttpError({ status: 403, title: 'Must be signed in to like an image', description: 'You must be signed in to unlike images' }) + throw new HttpError({ + status: 403, + title: 'Must be signed in to like an image', + description: 'You must be signed in to unlike images', + }) } - const file = await File.findOne({ where: { id: fileId }}) + const file = await File.findOne({ where: { id: fileId } }) if (!file) { - throw new HttpError({ status: 404, title: 'Could not find image', description: 'Could not find image to unlike' }) + throw new HttpError({ + status: 404, + title: 'Could not find image', + description: 'Could not find image to unlike', + }) } await user.removeLike(file) - ctx.body = { file: {...file.toJSON(), liked: false }} -} \ No newline at end of file + ctx.body = { file: { ...file.toJSON(), liked: false } } +} diff --git a/src/http/controllers/api/user.js b/src/http/controllers/api/user.js index 8e540b8..dbb3ee8 100644 --- a/src/http/controllers/api/user.js +++ b/src/http/controllers/api/user.js @@ -28,8 +28,12 @@ exports.updateOne = async ctx => { await user.save() await user.handleIncludes(ctx.includes) } else { - throw new HttpError({ status: 404, title: 'No such user', description: 'No user is currently logged in' }) + throw new HttpError({ + status: 404, + title: 'No such user', + description: 'No user is currently logged in', + }) } ctx.body = { user } -} \ No newline at end of file +} diff --git a/src/http/controllers/auth.js b/src/http/controllers/auth.js index 640453f..d44678b 100644 --- a/src/http/controllers/auth.js +++ b/src/http/controllers/auth.js @@ -15,7 +15,9 @@ exports.handleLoginRedirect = async ctx => { console.log(values) if (values.redirect === 'authorize') { - return ctx.redirect(`/auth/authorize?auth_state=${ values.query.auth_state }`) + return ctx.redirect( + `/auth/authorize?auth_state=${values.query.auth_state}`, + ) } else { return ctx.redirect('/') } @@ -25,8 +27,10 @@ exports.handleLoginRedirect = async ctx => { } const resetErrorMessages = { - missing: 'No token was found in the URL. If you clicked a link to get here, please make sure that it contains a password reset token. 
You may need to request a new password reset in the Jetsam app.', - invalid: 'The link you clicked was invalid or has expired. Password reset links are valid for 1 hour from the time we send them to you; you may need to request a new password reset in the Jetsam app.', + missing: + 'No token was found in the URL. If you clicked a link to get here, please make sure that it contains a password reset token. You may need to request a new password reset in the Jetsam app.', + invalid: + 'The link you clicked was invalid or has expired. Password reset links are valid for 1 hour from the time we send them to you; you may need to request a new password reset in the Jetsam app.', } exports.resetPassword = async ctx => { @@ -44,7 +48,12 @@ exports.resetPassword = async ctx => { } const { sequelize } = require('database/models') - const [ [ { exists } ] ] = await sequelize.query('SELECT exists(select reset_token from users where reset_token = :token AND deleted_at is null limit 1)', { replacements: { token } }) + const [ + [{ exists }], + ] = await sequelize.query( + 'SELECT exists(select reset_token from users where reset_token = :token AND deleted_at is null limit 1)', + { replacements: { token } }, + ) if (!exists) { errorData.message = resetErrorMessages.invalid @@ -65,7 +74,11 @@ exports.resetPassword = async ctx => { const time = moment.utc(data.expires) - if (data.id == null || data.expires == null || time.isSameOrBefore(moment.utc())) { + if ( + data.id == null || + data.expires == null || + time.isSameOrBefore(moment.utc()) + ) { errorData.message = resetErrorMessages.invalid await ctx.render('auth/reset-password-error', errorData) ctx.status = 400 @@ -84,11 +97,11 @@ exports.handleResetPassword = async ctx => { if (ctx.status >= 400) { const { error } = ctx.body await ctx.render('auth/reset-password-error', { - back_link: `/reset-password?token=${ ctx.request.body.reset_token }`, + back_link: `/reset-password?token=${ctx.request.body.reset_token}`, message: error.message, }) return } await ctx.render('auth/reset-password-success') -} \ No newline at end of file +} diff --git a/src/http/controllers/fs_local.js b/src/http/controllers/fs_local.js index ae77418..19a55f1 100644 --- a/src/http/controllers/fs_local.js +++ b/src/http/controllers/fs_local.js @@ -29,7 +29,7 @@ exports.uploadFile = async ctx => { } const extension = mime.extension(ctx.request.file.mimetype) - const fpath = p.normalize(`${ n(uid) }/${ n(fid) }.${ extension }`) + const fpath = p.normalize(`${n(uid)}/${n(fid)}.${extension}`) await fs.write(fpath, ctx.request.file.buffer) file.meta = { ...file.meta, path: fpath } @@ -52,10 +52,9 @@ exports.serveFile = async ctx => { throw new HttpError(404, 'File not found') } - const { fs } = require('bootstrap') const { fs: fileservice } = require('services') ctx.set('Content-Type', file.meta.contentType) ctx.body = fs.createReadStream(fs.path(fileservice.base, file.meta.path)) -} \ No newline at end of file +} diff --git a/src/http/middleware/DeviceProperties.js b/src/http/middleware/DeviceProperties.js index ec13422..ae8f957 100644 --- a/src/http/middleware/DeviceProperties.js +++ b/src/http/middleware/DeviceProperties.js @@ -1,11 +1,9 @@ exports.extractDevice = async (ctx, next) => { - const deviceId = ctx.get('X-Request-Device') - const platform = ctx.get('X-Request-Platform') - const rawSlug = ctx.get('X-Request-Slug') + const deviceId = ctx.get('x-request-device') + const platform = ctx.get('x-request-platform') + const rawSlug = ctx.get('x-request-slug') - const slug = rawSlug ? 
- Buffer.from(rawSlug, 'base64').toString('utf-8') - : null + const slug = rawSlug ? Buffer.from(rawSlug, 'base64').toString('utf-8') : null ctx.request.device = { id: deviceId, @@ -14,4 +12,4 @@ exports.extractDevice = async (ctx, next) => { } return await next() -} \ No newline at end of file +} diff --git a/src/http/middleware/ParseIncludes.js b/src/http/middleware/ParseIncludes.js index 8869d37..a928e37 100644 --- a/src/http/middleware/ParseIncludes.js +++ b/src/http/middleware/ParseIncludes.js @@ -1,7 +1,10 @@ module.exports = async function (ctx, next) { const { expand } = ctx.query if (expand != null) { - const expanded = expand.split(',').map(s => s.trim()).filter(Boolean) + const expanded = expand + .split(',') + .map(s => s.trim()) + .filter(Boolean) ctx.includes = expanded } else { ctx.includes = [] diff --git a/src/http/middleware/Profiler.js b/src/http/middleware/Profiler.js index d9b1e37..9e3b5b0 100644 --- a/src/http/middleware/Profiler.js +++ b/src/http/middleware/Profiler.js @@ -1,16 +1,16 @@ -const Sentry = require("@sentry/node") -const { extractTraceparentData }= require("@sentry/tracing") +const Sentry = require('@sentry/node') +const { extractTraceparentData } = require('@sentry/tracing') const threadContext = require('core/injection/ThreadContext') module.exports = async (ctx, next) => { let traceparentData - if (ctx.request.get("sentry-trace")) { - traceparentData = extractTraceparentData(ctx.request.get("sentry-trace")); + if (ctx.request.get('sentry-trace')) { + traceparentData = extractTraceparentData(ctx.request.get('sentry-trace')) } const t = threadContext.getTransaction({ op: 'http.request', - name: `[${ ctx.method }] ${ ctx.path }`, + name: `[${ctx.method}] ${ctx.path}`, traceparentData, tags: { 'http.method': ctx.method, @@ -41,7 +41,11 @@ module.exports = async (ctx, next) => { try { return await next() } finally { - t.setName(`[${ ctx.method }] ${ ctx._matchedRouteName ?? ctx._matchedRoute ?? ctx.path }`) + t.setName( + `[${ctx.method}] ${ + ctx._matchedRouteName ?? ctx._matchedRoute ?? 
ctx.path + }`, + ) t.setHttpStatus(ctx.status) const user = ctx.services['core.auth']._user Sentry.configureScope(scope => { @@ -57,4 +61,4 @@ module.exports = async (ctx, next) => { threadContext.stopTransaction() }) } -} \ No newline at end of file +} diff --git a/src/http/middleware/RequiresAuth.js b/src/http/middleware/RequiresAuth.js index 5f6cda3..35c924f 100644 --- a/src/http/middleware/RequiresAuth.js +++ b/src/http/middleware/RequiresAuth.js @@ -6,4 +6,4 @@ module.exports = async (ctx, next) => { throw new UnauthorizedError() } return await next() -} \ No newline at end of file +} diff --git a/src/http/middleware/SentryReporter.js b/src/http/middleware/SentryReporter.js index 694260b..dfa1ed1 100644 --- a/src/http/middleware/SentryReporter.js +++ b/src/http/middleware/SentryReporter.js @@ -17,7 +17,7 @@ exports.report = async function reportErrorToSentry(error, ctx) { } let user = await getUserObject(ctx) - Sentry.withScope(function(scope) { + Sentry.withScope(function (scope) { if (user) { scope.setUser(user) } @@ -30,19 +30,23 @@ exports.report = async function reportErrorToSentry(error, ctx) { scope.setContext('debugging', error._debugging) } - scope.addEventProcessor(function(event) { - return Sentry.Handlers.parseRequest(event, ctx.request); - }); - Sentry.captureException(error); - }); + scope.addEventProcessor(function (event) { + return Sentry.Handlers.parseRequest(event, ctx.request) + }) + Sentry.captureException(error) + }) } exports.reportHttp = async function reportHttpToSentry(ctx) { let user = await getUserObject(ctx) - const error = new HttpError(ctx.status, `[${ ctx.status }] A HTTP Error Occurred`, { body: ctx.body }) + const error = new HttpError( + ctx.status, + `[${ctx.status}] A HTTP Error Occurred`, + { body: ctx.body }, + ) - Sentry.withScope(function(scope) { + Sentry.withScope(function (scope) { if (user) { scope.setUser(user) } @@ -53,9 +57,9 @@ exports.reportHttp = async function reportHttpToSentry(ctx) { scope.setContext('device', ctx.request.device) } - scope.addEventProcessor(function(event) { - return Sentry.Handlers.parseRequest(event, ctx.request); - }); - Sentry.captureException(error); - }); -} \ No newline at end of file + scope.addEventProcessor(function (event) { + return Sentry.Handlers.parseRequest(event, ctx.request) + }) + Sentry.captureException(error) + }) +} diff --git a/src/http/params/event.js b/src/http/params/event.js index 30d1f82..41db4e4 100644 --- a/src/http/params/event.js +++ b/src/http/params/event.js @@ -4,4 +4,4 @@ module.exports = async (id, ctx, next) => { ctx.models = ctx.models ?? {} ctx.models.event = await Event.findByPk(id) return await next() -} \ No newline at end of file +} diff --git a/src/http/params/file.js b/src/http/params/file.js index 23c7510..17fcfd8 100644 --- a/src/http/params/file.js +++ b/src/http/params/file.js @@ -4,4 +4,4 @@ module.exports = async (id, ctx, next) => { ctx.models = ctx.models ?? {} ctx.models.file = await File.findByPk(id) return await next() -} \ No newline at end of file +} diff --git a/src/http/params/oauth_client.js b/src/http/params/oauth_client.js index ed386d8..91ee3d8 100644 --- a/src/http/params/oauth_client.js +++ b/src/http/params/oauth_client.js @@ -4,4 +4,4 @@ module.exports = async (id, ctx, next) => { ctx.models = ctx.models ?? 
{} ctx.models.oauthClient = await OAuthClient.findByPk(id) return await next() -} \ No newline at end of file +} diff --git a/src/http/routes.js b/src/http/routes.js index db1b7ce..de30add 100644 --- a/src/http/routes.js +++ b/src/http/routes.js @@ -1,5 +1,5 @@ -const controller = (name, method) => require(`./controllers/${ name }`)[method] -const param = (name) => require(`./params/${ name }`) +const controller = (name, method) => require(`./controllers/${name}`)[method] +const param = name => require(`./params/${name}`) const AuthServer = require('domain/auth/AuthServer') const { env, config } = require('bootstrap') @@ -14,6 +14,7 @@ const includes = require('http/middleware/ParseIncludes') const profiling = require('http/middleware/Profiler') const loaders = require('http/middleware/MountLoaders') const userGate = require('http/middleware/RequiresAuth') +const device = require('http/middleware/DeviceProperties').extractDevice const well_known = new Router({ prefix: '/.well-known' }) well_known.get('wk.jwks', '/jwks.json', async ctx => { @@ -26,16 +27,19 @@ well_known.get('wk.jwks', '/jwks.json', async ctx => { ctx.set('Cache-Control', `public, max-age=30`) ctx.body = { - keys: [{ - use: 'sig', - ...jwk, - alg: 'RS256', - }], + keys: [ + { + use: 'sig', + ...jwk, + alg: 'RS256', + }, + ], } }) const web = new Router() web.use(profiling) +web.use(device) web.use(well_known.allowedMethods()) web.use(well_known.routes()) @@ -55,19 +59,26 @@ web.post('/reset-password', controller('auth', 'handleResetPassword')) web.get('/auth/authorize', AuthServer.authorize) web.post('/auth/authorize', AuthServer.authorize) web.post('/auth/token', AuthServer.token) - -;(env('FS_DRIVER', 'local') === 'local') && (function() { - const debug = require('debug')('server:routes') - debug('Mounting local file upload routes for signed URLs') - const p = `${ config('fs.url') }/:uid/:fid` - - web.put(p, errors, includes, loaders, userGate, upload.single('file'), controller('fs_local', 'uploadFile')) - web.get(p, errors, includes, loaders, controller('fs_local', 'serveFile')) - - debug(`Mounted GET ${ p } to serve local files`) - debug(`Mounted PUT ${ p } to upload local files`) -}()) - +env('FS_DRIVER', 'local') === 'local' && + (function () { + const debug = require('debug')('server:routes') + debug('Mounting local file upload routes for signed URLs') + const p = `${config('fs.url')}/:uid/:fid` + + web.put( + p, + errors, + includes, + loaders, + userGate, + upload.single('file'), + controller('fs_local', 'uploadFile'), + ) + web.get(p, errors, includes, loaders, controller('fs_local', 'serveFile')) + + debug(`Mounted GET ${p} to serve local files`) + debug(`Mounted PUT ${p} to upload local files`) + })() const apiRouter = new Router({ prefix: '/api' }) const apiLegacy = new Router({ prefix: '/api/api' }) @@ -78,6 +89,7 @@ function mount(api) { api.use(errors) api.use(includes) api.use(loaders) + api.use(device) api.get('/', ctx => { const pkg = require('../../package.json') @@ -87,42 +99,68 @@ function mount(api) { } }) - api.post('/metrics', controller('api/content', 'postMetric')) api.get('/metrics', controller('api/content', 'getWithin')) api.get('/images', controller('api/storage', 'getFiles')) - api.post('/images', upload.single('featured_image'), controller('api/storage', 'saveFile')) - api.post('/images/:imageId/feature', controller('api/storage', 'featureImage')) + api.post( + '/images', + upload.single('featured_image'), + controller('api/storage', 'saveFile'), + ) + api.post( + '/images/:imageId/feature', 
+ controller('api/storage', 'featureImage'), + ) /** @deprecated */ - api.post('/feature', upload.single('featured_image'), controller('api/storage', 'saveFile')) + api.post( + '/feature', + upload.single('featured_image'), + controller('api/storage', 'saveFile'), + ) api.get('/feed', controller('api/storage', 'feed')) - api.post('/feed/:fileId/like',controller('api/storage', 'like')) + api.post('/feed/:fileId/like', controller('api/storage', 'like')) api.post('/feed/:fileId/unlike', controller('api/storage', 'unlike')) api.post('/register', controller('api/auth', 'register')) api.post('/login', controller('api/auth', 'login')) api.post('/auth/reset-token', controller('api/auth', 'triggerPasswordReset')) - api.post('/auth/reset-password', controller('api/auth', 'handlePasswordReset')) + api.post( + '/auth/reset-password', + controller('api/auth', 'handlePasswordReset'), + ) api.param('oauthClientId', param('oauth_client')) api.get('/oauth/clients', controller('api/oauth', 'listClients')) api.post('/oauth/clients', controller('api/oauth', 'createClient')) - api.post('/oauth/clients/:oauthClientId/redirects', controller('api/oauth', 'addClientRedirect')) - api.delete('/oauth/clients/:oauthClientId/redirects', controller('api/oauth', 'removeClientRedirect')) + api.post( + '/oauth/clients/:oauthClientId/redirects', + controller('api/oauth', 'addClientRedirect'), + ) + api.delete( + '/oauth/clients/:oauthClientId/redirects', + controller('api/oauth', 'removeClientRedirect'), + ) api.get('/self', controller('api/user', 'self')) api.get('/self/bundles', controller('api/app', 'getBundles')) api.put('/self/:property', controller('api/user', 'updateOne')) + api.post('/an/id', async ctx => {}) + api.post('/an/ev', async ctx => { + console.log(ctx.request.body) + ctx.body = { + foo: true, + } + }) + api.post('/feedback', controller('api/feedback', 'send')) } - mount(apiRouter) mount(apiLegacy) diff --git a/src/http/validators.js b/src/http/validators.js index ea857a5..82ac43b 100644 --- a/src/http/validators.js +++ b/src/http/validators.js @@ -7,24 +7,20 @@ exports.validator = name => async (ctx, next) => { const { value, error } = await schema.validate(ctx.request.body) if (error instanceof ValidationError) { - throw new InputValidationError(error.details.map(detail => ({ - field: detail.path.join('.'), - type: detail.type, - path: detail.path, - }))) + throw new InputValidationError( + error.details.map(detail => ({ + field: detail.path.join('.'), + type: detail.type, + path: detail.path, + })), + ) } return await next() } exports.newEvent = Joi.object({ - name: Joi.string() - .required(), - tags: Joi.array() - .items(Joi.string()) - .unique() - .required(), - location: Joi.string() - .valid('online', 'physical') - .required(), -}) \ No newline at end of file + name: Joi.string().required(), + tags: Joi.array().items(Joi.string()).unique().required(), + location: Joi.string().valid('online', 'physical').required(), +}) diff --git a/src/services/cache/interface.js b/src/services/cache/interface.js index 3ea3f6b..b06ce3f 100644 --- a/src/services/cache/interface.js +++ b/src/services/cache/interface.js @@ -2,10 +2,18 @@ const { notImplemented } = require('services/utils') const { Model } = require('sequelize') module.exports = class Cache { - get() { notImplemented('cache', 'get') } - set() { notImplemented('cache', 'set') } - remember() { notImplemented('cache', 'remember') } - clear() { notImplemented('cache', 'clear') } + get() { + notImplemented('cache', 'get') + } + set() { + 
notImplemented('cache', 'set') + } + remember() { + notImplemented('cache', 'remember') + } + clear() { + notImplemented('cache', 'clear') + } serialize(data) { if (Array.isArray(data)) { @@ -46,7 +54,11 @@ module.exports = class Cache { } // Enable for Sequelize model support // - if (hydrated != null && typeof hydrated === 'object' && '$$_modelName' in hydrated) { + if ( + hydrated != null && + typeof hydrated === 'object' && + '$$_modelName' in hydrated + ) { return this.deserializeModel(hydrated) } return this.deserializeObject(hydrated) @@ -69,9 +81,10 @@ module.exports = class Cache { deserializeObject(object) { if (object != null && object.hasOwnProperty('expiresAt')) { - object = Object.assign({}, object, { data: this.deserialize(object.data) }) + object = Object.assign({}, object, { + data: this.deserialize(object.data), + }) } return object } - } diff --git a/src/services/cache/memory.js b/src/services/cache/memory.js index b76f5e1..1d177b8 100644 --- a/src/services/cache/memory.js +++ b/src/services/cache/memory.js @@ -9,11 +9,15 @@ class MemoryCache extends Cache { } _generateKey(key) { - return `${ config('cache.prefix') }:${ key }` + return `${config('cache.prefix')}:${key}` } - get(key) { return this.deserialize(this._cache.get(key)) } - set(key, value) { this._cache.set(key, this.serialize(value)) } + get(key) { + return this.deserialize(this._cache.get(key)) + } + set(key, value) { + this._cache.set(key, this.serialize(value)) + } async remember(key, forS, fn) { let value = null if (this._cache.has(key)) { @@ -28,14 +32,18 @@ class MemoryCache extends Cache { if (value == null) { value = await fn() - const payload = { expiresAt: moment.utc().add(forS, 'seconds').toISOString(), data: value } + const payload = { + expiresAt: moment.utc().add(forS, 'seconds').toISOString(), + data: value, + } await this.set(key, payload) } return value } - clear(key) { this._cache.delete(key) } - + clear(key) { + this._cache.delete(key) + } } module.exports = new MemoryCache() diff --git a/src/services/cache/null.js b/src/services/cache/null.js index 1d0bac2..e79e7f9 100644 --- a/src/services/cache/null.js +++ b/src/services/cache/null.js @@ -1,10 +1,18 @@ const Cache = require('./interface') class NullCache extends Cache { - get() { /* noop */ } - set() { /* noop */ } - remember(_, __, fn) { return fn() } - clear() { /* noop */ } + get() { + /* noop */ + } + set() { + /* noop */ + } + remember(_, __, fn) { + return fn() + } + clear() { + /* noop */ + } } module.exports = new NullCache() diff --git a/src/services/cache/redis.js b/src/services/cache/redis.js index d7bdc34..c2f7863 100644 --- a/src/services/cache/redis.js +++ b/src/services/cache/redis.js @@ -13,11 +13,10 @@ class RedisCache extends Cache { debug('Connecting to redis server %s', this._dsn) const ioredis = require('ioredis') this._client = new ioredis(this._dsn) - } _generateKey(key) { - return `${ this._prefix }:${ key }` + return `${this._prefix}:${key}` } async get(key) { @@ -28,7 +27,12 @@ class RedisCache extends Cache { async set(key, value, ttl = null) { if (ttl) { debug('Setting value %s for %s. 
Will expire in %d', value, key, ttl) - return this._client.set(this._generateKey(key), this.serialize(value), 'ex', ttl) + return this._client.set( + this._generateKey(key), + this.serialize(value), + 'ex', + ttl, + ) } else { debug('Setting value %s for %s', value, key) return this._client.set(this._generateKey(key), this.serialize(value)) @@ -52,7 +56,6 @@ class RedisCache extends Cache { debug('Clearing value for %s', key) return this._client.del(this._generateKey(key)) } - } module.exports = new RedisCache() diff --git a/src/services/fs/gcs.js b/src/services/fs/gcs.js index 8746df8..55ac411 100644 --- a/src/services/fs/gcs.js +++ b/src/services/fs/gcs.js @@ -7,7 +7,7 @@ const log = require('debug')('server:services:gcs') const FS = require('./interface') const { fs, config } = require('bootstrap') -const {fromBase64} = require('core/utils/crypto') +const { fromBase64 } = require('core/utils/crypto') /** * @type GCSFS @@ -19,17 +19,19 @@ const {fromBase64} = require('core/utils/crypto') */ class GCSFS extends FS { constructor() { - super(); + super() this.bucketname = config('fs.bucket') this.path = config('fs.path') this.credentials = config('fs.credentials') if (this.path != null) { - log(`Created GCS FS with credentials from ${ this.path }`) + log(`Created GCS FS with credentials from ${this.path}`) this.storage = new Storage({ keyFilename: this.path }) } else if (this.credentials != null) { log(`Created GCS FS with base 64 credentials from environment`) - this.storage = new Storage({ credentials: JSON.parse(fromBase64(this.credentials)) }) + this.storage = new Storage({ + credentials: JSON.parse(fromBase64(this.credentials)), + }) } else { log(`Created GCS FS without specifying credentials`) this.storage = new Storage() @@ -66,7 +68,7 @@ class GCSFS extends FS { async createUploadUrl(path, ttl, opts) { const gopts = { version: 'v4', - expires: Date.now() + (ttl * 1000), + expires: Date.now() + ttl * 1000, action: 'write', contentType: getContentType(opts.headers ?? {}) ?? opts.contentType, } @@ -77,13 +79,15 @@ class GCSFS extends FS { url, expires_at_ms: gopts.expires, method: 'PUT', - headers: opts.headers ? { 'Content-Type': getContentType(opts.headers) ?? opts.contentType } : {}, + headers: opts.headers + ? { 'Content-Type': getContentType(opts.headers) ?? opts.contentType } + : {}, } } async createDownloadUrl(path, ttl, opts) { const gopts = { version: 'v4', - expires: Date.now() + (ttl * 1000), + expires: Date.now() + ttl * 1000, action: 'read', } @@ -97,11 +101,15 @@ class GCSFS extends FS { } } async getPublicUrl(path, opts) { - return `https://storage.googleapis.com/${ this.bucketname }/${ path }` + return `https://storage.googleapis.com/${this.bucketname}/${path}` } - async makePublic(path, opts) { return this.bucket.file(path).makePublic() } - async makePrivate(path, opts) { return this.bucket.file(path).makePrivate() } + async makePublic(path, opts) { + return this.bucket.file(path).makePublic() + } + async makePrivate(path, opts) { + return this.bucket.file(path).makePrivate() + } getTmpFilename() { return fs.dir(fs.path(os.tmpdir(), 'hf')).path(uuid()) @@ -115,9 +123,11 @@ class GCSFS extends FS { } function getContentType(obj) { - return obj.headers?.['Content-Type'] ?? - obj.headers?.['content-type'] ?? - obj.headers?.contentType + return ( + obj.headers?.['Content-Type'] ?? + obj.headers?.['content-type'] ?? 
+ obj.headers?.contentType + ) } -module.exports = new GCSFS() \ No newline at end of file +module.exports = new GCSFS() diff --git a/src/services/fs/interface.js b/src/services/fs/interface.js index 8750a35..6eb2500 100644 --- a/src/services/fs/interface.js +++ b/src/services/fs/interface.js @@ -1,17 +1,33 @@ const { notImplemented } = require('services/utils') module.exports = class FS { - async write(path, contents, opts) { notImplemented('fs', 'write') } + async write(path, contents, opts) { + notImplemented('fs', 'write') + } async writeAll(fds, opts) { for (const fd of fds) { await this.write(fd.path, fd.contents, opts) } } - async read(path, opts) { notImplemented('fs', 'read') } - async list(path, opts) { notImplemented('fs', 'list') } - async createUploadUrl(path, ttl, opts) { notImplemented('fs', 'createUploadUrl') } - async createDownloadUrl(path, ttl, opts) { notImplemented('fs', 'createDownloadUrl') } - async getPublicUrl(path, opts) { notImplemented('fs', 'getPublicUrl') } - async makePublic(path, opts) { notImplemented('fs', 'makePublic') } - async makePrivate(path, opts) { notImplemented('fs', 'makePrivate') } + async read(path, opts) { + notImplemented('fs', 'read') + } + async list(path, opts) { + notImplemented('fs', 'list') + } + async createUploadUrl(path, ttl, opts) { + notImplemented('fs', 'createUploadUrl') + } + async createDownloadUrl(path, ttl, opts) { + notImplemented('fs', 'createDownloadUrl') + } + async getPublicUrl(path, opts) { + notImplemented('fs', 'getPublicUrl') + } + async makePublic(path, opts) { + notImplemented('fs', 'makePublic') + } + async makePrivate(path, opts) { + notImplemented('fs', 'makePrivate') + } } diff --git a/src/services/fs/local.js b/src/services/fs/local.js index 4a8ea71..33e2be0 100644 --- a/src/services/fs/local.js +++ b/src/services/fs/local.js @@ -5,7 +5,7 @@ const pathUtil = require('path') class LocalFS extends FS { constructor() { - super(); + super() this.base = fs.path(config('fs.root')) this.urlRoot = fs.path(config('fs.url')) } @@ -28,29 +28,41 @@ class LocalFS extends FS { return await fs.inspectTreeAsync(filePath, opts) } async createUploadUrl(path, ttl, opts) { - const url = new URL(pathUtil.normalize(this.urlRoot + '/' + path), config('app.host.web')) + const url = new URL( + pathUtil.normalize(this.urlRoot + '/' + path), + config('app.host.web'), + ) return { url, - expires_at_ms: Date.now() + (ttl * 1000), + expires_at_ms: Date.now() + ttl * 1000, method: 'PUT', headers: opts.headers ?? {}, } } async createDownloadUrl(path, ttl, opts) { - const url = new URL(pathUtil.normalize(this.urlRoot + '/' + path), config('app.host.web')) + const url = new URL( + pathUtil.normalize(this.urlRoot + '/' + path), + config('app.host.web'), + ) return { url, - expires_at_ms: Date.now() + (ttl * 1000), + expires_at_ms: Date.now() + ttl * 1000, method: 'GET', headers: opts.headers ?? 
{}, } } async getPublicUrl(path, opts) { - return new URL(pathUtil.normalize(this.urlRoot + '/' + path), config('app.host.web')).toString() - + return new URL( + pathUtil.normalize(this.urlRoot + '/' + path), + config('app.host.web'), + ).toString() + } + async makePublic(path, opts) { + return true + } + async makePrivate(path, opts) { + return true } - async makePublic(path, opts) { return true } - async makePrivate(path, opts) { return true } } -module.exports = new LocalFS() \ No newline at end of file +module.exports = new LocalFS() diff --git a/src/services/index.js b/src/services/index.js index 8592613..bd3a963 100644 --- a/src/services/index.js +++ b/src/services/index.js @@ -10,10 +10,12 @@ const SERVICES = [ const services = {} -SERVICES.forEach(([name, env, fallback]) => Object.defineProperty(services, name, { - get() { - return loadEnvService(name, env, fallback) - }, -})) +SERVICES.forEach(([name, env, fallback]) => + Object.defineProperty(services, name, { + get() { + return loadEnvService(name, env, fallback) + }, + }), +) module.exports = services diff --git a/src/services/mail/interface.js b/src/services/mail/interface.js index 3bce2d9..dccb636 100644 --- a/src/services/mail/interface.js +++ b/src/services/mail/interface.js @@ -10,14 +10,21 @@ module.exports = class Mail { return this._renderer } - get fromAddress() { return config('mail.from') } - get replyAddress() { return config('mail.replyto') } + get fromAddress() { + return config('mail.from') + } + get replyAddress() { + return config('mail.replyto') + } - sendTo() { notImplemented('Mail', 'sendTo') } - send(view, to, subject, data = {}, cc = [], bcc = []) { notImplemented('Mail', 'send') } + sendTo() { + notImplemented('Mail', 'sendTo') + } + send(view, to, subject, data = {}, cc = [], bcc = []) { + notImplemented('Mail', 'send') + } sendTemplate(to, subject, templateId, data, opts) { - const templatePath = `templates/sg/${ templateId }` + const templatePath = `templates/sg/${templateId}` return this.send(templatePath, to, subject, data) } } - diff --git a/src/services/mail/log.js b/src/services/mail/log.js index 2ba3205..12d564e 100644 --- a/src/services/mail/log.js +++ b/src/services/mail/log.js @@ -1,5 +1,7 @@ const log = { - info(...args) { console.log('[Info]', ...args)} + info(...args) { + console.log('[Info]', ...args) + }, } const Mail = require('./interface') @@ -12,23 +14,18 @@ class LogMailer extends Mail { async send(view, to, subject, data = {}, cc = [], bcc = []) { // const paths = await getMailerPaths(view) + log.info('[Mailer]', `To: [${to}]`, subject) + log.info('[Mailer]', `CC: [${cc.join(', ')}]`, `BCC: [${bcc.join(', ')}]`) log.info( '[Mailer]', - `To: [${ to }]`, - subject - ) - log.info( - '[Mailer]', - `CC: [${ cc.join(', ') }]`, - `BCC: [${ bcc.join(', ') }]` + `From: [${this.fromAddress}]`, + `Reply To: [${this.replyAddress}]`, ) + // log.info('[Mailer] Text:', paths.text ? await this.renderer.render(paths.text, data) : 'null') log.info( - '[Mailer]', - `From: [${ this.fromAddress }]`, - `Reply To: [${ this.replyAddress }]` + '[Mailer] Html:', + view ? await this.renderer.render(view, data) : 'null', ) - // log.info('[Mailer] Text:', paths.text ? await this.renderer.render(paths.text, data) : 'null') - log.info('[Mailer] Html:', view ? 
await this.renderer.render(view, data) : 'null') } } diff --git a/src/services/mail/postmark.js b/src/services/mail/postmark.js new file mode 100644 index 0000000..7171d32 --- /dev/null +++ b/src/services/mail/postmark.js @@ -0,0 +1,82 @@ +const postmark = require('postmark') +const Mail = require('./interface') +const { config } = require('bootstrap') + +class PostmarkMailer extends Mail { + constructor() { + super() + const key = config('mail.key') + this.mailer = new postmark.ServerClient(key) + } + + async sendTo(options) { + const renderer = this.renderer + + const data = Object.assign( + { + meta: { + subject: options.subject, + }, + }, + options.data, + ) + + const value = await renderer.render(options.htmlView, data) + + const message = { + To: options.to, // 'test@blackhole.postmarkapp.com', // options.to, + From: options.from, + // fromName: options.name, + Subject: options.subject, + // replyTo: options.replyto, + TextBody: value, + HtmlBody: value, + // cc: options.cc, + // bcc: options.bcc, + MessageStream: 'outbound', + } + + return this.mailer.sendEmail(message) + } + + async send(view, to, subject, data = {}, cc = [], bcc = []) { + // const paths = await getMailerPaths(view) + // if (paths.html == null || paths.text == null) { + // throw new InvalidMailerPathError(view) + // } + + return this.sendTo({ + to, + from: this.fromAddress, + subject, + name: config('mail.name'), + replyto: this.replyAddress, + textView: view, + htmlView: view, + cc, + bcc, + data, + }) + } + + async sendTemplate(to, subject, templateId, data = {}, opts = {}) { + // const baseOpts = config('mail.opts') + + await this.mailer.sendEmailWithTemplate({ + // ...baseOpts, + ...opts, + To: to, + From: opts.from ?? this.fromAddress, + TemplateId: templateId, + TemplateModel: { + product_name: 'Jetsam', + product_url: 'https://jetsam.tech', + company_name: 'Jetsam Tech Ltd', + subject, + ...data, + }, + }) + } +} + +module.exports = new PostmarkMailer() diff --git a/src/services/mail/sendgrid.js b/src/services/mail/sendgrid.js index 80b4ac0..9156087 100644 --- a/src/services/mail/sendgrid.js +++ b/src/services/mail/sendgrid.js @@ -12,11 +12,14 @@ class SendgridMailer extends Mail { async sendTo(options) { const renderer = this.renderer - const data = Object.assign({ - meta: { - subject: options.subject, + const data = Object.assign( + { + meta: { + subject: options.subject, + }, }, - }, options.data) + options.data, + ) const value = await renderer.render(options.htmlView, data) diff --git a/src/services/mail/smtp.js b/src/services/mail/smtp.js index c05b112..f320fa3 100644 --- a/src/services/mail/smtp.js +++ b/src/services/mail/smtp.js @@ -21,25 +21,34 @@ class SmtpMailer extends Mail { async sendTo(options) { const renderer = this.renderer - const data = Object.assign({ - meta: { - subject: options.subject, + const data = Object.assign( + { + meta: { + subject: options.subject, + }, }, - }, options.data) + options.data, + ) const value = await renderer.render(options.htmlView, data) if (Array.isArray(options.to)) { - return Promise.all(options.to.map(to => this.transport.sendMail({ - to, - from: options.from, - subject: options.subject, - replyTo: options.replyto, - text: value, - html: value, - cc: options.cc, - bcc: options.bcc, - }).catch(e => e))) + return Promise.all( + options.to.map(to => + this.transport + .sendMail({ + to, + from: options.from, + subject: options.subject, + replyTo: options.replyto, + text: value, + html: value, + cc: options.cc, + bcc: options.bcc, + }) + .catch(e => e), + ), + ) 
} const message = { diff --git a/src/services/queue/amqp.js b/src/services/queue/amqp.js index f543de9..5df4b10 100644 --- a/src/services/queue/amqp.js +++ b/src/services/queue/amqp.js @@ -26,31 +26,41 @@ class AmqpQueue extends Queue { this.dlq = 'jetsam.jobs.dlq' this._handlers = {} - this._init.then(q => { - debug('Established connection to AMQP queue') - this._amqp = q - }).catch(e => { - debug('Failed to connect to AMQP queue') - console.error(e) - }) + this._init + .then(q => { + debug('Established connection to AMQP queue') + this._amqp = q + }) + .catch(e => { + debug('Failed to connect to AMQP queue') + console.error(e) + }) } bind(jobname, handler) { this._handlers[jobname] = handler } dispatch(jobname, payload, attempt = 0) { - return threadContext.profile('queue.dispatch', jobname, () => this.dispatchAfter(jobname, payload, 0, attempt)) + return threadContext.profile('queue.dispatch', jobname, () => + this.dispatchAfter(jobname, payload, 0, attempt), + ) } dispatchAfter(jobname, payload, delay, attempt = 0) { return this._initialise().then(() => { - debug(`Processing job ${ jobname } via AMQP connection`) - - this._channel.publish(this._exchange, 'jobs', Buffer.from(JSON.stringify({ - type: jobname, - payload, - delay, - attempt, - }))) + debug(`Processing job ${jobname} via AMQP connection`) + + this._channel.publish( + this._exchange, + 'jobs', + Buffer.from( + JSON.stringify({ + type: jobname, + payload, + delay, + attempt, + }), + ), + ) }) } @@ -67,7 +77,7 @@ class AmqpQueue extends Queue { debug('Starting Trace') threadContext.getTransaction({ op: 'queue.job', - name: 'AMQP Queue Handler' + name: 'AMQP Queue Handler', }) await this._handleMessage(object) } finally { @@ -78,9 +88,10 @@ class AmqpQueue extends Queue { }) }) - return () => this._init.then(q => { - q.close() - }) + return () => + this._init.then(q => { + q.close() + }) } async _handleMessage(object) { @@ -95,40 +106,46 @@ class AmqpQueue extends Queue { t.setData('job.attempt', attempt) t.setData('job.body', body) - if (!this._handlers.hasOwnProperty(type)) { t.setName(`[Q] Unknown`) - debug(`No handler for type ${ type }, discarding message`) + debug(`No handler for type ${type}, discarding message`) await this._channel.ack(object) return } - t.setName(`[Q] ${ type }`) + t.setName(`[Q] ${type}`) - debug(`Processing message ${ type }`) + debug(`Processing message ${type}`) try { const ctx = await ServiceProvider.detached() const handler = this._handlers[type] t.description = handler.name - await threadContext.profile('job.handler', undefined, () => handler(body, ctx)) + await threadContext.profile('job.handler', undefined, () => + handler(body, ctx), + ) await this._channel.ack(object) - debug(`Processed message ${ type }`) + debug(`Processed message ${type}`) } catch (e) { Sentry.withScope(scope => { scope.setContext('job', { type, body, delay, attempt }) Sentry.captureException(e) }) - debug(`Failed message ${ type }`) + debug(`Failed message ${type}`) debug(e) - if (attempt < 5) { + if (attempt < this.conf.retries ?? 
5) { const next = attempt + 1 - debug(`Re-queue message ${ type } #${ next }`) - await this._channel.sendToQueue(this.dlq, Buffer.from(JSON.stringify({ - type, - payload: body, - delay, - attempt: next, - }))) + debug(`Re-queue message ${type} #${next}`) + await this._channel.sendToQueue( + this.dlq, + Buffer.from( + JSON.stringify({ + type, + payload: body, + delay, + attempt: next, + }), + ), + ) // await this.dispatchAfter(type, body, next * 500, next) } else { debug('Discarding message') @@ -152,7 +169,7 @@ class AmqpQueue extends Queue { await this._channel.assertQueue(this.dlq, { deadLetterExchange: this._exchange, deadLetterRoutingKey: 'jobs', - messageTtl: 500 + messageTtl: 500, }) } } diff --git a/src/services/queue/async.js b/src/services/queue/async.js index 98186d3..8aac842 100644 --- a/src/services/queue/async.js +++ b/src/services/queue/async.js @@ -4,7 +4,7 @@ const debug = require('debug')('server:services:queue') class AsyncQueue extends Queue { constructor() { - super(); + super() this._emitter = new EventEmitter() this._buffer = [] this._active = false @@ -18,19 +18,19 @@ class AsyncQueue extends Queue { if (this._active) { this.dispatchAfter(jobname, payload, 0) } else { - debug(`Adding job ${ jobname } to buffer, no queue listener present`) + debug(`Adding job ${jobname} to buffer, no queue listener present`) this._buffer.push({ name: jobname, payload }) } } dispatchAfter(jobname, payload, delay) { if (this._active) { - debug(`Processing job ${ jobname } asynchronously`) + debug(`Processing job ${jobname} asynchronously`) setTimeout(() => { this._emitter.emit(jobname, payload) }, delay) } else { - debug(`Adding job ${ jobname } to buffer, no queue listener present`) + debug(`Adding job ${jobname} to buffer, no queue listener present`) this._buffer.push({ name: jobname, payload, delay }) } } diff --git a/src/services/queue/interface.js b/src/services/queue/interface.js index 909485a..a8e3c7a 100644 --- a/src/services/queue/interface.js +++ b/src/services/queue/interface.js @@ -1,8 +1,16 @@ const { notImplemented } = require('services/utils') module.exports = class Queue { - bind(jobname, handler) { notImplemented('Queue', 'bind') } - dispatch(jobname, payload) { notImplemented('Queue', 'dispatch') } - dispatchAfter(jobname, payload, delay) { notImplemented('Queue', 'dispatchAfter') } - listen() { notImplemented('Queue', 'listen') } + bind(jobname, handler) { + notImplemented('Queue', 'bind') + } + dispatch(jobname, payload) { + notImplemented('Queue', 'dispatch') + } + dispatchAfter(jobname, payload, delay) { + notImplemented('Queue', 'dispatchAfter') + } + listen() { + notImplemented('Queue', 'listen') + } } diff --git a/src/services/totp/vault.js b/src/services/totp/vault.js index eb2fac3..3baaaf1 100644 --- a/src/services/totp/vault.js +++ b/src/services/totp/vault.js @@ -2,7 +2,12 @@ const TotpProvider = require('./interface') const { config } = require('bootstrap') const { URL } = require('url') const threadContext = require('core/injection/ThreadContext') -const { VaultClient, VaultSimpleAuth, VaultKVStore, VaultTOTPStore } = require('@commander-lol/vault-client') +const { + VaultClient, + VaultSimpleAuth, + VaultKVStore, + VaultTOTPStore, +} = require('@commander-lol/vault-client') class VaultTotpProvider extends TotpProvider { constructor() { @@ -17,7 +22,7 @@ class VaultTotpProvider extends TotpProvider { options: { auth: { path: config('vault.auth_path'), - credentials: config('vault.credentials') + credentials: config('vault.credentials'), }, kv: { path: 
config('vault.kv_path'), @@ -38,7 +43,11 @@ class VaultTotpProvider extends TotpProvider { const email = user.email try { - const { barcode, url } = await this.client.stores.totp.createProvider(id, 'Jetsam', email) + const { barcode, url } = await this.client.stores.totp.createProvider( + id, + 'Jetsam', + email, + ) const parsed = new URL(url) const secret = parsed.searchParams.get('secret') @@ -47,7 +56,7 @@ class VaultTotpProvider extends TotpProvider { url, secret, } - } catch(e) { + } catch (e) { console.error(e) return null } @@ -77,7 +86,9 @@ class VaultTotpProvider extends TotpProvider { const hashes = await Promise.all(codes.map(c => crypto.hash(c))) try { - await this.client.stores.kv.write(`totp_recovery/${ userid }`, { codes: hashes }) + await this.client.stores.kv.write(`totp_recovery/${userid}`, { + codes: hashes, + }) } catch (e) { console.log(e) return null @@ -90,7 +101,7 @@ class VaultTotpProvider extends TotpProvider { let data = null try { - ;({ data } = await this.client.stores.kv.read(`totp_recovery/${ userid }`)) + ;({ data } = await this.client.stores.kv.read(`totp_recovery/${userid}`)) } catch (e) { console.log(e) } @@ -108,7 +119,9 @@ class VaultTotpProvider extends TotpProvider { if (found != null) { const newCodes = codes.filter(c => c !== found) - await this.client.stores.kv.write(`totp_recovery/${ userid }`, { codes: newCodes }) + await this.client.stores.kv.write(`totp_recovery/${userid}`, { + codes: newCodes, + }) return true } } @@ -117,4 +130,4 @@ class VaultTotpProvider extends TotpProvider { } } -module.exports = new VaultTotpProvider() \ No newline at end of file +module.exports = new VaultTotpProvider() diff --git a/src/services/utils.js b/src/services/utils.js index 095c78d..6fec150 100644 --- a/src/services/utils.js +++ b/src/services/utils.js @@ -1,23 +1,33 @@ const { env } = require('bootstrap') const debug = require('debug')('server:services') -exports.loadEnvService = function loadEnvService(serviceName, envName, fallbackName = null) { +exports.loadEnvService = function loadEnvService( + serviceName, + envName, + fallbackName = null, +) { const driverName = env(envName, fallbackName) if (driverName != null) { - const service = require(`./${ serviceName }/${ driverName }`) - const interfaze = require(`./${ serviceName }/interface`) + const service = require(`./${serviceName}/${driverName}`) + const interfaze = require(`./${serviceName}/interface`) - debug('Loading service interface %s for service type %s', driverName, serviceName) + debug( + 'Loading service interface %s for service type %s', + driverName, + serviceName, + ) if (service instanceof interfaze) { return service } - throw new Error(`Bad service implementation; expected ${ serviceName }/${ driverName } to implement interface, but did not`) + throw new Error( + `Bad service implementation; expected ${serviceName}/${driverName} to implement interface, but did not`, + ) } else { return null } } exports.notImplemented = function throwNotImplemented(clazz, methodName) { - throw new Error(`${ clazz } method not implemented: ${ methodName }`) + throw new Error(`${clazz} method not implemented: ${methodName}`) } diff --git a/src/vendor/koa-handlebars.js b/src/vendor/koa-handlebars.js index 8902032..363b929 100644 --- a/src/vendor/koa-handlebars.js +++ b/src/vendor/koa-handlebars.js @@ -4,23 +4,23 @@ const Handlebars = require('handlebars') function createPathsFromContext(context = '', filename) { const { name } = path.parse(filename) - return [ - path.join(context, filename), - 
path.join(context, name), - name, - ] + return [path.join(context, filename), path.join(context, name), name] } function loadPartials(fsinst, root, ext, debug) { const tree = fsinst.inspectTree(root) if (!tree || (tree.type !== 'dir' && tree.type !== 'file')) { - debug('[koa-handlebars] Partials directory does not exist, skipping partials') + debug( + '[koa-handlebars] Partials directory does not exist, skipping partials', + ) return [] } if (tree.type !== 'dir') { - throw new TypeError(`[koa-handlebars] Partials directory path must point to a directory, found ${ tree.type }`) + throw new TypeError( + `[koa-handlebars] Partials directory path must point to a directory, found ${tree.type}`, + ) } const processing = tree.children @@ -31,15 +31,20 @@ function loadPartials(fsinst, root, ext, debug) { if (current.type === 'dir') { for (const child of current.children) { - child.context = current.context ? path.join(current.context, current.name) : current.name + child.context = current.context + ? path.join(current.context, current.name) + : current.name processing.push(child) } } else if (current.type === 'file') { if (path.extname(current.name) === ext) { - const [pathWithExt, pathWithoutExt] = createPathsFromContext(current.context, current.name) + const [pathWithExt, pathWithoutExt] = createPathsFromContext( + current.context, + current.name, + ) const content = fsinst.read(fsinst.path(root, pathWithExt), 'utf8') - debug(`[koa-mustache] Loading partial ${ pathWithExt }`) + debug(`[koa-mustache] Loading partial ${pathWithExt}`) partials.push({ name: pathWithoutExt, content }) } @@ -50,7 +55,6 @@ function loadPartials(fsinst, root, ext, debug) { } function loadHelpers(fsinst, root, debug) { - const tree = fsinst.inspectTree(root) if (!tree || (tree.type !== 'dir' && tree.type !== 'file')) { @@ -59,29 +63,38 @@ function loadHelpers(fsinst, root, debug) { } if (tree.type !== 'dir') { - throw new TypeError(`[koa-handlebars] Helpers directory path must point to a directory, found ${ tree.type }`) + throw new TypeError( + `[koa-handlebars] Helpers directory path must point to a directory, found ${tree.type}`, + ) } const processing = tree.children const helpers = [] - processing.forEach(p => p.context = fsinst.path(root)) + processing.forEach(p => (p.context = fsinst.path(root))) while (processing.length > 0) { const current = processing.shift() if (current.type === 'dir') { for (const child of current.children) { - child.context = current.context ? path.join(current.context, current.name) : current.name + child.context = current.context + ? 
path.join(current.context, current.name) + : current.name processing.push(child) } } else if (current.type === 'file') { - const [pathWithExt, _, name ] = createPathsFromContext(current.context, current.name) + const [pathWithExt, _, name] = createPathsFromContext( + current.context, + current.name, + ) try { const helperMap = require(pathWithExt) helpers.push({ name, content: helperMap }) - debug(`[koa-mustache] Loading helper ${ pathWithExt }`) - } catch(e) { console.warn(e.message) } + debug(`[koa-mustache] Loading helper ${pathWithExt}`) + } catch (e) { + console.warn(e.message) + } } } @@ -91,7 +104,11 @@ function loadHelpers(fsinst, root, debug) { module.exports = function createRenderMiddleware(root, opts = {}) { const instance = module.exports.createRenderer(root, opts) return async function handlebarsMiddleware(ctx, next) { - ctx.render = async function renderTemplateData(template, data = {}, opts = {}) { + ctx.render = async function renderTemplateData( + template, + data = {}, + opts = {}, + ) { const content = await instance.render(template, data, opts) if (content == null) { this.status = 404 @@ -123,8 +140,8 @@ class Renderer { if (!template) { let fileData = await viewDir.inspectAsync(path) if (fileData == null) { - path = `${ path }${ this._ext }` - fileData = await viewDir.inspectAsync(`${ view }${ this._ext }`) + path = `${path}${this._ext}` + fileData = await viewDir.inspectAsync(`${view}${this._ext}`) if (fileData == null) { return null } @@ -151,12 +168,14 @@ class Renderer { } } -module.exports.createRenderer = function(root, opts = {}) { - const useCache = opts.hasOwnProperty('cache') ? opts.cache : process.env.NODE_ENV === 'production' +module.exports.createRenderer = function (root, opts = {}) { + const useCache = opts.hasOwnProperty('cache') + ? opts.cache + : process.env.NODE_ENV === 'production' const extension = opts.hasOwnProperty('extension') ? opts.extension : '.hbs' const partials = opts.hasOwnProperty('partials') ? opts.partials : 'partials' const helpers = opts.hasOwnProperty('helpers') ? opts.helpers : 'helpers' - const extend = opts.hasOwnProperty('extend') ? opts.extend : (i) => i + const extend = opts.hasOwnProperty('extend') ? opts.extend : i => i const debug = opts.debug || (() => {}) const fsinst = opts.hasOwnProperty('jetpack') ? opts.jetpack : fs @@ -165,13 +184,17 @@ module.exports.createRenderer = function(root, opts = {}) { const helperContent = loadHelpers(viewDir, helpers, debug) helperContent.forEach(({ name, content }) => - typeof content === 'function' ? - instance.registerHelper(name, content) : - Object.entries(content).forEach(([helper, fn]) => instance.registerHelper(helper, fn)) + typeof content === 'function' + ? 
instance.registerHelper(name, content) + : Object.entries(content).forEach(([helper, fn]) => + instance.registerHelper(helper, fn), + ), ) const partialContent = loadPartials(viewDir, partials, extension, debug) - partialContent.forEach(({ name, content }) => instance.registerPartial(name, content)) + partialContent.forEach(({ name, content }) => + instance.registerPartial(name, content), + ) extend(instance) diff --git a/src/vendor/sentry.js b/src/vendor/sentry.js index 50a6cac..84f12e2 100644 --- a/src/vendor/sentry.js +++ b/src/vendor/sentry.js @@ -1,24 +1,22 @@ const Sentry = require('@sentry/node') const Tracing = require('@sentry/tracing') -const blockedPaths = new Set([ - '/api/.secure/jwks', - '/api', -]) +const blockedPaths = new Set(['/api/.secure/jwks', '/api']) -exports.configure = function() { +exports.configure = function () { const pkg = require('../../package.json') const { config } = require('bootstrap') Sentry.init({ dsn: config('sentry.dsn'), - integrations: integrations => integrations.filter(itg => itg.name !== 'Console'), + integrations: integrations => + integrations.filter(itg => itg.name !== 'Console'), environment: config('app.env'), - release: `${ pkg.name }@${ pkg.version }`, - tracesSampler: (ctx) => { + release: `${pkg.name}@${pkg.version}`, + tracesSampler: ctx => { if ( - ctx.transactionContext?.op === 'http.request' - && blockedPaths.has(ctx.transactionContext?.tags?.['http.path']) + ctx.transactionContext?.op === 'http.request' && + blockedPaths.has(ctx.transactionContext?.tags?.['http.path']) ) { return 0 } @@ -26,5 +24,4 @@ exports.configure = function() { return config('sentry.samples') }, }) - -} \ No newline at end of file +} diff --git a/worker.js b/worker.js index 98c2c56..35c30f8 100644 --- a/worker.js +++ b/worker.js @@ -23,7 +23,9 @@ async function main() { close = await queue.listen() await new Promise(async r => { debug('Starting worker spin loop') - while (!done) { await new Promise(rr => setTimeout(rr, 10)) } + while (!done) { + await new Promise(rr => setTimeout(rr, 10)) + } debug('Ending worker spin loop') r(true) }) @@ -35,24 +37,18 @@ function bindSentry() { debug('Binding sentry to process level errors') - process.on("error", (err) => { - Sentry.captureException(err); - }); -} - -main() - .catch(e => { - console.error(e) - Sentry.captureException(e); - process.exit(1) + process.on('error', err => { + Sentry.captureException(err) }) +} +main().catch(e => { + console.error(e) + Sentry.captureException(e) + process.exit(1) +}) -const cleanupsigs = [ - 'SIGINT', - 'SIGTERM', - 'SIGUSR2', -] +const cleanupsigs = ['SIGINT', 'SIGTERM', 'SIGUSR2'] cleanupsigs.forEach(signal => { process.on(signal, () => { -- GitLab