feat: cron job to remove vc users from std and parent realms
NithinKuruba committed Oct 27, 2023
1 parent 285040b commit 0d96893
Showing 16 changed files with 1,860 additions and 270 deletions.
2 changes: 2 additions & 0 deletions docker/kc-cron-job/.env.example
@@ -20,3 +20,5 @@ PGDATABASE=
 CSS_API_URL=http://localhost:8080/app
 CSS_API_AUTH_SECRET=
 RC_WEBHOOK=
+VC_USERS_RETENTION_DAYS=
+INACTIVE_IDIR_USERS_RETENTION_DAYS=
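
Both new variables are left blank in the example file and are read by the cron jobs at runtime. A minimal consumption sketch, assuming dotenv loading as in helpers.js; the 30-day fallbacks below are illustrative only and are not taken from this commit:

// Illustrative sketch: the fallback values are assumptions, not defaults from this commit.
require('dotenv').config();

const VC_USERS_RETENTION_DAYS = parseInt(process.env.VC_USERS_RETENTION_DAYS || '30', 10);
const INACTIVE_IDIR_USERS_RETENTION_DAYS = parseInt(process.env.INACTIVE_IDIR_USERS_RETENTION_DAYS || '30', 10);

console.log(`Pruning VC users older than ${VC_USERS_RETENTION_DAYS} days`);
console.log(`Pruning IDIR users inactive for ${INACTIVE_IDIR_USERS_RETENTION_DAYS} days`);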
28 changes: 28 additions & 0 deletions docker/kc-cron-job/.eslintrc.js
@@ -0,0 +1,28 @@
module.exports = {
env: {
browser: true,
commonjs: true,
es2021: true
},
extends: 'standard',
overrides: [
{
env: {
node: true
},
files: ['.eslintrc.{js,cjs}'],
parserOptions: {
sourceType: 'script'
}
}
],
parserOptions: {
ecmaVersion: 'latest'
},
rules: {
'space-before-function-paren': [0],
'spaced-comment': [0],
semi: [0],
'array-callback-return': [0]
}
};
12 changes: 12 additions & 0 deletions docker/kc-cron-job/.prettierrc
@@ -0,0 +1,12 @@
{
"singleQuote": true,
"printWidth": 120,
"tabWidth": 2,
"useTabs": false,
"semi": true,
"quoteProps": "as-needed",
"trailingComma": "none",
"bracketSpacing": true,
"arrowParens": "always",
"jsxSingleQuote": false
}
2 changes: 1 addition & 1 deletion docker/kc-cron-job/Dockerfile
@@ -1,4 +1,4 @@
-FROM node:14
+FROM node:18
 
 WORKDIR /usr/src/app
 
29 changes: 8 additions & 21 deletions docker/kc-cron-job/active-sessions.js
@@ -1,60 +1,47 @@
 const _ = require('lodash');
-const { Client } = require('pg');
 const format = require('pg-format');
+const { getPgClient } = require('./helpers');
 const KcAdminClient = require('keycloak-admin').default;
 
 const KEYCLOAK_URL = process.env.KEYCLOAK_URL || 'https://dev.oidc.gov.bc.ca';
 const KEYCLOAK_CLIENT_ID = process.env.KEYCLOAK_CLIENT_ID || 'script-cli';
 const KEYCLOAK_CLIENT_SECRET = process.env.KEYCLOAK_CLIENT_SECRET;
-const PGHOST = process.env.PGHOST;
-const PGPORT = process.env.PGPORT || '5432';
-const PGUSER = process.env.PGUSER;
-const PGPASSWORD = process.env.PGPASSWORD;
-const PGDATABASE = process.env.PGDATABASE;
 
 const kcAdminClient = new KcAdminClient({
   baseUrl: `${KEYCLOAK_URL}/auth`,
-  realmName: 'master',
+  realmName: 'master'
 });
 
 async function main() {
   try {
     await kcAdminClient.auth({
       grantType: 'client_credentials',
       clientId: KEYCLOAK_CLIENT_ID,
-      clientSecret: KEYCLOAK_CLIENT_SECRET,
+      clientSecret: KEYCLOAK_CLIENT_SECRET
     });
 
-    // see https://node-postgres.com/api/client#new-clientconfig-object
-    const client = new Client({
-      host: PGHOST,
-      port: parseInt(PGPORT),
-      user: PGUSER,
-      password: PGPASSWORD,
-      database: PGDATABASE,
-      ssl: { rejectUnauthorized: false },
-    });
+    const client = getPgClient();
 
     const realms = await kcAdminClient.realms.find({});
     const dataset = [];
     await Promise.all(
       realms.map(async (realm) => {
         const sessions = await kcAdminClient.sessions.find({
-          realm: realm.realm,
+          realm: realm.realm
         });
         sessions.map((session) => {
-          const sessionActiveCount = parseInt(session.active);
+          const sessionActiveCount = parseInt(session.active, 10);
           const sessionClientID = session.clientId;
           if (sessionActiveCount > 0) {
             dataset.push([KEYCLOAK_URL, realm.realm, sessionClientID, sessionActiveCount]);
           }
         });
-      }),
+      })
     );
 
     const query = format(
       'INSERT INTO active_sessions (keycloak_url, realm, client_id, session_count) VALUES %L',
-      dataset,
+      dataset
     );
 
     await client.connect();
58 changes: 12 additions & 46 deletions docker/kc-cron-job/event-logs.js
@@ -1,15 +1,9 @@
-const _ = require('lodash');
-const { Client } = require('pg');
 const format = require('pg-format');
 const fsPromises = require('fs').promises;
 const fs = require('fs');
 const readline = require('readline');
+const { deleteLegacyData, getPgClient } = require('./helpers');
 
-const PGHOST = process.env.PGHOST || 'localhost';
-const PGPORT = process.env.PGPORT || '5432';
-const PGUSER = process.env.PGUSER || 'postgres';
-const PGPASSWORD = process.env.PGPASSWORD || 'postgres';
-const PGDATABASE = process.env.PGDATABASE || 'postgres';
 const LOG_BATCH_SIZE = process.env.LOG_BATCH_SIZE || 1000;
 const RETENTION_PERIOD_DAYS = process.env.RETENTION_PERIOD_DAYS || 30;
 const SAVE_LOGS_N_DAYS_AGO = process.env.SAVE_LOGS_N_DAYS_AGO || 2;
@@ -28,7 +22,7 @@ const logFields = [
   'processName',
   'processId',
   'timestamp',
-  'version',
+  'version'
 ];
 
 const getQuery = (logs) => {
@@ -50,7 +44,7 @@ const getQuery = (logs) => {
       version,
       namespace
     ) VALUES %L`,
-    logs,
+    logs
   );
   return query;
 };
@@ -95,7 +89,7 @@ const reduceDataFromFiles = async (dirname) => {
   let client;
 
   try {
-    client = getClient();
+    client = getPgClient();
     await client.connect();
     if (!fs.existsSync(dirname)) {
       console.info(`Directory ${dirname} does not exist.`);
@@ -105,7 +99,7 @@ const reduceDataFromFiles = async (dirname) => {
     const files = await fsPromises.readdir(dirname);
     for (const filename of files) {
       const lineReader = readline.createInterface({
-        input: fs.createReadStream(`${dirname}/${filename}`),
+        input: fs.createReadStream(`${dirname}/${filename}`)
       });
       promises.push(saveLogsForFile(lineReader, client));
     }
@@ -118,8 +112,8 @@ const reduceDataFromFiles = async (dirname) => {
 };
 
 const formatLog = (log) => {
-  log['timestamp'] = log['@timestamp'];
-  log['version'] = log['@version'];
+  log.timestamp = log['@timestamp'];
+  log.version = log['@version'];
   delete log['@timestamp'];
   delete log['@version'];
   try {
@@ -132,10 +126,10 @@ const formatLog = (log) => {
       return null;
     }
 
-    let { message } = log;
+    const { message } = log;
     const json = {};
     const fields = message.split(', ');
-    for (field of fields) {
+    for (const field of fields) {
       const [key, val] = field.split(/=(.+)/);
       json[key] = val;
     }
@@ -146,42 +140,14 @@ const formatLog = (log) => {
   }
 };
 
-const getClient = () => {
-  const client = new Client({
-    host: PGHOST,
-    port: parseInt(PGPORT),
-    user: PGUSER,
-    password: PGPASSWORD,
-    database: PGDATABASE,
-    ssl: { rejectUnauthorized: false },
-  });
-  return client;
-};
-
-const clearOldLogs = async (retentionPeriodDays) => {
-  console.info('Removing old logs from database...');
-  let client;
-  try {
-    client = getClient();
-    await client.connect();
-    const query = `DELETE from sso_logs where timestamp < NOW() - INTERVAL '${retentionPeriodDays} DAYS' and namespace = '${process.env.NAMESPACE}';`;
-    console.info(`Running delete query: ${query}`);
-    await client.query(query);
-  } catch (e) {
-    console.error(e);
-  } finally {
-    await client.end();
-  }
-};
-
 const parseLogStats = async () => {
   console.info('Collecting log stats...');
   let client;
   try {
-    client = getClient();
+    client = getPgClient();
     await client.connect();
     console.info('running save_log_types function...');
-    const saveStatsQuery = `SELECT save_log_types();`;
+    const saveStatsQuery = 'SELECT save_log_types();';
     await client.query(saveStatsQuery);
   } catch (e) {
     console.error(e);
@@ -201,7 +167,7 @@ async function saveFilesToDatabase(dirname) {
   try {
     const dateToSave = getDate(SAVE_LOGS_N_DAYS_AGO);
     const previousDayLogsFolder = `${dirname}/${dateToSave}`;
-    await clearOldLogs(RETENTION_PERIOD_DAYS);
+    await deleteLegacyData('sso_logs', RETENTION_PERIOD_DAYS);
     await reduceDataFromFiles(previousDayLogsFolder);
     await parseLogStats();
   } catch (err) {
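
The per-table cleanup that used to live in clearOldLogs has been folded into the shared deleteLegacyData(tableName, retentionPeriodDays) helper (see helpers.js below); event-logs.js now calls it for the sso_logs table, and the shared helper no longer filters on namespace. A hypothetical reuse sketch for some other retention table with a timestamp column; the table name 'cron_job_audit' is invented for illustration:

// Hypothetical reuse of the shared retention helper; 'cron_job_audit' is an invented table name.
const { deleteLegacyData } = require('./helpers');

async function pruneAuditRows() {
  await deleteLegacyData('cron_job_audit', process.env.RETENTION_PERIOD_DAYS || 30);
}

pruneAuditRows();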
120 changes: 120 additions & 0 deletions docker/kc-cron-job/helpers.js
@@ -0,0 +1,120 @@
const KcAdminClient = require('keycloak-admin').default;
const jws = require('jws');
const { Client } = require('pg');
const axios = require('axios');

require('dotenv').config();

const removeTrailingSlash = (url) => {
return url.endsWith('/') ? url.slice(0, -1) : url;
};

const envs = {
dev: {
url: removeTrailingSlash(process.env.DEV_KEYCLOAK_URL || 'https://dev.loginproxy.gov.bc.ca'),
clientId: process.env.DEV_KEYCLOAK_CLIENT_ID || 'script-cli',
clientSecret: process.env.DEV_KEYCLOAK_CLIENT_SECRET
},
test: {
url: removeTrailingSlash(process.env.TEST_KEYCLOAK_URL || 'https://test.loginproxy.gov.bc.ca'),
clientId: process.env.TEST_KEYCLOAK_CLIENT_ID || 'script-cli',
clientSecret: process.env.TEST_KEYCLOAK_CLIENT_SECRET
},
prod: {
url: removeTrailingSlash(process.env.PROD_KEYCLOAK_URL || 'https://loginproxy.gov.bc.ca'),
clientId: process.env.PROD_KEYCLOAK_CLIENT_ID || 'script-cli',
clientSecret: process.env.PROD_KEYCLOAK_CLIENT_SECRET
}
};

module.exports = {
oneMin: 60 * 1000,
getAdminClient: async function (env) {
try {
const config = envs[env];
if (!config) throw Error(`invalid env ${env}`);

const kcAdminClient = new KcAdminClient({
baseUrl: `${config.url}/auth`,
realmName: 'master',
requestConfig: {
/* Axios request config options https://github.com/axios/axios#request-config */
timeout: 60000
}
});

let decodedToken;

const auth = async () => {
await kcAdminClient.auth({
grantType: 'client_credentials',
clientId: config.clientId,
clientSecret: config.clientSecret
});

decodedToken = jws.decode(kcAdminClient.accessToken);
};

const refreshAsNeeded = async () => {
const expiresIn = decodedToken.payload.exp * 1000 - Date.now();
console.log(expiresIn < this.oneMin);
if (expiresIn < this.oneMin) await auth();
};

kcAdminClient.reauth = auth;
kcAdminClient.refreshAsNeeded = refreshAsNeeded;
kcAdminClient.url = config.url;

await auth();
return kcAdminClient;
} catch (err) {
console.error(err);
return null;
}
},
log: function (msg) {
console.log(`[${new Date().toLocaleString()}] ${msg}`);
},
getPgClient: function () {
return new Client({
host: process.env.PGHOST || 'localhost',
port: parseInt(process.env.PGPORT || '5432'),
user: process.env.PGUSER || 'postgres',
password: process.env.PGPASSWORD || 'postgres',
database: process.env.PGDATABASE || 'rhsso',
ssl: { rejectUnauthorized: false }
});
},
sendRcNotification: async function (cronName, message, err) {
try {
const headers = { Accept: 'application/json' };
const statusCode = err ? 'ERROR' : '';
await axios.post(process.env.RC_WEBHOOK, { projectName: cronName, message, statusCode }, { headers });
} catch (err) {
console.error(err);
}
},
handleError: function (error) {
if (error.isAxiosError) {
console.error((error.response && error.response.data) || error);
} else {
console.error(error);
}
},
deleteLegacyData: async function (tableName, retentionPeriodDays) {
console.info('Removing old logs from database...');
let client;
try {
client = module.exports.getPgClient();
await client.connect();
const query = `DELETE from ${tableName} where timestamp < NOW() - INTERVAL '${retentionPeriodDays} DAYS';`;
console.info(`Running delete query: ${query}`);
await client.query(query);
console.info('Completed running delete query');
} catch (e) {
console.error(e);
} finally {
await client.end();
}
}
};
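
The scripts that actually remove VC and inactive IDIR users are among the 16 changed files but are not displayed in this excerpt. Purely as a hedged illustration of how these helpers could be combined for the VC-user cleanup, the sketch below assumes the realm names, the username search term, the paging limit, and the created-timestamp cutoff; none of those details come from this diff:

// Hypothetical sketch only. Realm names, the username filter, the page size, and the
// age check are illustrative assumptions, not the commit's actual removal logic.
const { getAdminClient, log, sendRcNotification, handleError } = require('./helpers');

const VC_USERS_RETENTION_DAYS = parseInt(process.env.VC_USERS_RETENTION_DAYS || '30', 10);

async function removeStaleVcUsers(env, realm) {
  const adminClient = await getAdminClient(env);
  if (!adminClient) return;

  const cutoff = Date.now() - VC_USERS_RETENTION_DAYS * 24 * 60 * 60 * 1000;
  // 'verifiedcredential' is a placeholder for however VC users are actually identified.
  const users = await adminClient.users.find({ realm, username: 'verifiedcredential', max: 500 });

  for (const user of users) {
    if (user.createdTimestamp && user.createdTimestamp < cutoff) {
      await adminClient.refreshAsNeeded();
      await adminClient.users.del({ realm, id: user.id });
      log(`deleted ${user.username} from ${realm} (${env})`);
    }
  }
}

async function main() {
  try {
    // 'standard' is the shared CSS realm; pairing it with a parent realm is an assumption here.
    await removeStaleVcUsers('dev', 'standard');
    await sendRcNotification('remove-vc-users', 'VC user cleanup completed', null);
  } catch (err) {
    handleError(err);
    await sendRcNotification('remove-vc-users', 'VC user cleanup failed', err);
  }
}

main();

The envs map in helpers.js suggests the real job iterates dev, test, and prod; the exact criteria for selecting VC users and the handling of the standard and parent realms live in the files not shown here.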
6 changes: 6 additions & 0 deletions docker/kc-cron-job/package.json
@@ -22,6 +22,12 @@
   },
   "devDependencies": {
     "dotenv": "^16.3.1",
+    "eslint": "^8.0.1",
+    "eslint-config-standard": "^17.1.0",
+    "eslint-plugin-import": "^2.25.2",
+    "eslint-plugin-n": "^15.0.0 || ^16.0.0 ",
+    "eslint-plugin-prettier": "^5.0.1",
+    "eslint-plugin-promise": "^6.0.0",
     "jest": "^29.7.0"
   }
 }