From 86cb51364ac61b21729e770bd34dab29e4c84974 Mon Sep 17 00:00:00 2001 From: Sheen Capadngan Date: Wed, 2 Oct 2024 22:30:07 +0800 Subject: [PATCH] misc: initial setup for migration of audit logs --- .env.migration.example | 1 + backend/package.json | 18 +++-- backend/src/db/auditlog-knexfile.ts | 73 +++++++++++++++++++ backend/src/db/index.ts | 2 +- backend/src/db/instance.ts | 42 +++++++++++ .../20241002092243_audit-log-drop-fk.ts | 53 ++++++++++++++ ...1002110531_add-audit-log-metadata-index.ts | 15 ++++ backend/src/db/schemas/audit-logs.ts | 3 +- .../ee/services/audit-log/audit-log-dal.ts | 26 +------ backend/src/lib/config/env.ts | 6 ++ backend/src/main.ts | 11 ++- backend/src/server/app.ts | 5 +- backend/src/server/routes/index.ts | 5 +- .../server/routes/v1/organization-router.ts | 7 +- frontend/src/hooks/api/auditLogs/types.tsx | 5 +- .../AuditLogsPage/components/LogsTableRow.tsx | 2 +- 16 files changed, 227 insertions(+), 47 deletions(-) create mode 100644 backend/src/db/auditlog-knexfile.ts create mode 100644 backend/src/db/migrations/20241002092243_audit-log-drop-fk.ts create mode 100644 backend/src/db/migrations/20241002110531_add-audit-log-metadata-index.ts diff --git a/.env.migration.example b/.env.migration.example index 4d1c8f9ef5..2c5f5b9570 100644 --- a/.env.migration.example +++ b/.env.migration.example @@ -1 +1,2 @@ DB_CONNECTION_URI= +AUDIT_LOGS_DB_CONNECTION_URI= diff --git a/backend/package.json b/backend/package.json index 217817edf4..181a3a8684 100644 --- a/backend/package.json +++ b/backend/package.json @@ -45,13 +45,19 @@ "test:e2e-coverage": "vitest run --coverage -c vitest.e2e.config.ts", "generate:component": "tsx ./scripts/create-backend-file.ts", "generate:schema": "tsx ./scripts/generate-schema-types.ts", + "auditlog-migration:latest": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:latest", + "auditlog-migration:up": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:up", + 
"auditlog-migration:down": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:down", + "auditlog-migration:list": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:list", + "auditlog-migration:status": "knex --knexfile ./src/db/auditlog-knexfile.ts --client pg migrate:status", + "auditlog-migration:rollback": "knex --knexfile ./src/db/auditlog-knexfile.ts migrate:rollback", "migration:new": "tsx ./scripts/create-migration.ts", - "migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up", - "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down", - "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list", - "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest", - "migration:status": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status", - "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback", + "migration:up": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:up && npm run auditlog-migration:up", + "migration:down": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:down && npm run auditlog-migration:down", + "migration:list": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:list && npm run auditlog-migration:list", + "migration:latest": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:latest && npm run auditlog-migration:latest", + "migration:status": "knex --knexfile ./src/db/knexfile.ts --client pg migrate:status && npm run auditlog-migration:status", + "migration:rollback": "knex --knexfile ./src/db/knexfile.ts migrate:rollback && npm run auditlog-migration:rollback", "seed:new": "tsx ./scripts/create-seed-file.ts", "seed": "knex --knexfile ./src/db/knexfile.ts --client pg seed:run", "db:reset": "npm run migration:rollback -- --all && npm run migration:latest" diff --git a/backend/src/db/auditlog-knexfile.ts b/backend/src/db/auditlog-knexfile.ts new file mode 
100644 index 0000000000..f6b84d4534 --- /dev/null +++ b/backend/src/db/auditlog-knexfile.ts @@ -0,0 +1,73 @@ +// eslint-disable-next-line +import "ts-node/register"; + +import dotenv from "dotenv"; +import type { Knex } from "knex"; +import path from "path"; + +// Update with your config settings. +dotenv.config({ + path: path.join(__dirname, "../../../.env.migration") +}); +dotenv.config({ + path: path.join(__dirname, "../../../.env") +}); + +if (!process.env.AUDIT_LOGS_DB_CONNECTION_URI && !process.env.AUDIT_LOGS_DB_HOST) { + console.info("Dedicated audit log database not found. No further migrations necessary"); + process.exit(0); +} + +export default { + development: { + client: "postgres", + connection: { + connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI, + host: process.env.AUDIT_LOGS_DB_HOST, + port: process.env.AUDIT_LOGS_DB_PORT, + user: process.env.AUDIT_LOGS_DB_USER, + database: process.env.AUDIT_LOGS_DB_NAME, + password: process.env.AUDIT_LOGS_DB_PASSWORD, + ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT + ? { + rejectUnauthorized: true, + ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii") + } + : false + }, + pool: { + min: 2, + max: 10 + }, + seeds: { + directory: "./seeds" + }, + migrations: { + tableName: "infisical_migrations" + } + }, + production: { + client: "postgres", + connection: { + connectionString: process.env.AUDIT_LOGS_DB_CONNECTION_URI, + host: process.env.AUDIT_LOGS_DB_HOST, + port: process.env.AUDIT_LOGS_DB_PORT, + user: process.env.AUDIT_LOGS_DB_USER, + database: process.env.AUDIT_LOGS_DB_NAME, + password: process.env.AUDIT_LOGS_DB_PASSWORD, + ssl: process.env.AUDIT_LOGS_DB_ROOT_CERT + ?
{ + rejectUnauthorized: true, + ca: Buffer.from(process.env.AUDIT_LOGS_DB_ROOT_CERT, "base64").toString("ascii") + } + : false + }, + pool: { + min: 2, + max: 10 + }, + migrations: { + tableName: "infisical_migrations" + } + } +} as Knex.Config; diff --git a/backend/src/db/index.ts b/backend/src/db/index.ts index 75992e2c69..abebdf65a8 100644 --- a/backend/src/db/index.ts +++ b/backend/src/db/index.ts @@ -1,2 +1,2 @@ export type { TDbClient } from "./instance"; -export { initDbConnection } from "./instance"; +export { initAuditLogDbConnection, initDbConnection } from "./instance"; diff --git a/backend/src/db/instance.ts b/backend/src/db/instance.ts index f6162ad9cc..d4a2a5b2ca 100644 --- a/backend/src/db/instance.ts +++ b/backend/src/db/instance.ts @@ -70,3 +70,45 @@ export const initDbConnection = ({ return db; }; + +export const initAuditLogDbConnection = ({ + dbConnectionUri, + dbRootCert +}: { + dbConnectionUri: string; + dbRootCert?: string; +}) => { + // akhilmhdh: the default type is knex.Knex<any, unknown[]>, but when assigned with knex({}) the generic parameters are lost. + // This was causing issues with files like `snapshot-dal` (`findRecursivelySnapshots`), so the any and unknown[] are set explicitly. + // eslint-disable-next-line + const db: Knex<any, unknown[]> = knex({ + client: "pg", + connection: { + connectionString: dbConnectionUri, + host: process.env.AUDIT_LOGS_DB_HOST, + // @ts-expect-error I have no clue why only for the port there is a type error + // eslint-disable-next-line + port: process.env.AUDIT_LOGS_DB_PORT, + user: process.env.AUDIT_LOGS_DB_USER, + database: process.env.AUDIT_LOGS_DB_NAME, + password: process.env.AUDIT_LOGS_DB_PASSWORD, + ssl: dbRootCert + ?
{ + rejectUnauthorized: true, + ca: Buffer.from(dbRootCert, "base64").toString("ascii") + } + : false + } + }); + + // we add these overrides so that auditLogDb and the primary DB are interchangeable + db.primaryNode = () => { + return db; + }; + + db.replicaNode = () => { + return db; + }; + + return db; +}; diff --git a/backend/src/db/migrations/20241002092243_audit-log-drop-fk.ts b/backend/src/db/migrations/20241002092243_audit-log-drop-fk.ts new file mode 100644 index 0000000000..b2e1a62810 --- /dev/null +++ b/backend/src/db/migrations/20241002092243_audit-log-drop-fk.ts @@ -0,0 +1,53 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise<void> { + const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId"); + const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId"); + const doesTableExist = await knex.schema.hasTable(TableName.AuditLog); + + const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName"); + + if (doesTableExist) { + await knex.schema.alterTable(TableName.AuditLog, (t) => { + // remove existing FKs + if (doesOrgIdExist) { + t.dropForeign("orgId"); + } + + if (doesProjectIdExist) { + t.dropForeign("projectId"); + } + + // add normalized fields necessary after FK removal + if (!doesProjectNameExist) { + t.string("projectName"); + } + }); + } +} + +export async function down(knex: Knex): Promise<void> { + const doesProjectIdExist = await knex.schema.hasColumn(TableName.AuditLog, "projectId"); + const doesOrgIdExist = await knex.schema.hasColumn(TableName.AuditLog, "orgId"); + const doesTableExist = await knex.schema.hasTable(TableName.AuditLog); + const doesProjectNameExist = await knex.schema.hasColumn(TableName.AuditLog, "projectName"); + + if (doesTableExist) { + await knex.schema.alterTable(TableName.AuditLog, (t) => { + // add back FKs + if (doesOrgIdExist) { +
t.foreign("orgId").references("id").inTable(TableName.Organization).onDelete("CASCADE"); + } + if (doesProjectIdExist) { + t.foreign("projectId").references("id").inTable(TableName.Project).onDelete("CASCADE"); + } + + // remove normalized fields + if (doesProjectNameExist) { + t.dropColumn("projectName"); + } + }); + } +} diff --git a/backend/src/db/migrations/20241002110531_add-audit-log-metadata-index.ts b/backend/src/db/migrations/20241002110531_add-audit-log-metadata-index.ts new file mode 100644 index 0000000000..d07211a90f --- /dev/null +++ b/backend/src/db/migrations/20241002110531_add-audit-log-metadata-index.ts @@ -0,0 +1,15 @@ +import { Knex } from "knex"; + +import { TableName } from "../schemas"; + +export async function up(knex: Knex): Promise<void> { + if (await knex.schema.hasColumn(TableName.AuditLog, "actorMetadata")) { + await knex.raw( + `CREATE INDEX "audit_logs_actorMetadata_idx" ON ${TableName.AuditLog} USING gin("actorMetadata" jsonb_path_ops)` + ); + } +} + +export async function down(knex: Knex): Promise<void> { + await knex.raw(`DROP INDEX IF EXISTS "audit_logs_actorMetadata_idx"`); +} diff --git a/backend/src/db/schemas/audit-logs.ts b/backend/src/db/schemas/audit-logs.ts index b8906698b6..d1c239724c 100644 --- a/backend/src/db/schemas/audit-logs.ts +++ b/backend/src/db/schemas/audit-logs.ts @@ -20,7 +20,8 @@ export const AuditLogsSchema = z.object({ createdAt: z.date(), updatedAt: z.date(), orgId: z.string().uuid().nullable().optional(), - projectId: z.string().nullable().optional() + projectId: z.string().nullable().optional(), + projectName: z.string().nullable().optional() }); export type TAuditLogs = z.infer<typeof AuditLogsSchema>; diff --git a/backend/src/ee/services/audit-log/audit-log-dal.ts b/backend/src/ee/services/audit-log/audit-log-dal.ts index 5e5e6872ba..4398486e1d 100644 --- a/backend/src/ee/services/audit-log/audit-log-dal.ts +++ b/backend/src/ee/services/audit-log/audit-log-dal.ts @@ -1,7 +1,7 @@ import { Knex } from "knex"; import { TDbClient } from
"@app/db"; -import { AuditLogsSchema, TableName } from "@app/db/schemas"; +import { TableName } from "@app/db/schemas"; import { DatabaseError } from "@app/lib/errors"; import { ormify, selectAllTableCols } from "@app/lib/knex"; import { logger } from "@app/lib/logger"; @@ -55,11 +55,10 @@ export const auditLogDALFactory = (db: TDbClient) => { try { // Find statements const sqlQuery = (tx || db.replicaNode())(TableName.AuditLog) - .leftJoin(TableName.Project, `${TableName.AuditLog}.projectId`, `${TableName.Project}.id`) // eslint-disable-next-line func-names .where(function () { if (orgId) { - void this.where(`${TableName.Project}.orgId`, orgId).orWhere(`${TableName.AuditLog}.orgId`, orgId); + void this.where(`${TableName.AuditLog}.orgId`, orgId); } else if (projectId) { void this.where(`${TableName.AuditLog}.projectId`, projectId); } @@ -72,10 +71,6 @@ export const auditLogDALFactory = (db: TDbClient) => { // Select statements void sqlQuery .select(selectAllTableCols(TableName.AuditLog)) - .select( - db.ref("name").withSchema(TableName.Project).as("projectName"), - db.ref("slug").withSchema(TableName.Project).as("projectSlug") - ) .limit(limit) .offset(offset) .orderBy(`${TableName.AuditLog}.createdAt`, "desc"); @@ -111,21 +106,7 @@ export const auditLogDALFactory = (db: TDbClient) => { } const docs = await sqlQuery; - return docs.map((doc) => { - // Our type system refuses to acknowledge that the project name and slug are present in the doc, due to the disjointed query structure above. - // This is a quick and dirty way to get around the types. 
- const projectDoc = doc as unknown as { projectName: string; projectSlug: string }; - - return { - ...AuditLogsSchema.parse(doc), - ...(projectDoc?.projectSlug && { - project: { - name: projectDoc.projectName, - slug: projectDoc.projectSlug - } - }) - }; - }); + return docs; } catch (error) { throw new DatabaseError({ error }); } @@ -148,6 +129,7 @@ export const auditLogDALFactory = (db: TDbClient) => { .where("expiresAt", "<", today) .select("id") .limit(AUDIT_LOG_PRUNE_BATCH_SIZE); + // eslint-disable-next-line no-await-in-loop deletedAuditLogIds = await (tx || db)(TableName.AuditLog) .whereIn("id", findExpiredLogSubQuery) diff --git a/backend/src/lib/config/env.ts b/backend/src/lib/config/env.ts index 06b60f27e0..b047de7558 100644 --- a/backend/src/lib/config/env.ts +++ b/backend/src/lib/config/env.ts @@ -34,6 +34,12 @@ const envSchema = z DB_CONNECTION_URI: zpStr(z.string().describe("Postgres database connection string")).default( `postgresql://${process.env.DB_USER}:${process.env.DB_PASSWORD}@${process.env.DB_HOST}:${process.env.DB_PORT}/${process.env.DB_NAME}` ), + AUDIT_LOGS_DB_CONNECTION_URI: zpStr( + z.string().describe("Postgres database connection string for Audit logs").optional() + ), + AUDIT_LOGS_DB_ROOT_CERT: zpStr( + z.string().describe("Postgres database base64-encoded CA cert for Audit logs").optional() + ), MAX_LEASE_LIMIT: z.coerce.number().default(10000), DB_ROOT_CERT: zpStr(z.string().describe("Postgres database base64-encoded CA cert").optional()), DB_HOST: zpStr(z.string().describe("Postgres database host").optional()), diff --git a/backend/src/main.ts b/backend/src/main.ts index a1c5dfd09c..f71a1fe95a 100644 --- a/backend/src/main.ts +++ b/backend/src/main.ts @@ -1,7 +1,7 @@ import dotenv from "dotenv"; import path from "path"; -import { initDbConnection } from "./db"; +import { initAuditLogDbConnection, initDbConnection } from "./db"; import { keyStoreFactory } from "./keystore/keystore"; import { formatSmtpConfig, initEnvConfig, 
IS_PACKAGED } from "./lib/config/env"; import { isMigrationMode } from "./lib/fn"; @@ -25,6 +25,13 @@ const run = async () => { })) }); + const auditLogDb = appCfg.AUDIT_LOGS_DB_CONNECTION_URI + ? initAuditLogDbConnection({ + dbConnectionUri: appCfg.AUDIT_LOGS_DB_CONNECTION_URI, + dbRootCert: appCfg.AUDIT_LOGS_DB_ROOT_CERT + }) + : undefined; + // Case: App is running in packaged mode (binary), and migration mode is enabled. // Run the migrations and exit the process after completion. if (IS_PACKAGED && isMigrationMode()) { @@ -46,7 +53,7 @@ const run = async () => { const queue = queueServiceFactory(appCfg.REDIS_URL); const keyStore = keyStoreFactory(appCfg.REDIS_URL); - const server = await main({ db, smtp, logger, queue, keyStore }); + const server = await main({ db, auditLogDb, smtp, logger, queue, keyStore }); const bootstrap = await bootstrapCheck({ db }); // eslint-disable-next-line diff --git a/backend/src/server/app.ts b/backend/src/server/app.ts index 8456eed8d8..b768d0db5b 100644 --- a/backend/src/server/app.ts +++ b/backend/src/server/app.ts @@ -30,6 +30,7 @@ import { fastifySwagger } from "./plugins/swagger"; import { registerRoutes } from "./routes"; type TMain = { + auditLogDb?: Knex; db: Knex; smtp: TSmtpService; logger?: Logger; @@ -38,7 +39,7 @@ type TMain = { }; // Run the server! -export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => { +export const main = async ({ db, auditLogDb, smtp, logger, queue, keyStore }: TMain) => { const appCfg = getConfig(); const server = fastify({ logger: appCfg.NODE_ENV === "test" ? 
false : logger, @@ -94,7 +95,7 @@ export const main = async ({ db, smtp, logger, queue, keyStore }: TMain) => { await server.register(maintenanceMode); - await server.register(registerRoutes, { smtp, queue, db, keyStore }); + await server.register(registerRoutes, { smtp, queue, db, auditLogDb, keyStore }); if (appCfg.isProductionMode) { await server.register(registerExternalNextjs, { diff --git a/backend/src/server/routes/index.ts b/backend/src/server/routes/index.ts index addb46b937..30c56faa40 100644 --- a/backend/src/server/routes/index.ts +++ b/backend/src/server/routes/index.ts @@ -213,11 +213,12 @@ import { registerV3Routes } from "./v3"; export const registerRoutes = async ( server: FastifyZodProvider, { + auditLogDb, db, smtp: smtpService, queue: queueService, keyStore - }: { db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory } + }: { auditLogDb?: Knex; db: Knex; smtp: TSmtpService; queue: TQueueServiceFactory; keyStore: TKeyStoreFactory } ) => { const appCfg = getConfig(); if (!appCfg.DISABLE_SECRET_SCANNING) { @@ -282,7 +283,7 @@ export const registerRoutes = async ( const identityOidcAuthDAL = identityOidcAuthDALFactory(db); const identityAzureAuthDAL = identityAzureAuthDALFactory(db); - const auditLogDAL = auditLogDALFactory(db); + const auditLogDAL = auditLogDALFactory(auditLogDb ?? 
db); const auditLogStreamDAL = auditLogStreamDALFactory(db); const trustedIpDAL = trustedIpDALFactory(db); const telemetryDAL = telemetryDALFactory(db); diff --git a/backend/src/server/routes/v1/organization-router.ts b/backend/src/server/routes/v1/organization-router.ts index b113b9f9d2..439950f1cd 100644 --- a/backend/src/server/routes/v1/organization-router.ts +++ b/backend/src/server/routes/v1/organization-router.ts @@ -125,12 +125,6 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { }) .merge( z.object({ - project: z - .object({ - name: z.string(), - slug: z.string() - }) - .optional(), event: z.object({ type: z.string(), metadata: z.any() @@ -168,6 +162,7 @@ export const registerOrgRouter = async (server: FastifyZodProvider) => { actorAuthMethod: req.permission.authMethod, actor: req.permission.type }); + return { auditLogs }; } }); diff --git a/frontend/src/hooks/api/auditLogs/types.tsx b/frontend/src/hooks/api/auditLogs/types.tsx index 764d0b2a53..76eb585177 100644 --- a/frontend/src/hooks/api/auditLogs/types.tsx +++ b/frontend/src/hooks/api/auditLogs/types.tsx @@ -886,8 +886,5 @@ export type AuditLog = { userAgentType: UserAgentType; createdAt: string; updatedAt: string; - project?: { - name: string; - slug: string; - }; + projectName?: string; }; diff --git a/frontend/src/views/Org/AuditLogsPage/components/LogsTableRow.tsx b/frontend/src/views/Org/AuditLogsPage/components/LogsTableRow.tsx index 014301c567..e4620a477e 100644 --- a/frontend/src/views/Org/AuditLogsPage/components/LogsTableRow.tsx +++ b/frontend/src/views/Org/AuditLogsPage/components/LogsTableRow.tsx @@ -573,7 +573,7 @@ export const LogsTableRow = ({ auditLog, isOrgAuditLogs, showActorColumn }: Prop {formatDate(auditLog.createdAt)} {`${eventToNameMap[auditLog.event.type]}`} - {isOrgAuditLogs && {auditLog?.project?.name ?? "N/A"}} + {isOrgAuditLogs && {auditLog?.projectName ?? 
"N/A"}} {showActorColumn && renderActor(auditLog.actor)} {renderSource()} {renderMetadata(auditLog.event)}