fix: initial creation of child logging #533

Merged 15 commits on Jul 8, 2024
5 changes: 3 additions & 2 deletions package.json
@@ -16,7 +16,7 @@
"name": "UDS Core",
"uuid": "uds-core",
"onError": "reject",
"logLevel": "debug",
"logLevel": "info",
"alwaysIgnore": {
"namespaces": [
"uds-dev-stack",
@@ -27,7 +27,8 @@
"env": {
"UDS_DOMAIN": "###ZARF_VAR_DOMAIN###",
"UDS_ALLOW_ALL_NS_EXEMPTIONS": "###ZARF_VAR_ALLOW_ALL_NS_EXEMPTIONS###",
"UDS_SINGLE_TEST": "###ZARF_VAR_UDS_SINGLE_TEST###"
"UDS_SINGLE_TEST": "###ZARF_VAR_UDS_SINGLE_TEST###",
"UDS_LOG_LEVEL": "###ZARF_VAR_UDS_LOG_LEVEL###"
}
},
"scripts": {
8 changes: 5 additions & 3 deletions src/pepr/config.ts
@@ -1,4 +1,4 @@
import { Log } from "pepr";
import { Component, setupLogger } from "./logger";

let domain = process.env.UDS_DOMAIN;

@@ -16,10 +16,12 @@ export const UDSConfig = {
allowAllNSExemptions: process.env.UDS_ALLOW_ALL_NS_EXEMPTIONS === "true",
};

Log.info(UDSConfig, "Loaded UDS Config");
// configure subproject logger
const log = setupLogger(Component.CONFIG);
log.info(UDSConfig, "Loaded UDS Config");

if (UDSConfig.isSingleTest) {
Log.warn(
log.warn(
"Running in single test mode, this will change the behavior of the operator and should only be used for UDS Core development testing.",
);
}
21 changes: 10 additions & 11 deletions src/pepr/istio/index.ts
@@ -1,5 +1,9 @@
import { Exec, KubeConfig } from "@kubernetes/client-node";
import { Capability, Log, a } from "pepr";
import { Capability, a } from "pepr";
import { Component, setupLogger } from "../logger";

// configure subproject logger
const log = setupLogger(Component.ISTIO);

export const istio = new Capability({
name: "istio",
@@ -20,13 +24,8 @@ When(a.Pod)
.WithLabel("batch.kubernetes.io/job-name")
.WithLabel("service.istio.io/canonical-name")
.Watch(async pod => {
Log.info(
pod,
`Processing Pod ${pod.metadata?.namespace}/${pod.metadata?.name} for istio job termination`,
);

if (!pod.metadata?.name || !pod.metadata.namespace) {
Log.error(pod, `Invalid Pod definition`);
log.error(pod, `Invalid Pod definition`);
return;
}

@@ -42,7 +41,7 @@ When(a.Pod)
if (pod.status?.phase == "Running") {
// Check all container statuses
if (!pod.status.containerStatuses) {
Log.error(pod, `Invalid container status in Pod`);
log.error(pod, `Invalid container status in Pod`);
return;
}
const shouldTerminate = pod.status.containerStatuses
@@ -55,7 +54,7 @@
// Mark the pod as seen
inProgress.add(key);

Log.info(`Attempting to terminate sidecar for ${key}`);
log.info(`Attempting to terminate sidecar for ${key}`);
try {
const kc = new KubeConfig();
kc.loadFromDefault();
@@ -72,9 +71,9 @@
true,
);

Log.info(`Terminated sidecar for ${key}`);
log.info(`Terminated sidecar for ${key}`);
} catch (err) {
Log.error({ err }, `Failed to terminate the sidecar for ${key}`);
log.error({ err }, `Failed to terminate the sidecar for ${key}`);

// Remove the pod from the seen list
inProgress.delete(key);
31 changes: 31 additions & 0 deletions src/pepr/logger.ts
@@ -0,0 +1,31 @@
import { Log } from "pepr";

export enum Component {
CONFIG = "config",
ISTIO = "istio",
OPERATOR_EXEMPTIONS = "operator.exemptions",
OPERATOR_ISTIO = "operator.istio",
OPERATOR_KEYCLOAK = "operator.keycloak",
OPERATOR_MONITORING = "operator.monitoring",
OPERATOR_NETWORK = "operator.network",
OPERATOR_GENERATORS = "operator.generators",
OPERATOR_CRD = "operator.crd",
OPERATOR_RECONCILERS = "operator.reconcilers",
POLICIES = "policies",
POLICIES_EXEMPTIONS = "policies.exemptions",
PROMETHEUS = "prometheus",
}

export function setupLogger(component: Component) {
const setupLogger = Log.child({ component });

// Handle commands that do not template the env vars
let logLevel = process.env.UDS_LOG_LEVEL;
if (!logLevel || logLevel === "###ZARF_VAR_UDS_LOG_LEVEL###") {
logLevel = "debug";
}

setupLogger.level = logLevel;

return setupLogger;
}
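
For reference, every subproject touched by this PR consumes the new helper the same way: the module creates one child logger at import time and uses it in place of the global `Log`, so each line carries a `component` field. The fallback to `debug` when `UDS_LOG_LEVEL` is unset or still the raw `###ZARF_VAR_UDS_LOG_LEVEL###` placeholder presumably keeps commands that skip Zarf templating (local dev runs) verbose. A minimal usage sketch, with an illustrative component and messages:

import { Component, setupLogger } from "./logger"; // path is relative to src/pepr

// one child logger per module, scoped by component
const log = setupLogger(Component.ISTIO);

log.info("istio capability registered");
log.warn({ pod: "example" }, "processing pod"); // (object, message) call form, as used elsewhere in this PR
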
15 changes: 9 additions & 6 deletions src/pepr/operator/controllers/exemptions/exemption-store.ts
@@ -1,7 +1,10 @@
import { Log } from "pepr";
import { Component, setupLogger } from "../../../logger";
import { StoredMatcher } from "../../../policies";
import { Matcher, Policy, UDSExemption } from "../../crd";

// configure subproject logger
const log = setupLogger(Component.OPERATOR_EXEMPTIONS);

export type PolicyOwnerMap = Map<string, UDSExemption>;
export type PolicyMap = Map<Policy, StoredMatcher[]>;
let policyExemptionMap: PolicyMap;
@@ -34,7 +37,7 @@ function addMatcher(matcher: Matcher, p: Policy, owner: string = ""): void {
}

// Iterate through each exemption block of CR and add matchers to PolicyMap
function add(exemption: UDSExemption, log: boolean = true) {
function add(exemption: UDSExemption, logger: boolean = true) {
// Remove any existing exemption for this owner, in case of WatchPhase.Modified
remove(exemption);
const owner = exemption.metadata?.uid || "";
@@ -45,8 +48,8 @@ function add(exemption: UDSExemption, log: boolean = true) {
for (const p of policies) {
// Append the matcher to the list of stored matchers for this policy
addMatcher(e.matcher, p, owner);
if (log) {
Log.debug(`Added exemption to ${p}: ${JSON.stringify(e.matcher)}`);
if (logger) {
log.debug(`Added exemption to ${p}: ${JSON.stringify(e.matcher)}`);
}
}
}
@@ -68,9 +71,9 @@ function remove(exemption: UDSExemption) {
}
}
policyOwnerMap.delete(owner);
Log.debug(`Removed all policy exemptions for ${owner}`);
log.debug(`Removed all policy exemptions for ${owner}`);
} else {
Log.debug(`No existing exemption for owner ${owner}`);
log.debug(`No existing exemption for owner ${owner}`);
}
}
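
(Side note on the hunks above: the boolean parameter of add() is renamed from log to logger because this module now declares a top-level log via setupLogger; keeping the old name would shadow that logger inside the function.)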

8 changes: 6 additions & 2 deletions src/pepr/operator/controllers/istio/injection.ts
@@ -1,7 +1,11 @@
import { K8s, Log, kind } from "pepr";
import { K8s, kind } from "pepr";

import { Component, setupLogger } from "../../../logger";
import { UDSPackage } from "../../crd";

// configure subproject logger
const log = setupLogger(Component.OPERATOR_ISTIO);

const injectionLabel = "istio-injection";
const injectionAnnotation = "uds.dev/original-istio-injection";

@@ -143,7 +147,7 @@ async function killPods(ns: string, enableInjection: boolean) {
}

for (const pod of group) {
Log.info(`Deleting pod ${ns}/${pod.metadata?.name} to enable the istio sidecar`);
log.info(`Deleting pod ${ns}/${pod.metadata?.name} to enable the istio sidecar`);
await K8s(kind.Pod).Delete(pod);
}
}
18 changes: 11 additions & 7 deletions src/pepr/operator/controllers/istio/istio-resources.ts
@@ -1,9 +1,13 @@
import { K8s, Log } from "pepr";
import { K8s } from "pepr";

import { IstioVirtualService, IstioServiceEntry, UDSPackage } from "../../crd";
import { Component, setupLogger } from "../../../logger";
import { IstioServiceEntry, IstioVirtualService, UDSPackage } from "../../crd";
import { getOwnerRef } from "../utils";
import { generateVirtualService } from "./virtual-service";
import { generateServiceEntry } from "./service-entry";
import { generateVirtualService } from "./virtual-service";

// configure subproject logger
const log = setupLogger(Component.OPERATOR_ISTIO);

/**
* Creates a VirtualService and ServiceEntry for each exposed service in the package
@@ -30,7 +34,7 @@ export async function istioResources(pkg: UDSPackage, namespace: string) {
// Generate a VirtualService for this `expose` entry
const vsPayload = generateVirtualService(expose, namespace, pkgName, generation, ownerRefs);

Log.debug(vsPayload, `Applying VirtualService ${vsPayload.metadata?.name}`);
log.debug(vsPayload, `Applying VirtualService ${vsPayload.metadata?.name}`);

// Apply the VirtualService and force overwrite any existing policy
await K8s(IstioVirtualService).Apply(vsPayload, { force: true });
@@ -45,7 +49,7 @@ export async function istioResources(pkg: UDSPackage, namespace: string) {
continue;
}

Log.debug(sePayload, `Applying ServiceEntry ${sePayload.metadata?.name}`);
log.debug(sePayload, `Applying ServiceEntry ${sePayload.metadata?.name}`);

// Apply the ServiceEntry and force overwrite any existing policy
await K8s(IstioServiceEntry).Apply(sePayload, { force: true });
@@ -66,7 +70,7 @@ export async function istioResources(pkg: UDSPackage, namespace: string) {

// Delete any orphaned VirtualServices
for (const vs of orphanedVS) {
Log.debug(vs, `Deleting orphaned VirtualService ${vs.metadata!.name}`);
log.debug(vs, `Deleting orphaned VirtualService ${vs.metadata!.name}`);
await K8s(IstioVirtualService).Delete(vs);
}

@@ -83,7 +87,7 @@ export async function istioResources(pkg: UDSPackage, namespace: string) {

// Delete any orphaned ServiceEntries
for (const se of orphanedSE) {
Log.debug(se, `Deleting orphaned ServiceEntry ${se.metadata!.name}`);
log.debug(se, `Deleting orphaned ServiceEntry ${se.metadata!.name}`);
await K8s(IstioServiceEntry).Delete(se);
}

28 changes: 17 additions & 11 deletions src/pepr/operator/controllers/keycloak/client-sync.ts
@@ -1,6 +1,7 @@
import { K8s, Log, fetch, kind } from "pepr";
import { fetch, K8s, kind } from "pepr";

import { UDSConfig } from "../../../config";
import { Component, setupLogger } from "../../../logger";
import { Store } from "../../common";
import { Sso, UDSPackage } from "../../crd";
import { getOwnerRef } from "../utils";
@@ -32,6 +33,9 @@ const x509CertRegex = new RegExp(
/<[^>]*:X509Certificate[^>]*>((.|[\n\r])*)<\/[^>]*:X509Certificate>/,
);

// configure subproject logger
const log = setupLogger(Component.OPERATOR_KEYCLOAK);

/**
* Create or update the Keycloak clients for the package
*
@@ -72,7 +76,7 @@ export async function purgeSSOClients(pkg: UDSPackage, refs: string[] = []) {
Store.removeItem(ref);
await apiCall({ clientId }, "DELETE", token);
} else {
Log.warn(pkg.metadata, `Failed to remove client ${clientId}, token not found`);
log.warn(pkg.metadata, `Failed to remove client ${clientId}, token not found`);
}
}
}
@@ -82,7 +86,8 @@ async function syncClient(
pkg: UDSPackage,
isRetry = false,
) {
Log.debug(pkg.metadata, `Processing client request: ${clientReq.clientId}`);
log.debug(pkg.metadata, `Processing client request: ${clientReq.clientId}`);

// Not including the CR data in the ref because Keycloak client IDs must be unique already
const name = `sso-client-${clientReq.clientId}`;
let client: Client;
@@ -94,10 +99,10 @@
try {
// If an existing client is found, use the token to update the client
if (token && !isRetry) {
Log.debug(pkg.metadata, `Found existing token for ${clientReq.clientId}`);
log.debug(pkg.metadata, `Found existing token for ${clientReq.clientId}`);
client = await apiCall(clientReq, "PUT", token);
} else {
Log.debug(pkg.metadata, `Creating new client for ${clientReq.clientId}`);
log.debug(pkg.metadata, `Creating new client for ${clientReq.clientId}`);
client = await apiCall(clientReq);
}
} catch (err) {
@@ -107,12 +112,12 @@

// Throw the error if this is the retry or was an initial client creation attempt
if (isRetry || !token) {
Log.error(`${msg}, retry failed.`);
log.error(`${msg}, retry failed.`);
// Throw the original error captured from the first attempt
throw new Error(msg);
} else {
// Retry the request without the token in case we have a bad token stored
Log.error(msg);
log.error(msg);

try {
return await syncClient(clientReq, pkg, true);
Expand All @@ -121,7 +126,7 @@ async function syncClient(
const retryMsg =
`Retry of Keycloak request failed for client '${clientReq.clientId}', package ` +
`${pkg.metadata?.namespace}/${pkg.metadata?.name}. Error: ${retryErr.message}`;
Log.error(retryMsg);
log.error(retryMsg);
// Throw the error from the original attempt since our retry without token failed
throw new Error(msg);
}
@@ -154,6 +159,7 @@ async function syncClient(
labels: {
"uds/package": pkg.metadata!.name,
},

// Use the CR as the owner ref for each VirtualService
ownerReferences: getOwnerRef(pkg),
},
@@ -185,7 +191,7 @@ export function handleClientGroups(clientReq: Sso) {
async function apiCall(sso: Partial<Sso>, method = "POST", authToken = "") {
// Handle single test mode
if (UDSConfig.isSingleTest) {
Log.warn(`Generating fake client for '${sso.clientId}' in single test mode`);
log.warn(`Generating fake client for '${sso.clientId}' in single test mode`);
return {
...sso,
secret: sso.secret || "fake-secret",
@@ -231,14 +237,14 @@ async function apiCall(sso: Partial<Sso>, method = "POST", authToken = "") {

export function generateSecretData(client: Client, secretTemplate?: { [key: string]: string }) {
if (secretTemplate) {
Log.debug(`Using secret template for client: ${client.clientId}`);
log.debug(`Using secret template for client: ${client.clientId}`);
// Iterate over the secret template entry and process each value
return templateData(secretTemplate, client);
}

const stringMap: Record<string, string> = {};

Log.debug(`Using client data for secret: ${client.clientId}`);
log.debug(`Using client data for secret: ${client.clientId}`);

// iterate over the client object and convert all values to strings
for (const [key, value] of Object.entries(client)) {
14 changes: 9 additions & 5 deletions src/pepr/operator/controllers/monitoring/service-monitor.ts
@@ -1,9 +1,13 @@
import { K8s, Log } from "pepr";
import { K8s } from "pepr";

import { V1OwnerReference } from "@kubernetes/client-node";
import { Prometheus, UDSPackage, Monitor } from "../../crd";
import { Component, setupLogger } from "../../../logger";
import { Monitor, Prometheus, UDSPackage } from "../../crd";
import { getOwnerRef, sanitizeResourceName } from "../utils";

// configure subproject logger
const log = setupLogger(Component.OPERATOR_MONITORING);

/**
* Generate a service monitor for a service
*
@@ -15,7 +19,7 @@ export async function serviceMonitor(pkg: UDSPackage, namespace: string) {
const generation = (pkg.metadata?.generation ?? 0).toString();
const ownerRefs = getOwnerRef(pkg);

Log.debug(`Reconciling ServiceMonitors for ${pkgName}`);
log.debug(`Reconciling ServiceMonitors for ${pkgName}`);

// Get the list of monitored services
const monitorList = pkg.spec?.monitor ?? [];
@@ -27,7 +31,7 @@
for (const monitor of monitorList) {
const payload = generateServiceMonitor(monitor, namespace, pkgName, generation, ownerRefs);

Log.debug(payload, `Applying ServiceMonitor ${payload.metadata?.name}`);
log.debug(payload, `Applying ServiceMonitor ${payload.metadata?.name}`);

// Apply the ServiceMonitor and force overwrite any existing policy
await K8s(Prometheus.ServiceMonitor).Apply(payload, { force: true });
@@ -48,7 +52,7 @@

// Delete any orphaned ServiceMonitors
for (const sm of orphanedSM) {
Log.debug(sm, `Deleting orphaned ServiceMonitor ${sm.metadata!.name}`);
log.debug(sm, `Deleting orphaned ServiceMonitor ${sm.metadata!.name}`);
await K8s(Prometheus.ServiceMonitor).Delete(sm);
}
} catch (err) {