Skip to content

Commit

Permalink
Adding backup feature
Browse files Browse the repository at this point in the history
  • Loading branch information
sebt3 committed Jan 20, 2025
1 parent 92e8990 commit 299a1cd
Show file tree
Hide file tree
Showing 58 changed files with 1,420 additions and 222 deletions.
275 changes: 142 additions & 133 deletions Cargo.lock

Large diffs are not rendered by default.

3 changes: 2 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ version = "0.96.0"


[workspace.package]
version = "0.3.2"
version = "0.3.3"
authors = ["Sébastien Huss <sebastien.huss@gmail.com>"]
edition = "2021"
license = "BSD-3-Clause"
Expand Down Expand Up @@ -77,6 +77,7 @@ operator = { cmd=[
"podman build . -f operator/Dockerfile -t docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version) && podman push docker.io/sebt3/vynil-operator:$(cargo run --bin agent -- version)",
]}
box = { cmd=[
"cargo cmd generate_crd",
"cargo run --bin agent -- package update --source ./box/vynil/",
"cargo run --bin agent -- package build -o ./box/vynil/ --tag $(cargo run --bin agent -- version) -r docker.io -n sebt3/vynil -u $(jq -r '.auths[\"docker.io\"].auth' </run/user/$(id -u)/containers/auth.json |base64 -d|awk -F: '{print $1}') -p $(jq -r '.auths[\"docker.io\"].auth' </run/user/$(id -u)/containers/auth.json |base64 -d|awk -F: '{print $2}')",
]}
8 changes: 5 additions & 3 deletions agent/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -27,9 +27,9 @@ RUN touch common/src/lib.rs agent/src/lib.rs agent/src/main.rs \
&& strip target/release/agent
# Then create the intermediary image with run-time dependencies installed
FROM docker.io/debian:${DEBIAN_NEXT}-slim AS middle
ARG HELM_VERSION=v3.10.3
ARG HELM_VERSION=v3.17.0
ARG KUBECTL_VERSION=v1.30.3
ARG TF_VERSION=1.8.1
ARG TF_VERSION=1.9.0
ARG DEB_PACKAGES="git jq curl tar gzip unzip openssl openssh-client ca-certificates postgresql-client mariadb-client-compat restic"
# hadolint ignore=DL3008,DL4006,SC2035
RUN DEBIAN_FRONTEND=noninteractive apt-get update \
Expand Down Expand Up @@ -58,10 +58,12 @@ COPY agent/providers.tf .
RUN tofu init
# Finally assemble everything together
FROM middle AS target
RUN mkdir -p /var/cache/restic /backup /secrets && chown nobody:nogroup /var/cache/restic /backup /secrets
COPY --from=builder /usr/src/agent/target/release/agent /usr/bin/agent
COPY agent/scripts /usr/lib/vynil/scripts
COPY agent/templates /usr/lib/vynil/templates
USER nobody
COPY --from=downloader /src/.terraform/providers /nonexistent/.terraform.d/plugins
WORKDIR /work
ENV SCRIPT_DIRECTORY=/usr/lib/vynil/scripts PACKAGE_DIRECTORY=/package CONFIG_DIR=/etc/vynil
ENV SCRIPT_DIRECTORY=/usr/lib/vynil/scripts TEMPLATE_DIRECTORY=/usr/lib/vynil/templates PACKAGE_DIRECTORY=/package CONFIG_DIR=/etc/vynil XDG_CACHE_HOME=/var/cache/restic
ENTRYPOINT ["agent"]
2 changes: 1 addition & 1 deletion agent/parent.toml
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ version = "0.96.0"


[workspace.package]
version = "0.3.2"
version = "0.3.3"
authors = ["Sébastien Huss <sebastien.huss@gmail.com>"]
edition = "2021"
license = "BSD-3-Clause"
Expand Down
247 changes: 247 additions & 0 deletions agent/scripts/lib/backup_context.rhai
Original file line number Diff line number Diff line change
@@ -0,0 +1,247 @@
// create a run context for a backup/restore pod from the preparation done by the "run" function below
fn from_args(context) {
    // Space-separated resource lists coming from the pod environment;
    // empty entries (from leading/trailing/double spaces) are dropped.
    for pair in [
        ["deployment_list", "DEPLOYMENT_LIST"],
        ["statefulset_list", "STATEFULSET_LIST"],
        ["secret_list", "SECRET_LIST"],
        ["pg_list", "PG_LIST"]
    ] {
        context[pair[0]] = get_env(pair[1]).split(" ").filter(|x| x != "");
    }
    context["restic_tags"] = get_env("RESTIC_TAGS");
    context["snapshot"] = get_env("RESTIC_SNAPSHOT");
    // Snapshot retention limits, falling back to defaults when unset.
    // (NOTE: the "dayly" spelling matches the RESTIC_MAX_DAYLY variable
    // produced by the rest of the tooling; do not "fix" it here alone.)
    for spec in [
        ["max_dayly", "RESTIC_MAX_DAYLY", "7"],
        ["max_weekly", "RESTIC_MAX_WEEKLY", "5"],
        ["max_monthly", "RESTIC_MAX_MONTHLY", "12"],
        ["max_yearly", "RESTIC_MAX_YEARLY", "4"]
    ] {
        let value = get_env(spec[1]);
        context[spec[0]] = if value == "" { spec[2] } else { value };
    }
    context["namespace"] = get_env("NAMESPACE");
    // SCALE_TARGET may be missing or non-numeric: default to one replica.
    try {
        context["scale_target"] = parse_int(get_env("SCALE_TARGET"));
    } catch {
        context["scale_target"] = 1;
    }
    // Normalize both repository path components by stripping leading and
    // trailing slashes (slashes -> spaces, trim, spaces -> slashes).
    let repo_sub = get_env("SUB_PATH");
    repo_sub.replace("/", " ");
    repo_sub.trim();
    repo_sub.replace(" ", "/");
    let repo_base = get_env("BASE_REPO_URL");
    repo_base.replace("/", " ");
    repo_base.trim();
    repo_base.replace(" ", "/");
    context["sub_path"] = repo_sub;
    context["base_path"] = repo_base;
    context["s3_url"] = `s3:${repo_base}/${repo_sub}`;
    // Extra restic command-line flags.
    let extra_args = "";
    if get_env("INSECURE_TLS") == "true" {
        extra_args += " --insecure-tls";
    }
    context["restic_args"] = extra_args;
    context
}

// Build the run-context (volumes, mounts, env vars, schedule, files, ...)
// used to render the backup/restore pod templates for an instance.
// `use_init_from` switches to "restore from an existing backup" mode,
// driven by instance.spec.initFrom overrides.
fn run(instance, context, use_init_from) {
    // TODO: Prepare MongoDB backup
    // TODO: Prepare Redis backup
    // Secret holding the restic/S3 settings, and the repository sub-path
    // under the base repository URL. Both can be overridden by initFrom.
    let secret_name = "backup-settings";
    let sub_path = `${context.instance.namespace}/${context.instance.appslug}`;
    if use_init_from {
        // () is Rhai's unit value: "field not set" in the parsed spec.
        if instance.spec.initFrom.secretName != () {
            secret_name = instance.spec.initFrom.secretName;
        }
        if instance.spec.initFrom.subPath != () {
            sub_path = instance.spec.initFrom.subPath;
        }
    }
    // Scratch area for dumps; PVC vitals get mounted below /backup too.
    context["volumes"] = [#{
        name: "empty-dir",
        emptyDir: #{}
    }];
    context["mounts"] = [#{
        name: "empty-dir",
        mountPath: `/backup`
    }];
    // All restic/S3 credentials are injected from the settings secret.
    context["envs_from"] = [#{
        secretRef: #{
            name: secret_name
        }
    }];
    // Static environment for the backup pod. NAMESPACE and POD_NAME use the
    // downward API so the pod knows where it is running.
    context["envs"] = [#{
        name: "RESTIC_TAGS",
        value: `tenant:${context.tenant.name},namespace:${context.instance.namespace},instance:${context.instance.name},component:${context.instance["package"].name},version:${context.instance.requested}`
    }, #{
        name: "RESTIC_HOST",
        value: context.instance.appslug
    }, #{
        name: "AGENT_IMAGE",
        value: context.agent_image
    }, #{
        // Replica count to scale workloads back to after the backup:
        // 2 for HA namespaces, 1 otherwise.
        name: "SCALE_TARGET",
        value: if context.namespace.ha { "2" } else { "1" }
    }, #{
        name: "VYNIL_NAMESPACE",
        value: context.cluster.vynil_namespace
    }, #{
        name: "INSTANCE",
        value: context.instance.name
    }, #{
        name: "TAG",
        value: context.instance.requested
    }, #{
        name: "SUB_PATH",
        value: sub_path
    }, #{
        name: "APPSLUG",
        value: context.instance.appslug
    }, #{
        name: "NAMESPACE",
        valueFrom: #{
            fieldRef: #{
                apiVersion: "v1",
                fieldPath: "metadata.namespace"
            }
        }
    }, #{
        name: "POD_NAME",
        valueFrom: #{
            fieldRef: #{
                apiVersion: "v1",
                fieldPath: "metadata.name"
            }
        }
    }];
    if use_init_from {
        // Restore mode: pin the restic snapshot to restore from.
        context["envs"] += #{
            name: "RESTIC_SNAPSHOT",
            value: instance.spec.initFrom.snapshot
        };
    }
    // Package-provided hook scripts (restore*/backup*/maintenance*) are
    // collected here and shipped to the pod through a ConfigMap.
    context["files"] = [];
    if is_dir(`${context.package_dir}/scripts`) {
        for f in read_dir(`${context.package_dir}/scripts`) {
            let base = basename(f);
            if base.starts_with("restore") || base.starts_with("backup") || base.starts_with("maintenance") {
                context["files"] += #{
                    name: base,
                    content: file_read(f)
                };
            }
        }
    }
    context["has_files"] = context["files"].len() > 0;
    if context["has_files"] {
        context["volumes"] += #{
            name: "backups-scripts",
            configMap: #{
                // 493 == 0o755: hook scripts must be executable.
                defaultMode: 493,
                name: `${context.instance.appslug}-backups-scripts`
            }
        };
        context["mounts"] += #{
            name: "backups-scripts",
            mountPath: "/package/scripts"
        };
    }
    // Daily cron schedule inside the namespace maintenance window.
    // ("minut" is the field name as declared elsewhere in the project.)
    context["schedule"] = `${context.namespace.maintenance_start_minut} ${context.namespace.maintenance_start_hour} * * *`;
    context["service_account"] = `${context.instance.appslug}-backup`;
    let pgs = [];
    let secrets = [];
    // Walk the instance vitals and wire each one into the backup pod.
    for v in instance.status.vitals {
        // Derive a short local name: drop the appslug from the resource
        // name, then collapse/trim the leftover dashes.
        let name = v.name;
        name.replace(context.instance.appslug,"");
        name.replace("-"," ");
        name.trim();
        name.replace(" ","-");
        if v.kind == "PersistentVolumeClaim" {
            if name.is_empty() {
                name = "data";
            }
            // Mount the PVC under /backup so restic picks it up directly.
            context["volumes"] += #{
                name: name,
                persistentVolumeClaim: #{
                    claimName: v.name
                }
            };
            context["mounts"] += #{
                name: name,
                mountPath: `/backup/${name}`
            };
        } else if v.kind == "Secret" {
            if name.is_empty() {
                name = "secret";
            }
            secrets += name;
            // When restoring, the secret does not exist yet, so it is only
            // mounted in backup mode; the env var below is set either way.
            if ! use_init_from {
                context["volumes"] += #{
                    name: name,
                    secret: #{
                        secretName: v.name
                    }
                };
                context["mounts"] += #{
                    name: name,
                    mountPath: `/secrets/${name}`
                };
            }
            // Maps the short name back to the real Secret name for scripts.
            context["envs"] += #{
                name: `secret_${name}_target`,
                value: v.name
            };
        } else if v.kind == "Cluster" {
            // NOTE(review): "Cluster" vitals appear to be CloudNativePG
            // clusters (uses the <name>-app / <name>-superuser convention)
            // — confirm against the operator's CRDs.
            if name.is_empty() {
                name = "postgres";
            }
            pgs += name;
            // Prefer the superuser credentials when that secret exists.
            let lst = k8s_resource("Secrets", context.instance.namespace).list_meta().items;
            let secret = `${v.name}-app`;
            if lst.filter(|s| s.metadata.name == `${v.name}-superuser`).len() > 0 {
                secret = `${v.name}-superuser`;
            }
            // Expose the connection parameters as <name>_<key> env vars.
            for i in ["host", "username", "password", "dbname"] {
                context["envs"] += #{
                    name: `${name}_${i}`,
                    valueFrom: #{
                        secretKeyRef: #{
                            name: secret,
                            key: i
                        }
                    }
                };
            }
        }
    }
    // Collect the workloads to scale down/up around the backup.
    let deploy = [];
    let sts = [];
    if instance.status.scalables != () {
        for s in instance.status.scalables {
            if s.kind == "Deployment" {
                deploy += s.name;
            } else if s.kind == "StatefulSet" {
                sts += s.name;
            }
        }
    }
    // Publish the collected lists as space-separated env vars; reduce's
    // accumulator starts as () on the first element, hence the type check.
    if deploy.len() > 0 {
        context["envs"] += #{
            name: `DEPLOYMENT_LIST`,
            value: deploy.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    if sts.len() > 0 {
        context["envs"] += #{
            name: `STATEFULSET_LIST`,
            value: sts.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    if pgs.len() > 0 {
        context["envs"] += #{
            name: `PG_LIST`,
            value: pgs.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    if secrets.len() > 0 {
        context["envs"] += #{
            name: `SECRET_LIST`,
            value: secrets.reduce(|sum, v| if sum.type_of() == "()" { v } else { `${sum} ${v}` })
        };
    }
    context
}
2 changes: 1 addition & 1 deletion agent/scripts/lib/build_context.rhai
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,6 @@ fn run(instance, args) {
},
values: get_values(instance.spec.options, defaults),
defaults: defaults,
package_dir: args.package_dir
package_dir: args.package_dir,
}
}
16 changes: 11 additions & 5 deletions agent/scripts/lib/install_from_dir.rhai
Original file line number Diff line number Diff line change
Expand Up @@ -34,11 +34,17 @@ fn get_objects(context, dir) {
hbs.register_helper_dir(`${context.package_dir}/handlebars/helpers`);
let ret = [];
for file in read_dir(dir) {
let objects = if file.ends_with(".yaml") || file.ends_with(".yml") {
yaml_decode_multi(file_read(file))
} else if file.ends_with(".yaml.hbs") || file.ends_with(".yml.hbs") {
yaml_decode_multi(hbs.render_from(file_read(file), context))
} else {[]};
let objects = [];
try {
objects = if file.ends_with(".yaml") || file.ends_with(".yml") {
yaml_decode_multi(file_read(file))
} else if file.ends_with(".yaml.hbs") || file.ends_with(".yml.hbs") {
yaml_decode_multi(hbs.render_from(file_read(file), context))
} else {[]};
} catch(e) {
log_error(`While parsing ${file} :`);
throw e;
}
for obj in objects.filter(|obj| type_of(obj) != "map" || ! obj.keys().contains("kind") || ! obj.keys().contains("metadata") || type_of(obj.metadata) != "map") {
log_warn(`No kind for an object in file ${file}`);
log_debug(yaml_encode(obj));
Expand Down
10 changes: 10 additions & 0 deletions agent/scripts/lib/wait.rhai
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,16 @@ fn vital(lst, duration) {
log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`);
let sts = get_statefulset(v.namespace, v.name);
sts.wait_available(duration);
} else if ["BucketClaim"].contains(v.kind) {
log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`);
let api = k8s_resource(v.kind, v.namespace);
let obj = api.get_obj(v.name);
obj.wait_status("bucketReady", duration);
} else if ["BucketAccess"].contains(v.kind) {
log_info(`Waiting for ${v.kind} ${v.namespace}/${v.name} to be available`);
let api = k8s_resource(v.kind, v.namespace);
let obj = api.get_obj(v.name);
obj.wait_status("accessGranted", duration);
}
}
}
Expand Down
2 changes: 2 additions & 0 deletions agent/scripts/packages/build.rhai
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@ fn build(args) {
copy::dir_exts(item, args.temp+"/"+base, [".hbs", ".yaml", ".yml"]);
} else if base in ["vitals", "scalables", "others"] && is_dir(item) && valid.metadata.type == "tenant" {
copy::dir_exts(item, args.temp+"/"+base, [".hbs", ".yaml", ".yml"]);
} else if base == "pods" && is_dir(item) && valid.metadata.type == "tenant" {
copy::dir_exts(item, args.temp+"/"+base, [".hbs"]);
} else if base == "handlebars" && is_dir(item) {
for sub in read_dir(item) {
let subbase = basename(sub);
Expand Down
Loading

0 comments on commit 299a1cd

Please sign in to comment.