feat: add support to import custom ldif (#1002)
* refactor: remove unused initialization check

* feat(ldap): add support for importing custom LDIF files

* feat(sql): add support for importing custom LDIF files

* feat(spanner): add support for importing custom LDIF files

* refactor: put generated ldif template under /app/tmp

* feat(couchbase): add support for importing custom LDIF files

* docs: add custom ldif instructions

Co-authored-by: Mohammad Abudayyeh <47318409+moabu@users.noreply.github.com>
iromli and moabu authored Mar 11, 2022
1 parent a78ee1a commit 0b6334a
Showing 6 changed files with 267 additions and 115 deletions.
8 changes: 6 additions & 2 deletions docker-jans-persistence-loader/Dockerfile
@@ -177,23 +177,27 @@ LABEL name="Persistence" \
summary="Janssen Authorization Server Persistence loader" \
description="Generate initial data for persistence layer"

RUN mkdir -p /app/tmp /etc/certs /etc/jans/conf
RUN mkdir -p /app/tmp /app/custom_ldif /etc/certs /etc/jans/conf

COPY scripts /app/scripts
# this overrides existing templates
COPY templates /app/templates
RUN chmod +x /app/scripts/entrypoint.sh

# # create non-root user
# create non-root user
RUN adduser -s /bin/sh -D -G root -u 1000 1000

# adjust ownership
RUN chown -R 1000:1000 /tmp \
&& chown -R 1000:1000 /app/tmp/ \
&& chown -R 1000:1000 /app/custom_ldif/ \
&& chgrp -R 0 /tmp && chmod -R g=u /tmp \
&& chgrp -R 0 /app/tmp && chmod -R g=u /app/tmp \
&& chgrp -R 0 /app/custom_ldif && chmod -R g=u /app/custom_ldif \
&& chgrp -R 0 /etc/certs && chmod -R g=u /etc/certs \
&& chgrp -R 0 /etc/jans && chmod -R g=u /etc/jans

USER 1000

ENTRYPOINT ["tini", "-g", "--"]
CMD ["sh", "/app/scripts/entrypoint.sh"]
110 changes: 74 additions & 36 deletions docker-jans-persistence-loader/scripts/couchbase_setup.py
@@ -4,6 +4,7 @@
import logging.config
import os
import time
from pathlib import Path

from ldif import LDIFParser

@@ -29,32 +30,32 @@ def get_bucket_mappings(manager):
"default": {
"bucket": prefix,
"mem_alloc": 100,
"document_key_prefix": [],
# "document_key_prefix": [],
},
"user": {
"bucket": f"{prefix}_user",
"mem_alloc": 300,
"document_key_prefix": ["groups_", "people_", "authorizations_"],
# "document_key_prefix": ["groups_", "people_", "authorizations_"],
},
"site": {
"bucket": f"{prefix}_site",
"mem_alloc": 100,
"document_key_prefix": ["site_", "cache-refresh_"],
# "document_key_prefix": ["site_", "cache-refresh_"],
},
"token": {
"bucket": f"{prefix}_token",
"mem_alloc": 300,
"document_key_prefix": ["tokens_"],
# "document_key_prefix": ["tokens_"],
},
"cache": {
"bucket": f"{prefix}_cache",
"mem_alloc": 100,
"document_key_prefix": ["cache_"],
# "document_key_prefix": ["cache_"],
},
"session": {
"bucket": f"{prefix}_session",
"mem_alloc": 200,
"document_key_prefix": [],
# "document_key_prefix": [],
},
}

@@ -205,6 +206,7 @@ def __init__(self, manager):
self.client = CouchbaseClient(hostname, user, password)
self.manager = manager
self.index_num_replica = 0
self.attr_processor = AttrProcessor()

def create_buckets(self, bucket_mappings, bucket_type="couchbase"):
sys_info = self.client.get_system_info()
@@ -326,37 +328,10 @@ def create_indexes(self, bucket_mappings):
continue
logger.warning("Failed to execute query, reason={}".format(error["msg"]))

def import_ldif(self, bucket_mappings):
ctx = prepare_template_ctx(self.manager)
attr_processor = AttrProcessor()

def import_builtin_ldif(self, bucket_mappings, ctx):
for _, mapping in bucket_mappings.items():
for file_ in mapping["files"]:
logger.info(f"Importing {file_} file")
src = f"/app/templates/{file_}"
dst = f"/app/tmp/{file_}"
os.makedirs(os.path.dirname(dst), exist_ok=True)

render_ldif(src, dst, ctx)

with open(dst, "rb") as fd:
parser = LDIFParser(fd)

for dn, entry in parser.parse():
if len(entry) <= 2:
continue

key = id_from_dn(dn)
entry["dn"] = [dn]
entry = transform_entry(entry, attr_processor)
data = json.dumps(entry)

# using INSERT will cause duplication error, but the data is left intact
query = 'INSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (mapping["bucket"], key, data)
req = self.client.exec_query(query)

if not req.ok:
logger.warning("Failed to execute query, reason={}".format(req.json()))
self._import_ldif(f"/app/templates/{file_}", ctx)

def initialize(self):
num_replica = int(os.environ.get("CN_COUCHBASE_INDEX_NUM_REPLICA", 0))
@@ -376,7 +351,9 @@ def initialize(self):
self.create_indexes(bucket_mappings)

time.sleep(5)
self.import_ldif(bucket_mappings)
ctx = prepare_template_ctx(self.manager)
self.import_builtin_ldif(bucket_mappings, ctx)
self.import_custom_ldif(ctx)

time.sleep(5)
self.create_couchbase_shib_user()
@@ -388,3 +365,64 @@ def create_couchbase_shib_user(self):
'Shibboleth IDP',
'query_select[*]',
)

def import_custom_ldif(self, ctx):
custom_dir = Path("/app/custom_ldif")

for file_ in custom_dir.rglob("*.ldif"):
self._import_ldif(file_, ctx)

def _import_ldif(self, path, ctx):
src = Path(path).resolve()

# the generated template will be saved under the ``/app/tmp`` directory
# examples:
# - ``/app/templates/groups.ldif`` will be saved as ``/app/tmp/templates/groups.ldif``
# - ``/app/custom_ldif/groups.ldif`` will be saved as ``/app/tmp/custom_ldif/groups.ldif``
dst = Path("/app/tmp").joinpath(str(src).removeprefix("/app/")).resolve()

# ensure the directory for the generated template exists
dst.parent.mkdir(parents=True, exist_ok=True)

logger.info(f"Importing {src} file")
render_ldif(src, dst, ctx)

with open(dst, "rb") as fd:
parser = LDIFParser(fd)

for dn, entry in parser.parse():
if len(entry) <= 2:
continue

key = id_from_dn(dn)
bucket = get_bucket_for_key(key)
entry["dn"] = [dn]
entry = transform_entry(entry, self.attr_processor)
data = json.dumps(entry)

# TODO: get the bucket based on key prefix
# using INSERT on an existing key will cause a duplication error, but the existing data is left intact
query = 'INSERT INTO `%s` (KEY, VALUE) VALUES ("%s", %s)' % (bucket, key, data)
req = self.client.exec_query(query)

if not req.ok:
logger.warning("Failed to execute query, reason={}".format(req.json()))


def get_bucket_for_key(key):
bucket_prefix = os.environ.get("CN_COUCHBASE_BUCKET_PREFIX", "jans")

cursor = key.find("_")
key_prefix = key[:cursor + 1]

if key_prefix in ("groups_", "people_", "authorizations_"):
bucket = f"{bucket_prefix}_user"
elif key_prefix in ("site_", "cache-refresh_"):
bucket = f"{bucket_prefix}_site"
elif key_prefix in ("tokens_",):
bucket = f"{bucket_prefix}_token"
elif key_prefix in ("cache_",):
bucket = f"{bucket_prefix}_cache"
else:
bucket = bucket_prefix
return bucket
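
The new module-level get_bucket_for_key helper routes each document key to a bucket by the prefix before its first underscore, falling back to the prefix-only bucket when no known prefix matches. A quick illustration of the expected routing, assuming the function above is in scope and CN_COUCHBASE_BUCKET_PREFIX is left at its default of "jans" (the sample keys are made up):

# Illustration only: expected key -> bucket routing with the default prefix.
samples = {
    "people_1234": "jans_user",       # people_ goes to the user bucket
    "groups_60B7": "jans_user",       # groups_ goes to the user bucket
    "site_ab12": "jans_site",         # site_ goes to the site bucket
    "tokens_cd34": "jans_token",      # tokens_ goes to the token bucket
    "cache_ef56": "jans_cache",       # cache_ goes to the cache bucket
    "configuration": "jans",          # no known prefix: default bucket
}

for key, expected in samples.items():
    assert get_bucket_for_key(key) == expected, key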
86 changes: 38 additions & 48 deletions docker-jans-persistence-loader/scripts/ldap_setup.py
@@ -2,13 +2,13 @@
import logging.config
import os
import time
from pathlib import Path

from ldap3.core.exceptions import LDAPSessionTerminatedByServerError
from ldap3.core.exceptions import LDAPSocketOpenError

from ldif import LDIFParser

from jans.pycloudlib.utils import as_boolean
from jans.pycloudlib.persistence.ldap import LdapClient

from settings import LOGGING_CONFIG
@@ -54,7 +54,7 @@ def check_indexes(self, mapping):
"retrying in {} seconds".format(reason, sleep_duration))
time.sleep(sleep_duration)

def import_ldif(self):
def import_builtin_ldif(self, ctx):
optional_scopes = json.loads(self.manager.config.get("optional_scopes", "[]"))
ldif_mappings = get_ldif_mappings(optional_scopes)

@@ -65,31 +65,19 @@ def import_builtin_ldif(self, ctx):
mapping = ldap_mapping
ldif_mappings = {mapping: ldif_mappings[mapping]}

# # these mappings require `base.ldif`
# these mappings require `base.ldif`
# opt_mappings = ("user", "token",)

# `user` mapping requires `o=gluu` which available in `base.ldif`
# `user` mapping requires `o=jans` which is available in `base.ldif`
# if mapping in opt_mappings and "base.ldif" not in ldif_mappings[mapping]:
if "base.ldif" not in ldif_mappings[mapping]:
ldif_mappings[mapping].insert(0, "base.ldif")

ctx = prepare_template_ctx(self.manager)

for mapping, files in ldif_mappings.items():
self.check_indexes(mapping)

for file_ in files:
logger.info(f"Importing {file_} file")
src = f"/app/templates/{file_}"
dst = f"/app/tmp/{file_}"
os.makedirs(os.path.dirname(dst), exist_ok=True)

render_ldif(src, dst, ctx)

with open(dst, "rb") as fd:
parser = LDIFParser(fd)
for dn, entry in parser.parse():
self.add_entry(dn, entry)
self._import_ldif(f"/app/templates/{file_}", ctx)

def add_entry(self, dn, attrs):
max_wait_time = 300
@@ -106,34 +94,36 @@ def add_entry(self, dn, attrs):
time.sleep(sleep_duration)

def initialize(self):
def is_initialized():
persistence_type = os.environ.get("CN_PERSISTENCE_TYPE", "ldap")
ldap_mapping = os.environ.get("CN_PERSISTENCE_LDAP_MAPPING", "default")

# a minimum service stack is having oxTrust, hence check whether entry
# for oxTrust exists in LDAP
default_search = ("ou=jans-auth,ou=configuration,o=jans",
"(objectClass=jansAppConf)")

if persistence_type == "hybrid":
# `cache` and `token` mapping only have base entries
search_mapping = {
"default": default_search,
"user": ("inum=60B7,ou=groups,o=jans", "(objectClass=jansGrp)"),
"site": ("ou=cache-refresh,o=site", "(ou=people)"),
"cache": ("o=jans", "(ou=cache)"),
"token": ("ou=tokens,o=jans", "(ou=tokens)"),
"session": ("ou=sessions,o=jans", "(ou=sessions)"),
}
search = search_mapping[ldap_mapping]
else:
search = default_search
return self.client.search(search[0], search[1], attributes=["objectClass"], limit=1)

should_skip = as_boolean(
os.environ.get("CN_PERSISTENCE_SKIP_INITIALIZED", False),
)
if should_skip and is_initialized():
logger.info("LDAP backend already initialized")
return
self.import_ldif()
ctx = prepare_template_ctx(self.manager)

logger.info("Importing builtin LDIF files")
self.import_builtin_ldif(ctx)

logger.info("Importing custom LDIF files (if any)")
self.import_custom_ldif(ctx)

def import_custom_ldif(self, ctx):
custom_dir = Path("/app/custom_ldif")

for file_ in custom_dir.rglob("*.ldif"):
self._import_ldif(file_, ctx)

def _import_ldif(self, path, ctx):
src = Path(path).resolve()

# the generated template will be saved under the ``/app/tmp`` directory
# examples:
# - ``/app/templates/groups.ldif`` will be saved as ``/app/tmp/templates/groups.ldif``
# - ``/app/custom_ldif/groups.ldif`` will be saved as ``/app/tmp/custom_ldif/groups.ldif``
dst = Path("/app/tmp").joinpath(str(src).removeprefix("/app/")).resolve()

# ensure the directory for the generated template exists
dst.parent.mkdir(parents=True, exist_ok=True)

logger.info(f"Importing {src} file")
render_ldif(src, dst, ctx)

with open(dst, "rb") as fd:
parser = LDIFParser(fd)
for dn, entry in parser.parse():
self.add_entry(dn, entry)
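
Both setup scripts now share the same destination convention in _import_ldif: the rendered copy of a source LDIF is written under /app/tmp, mirroring the source path with its /app/ prefix stripped. A small sketch of that mapping (omitting the resolve() call for brevity), reproducing the two examples from the comments above:

from pathlib import Path

def rendered_path(src: str) -> Path:
    # Mirror of the dst computation in _import_ldif: strip the /app/ prefix
    # and re-root the remainder under /app/tmp.
    return Path("/app/tmp").joinpath(str(Path(src)).removeprefix("/app/"))

assert rendered_path("/app/templates/groups.ldif") == Path("/app/tmp/templates/groups.ldif")
assert rendered_path("/app/custom_ldif/groups.ldif") == Path("/app/tmp/custom_ldif/groups.ldif")

Note that str.removeprefix requires Python 3.9 or newer.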