diff --git a/geonode/br/management/commands/backup.py b/geonode/br/management/commands/backup.py
index 1c41c258f16..c8280d0fe8e 100644
--- a/geonode/br/management/commands/backup.py
+++ b/geonode/br/management/commands/backup.py
@@ -92,6 +92,13 @@ def add_arguments(self, parser):
help='Skips activation of the Read Only mode in backup procedure execution.'
)
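+        # note: this is an inverted flag: passing --skip-logger-setup stores
+        # False into options['setup_logger'], so the logger is set up by default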
+ parser.add_argument(
+ '--skip-logger-setup',
+ action='store_false',
+ dest='setup_logger',
+        help='Skips setup of the "geonode.br" logger, "br" handler and "br" formatter if they are not present in settings'
+ )
+
def handle(self, **options):
skip_read_only = options.get('skip_read_only')
config = Configuration.load()
@@ -102,6 +109,9 @@ def handle(self, **options):
config.read_only = True
config.save()
+ if options.get('setup_logger'):
+ utils.setup_logger()
+
try:
# execute backup procedure
self.execute_backup(**options)
@@ -123,9 +133,11 @@ def execute_backup(self, **options):
if not backup_dir or len(backup_dir) == 0:
raise CommandError("Destination folder '--backup-dir' is mandatory")
- print("Before proceeding with the Backup, please ensure that:")
- print(" 1. The backend (DB or whatever) is accessible and you have rights")
- print(" 2. The GeoServer is up and running and reachable from this machine")
+ if not force_exec:
+ print("Before proceeding with the Backup, please ensure that:")
+ print(" 1. The backend (DB or whatever) is accessible and you have rights")
+ print(" 2. The GeoServer is up and running and reachable from this machine")
+
message = 'You want to proceed?'
if force_exec or utils.confirm(prompt=message, resp=False):
@@ -142,26 +154,29 @@ def execute_backup(self, **options):
self.create_geoserver_backup(config, settings, target_folder, ignore_errors)
self.dump_geoserver_raster_data(config, settings, target_folder)
self.dump_geoserver_vector_data(config, settings, target_folder)
- logger.info("Dumping geoserver external resources")
self.dump_geoserver_externals(config, settings, target_folder)
else:
- print("Skipping geoserver backup")
+ logger.info("Skipping geoserver backup")
# Deactivate GeoNode Signals
with DisableDjangoSignals():
# Dump Fixtures
+ logger.info("*** Dumping GeoNode fixtures...")
+
for app_name, dump_name in zip(config.app_names, config.dump_names):
# prevent dumping BackupRestore application
if app_name == 'br':
continue
- logger.info(f"Dumping '{app_name}' into '{dump_name}.json'.")
+ logger.info(f" - Dumping '{app_name}' into '{dump_name}.json'")
# Point stdout at a file for dumping data to.
with open(os.path.join(target_folder, f'{dump_name}.json'), 'w') as output:
call_command('dumpdata', app_name, format='json', indent=2, stdout=output)
# Store Media Root
+ logger.info("*** Dumping GeoNode media folder...")
+
media_root = settings.MEDIA_ROOT
media_folder = os.path.join(target_folder, utils.MEDIA_ROOT)
if not os.path.exists(media_folder):
@@ -169,9 +184,11 @@ def execute_backup(self, **options):
copy_tree(media_root, media_folder,
ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
- print(f"Saved Media Files from '{media_root}'.")
+ logger.info(f"Saved media files from '{media_root}'")
# Store Static Root
+ logger.info("*** Dumping GeoNode static folder...")
+
static_root = settings.STATIC_ROOT
static_folder = os.path.join(target_folder, utils.STATIC_ROOT)
if not os.path.exists(static_folder):
@@ -179,9 +196,11 @@ def execute_backup(self, **options):
copy_tree(static_root, static_folder,
ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
- print(f"Saved Static Root from '{static_root}'.")
+ logger.info(f"Saved static root from '{static_root}'.")
# Store Static Folders
+ logger.info("*** Dumping GeoNode static files...")
+
static_folders = settings.STATICFILES_DIRS
static_files_folders = os.path.join(target_folder, utils.STATICFILES_DIRS)
if not os.path.exists(static_files_folders):
@@ -193,8 +212,8 @@ def execute_backup(self, **options):
# (check to prevent saving files from site-packages in project-template based GeoNode projects)
if getattr(settings, 'PROJECT_ROOT', None) and \
not static_files_folder.startswith(settings.PROJECT_ROOT):
- print(f"Skipping static directory: {static_files_folder}. "
- f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}.")
+ logger.info(f"Skipping static directory: {static_files_folder}. "
+ f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}.")
continue
static_folder = os.path.join(static_files_folders,
@@ -204,9 +223,11 @@ def execute_backup(self, **options):
copy_tree(static_files_folder, static_folder,
ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
- print(f"Saved Static Files from '{static_files_folder}'.")
+ logger.info(f"Saved static files from '{static_files_folder}'.")
# Store Template Folders
+ logger.info("*** Dumping GeoNode template folders...")
+
template_folders = []
try:
template_folders = settings.TEMPLATE_DIRS
@@ -225,8 +246,8 @@ def execute_backup(self, **options):
# (check to prevent saving files from site-packages in project-template based GeoNode projects)
if getattr(settings, 'PROJECT_ROOT', None) and \
not template_files_folder.startswith(settings.PROJECT_ROOT):
- print(f"Skipping template directory: {template_files_folder}. "
- f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}.")
+ logger.info(f"Skipping template directory: {template_files_folder}. "
+ f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}.")
continue
template_folder = os.path.join(template_files_folders,
@@ -236,9 +257,10 @@ def execute_backup(self, **options):
copy_tree(template_files_folder, template_folder,
ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
- print(f"Saved Template Files from '{template_files_folder}'.")
+ logger.info(f"Saved template files from '{template_files_folder}'.")
# Store Locale Folders
+ logger.info("*** Dumping GeoNode locale folders...")
locale_folders = settings.LOCALE_PATHS
locale_files_folders = os.path.join(target_folder, utils.LOCALE_PATHS)
if not os.path.exists(locale_files_folders):
@@ -264,7 +286,11 @@ def execute_backup(self, **options):
logger.info(f"Saved Locale Files from '{locale_files_folder}'.")
# Create Final ZIP Archive
+ logger.info("*** Creating final ZIP archive...")
+
backup_archive = os.path.join(backup_dir, f'{dir_time_suffix}.zip')
+ logger.info(f"Creating zip {backup_archive}...")
+
zip_dir(target_folder, backup_archive)
# Generate a md5 hash of a backup archive and save it
@@ -279,12 +305,13 @@ def execute_backup(self, **options):
config.config_parser.write(configfile)
# Clean-up Temp Folder
+ logger.info("*** Final cleanup...")
try:
shutil.rmtree(target_folder)
except Exception:
logger.warning(f"WARNING: Could not be possible to delete the temp folder: '{target_folder}'")
- print("Backup Finished. Archive generated.")
+ logger.info("Backup Finished. Archive generated.")
return str(os.path.join(backup_dir, f'{dir_time_suffix}.zip'))
@@ -295,7 +322,7 @@ def create_geoserver_backup(self, config, settings, target_folder, ignore_errors
passwd = settings.OGC_SERVER['default']['PASSWORD']
geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip')
- logger.info(f"Dumping 'GeoServer Catalog [{url}]' into '{geoserver_bk_file}'.")
+ logger.info(f"*** Dumping GeoServer catalog [{url}] into '{geoserver_bk_file}'")
r = requests.put(f'{url}rest/reset/',
auth=HTTPBasicAuth(user, passwd))
if r.status_code != 200:
@@ -314,7 +341,8 @@ def create_geoserver_backup(self, config, settings, target_folder, ignore_errors
f'BK_BEST_EFFORT={("true" if ignore_errors else "false")}',
f'exclude.file.path={config.gs_exclude_file_path}'
]
- data = {'backup': {'archiveFile': geoserver_bk_file, 'overwrite': 'true',
+ data = {'backup': {'archiveFile': geoserver_bk_file,
+ 'overwrite': 'true',
'options': {'option': _options}}}
headers = {
'Accept': 'application/json',
@@ -329,18 +357,18 @@ def create_geoserver_backup(self, config, settings, target_folder, ignore_errors
headers=headers,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
gs_backup = r.json()
- _url = urlparse(gs_backup['backups']['backup'][len(gs_backup['backups']['backup']) - 1]['href'])
+ _url = urlparse(gs_backup['backups']['backup'][-1]['href'])
_url = f'{urljoin(url, _url.path)}?{_url.query}'
r = requests.get(_url,
headers=headers,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
gs_backup = r.json()
- if (r.status_code != 200):
+ if r.status_code != 200:
raise ValueError(error_backup.format(url, r.status_code, r.text))
except ValueError:
raise ValueError(error_backup.format(url, r.status_code, r.text))
@@ -350,18 +378,18 @@ def create_geoserver_backup(self, config, settings, target_folder, ignore_errors
headers=headers,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
gs_bk_exec_status = gs_backup['backup']['execution']['status']
gs_bk_exec_progress = gs_backup['backup']['execution']['progress']
gs_bk_exec_progress_updated = '0/0'
- while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'):
- if (gs_bk_exec_progress != gs_bk_exec_progress_updated):
+ while gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED':
+ if gs_bk_exec_progress != gs_bk_exec_progress_updated:
gs_bk_exec_progress_updated = gs_bk_exec_progress
r = requests.get(f'{url}rest/br/backup/{gs_bk_exec_id}.json',
headers=headers,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
try:
gs_backup = r.json()
@@ -370,7 +398,7 @@ def create_geoserver_backup(self, config, settings, target_folder, ignore_errors
gs_bk_exec_status = gs_backup['backup']['execution']['status']
gs_bk_exec_progress = gs_backup['backup']['execution']['progress']
- print(f'{gs_bk_exec_status} - {gs_bk_exec_progress}')
+ logger.info(f'Async backup status: {gs_bk_exec_status} - {gs_bk_exec_progress}')
time.sleep(3)
else:
raise ValueError(error_backup.format(url, r.status_code, r.text))
@@ -389,56 +417,50 @@ def create_geoserver_backup(self, config, settings, target_folder, ignore_errors
raise ValueError(error_backup.format(url, r.status_code, r.text))
def dump_geoserver_raster_data(self, config, settings, target_folder):
- if (config.gs_data_dir):
- if (config.gs_dump_raster_data):
- # Dump '$config.gs_data_dir/geonode'
- gs_data_root = os.path.join(config.gs_data_dir, 'geonode')
- if not os.path.isabs(gs_data_root):
- gs_data_root = os.path.join(settings.PROJECT_ROOT, '..', gs_data_root)
- logger.info(f"Dumping GeoServer Uploaded Data from '{gs_data_root}'.")
- if os.path.exists(gs_data_root):
- gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'geonode')
- if not os.path.exists(gs_data_folder):
- os.makedirs(gs_data_folder, exist_ok=True)
- copy_tree(gs_data_root, gs_data_folder,
- ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
- logger.info(f"Dumped GeoServer Uploaded Data from '{gs_data_root}'.")
+ if config.gs_data_dir and config.gs_dump_raster_data:
+ logger.info("*** Dump GeoServer raster data")
+
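+            # uploaded data may live in either of two locations inside the
+            # GeoServer data dir, so both are dumped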
+ for source_root, dest_folder in (
+ (os.path.join(config.gs_data_dir, 'geonode'), # Dump '$config.gs_data_dir/geonode'
+ os.path.join(target_folder, 'gs_data_dir', 'geonode')),
+ (os.path.join(config.gs_data_dir, 'data', 'geonode'), # Dump '$config.gs_data_dir/data/geonode'
+ os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')),
+ ):
+
+ if not os.path.isabs(source_root):
+ source_root = os.path.join(settings.PROJECT_ROOT, '..', source_root)
+ logger.info(f"Dumping raster data from '{source_root}'...")
+ if os.path.exists(source_root):
+ if not os.path.exists(dest_folder):
+ os.makedirs(dest_folder, exist_ok=True)
+ copy_tree(source_root,
+ dest_folder,
+ ignore=utils.ignore_time(config.gs_data_dt_filter[0],
+ config.gs_data_dt_filter[1]))
+ logger.info(f"Dumped raster data from '{source_root}'")
else:
- logger.info(f"Skipped GeoServer Uploaded Data '{gs_data_root}'.")
-
- # Dump '$config.gs_data_dir/data/geonode'
- gs_data_root = os.path.join(config.gs_data_dir, 'data', 'geonode')
- if not os.path.isabs(gs_data_root):
- gs_data_root = os.path.join(settings.PROJECT_ROOT, '..', gs_data_root)
- logger.info(f"Dumping GeoServer Uploaded Data from '{gs_data_root}'.")
- if os.path.exists(gs_data_root):
- gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
- if not os.path.exists(gs_data_folder):
- os.makedirs(gs_data_folder, exist_ok=True)
-
- copy_tree(gs_data_root, gs_data_folder,
- ignore=utils.ignore_time(config.gs_data_dt_filter[0], config.gs_data_dt_filter[1]))
- logger.info(f"Dumped GeoServer Uploaded Data from '{gs_data_root}'.")
- else:
- logger.info(f"Skipped GeoServer Uploaded Data '{gs_data_root}'.")
+ logger.info(f"Skipped raster data directory '{source_root}' because it does not exist")
def dump_geoserver_vector_data(self, config, settings, target_folder):
- if (config.gs_dump_vector_data):
+ if config.gs_dump_vector_data:
+ logger.info("*** Dump GeoServer vector data")
+
# Dump Vectorial Data from DB
- datastore = settings.OGC_SERVER['default']['DATASTORE']
- if (datastore):
- ogc_db_name = settings.DATABASES[datastore]['NAME']
- ogc_db_user = settings.DATABASES[datastore]['USER']
- ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD']
- ogc_db_host = settings.DATABASES[datastore]['HOST']
- ogc_db_port = settings.DATABASES[datastore]['PORT']
+ datastore_name = settings.OGC_SERVER['default']['DATASTORE']
+ if datastore_name:
+ datastore = settings.DATABASES[datastore_name]
gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'geonode')
if not os.path.exists(gs_data_folder):
os.makedirs(gs_data_folder, exist_ok=True)
- utils.dump_db(config, ogc_db_name, ogc_db_user, ogc_db_port,
- ogc_db_host, ogc_db_passwd, gs_data_folder)
+ utils.dump_db(config,
+ datastore['NAME'],
+ datastore['USER'],
+ datastore['PORT'],
+ datastore['HOST'],
+ datastore['PASSWORD'],
+ gs_data_folder)
def dump_geoserver_externals(self, config, settings, target_folder):
"""Scan layers xml and see if there are external references.
@@ -447,6 +469,8 @@ def dump_geoserver_externals(self, config, settings, target_folder):
backup. Also, some references may point to specific url, which
may not be available later.
"""
+ logger.info("*** Dumping GeoServer external resources")
+
external_folder = os.path.join(target_folder, utils.EXTERNAL_ROOT)
def copy_external_resource(abspath):
@@ -522,6 +546,7 @@ def is_xml_file(filename, regexp=re.compile(".*.xml$")):
for directory in ('workspaces', 'styles'):
source = os.path.join(config.gs_data_dir, directory)
+ logger.info(f"Dumping external dir {source}...")
for root, dirs, files in os.walk(source):
for filename in filter(is_xml_file, files):
path = os.path.join(root, filename)
diff --git a/geonode/br/management/commands/restore.py b/geonode/br/management/commands/restore.py
index ec381101da3..dedde804116 100755
--- a/geonode/br/management/commands/restore.py
+++ b/geonode/br/management/commands/restore.py
@@ -27,7 +27,7 @@
import requests
import tempfile
import warnings
-import traceback
+import pathlib
from typing import Union
from datetime import datetime
@@ -159,6 +159,13 @@ def add_arguments(self, parser):
help='If True, preserve geoserver resources and tables'
)
+ parser.add_argument(
+ '--skip-logger-setup',
+ action='store_false',
+ dest='setup_logger',
+        help='Skips setup of the "geonode.br" logger, "br" handler and "br" formatter if they are not present in settings'
+ )
+
def handle(self, **options):
skip_read_only = options.get('skip_read_only')
config = Configuration.load()
@@ -169,6 +176,9 @@ def handle(self, **options):
config.read_only = True
config.save()
+ if options.get('setup_logger'):
+ utils.setup_logger()
+
try:
self.execute_restore(**options)
except Exception:
@@ -195,14 +205,16 @@ def execute_restore(self, **options):
# choose backup_file from backup_files_dir, if --backup-files-dir was provided
if backup_files_dir:
+ logger.info("*** Looking for backup file...")
backup_file = self.parse_backup_files_dir(backup_files_dir)
else:
backup_files_dir = os.path.dirname(backup_file)
# calculate and validate backup archive hash
+ logger.info("*** Validating backup file...")
backup_md5 = self.validate_backup_file_hash(backup_file)
- # check if the original backup file ini setting are available or not
+ # check if the original backup file ini setting is available or not
backup_ini = self.check_backup_ini_settings(backup_file)
if backup_ini:
options['config'] = backup_ini
@@ -225,9 +237,11 @@ def execute_restore(self, **options):
if user.email:
admin_emails.append(user.email)
- print("Before proceeding with the Restore, please ensure that:")
- print(" 1. The backend (DB or whatever) is accessible and you have rights")
- print(" 2. The GeoServer is up and running and reachable from this machine")
+ if not force_exec:
+ print("Before proceeding with the Restore, please ensure that:")
+ print(" 1. The backend (DB or whatever) is accessible and you have rights")
+ print(" 2. The GeoServer is up and running and reachable from this machine")
+
message = 'WARNING: The restore will overwrite ALL GeoNode data. You want to proceed?'
if force_exec or utils.confirm(prompt=message, resp=False):
@@ -238,13 +252,15 @@ def execute_restore(self, **options):
# otherwise default tmp directory is chosen
temp_dir_path = backup_files_dir if os.path.exists(backup_files_dir) else None
- restore_folder = os.path.join(temp_dir_path, f'tmp{str(uuid.uuid4())[:4]}')
+ restore_folder = os.path.join(temp_dir_path,
+ f'unzip_{pathlib.Path(backup_file).stem}_{str(uuid.uuid4())[:4]}')
try:
os.makedirs(restore_folder, exist_ok=True)
except Exception as e:
raise e
try:
# Extract ZIP Archive to Target Folder
+ logger.info("*** Unzipping backup file...")
target_folder = extract_archive(backup_file, restore_folder)
# Write Checks
@@ -267,41 +283,43 @@ def execute_restore(self, **options):
locale_files_folders = os.path.join(target_folder, utils.LOCALE_PATHS)
try:
- print(f"[Sanity Check] Full Write Access to '{restore_folder}' ...")
+ logger.info("*** Performing some checks...")
+ logger.info(f"[Sanity Check] Full Write Access to restore folder: '{restore_folder}' ...")
chmod_tree(restore_folder)
- print(f"[Sanity Check] Full Write Access to '{media_root}' ...")
+ logger.info(f"[Sanity Check] Full Write Access to media root: '{media_root}' ...")
chmod_tree(media_root)
- print(f"[Sanity Check] Full Write Access to '{static_root}' ...")
+ logger.info(f"[Sanity Check] Full Write Access to static root: '{static_root}' ...")
chmod_tree(static_root)
- for static_files_folder in static_folders:
+ for folder in static_folders:
if getattr(settings, 'PROJECT_ROOT', None) and \
- static_files_folder.startswith(settings.PROJECT_ROOT):
- print(f"[Sanity Check] Full Write Access to '{static_files_folder}' ...")
- chmod_tree(static_files_folder)
- for template_files_folder in template_folders:
+ folder.startswith(settings.PROJECT_ROOT):
+ logger.info(f"[Sanity Check] Full Write Access to static file folder: '{folder}' ...")
+ chmod_tree(folder)
+ for folder in template_folders:
if getattr(settings, 'PROJECT_ROOT', None) and \
- template_files_folder.startswith(settings.PROJECT_ROOT):
- print(f"[Sanity Check] Full Write Access to '{template_files_folder}' ...")
- chmod_tree(template_files_folder)
- for locale_files_folder in locale_folders:
+ folder.startswith(settings.PROJECT_ROOT):
+ logger.info(f"[Sanity Check] Full Write Access to template folder: '{folder}' ...")
+ chmod_tree(folder)
+ for folder in locale_folders:
if getattr(settings, 'PROJECT_ROOT', None) and \
- locale_files_folder.startswith(settings.PROJECT_ROOT):
- print(f"[Sanity Check] Full Write Access to '{locale_files_folder}' ...")
- chmod_tree(locale_files_folder)
- except Exception as exception:
+ folder.startswith(settings.PROJECT_ROOT):
+ logger.info(f"[Sanity Check] Full Write Access to locale files folder: '{folder}' ...")
+ chmod_tree(folder)
+ except Exception as e:
if notify:
restore_notification.apply_async(
- args=(admin_emails, backup_file, backup_md5, str(exception)),
+ args=(admin_emails, backup_file, backup_md5, str(e)),
expiration=30
)
- print("...Sanity Checks on Folder failed. Please make sure that the current user has full WRITE access to the above folders (and sub-folders or files).") # noqa
- print("Reason:")
- raise
+ logger.error("Sanity Checks on Folder failed. "
+ "Please make sure that the current user has full WRITE access to the above folders "
+ "(and sub-folders or files).", exc_info=e) # noqa
+ raise Exception(f'Some folders need write access: {str(e)}')
if not skip_geoserver:
try:
- print(f"[Sanity Check] Full Write Access to '{target_folder}' ...")
+ logger.info(f"[Sanity Check] Full Write Access to target folder: '{target_folder}' ...")
chmod_tree(target_folder)
self.restore_geoserver_backup(config, settings, target_folder,
skip_geoserver_info, skip_geoserver_security,
@@ -309,10 +327,13 @@ def execute_restore(self, **options):
self.prepare_geoserver_gwc_config(config, settings)
self.restore_geoserver_raster_data(config, settings, target_folder)
self.restore_geoserver_vector_data(config, settings, target_folder, soft_reset)
- print("Restoring geoserver external resources")
self.restore_geoserver_externals(config, settings, target_folder)
- except Exception as exception:
+ logger.info("*** Recreate GWC tile layers")
+ call_command('create_tile_layers')
+ except Exception as e:
+ logger.warning(f"*** GeoServer Restore failed: {e}", exc_info=e)
if recovery_file:
+ logger.warning("*** Trying to restore from recovery file...")
with tempfile.TemporaryDirectory(dir=temp_dir_path) as restore_folder:
recovery_folder = extract_archive(recovery_file, restore_folder)
self.restore_geoserver_backup(config, settings, recovery_folder,
@@ -323,27 +344,28 @@ def execute_restore(self, **options):
self.restore_geoserver_externals(config, settings, recovery_folder)
if notify:
restore_notification.apply_async(
- args=(admin_emails, backup_file, backup_md5, str(exception)),
+ args=(admin_emails, backup_file, backup_md5, str(e)),
expiration=30
)
- raise exception
+ raise Exception(f'GeoServer restore failed: {e}')
else:
- print("Skipping geoserver backup restore")
+ logger.info("*** Skipping geoserver backup restore")
# Prepare Target DB
try:
- call_command('makemigrations', interactive=False)
+ logger.info("*** Align the database schema")
+ # call_command('makemigrations', interactive=False)
call_command('migrate', interactive=False)
- db_name = settings.DATABASES['default']['NAME']
- db_user = settings.DATABASES['default']['USER']
- db_port = settings.DATABASES['default']['PORT']
- db_host = settings.DATABASES['default']['HOST']
- db_passwd = settings.DATABASES['default']['PASSWORD']
-
- utils.patch_db(db_name, db_user, db_port, db_host, db_passwd, settings.MONITORING_ENABLED)
- except Exception:
- traceback.print_exc()
+ # db_name = settings.DATABASES['default']['NAME']
+ # db_user = settings.DATABASES['default']['USER']
+ # db_port = settings.DATABASES['default']['PORT']
+ # db_host = settings.DATABASES['default']['HOST']
+ # db_passwd = settings.DATABASES['default']['PASSWORD']
+ #
+ # utils.patch_db(db_name, db_user, db_port, db_host, db_passwd, settings.MONITORING_ENABLED)
+ except Exception as e:
+ logger.warning(f"Error while aligning the db: {e}", exc_info=e)
try:
# Deactivate GeoNode Signals
@@ -356,36 +378,41 @@ def execute_restore(self, **options):
db_host = settings.DATABASES['default']['HOST']
db_passwd = settings.DATABASES['default']['PASSWORD']
- utils.flush_db(db_name, db_user, db_port, db_host, db_passwd)
+ utils.truncate_tables(db_name, db_user, db_port, db_host, db_passwd)
except Exception:
+ logger.info("Error while truncating tables, trying external task")
+
try:
call_command('flush', interactive=False)
- except Exception:
- traceback.print_exc()
- raise
+ except Exception as e:
+ logger.warning("Could not cleanup GeoNode tables", exc_info=e)
+ raise Exception("Could not cleanup GeoNode tables")
# Restore Fixtures
- abortlater = False
+ err_cnt = 0
+
+ logger.info("*** Restoring GeoNode fixtures...")
for app_name, dump_name in zip(config.app_names, config.dump_names):
fixture_file = os.path.join(target_folder, f"{dump_name}.json")
- print(f"Deserializing '{fixture_file}'")
+ logger.info(f" - restoring '{fixture_file}'")
try:
call_command('loaddata', fixture_file, app_label=app_name)
- except IntegrityError:
- traceback.print_exc()
- logger.warning(f"WARNING: The fixture '{dump_name}' fails on integrity check and import is aborted after all fixtures have been checked.") # noqa
- abortlater = True
+ except IntegrityError as e:
+ logger.warning(f"The fixture '{dump_name}' failed the integrity check. "
+ "Import will be aborted after all fixtures have been checked",
+ exc_info=e) # noqa
+ err_cnt += 1
except Exception as e:
- traceback.print_exc()
- logger.warning(f"WARNING: No valid fixture data found for '{dump_name}'.")
+ logger.warning(f"No valid fixture data found for '{dump_name}'", exc_info=e)
# helpers.load_fixture(app_name, fixture_file)
raise e
- if abortlater:
- raise IntegrityError()
+ if err_cnt:
+ raise IntegrityError(f"{err_cnt} fixtures could not be loaded")
# Restore Media Root
+ logger.info("*** Restore media root...")
if config.gs_data_dt_filter[0] is None:
shutil.rmtree(media_root, ignore_errors=True)
@@ -394,9 +421,10 @@ def execute_restore(self, **options):
copy_tree(media_folder, media_root)
chmod_tree(media_root)
- print(f"Media Files Restored into '{media_root}'.")
+ logger.info(f"Media files restored into '{media_root}'.")
# Restore Static Root
+ logger.info("*** Restore static root...")
if config.gs_data_dt_filter[0] is None:
shutil.rmtree(static_root, ignore_errors=True)
@@ -405,97 +433,99 @@ def execute_restore(self, **options):
copy_tree(static_folder, static_root)
chmod_tree(static_root)
- print(f"Static Root Restored into '{static_root}'.")
+ logger.info(f"Static root restored into '{static_root}'.")
# Restore Static Folders
- for static_files_folder in static_folders:
+ logger.info("*** Restore static folders...")
+
+ for folder in static_folders:
+ logger.info(f"* Restoring {folder}...")
# skip restoration of static files of apps not located under PROJECT_ROOT path
# (check to prevent overriding files from site-packages
# in project-template based GeoNode projects)
if getattr(settings, 'PROJECT_ROOT', None) and \
- not static_files_folder.startswith(settings.PROJECT_ROOT):
- print(
- f"Skipping static directory: {static_files_folder}. "
+ not folder.startswith(settings.PROJECT_ROOT):
+ logger.info(
+ f"Skipping static directory: {folder}. "
f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}.")
continue
if config.gs_data_dt_filter[0] is None:
- shutil.rmtree(static_files_folder, ignore_errors=True)
+ logger.info(f"Cleaning {folder}...")
+ shutil.rmtree(folder, ignore_errors=True)
- if not os.path.exists(static_files_folder):
- os.makedirs(static_files_folder, exist_ok=True)
+ logger.info(f"Restoring {folder}...")
+ if not os.path.exists(folder):
+ os.makedirs(folder, exist_ok=True)
copy_tree(os.path.join(static_files_folders,
- os.path.basename(os.path.normpath(static_files_folder))),
- static_files_folder)
- chmod_tree(static_files_folder)
- print(f"Static Files Restored into '{static_files_folder}'.")
+ os.path.basename(os.path.normpath(folder))),
+ folder)
+ chmod_tree(folder)
+ logger.info(f"Static files restored into '{folder}'.")
# Restore Template Folders
- for template_files_folder in template_folders:
+ logger.info("*** Restore template folders...")
+ for folder in template_folders:
+ logger.info(f"* Restoring {folder}...")
# skip restoration of template files of apps not located under PROJECT_ROOT path
# (check to prevent overriding files from site-packages
# in project-template based GeoNode projects)
if getattr(settings, 'PROJECT_ROOT', None) and \
- not template_files_folder.startswith(settings.PROJECT_ROOT):
- print(
- f"Skipping template directory: {template_files_folder}. "
+ not folder.startswith(settings.PROJECT_ROOT):
+ logger.info(
+ f"Skipping template directory: {folder}. "
f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}.")
continue
if config.gs_data_dt_filter[0] is None:
- shutil.rmtree(template_files_folder, ignore_errors=True)
+ logger.info(f"Cleaning {folder}...")
+ shutil.rmtree(folder, ignore_errors=True)
- if not os.path.exists(template_files_folder):
- os.makedirs(template_files_folder, exist_ok=True)
+ logger.info(f"Restoring {folder}...")
+ if not os.path.exists(folder):
+ os.makedirs(folder, exist_ok=True)
copy_tree(os.path.join(template_files_folders,
- os.path.basename(os.path.normpath(template_files_folder))),
- template_files_folder)
- chmod_tree(template_files_folder)
- print(f"Template Files Restored into '{template_files_folder}'.")
+ os.path.basename(os.path.normpath(folder))),
+ folder)
+ chmod_tree(folder)
+ logger.info(f"Template files restored into '{folder}'.")
# Restore Locale Folders
- for locale_files_folder in locale_folders:
+ logger.info("*** Restore locale folders...")
+ for folder in locale_folders:
+ logger.info(f"* Restoring {folder}...")
# skip restoration of locale files of apps not located under PROJECT_ROOT path
# (check to prevent overriding files from site-packages
# in project-template based GeoNode projects)
if getattr(settings, 'PROJECT_ROOT', None) and \
- not locale_files_folder.startswith(settings.PROJECT_ROOT):
- print(
- f"Skipping locale directory: {locale_files_folder}. "
+ not folder.startswith(settings.PROJECT_ROOT):
+ logger.info(
+ f"Skipping locale directory: {folder}. "
f"It's not located under PROJECT_ROOT path: {settings.PROJECT_ROOT}.")
continue
if config.gs_data_dt_filter[0] is None:
- shutil.rmtree(locale_files_folder, ignore_errors=True)
+ logger.info(f"Cleaning {folder}...")
+ shutil.rmtree(folder, ignore_errors=True)
- if not os.path.exists(locale_files_folder):
- os.makedirs(locale_files_folder, exist_ok=True)
+ logger.info(f"Restoring {folder}...")
+ if not os.path.exists(folder):
+ os.makedirs(folder, exist_ok=True)
copy_tree(os.path.join(locale_files_folders,
- os.path.basename(os.path.normpath(locale_files_folder))),
- locale_files_folder)
- chmod_tree(locale_files_folder)
- print(f"Locale Files Restored into '{locale_files_folder}'.")
+ os.path.basename(os.path.normpath(folder))),
+ folder)
+ chmod_tree(folder)
+ logger.info(f"Locale Files Restored into '{folder}'.")
+ logger.info("*** Calling collectstatic...")
call_command('collectstatic', interactive=False)
- # Cleanup DB
- try:
- db_name = settings.DATABASES['default']['NAME']
- db_user = settings.DATABASES['default']['USER']
- db_port = settings.DATABASES['default']['PORT']
- db_host = settings.DATABASES['default']['HOST']
- db_passwd = settings.DATABASES['default']['PASSWORD']
-
- utils.cleanup_db(db_name, db_user, db_port, db_host, db_passwd)
- except Exception:
- traceback.print_exc()
-
# store backup info
restored_backup = RestoredBackup(
name=backup_file.rsplit('/', 1)[-1],
@@ -504,17 +534,21 @@ def execute_restore(self, **options):
)
restored_backup.save()
- except Exception as exception:
+ except Exception as e:
+ # exception during geonode db restore (gs has already been restored)
if notify:
restore_notification.apply_async(
- args=(admin_emails, backup_file, backup_md5, str(exception)),
+ args=(admin_emails, backup_file, backup_md5, str(e)),
expiration=30
)
+ raise Exception(f'GeoNode restore failed: {e}')
+
+ # call_command('makemigrations', interactive=False)
+ logger.info("*** Synch db with fake migrations...")
+ call_command('migrate', interactive=False, fake=True)
- finally:
- call_command('makemigrations', interactive=False)
- call_command('migrate', interactive=False, fake=True)
- call_command('sync_geonode_datasets', updatepermissions=True, ignore_errors=True)
+ logger.info("*** Sync layers with GeoServer...")
+ call_command('sync_geonode_datasets', updatepermissions=True, ignore_errors=True)
if notify:
restore_notification.apply_async(
@@ -522,13 +556,14 @@ def execute_restore(self, **options):
expiration=30
)
- print("HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links") # noqa
- print(
+ logger.info("HINT: If you migrated from another site, do not forget to run the command 'migrate_baseurl' to fix Links") # noqa
+ logger.info(
" e.g.: DJANGO_SETTINGS_MODULE=my_geonode.settings python manage.py migrate_baseurl "
"--source-address=my-host-dev.geonode.org --target-address=my-host-prod.geonode.org"
)
- print("Restore finished.")
+ logger.info("Restore finished.")
finally:
+ logger.info("*** Final filesystem cleanup ...")
shutil.rmtree(restore_folder)
def validate_backup_file_options(self, **options) -> None:
@@ -654,17 +689,20 @@ def restore_geoserver_backup(self, config, settings, target_folder,
passwd = settings.OGC_SERVER['default']['PASSWORD']
geoserver_bk_file = os.path.join(target_folder, 'geoserver_catalog.zip')
+ logger.info(f"*** Restoring GeoServer catalog [{url}] from '{geoserver_bk_file}'")
+
if not os.path.exists(geoserver_bk_file) or not os.access(geoserver_bk_file, os.R_OK):
raise Exception(f'ERROR: geoserver restore: file "{geoserver_bk_file}" not found.')
- print(f"Restoring 'GeoServer Catalog [{url}]' from '{geoserver_bk_file}'.")
+ def bstr(x):
+ return 'true' if x else 'false'
# Best Effort Restore: 'options': {'option': ['BK_BEST_EFFORT=true']}
_options = [
- f"BK_PURGE_RESOURCES={'true' if not soft_reset else 'false'}",
+ f"BK_PURGE_RESOURCES={bstr(not soft_reset)}",
'BK_CLEANUP_TEMP=true',
- f'BK_SKIP_SETTINGS={("true" if skip_geoserver_info else "false")}',
- f'BK_SKIP_SECURITY={("true" if skip_geoserver_security else "false")}',
+ f'BK_SKIP_SETTINGS={bstr(skip_geoserver_info)}',
+ f'BK_SKIP_SECURITY={bstr(skip_geoserver_security)}',
'BK_BEST_EFFORT=true',
f'exclude.file.path={config.gs_exclude_file_path}'
]
@@ -685,7 +723,7 @@ def restore_geoserver_backup(self, config, settings, target_folder,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
gs_backup = r.json()
_url = urlparse(gs_backup['restores']['restore'][len(gs_backup['restores']['restore']) - 1]['href'])
_url = f'{urljoin(url, _url.path)}?{_url.query}'
@@ -694,10 +732,10 @@ def restore_geoserver_backup(self, config, settings, target_folder,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
gs_backup = r.json()
- if (r.status_code != 200):
+ if r.status_code != 200:
raise ValueError(error_backup.format(url, r.status_code, r.text))
except ValueError:
raise ValueError(error_backup.format(url, r.status_code, r.text))
@@ -708,19 +746,19 @@ def restore_geoserver_backup(self, config, settings, target_folder,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
gs_bk_exec_status = gs_backup['restore']['execution']['status']
gs_bk_exec_progress = gs_backup['restore']['execution']['progress']
gs_bk_exec_progress_updated = '0/0'
- while (gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED'):
- if (gs_bk_exec_progress != gs_bk_exec_progress_updated):
+ while gs_bk_exec_status != 'COMPLETED' and gs_bk_exec_status != 'FAILED':
+ if gs_bk_exec_progress != gs_bk_exec_progress_updated:
gs_bk_exec_progress_updated = gs_bk_exec_progress
r = requests.get(f'{url}rest/br/restore/{gs_bk_exec_id}.json',
headers=headers,
auth=HTTPBasicAuth(user, passwd),
timeout=10)
- if (r.status_code == 200):
+ if r.status_code == 200:
try:
gs_backup = r.json()
@@ -729,7 +767,7 @@ def restore_geoserver_backup(self, config, settings, target_folder,
gs_bk_exec_status = gs_backup['restore']['execution']['status']
gs_bk_exec_progress = gs_backup['restore']['execution']['progress']
- print(f'{gs_bk_exec_status} - {gs_bk_exec_progress}')
+                        logger.info(f'Async restore status: {gs_bk_exec_status} - {gs_bk_exec_progress}')
time.sleep(3)
else:
raise ValueError(error_backup.format(url, r.status_code, r.text))
@@ -744,68 +782,64 @@ def restore_geoserver_backup(self, config, settings, target_folder,
raise ValueError(error_backup.format(url, r.status_code, r.text))
def prepare_geoserver_gwc_config(self, config, settings):
- if (config.gs_data_dir):
+ if config.gs_data_dir:
+ logger.info("*** Cleanup old GWC config...")
# Cleanup '$config.gs_data_dir/gwc-layers'
- gwc_datasets_root = os.path.join(config.gs_data_dir, 'gwc-layers')
- if not os.path.isabs(gwc_datasets_root):
- gwc_datasets_root = os.path.join(settings.PROJECT_ROOT, '..', gwc_datasets_root)
+ gwc_layers_root = os.path.join(config.gs_data_dir, 'gwc-layers')
+ if not os.path.isabs(gwc_layers_root):
+ gwc_layers_root = os.path.join(settings.PROJECT_ROOT, '..', gwc_layers_root)
try:
- shutil.rmtree(gwc_datasets_root)
- print(f'Cleaned out old GeoServer GWC Layers Config: {gwc_datasets_root}')
- except Exception:
- pass
- if not os.path.exists(gwc_datasets_root):
- os.makedirs(gwc_datasets_root, exist_ok=True)
+ logger.info(f'Cleaning out old GeoServer GWC layers config: {gwc_layers_root}')
+ shutil.rmtree(gwc_layers_root)
+ except Exception as e:
+ logger.info(f'Error while cleaning old GeoServer GWC layers config: {e}')
+ if not os.path.exists(gwc_layers_root):
+ logger.info(f'Recreating GWC layers dir: {gwc_layers_root}')
+ os.makedirs(gwc_layers_root, exist_ok=True)
def restore_geoserver_raster_data(self, config, settings, target_folder):
- if (config.gs_data_dir):
- if (config.gs_dump_raster_data):
- # Restore '$config.gs_data_dir/geonode'
- gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'geonode')
- if os.path.exists(gs_data_folder):
- gs_data_root = os.path.join(config.gs_data_dir, 'geonode')
- if not os.path.isabs(gs_data_root):
- gs_data_root = os.path.join(settings.PROJECT_ROOT, '..', gs_data_root)
-
- if not os.path.exists(gs_data_root):
- os.makedirs(gs_data_root, exist_ok=True)
-
- copy_tree(gs_data_folder, gs_data_root)
- print(f"GeoServer Uploaded Raster Data Restored to '{gs_data_root}'.")
- else:
- print(f"Skipping geoserver raster data restore: directory \"{gs_data_folder}\" not found.")
-
- # Restore '$config.gs_data_dir/data/geonode'
- gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')
- if os.path.exists(gs_data_folder):
- gs_data_root = os.path.join(config.gs_data_dir, 'data', 'geonode')
- if not os.path.isabs(gs_data_root):
- gs_data_root = os.path.join(settings.PROJECT_ROOT, '..', gs_data_root)
-
- if not os.path.exists(gs_data_root):
- os.makedirs(gs_data_root, exist_ok=True)
-
- copy_tree(gs_data_folder, gs_data_root)
- print(f"GeoServer Uploaded Data Restored to '{gs_data_root}'.")
+ if config.gs_data_dir and config.gs_dump_raster_data:
+ logger.info("*** Restore raster data")
+
+ for dest_folder, source_root in (
+ (os.path.join(config.gs_data_dir, 'geonode'),
+                 os.path.join(target_folder, 'gs_data_dir', 'geonode')),  # Restore '$config.gs_data_dir/geonode'
+ (os.path.join(config.gs_data_dir, 'data', 'geonode'),
+                 os.path.join(target_folder, 'gs_data_dir', 'data', 'geonode')),  # Restore '$config.gs_data_dir/data/geonode'
+ ):
+ if os.path.exists(source_root):
+ logger.info(f"Restoring raster data to '{dest_folder}'...")
+ if not os.path.isabs(dest_folder):
+ dest_folder = os.path.join(settings.PROJECT_ROOT, '..', dest_folder)
+
+ if not os.path.exists(dest_folder):
+ os.makedirs(dest_folder, exist_ok=True)
+
+ logger.info(f"Copying data from '{source_root}' to '{dest_folder}'...")
+ copy_tree(source_root, dest_folder)
+ logger.info(f"Restored raster data to '{dest_folder}'")
else:
- print(f"Skipping geoserver raster data restore: directory \"{gs_data_folder}\" not found.")
+ logger.info(f"Skipping raster data directory '{source_root}' because it does not exist")
def restore_geoserver_vector_data(self, config, settings, target_folder, soft_reset):
"""Restore Vectorial Data from DB"""
- if (config.gs_dump_vector_data):
+ if config.gs_dump_vector_data:
+ logger.info("*** Restore vector data")
gs_data_folder = os.path.join(target_folder, 'gs_data_dir', 'geonode')
if not os.path.exists(gs_data_folder):
- print(f"Skipping geoserver vector data restore: directory \"{gs_data_folder}\" not found.")
+ logger.info(f"Skipping vector data restore: directory \"{gs_data_folder}\" not found")
return
+ logger.info(f"Restoring vector data from \"{gs_data_folder}\" not found")
- datastore = settings.OGC_SERVER['default']['DATASTORE']
- if (datastore):
- ogc_db_name = settings.DATABASES[datastore]['NAME']
- ogc_db_user = settings.DATABASES[datastore]['USER']
- ogc_db_passwd = settings.DATABASES[datastore]['PASSWORD']
- ogc_db_host = settings.DATABASES[datastore]['HOST']
- ogc_db_port = settings.DATABASES[datastore]['PORT']
+        datastore_name = settings.OGC_SERVER['default']['DATASTORE']
+        if datastore_name:
+            datastore = settings.DATABASES[datastore_name]
+ ogc_db_name = datastore['NAME']
+ ogc_db_user = datastore['USER']
+ ogc_db_passwd = datastore['PASSWORD']
+ ogc_db_host = datastore['HOST']
+ ogc_db_port = datastore['PORT']
if not soft_reset:
utils.remove_existing_tables(ogc_db_name, ogc_db_user, ogc_db_port, ogc_db_host, ogc_db_passwd)
@@ -815,6 +849,7 @@ def restore_geoserver_vector_data(self, config, settings, target_folder, soft_re
def restore_geoserver_externals(self, config, settings, target_folder):
"""Restore external references from XML files"""
+ logger.info("*** Restoring GeoServer external resources...")
external_folder = os.path.join(target_folder, utils.EXTERNAL_ROOT)
if os.path.exists(external_folder):
dir_util.copy_tree(external_folder, '/')
diff --git a/geonode/br/management/commands/settings_docker_sample.ini b/geonode/br/management/commands/settings_docker_sample.ini
index 85b4dbe6f0b..82a28022119 100644
--- a/geonode/br/management/commands/settings_docker_sample.ini
+++ b/geonode/br/management/commands/settings_docker_sample.ini
@@ -1,6 +1,7 @@
[database]
pgdump = pg_dump
pgrestore = pg_restore
+psql = psql
[geoserver]
datadir = /geoserver_data/data
@@ -12,6 +13,6 @@ dumprasterdata = yes
# data_layername_exclude_filter = {comma separated list of layernames, optionally with glob syntax} e.g.: tuscany_*,italy
[fixtures]
-apps = contenttypes,auth,people,groups,account,guardian,admin,actstream,announcements,avatar,base,documents,geoserver,invitations,pinax_notifications,layers,maps,oauth2_provider,services,harvesting,sites,socialaccount,taggit,tastypie,upload,user_messages,geonode_themes,geoapps,favorite,geonode_client
-dumps = contenttypes,auth,people,groups,account,guardian,admin,actstream,announcements,avatar,base,documents,geoserver,invitations,pinax_notifications,layers,maps,oauth2_provider,services,harvesting,sites,socialaccount,taggit,tastypie,upload,user_messages,geonode_themes,geoapps,favorite,geonode_client
+apps = contenttypes,auth,people,groups,account,guardian,admin,actstream,announcements,avatar,base,documents,geoserver,invitations,pinax_notifications,layers,maps,oauth2_provider,harvesting,services,sites,socialaccount,taggit,tastypie,upload,user_messages,geonode_themes,geoapps,favorite,geonode_client
+dumps = contenttypes,auth,people,groups,account,guardian,admin,actstream,announcements,avatar,base,documents,geoserver,invitations,pinax_notifications,layers,maps,oauth2_provider,harvesting,services,sites,socialaccount,taggit,tastypie,upload,user_messages,geonode_themes,geoapps,favorite,geonode_client
diff --git a/geonode/br/management/commands/settings_sample.ini b/geonode/br/management/commands/settings_sample.ini
index ef1cb1f1c27..dc51bfa5f98 100644
--- a/geonode/br/management/commands/settings_sample.ini
+++ b/geonode/br/management/commands/settings_sample.ini
@@ -1,6 +1,7 @@
[database]
pgdump = pg_dump
pgrestore = pg_restore
+psql = psql
[geoserver]
datadir = geoserver/data
diff --git a/geonode/br/management/commands/utils/utils.py b/geonode/br/management/commands/utils/utils.py
index 936524b5805..5086183374f 100644
--- a/geonode/br/management/commands/utils/utils.py
+++ b/geonode/br/management/commands/utils/utils.py
@@ -22,13 +22,17 @@
import sys
import shutil
import hashlib
+from logging import Formatter, StreamHandler
+
import psycopg2
import traceback
import dateutil.parser
import logging
+import subprocess
from configparser import ConfigParser
+from django.conf import settings
from django.core.management.base import CommandError
@@ -38,6 +42,8 @@
TEMPLATE_DIRS = 'template_dirs'
LOCALE_PATHS = 'locale_dirs'
EXTERNAL_ROOT = 'external'
+
+
logger = logging.getLogger(__name__)
@@ -90,80 +96,70 @@ def geoserver_option_list(parser):
class Config:
- def __init__(self, options):
- self.config_parser = None
- self.load_settings(settings_path=options.get('config'))
- self.load_options(options)
+ def __init__(self, options: dict):
+ def apply_options_override(options):
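+            # get_option treats only None as "not provided", so an explicit
+            # False or empty value from the command line still overrides the
+            # config file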
+ def get_option(key, fallback):
+ o = options.get(key)
+ return o if o is not None else fallback
+
+ self.gs_data_dir = get_option("gs_data_dir", self.gs_data_dir)
+ self.gs_dump_vector_data = get_option("dump_gs_vector_data", self.gs_dump_vector_data)
+ self.gs_dump_raster_data = get_option("dump_gs_raster_data", self.gs_dump_raster_data)
+
+ # store back overrides as current config (needed for saving it into the backup zip)
+ self.config_parser['geoserver']['datadir'] = self.gs_data_dir
+ self.config_parser['geoserver']['dumpvectordata'] = str(self.gs_dump_vector_data)
+ self.config_parser['geoserver']['dumprasterdata'] = str(self.gs_dump_raster_data)
+
+ def load_settings(config):
+ self.pg_dump_cmd = config.get('database', 'pgdump')
+ self.pg_restore_cmd = config.get('database', 'pgrestore')
+ self.psql_cmd = config.get('database', 'psql', fallback='psql')
- def load_options(self, options):
- if options.get("gs_data_dir", None):
- self.gs_data_dir = options.get("gs_data_dir")
- if self.config_parser:
- self.config_parser['geoserver']['datadir'] = self.gs_data_dir
+ self.gs_data_dir = config.get('geoserver', 'datadir')
- if options.get("dump_gs_vector_data", None) is not None:
- self.gs_dump_vector_data = options.get("dump_gs_vector_data")
- if self.config_parser:
- self.config_parser['geoserver']['dumpvectordata'] = self.gs_dump_vector_data
+ self.gs_exclude_file_path = ';'.join(config.get('geoserver', 'datadir_exclude_file_path').split(',')) \
+ if config.has_option('geoserver', 'datadir_exclude_file_path') \
+ else ''
- if options.get("dump_gs_raster_data", None) is not None:
- self.gs_dump_raster_data = options.get("dump_gs_raster_data")
- if self.config_parser:
- self.config_parser['geoserver']['dumprasterdata'] = self.gs_dump_raster_data
+ self.gs_dump_vector_data = config.getboolean('geoserver', 'dumpvectordata')
+ self.gs_dump_raster_data = config.getboolean('geoserver', 'dumprasterdata')
- def load_settings(self, settings_path):
+ self.gs_data_dt_filter = config.get('geoserver', 'data_dt_filter').split(' ') \
+ if config.has_option('geoserver', 'data_dt_filter') \
+ else (None, None)
+ self.gs_data_datasetname_filter = config.get('geoserver', 'data_datasetname_filter').split(',') \
+ if config.has_option('geoserver', 'data_datasetname_filter') \
+ else ''
+
+ self.gs_data_datasetname_exclude_filter = config.get('geoserver', 'data_datasetname_exclude_filter').split(
+ ',') \
+ if config.has_option('geoserver', 'data_datasetname_exclude_filter') \
+ else ''
+
+ self.app_names = config.get('fixtures', 'apps').split(',')
+ self.dump_names = config.get('fixtures', 'dumps').split(',')
+
+ # Start init code
+ settings_path = options.get('config')
if not settings_path:
- raise CommandError("Mandatory option (-c / --config)")
+ raise CommandError("Missing mandatory option (-c / --config)")
if not os.path.isabs(settings_path):
settings_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
os.pardir,
settings_path)
if not os.path.exists(settings_path):
- raise CommandError("Provided '-c' / '--config' file does not exist.")
-
- config = ConfigParser()
- config.read(settings_path)
-
- self.pg_dump_cmd = config.get('database', 'pgdump')
- self.pg_restore_cmd = config.get('database', 'pgrestore')
-
- self.gs_data_dir = config.get('geoserver', 'datadir')
-
- if config.has_option('geoserver', 'datadir_exclude_file_path'):
- self.gs_exclude_file_path = \
- ';'.join(config.get('geoserver', 'datadir_exclude_file_path').split(','))
- else:
- self.gs_exclude_file_path = ''
-
- self.gs_dump_vector_data = \
- config.getboolean('geoserver', 'dumpvectordata')
- self.gs_dump_raster_data = \
- config.getboolean('geoserver', 'dumprasterdata')
-
- if config.has_option('geoserver', 'data_dt_filter'):
- self.gs_data_dt_filter = \
- config.get('geoserver', 'data_dt_filter').split(' ')
- else:
- self.gs_data_dt_filter = (None, None)
-
- if config.has_option('geoserver', 'data_datasetname_filter'):
- self.gs_data_datasetname_filter = \
- config.get('geoserver', 'data_datasetname_filter').split(',')
- else:
- self.gs_data_datasetname_filter = ''
-
- if config.has_option('geoserver', 'data_datasetname_exclude_filter'):
- self.gs_data_datasetname_exclude_filter = \
- config.get('geoserver', 'data_datasetname_exclude_filter').split(',')
- else:
- self.gs_data_datasetname_exclude_filter = ''
+ raise CommandError(f"Provided '-c' / '--config' file does not exist: {settings_path}")
- self.app_names = config.get('fixtures', 'apps').split(',')
- self.dump_names = config.get('fixtures', 'dumps').split(',')
+ self.config_parser = ConfigParser()
+ self.config_parser.read(settings_path)
- self.config_parser = config
+ # set config from file
+ load_settings(self.config_parser)
+ # override config from command line
+ apply_options_override(options)
sys.path.append(os.path.join(os.path.dirname(__file__), "lib"))
@@ -179,73 +175,54 @@ def get_db_conn(db_name, db_user, db_port, db_host, db_passwd):
return conn
-def patch_db(db_name, db_user, db_port, db_host, db_passwd, truncate_monitoring=False):
- """Apply patch to GeoNode DB"""
- conn = get_db_conn(db_name, db_user, db_port, db_host, db_passwd)
- curs = conn.cursor()
-
- try:
- curs.execute("ALTER TABLE base_contactrole ALTER COLUMN resource_id DROP NOT NULL;")
- curs.execute("ALTER TABLE base_link ALTER COLUMN resource_id DROP NOT NULL;")
- if truncate_monitoring:
- curs.execute("TRUNCATE monitoring_notificationreceiver CASCADE;")
- conn.commit()
- except Exception:
- try:
- conn.rollback()
- except Exception:
- pass
-
- traceback.print_exc()
- finally:
- curs.close()
- conn.close()
-
-
-def cleanup_db(db_name, db_user, db_port, db_host, db_passwd):
- """Remove spurious records from GeoNode DB"""
- conn = get_db_conn(db_name, db_user, db_port, db_host, db_passwd)
- curs = conn.cursor()
+def get_tables(db_user, db_passwd, db_name, db_host='localhost', db_port=5432):
+ select = f"SELECT tablename FROM pg_tables WHERE tableowner = '{db_user}' and schemaname = 'public'"
+ logger.info(f"Retrieving table list from DB {db_name}@{db_host}: {select}")
try:
- curs.execute("DELETE FROM base_contactrole WHERE resource_id is NULL;")
- curs.execute("DELETE FROM base_link WHERE resource_id is NULL;")
+ conn = get_db_conn(db_name, db_user, db_port, db_host, db_passwd)
+ curs = conn.cursor()
+ curs.execute(select)
+ pg_tables = [table[0] for table in curs.fetchall()]
conn.commit()
- except Exception:
- try:
- conn.rollback()
- except Exception:
- pass
+ return pg_tables
+ except Exception as e:
traceback.print_exc()
+ raise e
finally:
curs.close()
conn.close()
-def flush_db(db_name, db_user, db_port, db_host, db_passwd):
+def truncate_tables(db_name, db_user, db_port, db_host, db_passwd):
"""HARD Truncate all DB Tables"""
db_host = db_host if db_host is not None else 'localhost'
db_port = db_port if db_port is not None else 5432
+
+ logger.info(f"Truncating the tables in DB {db_name} @{db_host}:{db_port} for user {db_user}")
+ pg_tables = get_tables(db_user, db_passwd, db_name, db_host, db_port)
+ logger.info(f"Tables found: {pg_tables}")
+
conn = get_db_conn(db_name, db_user, db_port, db_host, db_passwd)
- curs = conn.cursor()
+ bad_tables = []
try:
- sql_dump = f"""SELECT tablename from pg_tables where tableowner = '{db_user}'"""
- curs.execute(sql_dump)
- pg_tables = curs.fetchall()
- for table in pg_tables:
- if table[0] == 'br_restoredbackup':
+ for table in sorted(pg_tables):
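+            # keep the backup/restore bookkeeping table so RestoredBackup
+            # records survive the truncation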
+ if table == 'br_restoredbackup':
continue
- print(f"Flushing Data : {table[0]}")
- curs.execute(f"TRUNCATE {table[0]} CASCADE;")
- conn.commit()
- except Exception:
- try:
- conn.rollback()
- except Exception:
- pass
- traceback.print_exc()
+ logger.info(f"Truncating table : {table}")
+ try:
+ curs = conn.cursor()
+ curs.execute(f"TRUNCATE {table} CASCADE;")
+ conn.commit()
+ except Exception as e:
+ logger.warning(f"Could not truncate table {table}: {e}", exc_info=e)
+ bad_tables.append(table)
+ conn.rollback()
+ if bad_tables:
+ raise Exception(f"Could not truncate tables {bad_tables}")
+
finally:
curs.close()
conn.close()
@@ -255,98 +232,109 @@ def dump_db(config, db_name, db_user, db_port, db_host, db_passwd, target_folder
"""Dump Full DB into target folder"""
db_host = db_host if db_host is not None else 'localhost'
db_port = db_port if db_port is not None else 5432
- conn = get_db_conn(db_name, db_user, db_port, db_host, db_passwd)
- curs = conn.cursor()
- try:
- sql_dump = f"""SELECT tablename from pg_tables where tableowner = '{db_user}'"""
- curs.execute(sql_dump)
- pg_all_tables = [table[0] for table in curs.fetchall()]
- pg_tables = []
- if config.gs_data_datasetname_filter:
- for pat in config.gs_data_datasetname_filter:
- pg_tables += glob_filter(pg_all_tables, pat)
- elif config.gs_data_datasetname_exclude_filter:
- pg_tables = pg_all_tables
- for pat in config.gs_data_datasetname_exclude_filter:
- names = ','.join(glob_filter(pg_all_tables, pat))
- for exclude_table in names.split(','):
- pg_tables.remove(exclude_table)
- else:
- pg_tables = pg_all_tables
-
- print(f"Dumping existing GeoServer Vectorial Data: {pg_tables}")
- empty_folder(target_folder)
- for table in pg_tables:
- print(f"Dump Table: {db_name}:{table}")
- os.system(f"PGPASSWORD=\"{db_passwd}\" {config.pg_dump_cmd} -h {db_host} -p {str(db_port)} -U {db_user} "
- f"-F c -b -t '\"{str(table)}\"' -f {os.path.join(target_folder, f'{table}.dump {db_name}')}")
- conn.commit()
- except Exception:
- try:
- conn.rollback()
- except Exception:
- pass
- traceback.print_exc()
- finally:
- curs.close()
- conn.close()
+ logger.info("Dumping data tables")
+ pg_tables = get_tables(db_user, db_passwd, db_name, db_host, db_port)
+ logger.info(f"Tables found: {pg_tables}")
+
+ include_filter = config.gs_data_datasetname_filter
+ exclude_filter = config.gs_data_datasetname_exclude_filter
+
+ if include_filter:
+ filtered_tables = []
+ for pat in include_filter:
+ filtered_tables += glob_filter(pg_tables, pat)
+ pg_tables = filtered_tables
+ logger.info(f"Tables found after INCLUDE filtering: {pg_tables}")
+
+ elif exclude_filter:
+ for pat in exclude_filter:
+ names = glob_filter(pg_tables, pat)
+ for exclude_table in names:
+ pg_tables.remove(exclude_table)
+ logger.info(f"Tables found after EXCLUDE filtering: {pg_tables}")
+
+ logger.debug(f"Cleaning up destination folder {target_folder}...")
+ empty_folder(target_folder)
+ for table in sorted(pg_tables):
+ logger.info(f" - Dumping data table: {db_name}:{table}")
+ command = f'{config.pg_dump_cmd} ' \
+ f' -h {db_host} -p {str(db_port)} -U {db_user} -d {db_name} ' \
+ f' -b ' \
+ f" -t '\"{str(table)}\"' " \
+ f" -f {os.path.join(target_folder, f'{table}.sql ')}"
+ ret = subprocess.call(command, shell=True, env={'PGPASSWORD': db_passwd})
+ if ret != 0:
+ logger.error(f'DUMP FAILED FOR TABLE {table}')
def restore_db(config, db_name, db_user, db_port, db_host, db_passwd, source_folder, preserve_tables):
"""Restore Full DB into target folder"""
db_host = db_host if db_host is not None else 'localhost'
db_port = db_port if db_port is not None else 5432
- conn = get_db_conn(db_name, db_user, db_port, db_host, db_passwd)
- # curs = conn.cursor()
- try:
- included_extenstions = ['dump', 'sql']
- file_names = [fn for fn in os.listdir(source_folder)
- if any(fn.endswith(ext) for ext in included_extenstions)]
- for table in file_names:
- print(f"Restoring GeoServer Vectorial Data : {db_name}:{os.path.splitext(table)[0]} ")
- pg_rstcmd = (f"PGPASSWORD=\"{db_passwd}\" {config.pg_restore_cmd} -h {db_host} -p {str(db_port)} -U {db_user} "
- f"--role={db_user} -F c -t \"{os.path.splitext(table)[0]}\" {os.path.join(source_folder, table)} -d {db_name}"
- " -c" if not preserve_tables else "")
- os.system(pg_rstcmd)
- conn.commit()
- except Exception:
- try:
- conn.rollback()
- except Exception:
- pass
- traceback.print_exc()
- finally:
- conn.close()
+ logger.info("Restoring data tables")
+
+ dump_extensions = ['dump', 'sql']
+ file_names = [fn for fn in os.listdir(source_folder)
+ if any(fn.endswith(ext) for ext in dump_extensions)]
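+ # dispatch on extension: '.dump' (pg_dump custom format) goes through
+ # pg_restore, plain '.sql' goes through psql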
+ for filename in sorted(file_names):
+ table_name = os.path.splitext(filename)[0]
+ logger.info(f" - restoring data table: {db_name}:{table_name} ")
+ if filename.endswith('dump'):
+ command = f'{config.pg_restore_cmd} ' \
+ f' -h {db_host} -p {str(db_port)} -d {db_name}' \
+ f' -U {db_user} --role={db_user} ' \
+ f' -t "{table_name}" ' \
+ f' {"-c" if not preserve_tables else "" } ' \
+ f' {os.path.join(source_folder, filename)} '
+ ret = subprocess.call(command, shell=True,
+ env={**os.environ, 'PGPASSWORD': db_passwd})
+ if ret:
+ logger.error(f'RESTORE FAILED FOR FILE {filename}')
+
+ elif filename.endswith('sql'):
+ args = f'{config.psql_cmd} ' \
+ f' -h {db_host} ' \
+ f' -p {str(db_port)} ' \
+ f' -d {db_name} ' \
+ f' -U {db_user} ' \
+ f' -f {os.path.join(source_folder, filename)} '\
+ ' -q -b '
+ cproc = subprocess.run(args, shell=True, capture_output=True, text=True,
+ env={**os.environ, 'PGPASSWORD': db_passwd})
+ ret = cproc.returncode
+ if ret:
+ logger.error(f'RESTORE FAILED FOR FILE {filename}')
+ logger.error(f'CMD:: {args}')  # args is already a single shell string
+ # logger.error(f'OUT:: {cproc.stdout}')
+ logger.error(f'ERR:: {cproc.stderr}')
def remove_existing_tables(db_name, db_user, db_port, db_host, db_passwd):
+ logger.info("Dropping existing GeoServer vector data from DB")
+ pg_tables = get_tables(db_user, db_passwd, db_name, db_host, db_port)
+ bad_tables = []
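+ # collect tables that fail to drop so they can be reported at the end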
+
conn = get_db_conn(db_name, db_user, db_port, db_host, db_passwd)
+ curs = conn.cursor()  # one cursor for all drops, so close() below is always safe
- curs = conn.cursor()
- table_list = f"""SELECT tablename from pg_tables where tableowner = '{db_user}'"""
- try:
- curs.execute(table_list)
- pg_all_tables = [table[0] for table in curs.fetchall()]
- print(f"Dropping existing GeoServer Vectorial Data: {table_list}")
- for pg_table in pg_all_tables:
- print(f"Drop Table: {db_name}:{pg_table} ")
- try:
- curs.execute(f"DROP TABLE \"{pg_table}\" CASCADE")
- except Exception as e:
- print(f"Error Droping Table: {e}")
- conn.commit()
- except Exception as e:
- print(f"Error Removing GeoServer Vectorial Data Tables: {e}")
+ for table in pg_tables:
+ logger.info(f"- Drop Table: {db_name}:{table} ")
try:
+ curs.execute(f'DROP TABLE "{table}" CASCADE')
+ conn.commit()
+ except Exception as e:
+ logger.warning(f"Error Dropping Table {table}: {str(e)}", exc_info=e)
+ bad_tables.append(table)
conn.rollback()
- except Exception:
- pass
- traceback.print_exc()
- finally:
- curs.close()
- conn.close()
+
+ if bad_tables:
+ logger.warning("Some tables could not be removed. This error will probably break the procedure in next steps.")
+ logger.warning(f"Bad tables list: {bad_tables}")
+
+ curs.close()
+ conn.close()
def confirm(prompt=None, resp=False):
@@ -474,3 +462,21 @@ def empty_folder(folder):
shutil.rmtree(file_path)
except Exception as e:
print(f'Failed to delete {file_path}. Reason: {e}')
+
+
+def setup_logger():
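+ # Ensure the 'geonode.br' logger exists: register it in settings.LOGGING
+ # when settings.py does not define it, then configure the live logger.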
+ if 'geonode.br' not in settings.LOGGING['loggers']:
+ settings.LOGGING['formatters']['br'] = {'format': '%(levelname)-7s %(asctime)s %(message)s'}
+ settings.LOGGING['handlers']['br'] = {
+ 'level': 'DEBUG',
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'br'}
+ settings.LOGGING['loggers']['geonode.br'] = {'handlers': ['br'], "level": "INFO", 'propagate': False}
+
+ logger = logging.getLogger('geonode.br')
+
+ # Mutating settings.LOGGING has no effect on an already configured logging
+ # tree, so also attach a handler to the live logger; guard against repeated
+ # calls, which would otherwise duplicate every log line.
+ if not logger.handlers:
+ handler = StreamHandler()
+ handler.setFormatter(Formatter(fmt='%(levelname)-7s %(asctime)s %(message)s'))
+ logger.addHandler(handler)
+ logger.setLevel(logging.INFO)
+ logger.propagate = False
diff --git a/geonode/br/tasks.py b/geonode/br/tasks.py
index 98691614e5c..3a7da3bd045 100644
--- a/geonode/br/tasks.py
+++ b/geonode/br/tasks.py
@@ -37,7 +37,7 @@
retry_backoff=3,
retry_backoff_max=30,
retry_jitter=False)
-def restore_notification(recipients: List, backup_file: str, backup_md5: str, exception: str = None):
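+# 'self' is only valid for a bound task; this assumes bind=True is set in the
+# @app.task decorator above.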
+def restore_notification(self, recipients: List, backup_file: str, backup_md5: str, exception: str = None):
"""
Function sending a CC email report of the restore procedure to a provided emails.
"""
diff --git a/geonode/br/tests/test_backup.py b/geonode/br/tests/test_backup.py
index 3867ebf4750..4891ab959bd 100644
--- a/geonode/br/tests/test_backup.py
+++ b/geonode/br/tests/test_backup.py
@@ -90,9 +90,9 @@ def test_config_file_not_provided(self, mock_configuration_save, fake_confirm):
call_command('backup', *args, **kwargs)
self.assertIn(
- 'Mandatory option (-c / --config)',
+ 'andatory option (-c / --config)',
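+ # no leading 'M': presumably matches both 'Mandatory' and 'mandatory'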
exc.exception.args[0],
- '"Mandatory option (-c / --config)" exception expected.'
+ 'Expected an exception message mentioning the mandatory option (-c / --config)'
)
# force backup interruption before starting the procedure itself
diff --git a/geonode/br/tests/test_restore.py b/geonode/br/tests/test_restore.py
index d31fadc3dfa..d8923d342c6 100644
--- a/geonode/br/tests/test_restore.py
+++ b/geonode/br/tests/test_restore.py
@@ -165,9 +165,9 @@ def test_config_files(self, mock_configuration_save, fake_confirm):
call_command('restore', *args, **kwargs)
self.assertIn(
- 'Mandatory option (-c / --config)',
+ 'andatory option (-c / --config)',
exc.exception.args[0],
- '"Mandatory option (-c / --config)" exception expected.'
+ 'Expected an exception message mentioning the mandatory option (-c / --config)'
)
# create the backup file with ini file
diff --git a/geonode/geoserver/management/commands/create_tile_layers.py b/geonode/geoserver/management/commands/create_tile_layers.py
new file mode 100644
index 00000000000..39e2f25d47a
--- /dev/null
+++ b/geonode/geoserver/management/commands/create_tile_layers.py
@@ -0,0 +1,127 @@
+# -*- coding: utf-8 -*-
+#########################################################################
+#
+# Copyright (C) 2023 OSGeo
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#
+#########################################################################
+
+import logging
+import requests
+from requests.auth import HTTPBasicAuth
+
+from django.core.management.base import BaseCommand
+from django.conf import settings
+
+from geonode.layers.models import Dataset
+
+
+logger = logging.getLogger(__name__)
+
+
+REQ_TEMPLATE = '''
+<GeoServerLayer>
+ <enabled>true</enabled>
+ <inMemoryCached>true</inMemoryCached>
+ <name>{}</name>
+ <metaWidthHeight>
+ <int>2</int>
+ <int>1</int>
+ </metaWidthHeight>
+ <mimeFormats>
+ <string>application/json;type=utfgrid</string>
+ <string>image/png</string>
+ <string>image/vnd.jpeg-png</string>
+ <string>image/jpeg</string>
+ <string>image/vnd.jpeg-png8</string>
+ <string>image/gif</string>
+ <string>image/png8</string>
+ </mimeFormats>
+ <gridSubsets>
+ <gridSubset>
+ <gridSetName>EPSG:3857</gridSetName>
+ </gridSubset>
+ <gridSubset>
+ <gridSetName>EPSG:3857x2</gridSetName>
+ </gridSubset>
+ <gridSubset>
+ <gridSetName>EPSG:4326</gridSetName>
+ </gridSubset>
+ <gridSubset>
+ <gridSetName>EPSG:4326x2</gridSetName>
+ </gridSubset>
+ <gridSubset>
+ <gridSetName>EPSG:900913</gridSetName>
+ </gridSubset>
+ </gridSubsets>
+</GeoServerLayer>
+'''
+
+
+class Command(BaseCommand):
+ help = 'Create missing TileLayers in GWC'
+
+ def add_arguments(self, parser):
+ pass
+
+ def handle(self, **options):
+ try:
+ baseurl = settings.OGC_SERVER['default']['LOCATION']
+ user = settings.OGC_SERVER['default']['USER']
+ passwd = settings.OGC_SERVER['default']['PASSWORD']
+ """
+ curl -v -u admin:geoserver -XGET \
+ "http://:/geoserver/gwc/rest/layers/geonode:tasmania_roads.xml"
+ """
+ layers = Dataset.objects.all()
+ tot = len(layers)
+ logger.info(f"Total layers in GeoNode: {tot}")
+ i = 0
+ cnt_old = 0
+ cnt_new = 0
+ cnt_bad = 0
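+ # counters: cnt_old = already configured, cnt_new = created by this run,
+ # cnt_bad = configuration rejected by GWC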
+ for layer in layers:
+ i += 1
+ logger.info(f"- {i}/{tot} Processing layer: {layer.typename}")
+ r = requests.get(f'{baseurl}gwc/rest/layers/{layer.typename}.xml',
+ auth=HTTPBasicAuth(user, passwd))
+
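+ # a 200 from GWC means a tile layer already exists for this dataset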
+ if r.status_code == 200:
+ logger.info(" - Layer already configured")
+ cnt_old += 1
+ continue
+ try:
+ data = REQ_TEMPLATE.format(layer.name)
+ url = f'{baseurl}gwc/rest/layers/{layer.typename}.xml'
+ logger.info(' - Configuring...')
+ response = requests.put(url, data=data,
+ headers={'Content-Type': 'text/xml'},
+ auth=HTTPBasicAuth(user, passwd))
+
+ if response.status_code == 200:
+ logger.info(f" - Done {layer.name}")
+ cnt_new += 1
+ else:
+ logger.warning(f"Layer {layer.typename} couldn't be configured: code {response.status_code}")
+ cnt_bad += 1
+
+ except Exception:
+ raise
+ except Exception:
+ raise
+
+ logger.info('Work completed')
+ logger.info(f'- TileLayers configured: {cnt_new}')
+ logger.info(f'- TileLayers in error : {cnt_bad}')
+ logger.info(f'- TileLayers found : {cnt_old}')
diff --git a/geonode/settings.py b/geonode/settings.py
index f109bff77e8..fda9a7ff1ec 100644
--- a/geonode/settings.py
+++ b/geonode/settings.py
@@ -650,6 +650,9 @@
'simple': {
'format': '%(message)s',
},
+ 'br': {
+ 'format': '%(levelname)-7s %(asctime)s %(message)s'
+ },
},
'filters': {
'require_debug_false': {
@@ -666,21 +669,26 @@
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler',
- }
+ },
+ 'br': {
+ 'level': 'DEBUG',
+ 'class': 'logging.StreamHandler',
+ 'formatter': 'br'
+ },
+ },
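+ # handlers now live at the root; most loggers below only set a level and
+ # propagate their records to the root 'console' handler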
+ 'root': {
+ 'handlers': ['console'], 'level': 'ERROR',
},
"loggers": {
- "django": {
- "handlers": ["console"], "level": "ERROR", },
- "geonode": {
- "handlers": ["console"], "level": "ERROR", },
- "geoserver-restconfig.catalog": {
- "handlers": ["console"], "level": "ERROR", },
- "owslib": {
- "handlers": ["console"], "level": "ERROR", },
- "pycsw": {
- "handlers": ["console"], "level": "ERROR", },
- "celery": {
- 'handlers': ["console"], 'level': 'ERROR', },
+ "django": {"level": "ERROR", },
+ "geonode": {"level": "WARN", },
+ "geonode.br": {"level": "INFO", 'handlers': ['br'], 'propagate': False},
+ "geoserver-restconfig.catalog": {"level": "ERROR", },
+ "owslib": {"level": "ERROR", },
+ "pycsw": {"level": "ERROR", },
+ "celery": {"level": "WARN", },
+ "mapstore2_adapter.plugins.serializers": {"level": "ERROR", },
+ "geonode_logstash.logstash": {"level": "ERROR", },
},
}