From 0b2db1d5a0d3babb195e5a2eafb1359533eb8fcd Mon Sep 17 00:00:00 2001
From: avdata99 <andres@data99.com.ar>
Date: Sun, 12 Jan 2025 21:40:48 -0300
Subject: [PATCH] Simplificando

---
 ckanext/superset/blueprints/superset.py | 27 ++-------------------
 ckanext/superset/data/main.py           | 32 +++++++------------------
 2 files changed, 10 insertions(+), 49 deletions(-)

diff --git a/ckanext/superset/blueprints/superset.py b/ckanext/superset/blueprints/superset.py
index 2843e5b..cda7c61 100644
--- a/ckanext/superset/blueprints/superset.py
+++ b/ckanext/superset/blueprints/superset.py
@@ -176,34 +176,11 @@ def list_datasets():
     """ List all datasets created from Superset charts """
     cfg = get_config()
     sc = SupersetCKAN(**cfg)
-    sc.load_datasets()
-
-    # Extraer los IDs de datasets directamente desde sc.datasets
-    dataset_ids = [dataset.get('id') for dataset in sc.datasets if dataset.get('id')]
-    raw_datasets = sc.get_list_datasets(dataset_ids)
-
-    # Procesar los datos para aplanarlos
-    datasets = []
-    for d in raw_datasets:
-        if d is not None and isinstance(d, dict):
-            log.debug(f"Procesando dataset: {d}")
-            if d.get('description') is None:
-                d['description'] = 'Sin descripción'
-            if d.get('database') is None:
-                d['database'] = {'database_name': 'Sin organización'}
-            datasets.append({
-                'table_name': d.get('table_name', 'Sin nombre'),
-                'description': d.get('description'),
-                'database_name': d.get('database').get('database_name'),
-                'superset_chart_id': d.get('id'),
-                'private': False,  # Ajustar lógica si hay un indicador real de privacidad
-            })
-        else:
-            log.warning(f"Elemento no procesado en raw_datasets: {d}")
+    superset_datasets = sc.get_datasets()
     superset_url = tk.config.get('ckanext.superset.instance.url')
     extra_vars = {
-        'datasets': datasets,
+        'datasets': superset_datasets,
         'superset_url': superset_url,
     }
     return tk.render('superset/list-datasets.html', extra_vars)
diff --git a/ckanext/superset/data/main.py b/ckanext/superset/data/main.py
index ee08145..7fc32f2 100644
--- a/ckanext/superset/data/main.py
+++ b/ckanext/superset/data/main.py
@@ -55,12 +55,11 @@ def __init__(
 
     def load_datasets(self, force=False):
         """ Get and load all datasets """
+        log.info("Loading datasets")
         if self.datasets and not force:
             return
 
         q_data = {"page_size": 50, "page": 0}
-        log.debug("DENTRO DE load_datasets")
-        log.debug("Q_DATA:", q_data)
         self.datasets = []
         while True:
             params = {'q': json.dumps(q_data)}
@@ -73,10 +72,6 @@ def load_datasets(self, force=False):
             self.datasets.extend(datasets)
             q_data["page"] += 1
-            print("Q_DATA['page']:", q_data["page"])
-            if q_data["page"] > 5:
-                log.error("Too many pages of datasets")
-                break
 
         return self.datasets
 
@@ -101,9 +96,7 @@ def load_charts(self, force=False):
                 ds.load(chart)
                 self.charts.append(ds)
             q_data["page"] += 1
-            if q_data["page"] > 20:
-                log.error("Too many pages of charts")
-                break
+
         return self.charts
 
     def load_databases(self, force=False):
@@ -129,21 +122,6 @@ def get_dataset(self, dataset_id):
         self.datasets.append(dataset)
         return dataset
 
-    def get_list_datasets(self, dataset_ids):
-        """ Get a list of datasets """
-        list_datasets = []
-
-        for dataset_id in dataset_ids:
-            # Verificar si ya está en self.datasets
-            dataset = next((d for d in self.datasets if d.get('id') == dataset_id), None)
-            if not dataset:
-                # Si no está, obtenerlo desde la API
-                dataset = SupersetDataset(superset_instance=self)
-                dataset.get_from_superset(dataset_id)
-                self.datasets.append(dataset)
-            list_datasets.append(dataset)
-        return list_datasets
-
     def get_chart(self, chart_id):
ID """ for chart in self.charts: @@ -161,6 +139,12 @@ def get_databases(self): self.load_databases(self) return self.databases + def get_datasets(self): + """ Get a list_dataset """ + # Get from the API + self.load_datasets(self) + return self.datasets + def prepare_connection(self): """ Define the client and login if required """ log.info(f"Connecting to {self.superset_url}")