From f41b27e04e95fcdfb9a99d71041e8026c807d876 Mon Sep 17 00:00:00 2001
From: jacquesfize
Date: Wed, 9 Oct 2024 14:21:43 +0200
Subject: [PATCH 1/2] feat(import): extend the dataset (jdd) check to verify
 its active status

---
 .../geonature/core/imports/checks/dataframe/core.py |  9 +++++++++
 backend/geonature/core/imports/checks/errors.py     |  5 +++++
 .../0e8e1943c215_add_import_missing_error.py        |  7 +++++++
 backend/geonature/tests/fixtures.py                 | 10 +++++++++-
 .../tests/imports/files/occhab/valid_file.csv       |  1 +
 .../geonature/tests/imports/test_imports_occhab.py  | 12 +++++++++++-
 6 files changed, 42 insertions(+), 2 deletions(-)
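Reviewer note (placed after the diffstat, so git am ignores it): the heart of
this patch is the inactive-dataset mask added to check_datasets() below. A
minimal standalone sketch of that logic, assuming only pandas and a
hypothetical Dataset stub standing in for TDatasets (everything here is
illustrative, not the real GeoNature models):

    from dataclasses import dataclass

    import pandas as pd

    @dataclass
    class Dataset:
        active: bool

    # Hypothetical inputs: `datasets` maps UUID hex strings to dataset
    # objects; `df` holds the rows being imported.
    datasets = {
        "aaaa": Dataset(active=True),
        "bbbb": Dataset(active=False),
    }
    df = pd.DataFrame({"unique_dataset_id": ["aaaa", "bbbb", "aaaa"]})

    # Same shape as the check added in core.py: gather inactive UUIDs,
    # build a boolean mask over the UUID column, and report the offending
    # rows if any match.
    inactive = [uuid for uuid, ds in datasets.items() if not ds.active]
    mask = df["unique_dataset_id"].isin(inactive)
    if mask.any():
        error = {
            "error_code": "DATASET_NOT_ACTIVE",
            "column": "unique_dataset_id",
            "invalid_rows": df[mask],
        }
        print(error["invalid_rows"])  # prints row 1 only
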
diff --git a/backend/geonature/core/imports/checks/dataframe/core.py b/backend/geonature/core/imports/checks/dataframe/core.py
index b3233a44ff..af579946c9 100644
--- a/backend/geonature/core/imports/checks/dataframe/core.py
+++ b/backend/geonature/core/imports/checks/dataframe/core.py
@@ -245,6 +245,15 @@ def check_datasets(
             "invalid_rows": df[invalid_ds_mask],
         }
 
+    inactive_dataset = [uuid for uuid, ds in datasets.items() if not ds.active]
+    inactive_dataset_mask = df[uuid_col].isin(inactive_dataset)
+    if inactive_dataset_mask.any():
+        yield {
+            "error_code": ImportCodeError.DATASET_NOT_ACTIVE,
+            "column": uuid_field.name_field,
+            "invalid_rows": df[inactive_dataset_mask],
+        }
+
     # Warning: we check only permissions of first author, but currently there it only one author per import.
     authorized_datasets = {
         ds.unique_dataset_id.hex: ds
diff --git a/backend/geonature/core/imports/checks/errors.py b/backend/geonature/core/imports/checks/errors.py
index 806d8c004d..f8e7449c7e 100644
--- a/backend/geonature/core/imports/checks/errors.py
+++ b/backend/geonature/core/imports/checks/errors.py
@@ -8,6 +8,8 @@ class ImportCodeError:
         The referenced dataset was not found
     DATASET_NOT_AUTHORIZED : str
         The dataset is not authorized to the current user
+    DATASET_NOT_ACTIVE : str
+        The dataset is inactive
     MULTIPLE_ATTACHMENT_TYPE_CODE : str
         Multiple attachments of the same type are not allowed
     MULTIPLE_CODE_ATTACHMENT : str
@@ -118,11 +120,14 @@
         The habitat code is not found
     CD_NOM_NOT_FOUND : str
         The cd_nom is not found in the instance TaxRef
+
+
     """
 
     # Dataset error
     DATASET_NOT_FOUND = "DATASET_NOT_FOUND"
     DATASET_NOT_AUTHORIZED = "DATASET_NOT_AUTHORIZED"
+    DATASET_NOT_ACTIVE = "DATASET_NOT_ACTIVE"
     MULTIPLE_ATTACHMENT_TYPE_CODE = "MULTIPLE_ATTACHMENT_TYPE_CODE"
     MULTIPLE_CODE_ATTACHMENT = "MULTIPLE_CODE_ATTACHMENT"
diff --git a/backend/geonature/migrations/versions/0e8e1943c215_add_import_missing_error.py b/backend/geonature/migrations/versions/0e8e1943c215_add_import_missing_error.py
index eae3fab816..a8e68ac0f5 100644
--- a/backend/geonature/migrations/versions/0e8e1943c215_add_import_missing_error.py
+++ b/backend/geonature/migrations/versions/0e8e1943c215_add_import_missing_error.py
@@ -47,6 +47,12 @@ def upgrade():
                 "description": "Les entitiés existantes selon UUID sont ignorees.",
                 "error_level": "WARNING",
             },
+            {
+                "error_type": "Erreur de référentiel",
+                "name": "DATASET_NOT_ACTIVE",
+                "description": "Les jeux de données doivent être actifs pour pouvoir importer des données.",
+                "error_level": "ERROR",
+            },
         ],
     )
 
@@ -59,3 +65,4 @@ def downgrade():
     op.execute(sa.delete(error_type).where(error_type.c.name == "INCOHERENT_DATA"))
     op.execute(sa.delete(error_type).where(error_type.c.name == "INVALID_NUMERIC"))
     op.execute(sa.delete(error_type).where(error_type.c.name == "SKIP_EXISTING_UUID"))
+    op.execute(sa.delete(error_type).where(error_type.c.name == "DATASET_NOT_ACTIVE"))
diff --git a/backend/geonature/tests/fixtures.py b/backend/geonature/tests/fixtures.py
index 641947b222..b5540bfec7 100644
--- a/backend/geonature/tests/fixtures.py
+++ b/backend/geonature/tests/fixtures.py
@@ -433,7 +433,7 @@ def datasets(users, acquisition_frameworks, module):
         select(TModules).where(TModules.module_code.in_(writable_module_code))
     ).all()
 
-    def create_dataset(name, id_af, digitizer=None, modules=writable_module):
+    def create_dataset(name, id_af, digitizer=None, modules=writable_module, active=True):
         with db.session.begin_nested():
             dataset = TDatasets(
                 id_acquisition_framework=id_af,
@@ -443,6 +443,7 @@ def create_dataset(name, id_af, digitizer=None, modules=writable_module):
                 marine_domain=True,
                 terrestrial_domain=True,
                 id_digitizer=digitizer.id_role if digitizer else None,
+                active=active,
             )
             if digitizer and digitizer.organisme:
                 actor = CorDatasetActor(
@@ -477,6 +478,13 @@ def create_dataset(name, id_af, digitizer=None, modules=writable_module):
             ),
         ]
     }
+    datasets["own_dataset_not_activated"] = create_dataset(
+        "own_dataset_not_activated",
+        af.id_acquisition_framework,
+        users["user"],
+        active=False,
+    )
+
     datasets["with_module_1"] = create_dataset(
         name="module_1_dataset",
         id_af=af_1.id_acquisition_framework,
diff --git a/backend/geonature/tests/imports/files/occhab/valid_file.csv b/backend/geonature/tests/imports/files/occhab/valid_file.csv
index c3016c6b2e..f9f2e638cc 100644
--- a/backend/geonature/tests/imports/files/occhab/valid_file.csv
+++ b/backend/geonature/tests/imports/files/occhab/valid_file.csv
@@ -40,3 +40,4 @@ On importe une station qui existe déjà en base et un nouvel habitat, seul l
 On importe une station et un habitat existant déjà en base;SKIP_EXISTING_UUID;SKIP_EXISTING_UUID;;EXISTING_STATION_UUID;;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;EXISTING_HABITAT_UUID;prairie;24;;
 technique collect vaut « autre » mais pas de précision fournise;OK !;CONDITIONAL_MANDATORY_FIELD_ERROR;;;VALID_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;;10
 technique collect vaut « autre » et une précision est bien fournies;OK !;OK !;;;VALID_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;moyen précis;10
+jeu de données pas actif;OK !;OK !;;;DATASET_NOT_ACTIVE;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;moyen précis;10
diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py
index 1a09d4c367..004e51d6bd 100644
--- a/backend/geonature/tests/imports/test_imports_occhab.py
+++ b/backend/geonature/tests/imports/test_imports_occhab.py
@@ -107,6 +107,10 @@ def uploaded_import(
             b"FORBIDDEN_DATASET_UUID",
             datasets["orphan_dataset"].unique_dataset_id.hex.encode("ascii"),
         )
+        content = content.replace(
+            b"DATASET_NOT_ACTIVE",
+            datasets["own_dataset_not_activated"].unique_dataset_id.hex.encode("ascii"),
+        )
         f = BytesIO(content)
         data = {
             "file": (f, import_file_name),
@@ -250,6 +254,12 @@ def test_import_valid_file(self, imported_import):
                     "unique_dataset_id",
                     frozenset({6}),
                 ),
+                (
+                    ImportCodeError.DATASET_NOT_ACTIVE,
+                    "station",
+                    "unique_dataset_id",
+                    frozenset({43}),
+                ),
                 (
                     ImportCodeError.INVALID_UUID,
                     "station",
@@ -309,7 +319,7 @@ def test_import_valid_file(self, imported_import):
                     ImportCodeError.ERRONEOUS_PARENT_ENTITY,
                     "habitat",
                     "",
-                    frozenset({5, 6, 9, 24}),
+                    frozenset({5, 6, 9, 24, 43}),
                 ),
                 (
                     ImportCodeError.NO_PARENT_ENTITY,

From d58e7e3f7d49c5a6df39e7d520365c90d860d800 Mon Sep 17 00:00:00 2001
From: jacquesfize
Date: Fri, 11 Oct 2024 10:15:56 +0200
Subject: [PATCH 2/2] fix(test): apply @bouttier's renaming suggestion and fix
 the permission test broken by the new fixture dataset

---
 backend/geonature/tests/imports/files/occhab/valid_file.csv | 2 +-
 backend/geonature/tests/imports/test_imports_occhab.py      | 2 +-
 backend/geonature/tests/test_gn_meta.py                     | 6 +++---
 3 files changed, 5 insertions(+), 5 deletions(-)
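Reviewer note (after the diffstat, ignored by git am): the CSV placeholder is
renamed because it stands for a UUID substituted at upload time, not for the
error code itself; the row's expected errors also change, since a station in
an inactive dataset now fails with DATASET_NOT_ACTIVE and its habitat is
rejected in cascade with ERRONEOUS_PARENT_ENTITY. A sketch of the substitution
pattern used by the uploaded_import fixture, with made-up stand-in values:

    # Hypothetical stand-ins for the fixture's real CSV content and UUID.
    content = b"jeu de donnees pas actif;...;INACTIVE_DATASET_UUID;..."
    inactive_uuid = "0f1e2d3c4b5a69788796a5b4c3d2e1f0"  # made-up hex UUID
    content = content.replace(b"INACTIVE_DATASET_UUID", inactive_uuid.encode("ascii"))
    assert b"INACTIVE_DATASET_UUID" not in content
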
diff --git a/backend/geonature/tests/imports/files/occhab/valid_file.csv b/backend/geonature/tests/imports/files/occhab/valid_file.csv
index f9f2e638cc..315c4f37a6 100644
--- a/backend/geonature/tests/imports/files/occhab/valid_file.csv
+++ b/backend/geonature/tests/imports/files/occhab/valid_file.csv
@@ -40,4 +40,4 @@ On importe une station qui existe déjà en base et un nouvel habitat, seul l
 On importe une station et un habitat existant déjà en base;SKIP_EXISTING_UUID;SKIP_EXISTING_UUID;;EXISTING_STATION_UUID;;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;EXISTING_HABITAT_UUID;prairie;24;;
 technique collect vaut « autre » mais pas de précision fournise;OK !;CONDITIONAL_MANDATORY_FIELD_ERROR;;;VALID_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;;10
 technique collect vaut « autre » et une précision est bien fournies;OK !;OK !;;;VALID_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;moyen précis;10
-jeu de données pas actif;OK !;OK !;;;DATASET_NOT_ACTIVE;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;moyen précis;10
+jeu de données pas actif;DATASET_NOT_ACTIVE;ERRONEOUS_PARENT_ENTITY;;;INACTIVE_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;moyen précis;10
diff --git a/backend/geonature/tests/imports/test_imports_occhab.py b/backend/geonature/tests/imports/test_imports_occhab.py
index 004e51d6bd..f05a22957c 100644
--- a/backend/geonature/tests/imports/test_imports_occhab.py
+++ b/backend/geonature/tests/imports/test_imports_occhab.py
@@ -108,7 +108,7 @@ def uploaded_import(
             datasets["orphan_dataset"].unique_dataset_id.hex.encode("ascii"),
         )
         content = content.replace(
-            b"DATASET_NOT_ACTIVE",
+            b"INACTIVE_DATASET_UUID",
             datasets["own_dataset_not_activated"].unique_dataset_id.hex.encode("ascii"),
         )
         f = BytesIO(content)
diff --git a/backend/geonature/tests/test_gn_meta.py b/backend/geonature/tests/test_gn_meta.py
index 64c29bbeff..f5a5eca52f 100644
--- a/backend/geonature/tests/test_gn_meta.py
+++ b/backend/geonature/tests/test_gn_meta.py
@@ -144,6 +144,7 @@ def test_acquisition_frameworks_permissions(self, app, acquisition_frameworks, d
 
         nested = db.session.begin_nested()
         af.t_datasets.remove(datasets["own_dataset"])
+        af.t_datasets.remove(datasets["own_dataset_not_activated"])
         # Now, the AF has no DS on which user is digitizer.
         assert af.has_instance_permission(1) == False
         # But the AF has still DS on which user organism is actor.
@@ -564,13 +565,12 @@ def test_datasets_permissions(self, app, datasets, users):
         qs = select(TDatasets).where(TDatasets.id_dataset.in_(ds_ids))
         assert set(sc(dsc.filter_by_scope(0, query=qs)).unique().all()) == set([])
         assert set(sc(dsc.filter_by_scope(1, query=qs)).unique().all()) == set(
-            [
-                datasets["own_dataset"],
-            ]
+            [datasets["own_dataset"], datasets["own_dataset_not_activated"]]
         )
         assert set(sc(dsc.filter_by_scope(2, query=qs)).unique().all()) == set(
             [
                 datasets["own_dataset"],
+                datasets["own_dataset_not_activated"],
                 datasets["associate_dataset"],
                 datasets["associate_2_dataset_sensitive"],
             ]
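
Reviewer note (trailing, ignored by git am): the test_gn_meta.py updates follow
directly from the new fixture: own_dataset_not_activated is digitized by
users["user"], so it must appear wherever own_dataset does, both in the
scope-filter expectations and in the AF permission test (both datasets are
removed so the AF no longer has any DS digitized by the user). Roughly, the
expected scope semantics (illustrative only; the real logic lives in
TDatasets.filter_by_scope):

    # Scope 0: nothing; scope 1: own datasets; scope 2: own + organism's.
    own = {"own_dataset", "own_dataset_not_activated"}
    associate = {"associate_dataset", "associate_2_dataset_sensitive"}
    expected_by_scope = {0: set(), 1: own, 2: own | associate}
    assert expected_by_scope[1] >= {"own_dataset_not_activated"}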