Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[IMPORT] Extending jdd checks by verifying its active status #3221

Merged
merged 2 commits into from
Oct 11, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
9 changes: 9 additions & 0 deletions backend/geonature/core/imports/checks/dataframe/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,15 @@ def check_datasets(
"invalid_rows": df[invalid_ds_mask],
}

inactive_dataset = [uuid for uuid, ds in datasets.items() if not ds.active]
inactive_dataset_mask = df[uuid_col].isin(inactive_dataset)
if inactive_dataset_mask.any():
yield {
"error_code": ImportCodeError.DATASET_NOT_ACTIVE,
"column": uuid_field.name_field,
"invalid_rows": df[inactive_dataset_mask],
}

# Warning: we check only permissions of the first author, but currently there is only one author per import.
authorized_datasets = {
ds.unique_dataset_id.hex: ds
Expand Down
5 changes: 5 additions & 0 deletions backend/geonature/core/imports/checks/errors.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,8 @@ class ImportCodeError:
The referenced dataset was not found
DATASET_NOT_AUTHORIZED : str
The dataset is not authorized to the current user
DATASET_NOT_ACTIVE : str
The dataset is inactive
MULTIPLE_ATTACHMENT_TYPE_CODE : str
Multiple attachments of the same type are not allowed
MULTIPLE_CODE_ATTACHMENT : str
Expand Down Expand Up @@ -118,11 +120,14 @@ class ImportCodeError:
The habitat code is not found
CD_NOM_NOT_FOUND : str
The cd_nom is not found in the instance TaxRef


"""

# Dataset error
DATASET_NOT_FOUND = "DATASET_NOT_FOUND"
DATASET_NOT_AUTHORIZED = "DATASET_NOT_AUTHORIZED"
DATASET_NOT_ACTIVE = "DATASET_NOT_ACTIVE"
MULTIPLE_ATTACHMENT_TYPE_CODE = "MULTIPLE_ATTACHMENT_TYPE_CODE"
MULTIPLE_CODE_ATTACHMENT = "MULTIPLE_CODE_ATTACHMENT"

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,12 @@ def upgrade():
"description": "Les entitiés existantes selon UUID sont ignorees.",
"error_level": "WARNING",
},
{
"error_type": "Erreur de référentiel",
"name": "DATASET_NOT_ACTIVE",
"description": "Les jeux de données doivent être actifs pour pouvoir importer des données.",
"error_level": "ERROR",
},
],
)

Expand All @@ -59,3 +65,4 @@ def downgrade():
op.execute(sa.delete(error_type).where(error_type.c.name == "INCOHERENT_DATA"))
op.execute(sa.delete(error_type).where(error_type.c.name == "INVALID_NUMERIC"))
op.execute(sa.delete(error_type).where(error_type.c.name == "SKIP_EXISTING_UUID"))
op.execute(sa.delete(error_type).where(error_type.c.name == "DATASET_NOT_ACTIVE"))
10 changes: 9 additions & 1 deletion backend/geonature/tests/fixtures.py
Original file line number Diff line number Diff line change
Expand Up @@ -433,7 +433,7 @@ def datasets(users, acquisition_frameworks, module):
select(TModules).where(TModules.module_code.in_(writable_module_code))
).all()

def create_dataset(name, id_af, digitizer=None, modules=writable_module):
def create_dataset(name, id_af, digitizer=None, modules=writable_module, active=True):
with db.session.begin_nested():
dataset = TDatasets(
id_acquisition_framework=id_af,
Expand All @@ -443,6 +443,7 @@ def create_dataset(name, id_af, digitizer=None, modules=writable_module):
marine_domain=True,
terrestrial_domain=True,
id_digitizer=digitizer.id_role if digitizer else None,
active=active,
)
if digitizer and digitizer.organisme:
actor = CorDatasetActor(
Expand Down Expand Up @@ -477,6 +478,13 @@ def create_dataset(name, id_af, digitizer=None, modules=writable_module):
),
]
}
datasets["own_dataset_not_activated"] = create_dataset(
"own_dataset_not_activated",
af.id_acquisition_framework,
users["user"],
jacquesfize marked this conversation as resolved.
Show resolved Hide resolved
active=False,
)

datasets["with_module_1"] = create_dataset(
name="module_1_dataset",
id_af=af_1.id_acquisition_framework,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,3 +40,4 @@ On importe une station qui existe déjà en base et un nouvel habitat, seul l
On importe une station et un habitat existant déjà en base;SKIP_EXISTING_UUID;SKIP_EXISTING_UUID;;EXISTING_STATION_UUID;;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;EXISTING_HABITAT_UUID;prairie;24;;
technique collect vaut « autre » mais pas de précision fournie;OK !;CONDITIONAL_MANDATORY_FIELD_ERROR;;;VALID_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;;10
technique collect vaut « autre » et une précision est bien fournie;OK !;OK !;;;VALID_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;moyen précis;10
jeu de données pas actif;DATASET_NOT_ACTIVE;ERRONEOUS_PARENT_ENTITY;;;INACTIVE_DATASET_UUID;17/11/2023;17/11/2023;Toto;;POINT(3.634 44.399);St;;prairie;24;moyen précis;10
12 changes: 11 additions & 1 deletion backend/geonature/tests/imports/test_imports_occhab.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,10 @@ def uploaded_import(
b"FORBIDDEN_DATASET_UUID",
datasets["orphan_dataset"].unique_dataset_id.hex.encode("ascii"),
)
content = content.replace(
b"INACTIVE_DATASET_UUID",
datasets["own_dataset_not_activated"].unique_dataset_id.hex.encode("ascii"),
)
f = BytesIO(content)
data = {
"file": (f, import_file_name),
Expand Down Expand Up @@ -250,6 +254,12 @@ def test_import_valid_file(self, imported_import):
"unique_dataset_id",
frozenset({6}),
),
(
ImportCodeError.DATASET_NOT_ACTIVE,
"station",
"unique_dataset_id",
frozenset({43}),
),
(
ImportCodeError.INVALID_UUID,
"station",
Expand Down Expand Up @@ -309,7 +319,7 @@ def test_import_valid_file(self, imported_import):
ImportCodeError.ERRONEOUS_PARENT_ENTITY,
"habitat",
"",
frozenset({5, 6, 9, 24}),
frozenset({5, 6, 9, 24, 43}),
),
(
ImportCodeError.NO_PARENT_ENTITY,
Expand Down
6 changes: 3 additions & 3 deletions backend/geonature/tests/test_gn_meta.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,7 @@ def test_acquisition_frameworks_permissions(self, app, acquisition_frameworks, d

nested = db.session.begin_nested()
af.t_datasets.remove(datasets["own_dataset"])
af.t_datasets.remove(datasets["own_dataset_not_activated"])
# Now, the AF has no DS on which user is digitizer.
assert af.has_instance_permission(1) == False
# But the AF has still DS on which user organism is actor.
Expand Down Expand Up @@ -564,13 +565,12 @@ def test_datasets_permissions(self, app, datasets, users):
qs = select(TDatasets).where(TDatasets.id_dataset.in_(ds_ids))
assert set(sc(dsc.filter_by_scope(0, query=qs)).unique().all()) == set([])
assert set(sc(dsc.filter_by_scope(1, query=qs)).unique().all()) == set(
[
datasets["own_dataset"],
]
[datasets["own_dataset"], datasets["own_dataset_not_activated"]]
)
assert set(sc(dsc.filter_by_scope(2, query=qs)).unique().all()) == set(
[
datasets["own_dataset"],
datasets["own_dataset_not_activated"],
datasets["associate_dataset"],
datasets["associate_2_dataset_sensitive"],
]
Expand Down
Loading