Skip to content

Commit

Permalink
Fix a couple of bugs, notably use json instead of pickle in scen model
Browse files Browse the repository at this point in the history
Pickle doesn't like negative numbers.
  • Loading branch information
manuelma committed May 24, 2023
1 parent e3f2eff commit 4a9e6a2
Show file tree
Hide file tree
Showing 6 changed files with 29 additions and 34 deletions.
2 changes: 1 addition & 1 deletion spinetoolbox/spine_db_editor/graphics_items.py
Original file line number Diff line number Diff line change
Expand Up @@ -453,7 +453,7 @@ def _refresh_entity_class_lists(self):
db_map_entity_class_ids = {db_map: {self.entity_class_id(db_map)} for db_map in self.db_maps}
for db_map, ent_clss in self.db_mngr.find_cascading_entity_classes(db_map_entity_class_ids).items():
for ent_cls in ent_clss:
ent_cls = ent_cls.copy()
ent_cls = ent_cls._extended()
ent_cls["dimension_id_list"] = list(ent_cls["dimension_id_list"])
ent_cls["entity_ids"] = entity_ids_per_class.get((db_map, ent_cls["id"]), set())
self._db_map_entity_class_lists.setdefault(ent_cls["name"], []).append((db_map, ent_cls))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ def paste_alternative_mime_data(self, mime_data, database_item):
"""
alternative_data = json.loads(mime_data.data(mime_types.ALTERNATIVE_DATA).data())
names_to_descriptions = {}
for db_key, alternative_ids in alternative_data.items():
for db_key in alternative_data:
db_map = self.db_mngr.db_map_from_key(db_key)
items = self.db_mngr.get_items(db_map, "alternative", only_visible=False)
names_to_descriptions.update({i.name: i.description for i in items})
Expand Down
4 changes: 4 additions & 0 deletions spinetoolbox/spine_db_editor/mvcmodels/scenario_item.py
Original file line number Diff line number Diff line change
Expand Up @@ -93,6 +93,10 @@ def update_alternative_id_list(self):
elif curr_alt_count > alt_count:
removed_count = curr_alt_count - alt_count
self.remove_children(alt_count, removed_count)
else:
self.model.dataChanged.emit(
self.model.index(0, 0, self.index()), self.model.index(self.child_count() - 1, 0, self.index())
)

def accepts_item(self, item, db_map):
    """Return True when *item* is a scenario-alternative row belonging to this scenario in *db_map*."""
    if db_map != self.db_map:
        return False
    return item["scenario_id"] == self.id
Expand Down
26 changes: 13 additions & 13 deletions spinetoolbox/spine_db_editor/mvcmodels/scenario_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,9 +9,9 @@
# this program. If not, see <http://www.gnu.org/licenses/>.
######################################################################################################################
"""Contains scenario tree model."""
import pickle
import json

from PySide6.QtCore import QMimeData, Qt
from PySide6.QtCore import QMimeData, Qt, QByteArray
from spinetoolbox.helpers import unique_name
from .tree_model_base import TreeModelBase
from .scenario_item import ScenarioDBItem, ScenarioAlternativeItem, ScenarioItem
Expand Down Expand Up @@ -66,7 +66,7 @@ def mimeData(self, indexes):
db_item = item.parent_item
db_key = self.db_mngr.db_map_key(db_item.db_map)
scenario_data.setdefault(db_key, []).append(item.id)
mime.setData(mime_types.SCENARIO_DATA, pickle.dumps(scenario_data))
mime.setData(mime_types.SCENARIO_DATA, QByteArray(json.dumps(scenario_data)))
mime.setText(two_column_as_csv(scenario_indexes))
return mime
alternative_indexes = []
Expand All @@ -84,19 +84,19 @@ def mimeData(self, indexes):
db_item = item.parent_item.parent_item
db_key = self.db_mngr.db_map_key(db_item.db_map)
alternative_data.setdefault(db_key, []).append(item.alternative_id)
mime.setData(mime_types.ALTERNATIVE_DATA, pickle.dumps(alternative_data))
mime.setData(mime_types.ALTERNATIVE_DATA, QByteArray(json.dumps(alternative_data)))
mime.setText(two_column_as_csv(alternative_indexes))
return mime
return None

def canDropMimeData(self, data, drop_action, row, column, parent):
def canDropMimeData(self, mime_data, drop_action, row, column, parent):
if drop_action & self.supportedDropActions() == 0:
return False
if not data.hasFormat(mime_types.ALTERNATIVE_DATA):
if not mime_data.hasFormat(mime_types.ALTERNATIVE_DATA):
return False
try:
payload = pickle.loads(data.data(mime_types.ALTERNATIVE_DATA))
except pickle.UnpicklingError:
payload = json.loads(mime_data.data(mime_types.ALTERNATIVE_DATA).data())
except json.JSONDecodeError:
return False
if not isinstance(payload, dict):
return False
Expand All @@ -115,12 +115,12 @@ def canDropMimeData(self, data, drop_action, row, column, parent):
db_item = self.db_item(parent_item)
if db_map != db_item.db_map:
return False
if data.hasFormat("application/vnd.spinetoolbox.scenario-alternative"):
if mime_data.hasFormat("application/vnd.spinetoolbox.scenario-alternative"):
# Check that reordering only happens within the same scenario
return False
return True

def dropMimeData(self, data, drop_action, row, column, parent):
def dropMimeData(self, mime_data, drop_action, row, column, parent):
# This function expects that mime_data has been verified by canDropMimeData() already.
scenario_item = self.item_from_index(parent)
if not isinstance(scenario_item, ScenarioItem):
Expand All @@ -131,7 +131,7 @@ def dropMimeData(self, data, drop_action, row, column, parent):
old_alternative_id_list = list(scenario_item.alternative_id_list)
if row == -1:
row = len(old_alternative_id_list)
db_map_key, alternative_ids = pickle.loads(data.data(mime_types.ALTERNATIVE_DATA)).popitem()
_db_map_key, alternative_ids = json.loads(mime_data.data(mime_types.ALTERNATIVE_DATA).data()).popitem()
alternative_id_list = [id_ for id_ in old_alternative_id_list[:row] if id_ not in alternative_ids]
alternative_id_list += alternative_ids
alternative_id_list += [id_ for id_ in old_alternative_id_list[row:] if id_ not in alternative_ids]
Expand All @@ -151,7 +151,7 @@ def paste_alternative_mime_data(self, mime_data, row, scenario_item):
if row == -1:
row = len(old_alternative_id_list)
data_to_add = {}
for db_map_key, alternative_ids in pickle.loads(mime_data.data(mime_types.ALTERNATIVE_DATA)).items():
for db_map_key, alternative_ids in json.loads(mime_data.data(mime_types.ALTERNATIVE_DATA).data()).items():
target_db_map = self.db_mngr.db_map_from_key(db_map_key)
if target_db_map != scenario_item.db_map:
continue
Expand All @@ -175,7 +175,7 @@ def paste_scenario_mime_data(self, mime_data, db_item):
existing_alternatives = {
i.name for i in self.db_mngr.get_items(db_item.db_map, "alternative", only_visible=False)
}
for db_map_key, scenario_ids in pickle.loads(mime_data.data(mime_types.SCENARIO_DATA)).items():
for db_map_key, scenario_ids in json.loads(mime_data.data(mime_types.SCENARIO_DATA).data()).items():
db_map = self.db_mngr.db_map_from_key(db_map_key)
if db_map is db_item.db_map:
continue
Expand Down
9 changes: 0 additions & 9 deletions spinetoolbox/spine_db_editor/mvcmodels/tree_item_utility.py
Original file line number Diff line number Diff line change
Expand Up @@ -134,15 +134,6 @@ def insert_children_sorted(self, children):
return False
return True

def _resort(self):
    """Re-sort the non-empty children in place and notify the model of the change.

    The last child (the "empty" placeholder row) is kept at the end and excluded
    from sorting.
    """
    # FIXME MM Needed?
    *sortable_children, empty_child = self.children
    sortable_children.sort(key=self._children_sort_key)
    self.children = sortable_children + [empty_child]
    first_index = self.model.index_from_item(self.child(0))
    last_index = self.model.index_from_item(self.child(-1))
    self.model.dataChanged.emit(first_index, last_index)


class FetchMoreMixin:
def __init__(self, *args, **kwargs):
Expand Down
20 changes: 10 additions & 10 deletions spinetoolbox/spine_db_editor/widgets/spine_db_editor.py
Original file line number Diff line number Diff line change
Expand Up @@ -479,6 +479,9 @@ def import_from_json(self, file_path):
except json.decoder.JSONDecodeError as err:
self.msg_error.emit(f"File {file_path} is not a valid json: {err}")
return
with DatabaseMapping("sqlite://", create=True) as db_map:
import_data(db_map, **data)
data = export_data(db_map)

This comment has been minimized.

Copy link
@soininen

soininen Apr 29, 2024

Contributor

@manuelma Do you recall why you added these lines here? With this detour, you cannot import e.g. a json that contains entities only because the empty temporary database does not contain entity classes, see #2725.

This comment has been minimized.

Copy link
@manuelma

manuelma Apr 29, 2024

Author Collaborator

Right, I missed that.

The reason for these lines is to be able to import JSON files with the old format ("object_classes", "relationship_classes", etc.). By importing into an empty DB and then exporting from that, the exported data has the new keys ("entity_classes", etc.). Note that self.import_data expects the data to have the new keys (I think).

This comment has been minimized.

Copy link
@manuelma

manuelma Apr 29, 2024

Author Collaborator

So we need a better system to translate the old format to the new format it seems.

This comment has been minimized.

Copy link
@soininen

soininen Apr 29, 2024

Contributor

Note that self.import_data expects the data to have the new keys (I think).

Indeed, I missed that. Thanks a lot for the information!

This comment has been minimized.

Copy link
@manuelma

manuelma Apr 29, 2024

Author Collaborator

No worries, nice catch!

self.import_data(data)
filename = os.path.split(file_path)[1]
self.msg.emit(f"File {filename} successfully imported.")
Expand Down Expand Up @@ -534,16 +537,13 @@ def _clean_up_export_items_dialog(self):

@Slot(bool)
def export_session(self, checked=False):
"""Exports changes made in the current session as reported by DiffDatabaseMapping."""
db_map_diff_ids = {db_map: db_map.diff_ids() for db_map in self.db_maps} # FIXME: diff_ids()
db_map_ent_cls_ids = {db_map: diff_ids["entity_class"] for db_map, diff_ids in db_map_diff_ids.items()}
db_map_ent_ids = {db_map: diff_ids["entity"] for db_map, diff_ids in db_map_diff_ids.items()}
db_map_par_val_lst_ids = {
db_map: diff_ids["parameter_value_list"] for db_map, diff_ids in db_map_diff_ids.items()
}
db_map_par_def_ids = {db_map: diff_ids["parameter_definition"] for db_map, diff_ids in db_map_diff_ids.items()}
db_map_par_val_ids = {db_map: diff_ids["parameter_value"] for db_map, diff_ids in db_map_diff_ids.items()}
db_map_ent_group_ids = {db_map: diff_ids["entity_group"] for db_map, diff_ids in db_map_diff_ids.items()}
"""Exports changes made in the current session."""
db_map_ent_cls_ids = {db_map: db_map.cache.dirty_ids("entity_class") for db_map in self.db_maps}
db_map_ent_ids = {db_map: db_map.cache.dirty_ids("entity") for db_map in self.db_maps}
db_map_par_val_lst_ids = {db_map: db_map.cache.dirty_ids("parameter_value_list") for db_map in self.db_maps}
db_map_par_def_ids = {db_map: db_map.cache.dirty_ids("parameter_definition") for db_map in self.db_maps}
db_map_par_val_ids = {db_map: db_map.cache.dirty_ids("parameter_value") for db_map in self.db_maps}
db_map_ent_group_ids = {db_map: db_map.cache.dirty_ids("entity_group") for db_map in self.db_maps}
parcel = SpineDBParcel(self.db_mngr)
parcel.push_entity_class_ids(db_map_ent_cls_ids)
parcel.push_entity_ids(db_map_ent_ids)
Expand Down

0 comments on commit 4a9e6a2

Please sign in to comment.