Use weights_only=True from PyTorch 2.4 (#423)
Fixes #422 by adding the `weights_only=True` argument to `torch.load`
in `io.py`. This guards against the warning about loading arbitrary
pickled data. The types `stype` and `StatType` were added to PyTorch's
safe globals list so they can still be deserialized under `weights_only=True`.
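A minimal usage sketch of how these pieces fit together on PyTorch >= 2.4 (the file path is a placeholder, and the explicit `add_safe_globals` call is shown only for illustration, since importing `torch_frame` already registers these types, as in the `__init__.py` hunk below):

```python
import torch
import torch_frame
from torch_frame import stype
from torch_frame.data.stats import StatType

# Under weights_only=True, torch.load only unpickles allowlisted types,
# so the torch_frame enums must be registered as safe globals.
torch.serialization.add_safe_globals([stype, StatType])

# torch_frame.load now passes weights_only=True to torch.load on
# PyTorch >= 2.4 and returns the TensorFrame plus optional column stats.
tensor_frame, col_stats = torch_frame.load('tensor_frame.pt')
```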

By: Neel Kondapalli (neel2h06@gmail.com)

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Co-authored-by: Akihiro Nitta <nitta@akihironitta.com>
3 people authored Aug 24, 2024
1 parent 59994ec commit 3710d2f
Showing 4 changed files with 25 additions and 4 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.md
@@ -11,6 +11,8 @@ The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/).

### Changed

- Set `weights_only=True` in `torch_frame.load` from PyTorch 2.4 ([#423](https://github.com/pyg-team/pytorch-frame/pull/423))

### Deprecated

### Removed
16 changes: 15 additions & 1 deletion torch_frame/__init__.py
@@ -12,13 +12,27 @@
    embedding,
)
from .data import TensorFrame
from .typing import TaskType, Metric, DataFrame, NAStrategy
from .typing import (
    TaskType,
    Metric,
    DataFrame,
    NAStrategy,
    WITH_PT24,
)
from torch_frame.utils import save, load, cat # noqa
import torch_frame.data # noqa
import torch_frame.datasets # noqa
import torch_frame.nn # noqa
import torch_frame.gbdt # noqa

if WITH_PT24:
    import torch

    torch.serialization.add_safe_globals([
        stype,
        torch_frame.data.stats.StatType,
    ])

__version__ = '0.2.3'

__all__ = [
4 changes: 4 additions & 0 deletions torch_frame/typing.py
@@ -4,11 +4,15 @@
from typing import Dict, List, Mapping, Union

import pandas as pd
import torch
from torch import Tensor

from torch_frame.data.multi_embedding_tensor import MultiEmbeddingTensor
from torch_frame.data.multi_nested_tensor import MultiNestedTensor

WITH_PT20 = int(torch.__version__.split('.')[0]) >= 2
WITH_PT24 = WITH_PT20 and int(torch.__version__.split('.')[1]) >= 4


class Metric(Enum):
r"""The metric.
7 changes: 4 additions & 3 deletions torch_frame/utils/io.py
@@ -13,7 +13,7 @@
)
from torch_frame.data.multi_tensor import _MultiTensor
from torch_frame.data.stats import StatType
from torch_frame.typing import TensorData
from torch_frame.typing import WITH_PT24, TensorData


def serialize_feat_dict(
@@ -80,7 +80,8 @@ def save(tensor_frame: TensorFrame,


def load(
    path: str, device: torch.device | None = None
    path: str,
    device: torch.device | None = None,
) -> tuple[TensorFrame, dict[str, dict[StatType, Any]] | None]:
r"""Load saved :class:`TensorFrame` object and optional :obj:`col_stats`
from a specified path.
@@ -95,7 +96,7 @@ def load(
        tuple: A tuple of loaded :class:`TensorFrame` object and
            optional :obj:`col_stats`.
    """
    tf_dict, col_stats = torch.load(path)
    tf_dict, col_stats = torch.load(path, weights_only=WITH_PT24)
    tf_dict['feat_dict'] = deserialize_feat_dict(
        tf_dict.pop('feat_serialized_dict'))
    tensor_frame = TensorFrame(**tf_dict)
