
Commit 2e8deb3

Move 2 tensor initializations into lazy calls to avoid allocating GPU memory when the program starts in non-GPU mode (such as CPU) (#795)
1 parent b014b24 commit 2e8deb3

2 files changed (+11, -3 lines)

ppsci/arch/activation.py

Lines changed: 7 additions & 2 deletions
@@ -146,7 +146,7 @@ def init_for_hidden_layer(layer: nn.Linear, w0: float = 30):
     "silu": Silu(),
     "sin": Sin(),
     "cos": Cos(),
-    "swish": Swish(),
+    "swish": Swish,
     "tanh": nn.Tanh(),
     "identity": nn.Identity(),
     "siren": Siren(),
@@ -166,4 +166,9 @@ def get_activation(act_name: str) -> Callable:
     if act_name.lower() not in act_func_dict:
         raise ValueError(f"act_name({act_name}) not found in act_func_dict")

-    return act_func_dict[act_name.lower()]
+    act_layer = act_func_dict[act_name.lower()]
+    if isinstance(act_layer, type) and act_name != "stan":
+        # This is an activation class, not an instance, so instantiate it manually (except for 'Stan').
+        return act_layer()
+
+    return act_layer
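
Why this helps: the old table constructed Swish() eagerly at import time, and a Swish instance owns a learnable tensor placed on the default device, so GPU memory was allocated even when only the CPU was needed. Storing the bare Swish class and letting get_activation() instantiate it on demand defers that allocation. Below is a minimal sketch of the pattern; the Swish definition here (a learnable beta created via create_parameter) is an assumed stand-in for the real ppsci layer, not a copy of it.

import paddle.nn as nn
import paddle.nn.functional as F


class Swish(nn.Layer):
    # Hypothetical Swish with a learnable beta, standing in for ppsci's version.
    def __init__(self, beta: float = 1.0):
        super().__init__()
        # create_parameter allocates a tensor on the current device; this runs
        # only when Swish() is instantiated, not when the module is imported.
        self.beta = self.create_parameter(
            shape=[1],
            default_initializer=nn.initializer.Constant(beta),
        )

    def forward(self, x):
        return x * F.sigmoid(self.beta * x)


# Storing the class (not an instance) keeps module import free of tensor allocations.
act_func_dict = {
    "swish": Swish,
    "tanh": nn.Tanh(),
}


def get_activation(act_name: str) -> nn.Layer:
    act_layer = act_func_dict[act_name.lower()]
    if isinstance(act_layer, type):
        # Lazy instantiation: the beta parameter tensor is created only here.
        return act_layer()
    return act_layer
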

ppsci/utils/symbolic.py

Lines changed: 4 additions & 1 deletion
@@ -97,7 +97,7 @@
     sp.Max: paddle.maximum,
     sp.Min: paddle.minimum,
     sp.Abs: paddle.abs,
-    sp.Heaviside: functools.partial(paddle.heaviside, y=paddle.zeros([])),
+    sp.Heaviside: paddle.heaviside,
     sp.sign: paddle.sign,
     sp.ceiling: paddle.ceil,
     sp.floor: paddle.floor,
@@ -214,6 +214,9 @@ def __init__(
         elif self.expr.func == sp.Heaviside:
            self._apply_func = self._heaviside_operator_func
            self._auxiliary_func = SYMPY_TO_PADDLE[sp.Heaviside]
+           self._auxiliary_func = functools.partial(
+               self._auxiliary_func, y=paddle.zeros([])
+           )
        elif self.expr.func == sp.Min:
            self._apply_func = self._minimum_operator_func
        elif self.expr.func == sp.Max:
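
The same lazy-binding idea applies here: the module-level SYMPY_TO_PADDLE table used to hold functools.partial(paddle.heaviside, y=paddle.zeros([])), so importing the module already allocated a zero tensor on the default device. After the change, the table keeps only the bare paddle.heaviside, and the partial with its zero tensor is built inside __init__, i.e. only when a Heaviside node is actually constructed. A rough sketch under that reading follows; HeavisideNode is a hypothetical stand-in, not the real ppsci node class.

import functools

import paddle
import sympy as sp

# Import-time table: only bare callables, so no tensors are created here.
SYMPY_TO_PADDLE = {
    sp.Heaviside: paddle.heaviside,
    sp.sign: paddle.sign,
}


class HeavisideNode:
    # Hypothetical stand-in for the ppsci operator node wrapping sp.Heaviside.
    def __init__(self):
        # The zero tensor for heaviside's second argument is allocated only when
        # a node is constructed, after the program has already picked its device.
        self._auxiliary_func = functools.partial(
            SYMPY_TO_PADDLE[sp.Heaviside], y=paddle.zeros([])
        )

    def forward(self, x: paddle.Tensor) -> paddle.Tensor:
        return self._auxiliary_func(x)
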
