Merge pull request #194 from huangshiyu13/main
fix leaky_relu twice written
huangshiyu13 authored Aug 11, 2023
2 parents 5c78ecf + 1ef8a18 commit ad925cc
Showing 5 changed files with 9 additions and 9 deletions.
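In each file below, the list of nonlinearity names passed to nn.init.calculate_gain had "leaky_relu" written twice, so activation_id 3 (which selects nn.ELU() as the activation) was paired with the leaky-ReLU gain. The commit changes the fourth entry to "selu" in every occurrence and updates the --activation_id help text accordingly; a short sketch of the corrected pattern follows the diffs below.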
openrl/configs/config.py (2 changes: 1 addition & 1 deletion)
@@ -480,7 +480,7 @@ def create_config_parser():
"--activation_id",
type=int,
default=1,
help="choose 0 to use tanh, 1 to use relu, 2 to use leaky relu, 3 to use elu",
help="choose 0 to use tanh, 1 to use relu, 2 to use leaky relu, 3 to use selu",
)
parser.add_argument(
"--use_popart",
openrl/modules/networks/utils/attention.py (6 changes: 3 additions & 3 deletions)
@@ -80,7 +80,7 @@ def __init__(
active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
@@ -194,7 +194,7 @@ def __init__(self, split_shape, d_model, use_orthogonal=True, activation_id=1):
active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
@@ -252,7 +252,7 @@ def __init__(self, split_shape, d_model, use_orthogonal=True, activation_id=1):
active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
openrl/modules/networks/utils/cnn.py (2 changes: 1 addition & 1 deletion)
@@ -23,7 +23,7 @@ def __init__(
[nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
openrl/modules/networks/utils/mix.py (4 changes: 2 additions & 2 deletions)
@@ -97,7 +97,7 @@ def _convert(params):
active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
@@ -189,7 +189,7 @@ def _build_mlp_model(self, obs_shape, hidden_size, use_orthogonal, activation_id
active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
openrl/modules/networks/utils/mlp.py (4 changes: 2 additions & 2 deletions)
@@ -13,7 +13,7 @@ def __init__(self, input_dim, hidden_size, layer_N, use_orthogonal, activation_i
active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
@@ -53,7 +53,7 @@ def __init__(self, input_dim, hidden_size, use_orthogonal, activation_id):
active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
gain = nn.init.calculate_gain(
["tanh", "relu", "leaky_relu", "leaky_relu"][activation_id]
["tanh", "relu", "leaky_relu", "selu"][activation_id]
)

def init_(m):
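For context, the pattern touched by every hunk uses activation_id to index both the activation module and the nonlinearity name whose gain scales the weight initialization. torch.nn.init.calculate_gain accepts "selu" but not "elu", which is presumably why the fourth entry becomes "selu" rather than "elu". Below is a minimal sketch of the corrected pattern; the make_layer helper, the nn.Linear wrapping, and the sizes are illustrative assumptions, not the repository's code (which applies init_method inside a nested init_ helper).

import torch.nn as nn

# Minimal sketch (assumed helper, not the repository's code): build one layer
# whose weights are initialized with the gain matching its activation.
def make_layer(in_dim, out_dim, activation_id=1, use_orthogonal=True):
    active_func = [nn.Tanh(), nn.ReLU(), nn.LeakyReLU(), nn.ELU()][activation_id]
    init_method = [nn.init.xavier_uniform_, nn.init.orthogonal_][use_orthogonal]
    gain = nn.init.calculate_gain(
        ["tanh", "relu", "leaky_relu", "selu"][activation_id]  # was "leaky_relu" twice
    )
    layer = nn.Linear(in_dim, out_dim)
    init_method(layer.weight, gain=gain)   # initialization is scaled by the gain
    nn.init.constant_(layer.bias, 0.0)
    return nn.Sequential(layer, active_func)

# activation_id=3 now pairs nn.ELU() with the "selu" gain instead of the
# leaky-ReLU gain; the forward pass itself is unchanged.
block = make_layer(64, 64, activation_id=3)

Since the gain only scales the orthogonal or Xavier initialization, the fix changes how ELU layers are initialized, not how they compute their outputs.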
