Commit 3551960

train NN5 with 56new suite, no lr warmup, update learning schedule
xkykai committed Nov 4, 2024
1 parent 40dff0c commit 3551960
Showing 1 changed file with 8 additions and 8 deletions.
@@ -76,9 +76,9 @@ const grid_point_below_kappa = args["point_below_kappa"]
seed = args["random_seed"]
learning_rate = args["learning_rate"]

LES_FILE_DIRS = ["./LES2/$(file)/instantaneous_timeseries.jld2" for file in LES_suite["trainnoSO34new"]]
LES_FILE_DIRS = ["./LES2/$(file)/instantaneous_timeseries.jld2" for file in LES_suite["train56new"]]

FILE_DIR = "./training_output/NDE_NN5_Qb_Ri_nof_BBLRifirst$(grid_point_below_kappa)_wTwS_$(length(LES_FILE_DIRS))simnew_$(args["hidden_layer"])layer_$(args["hidden_layer_size"])_$(args["activation"])_$(seed)seed_$(learning_rate)lr_localbaseclosure_2Pr_6simstableRi"
FILE_DIR = "./training_output/NDE_NN5_Qb_Ri_nof_BBLRifirst$(grid_point_below_kappa)_wTwS_$(length(LES_FILE_DIRS))simnew_$(args["hidden_layer"])layer_$(args["hidden_layer_size"])_$(args["activation"])_$(seed)seed_$(learning_rate)lr_localbaseclosure_2Pr_6simstableRi_nowarmup"
mkpath(FILE_DIR)
@info FILE_DIR
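
Note: the two path definitions above are plain string interpolation. A minimal, hypothetical sketch of the same pattern (LES_suite, args, and the directory-name template here are stand-ins, not the script's real values) is:

# Hypothetical stand-ins for illustration only; the real LES_suite and args are defined earlier in the script.
LES_suite = Dict("train56new" => ["suite_run_$(i)" for i in 1:56])
args = Dict("hidden_layer" => 2, "hidden_layer_size" => 256, "activation" => "swish",
            "random_seed" => 123, "learning_rate" => 1e-4)

LES_FILE_DIRS = ["./LES2/$(file)/instantaneous_timeseries.jld2" for file in LES_suite["train56new"]]
FILE_DIR = "./training_output/demo_$(length(LES_FILE_DIRS))simnew_$(args["hidden_layer"])layer_$(args["hidden_layer_size"])_$(args["activation"])_$(args["random_seed"])seed_$(args["learning_rate"])lr_nowarmup"
mkpath(FILE_DIR)   # create the output directory (and any missing parents)
@info FILE_DIR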

@@ -897,9 +897,9 @@ function train_NDE_stochastic(ps, params, ps_baseclosure, sts, NNs, truths, x₀
N = length(indices_training)
Nbatch = cld(N, batchsize)
for iter in 1:maxiter
-if iter <= 40
-    Optimisers.adjust!(opt_state, eta=rule.eta * iter / 40)
-end
+# if iter <= 40
+#     Optimisers.adjust!(opt_state, eta=rule.eta * iter / 40)
+# end

shuffle!(rng, indices_training)
for batch in 1:Nbatch
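
This commit removes the linear learning-rate warmup (the block is now commented out above). For reference, a minimal self-contained sketch of that warmup pattern with Optimisers.jl, using stand-in parameters and a placeholder gradient rather than the script's real model, would look roughly like:

using Optimisers

function warmup_demo(; warmup_iters = 40, maxiter = 100)
    ps = (w = rand(4), b = zeros(4))                       # stand-in parameters
    rule = Optimisers.Adam(3e-4)
    opt_state = Optimisers.setup(rule, ps)
    for iter in 1:maxiter
        if iter <= warmup_iters
            # ramp eta linearly from rule.eta / warmup_iters up to rule.eta
            Optimisers.adjust!(opt_state, eta = rule.eta * iter / warmup_iters)
        end
        grad = (w = zero(ps.w), b = zero(ps.b))            # placeholder gradient (all zeros)
        opt_state, ps = Optimisers.update!(opt_state, ps, grad)
    end
    return ps
end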
@@ -962,10 +962,10 @@ end
# end_epochs = cumsum(maxiters)
# training_timeframes = [timeframes[1][1:5], timeframes[1][1:10], timeframes[1][1:15], timeframes[1][1:20], timeframes[1][1:25], timeframes[1][1:27]]

-optimizers = [Optimisers.Adam(3e-4), Optimisers.Adam(learning_rate), Optimisers.Adam(learning_rate/3), Optimisers.Adam(learning_rate/3), Optimisers.Adam(learning_rate/3)]
-maxiters = [5000, 2000, 2000, 2000, 2000]
+optimizers = [Optimisers.Adam(3e-4), Optimisers.Adam(3e-5), Optimisers.Adam(learning_rate), Optimisers.Adam(learning_rate), Optimisers.Adam(learning_rate), Optimisers.Adam(learning_rate/3), Optimisers.Adam(learning_rate/3)]
+maxiters = [2000, 5000, 2000, 2000, 2000, 2000, 2000]
end_epochs = cumsum(maxiters)
-training_timeframes = [timeframes[1][1:10], timeframes[1][1:15], timeframes[1][1:20], timeframes[1][1:25], timeframes[1][1:27]]
+training_timeframes = [timeframes[1][1:5], timeframes[1][1:5], timeframes[1][1:10], timeframes[1][1:15], timeframes[1][1:20], timeframes[1][1:25], timeframes[1][1:27]]

sim_indices = 1:length(LES_FILE_DIRS)
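
The loop that consumes this schedule sits outside the diff; assuming it pairs each stage's optimizer with its iteration budget and timeframe window, a hypothetical, runnable sketch (train_stage! is a placeholder name) is:

using Optimisers

learning_rate = 1e-4                                  # stand-in for args["learning_rate"]
timeframe = collect(1:27)                             # stand-in for timeframes[1]

optimizers = [Optimisers.Adam(3e-4), Optimisers.Adam(3e-5), Optimisers.Adam(learning_rate),
              Optimisers.Adam(learning_rate), Optimisers.Adam(learning_rate),
              Optimisers.Adam(learning_rate / 3), Optimisers.Adam(learning_rate / 3)]
maxiters = [2000, 5000, 2000, 2000, 2000, 2000, 2000]
end_epochs = cumsum(maxiters)                         # cumulative iteration count at the end of each stage
training_timeframes = [timeframe[1:5], timeframe[1:5], timeframe[1:10], timeframe[1:15],
                       timeframe[1:20], timeframe[1:25], timeframe[1:27]]

for (stage, (opt, n_iters, frames)) in enumerate(zip(optimizers, maxiters, training_timeframes))
    @info "Stage $stage: eta = $(opt.eta), $n_iters iterations, $(length(frames)) timeframes, ends at epoch $(end_epochs[stage])"
    # train_stage!(opt, n_iters, frames)              # placeholder for the actual training call
end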

