Add check on layer type
APJansen authored and scarlehoff committed Jan 24, 2024
1 parent b5fffdd commit fc2cb9f
Showing 1 changed file with 11 additions and 0 deletions: n3fit/src/n3fit/checks.py
@@ -108,6 +108,16 @@ def check_initializer(initializer):
        raise CheckError(f"Initializer {initializer} not accepted by {MetaLayer}")


def check_layer_type_implemented(parameters):
    """Checks whether the layer_type is implemented"""
    layer_type = parameters.get("layer_type")
    implemented_types = ["dense", "dense_per_flavour"]
    if layer_type not in implemented_types:
        raise CheckError(
            f"Layer type {layer_type} not implemented, must be one of {implemented_types}"
        )


def check_dropout(parameters):
    """Checks the dropout setup (positive and smaller than 1.0)"""
    dropout = parameters.get("dropout")
@@ -175,6 +185,7 @@ def wrapper_check_NN(basis, tensorboard, save, load, parameters):
    check_consistent_layers(parameters)
    check_basis_with_layers(basis, parameters)
    check_stopping(parameters)
    check_layer_type_implemented(parameters)
    check_dropout(parameters)
    check_lagrange_multipliers(parameters, "integrability")
    check_lagrange_multipliers(parameters, "positivity")
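
A quick illustration of how the new check behaves, as a minimal sketch: it assumes CheckError can be imported from n3fit.checks alongside the new function (CheckError is used in that module, as the diff shows), and the small dicts below stand in for the real hyperparameter dictionary.

from n3fit.checks import CheckError, check_layer_type_implemented

# An implemented layer type passes the check silently.
check_layer_type_implemented({"layer_type": "dense"})

# Anything outside ["dense", "dense_per_flavour"] raises a CheckError.
try:
    check_layer_type_implemented({"layer_type": "convolutional"})
except CheckError as err:
    print(err)  # Layer type convolutional not implemented, must be one of ['dense', 'dense_per_flavour']

Note that a configuration with no "layer_type" key would also raise, since parameters.get("layer_type") returns None, which is not in the implemented list.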
