Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor preprocessing #1777

Merged
merged 21 commits into from
Aug 8, 2023
Merged
Show file tree
Hide file tree
Changes from 17 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 37 additions & 25 deletions n3fit/src/n3fit/layers/preprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,28 +34,30 @@ class Preprocessing(MetaLayer):
"""

def __init__(
    self,
    flav_info: list = None,
    seed: int = 0,
    large_x: bool = True,
    **kwargs,
):
    """
    Preprocessing-prefactor layer constructor.

    Parameters
    ----------
    flav_info: list
        list of dicts, one per flavour, holding the information needed by
        ``generate_weight`` (the "smallx"/"largex" limits and, optionally,
        a "trainable" flag); mandatory, a ``None`` raises
    seed: int
        seed for the random initializer of each weight; incremented by one
        for every trainable weight that gets generated
    large_x: bool
        if False, the large-x (beta) exponents are pinned to a constant 0
    """
    if flav_info is None:
        raise ValueError(
            "Trying to instantiate a preprocessing factor with no basis information"
        )
    self.flav_info = flav_info
    self.seed = seed
    self.large_x = large_x
    # Populated by ``build``: one alpha and one beta weight per flavour
    self.alphas = []
    self.betas = []
    super().__init__(**kwargs)

def generate_weight(self, name: str, kind: str, dictionary: dict, set_to_zero: bool = False):
    """
    Generates a weight according to the flavour dictionary.

    Parameters
    ----------
    name: str
        name to be given to the generated weight
    kind: str
        where to find the limits of the weight in the dictionary
    dictionary: dict
        dictionary defining the weight: ``dictionary[kind]`` holds the
        ``(min, max)`` limits and the optional "trainable" key (default
        True) decides whether the weight is fitted
    set_to_zero: bool
        set the weight to constant 0

    Returns
    -------
    newpar
        the generated (trainable or constant) weight
    """
    constraint = None
    if set_to_zero:
        initializer = MetaLayer.init_constant(0.0)
        trainable = False
    else:
        minval, maxval = dictionary[kind]
        trainable = dictionary.get("trainable", True)
        # Set the initializer and move the seed one up
        initializer = MetaLayer.select_initializer(
            "random_uniform", minval=minval, maxval=maxval, seed=self.seed
        )
        self.seed += 1
        # If we are training, constrain the weights to be within the limits
        if trainable:
            constraint = constraints.MinMaxWeight(minval, maxval)

    # Generate the new trainable (or not) parameter
    newpar = self.builder_helper(
        name=name,
        kernel_shape=(1,),
        initializer=initializer,
        trainable=trainable,
        constraint=constraint,
    )
    return newpar

def build(self, input_shape):
    """Generate the alpha (small-x) and beta (large-x) weights of every flavour."""
    # Run through the whole basis
    for flav_dict in self.flav_info:
        flav_name = flav_dict["fl"]
        alpha_name = f"alpha_{flav_name}"
        self.alphas.append(self.generate_weight(alpha_name, "smallx", flav_dict))
        beta_name = f"beta_{flav_name}"
        # The beta exponents are frozen to 0 when large-x preprocessing is disabled
        self.betas.append(
            self.generate_weight(beta_name, "largex", flav_dict, set_to_zero=not self.large_x)
        )

    # Argument-free super() — consistent with the call in __init__
    super().build(input_shape)

def call(self, x):
    """
    Compute preprocessing prefactor x^(1-alpha) * (1-x)^beta per flavour.

    Parameters
    ----------
    x: tensor(shape=[1,N,1])

    Returns
    -------
    prefactor: tensor(shape=[1,N,F])
    """
    # Stack the per-flavour (1,)-shaped weights into a single flavour axis
    alphas = op.stack(self.alphas, axis=1)
    betas = op.stack(self.betas, axis=1)

    return x ** (1 - alphas) * (1 - x) ** betas
65 changes: 65 additions & 0 deletions n3fit/src/n3fit/tests/test_preprocessing.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
import numpy as np

from n3fit.layers import Preprocessing


def test_preprocessing():
    """Regression test of the Preprocessing layer against frozen prefactor values."""
    # taken from basic runcard
    flav_info = [
        {'fl': 'sng', 'smallx': [1.05, 1.19], 'largex': [1.47, 2.7], 'trainable': False},
        {'fl': 'g', 'smallx': [0.94, 1.25], 'largex': [0.11, 5.87], 'trainable': False},
        {'fl': 'v', 'smallx': [0.54, 0.75], 'largex': [1.15, 2.76], 'trainable': False},
        {'fl': 'v3', 'smallx': [0.21, 0.57], 'largex': [1.35, 3.08]},
        {'fl': 'v8', 'smallx': [0.52, 0.76], 'largex': [0.77, 3.56]},
        {'fl': 't3', 'smallx': [-0.37, 1.52], 'largex': [1.74, 3.39]},
        {'fl': 't8', 'smallx': [0.56, 1.29], 'largex': [1.45, 3.03]},
        {'fl': 'cp', 'smallx': [0.12, 1.19], 'largex': [1.83, 6.7]},
    ]
    prepro = Preprocessing(flav_info=flav_info, seed=0)
    np.random.seed(42)
    test_x = np.random.uniform(size=(1, 4, 1))
    test_prefactors = [
        [
            3.2668063e-01,
            6.7284244e-01,
            3.9915803e-01,
            1.5305418e-01,
            1.8249598e-01,
            7.2065055e-02,
            3.1497714e-01,
            1.3243365e-01,
        ],
        [
            4.6502685e-04,
            2.4272988e-02,
            2.2857290e-02,
            3.3989837e-04,
            1.6686485e-04,
            7.2010938e-05,
            1.4990549e-04,
            1.3058851e-04,
        ],
        [
            3.5664584e-02,
            2.0763232e-01,
            1.7362502e-01,
            2.5178187e-02,
            2.0087767e-02,
            1.0968268e-02,
            2.2659913e-02,
            1.6590673e-02,
        ],
        [
            1.0150098e-01,
            3.5557139e-01,
            2.6867521e-01,
            6.4237528e-02,
            5.9941418e-02,
            3.0898115e-02,
            7.7342905e-02,
            4.8169162e-02,
        ],
    ]
    prefactors = prepro(test_x)
    # assert_allclose reports the mismatching entries on failure,
    # unlike a bare ``assert np.allclose(...)``
    np.testing.assert_allclose(prefactors, test_prefactors, rtol=1e-5)
APJansen marked this conversation as resolved.
Show resolved Hide resolved