Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Force process grouping in theorycovariance module #1135

Merged
merged 11 commits into from
Mar 10, 2021
Merged
Show file tree
Hide file tree
Changes from 9 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@ meta:
keywords: [theory uncertainties, 3-point]
title: NLO 3-point variations for 5 process types - DIS CC, DIS NC, DY, Top, Jets

metadata_group: nnpdf31_process

default_theory:
- theoryid: 163

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,11 +30,11 @@ Total (exp. + th.) $\chi^2$

Experimental $\chi^2$ by dataset
--------------------------------
{@groups_chi2_table@}
{@procs_chi2_table@}

Total (exp. + th.) $\chi^2$ by dataset
--------------------------------------
{@groups_chi2_table_theory@}
{@procs_chi2_table_theory@}

$\chi^2$ including only diagonal theory elements
------------------------------------------------
Expand Down
3 changes: 3 additions & 0 deletions validphys2/src/validphys/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -1358,6 +1358,9 @@ def produce_group_dataset_inputs_by_metadata(
def produce_group_dataset_inputs_by_experiment(self, data_input):
return self.produce_group_dataset_inputs_by_metadata(data_input, "experiment")

def produce_force_process_grouping(self, data_input):
    """Group the data inputs by NNPDF3.1 process type, ignoring whatever
    grouping was requested elsewhere in the runcard."""
    grouping = "nnpdf31_process"
    return self.produce_group_dataset_inputs_by_metadata(data_input, grouping)
RosalynLP marked this conversation as resolved.
Show resolved Hide resolved

def produce_scale_variation_theories(self, theoryid, point_prescription):
"""Produces a list of theoryids given a theoryid at central scales and a point
prescription. The options for the latter are '3 point', '5 point', '5bar point', '7 point'
Expand Down
44 changes: 43 additions & 1 deletion validphys2/src/validphys/results.py
Original file line number Diff line number Diff line change
Expand Up @@ -154,6 +154,7 @@ def rawdata(self):

experiments_data = collect("data", ("group_dataset_inputs_by_experiment",))

procs_data = collect("data", ("force_process_grouping",))

def groups_index(groups_data):
"""Return a pandas.MultiIndex with levels for group, dataset and point
Expand Down Expand Up @@ -193,17 +194,26 @@ def groups_index(groups_data):
def experiments_index(experiments_data):
return groups_index(experiments_data)

def procs_index(procs_data):
    """MultiIndex over process-grouped data; delegates to ``groups_index``."""
    index = groups_index(procs_data)
    return index

def groups_data_values(group_result_table):
    """Return the central data values (``data_central`` column) for the
    input groups."""
    return group_result_table["data_central"]

def procs_data_values(proc_result_table):
    """Like ``groups_data_values`` but grouped by process."""
    return proc_result_table["data_central"]

# One ``dataset_inputs_results`` entry per group, using the metadata
# grouping requested in the runcard.
groups_results = collect(
    "dataset_inputs_results", ("group_dataset_inputs_by_metadata",)
)

# As ``groups_results`` but with grouping forced to NNPDF3.1 process type.
procs_results = collect(
    "dataset_inputs_results", ("force_process_grouping",)
)

def group_result_table_no_table(groups_results, groups_index):
"""Generate a table containing the data central value, the central prediction,
Expand Down Expand Up @@ -243,12 +253,19 @@ def group_result_table(group_result_table_no_table):
"""Duplicate of group_result_table_no_table but with a table decorator."""
return group_result_table_no_table

def proc_result_table_no_table(procs_results, procs_index):
    """Per-process version of ``group_result_table_no_table``."""
    result = group_result_table_no_table(procs_results, procs_index)
    return result

@table
def proc_result_table(proc_result_table_no_table):
    """Duplicate of ``proc_result_table_no_table`` with a table decorator."""
    return proc_result_table_no_table

# ``group_result_table`` evaluated with grouping fixed to experiment.
experiment_result_table = collect(
    "group_result_table", ("group_dataset_inputs_by_experiment",)
)



@table
def group_result_table_68cl(
groups_results, group_result_table_no_table: pd.DataFrame, pdf: PDF
Expand Down Expand Up @@ -334,6 +351,12 @@ def groups_covmat(groups_covmat_no_table):
"""Duplicate of groups_covmat_no_table but with a table decorator."""
return groups_covmat_no_table

def procs_covmat_no_table(experiments_covmat_no_table, procs_index):
    """Experimental covariance matrix relabelled to the process grouping."""
    relabelled = relabel_experiments_to_groups(
        experiments_covmat_no_table, procs_index
    )
    return relabelled

@table
def procs_covmat(procs_covmat_no_table):
    """Duplicate of ``procs_covmat_no_table`` with a table decorator."""
    return procs_covmat_no_table

experiments_sqrt_covmat = collect(
"dataset_inputs_sqrt_covmat", ("group_dataset_inputs_by_experiment",)
Expand Down Expand Up @@ -401,6 +424,9 @@ def groups_normcovmat(groups_covmat, groups_data_values):
mat = df / np.outer(groups_data_array, groups_data_array)
return mat

@table
def procs_normcovmat(procs_covmat, procs_data_values):
    """Process-grouped covariance matrix normalised by the data central
    values; delegates to ``groups_normcovmat``."""
    return groups_normcovmat(procs_covmat, procs_data_values)

@table
def groups_corrmat(groups_covmat):
Expand All @@ -411,6 +437,9 @@ def groups_corrmat(groups_covmat):
mat = diag_minus_half[:, np.newaxis] * df * diag_minus_half
return mat

@table
def procs_corrmat(procs_covmat):
    """Correlation matrix for the process-grouped covariance matrix;
    delegates to ``groups_corrmat``."""
    return groups_corrmat(procs_covmat)

@table
def closure_pseudodata_replicas(
Expand Down Expand Up @@ -798,11 +827,14 @@ def groups_chi2_table(groups_data, pdf, groups_chi2, each_dataset_chi2):
return pd.DataFrame(records)


@table
def procs_chi2_table(procs_data, pdf, procs_chi2, each_dataset_chi2):
    """Per-process version of ``groups_chi2_table``."""
    return groups_chi2_table(procs_data, pdf, procs_chi2, each_dataset_chi2)

# ``groups_chi2_table`` evaluated with grouping fixed to experiment.
experiments_chi2_table = collect(
    "groups_chi2_table", ("group_dataset_inputs_by_experiment",)
)


@check_cuts_considered
@table
def closure_shifts(experiments_index, fit, use_cuts, experiments):
Expand Down Expand Up @@ -921,6 +953,9 @@ def dataset_chi2_table(chi2_stats, dataset):
"dataset_inputs_abs_chi2_data", ("group_dataset_inputs_by_metadata",)
)

# Per-group chi2 data with grouping forced to NNPDF3.1 process type;
# mirrors the per-metadata ``groups_chi2`` collection above.
# Reformatted to match the multi-line style of the sibling ``collect``
# assignments (the original left a dangling close paren on its own line).
procs_chi2 = collect(
    "dataset_inputs_abs_chi2_data", ("force_process_grouping",)
)

# Grouped chi2 data and grouped data for each fit, each evaluated in that
# fit's own context.
fits_groups_chi2_data = collect("groups_chi2", ("fits", "fitcontext"))
fits_groups = collect("groups_data", ("fits", "fitcontext",))

Expand Down Expand Up @@ -1253,6 +1288,13 @@ def groups_central_values(group_result_table):
central_theory_values = group_result_table["theory_central"]
return central_theory_values

def procs_central_values_no_table(proc_result_table_no_table):
    """Central theory predictions (``theory_central`` column) from the
    process-grouped result table."""
    return proc_result_table_no_table["theory_central"]

@table
def procs_central_values(procs_central_values_no_table):
    """Duplicate of ``procs_central_values_no_table`` with a table decorator."""
    return procs_central_values_no_table

# ``each_dataset_chi2`` across all dataspecs, and the individual datasets
# making up the current ``data``.
dataspecs_each_dataset_chi2 = collect("each_dataset_chi2", ("dataspecs",))
each_dataset = collect("dataset", ("data",))
Expand Down
Loading