diff --git a/FKTable Playground.ipynb b/FKTable Playground.ipynb index 05583078d8..29acfc8932 100644 --- a/FKTable Playground.ipynb +++ b/FKTable Playground.ipynb @@ -15,7 +15,7 @@ "from validphys.convolution import central_predictions\n", "\n", "profile = _get_nnpdf_profile()\n", - "yaml_db = Path(profile[\"data_path\"]) / \"yamldb\"" + "#yaml_db = Path(profile[\"data_path\"]) / \"yamldb\"" ] }, { @@ -43,29 +43,492 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 7, "id": "051581e1", "metadata": { "scrolled": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "NMCPD_dw_ite\n", + "\n", + "-- Reading COMMONDATA for Dataset: NMCPD_dw_ite\n", + "nData: 260 nSys: 105\n", + "-- COMMONDATA Files for NMCPD_dw_ite successfully read.\n", + "\n", + "NMC\n", + "\n", + "-- Reading COMMONDATA for Dataset: NMC\n", + "nData: 292 nSys: 16\n", + "-- COMMONDATA Files for NMC successfully read.\n", + "\n", + "SLACP_dwsh\n", + "\n", + "-- Reading COMMONDATA for Dataset: SLACP_dwsh\n", + "nData: 211 nSys: 3\n", + "-- COMMONDATA Files for SLACP_dwsh successfully read.\n", + "\n", + "SLACD_dw_ite\n", + "\n", + "-- Reading COMMONDATA for Dataset: SLACD_dw_ite\n", + "nData: 211 nSys: 103\n", + "-- COMMONDATA Files for SLACD_dw_ite successfully read.\n", + "\n", + "BCDMSP_dwsh\n", + "\n", + "-- Reading COMMONDATA for Dataset: BCDMSP_dwsh\n", + "nData: 351 nSys: 11\n", + "-- COMMONDATA Files for BCDMSP_dwsh successfully read.\n", + "\n", + "BCDMSD_dw_ite\n", + "\n", + "-- Reading COMMONDATA for Dataset: BCDMSD_dw_ite\n", + "nData: 254 nSys: 108\n", + "-- COMMONDATA Files for BCDMSD_dw_ite successfully read.\n", + "\n", + "CHORUSNUPb_dw_ite\n", + "\n", + "-- Reading COMMONDATA for Dataset: CHORUSNUPb_dw_ite\n", + "nData: 607 nSys: 1014\n", + "-- COMMONDATA Files for CHORUSNUPb_dw_ite successfully read.\n", + "\n", + "CHORUSNBPb_dw_ite\n", + "\n", + "-- Reading COMMONDATA for Dataset: CHORUSNBPb_dw_ite\n", + "nData: 607 nSys: 114\n", + "-- COMMONDATA Files for CHORUSNBPb_dw_ite successfully read.\n", + "\n", + "NTVNUDMNFe_dw_ite\n", + "\n", + "-- Reading COMMONDATA for Dataset: NTVNUDMNFe_dw_ite\n", + "nData: 45 nSys: 1003\n", + "-- COMMONDATA Files for NTVNUDMNFe_dw_ite successfully read.\n", + "\n", + "NTVNBDMNFe_dw_ite\n", + "\n", + "-- Reading COMMONDATA for Dataset: NTVNBDMNFe_dw_ite\n", + "nData: 45 nSys: 103\n", + "-- COMMONDATA Files for NTVNBDMNFe_dw_ite successfully read.\n", + "\n", + "HERACOMBNCEM\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMBNCEM\n", + "nData: 159 nSys: 170\n", + "-- COMMONDATA Files for HERACOMBNCEM successfully read.\n", + "\n", + "HERACOMBNCEP460\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMBNCEP460\n", + "nData: 209 nSys: 170\n", + "-- COMMONDATA Files for HERACOMBNCEP460 successfully read.\n", + "\n", + "HERACOMBNCEP575\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMBNCEP575\n", + "nData: 260 nSys: 170\n", + "-- COMMONDATA Files for HERACOMBNCEP575 successfully read.\n", + "\n", + "HERACOMBNCEP820\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMBNCEP820\n", + "nData: 112 nSys: 170\n", + "-- COMMONDATA Files for HERACOMBNCEP820 successfully read.\n", + "\n", + "HERACOMBNCEP920\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMBNCEP920\n", + "nData: 485 nSys: 170\n", + "-- COMMONDATA Files for HERACOMBNCEP920 successfully read.\n", + "\n", + "HERACOMBCCEM\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMBCCEM\n", + "nData: 42 nSys: 170\n", + "-- 
COMMONDATA Files for HERACOMBCCEM successfully read.\n", + "\n", + "HERACOMBCCEP\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMBCCEP\n", + "nData: 39 nSys: 170\n", + "-- COMMONDATA Files for HERACOMBCCEP successfully read.\n", + "\n", + "HERACOMB_SIGMARED_C\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMB_SIGMARED_C\n", + "nData: 52 nSys: 167\n", + "-- COMMONDATA Files for HERACOMB_SIGMARED_C successfully read.\n", + "\n", + "HERACOMB_SIGMARED_B\n", + "\n", + "-- Reading COMMONDATA for Dataset: HERACOMB_SIGMARED_B\n", + "nData: 27 nSys: 167\n", + "-- COMMONDATA Files for HERACOMB_SIGMARED_B successfully read.\n", + "\n", + "CDFZRAP_NEW\n", + "\n", + "-- Reading COMMONDATA for Dataset: CDFZRAP_NEW\n", + "nData: 28 nSys: 11\n", + "-- COMMONDATA Files for CDFZRAP_NEW successfully read.\n", + "\n", + "D0ZRAP_40\n", + "\n", + "-- Reading COMMONDATA for Dataset: D0ZRAP_40\n", + "nData: 28 nSys: 1\n", + "-- COMMONDATA Files for D0ZRAP_40 successfully read.\n", + "\n", + "ATLASWZRAP36PB\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASWZRAP36PB\n", + "nData: 30 nSys: 32\n", + "-- COMMONDATA Files for ATLASWZRAP36PB successfully read.\n", + "\n", + "ATLASZHIGHMASS49FB\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASZHIGHMASS49FB\n", + "nData: 13 nSys: 11\n", + "-- COMMONDATA Files for ATLASZHIGHMASS49FB successfully read.\n", + "\n", + "ATLASLOMASSDY11EXT\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASLOMASSDY11EXT\n", + "nData: 6 nSys: 8\n", + "-- COMMONDATA Files for ATLASLOMASSDY11EXT successfully read.\n", + "\n", + "ATLASWZRAP11CC\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASWZRAP11CC\n", + "nData: 46 nSys: 133\n", + "-- COMMONDATA Files for ATLASWZRAP11CC successfully read.\n", + "\n", + "ATLASWZRAP11CF\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASWZRAP11CF\n", + "nData: 15 nSys: 133\n", + "-- COMMONDATA Files for ATLASWZRAP11CF successfully read.\n", + "\n", + "ATLASDY2D8TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASDY2D8TEV\n", + "nData: 48 nSys: 37\n", + "-- COMMONDATA Files for ATLASDY2D8TEV successfully read.\n", + "\n", + "ATLAS_DY_2D_8TEV_LOWMASS\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_DY_2D_8TEV_LOWMASS\n", + "nData: 84 nSys: 277\n", + "-- COMMONDATA Files for ATLAS_DY_2D_8TEV_LOWMASS successfully read.\n", + "\n", + "ATLAS_WZ_TOT_13TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_WZ_TOT_13TEV\n", + "nData: 3 nSys: 4\n", + "-- COMMONDATA Files for ATLAS_WZ_TOT_13TEV successfully read.\n", + "\n", + "ATLAS_WP_JET_8TEV_PT\n", + "ATLAS_WM_JET_8TEV_PT\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_WM_JET_8TEV_PT\n", + "nData: 16 nSys: 124\n", + "-- COMMONDATA Files for ATLAS_WM_JET_8TEV_PT successfully read.\n", + "\n", + "ATLASZPT8TEVMDIST\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASZPT8TEVMDIST\n", + "nData: 64 nSys: 102\n", + "-- COMMONDATA Files for ATLASZPT8TEVMDIST successfully read.\n", + "\n", + "ATLASZPT8TEVYDIST\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASZPT8TEVYDIST\n", + "nData: 120 nSys: 102\n", + "-- COMMONDATA Files for ATLASZPT8TEVYDIST successfully read.\n", + "\n", + "ATLASTTBARTOT7TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASTTBARTOT7TEV\n", + "nData: 1 nSys: 3\n", + "-- COMMONDATA Files for ATLASTTBARTOT7TEV successfully read.\n", + "\n", + "ATLASTTBARTOT8TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASTTBARTOT8TEV\n", + "nData: 1 nSys: 3\n", + "-- COMMONDATA Files for ATLASTTBARTOT8TEV successfully read.\n", + 
"\n", + "ATLAS_TTBARTOT_13TEV_FULLLUMI\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_TTBARTOT_13TEV_FULLLUMI\n", + "nData: 1 nSys: 2\n", + "-- COMMONDATA Files for ATLAS_TTBARTOT_13TEV_FULLLUMI successfully read.\n", + "\n", + "ATLAS_TTB_DIFF_8TEV_LJ_TRAPNORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_TTB_DIFF_8TEV_LJ_TRAPNORM\n", + "nData: 5 nSys: 135\n", + "-- COMMONDATA Files for ATLAS_TTB_DIFF_8TEV_LJ_TRAPNORM successfully read.\n", + "\n", + "ATLAS_TTB_DIFF_8TEV_LJ_TTRAPNORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_TTB_DIFF_8TEV_LJ_TTRAPNORM\n", + "nData: 5 nSys: 135\n", + "-- COMMONDATA Files for ATLAS_TTB_DIFF_8TEV_LJ_TTRAPNORM successfully read.\n", + "\n", + "ATLAS_TOPDIFF_DILEPT_8TEV_TTRAPNORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_TOPDIFF_DILEPT_8TEV_TTRAPNORM\n", + "nData: 5 nSys: 5\n", + "-- COMMONDATA Files for ATLAS_TOPDIFF_DILEPT_8TEV_TTRAPNORM successfully read.\n", + "\n", + "ATLAS_1JET_8TEV_R06_DEC\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_1JET_8TEV_R06_DEC\n", + "nData: 171 nSys: 677\n", + "-- COMMONDATA Files for ATLAS_1JET_8TEV_R06_DEC successfully read.\n", + "\n", + "ATLAS_2JET_7TEV_R06\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_2JET_7TEV_R06\n", + "nData: 90 nSys: 474\n", + "-- COMMONDATA Files for ATLAS_2JET_7TEV_R06 successfully read.\n", + "\n", + "ATLASPHT15_SF\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLASPHT15_SF\n", + "nData: 53 nSys: 2\n", + "-- COMMONDATA Files for ATLASPHT15_SF successfully read.\n", + "\n", + "ATLAS_SINGLETOP_TCH_R_7TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_SINGLETOP_TCH_R_7TEV\n", + "nData: 1 nSys: 13\n", + "-- COMMONDATA Files for ATLAS_SINGLETOP_TCH_R_7TEV successfully read.\n", + "\n", + "ATLAS_SINGLETOP_TCH_R_13TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_SINGLETOP_TCH_R_13TEV\n", + "nData: 1 nSys: 1\n", + "-- COMMONDATA Files for ATLAS_SINGLETOP_TCH_R_13TEV successfully read.\n", + "\n", + "ATLAS_SINGLETOP_TCH_DIFF_7TEV_T_RAP_NORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_SINGLETOP_TCH_DIFF_7TEV_T_RAP_NORM\n", + "nData: 3 nSys: 17\n", + "-- COMMONDATA Files for ATLAS_SINGLETOP_TCH_DIFF_7TEV_T_RAP_NORM successfully read.\n", + "\n", + "ATLAS_SINGLETOP_TCH_DIFF_7TEV_TBAR_RAP_NORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_SINGLETOP_TCH_DIFF_7TEV_TBAR_RAP_NORM\n", + "nData: 3 nSys: 15\n", + "-- COMMONDATA Files for ATLAS_SINGLETOP_TCH_DIFF_7TEV_TBAR_RAP_NORM successfully read.\n", + "\n", + "ATLAS_SINGLETOP_TCH_DIFF_8TEV_T_RAP_NORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_SINGLETOP_TCH_DIFF_8TEV_T_RAP_NORM\n", + "nData: 3 nSys: 31\n", + "-- COMMONDATA Files for ATLAS_SINGLETOP_TCH_DIFF_8TEV_T_RAP_NORM successfully read.\n", + "\n", + "ATLAS_SINGLETOP_TCH_DIFF_8TEV_TBAR_RAP_NORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: ATLAS_SINGLETOP_TCH_DIFF_8TEV_TBAR_RAP_NORM\n", + "nData: 3 nSys: 31\n", + "-- COMMONDATA Files for ATLAS_SINGLETOP_TCH_DIFF_8TEV_TBAR_RAP_NORM successfully read.\n", + "\n", + "CMSWEASY840PB\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSWEASY840PB\n", + "nData: 11 nSys: 11\n", + "-- COMMONDATA Files for CMSWEASY840PB successfully read.\n", + "\n", + "CMSWMASY47FB\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSWMASY47FB\n", + "nData: 11 nSys: 11\n", + "-- COMMONDATA Files for CMSWMASY47FB successfully read.\n", + "\n", + "CMSDY2D11\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSDY2D11\n", + "nData: 132 nSys: 133\n", + "-- 
COMMONDATA Files for CMSDY2D11 successfully read.\n", + "\n", + "CMSWMU8TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSWMU8TEV\n", + "nData: 22 nSys: 45\n", + "-- COMMONDATA Files for CMSWMU8TEV successfully read.\n", + "\n", + "CMSZDIFF12\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSZDIFF12\n", + "nData: 50 nSys: 52\n", + "-- COMMONDATA Files for CMSZDIFF12 successfully read.\n", + "\n" + ] + }, + { + "name": "stdout", + "output_type": "stream", + "text": [ + "CMS_2JET_7TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_2JET_7TEV\n", + "nData: 54 nSys: 88\n", + "-- COMMONDATA Files for CMS_2JET_7TEV successfully read.\n", + "\n", + "CMS_1JET_8TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_1JET_8TEV\n", + "nData: 239 nSys: 293\n", + "-- COMMONDATA Files for CMS_1JET_8TEV successfully read.\n", + "\n", + "CMSTTBARTOT7TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSTTBARTOT7TEV\n", + "nData: 1 nSys: 2\n", + "-- COMMONDATA Files for CMSTTBARTOT7TEV successfully read.\n", + "\n", + "CMSTTBARTOT8TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSTTBARTOT8TEV\n", + "nData: 1 nSys: 2\n", + "-- COMMONDATA Files for CMSTTBARTOT8TEV successfully read.\n", + "\n", + "CMSTTBARTOT13TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSTTBARTOT13TEV\n", + "nData: 1 nSys: 2\n", + "-- COMMONDATA Files for CMSTTBARTOT13TEV successfully read.\n", + "\n", + "CMSTOPDIFF8TEVTTRAPNORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSTOPDIFF8TEVTTRAPNORM\n", + "nData: 10 nSys: 21\n", + "-- COMMONDATA Files for CMSTOPDIFF8TEVTTRAPNORM successfully read.\n", + "\n", + "CMSTTBARTOT5TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMSTTBARTOT5TEV\n", + "nData: 1 nSys: 2\n", + "-- COMMONDATA Files for CMSTTBARTOT5TEV successfully read.\n", + "\n", + "CMS_TTBAR_2D_DIFF_MTT_TRAP_NORM\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_TTBAR_2D_DIFF_MTT_TRAP_NORM\n", + "nData: 16 nSys: 44\n", + "-- COMMONDATA Files for CMS_TTBAR_2D_DIFF_MTT_TRAP_NORM successfully read.\n", + "\n", + "CMS_TTB_DIFF_13TEV_2016_2L_TRAP\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_TTB_DIFF_13TEV_2016_2L_TRAP\n", + "nData: 10 nSys: 10\n", + "-- COMMONDATA Files for CMS_TTB_DIFF_13TEV_2016_2L_TRAP successfully read.\n", + "\n", + "CMS_TTB_DIFF_13TEV_2016_LJ_TRAP\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_TTB_DIFF_13TEV_2016_LJ_TRAP\n", + "nData: 11 nSys: 11\n", + "-- COMMONDATA Files for CMS_TTB_DIFF_13TEV_2016_LJ_TRAP successfully read.\n", + "\n", + "CMS_SINGLETOP_TCH_TOT_7TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_SINGLETOP_TCH_TOT_7TEV\n", + "nData: 1 nSys: 3\n", + "-- COMMONDATA Files for CMS_SINGLETOP_TCH_TOT_7TEV successfully read.\n", + "\n", + "CMS_SINGLETOP_TCH_R_8TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_SINGLETOP_TCH_R_8TEV\n", + "nData: 1 nSys: 1\n", + "-- COMMONDATA Files for CMS_SINGLETOP_TCH_R_8TEV successfully read.\n", + "\n", + "CMS_SINGLETOP_TCH_R_13TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: CMS_SINGLETOP_TCH_R_13TEV\n", + "nData: 1 nSys: 1\n", + "-- COMMONDATA Files for CMS_SINGLETOP_TCH_R_13TEV successfully read.\n", + "\n", + "LHCBZ940PB\n", + "\n", + "-- Reading COMMONDATA for Dataset: LHCBZ940PB\n", + "nData: 9 nSys: 11\n", + "-- COMMONDATA Files for LHCBZ940PB successfully read.\n", + "\n", + "LHCBZEE2FB_40\n", + "\n", + "-- Reading COMMONDATA for Dataset: LHCBZEE2FB_40\n", + "nData: 17 nSys: 19\n", + "-- COMMONDATA Files for LHCBZEE2FB_40 successfully read.\n", + "\n", + "LHCBWZMU7TEV\n", + 
"\n", + "-- Reading COMMONDATA for Dataset: LHCBWZMU7TEV\n", + "nData: 33 nSys: 35\n", + "-- COMMONDATA Files for LHCBWZMU7TEV successfully read.\n", + "\n", + "LHCBWZMU8TEV\n", + "\n", + "-- Reading COMMONDATA for Dataset: LHCBWZMU8TEV\n", + "nData: 34 nSys: 36\n", + "-- COMMONDATA Files for LHCBWZMU8TEV successfully read.\n", + "\n", + "LHCB_Z_13TEV_DIMUON\n", + "\n", + "-- Reading COMMONDATA for Dataset: LHCB_Z_13TEV_DIMUON\n", + "nData: 18 nSys: 19\n", + "-- COMMONDATA Files for LHCB_Z_13TEV_DIMUON successfully read.\n", + "\n", + "LHCB_Z_13TEV_DIELECTRON\n", + "\n", + "-- Reading COMMONDATA for Dataset: LHCB_Z_13TEV_DIELECTRON\n", + "nData: 17 nSys: 18\n", + "-- COMMONDATA Files for LHCB_Z_13TEV_DIELECTRON successfully read.\n", + "\n", + " vp pine ratio CMSTTBARTOT5TEV, ['QCD']\n", + " 0 0 0\n", + "data \n", + "0 69.137978 63.366976 1.091073\n", + " vp pine ratio CMS_TTBAR_2D_DIFF_MTT_TRAP_NORM, ['QCD']\n", + " 0 0 0\n", + "data \n", + "0 0.002605 0.003382 0.770101\n", + "1 0.002313 0.002982 0.775740\n", + "2 0.001639 0.002114 0.775299\n", + "3 0.000570 0.000741 0.769081\n", + "4 0.002545 0.002664 0.955411\n", + "5 0.002176 0.002363 0.921011\n", + "6 0.001513 0.001637 0.924156\n", + "7 0.000536 0.000578 0.927862\n", + "8 0.000998 0.001038 0.961663\n", + "9 0.000857 0.000930 0.921232\n", + "10 0.000624 0.000678 0.920042\n", + "11 0.000254 0.000276 0.918816\n", + "12 0.000072 0.000069 1.032299\n", + "13 0.000064 0.000069 0.923609\n", + "14 0.000059 0.000064 0.927297\n", + "15 0.000032 0.000035 0.918087\n" + ] + } + ], "source": [ "# Test them all\n", - "if False:\n", + "if True:\n", " from yaml import safe_load\n", " pdf = API.pdf(pdf=\"NNPDF40_nnlo_as_01180\")\n", " all_res = []\n", " # Reference here a NNPDF40 runcard to read up all datasets\n", - " nnpdf40_runcard = safe_load(Path(\"/home/juacrumar/NNPDF-testing/nnpdf/n3fit/NNPDF40_with_pineappl.yml\").read_text())\n", - " #nnpdf40_runcard = safe_load(Path(\"/mount/storage/Academic_Workspace/NNPDF/source/nnpdf/n3fit/NNPDF40_with_pineappl.yml\").read_text())\n", + " #nnpdf40_runcard = safe_load(Path(\"/home/juacrumar/NNPDF-testing/nnpdf/n3fit/NNPDF40_with_pineappl.yml\").read_text())\n", + " nnpdf40_runcard = safe_load(Path(\"/mount/storage/Academic_Workspace/NNPDF/src/nnpdf/n3fit/NNPDF40_with_pineappl.yml\").read_text())\n", " for d in nnpdf40_runcard[\"dataset_inputs\"]:\n", " target_ds = d[\"dataset\"]\n", " #if any(skipthis in target_ds for skipthis in [\"HERA\", \"NMC\", \"NTV\", \"CHORUS\", \"SLAC\", \"BCD\"]):\n", " # continue\n", " print(target_ds)\n", " cfac = d.get(\"cfac\", [])\n", - " old_ds = API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac + [\"oldmode\"]}, theoryid=200, use_cuts=\"internal\")\n", - " ds = API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac}, theoryid=200, use_cuts=\"internal\")\n", + " old_ds = API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac}, theoryid=200, use_cuts=\"internal\")\n", + " ds = API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac}, theoryid=400, use_cuts=\"internal\")\n", " new_cp = central_predictions(ds, pdf)\n", " cp = central_predictions(old_ds, pdf)\n", " all_res.append(pd.concat([new_cp, cp, new_cp/cp], axis=1, keys=[\"vp\", \"pine\", f\"ratio {target_ds}, {cfac}\"]))\n", @@ -78,20 +541,20 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 8, "id": "a4d88771", "metadata": {}, "outputs": [], "source": [ "target_ds = \"ATLAS_WP_JET_8TEV_PT\"\n", "cfac = [\"QCD\"] # [\"NRM\"]\n", - "old_ds = 
API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac + [\"oldmode\"]}, theoryid=200, use_cuts=\"internal\")\n", - "ds = API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac}, theoryid=200, use_cuts=\"internal\")" + "old_ds = API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac}, theoryid=200, use_cuts=\"internal\")\n", + "ds = API.dataset(dataset_input={\"dataset\": target_ds, \"cfac\": cfac}, theoryid=400, use_cuts=\"internal\")" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 4, "id": "316a571e", "metadata": { "scrolled": false @@ -106,34 +569,189 @@ "nData: 16 nSys: 124\n", "-- COMMONDATA Files for ATLAS_WP_JET_8TEV_PT successfully read.\n", "\n", - "> \u001b[0;32m/home/juacrumar/NNPDF-testing/nnpdf/validphys2/src/validphys/pineparser.py\u001b[0m(243)\u001b[0;36mpineappl_reader\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 242 \u001b[0;31m \u001b[0;32mimport\u001b[0m \u001b[0mipdb\u001b[0m\u001b[0;34m;\u001b[0m \u001b[0mipdb\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mset_trace\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m--> 243 \u001b[0;31m \u001b[0mpine_rep\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpines\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 244 \u001b[0;31m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n", - "ipdb> n\n", - "> \u001b[0;32m/home/juacrumar/NNPDF-testing/nnpdf/validphys2/src/validphys/pineparser.py\u001b[0m(246)\u001b[0;36mpineappl_reader\u001b[0;34m()\u001b[0m\n", - "\u001b[0;32m 245 \u001b[0;31m \u001b[0;31m# Is it hadronic? (at the moment only hadronic and DIS are considered)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m--> 246 \u001b[0;31m \u001b[0mhadronic\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mpine_rep\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkey_values\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"initial_state_1\"\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0mpine_rep\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mkey_values\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"initial_state_2\"\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\u001b[0;32m 247 \u001b[0;31m \u001b[0;31m# Sanity check (in case at some point we start fitting things that are not protons)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", - "\u001b[0m\n", - "ipdb> pine_rep\n", - "\n", - "ipdb> pine_rep.metadata\n", - "*** AttributeError: 'builtins.PyFkTable' object has no attribute 'metadata'\n", - "ipdb> dir(pine_rep)\n", - "['__class__', '__delattr__', '__dict__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattr__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__weakref__', '_raw', 'raw', 'read']\n", - "ipdb> pine_rep._raw\n", - "\n", - "ipdb> dir(pine_rep._raw)\n", - "['__class__', '__delattr__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__ne__', '__new__', '__reduce__', '__reduce_ex__', '__repr__', 
'__setattr__', '__sizeof__', '__str__', '__subclasshook__', 'bin_dimensions', 'bin_left', 'bin_normalizations', 'bin_right', 'bins', 'convolute_with_one', 'key_values', 'lumi', 'muf2', 'read', 'table', 'write', 'write_lz4', 'x_grid']\n", - "ipdb> dir(pine_rep._raw.key_values)\n", - "['__call__', '__class__', '__delattr__', '__dir__', '__doc__', '__eq__', '__format__', '__ge__', '__getattribute__', '__gt__', '__hash__', '__init__', '__init_subclass__', '__le__', '__lt__', '__module__', '__name__', '__ne__', '__new__', '__qualname__', '__reduce__', '__reduce_ex__', '__repr__', '__self__', '__setattr__', '__sizeof__', '__str__', '__subclasshook__', '__text_signature__']\n", - "ipdb> print(pine_rep._raw.key_values.keys())\n", - "*** AttributeError: 'builtin_function_or_method' object has no attribute 'keys'\n", - "ipdb> print(pine_rep._raw.key_values())\n", - "{'lumi_id_types': 'evol', 'pineappl_gitversion': 'v0.5.0-beta.3-18-g32e63c9', 'initial_state_2': '2212', 'initial_state_1': '2212'}\n" + "LHAPDF 6.4.0 loading /usr/share/lhapdf/LHAPDF/NNPDF40_nnlo_as_01180/NNPDF40_nnlo_as_01180_0000.dat\n", + "NNPDF40_nnlo_as_01180 PDF set, member #0, version 1; LHAPDF ID = 331100\n" ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
pinevpratio vp/ratioratio pine/vp
0000
data
15611.4398175616.2483441.0008570.999144
23154.7675733157.5594791.0008850.999116
31194.4676381195.3542441.0007420.999258
4515.187956515.5745651.0007500.999250
5246.767542246.9416541.0007060.999295
6126.644243126.7315871.0006900.999311
770.24457770.2953671.0007230.999277
831.54448331.5662111.0006890.999312
911.81750611.8254411.0006710.999329
105.0290275.0321501.0006210.999379
112.2904812.2917551.0005560.999444
121.1237061.1242751.0005060.999494
130.5658940.5661581.0004660.999534
140.2457230.2458241.0004150.999586
150.0545560.0545731.0003270.999673
\n", + "
" + ], + "text/plain": [ + " pine vp ratio vp/ratio ratio pine/vp\n", + " 0 0 0 0\n", + "data \n", + "1 5611.439817 5616.248344 1.000857 0.999144\n", + "2 3154.767573 3157.559479 1.000885 0.999116\n", + "3 1194.467638 1195.354244 1.000742 0.999258\n", + "4 515.187956 515.574565 1.000750 0.999250\n", + "5 246.767542 246.941654 1.000706 0.999295\n", + "6 126.644243 126.731587 1.000690 0.999311\n", + "7 70.244577 70.295367 1.000723 0.999277\n", + "8 31.544483 31.566211 1.000689 0.999312\n", + "9 11.817506 11.825441 1.000671 0.999329\n", + "10 5.029027 5.032150 1.000621 0.999379\n", + "11 2.290481 2.291755 1.000556 0.999444\n", + "12 1.123706 1.124275 1.000506 0.999494\n", + "13 0.565894 0.566158 1.000466 0.999534\n", + "14 0.245723 0.245824 1.000415 0.999586\n", + "15 0.054556 0.054573 1.000327 0.999673" + ] + }, + "execution_count": 4, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ @@ -500,9 +1118,9 @@ ], "metadata": { "kernelspec": { - "display_name": "devnnpdf", + "display_name": "nnvortex", "language": "python", - "name": "devnnpdf" + "name": "nnvortex" }, "language_info": { "codemirror_mode": { @@ -514,7 +1132,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.10.2" + "version": "3.10.4" } }, "nbformat": 4, diff --git a/n3fit/src/n3fit/model_trainer.py b/n3fit/src/n3fit/model_trainer.py index b8fe9c1aed..cefad2cdf3 100644 --- a/n3fit/src/n3fit/model_trainer.py +++ b/n3fit/src/n3fit/model_trainer.py @@ -131,6 +131,8 @@ def __init__( """ # Save all input information self.exp_info = exp_info + if pos_info is None: + pos_info = [] self.pos_info = pos_info self.integ_info = integ_info if self.integ_info is not None: diff --git a/validphys2/src/validphys/core.py b/validphys2/src/validphys/core.py index 54f45fb2f0..5df6550d38 100644 --- a/validphys2/src/validphys/core.py +++ b/validphys2/src/validphys/core.py @@ -538,7 +538,7 @@ class FKTableSpec(TupleComp): """ def __init__(self, fkpath, cfactors, metadata=None): self.cfactors = cfactors if cfactors is not None else [] - # Note: the legacy interface is expected to be removed by NNPDF5.0 + # NOTE: the legacy interface is expected to be removed by NNPDF5.0 # so please don't write code that relies on it self.legacy = True if isinstance(fkpath, (tuple, list)): @@ -547,7 +547,8 @@ def __init__(self, fkpath, cfactors, metadata=None): self.fkpath = fkpath self.metadata = metadata - # If this is a yaml file that loads an applgrid, keep also the name of the target + # If this is a yaml file that loads an applgrid-converted pineappl, + # keep also the name of the target # this is needed since we can now easily reutilize grids if not self.legacy and self.metadata.get("appl"): super().__init__(fkpath, cfactors, self.metadata.get("target_dataset")) @@ -788,6 +789,14 @@ def __repr__(self): def __str__(self): return f"Theory {self.id}" + @property + def yamldb_path(self): + return self.path / "yamldb" + + def is_pineappl(self): + """Check whether this theory is a pineappl-based theory""" + return self.yamldb_path.exists() + class ThCovMatSpec: def __init__(self, path): self.path = path diff --git a/validphys2/src/validphys/loader.py b/validphys2/src/validphys/loader.py index b98e6185f2..6f287e2321 100644 --- a/validphys2/src/validphys/loader.py +++ b/validphys2/src/validphys/loader.py @@ -124,9 +124,6 @@ def __init__(self, profile=None): # And save them up self.datapath = datapath - # TODO: eventually the yaml database will be the new commondata format - # so this is a temporary 
location? - self.yamlpath = datapath / "yamldb" self.resultspath = resultspath self._old_commondata_fits = set() self.nnprofile = profile @@ -526,16 +523,6 @@ def check_dataset(self, If the dataset contains new-type fktables, use the pineappl loading function, otherwise fallback to legacy """ - # TODO: this is just so I can load both types at once during development - readyaml = True - force_pineappl = False - if "oldmode" in cfac: - cfac = [i for i in cfac if i != "oldmode"] - readyaml = False - elif "forcepineappl" in cfac: - cfac = [i for i in cfac if i != "forcepineappl"] - force_pineappl = True - if not isinstance(theoryid, TheoryIDSpec): theoryid = self.check_theoryID(theoryid) @@ -544,13 +531,11 @@ def check_dataset(self, commondata = self.check_commondata( name, sysnum, use_fitcommondata=use_fitcommondata, fit=fit) - # Let's first see whether this is a new type of fktable - fkpath = (self.yamlpath / name).with_suffix(".yaml") - if fkpath.exists() and readyaml: + if theoryid.is_pineappl(): + # If it is a pineappl theory, use the pineappl reader + fkpath = (theoryid.yamldb_path / name).with_suffix(".yaml") fkspec, op = self.check_fkyaml(fkpath, theoryno, cfac) else: - if force_pineappl: - raise pineparser.PineAPPLEquivalentNotKnown(f"No pineappl version for {name}") try: fkspec, op = self.check_compound(theoryno, name, cfac) except CompoundNotFound: diff --git a/validphys2/src/validphys/n3fit_data.py b/validphys2/src/validphys/n3fit_data.py index d0e3e35bb5..441b3b230c 100644 --- a/validphys2/src/validphys/n3fit_data.py +++ b/validphys2/src/validphys/n3fit_data.py @@ -523,7 +523,7 @@ def _fitting_lagrange_dict(lambdadataset): } -def posdatasets_fitting_pos_dict(posdatasets): +def posdatasets_fitting_pos_dict(posdatasets=None): """Loads all positivity datasets. It is not allowed to be empty. Parameters @@ -533,7 +533,10 @@ def posdatasets_fitting_pos_dict(posdatasets): these can be found in the runcards located in n3fit/runcards. They have a format similar to ``dataset_input``. """ - return [_fitting_lagrange_dict(i) for i in posdatasets] + if posdatasets is not None: + return [_fitting_lagrange_dict(i) for i in posdatasets] + log.warning("Not using any positivity datasets.") + return None # can't use collect here because integdatasets might not exist. diff --git a/validphys2/src/validphys/pineparser.py b/validphys2/src/validphys/pineparser.py index 7b010dc79b..4d2fe2ac68 100644 --- a/validphys2/src/validphys/pineparser.py +++ b/validphys2/src/validphys/pineparser.py @@ -192,9 +192,8 @@ def get_yaml_information(yaml_file, theorypath): Transitional function: the call to "pineko" might be to some other commondata reader that will know how to extract the information from the commondata """ - # TODO: Tell the reader where to look for the pineappl grids - # for debugging purposes they are inside "share/NNPDF/data/theory_X/pineappls" - grids_folder = theorypath / "pineappls" + # The pineappl grids are just stored where the fktables would be, "fastkernel" + grids_folder = theorypath / "fastkernel" return pineko_yaml(yaml_file, grids_folder)
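
Note on the resulting loading flow: with the changes above, the choice between the legacy fktable reader and the pineappl reader is made per theory, via TheoryIDSpec.is_pineappl (the presence of a theory-local "yamldb" folder), instead of per dataset through the temporary "oldmode"/"forcepineappl" cfactor flags that are removed here. A minimal sketch of how this can be exercised from the validphys API, mirroring the notebook cell above; it assumes theory 200 (legacy fktables) and theory 400 (its pineappl counterpart, as used in the notebook comparison) are both installed locally:

    from validphys.api import API
    from validphys.convolution import central_predictions

    pdf = API.pdf(pdf="NNPDF40_nnlo_as_01180")
    dinput = {"dataset": "ATLAS_WP_JET_8TEV_PT", "cfac": ["QCD"]}

    # The same dataset_input now dispatches on the theory alone: theory 200 has
    # no yamldb folder, so check_dataset follows the compound/legacy fktable
    # path, while theory 400 ships one and is routed through check_fkyaml.
    old_ds = API.dataset(dataset_input=dinput, theoryid=200, use_cuts="internal")
    new_ds = API.dataset(dataset_input=dinput, theoryid=400, use_cuts="internal")

    # Predictions from the two readers should agree point by point; the table
    # above shows ratios within a permille for this dataset.
    ratio = central_predictions(new_ds, pdf) / central_predictions(old_ds, pdf)
    print(ratio)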