diff --git a/n3fit/src/n3fit/scripts/vp_setupfit.py b/n3fit/src/n3fit/scripts/vp_setupfit.py index fe844117bb..4d197bbadd 100644 --- a/n3fit/src/n3fit/scripts/vp_setupfit.py +++ b/n3fit/src/n3fit/scripts/vp_setupfit.py @@ -50,6 +50,7 @@ 'validphys.theorycovariance.construction', 'validphys.results', 'validphys.covmats', + 'validphys.commondata', 'n3fit.n3fit_checks_provider', ] diff --git a/nnpdf_data/nnpdf_data/theory_cards/825.yaml b/nnpdf_data/nnpdf_data/theory_cards/825.yaml new file mode 100644 index 0000000000..5f10af72bb --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/825.yaml @@ -0,0 +1,48 @@ +ID: 825 +PTO: 2 +FNS: FONLL-C +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.106 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NNLO nFONLL alpha_s=0.106 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/826.yaml b/nnpdf_data/nnpdf_data/theory_cards/826.yaml new file mode 100644 index 0000000000..40d3075b52 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/826.yaml @@ -0,0 +1,48 @@ +ID: 826 +PTO: 2 +FNS: FONLL-C +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.123 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NNLO nFONLL alpha_s=0.123 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/827.yaml b/nnpdf_data/nnpdf_data/theory_cards/827.yaml new file mode 100644 index 0000000000..2fc95de9fc --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/827.yaml @@ -0,0 +1,48 @@ +ID: 827 +PTO: 2 +FNS: FONLL-C +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.124 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NNLO nFONLL alpha_s=0.124 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/828.yaml b/nnpdf_data/nnpdf_data/theory_cards/828.yaml new file mode 100644 index 0000000000..1d3d5d932c --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/828.yaml @@ -0,0 +1,48 @@ +ID: 828 +PTO: 2 +FNS: FONLL-C +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.125 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 
+Comments: NNLO nFONLL alpha_s=0.125 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/829.yaml b/nnpdf_data/nnpdf_data/theory_cards/829.yaml new file mode 100644 index 0000000000..335790dd4c --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/829.yaml @@ -0,0 +1,48 @@ +ID: 829 +PTO: 2 +FNS: FONLL-C +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.130 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NNLO nFONLL alpha_s=0.130 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/830.yaml b/nnpdf_data/nnpdf_data/theory_cards/830.yaml new file mode 100644 index 0000000000..1c9e4f9467 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/830.yaml @@ -0,0 +1,48 @@ +ID: 830 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.106 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.106 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/831.yaml b/nnpdf_data/nnpdf_data/theory_cards/831.yaml new file mode 100644 index 0000000000..98e5e320a8 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/831.yaml @@ -0,0 +1,48 @@ +ID: 831 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.114 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.114 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/832.yaml b/nnpdf_data/nnpdf_data/theory_cards/832.yaml new file mode 100644 index 0000000000..4293bd43d8 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/832.yaml @@ -0,0 +1,48 @@ +ID: 832 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.115 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.115 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/833.yaml b/nnpdf_data/nnpdf_data/theory_cards/833.yaml new file mode 100644 index 0000000000..ac88a54bc9 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/833.yaml @@ -0,0 +1,48 @@ +ID: 833 +PTO: 1 +FNS: 
FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.116 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.116 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/834.yaml b/nnpdf_data/nnpdf_data/theory_cards/834.yaml new file mode 100644 index 0000000000..2bcec6fbf5 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/834.yaml @@ -0,0 +1,48 @@ +ID: 834 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.117 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.117 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/835.yaml b/nnpdf_data/nnpdf_data/theory_cards/835.yaml new file mode 100644 index 0000000000..a55e1c01ce --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/835.yaml @@ -0,0 +1,48 @@ +ID: 835 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.119 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.119 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/836.yaml b/nnpdf_data/nnpdf_data/theory_cards/836.yaml new file mode 100644 index 0000000000..66f08a5215 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/836.yaml @@ -0,0 +1,48 @@ +ID: 836 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.120 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.120 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/837.yaml b/nnpdf_data/nnpdf_data/theory_cards/837.yaml new file mode 100644 index 0000000000..4f1bdcd85d --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/837.yaml @@ -0,0 +1,48 @@ +ID: 837 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.121 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 
0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.121 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/838.yaml b/nnpdf_data/nnpdf_data/theory_cards/838.yaml new file mode 100644 index 0000000000..81df19b2c5 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/838.yaml @@ -0,0 +1,48 @@ +ID: 838 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.122 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.122 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/839.yaml b/nnpdf_data/nnpdf_data/theory_cards/839.yaml new file mode 100644 index 0000000000..3f28f22019 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/839.yaml @@ -0,0 +1,48 @@ +ID: 839 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.123 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.123 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/840.yaml b/nnpdf_data/nnpdf_data/theory_cards/840.yaml new file mode 100644 index 0000000000..3563935fe2 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/840.yaml @@ -0,0 +1,48 @@ +ID: 840 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.124 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.124 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/841.yaml b/nnpdf_data/nnpdf_data/theory_cards/841.yaml new file mode 100644 index 0000000000..b6b8231c7c --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/841.yaml @@ -0,0 +1,48 @@ +ID: 841 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.125 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.125 +global_nx: 0 +EScaleVar: 1 diff --git a/nnpdf_data/nnpdf_data/theory_cards/842.yaml 
b/nnpdf_data/nnpdf_data/theory_cards/842.yaml new file mode 100644 index 0000000000..1804e9dbc0 --- /dev/null +++ b/nnpdf_data/nnpdf_data/theory_cards/842.yaml @@ -0,0 +1,48 @@ +ID: 842 +PTO: 1 +FNS: FONLL-B +DAMP: 0 +IC: 1 +ModEv: TRN +XIR: 1.0 +XIF: 1.0 +NfFF: 5 +MaxNfAs: 5 +MaxNfPdf: 5 +Q0: 1.65 +alphas: 0.130 +Qref: 91.2 +QED: 0 +alphaqed: 0.007496252 +Qedref: 1.777 +SxRes: 0 +SxOrd: LL +HQ: POLE +mc: 1.51 +Qmc: 1.51 +kcThr: 1.0 +mb: 4.92 +Qmb: 4.92 +kbThr: 1.0 +mt: 172.5 +Qmt: 172.5 +ktThr: 1.0 +CKM: +- 0.97428 +- 0.2253 +- 0.00347 +- 0.2252 +- 0.97345 +- 0.041 +- 0.00862 +- 0.0403 +- 0.999152 +MZ: 91.1876 +MW: 80.398 +GF: 1.1663787e-05 +SIN2TW: 0.23126 +TMC: 1 +MP: 0.938 +Comments: NLO nFONLL alpha_s=0.130 +global_nx: 0 +EScaleVar: 1 diff --git a/validphys2/src/validphys/closuretest/closure_results.py b/validphys2/src/validphys/closuretest/closure_results.py index 4f86cdbb48..436ba81dbd 100644 --- a/validphys2/src/validphys/closuretest/closure_results.py +++ b/validphys2/src/validphys/closuretest/closure_results.py @@ -24,7 +24,31 @@ underlying_results = collect("results", ("fitunderlyinglaw",)) - +def replica_chi2_level1(dataset_inputs_results, data_fits_cv): + """Return the chi2 per data point between the central data values stored in the fit and each theory replica prediction.""" + dt_ct, th_ct = dataset_inputs_results + preds = [] + for ds in data_fits_cv: + preds.append(ds[0]) + preds_conc = np.array([j[0] for i in preds for j in i]) + chi2s = [] + for th_ct_replica in th_ct.error_members.T: + diff = preds_conc - th_ct_replica + chi2 = calc_chi2(dt_ct.sqrtcovmat, diff) / len(preds_conc) + chi2s.append(chi2) + return chi2s + +def central_chi2_level1(dataset_inputs_results_central, data_fits_cv): + """Return the chi2 per data point between the central data values stored in the fit and the central theory prediction.""" + dt_ct, th_ct = dataset_inputs_results_central + preds = [] + for ds in data_fits_cv: + preds.append(ds[0]) + preds_conc = np.array([j[0] for i in preds for j in i]) + central_diff = preds_conc - th_ct.central_value + chi2s = calc_chi2(dt_ct.sqrtcovmat, central_diff) + return chi2s / len(preds_conc) + @check_fit_isclosure @check_use_fitcommondata def bias_dataset(results, underlying_results, fit, use_fitcommondata): diff --git a/validphys2/src/validphys/config.py b/validphys2/src/validphys/config.py index 083860071c..c40f4b8e40 100644 --- a/validphys2/src/validphys/config.py +++ b/validphys2/src/validphys/config.py @@ -151,7 +151,31 @@ def parse_theoryid(self, theoryID: (str, int)): raise ConfigError( str(e), theoryID, self.loader.available_theories, display_alternatives="all" ) - + + @element_of("theoryids") + @_id_with_label + def parse_faketheoryid(self, theoryID: (str, int)): + """A number corresponding to the database theory ID where the + corresponding theory folder is installed in the data directory.""" + try: + return self.loader.check_theoryID(theoryID) + except LoaderError as e: + raise ConfigError( + str(e), theoryID, self.loader.available_theories, display_alternatives="all" + ) + + @element_of("theoryids") + @_id_with_label + def parse_t0theoryid(self, theoryID: (str, int)): + """A number corresponding to the database theory ID where the + corresponding theory folder is installed in the data directory.""" + try: + return self.loader.check_theoryID(theoryID) + except LoaderError as e: + raise ConfigError( + str(e), theoryID, self.loader.available_theories, display_alternatives="all" + ) + def parse_use_cuts(self, use_cuts: (bool, str)): """Whether to filter the points based on the cuts applied in the fit, or the whole data in the dataset. The possible options are:
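# --- Editor's sketch (not part of the patch) --------------------------------
# A minimal, self-contained illustration of the reduced chi2 computed by the
# replica_chi2_level1/central_chi2_level1 providers added above. Only the
# contract of validphys.calcutils.calc_chi2 (a chi2 built from the Cholesky
# factor of the covmat and a difference vector) is taken from the source; all
# other names here are hypothetical.
import numpy as np
from scipy.linalg import solve_triangular

def reduced_chi2(sqrtcovmat, central_data, predictions):
    """chi2/N, i.e. |L^{-1}(D - T)|^2 / N with C = L L^T, L lower triangular."""
    diff = central_data - predictions
    # Solving L x = diff avoids explicitly inverting the covariance matrix
    x = solve_triangular(sqrtcovmat, diff, lower=True)
    return (x @ x) / len(diff)

# Hypothetical usage, mirroring the loop over th_ct.error_members.T above:
# chi2s = [reduced_chi2(L, data, th_replica) for th_replica in theory_replicas]
# -----------------------------------------------------------------------------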
@@ -633,6 +657,54 @@ def produce_dataset( if not ds.commondata.plotfiles: log.warning(f"Plotting files not found for: {ds}") return ds + + def produce_t0dataset( + self, + *, + dataset_input, + theoryid, + cuts, + t0theoryid=None, + use_fitcommondata=False, + fit=None, + check_plotting: bool = False, + ): + """Dataset specification from the theory and CommonData. + Use the cuts from the fit, if provided. If ``t0theoryid`` is given, it overrides ``theoryid``. If check_plotting is set to + True, attempt to load and check the PLOTTING files + (note this may cause a noticeable slowdown in general).""" + name = dataset_input.name + sysnum = dataset_input.sys + cfac = dataset_input.cfac + frac = dataset_input.frac + weight = dataset_input.weight + variant = dataset_input.variant + if t0theoryid: + theoryid = t0theoryid + try: + ds = self.loader.check_dataset( + name=name, + sysnum=sysnum, + theoryid=theoryid, + cfac=cfac, + cuts=cuts, + frac=frac, + use_fitcommondata=use_fitcommondata, + fit=fit, + weight=weight, + variant=variant, + ) + except DataNotFoundError as e: + raise ConfigError(str(e), name, self.loader.available_datasets) + + except LoadFailedError as e: + raise ConfigError(e) + if check_plotting: + # normalize=True should check for more stuff + get_info(ds, normalize=True) + if not ds.commondata.plotfiles: + log.warning(f"Plotting files not found for: {ds}") + return ds @configparser.element_of("experiments") def parse_experiment(self, experiment: dict): @@ -1423,7 +1495,20 @@ def produce_defaults( filter_defaults["maxTau"] = maxTau return filter_defaults + + def produce_data_level0(self, data_input, faketheoryid, *, group_name="data"): + """A set of datasets, as in ``produce_data``, but built with the + theory fixed to ``faketheoryid`` + """ + from validphys.loader import Loader + theoryid_alphacentral = faketheoryid + datasets = [] + for dsinp in data_input: + with self.set_context(ns=self._curr_ns.new_child({"dataset_input": dsinp, "theoryid": theoryid_alphacentral})): + datasets.append(self.parse_from_(None, "dataset", write=False)[1]) + return DataGroupSpec(name=group_name, datasets=datasets, dsinputs=data_input) + def produce_data(self, data_input, *, group_name="data"): """A set of datasets where correlated systematics are taken into account """ @@ -1687,16 +1772,11 @@ def produce_filter_data(self, fakedata: bool = False, theorycovmatconfig=None): if not fakedata: return validphys.filters.filter_real_data else: - if theorycovmatconfig is not None and theorycovmatconfig.get( - "use_thcovmat_in_sampling" - ): - # NOTE: By the time we run theory covmat closure tests, - # hopefully the generation of pseudodata will be done in python. - raise ConfigError( - "Generating closure test data which samples from the theory " - "covariance matrix has not been implemented yet." - ) - return validphys.filters.filter_closure_data_by_experiment + if theorycovmatconfig is not None: + # At this point the thcovmat no longer matters: the fakedata is produced + # without it.
However, all the groups still need to be collected together + return validphys.filters._filter_closure_data + return validphys.filters.filter_closure_data_by_experiment @configparser.explicit_node def produce_total_chi2_data(self, fitthcovmat): diff --git a/validphys2/src/validphys/covmats.py b/validphys2/src/validphys/covmats.py index dec41cd2ba..e280531368 100644 --- a/validphys2/src/validphys/covmats.py +++ b/validphys2/src/validphys/covmats.py @@ -224,7 +224,7 @@ def dataset_inputs_covmat_from_systematics( @check_cuts_considered @functools.lru_cache -def dataset_t0_predictions(dataset, t0set): +def dataset_t0_predictions(t0dataset, t0set): """Returns the t0 predictions for a ``dataset`` which are the predictions calculated using the central member of ``pdf``. Note that if ``pdf`` has errortype ``replicas``, and the dataset is a hadronic observable then the """ # reshape because the underlying data has shape ndata * 1 # accounting for the fact that some datasets are single datapoint - return central_predictions(dataset, t0set).to_numpy().reshape(-1) + return central_predictions(t0dataset, t0set).to_numpy().reshape(-1) def t0_covmat_from_systematics( diff --git a/validphys2/src/validphys/filters.py b/validphys2/src/validphys/filters.py index cded762eab..9dc631275d 100644 --- a/validphys2/src/validphys/filters.py +++ b/validphys2/src/validphys/filters.py @@ -137,7 +137,7 @@ def export_mask(path, mask): np.savetxt(path, mask, fmt='%d') -def filter_closure_data(filter_path, data, fakepdf, fakenoise, filterseed, sep_mult): +def filter_closure_data(filter_path, data_level0, fakepdf, fakenoise, filterseed, sep_mult): """Filter closure data. In addition to cutting data points, the data is generated from an underlying ``fakepdf``, applying a shift to the data if ``fakenoise`` is ``True``, which emulates the experimental central values """ log.info('Filtering closure-test data.') - return _filter_closure_data(filter_path, data, fakepdf, fakenoise, filterseed, sep_mult) + return _filter_closure_data(filter_path, data_level0, fakepdf, fakenoise, filterseed, sep_mult) def filter_closure_data_by_experiment( - filter_path, experiments_data, fakepdf, fakenoise, filterseed, data_index, sep_mult + filter_path, experiments_data, fakepdf, fakenoise, filterseed, data_index_level0, sep_mult ): """ Like :py:func:`filter_closure_data` except filters data by experiment.
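# --- Editor's sketch (not part of the patch) --------------------------------
# How the per-experiment slice in filter_closure_data_by_experiment (next
# hunk) works: the global data index is a pandas MultiIndex whose first level
# is the experiment, and isin(..., level=0) keeps only one experiment's rows.
# The level names and toy entries below are illustrative, not the real index.
import pandas as pd

data_index_level0 = pd.MultiIndex.from_tuples(
    [("NMC", "NMC", 0), ("NMC", "NMC", 1), ("ATLAS", "ATLASZPT8TEV", 0)],
    names=["experiment", "dataset", "id"],
)
# Boolean mask over the first ("experiment") level, then slice the index
experiment_index = data_index_level0[data_index_level0.isin(["NMC"], level=0)]
assert list(experiment_index.get_level_values("dataset")) == ["NMC", "NMC"]
# -----------------------------------------------------------------------------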
@@ -163,7 +163,7 @@ def filter_closure_data_by_experiment( res = [] for exp in experiments_data: - experiment_index = data_index[data_index.isin([exp.name], level=0)] + experiment_index = data_index_level0[data_index_level0.isin([exp.name], level=0)] res.append( _filter_closure_data( filter_path, exp, fakepdf, fakenoise, filterseed, experiment_index, sep_mult @@ -173,10 +173,10 @@ def filter_closure_data_by_experiment( return res -def filter_real_data(filter_path, data): +def filter_real_data(filter_path, data_level0): """Filter real data, cutting any points which do not pass the filter rules.""" log.info('Filtering real data.') - return _filter_real_data(filter_path, data) + return _filter_real_data(filter_path, data_level0) def filter(filter_data): @@ -217,7 +217,7 @@ def _filter_real_data(filter_path, data): return total_data_points, total_cut_data_points -def _filter_closure_data(filter_path, data, fakepdf, fakenoise, filterseed, data_index, sep_mult): +def _filter_closure_data(filter_path, data_level0, fakepdf, fakenoise, filterseed, data_index_level0, sep_mult): """ This function is accessed within a closure test only, that is, the fakedata namespace has to be True (If fakedata = False, the _filter_real_data function @@ -263,15 +263,15 @@ def _filter_closure_data(filter_path, data, fakepdf, fakenoise, filterseed, data total_cut_data_points = 0 # circular import generated @ core.py - from validphys.pseudodata import level0_commondata_wc, make_level1_data + from validphys.pseudodata import level0_commondata_wc_patched, make_level1_data - closure_data = level0_commondata_wc(data, fakepdf) + closure_data = level0_commondata_wc_patched(data_level0, fakepdf) # Keep track of the original commondata, since it is what will be used to export # the data afterwards all_raw_commondata = {} - for dataset in data.datasets: + for dataset in data_level0.datasets: # == print number of points passing cuts, make dataset directory and write FKMASK ==# path = filter_path / dataset.name nfull, ncut = _write_ds_cut_data(path, dataset) @@ -282,7 +282,7 @@ def _filter_closure_data(filter_path, data, fakepdf, fakenoise, filterseed, data if fakenoise: # ======= Level 1 closure test =======# - closure_data = make_level1_data(data, closure_data, filterseed, data_index, sep_mult) + closure_data = make_level1_data(data_level0, closure_data, filterseed, data_index_level0, sep_mult) # ====== write commondata and systype files ======# if fakenoise: diff --git a/validphys2/src/validphys/pseudodata.py b/validphys2/src/validphys/pseudodata.py index 5dc1560d52..bb523bdfd7 100644 --- a/validphys2/src/validphys/pseudodata.py +++ b/validphys2/src/validphys/pseudodata.py @@ -18,7 +18,7 @@ sqrt_covmat, ) -FILE_PREFIX = "datacuts_theory_fitting_" +FILE_PREFIX = "datacuts_theory_closuretest_fitting_" log = logging.getLogger(__name__) @@ -132,6 +132,7 @@ def make_replica( sep_mult, genrep=True, max_tries=int(1e6), + resample_negative_pseudodata=True, ): """Function that takes in a list of :py:class:`validphys.coredata.CommonData` objects and returns a pseudodata replica accounting for @@ -266,7 +267,7 @@ def make_replica( # Shifting pseudodata shifted_pseudodata = (all_pseudodata + shifts) * mult_part # positivity control - if np.all(shifted_pseudodata[full_mask] >= 0): + if np.all(shifted_pseudodata[full_mask] >= 0) or not resample_negative_pseudodata: return shifted_pseudodata dfail = " ".join(i.setname for i in groups_dataset_inputs_loaded_cd_with_cuts) @@ -320,16 +321,17 @@ def level0_commondata_wc(data, fakepdf): 
commondata_wc = commondata_wc.with_cuts(cuts) # == Generate a new CommonData instance with central value given by Level 0 data generated with fakepdf ==# - t0_prediction = dataset_t0_predictions( - dataset=dataset, t0set=fakepdf + t0dataset=dataset, t0set=fakepdf ) # N.B. cuts already applied to th. pred. level0_commondata_instances_wc.append(commondata_wc.with_central_value(t0_prediction)) return level0_commondata_instances_wc +def level0_commondata_wc_patched(data_level0, fakepdf): + return level0_commondata_wc(data_level0, fakepdf) -def make_level1_data(data, level0_commondata_wc, filterseed, data_index, sep_mult): +def make_level1_data(data_level0, level0_commondata_wc_patched, filterseed, data_index_level0, sep_mult): """ Given a list of Level 0 commondata instances, return the same list with central values replaced by Level 1 data. @@ -387,10 +389,10 @@ def make_level1_data(data, level0_commondata_wc, filterseed, data_index, sep_mul [CommonData(setname='NMC', ndata=204, commondataproc='DIS_NCE', nkin=3, nsys=16)] """ - dataset_input_list = list(data.dsinputs) + dataset_input_list = list(data_level0.dsinputs) covmat = dataset_inputs_covmat_from_systematics( - level0_commondata_wc, + level0_commondata_wc_patched, dataset_input_list, use_weights_in_covmat=False, norm_threshold=None, @@ -400,15 +402,15 @@ def make_level1_data(data, level0_commondata_wc, filterseed, data_index, sep_mul # ================== generation of Level1 data ======================# level1_data = make_replica( - level0_commondata_wc, filterseed, covmat, sep_mult=sep_mult, genrep=True + level0_commondata_wc_patched, filterseed, covmat, sep_mult=sep_mult, genrep=True ) - indexed_level1_data = indexed_make_replica(data_index, level1_data) + indexed_level1_data = indexed_make_replica(data_index_level0, level1_data) - dataset_order = {cd.setname: i for i, cd in enumerate(level0_commondata_wc)} + dataset_order = {cd.setname: i for i, cd in enumerate(level0_commondata_wc_patched)} # ===== create commondata instances with central values given by pseudo_data =====# - level1_commondata_dict = {c.setname: c for c in level0_commondata_wc} + level1_commondata_dict = {c.setname: c for c in level0_commondata_wc_patched} level1_commondata_instances_wc = [] for xx, grp in indexed_level1_data.groupby('dataset'): diff --git a/validphys2/src/validphys/results.py b/validphys2/src/validphys/results.py index 6c9e15a4b9..320576380b 100644 --- a/validphys2/src/validphys/results.py +++ b/validphys2/src/validphys/results.py @@ -206,6 +206,8 @@ def from_convolution(cls, pdf, posset): stats = pdf.stats_class(data.T) return cls(stats) +def data_index_level0(data_level0): + return data_index(data_level0) def data_index(data): """ @@ -239,7 +241,7 @@ def data_index(data): # TODO: finish deprecating all dependencies on this index largely in theorycovmat module groups_data = collect("data", ("group_dataset_inputs_by_metadata",)) -experiments_data = collect("data", ("group_dataset_inputs_by_experiment",)) +experiments_data = collect("data_level0", ("group_dataset_inputs_by_experiment",)) procs_data = collect("data", ("group_dataset_inputs_by_process",)) diff --git a/validphys2/src/validphys/scalevariations/pointprescriptions.yaml b/validphys2/src/validphys/scalevariations/pointprescriptions.yaml index d76d2ca400..6757fb1ecb 100644 --- a/validphys2/src/validphys/scalevariations/pointprescriptions.yaml +++ b/validphys2/src/validphys/scalevariations/pointprescriptions.yaml @@ -25,3 +25,5 @@ 'n3lo fhmv full thcovmat': ['(0, 0, 0, 0, 0, 0, 0)','(1, 0, 0, 
0, 0, 0, 0)','(2, 0, 0, 0, 0, 0, 0)','(0, 1, 0, 0, 0, 0, 0)','(0, 2, 0, 0, 0, 0, 0)', '(0, 0, 1, 0, 0, 0, 0)','(0, 0, 2, 0, 0, 0, 0)','(0, 0, 0, 1, 0, 0, 0)','(0, 0, 0, 2, 0, 0, 0)','(0, 0, 0, 0, 1, 0, 0)','(0, 0, 0, 0, 2, 0, 0)','(0, 0, 0, 0, 0, 1, 0)','(0, 0, 0, 0, 0, 2, 0)','(0, 0, 0, 0, 0, 0, 1)','(0, 0, 0, 0, 0, 0, 2)','(2, 1)', '(0.5, 1)', '(1, 2)', '(1, 0.5)', '(2, 2)', '(0.5, 0.5)','(-1, -1)','(1, 1)'] # N3LO full IHOU + 3 point scale variations for hadronic dasasets with FHMV splittings 'n3lo fhmv 3pt hadronic': ['(0, 0, 0, 0, 0, 0, 0)','(1, 0, 0, 0, 0, 0, 0)','(2, 0, 0, 0, 0, 0, 0)','(0, 1, 0, 0, 0, 0, 0)','(0, 2, 0, 0, 0, 0, 0)', '(0, 0, 1, 0, 0, 0, 0)','(0, 0, 2, 0, 0, 0, 0)','(0, 0, 0, 1, 0, 0, 0)','(0, 0, 0, 2, 0, 0, 0)','(0, 0, 0, 0, 1, 0, 0)','(0, 0, 0, 0, 2, 0, 0)','(0, 0, 0, 0, 0, 1, 0)','(0, 0, 0, 0, 0, 2, 0)','(0, 0, 0, 0, 0, 0, 1)','(0, 0, 0, 0, 0, 0, 2)','(1, 0.5 hadronic)', '(1, 2 hadronic)','(-1, -1)','(1, 1)'] +# alpha_s determination +'alpha_s_extended' : ['(0.118)', '(0.122)', '(0.114)'] \ No newline at end of file diff --git a/validphys2/src/validphys/scalevariations/scalevariationtheoryids.yaml b/validphys2/src/validphys/scalevariations/scalevariationtheoryids.yaml index 98031c3498..b0b7eb542f 100644 --- a/validphys2/src/validphys/scalevariations/scalevariationtheoryids.yaml +++ b/validphys2/src/validphys/scalevariations/scalevariationtheoryids.yaml @@ -260,4 +260,36 @@ scale_variations_for: (1, 1): 1024 # IHOU + MHOU missing prescription (1, 0.5 hadronic): 1025 # As 1018 but DIS from 1000 - (1, 2 hadronic): 1026 # As 1021 but DIS from 1000 \ No newline at end of file + (1, 2 hadronic): 1026 # As 1021 but DIS from 1000 + # alpha_s variations + - theoryid: 708 + variations: + # alpha_s 3pt variations + (0.114): 804 + (0.115): 805 + (0.116): 806 + (0.117): 807 + (0.118): 708 + (0.119): 808 + (0.120): 809 + (0.121): 810 + (0.122): 811 + - theoryid: 717 + variations: + # alpha_s 3pt variations + (0.106): 830 + (0.114): 831 + (0.115): 832 + (0.116): 833 + (0.117): 834 + (0.118): 717 + (0.119): 835 + (0.120): 836 + (0.121): 837 + (0.122): 838 + (0.123): 839 + (0.124): 840 + (0.125): 841 + (0.130): 842 + + \ No newline at end of file diff --git a/validphys2/src/validphys/tests/test_pseudodata.py b/validphys2/src/validphys/tests/test_pseudodata.py index 74f2dd87d0..958a64fb2b 100644 --- a/validphys2/src/validphys/tests/test_pseudodata.py +++ b/validphys2/src/validphys/tests/test_pseudodata.py @@ -83,7 +83,7 @@ def test_read_matches_recreate(): pd.testing.assert_index_equal(read.val_idx, recreate.val_idx, check_order=False) -def test_level0_commondata_wc(): +def test_level0_commondata_wc_patched(): """ check whether level0_commondata_wc and dataset_t0_predictions coincide @@ -92,10 +92,13 @@ def test_level0_commondata_wc(): pdfname = PDF l = Loader() datasetspec = l.check_dataset(list(dataset.values())[0], theoryid=THEORYID) - t0set = l.check_pdf(pdfname) + t0set = l.check_pdf(pdfname) + l0_cd = API.level0_commondata_wc_patched( + dataset_inputs=[dataset], + use_cuts="internal", + theoryid=THEORYID, + fakepdf=pdfname, - l0_cd = API.level0_commondata_wc( - dataset_inputs=[dataset], use_cuts="internal", theoryid=THEORYID, fakepdf=pdfname ) l0_vals = l0_cd[0].central_values assert_allclose( diff --git a/validphys2/src/validphys/theorycovariance/construction.py b/validphys2/src/validphys/theorycovariance/construction.py index 4e9a200f22..b387b128a6 100644 --- a/validphys2/src/validphys/theorycovariance/construction.py +++ 
diff --git a/validphys2/src/validphys/theorycovariance/construction.py b/validphys2/src/validphys/theorycovariance/construction.py index 4e9a200f22..b387b128a6 100644 --- a/validphys2/src/validphys/theorycovariance/construction.py +++ b/validphys2/src/validphys/theorycovariance/construction.py @@ -92,6 +92,19 @@ def combine_by_type(each_dataset_results_central_bytheory): ) return process_info +def covmat_alphas(name1, name2, deltas1, deltas2): + """Returns the covariance sub-matrix for the 3-point alpha_s + variation, given two dataset names and the collections of + alpha_s shifts. It is treated like the 3-point factorisation + scale variation, since it is fully correlated across all + processes. + """ + s = 0.5 * (np.outer(deltas1[0], deltas2[0]) + np.outer(deltas1[1], deltas2[1])) + # NOTE: the commented-out definition below instead accounts for + # second-order derivatives of the theory prediction wrt alpha_s (see + # section 1.1 of 2105.05114) + # s = 0.5 * np.outer(deltas1[0] - deltas1[1], deltas2[0] - deltas2[1]) + return s def covmat_3fpt(name1, name2, deltas1, deltas2): """Returns theory covariance sub-matrix for 3pt factorisation @@ -294,7 +307,9 @@ def compute_covs_pt_prescrip( deltas2 = deltas1 if l == 3: - if point_prescription == "3f point": + if point_prescription.startswith("alpha_s"): + s = covmat_alphas(name1, name2, deltas1, deltas2) + elif point_prescription == "3f point": s = covmat_3fpt(name1, name2, deltas1, deltas2) elif point_prescription == "3r point": s = covmat_3rpt(name1, name2, deltas1, deltas2) diff --git a/validphys2/src/validphys/theorycovariance/theorycovarianceutils.py b/validphys2/src/validphys/theorycovariance/theorycovarianceutils.py index 33fbe12f4d..ed79aa2121 100644 --- a/validphys2/src/validphys/theorycovariance/theorycovarianceutils.py +++ b/validphys2/src/validphys/theorycovariance/theorycovarianceutils.py @@ -26,7 +26,10 @@ def check_correct_theory_combination_internal( xifs = [theoryid.get_description()["XIF"] for theoryid in theoryids] xirs = [theoryid.get_description()["XIR"] for theoryid in theoryids] if l == 3: - if point_prescription == "3f point": + if point_prescription.startswith("alpha_s"): + correct_xifs = [1.0, 1.0, 1.0] + correct_xirs = [1.0, 1.0, 1.0] + elif point_prescription == "3f point": correct_xifs = [1.0, 2.0, 0.5] correct_xirs = [1.0, 1.0, 1.0] elif point_prescription == "3r point":
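# --- Editor's sketch (not part of the patch) --------------------------------
# Standalone illustration of the covmat_alphas formula added above: with
# shifts d+/- = T(alphas +/- delta) - T(alphas_central) for two datasets, the
# sub-matrix is S = 0.5*(outer(d1+, d2+) + outer(d1-, d2-)), with no
# dependence on the dataset names (fully correlated across processes).
import numpy as np

def covmat_alphas_sketch(deltas1, deltas2):
    """3-point alpha_s covariance block from (plus, minus) shift pairs."""
    return 0.5 * (np.outer(deltas1[0], deltas2[0]) + np.outer(deltas1[1], deltas2[1]))

# Toy check on a diagonal block: a sum of outer products v v^T is positive
# semi-definite, as a covariance block must be.
d_plus, d_minus = np.array([0.3, -0.1]), np.array([-0.2, 0.2])
s = covmat_alphas_sketch((d_plus, d_minus), (d_plus, d_minus))
assert np.all(np.linalg.eigvalsh(s) >= -1e-12)
# -----------------------------------------------------------------------------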