Skip to content

Commit

Permalink
update eventflow
Browse files Browse the repository at this point in the history
  • Loading branch information
mieskolainen committed Jul 11, 2024
1 parent 6af465b commit 7275c13
Show file tree
Hide file tree
Showing 2 changed files with 18 additions and 17 deletions.
2 changes: 1 addition & 1 deletion configs/zee/plots.yml
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ OBS_reweight:
maxW: 30 # Maximum ML weight cutoff

# Renormalize weights (set null for no renormalization)
renorm_origin: null # Reference class ID (0,1) to use for the renormalization
renorm_origin: 1 # Reference class ID (0,1) to use for the renormalization

# Temperature values
tau_values: [0.6, 0.65, 0.7, 0.75, 0.8, 0.85, 0.9, 0.95, 1.0, 1.05, 1.1, 1.15]
Expand Down
33 changes: 17 additions & 16 deletions icezee/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
#from configs.zee.cuts import *
#from configs.zee.filter import *

def load_helper(mcfiles, datafiles, args):
def load_helper(mcfiles, datafiles, maxevents, args):

print(__name__ + '.load_helper:')
print(f'{mcfiles}')
Expand Down Expand Up @@ -96,7 +96,7 @@ def load_helper(mcfiles, datafiles, args):
W = np.concatenate((W_MC, W_data))

ids = LOAD_VARS # We use these

## -------------------------------------------------
# ** Drop negative weight events **
if args['drop_negative']:
Expand All @@ -107,6 +107,20 @@ def load_helper(mcfiles, datafiles, args):
W = W[~ind] # Boolean NOT
Y = Y[~ind]

# -------------------------------------------------------------------------
# ** Randomize MC vs Data order to avoid problems with other functions **
rand = np.random.permutation(len(X))
X = X[rand].squeeze() # Squeeze removes additional [] dimension
Y = Y[rand].squeeze()
W = W[rand].squeeze()
# -------------------------------------------------------------------------

# Apply maxevents cutoff
maxevents = np.min([maxevents, len(X)])
if maxevents < len(X):
print(__name__ + f'.load_root_file: Applying maxevents cutoff {maxevents}')
X, Y, W = X[0:maxevents], Y[0:maxevents], W[0:maxevents]

# Re-normalize MC to the event count
ind = (Y == 0)
W[ind] = W[ind] / np.sum(W[ind]) * len(W[ind])
Expand Down Expand Up @@ -164,7 +178,7 @@ def load_root_file(root_path, ids=None, entry_start=0, entry_stop=None, maxevent
mc_files = io.glob_expand_files(datasets=args["mcfile"][mode], datapath=root_path)
da_files = io.glob_expand_files(datasets=args["datafile"][mode], datapath=root_path)

X[mode],Y[mode],W[mode],ids = load_helper(mcfiles=mc_files, datafiles=da_files, args=args)
X[mode],Y[mode],W[mode],ids = load_helper(mcfiles=mc_files, datafiles=da_files, maxevents=maxevents, args=args)
running_split[mode] = np.arange(N_prev, len(X[mode]) + N_prev)

# Combine
Expand All @@ -175,19 +189,6 @@ def load_root_file(root_path, ids=None, entry_start=0, entry_stop=None, maxevent
# Aux info here
info = {'running_split': running_split}

# -------------------------------------------------------------------------
# ** Randomize MC vs Data order to avoid problems with other functions **
rand = np.random.permutation(len(X))
X = X[rand].squeeze() # Squeeze removes additional [] dimension
Y = Y[rand].squeeze()
W = W[rand].squeeze()

# Apply maxevents cutoff
maxevents = np.min([maxevents, len(X)])
if maxevents < len(X):
print(__name__ + f'.load_root_file: Applying maxevents cutoff {maxevents}')
X, Y, W = X[0:maxevents], Y[0:maxevents], W[0:maxevents]

return {'X':X, 'Y':Y, 'W':W, 'ids':ids, 'info':info}


Expand Down

0 comments on commit 7275c13

Please sign in to comment.