### Installation with git
# 1) make sure you have set up the CMSSW environment
cmsrel CMSSW_5_3_3 # or newer from 53x family
cd CMSSW_5_3_3/src/
cmsenv
# 2) checkout and compile analysis code
git clone git@github.com:jpavel/cms-ucl-tau.git .
cd SFrame/
source setup.sh
make
cd ../ZHtautauAnalysis/
make
cd ..
# 3) compile FWlite code for di-tau mass reco
cd TauAnalysis/CandidateTools/
emacs -nw bin/nsvfitStandalone.cc # edit the standalone SVfit source here if needed before building
scram b -j 9
cd ../../
cmsenv # important to do after every recompilation
# 4) checkout lumi calc code
cvs co -r V04-02-08 RecoLuminosity/LumiDB
cd RecoLuminosity/LumiDB/
scram b -j 9
cd ../../
cmsenv
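# A typical invocation after the build might look like the following
# (the exact options and the JSON file name are assumptions; check the
# RecoLuminosity/LumiDB documentation for details):
# lumiCalc2.py overview -i myLumiJSON.json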
########################## Running on Condor
# All scripts are located in the ZHtautauAnalysis/CONDOR directory
# 1) Preparing submission commands
# A) Single sample
# Use the Prepare_condor_InputDir_OutputName_FilesPerJob_configFile.sh
# script, which takes 5 parameters: the input directory, the output name,
# the number of input files per job, the job configuration XML file, and
# the name of the folder where the results will be stored, e.g.:
source Prepare_condor_InputDir_OutputName_FilesPerJob_configFile.sh \
/storage/data/cms/store/user/jez/DoubleElectron_Run2012C/ \
DoubleElectron2012C 50 Analysis_config_T2.xml results
# B1) Multiple samples in the same directory
# Use the Prepare_listdir.sh script, which takes 6 parameters:
# the directory containing the input directories, the output name,
# a text file listing the names of the input directories, the number of
# files per job, the job configuration XML file, and the name of the
# folder where the results will be stored, e.g.:
source Prepare_listdir.sh \
/storage/data/cms/store/user/jez/ZHttNtuples_v2/53X/Data/ 2012 \
2012.list 50 Analysis_config_T2.xml results
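# The list file (2012.list above) is expected to contain one input
# directory name per line, for example (these names are hypothetical):
# DoubleElectron_Run2012A
# DoubleElectron_Run2012B
# DoubleMu_Run2012A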
# B2) The script above also creates a .sh file ending with _PrepareJobs.sh
# that can be edited to change some parameters (config, files per job)
# for a subset of the jobs, in case you do not want all jobs to run with
# the same configuration. Afterwards, execute the *_PrepareJobs.sh script
# (see the example below)
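# For the multi-sample submission prepared above, the file would be named
# after the chosen output name, e.g. (hypothetical name):
source 2012_PrepareJobs.sh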
# 2) Submit with condor_submit and the .cmd file created by the script
# above, or execute the *_RunAll.sh script created when preparing a
# multi-sample submission to submit all samples at once, e.g.:
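# (the file names below are hypothetical and depend on the chosen output name)
condor_submit DoubleElectron2012C.cmd # single sample
source 2012_RunAll.sh # all samples prepared above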
# 3) Monitor the jobs by executing the *_ShowStatus.sh script created
# during the submission process and named after the chosen output name.
# It also checks for failed jobs and prepares their resubmission; follow
# the instructions printed on the screen, e.g.:
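# (hypothetical name, derived from the output name chosen above)
source 2012_ShowStatus.sh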
# 4) Enjoy results stored in the directory you specified in submission
############################### Running locally
# Local running is also possible, but not all features are available on
# computers without CMSSW, e.g. full mass reconstruction or lumi
# calculation. To run locally, change the input file defined in
# Analysis_config.xml appropriately and then run:
cd ZHtautauAnalysis/config
sframe-main Analysis_config.xml
# The code executed by this command is stored in the src/Analysis.cxx file
# and steered via the XML file that is passed as the first argument to the
# sframe-main program (a sketch of an input file entry is given below)
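# A minimal sketch of how an input file entry might look inside
# Analysis_config.xml; the surrounding <InputData> element and its
# attribute values are assumptions about the SFrame configuration, only
# the <In FileName=... Lumi=.../> tag format is taken from this README:
# <InputData Type="DATA" Version="DoubleElectron2012C" Lumi="1.0">
#   <In FileName="/path/to/my_ntuple.root" Lumi="1.0" />
# </InputData>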
########### misc (see http://git-scm.com/book for more help)
## address of the git repository
https://github.com/jpavel/cms-ucl-tau
##
## Obtaining Kerberos ticket from CERN for 24h password-less access to CERN
# use setup described e.g. here: http://david.decotigny.free.fr/wiki/wakka.php?wiki=AfsOnUbuntu
kinit <AFSuserName>@CERN.CH
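# depending on your AFS setup you may also need to convert the Kerberos
# ticket into an AFS token (assumption, not part of the original recipe):
aklog cern.ch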
## making a lightweight tag
git tag v1.4
### making an annotated tag (recommended)
git tag -a v1.4 -m "making tag v1.4"
## switching to a new tag or branch
git checkout v1.4
## create new branch
git branch myNewBranch
## switch to branch
git checkout myNewBranch
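## create a new branch and switch to it in one step
git checkout -b myNewBranch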
## pushing to a server
git push
## review status of work directory
git status
## adding a remote repository (in addition to origin, which is defined automatically if you clone as above)
git remote add <MyRepositoryNickname> <MyRepositoryURL> # e.g. git remote add pb git://github.com/paulboone/ticgit.git
## check what is new in the repository (pushes by other users)
git fetch # a repository nickname can be given as the first argument; this command does not touch your local branches, it only retrieves information about changes in the remote
## update your local repository and merge with remote
git pull # if merging is not possible due to conflicts, git will inform you
## inspecting a remote
git remote show origin
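## list all configured remotes and their URLs
git remote -v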
##################################
## Running at ULB cluster
##################################
ssh m2.iihe.ac.be # connect without password
source $VO_CMS_SW_DIR/cmsset_default.sh # basic cms environment
cmsrel CMSSW_5_3_3_patch3 #prepare the CMSSW area
# Standard stuff
cd CMSSW_5_3_3_patch3/src/
cmsenv
# checkout and compile SFRAME and analysis as above
# make a grid proxy (a .globus directory with your certificate is necessary in your home dir)
voms-proxy-init --voms cms:/cms/becms
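# to check that the proxy was created and how long it remains valid:
voms-proxy-info --all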
# define input in Analysis_config.xml - If the location is e.g. /pnfs/iihe/cms/store/user/jez/ZHTo2TauAna_52x_test0/output_Ntuples_10_1_rCX.root, the input file tag should read
# <In FileName="dcap://maite.iihe.ac.be/pnfs/iihe/cms/store/user/jez/ZHTo2TauAna_52x_test0/output_Ntuples_10_1_rCX.root" Lumi="1.0" />
# RUN!
cd config
sframe-main Analysis_config.xml