Merge pull request #63 from UMCUGenetics/Release/2.2.2
Release/2.2.2
FiniDG authored Nov 2, 2021
2 parents 4181138 + 228fabe commit 5ad46a5
Showing 8 changed files with 344 additions and 31 deletions.
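Most of the run.sh changes in this release switch the Slurm dependency type on the chained submissions from afterany (start once the listed jobs have ended, whatever their exit status) to afterok (start only if they all exited successfully), and every occurrence shortens the trailing :+10 suffix on the dependency list to :+5. As a standalone sketch of the submission-chaining pattern involved, assuming only standard sbatch behaviour (the job names and scripts below are placeholders, not pipeline files):

# Sketch only: submit a job, then a follow-up that runs only if the first exits 0.
# --parsable makes sbatch print just the job id; step1.sh and step2.sh are hypothetical.
first_id=$(sbatch --parsable --job-name=step1 step1.sh)
sbatch --job-name=step2 --dependency=afterok:${first_id} step2.sh
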
36 changes: 20 additions & 16 deletions pipeline/run.sh
@@ -168,7 +168,7 @@ done
job_ids=\${job_ids::-1}
echo \${job_ids}
sbatch --job-name=1-queueStart_${name} --time=${queue_1_time} --mem=${queue_1_mem} --dependency=afterany:\${job_ids}:+10 --output=${outdir}/logs/queue/1-queueStart.o --error=${outdir}/logs/queue/1-queueStart.e ${global_sbatch_parameters} ${outdir}/jobs/queue/1-queueStart.sh
sbatch --job-name=1-queueStart_${name} --time=${queue_1_time} --mem=${queue_1_mem} --dependency=afterok:\${job_ids}:+5 --output=${outdir}/logs/queue/1-queueStart.o --error=${outdir}/logs/queue/1-queueStart.e ${global_sbatch_parameters} ${outdir}/jobs/queue/1-queueStart.sh
EOF

# 1-queueStart.sh
@@ -193,7 +193,8 @@ for mzML in ${outdir}/1-data/*.mzML ; do
${rscript} ${scripts}/2-DIMS.R \$mzML ${outdir} ${trim} ${dims_thresh} ${resol} ${scripts}
" > ${outdir}/jobs/2-DIMS/\${input}.sh
cur_id=\$(sbatch --job-name=2-dims_\${input}_${name} --time=${job_2_time} --mem=${job_2_mem} --dependency=afterok:\${break_id}:+10 --output=${outdir}/logs/2-DIMS/\${input}.o --error=${outdir}/logs/2-DIMS/\${input}.e ${global_sbatch_parameters} ${outdir}/jobs/2-DIMS/\${input}.sh)
cur_id=\$(sbatch --job-name=2-dims_\${input}_${name} --time=${job_2_time} --mem=${job_2_mem} --dependency=afterok:\${break_id}:+5 --output=${outdir}/logs/2-DIMS/\${input}.o --error=${outdir}/logs/2-DIMS/\${input}.e ${global_sbatch_parameters} ${outdir}/jobs/2-DIMS/\${input}.sh)
job_ids+="\${cur_id}:"
done
job_ids=\${job_ids::-1} # remove last :
@@ -203,11 +204,12 @@ echo "#!/bin/sh
/hpc/local/CentOS7/common/lang/R/3.2.2/bin/Rscript ${scripts}/3-averageTechReplicates.R ${indir} ${outdir} ${nrepl} ${thresh2remove} ${dims_thresh} ${scripts}
" > ${outdir}/jobs/3-averageTechReplicates/average.sh
avg_id=\$(sbatch --job-name=3-average_\${input}_${name} --time=${job_3_time} --mem=${job_3_mem} --dependency=afterok:\${job_ids}:+10 --output=${outdir}/logs/3-averageTechReplicates/average.o --error=${outdir}/logs/3-averageTechReplicates/average.e ${global_sbatch_parameters} ${outdir}/jobs/3-averageTechReplicates/average.sh)
avg_id=\$(sbatch --job-name=3-average_\${input}_${name} --time=${job_3_time} --mem=${job_3_mem} --dependency=afterok:\${job_ids}:+5 --output=${outdir}/logs/3-averageTechReplicates/average.o --error=${outdir}/logs/3-averageTechReplicates/average.e ${global_sbatch_parameters} ${outdir}/jobs/3-averageTechReplicates/average.sh)
# start next queue
sbatch --job-name=2-queuePeakFinding_positive_${name} --time=${queue_2_time} --mem=${queue_2_mem} --dependency=afterok:\${avg_id}:+10 --output=${outdir}/logs/queue/2-queuePeakFinding_positive.o --error=${outdir}/logs/queue/2-queuePeakFinding_positive.e ${global_sbatch_parameters} ${outdir}/jobs/queue/2-queuePeakFinding_positive.sh
sbatch --job-name=2-queuePeakFinding_negative_${name} --time=${queue_2_time} --mem=${queue_2_mem} --dependency=afterok:\${avg_id}:+10 --output=${outdir}/logs/queue/2-queuePeakFinding_negative.o --error=${outdir}/logs/queue/2-queuePeakFinding_negative.e ${global_sbatch_parameters} ${outdir}/jobs/queue/2-queuePeakFinding_negative.sh
sbatch --job-name=2-queuePeakFinding_positive_${name} --time=${queue_2_time} --mem=${queue_2_mem} --dependency=afterok:\${avg_id}:+5 --output=${outdir}/logs/queue/2-queuePeakFinding_positive.o --error=${outdir}/logs/queue/2-queuePeakFinding_positive.e ${global_sbatch_parameters} ${outdir}/jobs/queue/2-queuePeakFinding_positive.sh
sbatch --job-name=2-queuePeakFinding_negative_${name} --time=${queue_2_time} --mem=${queue_2_mem} --dependency=afterok:\${avg_id}:+5 --output=${outdir}/logs/queue/2-queuePeakFinding_negative.o --error=${outdir}/logs/queue/2-queuePeakFinding_negative.e ${global_sbatch_parameters} ${outdir}/jobs/queue/2-queuePeakFinding_negative.sh
EOF

# 14-cleanup.sh
@@ -300,7 +302,7 @@ echo "#!/bin/sh
/hpc/local/CentOS7/common/lang/R/3.2.2/bin/Rscript ${scripts}/5-collectSamples.R ${outdir} ${scanmode}
" > ${outdir}/jobs/5-collectSamples/${scanmode}.sh
col_id=\$(sbatch --job-name=5-collectSamples_${scanmode}_${name} --time=${job_5_time} --mem=${job_5_mem} --dependency=afterany:\${job_ids}:+10 --output=${outdir}/logs/5-collectSamples/${scanmode}.o --error=${outdir}/logs/5-collectSamples/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/5-collectSamples/${scanmode}.sh)
col_id=\$(sbatch --job-name=5-collectSamples_${scanmode}_${name} --time=${job_5_time} --mem=${job_5_mem} --dependency=afterok:\${job_ids}:+5 --output=${outdir}/logs/5-collectSamples/${scanmode}.o --error=${outdir}/logs/5-collectSamples/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/5-collectSamples/${scanmode}.sh)
# hmdb_part.R
echo "#!/bin/sh
@@ -319,7 +321,7 @@ hmdb_id_2=\$(sbatch --job-name=hmdb_part_adductSums_${scanmode}_${name} --time=$
echo "${hmdb_id_2}" > ${outdir}/logs/hmdb_2
# start next queue
sbatch --job-name=3-queuePeakGrouping_${scanmode}_${name} --time=${queue_3_time} --mem=${queue_3_mem} --dependency=afterany:\${col_id}:\${hmdb_id_1}:\${hmdb_id_2}:+10 --output=${outdir}/logs/queue/3-queuePeakGrouping_${scanmode}.o --error=${outdir}/logs/queue/3-queuePeakGrouping_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/3-queuePeakGrouping_${scanmode}.sh
sbatch --job-name=3-queuePeakGrouping_${scanmode}_${name} --time=${queue_3_time} --mem=${queue_3_mem} --dependency=afterok:\${col_id}:\${hmdb_id_1}:\${hmdb_id_2}:+5 --output=${outdir}/logs/queue/3-queuePeakGrouping_${scanmode}.o --error=${outdir}/logs/queue/3-queuePeakGrouping_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/3-queuePeakGrouping_${scanmode}.sh
EOF

# 3-queuePeakGrouping.sh
@@ -344,10 +346,10 @@ echo "#!/bin/sh
/hpc/local/CentOS7/common/lang/R/3.2.2/bin/Rscript ${scripts}/7-collectSamplesGroupedHMDB.R ${outdir} ${scanmode} ${ppm}
" > ${outdir}/jobs/7-collectSamplesGroupedHMDB/${scanmode}.sh
col_id=\$(sbatch --job-name=7-collectSamplesGroupedHMDB_${scanmode}_${name} --time=${job_7_time} --mem=${job_7_mem} --dependency=afterany:\${job_ids}:+10 --output=${outdir}/logs/7-collectSamplesGroupedHMDB/${scanmode}.o --error=${outdir}/logs/7-collectSamplesGroupedHMDB/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/7-collectSamplesGroupedHMDB/${scanmode}.sh)
col_id=\$(sbatch --job-name=7-collectSamplesGroupedHMDB_${scanmode}_${name} --time=${job_7_time} --mem=${job_7_mem} --dependency=afterok:\${job_ids}:+5 --output=${outdir}/logs/7-collectSamplesGroupedHMDB/${scanmode}.o --error=${outdir}/logs/7-collectSamplesGroupedHMDB/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/7-collectSamplesGroupedHMDB/${scanmode}.sh)
# start next queue
sbatch --job-name=4-queuePeakGroupingRest_${scanmode}_${name} --time=${queue_4_time} --mem=${queue_4_mem} --dependency=afterany:\${col_id}:+10 --output=${outdir}/logs/queue/4-queuePeakGroupingRest_${scanmode}.o --error=${outdir}/logs/queue/4-queuePeakGroupingRest_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/4-queuePeakGroupingRest_${scanmode}.sh
sbatch --job-name=4-queuePeakGroupingRest_${scanmode}_${name} --time=${queue_4_time} --mem=${queue_4_mem} --dependency=afterok:\${col_id}:+5 --output=${outdir}/logs/queue/4-queuePeakGroupingRest_${scanmode}.o --error=${outdir}/logs/queue/4-queuePeakGroupingRest_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/4-queuePeakGroupingRest_${scanmode}.sh
EOF

# 4-queuePeakGroupingRest.sh
@@ -369,7 +371,7 @@ done
job_ids=\${job_ids::-1}
# start next queue
sbatch --job-name=5-queueFillMissing_${scanmode}_${name} --time=${queue_5_time} --mem=${queue_5_mem} --dependency=afterany:\${job_ids}:+10 --output=${outdir}/logs/queue/5-queueFillMissing_${scanmode}.o --error=${outdir}/logs/queue/5-queueFillMissing_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/5-queueFillMissing_${scanmode}.sh
sbatch --job-name=5-queueFillMissing_${scanmode}_${name} --time=${queue_5_time} --mem=${queue_5_mem} --dependency=afterok:\${job_ids}:+5 --output=${outdir}/logs/queue/5-queueFillMissing_${scanmode}.o --error=${outdir}/logs/queue/5-queueFillMissing_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/5-queueFillMissing_${scanmode}.sh
EOF

# 5-queueFillMissing.sh
@@ -388,6 +390,7 @@ for file in ${outdir}/8-grouping_rest/${scanmode}_* ; do
cur_id=\$(sbatch --job-name=9-runFillMissing_1_\${input}_${scanmode}_${name} --time=${job_9a_time} --mem=${job_9a_mem} --output=${outdir}/logs/9-runFillMissing/rest_${scanmode}_\${input}.o --error=${outdir}/logs/9-runFillMissing/rest_${scanmode}_\${input}.e ${global_sbatch_parameters} ${outdir}/jobs/9-runFillMissing/rest_${scanmode}_\${input}.sh)
job_ids+="\${cur_id}:"
done
wait
for file in ${outdir}/6-grouping_hmdb/*_${scanmode}.RData ; do
input=\$(basename \$file .RData)
@@ -397,20 +400,21 @@ for file in ${outdir}/6-grouping_hmdb/*_${scanmode}.RData ; do
/hpc/local/CentOS7/common/lang/R/3.2.2/bin/Rscript ${scripts}/9-runFillMissing.R \$file ${outdir} ${scanmode} ${thresh} ${resol} ${scripts}
" > ${outdir}/jobs/9-runFillMissing/hmdb_${scanmode}_\${input}.sh
cut_id=\$(sbatch --job-name=9-runFillMissing_2_\${input}_${scanmode}_${name} --time=${job_9b_time} --mem=${job_9b_mem} --output=${outdir}/logs/9-runFillMissing/hmdb_${scanmode}_\${input}.o --error=${outdir}/logs/9-runFillMissing/hmdb_${scanmode}_\${input}.e ${global_sbatch_parameters} ${outdir}/jobs/9-runFillMissing/hmdb_${scanmode}_\${input}.sh)
cur_id=\$(sbatch --job-name=9-runFillMissing_2_\${input}_${scanmode}_${name} --time=${job_9b_time} --mem=${job_9b_mem} --output=${outdir}/logs/9-runFillMissing/hmdb_${scanmode}_\${input}.o --error=${outdir}/logs/9-runFillMissing/hmdb_${scanmode}_\${input}.e ${global_sbatch_parameters} ${outdir}/jobs/9-runFillMissing/hmdb_${scanmode}_\${input}.sh)
job_ids+="\${cur_id}:"
done
wait
job_ids=\${job_ids::-1}
# 10-collectSamplesFilled
echo "#!/bin/sh
/hpc/local/CentOS7/common/lang/R/3.2.2/bin/Rscript ${scripts}/10-collectSamplesFilled.R ${outdir} ${scanmode} ${normalization} ${scripts} ${z_score} ${ppm}
" > ${outdir}/jobs/10-collectSamplesFilled/${scanmode}.sh
col_id=\$(sbatch --job-name=10-collectSamplesFilled_${scanmode}_${name} --time=${job_10_time} --mem=${job_10_mem} --dependency=afterany:\${job_ids}:+10 --output=${outdir}/logs/10-collectSamplesFilled/${scanmode}.o --error=${outdir}/logs/10-collectSamplesFilled/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/10-collectSamplesFilled/${scanmode}.sh)
col_id=\$(sbatch --job-name=10-collectSamplesFilled_${scanmode}_${name} --time=${job_10_time} --mem=${job_10_mem} --dependency=afterok:\${job_ids}:+5 --output=${outdir}/logs/10-collectSamplesFilled/${scanmode}.o --error=${outdir}/logs/10-collectSamplesFilled/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/10-collectSamplesFilled/${scanmode}.sh)
# start next queue
sbatch --job-name=6-queueSumAdducts_${scanmode}_${name} --time=${queue_6_time} --mem=${queue_6_mem} --dependency=afterany:\${col_id}:+10 --output=${outdir}/logs/queue/6-queueSumAdducts_${scanmode}.o --error=${outdir}/logs/queue/6-queueSumAdducts_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/6-queueSumAdducts_${scanmode}.sh
sbatch --job-name=6-queueSumAdducts_${scanmode}_${name} --time=${queue_6_time} --mem=${queue_6_mem} --dependency=afterok:\${col_id}:+5 --output=${outdir}/logs/queue/6-queueSumAdducts_${scanmode}.o --error=${outdir}/logs/queue/6-queueSumAdducts_${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/queue/6-queueSumAdducts_${scanmode}.sh
EOF

# 6-queueSumAdducts.sh
@@ -436,7 +440,7 @@ echo "#!/bin/sh
/hpc/local/CentOS7/common/lang/R/3.2.2/bin/Rscript ${scripts}/12-collectSamplesAdded.R ${outdir} ${scanmode} ${scripts}
" > ${outdir}/jobs/12-collectSamplesAdded/${scanmode}.sh
col_id=\$(sbatch --job-name=12-collectSamplesAdded_${scanmode}_${name} --time=${job_12_time} --mem=${job_12_mem} --dependency=afterany:\${job_ids}:+10 --output=${outdir}/logs/12-collectSamplesAdded/${scanmode}.o --error=${outdir}/logs/12-collectSamplesAdded/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/12-collectSamplesAdded/${scanmode}.sh)
col_id=\$(sbatch --job-name=12-collectSamplesAdded_${scanmode}_${name} --time=${job_12_time} --mem=${job_12_mem} --dependency=afterok:\${job_ids}:+5 --output=${outdir}/logs/12-collectSamplesAdded/${scanmode}.o --error=${outdir}/logs/12-collectSamplesAdded/${scanmode}.e ${global_sbatch_parameters} ${outdir}/jobs/12-collectSamplesAdded/${scanmode}.sh)
if [ -f "${outdir}/logs/done" ]; then # if one of the scanmodes has already finished
echo other scanmode already finished queueing - queue next step
@@ -448,8 +452,8 @@ if [ -f "${outdir}/logs/done" ]; then # if one of the scanmodes has already finished
${rscript} ${scripts}/13-excelExport.R ${outdir} ${name} ${matrix} ${db2} ${z_score}
" > ${outdir}/jobs/13-excelExport.sh
exp_id=\$(sbatch --job-name=13-excelExport_${name} --time=${job_13_time} --mem=${job_13_mem} --dependency=afterany:\${col_ids}:+10 --output=${outdir}/logs/13-excelExport/exp.o --error=${outdir}/logs/13-excelExport/exp.e ${global_sbatch_parameters} ${outdir}/jobs/13-excelExport.sh)
sbatch --job-name=14-cleanup_${name} --time=${job_14_time} --mem=${job_14_mem} --dependency=afterany:\${exp_id}:+10 --output=${outdir}/logs/14-cleanup.o --error=${outdir}/logs/14-cleanup.e ${global_sbatch_parameters} ${outdir}/jobs/14-cleanup.sh
exp_id=\$(sbatch --job-name=13-excelExport_${name} --time=${job_13_time} --mem=${job_13_mem} --dependency=afterok:\${col_ids}:+5 --output=${outdir}/logs/13-excelExport/exp.o --error=${outdir}/logs/13-excelExport/exp.e ${global_sbatch_parameters} ${outdir}/jobs/13-excelExport.sh)
sbatch --job-name=14-cleanup_${name} --time=${job_14_time} --mem=${job_14_mem} --dependency=afterok:\${exp_id}:+5 --output=${outdir}/logs/14-cleanup.o --error=${outdir}/logs/14-cleanup.e ${global_sbatch_parameters} ${outdir}/jobs/14-cleanup.sh
else
echo other scanmode not queued yet, not yet queueing next step
echo \${col_id} > ${outdir}/logs/done
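
Several of the hunks above accumulate a colon-separated dependency list (job_ids+="${cur_id}:") and then trim the trailing colon with ${job_ids::-1}. A minimal illustration of that pattern with stand-in ids, not actual sbatch output:

# Illustration only: join fake job ids with ":", strip the final ":" using
# bash's negative-length substring (${var::-1}, bash 4.2+), and reuse the
# result in a single --dependency argument.
job_ids=""
for id in 101 102 103; do    # stand-ins for ids captured from sbatch
    job_ids+="${id}:"
done
job_ids=${job_ids::-1}       # 101:102:103
echo "--dependency=afterok:${job_ids}"
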
2 changes: 1 addition & 1 deletion pipeline/scripts/10-collectSamplesFilled.R
@@ -24,10 +24,10 @@ ppm <- as.numeric(cmd_args[6])
#z_score <- 0

object.files = list.files(paste(outdir, "9-samplePeaksFilled", sep="/"), full.names=TRUE, pattern=scanmode)

outlist.tot=NULL
for (i in 1:length(object.files)) {
load(object.files[i])
print(print(object.files[i]))
outlist.tot = rbind(outlist.tot, final.outlist.idpat3)
}

65 changes: 65 additions & 0 deletions pipeline/tools/settings.config-csf
@@ -0,0 +1,65 @@
# manual
thresh_pos=2000
thresh_neg=2000
dims_thresh=100
trim=0.1
nrepl=3
normalization=disabled
thresh2remove=1000000000
resol=140000
[email protected]
matrix=CSF
proteowizard=/hpc/dbg_mz/tools/proteowizard_3.0.19252-aa45583de
db=/hpc/dbg_mz/tools/db/HMDB_add_iso_corrNaCl_withIS_withC5OH.RData
db2=/hpc/dbg_mz/tools/db/HMDB_with_info_relevance_IS_C5OH.RData
z_score=1
job_0_time=00:15:00
job_0_mem=2G
job_1_time=00:15:00
job_1_mem=2G
job_2_time=00:15:00
job_2_mem=4G
job_3_time=02:30:00
job_3_mem=5G
job_4_time=02:00:00
job_4_mem=8G
job_5_time=03:00:00
job_5_mem=8G
job_6_time=04:30:00
job_6_mem=4G
job_7_time=02:00:00
job_7_mem=8G
job_8_time=02:00:00
job_8_mem=8G
job_9a_time=01:30:00
job_9a_mem=4G
job_9b_time=01:30:00
job_9b_mem=4G
job_10_time=02:00:00
job_10_mem=8G
job_11_time=03:30:00
job_11_mem=8G
job_12_time=01:30:00
job_12_mem=8G
job_13_time=02:00:00
job_13_mem=8G
job_14_time=00:15:00
job_14_mem=500M
queue_0_time=00:15:00
queue_0_mem=1G
queue_1_time=00:15:00
queue_1_mem=1G
queue_2_time=00:15:00
queue_2_mem=500M
queue_3_time=00:15:00
queue_3_mem=500M
queue_4_time=00:15:00
queue_4_mem=500M
queue_5_time=00:15:00
queue_5_mem=500M
queue_6_time=00:15:00
queue_6_mem=500M
job_hmdb1_time=04:00:00
job_hmdb1_mem=8G
job_hmdb2_time=04:00:00
job_hmdb2_mem=8G
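
The new per-matrix settings files are flat key=value lists; presumably run.sh loads one of them so that each key becomes a shell variable, although the loading mechanism itself is not part of this diff. A hedged sketch of that assumed usage:

# Assumption, not shown in this commit: source the settings file and reuse the
# per-step resource values when submitting. The job script name is a placeholder.
source pipeline/tools/settings.config-csf
echo "matrix=${matrix}, resol=${resol}, z_score=${z_score}"
sbatch --time=${job_2_time} --mem=${job_2_mem} 2-DIMS-job.sh
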
64 changes: 64 additions & 0 deletions pipeline/tools/settings.config-dbs
@@ -0,0 +1,64 @@
# manual
thresh_pos=2000
thresh_neg=2000
dims_thresh=100
trim=0.1
nrepl=3
normalization=disabled
thresh2remove=500000000
resol=140000
[email protected]
matrix=DBS
z_score=1
db=/hpc/dbg_mz/tools/db/HMDB_add_iso_corrNaCl_withIS_withC5OH.RData
db2=/hpc/dbg_mz/tools/db/HMDB_with_info_relevance_IS_C5OH.RData
job_0_time=00:15:00
job_0_mem=2G
job_1_time=00:15:00
job_1_mem=2G
job_2_time=00:15:00
job_2_mem=4G
job_3_time=02:30:00
job_3_mem=5G
job_4_time=02:00:00
job_4_mem=8G
job_5_time=03:00:00
job_5_mem=8G
job_6_time=04:30:00
job_6_mem=4G
job_7_time=02:00:00
job_7_mem=8G
job_8_time=02:00:00
job_8_mem=8G
job_9a_time=01:30:00
job_9a_mem=4G
job_9b_time=01:30:00
job_9b_mem=4G
job_10_time=02:00:00
job_10_mem=8G
job_11_time=03:30:00
job_11_mem=8G
job_12_time=01:30:00
job_12_mem=8G
job_13_time=02:00:00
job_13_mem=8G
job_14_time=00:15:00
job_14_mem=500M
queue_0_time=00:15:00
queue_0_mem=1G
queue_1_time=00:15:00
queue_1_mem=1G
queue_2_time=00:15:00
queue_2_mem=500M
queue_3_time=00:15:00
queue_3_mem=500M
queue_4_time=00:15:00
queue_4_mem=500M
queue_5_time=00:15:00
queue_5_mem=500M
queue_6_time=00:15:00
queue_6_mem=500M
job_hmdb1_time=04:00:00
job_hmdb1_mem=8G
job_hmdb2_time=04:00:00
job_hmdb2_mem=8G
14 changes: 0 additions & 14 deletions pipeline/tools/settings.config-example

This file was deleted.

64 changes: 64 additions & 0 deletions pipeline/tools/settings.config-plasma
@@ -0,0 +1,64 @@
# manual
thresh_pos=2000
thresh_neg=2000
dims_thresh=100
trim=0.1
nrepl=3
normalization=disabled
thresh2remove=1000000000
resol=140000
[email protected]
matrix=Plasma
z_score=1
db=/hpc/dbg_mz/tools/db/HMDB_add_iso_corrNaCl_withIS_withC5OH.RData
db2=/hpc/dbg_mz/tools/db/HMDB_with_info_relevance_IS_C5OH.RData
job_0_time=00:15:00
job_0_mem=2G
job_1_time=00:15:00
job_1_mem=2G
job_2_time=00:15:00
job_2_mem=4G
job_3_time=02:30:00
job_3_mem=5G
job_4_time=02:00:00
job_4_mem=8G
job_5_time=03:00:00
job_5_mem=8G
job_6_time=04:30:00
job_6_mem=4G
job_7_time=02:00:00
job_7_mem=8G
job_8_time=02:00:00
job_8_mem=8G
job_9a_time=01:30:00
job_9a_mem=4G
job_9b_time=01:30:00
job_9b_mem=4G
job_10_time=02:00:00
job_10_mem=8G
job_11_time=03:30:00
job_11_mem=8G
job_12_time=01:30:00
job_12_mem=8G
job_13_time=02:00:00
job_13_mem=8G
job_14_time=00:15:00
job_14_mem=500M
queue_0_time=00:15:00
queue_0_mem=1G
queue_1_time=00:15:00
queue_1_mem=1G
queue_2_time=00:15:00
queue_2_mem=500M
queue_3_time=00:15:00
queue_3_mem=500M
queue_4_time=00:15:00
queue_4_mem=500M
queue_5_time=00:15:00
queue_5_mem=500M
queue_6_time=00:15:00
queue_6_mem=500M
job_hmdb1_time=04:00:00
job_hmdb1_mem=8G
job_hmdb2_time=04:00:00
job_hmdb2_mem=8G