#!/usr/bin/env python3
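"""Launcher for the CV19_MD biobb workflow.

Generates a pmx YAML configuration and a COMPSs launch script for a set of
mutations, then submits the job with enqueue_compss (see launch()).
"""
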
from pathlib import Path
import argparse
import shutil
import subprocess

import oyaml as yaml


def create_biobb_pth_file(file_path):
    # Register the biobb package locations in a .pth file so the user-level
    # Python installation picks them up.
    with open(file_path, 'w') as pth_file:
        pth_file.write("/home/bsc23/bsc23210/macshare/biobb_common\n")
        pth_file.write("/home/bsc23/bsc23210/macshare/biobb_md\n")
        pth_file.write("/home/bsc23/bsc23210/macshare/biobb_pmx\n")
        pth_file.write("/home/bsc23/bsc23210/macshare/biobb_analysis\n")
        pth_file.write("/home/bsc23/bsc23210/macshare/biobb_adapters\n")
        pth_file.write("/home/bsc23/bsc23210/macshare/biobb_wf_pmxligand\n")
        pth_file.write("/home/bsc23/bsc23210/macshare/biobb_structure_utils\n")


def get_template_config_dict(config_yaml_path):
    # Load the template workflow configuration (YAML) into a dict.
    with open(config_yaml_path) as config_yaml_file:
        return yaml.safe_load(config_yaml_file)


def launch(mutation, wt_str, queue, num_nodes, compss_version, md_length,
           base_dir, compss_debug, time, output_dir, job_name, mpi_nodes):
    base_dir = Path(base_dir)
    pth_path = Path.home().joinpath('.local', 'lib', 'python3.6', 'site-packages', 'biobb.pth')
    template_yaml_path = base_dir.joinpath('workflows', 'CV19_MD', 'md_muts_sets.yaml')
    template_py_path = base_dir.joinpath('workflows', 'CV19_MD', 'md_add_muts_wt.py')

    # Create the biobb.pth file if it does not exist yet
    if not pth_path.exists():
        create_biobb_pth_file(pth_path)

    # Create the working dir path
    working_dir_path = base_dir.joinpath('CV_MDs', 'md_muts_set')
    if output_dir:
        if output_dir.startswith('/'):
            working_dir_path = Path(output_dir).resolve()
        else:
            working_dir_path = base_dir.joinpath('CV_MDs', output_dir)
    working_dir_path.mkdir(parents=True, exist_ok=True)

    # Check if it's the first launch; if not, number the run to avoid clashes
    run_number = 0
    run_dir = working_dir_path.joinpath("wf_pmx")
    config_yaml_path = working_dir_path.joinpath("pmx.yaml")
    wf_py_path = working_dir_path.joinpath("pmx.py")
    launch_path = working_dir_path.joinpath("launch.sh")
    if not job_name:
        job_name = "mdlaunch_job"
    base_job_name = job_name
    while run_dir.exists():
        run_number += 1
        run_dir = working_dir_path.joinpath(f"wf_pmx_{run_number}")
        config_yaml_path = working_dir_path.joinpath(f"pmx_{run_number}.yaml")
        wf_py_path = working_dir_path.joinpath(f"pmx_{run_number}.py")
        launch_path = working_dir_path.joinpath(f"launch_{run_number}.sh")
        job_name = f"{base_job_name}_{run_number}"

    # Copy the workflow py file
    shutil.copyfile(template_py_path, wf_py_path)

    # Read the yaml template file
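    # The template (md_muts_sets.yaml) must define the nested
    # 'step13_grompp_md' -> 'properties' -> 'mdp' section; 'working_dir_path',
    # 'mutations' and 'input_pdb' are (over)written below.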
    config_dict = get_template_config_dict(template_yaml_path)
    # Update config_dict
    config_dict['working_dir_path'] = str(run_dir)
    config_dict['mutations'] = mutation
    config_dict['input_pdb'] = wt_str
    # Convert the requested MD length into GROMACS nsteps (length / 0.002, i.e. a 2 fs timestep)
    config_dict['step13_grompp_md']['properties']['mdp']['nsteps'] = int(md_length / 0.002)
    with open(config_yaml_path, 'w') as config_yaml_file:
        config_yaml_file.write(yaml.dump(config_dict))

    # Create the launch script
    with open(launch_path, 'w') as launch_file:
        launch_file.write("#!/bin/bash\n")
        launch_file.write("\n")
        launch_file.write("module purge\n")
        launch_file.write("\n")
        launch_file.write("module load ANACONDA/2018.12_py3\n")
        launch_file.write("source activate biobb\n")
        launch_file.write("\n")
        launch_file.write("# COMPSs environment\n")
        launch_file.write("export COMPSS_PYTHON_VERSION=none\n")
        launch_file.write("# COMPSs release\n")
        launch_file.write(f"module load COMPSs/{compss_version}\n")
        launch_file.write("\n")
        launch_file.write("# Singularity\n")
        launch_file.write("module load singularity\n")
        launch_file.write("\n")
        launch_file.write("# GROMACS 2019\n")
        launch_file.write("module load intel/2018.4 impi/2018.4 mkl/2018.4 gromacs/2019.1\n")
        launch_file.write("\n")
        # launch_file.write("# TASK_TIME_OUT env var\n")
        # launch_file.write("# 3600 seconds = 1h\n")
        # launch_file.write("export TASK_TIME_OUT=\"3600\"\n")
        launch_file.write("# MULTINODE MPI env var\n")
        launch_file.write(f"export TASK_COMPUTING_NODES=\"{mpi_nodes}\"\n")
        launch_file.write("\n")
        launch_file.write("# Permissions for everyone\n")
        launch_file.write("umask ugo+rwx\n")
        launch_file.write("\n")
        launch_file.write("enqueue_compss ")
        if compss_debug:
            launch_file.write("-d ")
        launch_file.write(f"--job_name={job_name} "
                          f"--num_nodes={num_nodes} "
                          f"--exec_time={time} "
                          "--base_log_dir=$PWD "
                          "--worker_working_dir=$PWD "
                          "--master_working_dir=$PWD "
                          "--network=ethernet "
                          f"--qos={queue} "
                          f"{wf_py_path} "
                          f"--config {config_yaml_path} ")
        launch_file.write("\n")

    # Submit the job
    subprocess.call(f"bash {launch_path}", shell=True)


def main():
    parser = argparse.ArgumentParser(description="Workflow to model, setup and run MD simulations for a set of mutations.")
    parser.add_argument('-m', '--mutation', required=True, help="Mutation sets in WT;A:Arg6Gln,A:Asn13Lys;A:Glu31Asn,A:Lys43Asn format")
    parser.add_argument('-wt', '--wt_structure', required=True, default='wt.pdb', type=str, help="(wt.pdb) [Path to the WT structure]")
    parser.add_argument('-q', '--queue', required=False, default='bsc_ls', type=str, help="(bsc_ls) [bsc_ls|debug]")
    parser.add_argument('-t', '--time', required=False, default=120, type=int, help="(120) [integer] Time in minutes")
    parser.add_argument('-nn', '--num_nodes', required=False, default=1, type=int, help="(1) [integer]")
    parser.add_argument('-cv', '--compss_version', required=False, default='2.6.1', type=str, help="(2.6.1) [version_name]")
    parser.add_argument('-d', '--compss_debug', required=False, help="COMPSs debug mode", action='store_true')
    parser.add_argument('-l', '--md_length', required=False, default=10000, type=int, help="(10000) [integer] MD length in nanoseconds")
    parser.add_argument('-mpi', '--mpi_nodes', required=False, default=1, type=int, help="(1) [integer] Number of MPI nodes to be used per MD simulation")
    parser.add_argument('--base_dir', required=False, default='.', type=str, help="('.') [path_to_base_dir]")
    parser.add_argument('-o', '--output_dir', required=False, default='', type=str, help="Output dir name: absolute paths are used as-is, relative names are created under base_dir/CV_MDs/. If not given, base_dir/CV_MDs/md_muts_set is used.")
    parser.add_argument('-jn', '--job_name', required=False, default='', type=str, help="Job name; if not given, the name mdlaunch_job is used.")
    args = parser.parse_args()

    # Specific call of each building block
    launch(mutation=args.mutation,
           wt_str=args.wt_structure,
           queue=args.queue,
           time=args.time,
           num_nodes=args.num_nodes,
           compss_version=args.compss_version,
           compss_debug=args.compss_debug,
           md_length=args.md_length,
           mpi_nodes=args.mpi_nodes,
           output_dir=args.output_dir,
           job_name=args.job_name,
           base_dir=Path(args.base_dir))


if __name__ == '__main__':
    main()
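
# Example invocation (illustrative only; the paths, queue and mutation codes below
# are placeholders, not values shipped with the workflow):
#   ./mdlaunchCV.py -m "WT;A:Arg6Gln,A:Asn13Lys" -wt wt.pdb -q debug -t 60 \
#       -nn 2 -mpi 2 -l 5000 --base_dir /path/to/base_dir -o test_run -jn test_job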