diff --git a/.bandit b/.bandit
new file mode 100644
index 00000000000..96b0e563021
--- /dev/null
+++ b/.bandit
@@ -0,0 +1,20 @@
+# NOTE: At present Bandit does *not* automatically source the .bandit
+# configuration file, invoke like so:
+# $ bandit -r --ini .bandit
+# For development it may be convenient to use flake8-bandit.
+
+[bandit]
+targets: cylc/flow
+skips: B404,B607
+# B404: import_subprocess - consider security implications
+# * Ignored as the use of subprocesses is a project wide decision.
+# * The purpose of cylc is to run user defined code therefore the use
+#   of subprocesses is unavoidable.
+# B607: Starting a process with a partial executable path
+# * Ignored as Cylc needs to be able to call out to executables which
+#   may have to be installed into environments so we cannot specify
+#   absolute paths. In some cases this may be required for portability.
+# * Users can modify their $PATH to get Cylc to call malicious scripts,
+#   however, the purpose of Cylc Flow is to run user-defined code making
+#   this a moot point, note all code is run as the user. Cylc Flow does
+#   *not* provide multi-user functionality.
diff --git a/.github/workflows/test_fast.yml b/.github/workflows/test_fast.yml
index c8cbc52e04a..0664b66a841 100644
--- a/.github/workflows/test_fast.yml
+++ b/.github/workflows/test_fast.yml
@@ -70,6 +70,10 @@ jobs:
         run: |
           pytest tests/unit
 
+      - name: Bandit
+        run: |
+          bandit -r --ini .bandit cylc/flow
+
       - name: Integration Tests
         run: |
           pytest tests/integration
diff --git a/cylc/flow/dbstatecheck.py b/cylc/flow/dbstatecheck.py
index 7b7a3d32371..bd95c91432e 100644
--- a/cylc/flow/dbstatecheck.py
+++ b/cylc/flow/dbstatecheck.py
@@ -75,9 +75,16 @@ def display_maps(res):
     def get_remote_point_format(self):
         """Query a remote workflow database for a 'cycle point format' entry"""
         for row in self.conn.execute(
-                r"SELECT value FROM " + CylcWorkflowDAO.TABLE_WORKFLOW_PARAMS +
-                r" WHERE key==?",
-                ['cycle_point_format']):
+            rf'''
+                SELECT
+                    value
+                FROM
+                    {CylcWorkflowDAO.TABLE_WORKFLOW_PARAMS}
+                WHERE
+                    key==?
+            ''',  # nosec (table name is code constant)
+            ['cycle_point_format']
+        ):
             return row[0]
 
     def state_lookup(self, state):
@@ -102,7 +109,14 @@ def workflow_state_query(
         else:
             target_table = CylcWorkflowDAO.TABLE_TASK_STATES
 
-        stmt = "select {0} from {1}".format(mask, target_table)
+        stmt = rf'''
+            SELECT
+                {mask}
+            FROM
+                {target_table}
+        '''  # nosec
+        # * mask is hardcoded
+        # * target_table is a code constant
         if task is not None:
             stmt_wheres.append("name==?")
             stmt_args.append(task)
diff --git a/cylc/flow/install_plugins/log_vc_info.py b/cylc/flow/install_plugins/log_vc_info.py
index 6da9eaf0ff2..52315258a9b 100644
--- a/cylc/flow/install_plugins/log_vc_info.py
+++ b/cylc/flow/install_plugins/log_vc_info.py
@@ -192,8 +192,15 @@ def _run_cmd(vcs: str, args: Iterable[str], cwd: Union[Path, str]) -> str:
     """
     cmd = [vcs, *args]
     try:
-        proc = Popen(
-            cmd, cwd=cwd, stdin=DEVNULL, stdout=PIPE, stderr=PIPE, text=True)
+        proc = Popen(  # nosec
+            cmd,
+            cwd=cwd,
+            stdin=DEVNULL,
+            stdout=PIPE,
+            stderr=PIPE,
+            text=True,
+        )
+        # commands are defined in constants at top of module
     except FileNotFoundError as exc:
         # This will only be raised if the VCS command is not installed,
         # otherwise Popen() will succeed with a non-zero return code
diff --git a/cylc/flow/job_file.py b/cylc/flow/job_file.py
index ee8eb77417b..1de5e5d23fc 100644
--- a/cylc/flow/job_file.py
+++ b/cylc/flow/job_file.py
@@ -81,9 +81,13 @@ def write(self, local_job_file_path, job_conf, check_syntax=True):
         # check syntax
         if check_syntax:
             try:
-                with Popen(
-                        ['/usr/bin/env', 'bash', '-n', tmp_name],
-                        stderr=PIPE, stdin=DEVNULL) as proc:
+                with Popen(  # nosec
+                    ['/usr/bin/env', 'bash', '-n', tmp_name],
+                    stderr=PIPE,
+                    stdin=DEVNULL,
+                    # * the purpose of this is to evaluate user defined code
+                    #   prior to it being executed
+                ) as proc:
                     if proc.wait():
                         # This will leave behind the temporary file,
                         # which is useful for debugging syntax errors, etc.
diff --git a/cylc/flow/job_runner_handlers/background.py b/cylc/flow/job_runner_handlers/background.py
index 8659ab6a1b1..5dd44f0e1c5 100644
--- a/cylc/flow/job_runner_handlers/background.py
+++ b/cylc/flow/job_runner_handlers/background.py
@@ -66,7 +66,7 @@ def submit(cls, job_file_path, submit_opts):
             # process can detach as a process group leader and not subjected to
             # SIGHUP from the current process.
             # TODO: close stdout? Maybe atexit?
-            proc = Popen(
+            proc = Popen(  # nosec
                 [
                     "nohup",
                     "bash",
@@ -81,6 +81,7 @@ def submit(cls, job_file_path, submit_opts):
                 stdout=DEVNULL,
                 stderr=STDOUT
             )
+            # * the purpose of this call is to run user defined code
         except OSError as exc:
             # subprocess.Popen has a bad habit of not setting the
             # filename of the executable when it raises an OSError.
diff --git a/cylc/flow/parsec/validate.py b/cylc/flow/parsec/validate.py
index 4e322c540d0..1f683bf2321 100644
--- a/cylc/flow/parsec/validate.py
+++ b/cylc/flow/parsec/validate.py
@@ -71,7 +71,9 @@ class ParsecValidator:
     # Parameterized names containing at least one comma.
     _REC_MULTI_PARAM = re.compile(r'<[\w]+,.*?>')
 
-    SELF_REFERENCE_PATTERNS = ['localhost', '127.0.0.1', '0.0.0.0']
+    SELF_REFERENCE_PATTERNS = ['localhost', '127.0.0.1', '0.0.0.0']  # nosec
+    # * these strings are used for validation purposes
+    # * they are not used for binding
 
     # Value type constants
     V_BOOLEAN = 'V_BOOLEAN'
diff --git a/cylc/flow/platforms.py b/cylc/flow/platforms.py
index c0b0e4d72df..d8026719f7a 100644
--- a/cylc/flow/platforms.py
+++ b/cylc/flow/platforms.py
@@ -156,7 +156,7 @@ def platform_from_name(
     for platform_name_re in reversed(list(platform_groups)):
         if re.fullmatch(platform_name_re, platform_name):
             platform_group = deepcopy(platform_name)
-            platform_name = random.choice(
+            platform_name = random.choice(  # nosec (not crypto related)
                 platform_groups[platform_name_re]['platforms']
             )
 
@@ -562,7 +562,7 @@ def get_random_platform_for_install_target(
     """Return a randomly selected platform (dict) for given install target."""
     platforms = get_all_platforms_for_install_target(install_target)
     try:
-        return random.choice(platforms)
+        return random.choice(platforms)  # nosec (not crypto related)
     except IndexError:
         # No platforms to choose from
         raise PlatformLookupError(
diff --git a/cylc/flow/prerequisite.py b/cylc/flow/prerequisite.py
index 5d0c4da7f26..a0e855a037f 100644
--- a/cylc/flow/prerequisite.py
+++ b/cylc/flow/prerequisite.py
@@ -154,7 +154,9 @@ def _conditional_is_satisfied(self):
 
         """
         try:
-            res = eval(self.conditional_expression)
+            res = eval(self.conditional_expression)  # nosec
+            # * the expression is constructed internally
+            # * https://github.com/cylc/cylc-flow/issues/4403
         except (SyntaxError, ValueError) as exc:
             err_msg = str(exc)
             if str(exc).find("unexpected EOF") != -1:
diff --git a/cylc/flow/remote.py b/cylc/flow/remote.py
index 69e3b2f401e..905ec8e6951 100644
--- a/cylc/flow/remote.py
+++ b/cylc/flow/remote.py
@@ -41,7 +41,12 @@ def get_proc_ancestors():
     pid = os.getpid()
     ancestors = []
     while True:
-        p = Popen(["ps", "-p", str(pid), "-oppid="], stdout=PIPE, stderr=PIPE)
+        p = Popen(  # nosec
+            ["ps", "-p", str(pid), "-oppid="],
+            stdout=PIPE,
+            stderr=PIPE,
+        )
+        # * there is no untrusted output
         ppid = p.communicate()[0].decode().strip()
         if not ppid:
             return ancestors
@@ -116,7 +121,13 @@ def run_cmd(
         stdin = read
 
     try:
-        proc = Popen(command, stdin=stdin, stdout=stdout, stderr=stderr)
+        proc = Popen(  # nosec
+            command,
+            stdin=stdin,
+            stdout=stdout,
+            stderr=stderr,
+        )
+        # * see the CODACY ISSUE comment above
    except OSError as exc:
         sys.exit(r'ERROR: %s: %s' % (
             exc, ' '.join(quote(item) for item in command)))
diff --git a/cylc/flow/rundb.py b/cylc/flow/rundb.py
index 962575ac52d..a71466bced2 100644
--- a/cylc/flow/rundb.py
+++ b/cylc/flow/rundb.py
@@ -494,16 +494,25 @@ def select_workflow_params(self, callback):
 
         E.g.
             a row might be ['UTC mode', '1']
         """
-        stmt = f"SELECT key, value FROM {self.TABLE_WORKFLOW_PARAMS}"
+        stmt = rf'''
+            SELECT
+                key, value
+            FROM
+                {self.TABLE_WORKFLOW_PARAMS}
+        '''  # nosec (table name is code constant)
         for row_idx, row in enumerate(self.connect().execute(stmt)):
             callback(row_idx, list(row))
 
     def select_workflow_params_restart_count(self):
         """Return number of restarts in workflow_params table."""
-        stmt = f"""
-            SELECT value FROM {self.TABLE_WORKFLOW_PARAMS}
-            WHERE key == 'n_restart';
-        """
+        stmt = rf"""
+            SELECT
+                value
+            FROM
+                {self.TABLE_WORKFLOW_PARAMS}
+            WHERE
+                key == 'n_restart'
+        """  # nosec (table name is code constant)
         result = self.connect().execute(stmt).fetchone()
         return int(result[0]) if result else 0
 
@@ -514,8 +523,13 @@ def select_workflow_template_vars(self, callback):
             [key,value]
         """
         for row_idx, row in enumerate(self.connect().execute(
-                r"SELECT key,value FROM %s" % (
-                    self.TABLE_WORKFLOW_TEMPLATE_VARS))):
+            rf'''
+                SELECT
+                    key, value
+                FROM
+                    {self.TABLE_WORKFLOW_TEMPLATE_VARS}
+            '''  # nosec (table name is code constant)
+        )):
             callback(row_idx, list(row))
 
     def select_task_action_timers(self, callback):
@@ -526,8 +540,14 @@ def select_task_action_timers(self, callback):
         attrs = []
         for item in self.TABLES_ATTRS[self.TABLE_TASK_ACTION_TIMERS]:
             attrs.append(item[0])
-        stmt = r"SELECT %s FROM %s" % (
-            ",".join(attrs), self.TABLE_TASK_ACTION_TIMERS)
+        stmt = rf'''
+            SELECT
+                {",".join(attrs)}
+            FROM
+                {self.TABLE_TASK_ACTION_TIMERS}
+        '''  # nosec
+        # * table name is code constant
+        # * attrs are code constants
         for row_idx, row in enumerate(self.connect().execute(stmt)):
             callback(row_idx, list(row))
 
@@ -541,17 +561,33 @@ def select_task_job(self, cycle, name, submit_num=None):
         for column in self.tables[self.TABLE_TASK_JOBS].columns[3:]:
             keys.append(column.name)
         if submit_num in [None, "NN"]:
-            stmt = (r"SELECT %(keys_str)s FROM %(table)s"
-                    r" WHERE cycle==? AND name==?"
-                    r" ORDER BY submit_num DESC LIMIT 1") % {
-                "keys_str": ",".join(keys),
-                "table": self.TABLE_TASK_JOBS}
+            stmt = f'''
+                SELECT
+                    {",".join(keys)}
+                FROM
+                    {self.TABLE_TASK_JOBS}
+                WHERE
+                    cycle==?
+                    AND name==?
+                ORDER BY
+                    submit_num DESC LIMIT 1
+            '''  # nosec
+            # * table name is code constant
+            # * keys are code constants
             stmt_args = [cycle, name]
         else:
-            stmt = (r"SELECT %(keys_str)s FROM %(table)s"
-                    r" WHERE cycle==? AND name==? AND submit_num==?") % {
-                "keys_str": ",".join(keys),
-                "table": self.TABLE_TASK_JOBS}
+            stmt = f'''
+                SELECT
+                    {",".join(keys)}
+                FROM
+                    {self.TABLE_TASK_JOBS}
+                WHERE
+                    cycle==?
+                    AND name==?
+                    AND submit_num==?
+            '''  # nosec
+            # * table name is code constant
+            # * keys are code constants
             stmt_args = [cycle, name, submit_num]
         try:
             for row in self.connect().execute(stmt, stmt_args):
@@ -625,7 +661,12 @@ def select_task_job_run_times(self, callback):
 
     def select_task_job_platforms(self):
         """Return the set of platform names from task_jobs table."""
-        stmt = f"SELECT DISTINCT platform_name FROM {self.TABLE_TASK_JOBS}"
+        stmt = rf'''
+            SELECT DISTINCT
+                platform_name
+            FROM
+                {self.TABLE_TASK_JOBS}
+        '''  # nosec (table name is code constant)
         return {i[0] for i in self.connect().execute(stmt)}
 
     def select_submit_nums(self, name, point):
@@ -656,13 +697,23 @@ def select_submit_nums(self, name, point):
         return ret
 
     def select_xtriggers_for_restart(self, callback):
-        stm = r"SELECT signature,results FROM %s" % self.TABLE_XTRIGGERS
-        for row_idx, row in enumerate(self.connect().execute(stm, [])):
+        stmt = rf'''
+            SELECT
+                signature,results
+            FROM
+                {self.TABLE_XTRIGGERS}
+        '''  # nosec (table name is code constant)
+        for row_idx, row in enumerate(self.connect().execute(stmt, [])):
             callback(row_idx, list(row))
 
     def select_abs_outputs_for_restart(self, callback):
-        stm = r"SELECT cycle,name,output FROM %s" % self.TABLE_ABS_OUTPUTS
-        for row_idx, row in enumerate(self.connect().execute(stm, [])):
+        stmt = rf'''
+            SELECT
+                cycle, name, output
+            FROM
+                {self.TABLE_ABS_OUTPUTS}
+        '''  # nosec (table name is code constant)
+        for row_idx, row in enumerate(self.connect().execute(stmt, [])):
             callback(row_idx, list(row))
 
     def select_task_pool(self, callback):
@@ -739,7 +790,7 @@ def select_task_prerequisites(
         self, cycle: str, name: str
     ) -> List[Tuple[str, str, str, str]]:
         """Return prerequisites of a task of the given name & cycle point."""
-        stmt = f"""
+        stmt = rf"""
             SELECT
                 prereq_name,
                 prereq_cycle,
@@ -750,13 +801,18 @@ def select_task_prerequisites(
             WHERE
                 cycle == ?
                 AND name == ?
-        """
+        """  # nosec (table name is code constant)
         stmt_args = [cycle, name]
         return list(self.connect().execute(stmt, stmt_args))
 
     def select_tasks_to_hold(self) -> List[Tuple[str, str]]:
         """Return all tasks to hold stored in the DB."""
-        stmt = f"SELECT name, cycle FROM {self.TABLE_TASKS_TO_HOLD}"
+        stmt = rf'''
+            SELECT
+                name, cycle
+            FROM
+                {self.TABLE_TASKS_TO_HOLD}
+        '''  # nosec (table name is code constant)
         return list(self.connect().execute(stmt))
 
     def select_task_times(self):
         """Select submit/start/stop times to compute job timings.
 
@@ -765,7 +821,7 @@ def select_task_times(self):
         To make data interpretation easier, choose the most recent succeeded
         task to sample timings from.
""" - q = """ + stmt = rf""" SELECT name, cycle, @@ -775,18 +831,15 @@ def select_task_times(self): time_run, time_run_exit FROM - %(task_jobs)s + {self.TABLE_TASK_JOBS} WHERE - run_status = %(succeeded)d - """ % { - 'task_jobs': self.TABLE_TASK_JOBS, - 'succeeded': 0, - } + run_status = 0 + """ # nosec (table name is code constant) columns = ( 'name', 'cycle', 'host', 'job_runner', 'submit_time', 'start_time', 'succeed_time' ) - return columns, list(self.connect().execute(q)) + return columns, list(self.connect().execute(stmt)) def vacuum(self): """Vacuum to the database.""" diff --git a/cylc/flow/scheduler.py b/cylc/flow/scheduler.py index 9a834ebf3e9..17bd599b847 100644 --- a/cylc/flow/scheduler.py +++ b/cylc/flow/scheduler.py @@ -990,9 +990,13 @@ def _configure_contact(self): workflow_files.detect_old_contact_file(self.workflow) # Get "pid,args" process string with "ps" pid_str = str(os.getpid()) - proc = Popen( + proc = Popen( # nosec ['ps', workflow_files.PS_OPTS, pid_str], - stdin=DEVNULL, stdout=PIPE, stderr=PIPE) + stdin=DEVNULL, + stdout=PIPE, + stderr=PIPE + ) + # * there is no untrusted output out, err = (f.decode() for f in proc.communicate()) ret_code = proc.wait() process_str = None @@ -1356,9 +1360,14 @@ def workflow_auto_restart(self, max_retries=3): LOG.info(f'Attempting to restart on "{new_host}"') # proc will start with current env (incl CYLC_HOME etc) - proc = Popen( + proc = Popen( # nosec [*cmd, f'--host={new_host}'], - stdin=DEVNULL, stdout=PIPE, stderr=PIPE) + stdin=DEVNULL, + stdout=PIPE, + stderr=PIPE, + ) + # * new_host comes from internal interface which can only return + # host names if proc.wait(): msg = 'Could not restart workflow' if attempt_no < max_retries: @@ -1807,9 +1816,12 @@ def _update_profile_info(self, category, amount, amount_format="%s"): def _update_cpu_usage(self): """Obtain CPU usage statistics.""" - proc = Popen( + proc = Popen( # nosec ["ps", "-o%cpu= ", str(os.getpid())], - stdin=DEVNULL, stdout=PIPE) + stdin=DEVNULL, + stdout=PIPE, + ) + # * there is no untrusted input try: cpu_frac = float(proc.communicate()[0]) except (TypeError, OSError, ValueError) as exc: diff --git a/cylc/flow/scripts/cat_log.py b/cylc/flow/scripts/cat_log.py index 335697b4cfa..f5b136deb20 100755 --- a/cylc/flow/scripts/cat_log.py +++ b/cylc/flow/scripts/cat_log.py @@ -137,7 +137,7 @@ def colorise_cat_log(cat_proc, color=False): """ if color: - color_proc = Popen( + color_proc = Popen( # nosec [ 'python3', '-c', '; '.join([ @@ -145,6 +145,7 @@ def colorise_cat_log(cat_proc, color=False): 'from cylc.flow.loggingutil import re_formatter', 'print(re_formatter(sys.stdin.read()), end="")' ]), + # * there is no untrusted input, everything is hardcoded ], stdin=PIPE ) @@ -199,11 +200,12 @@ def view_log(logpath, mode, tailer_tmpl, batchview_cmd=None, remote=False, cmd = shlex.split(batchview_cmd) else: cmd = ['cat', logpath] - proc1 = Popen( + proc1 = Popen( # nosec cmd, stdin=DEVNULL, stdout=PIPE if color else None ) + # * batchview command is user configurable colorise_cat_log(proc1, color=color) return 0 elif mode == 'tail': @@ -211,7 +213,8 @@ def view_log(logpath, mode, tailer_tmpl, batchview_cmd=None, remote=False, cmd = batchview_cmd else: cmd = tailer_tmpl % {"filename": logpath} - proc = Popen(shlex.split(cmd), stdin=DEVNULL) + proc = Popen(shlex.split(cmd), stdin=DEVNULL) # nosec + # * batchview command is user configurable with suppress(KeyboardInterrupt): watch_and_kill(proc) return proc.wait() @@ -308,7 +311,8 @@ def tmpfile_edit(tmpfile, geditor=False): 
     modtime1 = os.stat(tmpfile.name).st_mtime
     cmd = shlex.split(editor)
     cmd.append(tmpfile.name)
-    proc = Popen(cmd, stderr=PIPE)
+    proc = Popen(cmd, stderr=PIPE)  # nosec
+    # * editor command is user configurable
     err = proc.communicate()[1].decode()
     ret_code = proc.wait()
     if ret_code == 0 and os.stat(tmpfile.name).st_mtime > modtime1:
diff --git a/cylc/flow/scripts/graph.py b/cylc/flow/scripts/graph.py
index ca44162020e..668918d6136 100755
--- a/cylc/flow/scripts/graph.py
+++ b/cylc/flow/scripts/graph.py
@@ -327,11 +327,13 @@ def dot(opts, lines):
     dot.append('}')
 
     # render graph
-    proc = Popen(
+    proc = Popen(  # nosec
         ['dot', f'-T{fmt}', '-o', filename],
         stdin=PIPE,
         text=True
     )
+    # * filename is generated in code above
+    # * fmt is user specified and quoted (by subprocess)
     proc.communicate('\n'.join(dot))
     proc.wait()
     if proc.returncode:
diff --git a/cylc/flow/scripts/view.py b/cylc/flow/scripts/view.py
index 7bd0fa56fc2..9e85b62a5f6 100755
--- a/cylc/flow/scripts/view.py
+++ b/cylc/flow/scripts/view.py
@@ -161,7 +161,7 @@ def main(parser: COP, options: 'Values', reg: str) -> None:
         command_list.append(viewfile.name)
         command = ' '.join(command_list)
         # THIS BLOCKS UNTIL THE COMMAND COMPLETES
-        retcode = call(command_list)
+        retcode = call(command_list)  # nosec (editor command is user configurable)
         if retcode != 0:
             # the command returned non-zero exist status
             raise CylcError(f'{command} failed: {retcode}')
diff --git a/cylc/flow/task_events_mgr.py b/cylc/flow/task_events_mgr.py
index 7302ae07a8f..2c7034c677e 100644
--- a/cylc/flow/task_events_mgr.py
+++ b/cylc/flow/task_events_mgr.py
@@ -325,8 +325,10 @@ def process_events(self, schd_ctx):
                 self.proc_pool.put_command(
                     SubProcContext(
                         (key1, submit_num),
-                        timer.ctx.cmd, env=os.environ, shell=True,
-                    ),
+                        timer.ctx.cmd,
+                        env=os.environ,
+                        shell=True,  # nosec
+                    ),  # designed to run user defined code
                     callback=self._custom_handler_callback,
                     callback_args=[schd_ctx, id_key]
                 )
diff --git a/cylc/flow/task_remote_mgr.py b/cylc/flow/task_remote_mgr.py
index 71d05ef3f27..728d8296a57 100644
--- a/cylc/flow/task_remote_mgr.py
+++ b/cylc/flow/task_remote_mgr.py
@@ -292,7 +292,13 @@ def construct_remote_tidy_ssh_cmd(install_target, platform):
             procs[platform_n] = (
                 cmd,
                 host,
-                Popen(cmd, stdout=PIPE, stderr=PIPE, stdin=DEVNULL)
+                Popen(  # nosec
+                    cmd,
+                    stdout=PIPE,
+                    stderr=PIPE,
+                    stdin=DEVNULL,
+                )
+                # * command constructed by internal interface
             )
         # Wait for commands to complete for a max of 10 seconds
         timeout = time() + 10.0
@@ -325,10 +331,13 @@ def construct_remote_tidy_ssh_cmd(install_target, platform):
                 procs[platform_n] = (
                     retry_cmd,
                     host,
-                    Popen(
-                        retry_cmd, stdout=PIPE, stderr=PIPE,
-                        stdin=DEVNULL
+                    Popen(  # nosec
+                        retry_cmd,
+                        stdout=PIPE,
+                        stderr=PIPE,
+                        stdin=DEVNULL,
                     )
+                    # * command constructed by internal interface
                 )
             if proc.wait() and proc.returncode != 255:
                 LOG.warning(TaskRemoteMgmtError(
diff --git a/cylc/flow/workflow_db_mgr.py b/cylc/flow/workflow_db_mgr.py
index d1f023bf552..f8a9c1eac57 100644
--- a/cylc/flow/workflow_db_mgr.py
+++ b/cylc/flow/workflow_db_mgr.py
@@ -635,8 +635,16 @@ def check_workflow_db_compatibility(self):
         pri_dao = self.get_pri_dao()
         try:
             last_run_ver = pri_dao.connect().execute(
-                f'SELECT value FROM {self.TABLE_WORKFLOW_PARAMS} '
-                f'WHERE key == "{self.KEY_CYLC_VERSION}"').fetchone()[0]
+                rf'''
+                    SELECT
+                        value
+                    FROM
+                        {self.TABLE_WORKFLOW_PARAMS}
+                    WHERE
+                        key == ?
+                ''',  # nosec (table name is a code constant)
+                [self.KEY_CYLC_VERSION]
+            ).fetchone()[0]
         except TypeError:
             raise ServiceFileError(f"{incompat_msg}, or is corrupted")
         finally:
diff --git a/cylc/flow/workflow_events.py b/cylc/flow/workflow_events.py
index 1ff079ee147..d30c6e1d6ca 100644
--- a/cylc/flow/workflow_events.py
+++ b/cylc/flow/workflow_events.py
@@ -190,7 +190,11 @@ def _run_event_custom_handlers(self, config, ctx):
             cmd = "%s '%s' '%s' '%s'" % (
                 handler, ctx.event, ctx.workflow, ctx.reason)
             proc_ctx = SubProcContext(
-                cmd_key, cmd, env=dict(os.environ), shell=True)
+                cmd_key,
+                cmd,
+                env=dict(os.environ),
+                shell=True  # nosec (designed to run user defined code)
+            )
             if self.proc_pool.closed:
                 # Run command in foreground if abort on failure is set or if
                 # process pool is closed
diff --git a/cylc/flow/workflow_files.py b/cylc/flow/workflow_files.py
index 6c6c9e9e777..c0aabe04378 100644
--- a/cylc/flow/workflow_files.py
+++ b/cylc/flow/workflow_files.py
@@ -28,9 +28,10 @@
 from pathlib import Path
 from random import shuffle
 import re
+import shlex
 import shutil
 from subprocess import Popen, PIPE, DEVNULL
-import time
+from time import sleep, time
 from typing import (
     Any, Container, Deque, Dict, Iterable, List, NamedTuple, Optional, Set,
     Tuple, TYPE_CHECKING, Union
@@ -377,11 +378,16 @@ def detect_old_contact_file(reg, check_host_port=None):
     old_pid_str = old_proc_str.split(None, 1)[0].strip()
     cmd = ["timeout", "10", "ps", PS_OPTS, str(old_pid_str)]
     if is_remote_host(old_host):
-        import shlex
         ssh_str = get_platform()["ssh command"]
         cmd = shlex.split(ssh_str) + ["-n", old_host] + cmd
-    from time import sleep, time
-    proc = Popen(cmd, stdin=DEVNULL, stdout=PIPE, stderr=PIPE)
+    proc = Popen(  # nosec
+        cmd,
+        stdin=DEVNULL,
+        stdout=PIPE,
+        stderr=PIPE,
+    )
+    # * command is constructed by internal interface
+    # * ssh command is intended to be user configurable
     # Terminate command after 10 seconds to prevent hanging SSH, etc.
     timeout = time() + 10.0
     while proc.poll() is None:
@@ -921,7 +927,7 @@ def remote_clean(
                 failed_targets.append(item.install_target)
             elif err:
                 LOG.debug(err)
-        time.sleep(0.2)
+        sleep(0.2)
     if failed_targets:
         raise CylcError(
             f"Could not clean on install targets: {', '.join(failed_targets)}"
         )
@@ -958,7 +964,14 @@ def _remote_clean_cmd(
         timeout=timeout, set_verbosity=True
     )
     LOG.debug(" ".join(cmd))
-    return Popen(cmd, stdin=DEVNULL, stdout=PIPE, stderr=PIPE, text=True)
+    return Popen(  # nosec
+        cmd,
+        stdin=DEVNULL,
+        stdout=PIPE,
+        stderr=PIPE,
+        text=True,
+    )
+    # * command constructed by internal interface
 
 
 def remove_keys_on_server(keys):
@@ -1406,7 +1419,8 @@ def reinstall_workflow(named_run, rundir, source, dry_run=False):
         f"\"{source}\" to \"{rundir}\"")
     rsync_cmd = get_rsync_rund_cmd(
         source, rundir, reinstall=True, dry_run=dry_run)
-    proc = Popen(rsync_cmd, stdout=PIPE, stderr=PIPE, text=True)
+    proc = Popen(rsync_cmd, stdout=PIPE, stderr=PIPE, text=True)  # nosec
+    # * command is constructed via internal interface
     stdout, stderr = proc.communicate()
     reinstall_log.info(
         f"Copying files from {source} to {rundir}"
@@ -1498,7 +1512,8 @@ def install_workflow(
     link_runN(rundir)
     create_workflow_srv_dir(rundir)
     rsync_cmd = get_rsync_rund_cmd(source, rundir)
-    proc = Popen(rsync_cmd, stdout=PIPE, stderr=PIPE, text=True)
+    proc = Popen(rsync_cmd, stdout=PIPE, stderr=PIPE, text=True)  # nosec
+    # * command is constructed via internal interface
     stdout, stderr = proc.communicate()
     install_log.info(
         f"Copying files from {source} to {rundir}"
diff --git a/setup.py b/setup.py
index 361c9316abf..865c837508d 100644
--- a/setup.py
+++ b/setup.py
@@ -58,6 +58,7 @@ def find_version(*file_paths):
 tests_require = [
     'async-timeout>=3.0.0',
     'async_generator',
+    'bandit>=1.7.0',
     'coverage>=5.0.0',
     'flake8-broken-line>=0.3.0',
     'flake8-bugbear>=21.0.0',