diff --git a/config/acme/machines/template.case.run b/config/acme/machines/template.case.run index 56d425e3401..38a7fd7c8e3 100755 --- a/config/acme/machines/template.case.run +++ b/config/acme/machines/template.case.run @@ -32,23 +32,19 @@ import argparse, doctest def parse_command_line(args, description): ############################################################################### parser = argparse.ArgumentParser( - usage="""\n%s [--verbose] + usage="""\n{0} [--verbose] OR -%s --help +{0} --help OR -%s --test +{0} --test \033[1mEXAMPLES:\033[0m \033[1;32m# case.run SMS\033[0m - > %s -""" % ((os.path.basename(args[0]), ) * 4), - -description=description, - -formatter_class=argparse.ArgumentDefaultsHelpFormatter -) - - parser = argparse.ArgumentParser() + > {0} +""".format(os.path.basename(args[0])), + description=description, + formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) CIME.utils.setup_standard_logging_options(parser) @@ -56,11 +52,16 @@ formatter_class=argparse.ArgumentDefaultsHelpFormatter help="Case directory to build") + parser.add_argument("--skip-preview-namelist", action="store_true", + help="Skip calling preview-namelist during case.run") args = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser) if args.caseroot is not None: os.chdir(args.caseroot) - return args.caseroot + if args.skip_preview_namelist is None: + args.skip_preview_namelist = False + + return args.caseroot, args.skip_preview_namelist ############################################################################### def _main_func(description): @@ -69,9 +70,9 @@ def _main_func(description): test_results = doctest.testmod(verbose=True) sys.exit(1 if test_results.failed > 0 else 0) - caseroot = parse_command_line(sys.argv, description) + caseroot, skip_pnl = parse_command_line(sys.argv, description) with Case(caseroot, read_only=False) as case: - success = case_run(case) + success = case_run(case, skip_pnl=skip_pnl) sys.exit(0 if success else 1) diff --git 
a/config/cesm/machines/template.case.run b/config/cesm/machines/template.case.run index 7f9d31fc5f5..03187424a96 100755 --- a/config/cesm/machines/template.case.run +++ b/config/cesm/machines/template.case.run @@ -29,35 +29,37 @@ import argparse, doctest def parse_command_line(args, description): ############################################################################### parser = argparse.ArgumentParser( - usage="""\n%s [--verbose] + usage="""\n{0} [--verbose] OR -%s --help +{0} --help OR -%s --test +{0} --test \033[1mEXAMPLES:\033[0m \033[1;32m# case.run SMS\033[0m - > %s -""" % ((os.path.basename(args[0]), ) * 4), - -description=description, - -formatter_class=argparse.ArgumentDefaultsHelpFormatter -) - - parser = argparse.ArgumentParser() + > {0} +""".format(os.path.basename(args[0])), + description=description, + formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) CIME.utils.setup_standard_logging_options(parser) parser.add_argument("--caseroot", help="Case directory to build") + parser.add_argument("--skip-preview-namelist", action="store_true", + help="Skip calling preview-namelist during case.run") + args = CIME.utils.parse_args_and_handle_standard_logging_options(args, parser) if args.caseroot is not None: os.chdir(args.caseroot) - return args.caseroot + if args.skip_preview_namelist is None: + args.skip_preview_namelist = False + + return args.caseroot, args.skip_preview_namelist ############################################################################### def _main_func(description): @@ -66,9 +68,9 @@ def _main_func(description): test_results = doctest.testmod(verbose=True) sys.exit(1 if test_results.failed > 0 else 0) - caseroot = parse_command_line(sys.argv, description) + caseroot, skip_pnl = parse_command_line(sys.argv, description) with Case(caseroot, read_only=False) as case: - success = case_run(case) + success = case_run(case, skip_pnl=skip_pnl) sys.exit(0 if success else 1) diff --git a/scripts/Tools/case.submit 
b/scripts/Tools/case.submit index 3ea9d21b287..e049d78af7f 100755 --- a/scripts/Tools/case.submit +++ b/scripts/Tools/case.submit @@ -12,16 +12,15 @@ from CIME.case import Case def parse_command_line(args, description): ############################################################################### parser = argparse.ArgumentParser( - usage="""\n%s [] [--verbose] + usage="""\n{0} [] [--verbose] OR -%s --help +{0} --help OR -%s --test - +{0} --test \033[1mEXAMPLES:\033[0m \033[1;32m# Setup case \033[0m - > %s -""" % ((os.path.basename(args[0]), ) * 4), + > {0} +""".format(os.path.basename(args[0])), description=description, formatter_class=argparse.ArgumentDefaultsHelpFormatter ) @@ -31,6 +30,9 @@ OR parser.add_argument("caseroot", nargs="?", default=os.getcwd(), help="Case directory to setup") + parser.add_argument("--test", action="store_true", + help="Run case as a test.") + parser.add_argument("--job", "-j", help="Name of the job to be submitted, default is case.run" " can be any of the jobs listed in env_batch.xml") @@ -45,6 +47,9 @@ OR parser.add_argument("--resubmit", action="store_true", help="Used with tests only, to continue rather than restart a test. ") + parser.add_argument("--skip-preview-namelist", action="store_true", + help="Skip calling preview-namelist during case.run") + parser.add_argument("-a", "--batch-args", help="Used to pass additional arguments to batch system. 
") @@ -52,18 +57,20 @@ OR CIME.utils.expect(args.prereq is None, "--prereq not currently supported") - return args.caseroot, args.job, args.no_batch, args.resubmit, args.batch_args + return args.test, args.caseroot, args.job, args.no_batch, args.resubmit, args.skip_preview_namelist, args.batch_args ############################################################################### def _main_func(description): ############################################################################### - if "--test" in sys.argv: + test, caseroot, job, no_batch, resubmit, skip_pnl, batch_args = parse_command_line(sys.argv, description) + if test: test_results = doctest.testmod(verbose=True) sys.exit(1 if test_results.failed > 0 else 0) - caseroot, job, no_batch, resubmit, batch_args = parse_command_line(sys.argv, description) with Case(caseroot, read_only=False) as case: - submit(case, job=job, no_batch=no_batch, resubmit=resubmit, batch_args=batch_args) + submit(case, job=job, no_batch=no_batch, + resubmit=resubmit, skip_pnl=skip_pnl, + batch_args=batch_args) if __name__ == "__main__": _main_func(__doc__) diff --git a/scripts/lib/CIME/XML/env_batch.py b/scripts/lib/CIME/XML/env_batch.py index 45e2f1fe3dc..949268c5055 100644 --- a/scripts/lib/CIME/XML/env_batch.py +++ b/scripts/lib/CIME/XML/env_batch.py @@ -44,7 +44,7 @@ def set_value(self, item, value, subgroup=None, ignore_type=False): elif value.count(":") == 2: t_spec = "%H:%M:%S" else: - expect(False, "could not interpret format for wallclock time %s"%value) + expect(False, "could not interpret format for wallclock time {}".format(value)) value = format_time(walltime_format, t_spec, value) # allow the user to set item for all jobs if subgroup is not provided @@ -91,7 +91,7 @@ def get_type_info(self, vid): type_info = new_type_info else: expect( type_info == new_type_info, - "Inconsistent type_info for entry id=%s %s %s" % (vid, new_type_info, type_info)) + "Inconsistent type_info for entry id={} {} {}".format(vid, new_type_info, 
type_info)) return type_info def get_jobs(self): @@ -153,7 +153,7 @@ def set_batch_system(self, batchobj, batch_system_type=None): self.root.append(deepcopy(batchobj.machine_node)) def make_batch_script(self, input_template, job, case, total_tasks, tasks_per_node, num_nodes, thread_count): - expect(os.path.exists(input_template), "input file '%s' does not exist" % input_template) + expect(os.path.exists(input_template), "input file '{}' does not exist".format(input_template)) self.tasks_per_node = tasks_per_node self.num_tasks = total_tasks @@ -198,7 +198,7 @@ def set_job_defaults(self, batch_jobs, pesize=None, walltime=None, force_queue=N if force_queue: if not self.queue_meets_spec(force_queue, task_count, walltime=walltime, job=job): - logger.warning("WARNING: User-requested queue '%s' does not meet requirements for job '%s'" % (force_queue, job)) + logger.warning("WARNING: User-requested queue '{}' does not meet requirements for job '{}'".format(force_queue, job)) queue = force_queue else: queue = self.select_best_queue(task_count, walltime=walltime, job=job) @@ -209,9 +209,9 @@ def set_job_defaults(self, batch_jobs, pesize=None, walltime=None, force_queue=N # It was, override the walltime if a test, otherwise just warn the user new_walltime = self.get_queue_specs(queue)[3] expect(new_walltime is not None, "Should never make it here") - logger.warning("WARNING: Requested walltime '%s' could not be matched by any queue" % walltime) + logger.warning("WARNING: Requested walltime '{}' could not be matched by any queue".format(walltime)) if allow_walltime_override: - logger.warning(" Using walltime '%s' instead" % new_walltime) + logger.warning(" Using walltime '{}' instead".format(new_walltime)) walltime = new_walltime else: logger.warning(" Continuing with suspect walltime, batch submission may fail") @@ -235,7 +235,7 @@ def set_job_defaults(self, batch_jobs, pesize=None, walltime=None, force_queue=N self.set_value("JOB_QUEUE", queue, subgroup=job) 
self.set_value("JOB_WALLCLOCK_TIME", walltime, subgroup=job) - logger.debug("Job %s queue %s walltime %s" % (job, queue, walltime)) + logger.debug("Job {} queue {} walltime {}".format(job, queue, walltime)) def get_batch_directives(self, case, job, raw=False): """ @@ -255,7 +255,7 @@ def get_batch_directives(self, case, job, raw=False): directive = transform_vars(directive, case=case, subgroup=job, default=default, check_members=self) elif default is not None: directive = transform_vars(directive, default=default) - result.append("%s %s" % (directive_prefix, directive)) + result.append("{} {}".format(directive_prefix, directive)) return "\n".join(result) @@ -278,7 +278,7 @@ def get_submit_args(self, case, job): continue if name is None: - submitargs+=" %s"%flag + submitargs+=" {}".format(flag) else: if name.startswith("$"): name = name[1:] @@ -302,23 +302,23 @@ def get_submit_args(self, case, job): if flag.rfind("=", len(flag)-1, len(flag)) >= 0 or\ flag.rfind(":", len(flag)-1, len(flag)) >= 0: - submitargs+=" %s%s"%(flag,str(rval).strip()) + submitargs+=" {}{}".format(flag,str(rval).strip()) else: - submitargs+=" %s %s"%(flag,str(rval).strip()) + submitargs+=" {} {}".format(flag,str(rval).strip()) return submitargs - def submit_jobs(self, case, no_batch=False, job=None, batch_args=None, dry_run=False): + def submit_jobs(self, case, no_batch=False, job=None, skip_pnl=False, batch_args=None, dry_run=False): alljobs = self.get_jobs() startindex = 0 jobs = [] firstjob = job if job is not None: - expect(job in alljobs, "Do not know about batch job %s"%job) + expect(job in alljobs, "Do not know about batch job {}".format(job)) startindex = alljobs.index(job) for index, job in enumerate(alljobs): - logger.debug( "Index %d job %s startindex %d" % (index, job, startindex)) + logger.debug( "Index {:d} job {} startindex {:d}".format(index, job, startindex)) if index < startindex: continue try: @@ -329,8 +329,7 @@ def submit_jobs(self, case, no_batch=False, job=None, 
batch_args=None, dry_run=F prereq = case.get_resolved_value(prereq) prereq = eval(prereq) except: - expect(False,"Unable to evaluate prereq expression '%s' for job '%s'"%(self.get_value('prereq',subgroup=job), job)) - + expect(False,"Unable to evaluate prereq expression '{}' for job '{}'".format(self.get_value('prereq',subgroup=job), job)) if prereq: jobs.append((job, self.get_value('dependency', subgroup=job))) @@ -362,8 +361,8 @@ def submit_jobs(self, case, no_batch=False, job=None, batch_args=None, dry_run=F if slen == 0: jobid = None - logger.warn("job is %s" % job) - result = self._submit_single_job(case, job, jobid, no_batch=no_batch, batch_args=batch_args, dry_run=dry_run) + logger.warn("job is {}".format(job)) + result = self._submit_single_job(case, job, jobid, no_batch=no_batch, skip_pnl=skip_pnl, batch_args=batch_args, dry_run=dry_run) batch_job_id = str(alljobs.index(job)) if dry_run else result depid[job] = batch_job_id jobcmds.append( (job, result) ) @@ -375,8 +374,8 @@ def submit_jobs(self, case, no_batch=False, job=None, batch_args=None, dry_run=F else: return sorted(list(depid.values())) - def _submit_single_job(self, case, job, depid=None, no_batch=False, batch_args=None, dry_run=False): - logger.warn("Submit job %s"%job) + def _submit_single_job(self, case, job, depid=None, no_batch=False, skip_pnl=False, batch_args=None, dry_run=False): + logger.warn("Submit job {}".format(job)) batch_system = self.get_value("BATCH_SYSTEM", subgroup=None) if batch_system is None or batch_system == "none" or no_batch: # Import here to avoid circular include @@ -384,7 +383,7 @@ def _submit_single_job(self, case, job, depid=None, no_batch=False, batch_args=N from CIME.case_run import case_run # pylint: disable=unused-variable from CIME.case_st_archive import case_st_archive # pylint: disable=unused-variable - logger.info("Starting job script %s" % job) + logger.info("Starting job script {}".format(job)) function_name = job.replace(".", "_") if not dry_run: @@ 
-414,13 +413,16 @@ def _submit_single_job(self, case, job, depid=None, no_batch=False, batch_args=N if string is not None: submitcmd += string + " " + if job == 'case.run' and skip_pnl: + submitcmd += " --skip-preview-namelist" + if dry_run: return submitcmd else: - logger.info("Submitting job script %s"%submitcmd) + logger.info("Submitting job script {}".format(submitcmd)) output = run_cmd_no_fail(submitcmd, combine_output=True) jobid = self.get_job_id(output) - logger.info("Submitted job id is %s"%jobid) + logger.info("Submitted job id is {}".format(jobid)) return jobid def get_batch_system_type(self): @@ -439,7 +441,7 @@ def get_job_id(self, output): expect(jobid_pattern is not None, "Could not find jobid_pattern in env_batch.xml") search_match = re.search(jobid_pattern, output) expect(search_match is not None, - "Couldn't match jobid_pattern '%s' within submit output:\n '%s'" % (jobid_pattern, output)) + "Couldn't match jobid_pattern '{}' within submit output:\n '{}'".format(jobid_pattern, output)) jobid = search_match.group(1) return jobid diff --git a/scripts/lib/CIME/case.py b/scripts/lib/CIME/case.py index ecab463f1ab..d9498720c57 100644 --- a/scripts/lib/CIME/case.py +++ b/scripts/lib/CIME/case.py @@ -1103,9 +1103,9 @@ def create_clone(self, newcase, keepexe=False, mach_dir=None, project=None, cime return newcase - def submit_jobs(self, no_batch=False, job=None, batch_args=None, dry_run=False): + def submit_jobs(self, no_batch=False, job=None, skip_pnl=False, batch_args=None, dry_run=False): env_batch = self.get_env('batch') - return env_batch.submit_jobs(self, no_batch=no_batch, job=job, batch_args=batch_args, dry_run=dry_run) + return env_batch.submit_jobs(self, no_batch=no_batch, job=job, skip_pnl=skip_pnl, batch_args=batch_args, dry_run=dry_run) def get_mpirun_cmd(self, job="case.run"): env_mach_specific = self.get_env('mach_specific') diff --git a/scripts/lib/CIME/case_run.py b/scripts/lib/CIME/case_run.py index 31ff36d00f9..cca41e47469 100644 --- 
a/scripts/lib/CIME/case_run.py +++ b/scripts/lib/CIME/case_run.py @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) ############################################################################### -def pre_run_check(case, lid): +def pre_run_check(case, lid, skip_pnl=False): ############################################################################### # Pre run initialization code.. @@ -25,7 +25,7 @@ def pre_run_check(case, lid): if case.get_value("TESTCASE") == "PFS": env_mach_pes = os.path.join(caseroot,"env_mach_pes.xml") - shutil.copy(env_mach_pes,"%s.%s"%(env_mach_pes,lid)) + shutil.copy(env_mach_pes,"{}.{}".format(env_mach_pes, lid)) # check for locked files. check_lockedfiles(case.get_value("CASEROOT")) @@ -34,7 +34,7 @@ def pre_run_check(case, lid): # check that build is done expect(build_complete, "BUILD_COMPLETE is not true\nPlease rebuild the model interactively") - logger.debug("build complete is %s " %build_complete) + logger.debug("build complete is {} ".format(build_complete)) # load the module environment... 
case.load_env() @@ -67,19 +67,20 @@ def pre_run_check(case, lid): # This needs to be done everytime the LID changes in order for log files to be set up correctly # The following also needs to be called in case a user changes a user_nl_xxx file OR an env_run.xml # variable while the job is in the queue - create_namelists(case) + if not skip_pnl: + create_namelists(case) logger.info("-------------------------------------------------------------------------") - logger.info(" - Prestage required restarts into %s" %(rundir)) - logger.info(" - Case input data directory (DIN_LOC_ROOT) is %s " %(din_loc_root)) + logger.info(" - Prestage required restarts into {}".format(rundir)) + logger.info(" - Case input data directory (DIN_LOC_ROOT) is {} ".format(din_loc_root)) logger.info(" - Checking for required input datasets in DIN_LOC_ROOT") logger.info("-------------------------------------------------------------------------") ############################################################################### -def _run_model_impl(case, lid): +def _run_model_impl(case, lid, skip_pnl=False): ############################################################################### - pre_run_check(case, lid) + pre_run_check(case, lid, skip_pnl=skip_pnl) model = case.get_value("MODEL") @@ -90,11 +91,11 @@ def _run_model_impl(case, lid): os.environ["OMP_NUM_THREADS"] = str(thread_count) # Run the model - logger.info("%s MODEL EXECUTION BEGINS HERE" %(time.strftime("%Y-%m-%d %H:%M:%S"))) + logger.info("{} MODEL EXECUTION BEGINS HERE".format(time.strftime("%Y-%m-%d %H:%M:%S"))) cmd = case.get_mpirun_cmd(job="case.run") cmd = case.get_resolved_value(cmd) - logger.info("run command is %s " %cmd) + logger.info("run command is {} ".format(cmd)) rundir = case.get_value("RUNDIR") loop = True @@ -131,18 +132,18 @@ def _run_model_impl(case, lid): if not loop: # We failed and we're not restarting - expect(False, "RUN FAIL: Command '%s' failed\nSee log file for details: %s" % (cmd, model_logfile)) + 
expect(False, "RUN FAIL: Command '{}' failed\nSee log file for details: {}".format(cmd, model_logfile)) - logger.info("%s MODEL EXECUTION HAS FINISHED" %(time.strftime("%Y-%m-%d %H:%M:%S"))) + logger.info("{} MODEL EXECUTION HAS FINISHED".format(time.strftime("%Y-%m-%d %H:%M:%S"))) post_run_check(case, lid) return lid ############################################################################### -def run_model(case, lid): +def run_model(case, lid, skip_pnl=False): ############################################################################### - functor = lambda: _run_model_impl(case, lid) + functor = lambda: _run_model_impl(case, lid, skip_pnl=skip_pnl) return run_and_log_case_status(functor, "case.run", caseroot=case.get_value("CASEROOT")) ############################################################################### @@ -157,15 +158,15 @@ def post_run_check(case, lid): cpl_logfile = os.path.join(rundir, "cpl" + ".log." + lid) if not os.path.isfile(model_logfile): - expect(False, "Model did not complete, no %s log file " % model_logfile) + expect(False, "Model did not complete, no {} log file ".format(model_logfile)) elif not os.path.isfile(cpl_logfile): - expect(False, "Model did not complete, no cpl log file '%s'" % cpl_logfile) + expect(False, "Model did not complete, no cpl log file '{}'".format(cpl_logfile)) elif os.stat(model_logfile).st_size == 0: expect(False, "Run FAILED") else: with open(cpl_logfile, 'r') as fd: if 'SUCCESSFUL TERMINATION' not in fd.read(): - expect(False, "Model did not complete - see %s \n " % cpl_logfile) + expect(False, "Model did not complete - see {} \n ".format(cpl_logfile)) ############################################################################### def save_logs(case, lid): @@ -177,7 +178,7 @@ def save_logs(case, lid): caseroot = case.get_value("CASEROOT") rundir = case.get_value("RUNDIR") - logfiles = glob.glob(os.path.join(rundir, "*.log.%s"%(lid))) + logfiles = glob.glob(os.path.join(rundir, "*.log.{}".format(lid))) for 
logfile in logfiles: if os.path.isfile(logfile): logfile_gz = gzip_existing_file(logfile) @@ -192,12 +193,12 @@ def resubmit_check(case): # Note that Mira requires special logic dout_s = case.get_value("DOUT_S") - logger.warn("dout_s %s "%(dout_s)) + logger.warn("dout_s {} ".format(dout_s)) mach = case.get_value("MACH") - logger.warn("mach %s "%(mach)) + logger.warn("mach {} ".format(mach)) testcase = case.get_value("TESTCASE") resubmit_num = case.get_value("RESUBMIT") - logger.warn("resubmit_num %s"%(resubmit_num)) + logger.warn("resubmit_num {}".format(resubmit_num)) # If dout_s is True than short-term archiving handles the resubmit # If dout_s is True and machine is mira submit the st_archive script resubmit = False @@ -206,7 +207,7 @@ def resubmit_check(case): elif dout_s and mach == 'mira': caseroot = case.get_value("CASEROOT") cimeroot = case.get_value("CIMEROOT") - cmd = "ssh cooleylogin1 'cd %s; CIMEROOT=%s ./case.submit %s --job case.st_archive'"%(caseroot, cimeroot, caseroot) + cmd = "ssh cooleylogin1 'cd {}; CIMEROOT={} ./case.submit {} --job case.st_archive'".format(caseroot, cimeroot, caseroot) run_cmd(cmd, verbose=True) if resubmit: @@ -219,23 +220,23 @@ def resubmit_check(case): ############################################################################### def do_external(script_name, caseroot, rundir, lid, prefix): ############################################################################### - filename = "%s.external.log.%s" %(prefix, lid) + filename = "{}.external.log.{}".format(prefix, lid) outfile = os.path.join(rundir, filename) - cmd = script_name + " 1> %s %s 2>&1" %(outfile, caseroot) - logger.info("running %s" %script_name) + cmd = script_name + " 1> {} {} 2>&1".format(outfile, caseroot) + logger.info("running {}".format(script_name)) run_cmd_no_fail(cmd) ############################################################################### def do_data_assimilation(da_script, caseroot, cycle, lid, rundir): 
############################################################################### - filename = "da.log.%s" %(lid) + filename = "da.log.{}".format(lid) outfile = os.path.join(rundir, filename) - cmd = da_script + " 1> %s %s %d 2>&1" %(outfile, caseroot, cycle) - logger.info("running %s" %da_script) + cmd = da_script + " 1> {} {} {:d} 2>&1".format(outfile, caseroot, cycle) + logger.info("running {}".format(da_script)) run_cmd_no_fail(cmd) ############################################################################### -def case_run(case): +def case_run(case, skip_pnl=False): ############################################################################### # Set up the run, run the model, do the postrun steps run_with_submit = case.get_value("RUN_WITH_SUBMIT") @@ -271,7 +272,7 @@ def case_run(case): do_external(prerun_script, case.get_value("CASEROOT"), case.get_value("RUNDIR"), lid, prefix="prerun") - lid = run_model(case, lid) + lid = run_model(case, lid, skip_pnl) save_logs(case, lid) # Copy log files back to caseroot if case.get_value("CHECK_TIMING") or case.get_value("SAVE_TIMING"): get_timing(case, lid) # Run the getTiming script diff --git a/scripts/lib/CIME/case_submit.py b/scripts/lib/CIME/case_submit.py index d10032e56d7..feefd5c9570 100644 --- a/scripts/lib/CIME/case_submit.py +++ b/scripts/lib/CIME/case_submit.py @@ -15,7 +15,7 @@ logger = logging.getLogger(__name__) -def _submit(case, job=None, resubmit=False, no_batch=False, batch_args=None): +def _submit(case, job=None, resubmit=False, no_batch=False, skip_pnl=False, batch_args=None): caseroot = case.get_value("CASEROOT") if job is None: @@ -26,7 +26,7 @@ def _submit(case, job=None, resubmit=False, no_batch=False, batch_args=None): if resubmit: resub = case.get_value("RESUBMIT") - logger.info("Submitting job '%s', resubmit=%d" % (job, resub)) + logger.info("Submitting job '{}', resubmit={:d}".format(job, resub)) case.set_value("RESUBMIT",resub-1) if case.get_value("RESUBMIT_SETS_CONTINUE_RUN"): 
case.set_value("CONTINUE_RUN", True) @@ -61,11 +61,11 @@ def _submit(case, job=None, resubmit=False, no_batch=False, batch_args=None): case.set_value("RUN_WITH_SUBMIT",True) case.flush() - logger.warn("submit_jobs %s" % job) - job_ids = case.submit_jobs(no_batch=no_batch, job=job, batch_args=batch_args) - logger.info("Submitted job ids %s" % job_ids) + logger.warn("submit_jobs {}".format(job)) + job_ids = case.submit_jobs(no_batch=no_batch, job=job, skip_pnl=skip_pnl, batch_args=batch_args) + logger.info("Submitted job ids {}".format(job_ids)) -def submit(case, job=None, resubmit=False, no_batch=False, batch_args=None): +def submit(case, job=None, resubmit=False, no_batch=False, skip_pnl=False, batch_args=None): if case.get_value("TEST"): caseroot = case.get_value("CASEROOT") casebaseid = case.get_value("CASEBASEID") @@ -80,7 +80,7 @@ def submit(case, job=None, resubmit=False, no_batch=False, batch_args=None): ts.set_status(SUBMIT_PHASE, TEST_PASS_STATUS) try: - functor = lambda: _submit(case, job, resubmit, no_batch, batch_args) + functor = lambda: _submit(case, job, resubmit, no_batch, skip_pnl, batch_args) run_and_log_case_status(functor, "case.submit", caseroot=case.get_value("CASEROOT")) except: # If something failed in the batch system, make sure to mark @@ -105,5 +105,5 @@ def check_DA_settings(case): if case.get_value("DATA_ASSIMILATION"): script = case.get_value("DATA_ASSIMILATION_SCRIPT") cycles = case.get_value("DATA_ASSIMILATION_CYCLES") - logger.info("Data Assimilation enabled using script %s with %d cycles"%(script,cycles)) + logger.info("Data Assimilation enabled using script {} with {:d} cycles".format(script,cycles)) diff --git a/scripts/lib/CIME/test_scheduler.py b/scripts/lib/CIME/test_scheduler.py index 3e2f4886f33..df7ab2f7057 100644 --- a/scripts/lib/CIME/test_scheduler.py +++ b/scripts/lib/CIME/test_scheduler.py @@ -570,9 +570,9 @@ def _run_phase(self, test): ########################################################################### test_dir 
= self._get_test_dir(test) if self._no_batch: - cmd = "./case.submit --no-batch" + cmd = "./case.submit --no-batch --skip-preview-namelist" else: - cmd = "./case.submit " + cmd = "./case.submit --skip-preview-namelist" return self._shell_cmd_for_phase(test, cmd, RUN_PHASE, from_dir=test_dir)