diff --git a/.DS_Store b/.DS_Store index ffbb913d..4e084a62 100644 Binary files a/.DS_Store and b/.DS_Store differ diff --git a/bin/Xnat_tools/XnatSwitchProcessStatus b/bin/Xnat_tools/XnatSwitchProcessStatus index 1adc1933..b1fbeb65 100755 --- a/bin/Xnat_tools/XnatSwitchProcessStatus +++ b/bin/Xnat_tools/XnatSwitchProcessStatus @@ -450,11 +450,13 @@ def set_need_inputs_proctype(xnat, assessor, need_inputs, full_regex=False): else: set_proc_status(assessor_obj, task.NEED_INPUTS) - li_assessors = xnat.get_assessor_out_resources( + li_assessors = XnatUtils.list_assessor_out_resources( + xnat, a_linked_dict['project_id'], a_linked_dict['subject_id'], a_linked_dict['session_id'], a_linked_dict['label']) + for resource in li_assessors: delete_assr_resource(assessor_obj, resource['label']) @@ -567,7 +569,8 @@ def set_status_assessor(xnat, assessor, status, for resource in resources: delete_assr_resource(assessor_obj, resource) else: - resources = xnat.get_assessor_out_resources( + resources = XnatUtils.list_assessor_out_resources( + xnat, assessor['project_id'], assessor['subject_label'], assessor['session_label'], assessor['label']) for resource in resources: diff --git a/dax/dax_tools_utils.py b/dax/dax_tools_utils.py index 259379f7..d1261ef9 100644 --- a/dax/dax_tools_utils.py +++ b/dax/dax_tools_utils.py @@ -816,6 +816,26 @@ def get_version_assessor(assessor_path): return version +def get_dax_version_hash_assessor(assessor_path): + """ + Get the dax_version_hash of the assessor we are uploading from text file + + :param assessor_path: path for the assessor + :return: dax_version_hash of the assessor from the dax_version_hash.txt file + """ + dax_version_hash = '' + fpath = os.path.join(assessor_path, 'dax_version_hash.txt') + + try: + with open(fpath, 'r') as f_obj: + dax_version_hash = f_obj.read().strip() + + except IOError as e: + LOGGER.warn('failed to read dax_version_hash:' + str(e)) + + return dax_version_hash + + def generate_snapshots(assessor_path): """ Generate Snapshots from 
the PDF if it exists. @@ -967,6 +987,7 @@ def upload_assessor(xnat, assessor_dict, assessor_path): """ # get spiderpath from version.txt file: version = get_version_assessor(assessor_path) + dax_version_hash = get_dax_version_hash_assessor(assessor_path) session_obj = XnatUtils.select_obj(xnat, assessor_dict['project_id'], assessor_dict['subject_label'], @@ -1028,7 +1049,8 @@ def upload_assessor(xnat, assessor_dict, assessor_path): xsitype + '/jobnode': ctask.get_jobnode(), xsitype + '/memused': ctask.get_memused(), xsitype + '/walltimeused': ctask.get_walltime(), - xsitype + '/jobstartdate': ctask.get_jobstartdate() + xsitype + '/jobstartdate': ctask.get_jobstartdate(), + xsitype + '/dax_version_hash': dax_version_hash }) # Delete the task from diskq diff --git a/dax/processor_parser.py b/dax/processor_parser.py index 1fff958c..aeaba004 100644 --- a/dax/processor_parser.py +++ b/dax/processor_parser.py @@ -275,7 +275,13 @@ def find_inputs(self, assr): if 'fmatch' in cur_res: fmatch = cur_res['fmatch'] + elif cur_res['ftype'] == 'FILE': + # Default to all + fmatch = '*' + else: + fmatch = None + if fmatch: # Get list of all files in the resource file_list = robj.files().get() diff --git a/dax/processors.py b/dax/processors.py index 27b89abc..77103704 100644 --- a/dax/processors.py +++ b/dax/processors.py @@ -937,7 +937,7 @@ def build_cmds(self, assr, jobdir): """ assr_label = assr.label() dstdir = os.path.join(DAX_Settings().get_results_dir(), assr_label) - + # Find values for the xnat inputs var2val, input_list = self.parser.find_inputs(assr) @@ -968,6 +968,10 @@ def build_cmds(self, assr, jobdir): _ref = attr_in['ref'] _refval = assr_inputs[_ref].rsplit('/', 1)[1] _val = assr.parent().scan(_refval).attrs.get(_attr) + elif _obj == 'assessor': + _ref = attr_in['ref'] + _refval = assr_inputs[_ref].rsplit('/', 1)[1] + _val = assr.parent().assessor(_refval).attrs.get(_attr) else: LOGGER.error('invalid YAML') err = 'YAML File:contains invalid attribute:{}' @@ -1007,7 
+1011,7 @@ def build_text(self, var2val, input_list, jobdir, dstdir): cmd += 'INDIR=$JOBDIR/INPUTS\n' cmd += 'OUTDIR=$JOBDIR/OUTPUTS\n' cmd += 'DSTDIR={}\n\n'.format(dstdir) - + cmd += 'CONTAINERPATH={}\n\n'.format(self.container_path) # Append the main command cmd += 'MAINCMD=\"' cmd += self.command.format(**var2val) diff --git a/dax/templates/SLURM/singularity_job_template.txt b/dax/templates/SLURM/singularity_job_template.txt index 6ec58b29..95018e3a 100644 --- a/dax/templates/SLURM/singularity_job_template.txt +++ b/dax/templates/SLURM/singularity_job_template.txt @@ -22,12 +22,14 @@ date #DSTDIR= #INLIST= #OUTLIST= +#CONTAINERPATH= #MAINCMD= ${job_cmds} #============================================================================= echo $DSTDIR echo $INDIR echo $OUTDIR +echo $CONTAINERPATH mkdir -p $INDIR mkdir -p $OUTDIR @@ -40,21 +42,21 @@ for IN in "${INLIST[@]}"; do elif [ $col2 == "DIRJ" ]; then CMD="curl -s -n $col3?format=zip -o $INDIR/${col1}.zip && unzip -j $INDIR/${col1}.zip -d $INDIR/$col1" else - CMD="curl -s -n $col3?format=zip -o $INDIR/${col1}.zip && unzip $INDIR/${col1}.zip -d $INDIR/$col1" + CMD="curl -s -n '$col3?format=zip&structure=simplified' -o $INDIR/${col1}.zip && unzip $INDIR/${col1}.zip -d $INDIR/$col1 && mv $INDIR/$col1/*/out/* $INDIR/$col1" fi echo $CMD eval $CMD done # Run main command -module load GCC Singularity echo $_JAVA_OPTIONS echo $MAINCMD eval $MAINCMD -# Write version file +# Write version files mkdir -p $DSTDIR echo $VERSION > $DSTDIR/version.txt +sha256sum $CONTAINERPATH | awk '{print $1}' > $DSTDIR/dax_version_hash.txt # Handle outputs errors=0 @@ -84,4 +86,7 @@ done if [ $errors -gt 0 ] || [ $haspdf != 1 ]; then echo "JOB_FAILED" && touch $DSTDIR/JOB_FAILED.txt; else echo "COMPLETE" && touch $DSTDIR/READY_TO_UPLOAD.txt; fi +rm -rf $INDIR $OUTDIR + echo "DONE" +