Skip to content

Commit

Permalink
Merge branch 'release_23.1' into dev
Browse files Browse the repository at this point in the history
  • Loading branch information
mvdbeek committed Oct 23, 2023
2 parents 730ef28 + 41e06d0 commit 3ccccc8
Show file tree
Hide file tree
Showing 22 changed files with 230 additions and 59 deletions.
5 changes: 3 additions & 2 deletions .k8s_ci.Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ RUN set -xe; \

# Create Galaxy user, group, directory; chown
RUN set -xe; \
adduser --system --group $GALAXY_USER \
adduser --system --group --uid 101 $GALAXY_USER \
&& mkdir -p $SERVER_DIR \
&& chown $GALAXY_USER:$GALAXY_USER $ROOT_DIR -R

Expand All @@ -179,7 +179,8 @@ COPY --chown=$GALAXY_USER:$GALAXY_USER --from=client_build $SERVER_DIR/static ./
WORKDIR $SERVER_DIR

# The data in version.json will be displayed in Galaxy's /api/version endpoint
RUN printf "{\n \"git_commit\": \"$(cat GITREVISION)\",\n \"build_date\": \"$BUILD_DATE\",\n \"image_tag\": \"$IMAGE_TAG\"\n}\n" > version.json
RUN printf "{\n \"git_commit\": \"$(cat GITREVISION)\",\n \"build_date\": \"$BUILD_DATE\",\n \"image_tag\": \"$IMAGE_TAG\"\n}\n" > version.json \
&& chown $GALAXY_USER:$GALAXY_USER version.json

EXPOSE 8080
USER $GALAXY_USER
Expand Down
5 changes: 4 additions & 1 deletion client/src/components/History/Content/ContentItem.vue
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@
:state="state"
:item-urls="itemUrls"
:keyboard-selectable="expandDataset"
@delete="$emit('delete')"
@delete="onDelete"
@display="onDisplay"
@showCollectionInfo="onShowCollectionInfo"
@edit="onEdit"
Expand Down Expand Up @@ -243,6 +243,9 @@ export default {
this.$router.push(this.itemUrls.display, { title: this.name });
}
},
onDelete(recursive = false) {
this.$emit("delete", this.item, recursive);
},
onDragStart(evt) {
setDrag(evt, this.item);
},
Expand Down
39 changes: 36 additions & 3 deletions client/src/components/History/Content/ContentOptions.vue
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<template>
<span class="align-self-start btn-group">
<span class="align-self-start btn-group align-items-baseline">
<!-- Special case for collections -->
<b-button
v-if="isCollection && canShowCollectionDetails"
Expand Down Expand Up @@ -43,8 +43,21 @@
title="Delete"
size="sm"
variant="link"
@click.stop="$emit('delete')">
<icon icon="trash" />
@click.stop="onDelete($event)">
<icon v-if="isDataset" icon="trash" />
<b-dropdown v-else ref="deleteCollectionMenu" size="sm" variant="link" no-caret toggle-class="p-0 m-0">
<template v-slot:button-content>
<icon icon="trash" />
</template>
<b-dropdown-item title="Delete collection only" @click.prevent.stop="onDeleteItem">
<icon icon="file" />
Collection only
</b-dropdown-item>
<b-dropdown-item title="Delete collection and elements" @click.prevent.stop="onDeleteItemRecursively">
<icon icon="copy" />
Collection and elements
</b-dropdown-item>
</b-dropdown>
</b-button>
<b-button
v-if="writable && isHistoryItem && isDeleted"
Expand Down Expand Up @@ -131,6 +144,26 @@ export default {
this.$emit("display");
}
},
onDelete() {
if (this.isCollection) {
this.$refs.deleteCollectionMenu.show();
} else {
this.onDeleteItem();
}
},
onDeleteItem() {
this.$emit("delete");
},
onDeleteItemRecursively() {
const recursive = true;
this.$emit("delete", recursive);
},
},
};
</script>
<style lang="css">
.dropdown-menu .dropdown-item {
font-weight: normal;
}
</style>
6 changes: 3 additions & 3 deletions client/src/components/History/Content/GenericItem.vue
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
:is-dataset="item.history_content_type == 'dataset' || item.element_type == 'hda'"
@update:expand-dataset="expandDataset = $event"
@view-collection="viewCollection = !viewCollection"
@delete="onDelete(item)"
@delete="onDelete"
@toggleHighlights="onHighlight(item)"
@undelete="onUndelete(item)"
@unhide="onUnhide(item)" />
Expand Down Expand Up @@ -76,8 +76,8 @@ export default {
},
methods: {
...mapActions(useHistoryStore, ["applyFilters"]),
onDelete(item) {
deleteContent(item);
onDelete(item, recursive = false) {
deleteContent(item, { recursive: recursive });
},
onUndelete(item) {
updateContentFields(item, { deleted: false });
Expand Down
6 changes: 3 additions & 3 deletions client/src/components/History/CurrentHistory/HistoryPanel.vue
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@
@update:expand-dataset="setExpanded(item, $event)"
@update:selected="setSelected(item, $event)"
@view-collection="$emit('view-collection', item, currentOffset)"
@delete="onDelete(item)"
@delete="onDelete"
@undelete="onUndelete(item)"
@unhide="onUnhide(item)" />
</template>
Expand Down Expand Up @@ -363,9 +363,9 @@ export default {
this.loading = false;
}
},
onDelete(item) {
onDelete(item, recursive = false) {
this.setInvisible(item);
deleteContent(item);
deleteContent(item, { recursive: recursive });
},
onHideSelection(selectedItems) {
selectedItems.forEach((item) => {
Expand Down
4 changes: 2 additions & 2 deletions client/src/components/Markdown/MarkdownToolBox.vue
Original file line number Diff line number Diff line change
Expand Up @@ -255,8 +255,8 @@ export default {
const steps = [];
this.steps &&
Object.values(this.steps).forEach((step) => {
if (step.label || step.content_id) {
steps.push(step.label || step.content_id);
if (step.label) {
steps.push(step.label);
}
});
return steps;
Expand Down
2 changes: 1 addition & 1 deletion lib/galaxy/job_execution/output_collect.py
Original file line number Diff line number Diff line change
Expand Up @@ -91,7 +91,7 @@ def set_default_hda_permissions(self, primary_data):
self._security_agent.set_all_dataset_permissions(primary_data.dataset, permissions, new=True, flush=False)

def copy_dataset_permissions(self, init_from, primary_data):
self._security_agent.copy_dataset_permissions(init_from.dataset, primary_data.dataset)
self._security_agent.copy_dataset_permissions(init_from.dataset, primary_data.dataset, flush=False)


class MetadataSourceProvider(AbstractMetadataSourceProvider):
Expand Down
4 changes: 1 addition & 3 deletions lib/galaxy/jobs/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -1434,8 +1434,6 @@ def fail(
dataset.state = dataset.states.ERROR
dataset.blurb = "tool error"
dataset.info = message
dataset.set_size()
dataset.dataset.set_total_size()
dataset.mark_unhidden()
if dataset.ext == "auto":
dataset.extension = "data"
Expand Down Expand Up @@ -1742,7 +1740,6 @@ def _finish_dataset(self, output_name, dataset, job, context, final_job_state, r
# Ensure white space between entries
dataset.info = f"{dataset.info.rstrip()}\n{context['stderr'].strip()}"
dataset.tool_version = self.version_string
dataset.set_size()
if "uuid" in context:
dataset.dataset.uuid = context["uuid"]
self.__update_output(job, dataset)
Expand Down Expand Up @@ -2427,6 +2424,7 @@ def __update_output(self, job, hda, clean_only=False):
cleaned up if the dataset has been purged.
"""
dataset = hda.dataset
dataset.set_total_size()
if dataset not in job.output_library_datasets:
purged = dataset.purged
if not purged and not clean_only:
Expand Down
3 changes: 2 additions & 1 deletion lib/galaxy/managers/markdown_util.py
Original file line number Diff line number Diff line change
Expand Up @@ -751,7 +751,8 @@ def find_non_empty_group(match):
target_match = step_match
name = find_non_empty_group(target_match)
ref_object_type = "job"
ref_object = invocation.step_invocation_for_label(name).job
invocation_step = invocation.step_invocation_for_label(name)
ref_object = invocation_step and invocation_step.job
else:
target_match = None
ref_object = None
Expand Down
1 change: 1 addition & 0 deletions lib/galaxy/metadata/set_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -449,6 +449,7 @@ def set_meta(new_dataset_instance, file_dict):
partial(push_if_necessary, object_store, dataset, external_filename)
)
object_store_update_actions.append(partial(reset_external_filename, dataset))
object_store_update_actions.append(partial(dataset.set_total_size))
object_store_update_actions.append(partial(export_store.add_dataset, dataset))
if dataset_instance_id not in unnamed_id_to_path:
object_store_update_actions.append(partial(collect_extra_files, object_store, dataset, "."))
Expand Down
10 changes: 6 additions & 4 deletions lib/galaxy/model/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3855,6 +3855,7 @@ class Dataset(Base, StorableObject, Serializable):
non_ready_states = (states.NEW, states.UPLOAD, states.QUEUED, states.RUNNING, states.SETTING_METADATA)
ready_states = tuple(set(states.__members__.values()) - set(non_ready_states))
valid_input_states = tuple(set(states.__members__.values()) - {states.ERROR, states.DISCARDED})
no_data_states = (states.PAUSED, states.DEFERRED, states.DISCARDED, *non_ready_states)
terminal_states = (
states.OK,
states.EMPTY,
Expand Down Expand Up @@ -5713,7 +5714,7 @@ def to_history_dataset_association(self, target_history, parent_id=None, add_to_
sa_session.commit()
return hda

def copy(self, parent_id=None, target_folder=None):
def copy(self, parent_id=None, target_folder=None, flush=True):
sa_session = object_session(self)
ldda = LibraryDatasetDatasetAssociation(
name=self.name,
Expand Down Expand Up @@ -6137,9 +6138,9 @@ def attribute_columns(column_collection, attributes, nesting_level=None):
inner_dc = alias(DatasetCollection)
inner_dce = alias(DatasetCollectionElement)
order_by_columns.append(inner_dce.c.element_index)
q = q.join(inner_dc, inner_dc.c.id == dce.c.child_collection_id).outerjoin(
inner_dce, inner_dce.c.dataset_collection_id == inner_dc.c.id
)
q = q.join(
inner_dc, and_(inner_dc.c.id == dce.c.child_collection_id, dce.c.dataset_collection_id == dc.c.id)
).outerjoin(inner_dce, inner_dce.c.dataset_collection_id == inner_dc.c.id)
q = q.add_columns(
*attribute_columns(inner_dce.c, element_attributes, nesting_level),
*attribute_columns(inner_dc.c, collection_attributes, nesting_level),
Expand Down Expand Up @@ -7757,6 +7758,7 @@ def copy_to(self, copied_step, step_mapping, user=None):
copied_step.order_index = self.order_index
copied_step.type = self.type
copied_step.tool_id = self.tool_id
copied_step.tool_version = self.tool_version
copied_step.tool_inputs = self.tool_inputs
copied_step.tool_errors = self.tool_errors
copied_step.position = self.position
Expand Down
8 changes: 6 additions & 2 deletions lib/galaxy/model/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,9 +152,13 @@ def versioned_objects_strict(iter):
# These should get some other type of permanent storage, perhaps UserDatasetAssociation ?
# Everything else needs to have a hid and a history
if not obj.history and not obj.history_id:
raise Exception(f"HistoryDatsetAssociation {obj} without history detected, this is not valid")
raise Exception(f"HistoryDatasetAssociation {obj} without history detected, this is not valid")
elif not obj.hid:
raise Exception(f"HistoryDatsetAssociation {obj} without has no hid, this is not valid")
raise Exception(f"HistoryDatasetAssociation {obj} without hid, this is not valid")
elif obj.dataset.file_size is None and obj.dataset.state not in obj.dataset.no_data_states:
raise Exception(
f"HistoryDatasetAssociation {obj} in state {obj.dataset.state} with null file size, this is not valid"
)
yield obj


Expand Down
4 changes: 2 additions & 2 deletions lib/galaxy/model/security.py
Original file line number Diff line number Diff line change
Expand Up @@ -987,12 +987,12 @@ def get_permissions(self, item):
permissions[action] = [item_permission.role]
return permissions

def copy_dataset_permissions(self, src, dst):
def copy_dataset_permissions(self, src, dst, flush=True):
if not isinstance(src, self.model.Dataset):
src = src.dataset
if not isinstance(dst, self.model.Dataset):
dst = dst.dataset
self.set_all_dataset_permissions(dst, self.get_permissions(src))
self.set_all_dataset_permissions(dst, self.get_permissions(src), flush=flush)

def privately_share_dataset(self, dataset, users=None):
dataset.ensure_shareable()
Expand Down
4 changes: 3 additions & 1 deletion lib/galaxy/model/store/discover.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,7 +125,7 @@ def create_dataset(

if init_from:
self.permission_provider.copy_dataset_permissions(init_from, primary_data)
primary_data.state = init_from.state
primary_data.raw_set_dataset_state(init_from.state)
else:
self.permission_provider.set_default_hda_permissions(primary_data)
else:
Expand Down Expand Up @@ -265,6 +265,8 @@ def set_datasets_metadata(datasets, datasets_attributes=None):
except Exception:
log.exception("Exception occured while setting dataset peek")

primary_data.set_total_size()

def populate_collection_elements(
self,
collection,
Expand Down
41 changes: 32 additions & 9 deletions lib/galaxy/tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -316,6 +316,21 @@ class safe_update(NamedTuple):
}


def get_safe_version(tool: "Tool", requested_tool_version: str) -> Optional[str]:
if tool.id:
safe_version = WORKFLOW_SAFE_TOOL_VERSION_UPDATES.get(tool.id)
if (
safe_version
and tool.lineage
and safe_version.current_version >= parse_version(requested_tool_version) >= safe_version.min_version
):
# tool versions are sorted from old to new, so check newest version first
for lineage_version in reversed(tool.lineage.tool_versions):
if safe_version.current_version >= parse_version(lineage_version) >= safe_version.min_version:
return lineage_version
return None


class ToolNotFoundException(Exception):
pass

Expand Down Expand Up @@ -3247,7 +3262,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history

assert collection.collection_type == "paired"
forward_o, reverse_o = collection.dataset_instances
forward, reverse = forward_o.copy(copy_tags=forward_o.tags), reverse_o.copy(copy_tags=reverse_o.tags)
forward, reverse = forward_o.copy(copy_tags=forward_o.tags, flush=False), reverse_o.copy(
copy_tags=reverse_o.tags, flush=False
)
self._add_datasets_to_history(history, [forward, reverse])

out_data["forward"] = forward
Expand All @@ -3263,7 +3280,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
forward_o = incoming["input_forward"]
reverse_o = incoming["input_reverse"]

forward, reverse = forward_o.copy(copy_tags=forward_o.tags), reverse_o.copy(copy_tags=reverse_o.tags)
forward, reverse = forward_o.copy(copy_tags=forward_o.tags, flush=False), reverse_o.copy(
copy_tags=reverse_o.tags, flush=False
)
new_elements = {}
new_elements["forward"] = forward
new_elements["reverse"] = reverse
Expand Down Expand Up @@ -3294,7 +3313,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
identifier = getattr(incoming_repeat["input"], "element_identifier", incoming_repeat["input"].name)
elif id_select == "manual":
identifier = incoming_repeat["id_cond"]["identifier"]
new_elements[identifier] = incoming_repeat["input"].copy(copy_tags=incoming_repeat["input"].tags)
new_elements[identifier] = incoming_repeat["input"].copy(
copy_tags=incoming_repeat["input"].tags, flush=False
)

self._add_datasets_to_history(history, new_elements.values())
output_collections.create_collection(
Expand Down Expand Up @@ -3328,7 +3349,9 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
else:
raise Exception("Invalid tool parameters.")
extracted = extracted_element.element_object
extracted_o = extracted.copy(copy_tags=extracted.tags, new_name=extracted_element.element_identifier)
extracted_o = extracted.copy(
copy_tags=extracted.tags, new_name=extracted_element.element_identifier, flush=False
)
self._add_datasets_to_history(history, [extracted_o], datasets_visible=True)

out_data["output"] = extracted_o
Expand Down Expand Up @@ -3411,7 +3434,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
if getattr(value, "history_content_type", None) == "dataset":
copied_value = value.copy(copy_tags=value.tags, flush=False)
else:
copied_value = value.copy()
copied_value = value.copy(flush=False)
new_elements[key] = copied_value

self._add_datasets_to_history(history, new_elements.values())
Expand All @@ -3431,7 +3454,7 @@ def _get_new_elements(self, history, elements_to_copy):
if getattr(dce.element_object, "history_content_type", None) == "dataset":
copied_value = dce.element_object.copy(copy_tags=dce.element_object.tags, flush=False)
else:
copied_value = dce.element_object.copy()
copied_value = dce.element_object.copy(flush=False)
new_elements[element_identifier] = copied_value
return new_elements

Expand Down Expand Up @@ -3599,7 +3622,7 @@ def add_copied_value_to_new_elements(new_label, dce_object):
if getattr(dce_object, "history_content_type", None) == "dataset":
copied_value = dce_object.copy(copy_tags=dce_object.tags, flush=False)
else:
copied_value = dce_object.copy()
copied_value = dce_object.copy(flush=False)
new_elements[new_label] = copied_value

new_labels_path = new_labels_dataset_assoc.file_name
Expand Down Expand Up @@ -3705,7 +3728,7 @@ def add_copied_value_to_new_elements(new_tags_dict, dce):
)
else:
# We have a collection, and we copy the elements so that we don't manipulate the original tags
copied_value = dce.element_object.copy(element_destination=history)
copied_value = dce.element_object.copy(element_destination=history, flush=False)
for new_element, old_element in zip(copied_value.dataset_elements, dce.element_object.dataset_elements):
# TODO: This should be eliminated, but collections created by the collection builder
# don't set `visible` to `False` if you don't hide the original elements.
Expand Down Expand Up @@ -3765,7 +3788,7 @@ def produce_outputs(self, trans, out_data, output_collections, incoming, history
if getattr(dce_object, "history_content_type", None) == "dataset":
copied_value = dce_object.copy(copy_tags=dce_object.tags, flush=False)
else:
copied_value = dce_object.copy()
copied_value = dce_object.copy(flush=False)

if passes_filter:
filtered_elements[element_identifier] = copied_value
Expand Down
Loading

0 comments on commit 3ccccc8

Please sign in to comment.