Shows published workflows.
-is:shared
+is:shared_with_me
Shows workflows shared by another user directly with you.
diff --git a/client/src/components/providers/storeProviders.js b/client/src/components/providers/storeProviders.js
index ff1a16d0154e..a584b1300085 100644
--- a/client/src/components/providers/storeProviders.js
+++ b/client/src/components/providers/storeProviders.js
@@ -47,6 +47,7 @@ export const SimpleProviderMixin = {
loading: this.loading,
item: this.item,
save: this.save,
+ result: this.item,
});
},
};
@@ -131,6 +132,15 @@ export const JobProvider = {
},
};
+export const DatasetCollectionElementProvider = {
+ mixins: [SimpleProviderMixin],
+ computed: {
+ url() {
+ return prependPath(`api/dataset_collection_element/${this.id}`);
+ },
+ },
+};
+
/**
* Provider component interface to the actual stores i.e. history items and collection elements stores.
* @param {String} storeAction The store action is executed when the consuming component e.g. the history panel, changes the provider props.
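For orientation, here is a hedged sketch of how a component might consume the new DatasetCollectionElementProvider. The scoped-slot props (loading, item, save, and the new result alias) come from SimpleProviderMixin above; the id prop usage, the local registration, the abbreviated import path, and the DCESummary field read in the template are illustrative assumptions, not part of this diff.

import { DatasetCollectionElementProvider } from "components/providers/storeProviders";

export default {
    components: { DatasetCollectionElementProvider },
    props: { dceId: { type: String, required: true } },
    // Sketch only: the provider fetches /api/dataset_collection_element/<id>
    // and exposes the response through its default scoped slot.
    template: `
        <DatasetCollectionElementProvider :id="dceId" v-slot="{ loading, result }">
            <span v-if="loading">Loading collection element...</span>
            <span v-else>{{ result.element_identifier }}</span>
        </DatasetCollectionElementProvider>`,
};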
diff --git a/client/src/composables/shortTermStorage.js b/client/src/composables/shortTermStorage.js
index 8b78cc013737..5bbc9a807c5b 100644
--- a/client/src/composables/shortTermStorage.js
+++ b/client/src/composables/shortTermStorage.js
@@ -22,12 +22,20 @@ export function useShortTermStorage() {
const isPreparing = ref(false);
+ async function prepareHistoryDownload(historyId, options = DEFAULT_OPTIONS) {
+ return prepareObjectDownload(historyId, "histories", options, false);
+ }
+
async function downloadHistory(historyId, options = DEFAULT_OPTIONS) {
- return prepareObjectDownload(historyId, "histories", options);
+ return prepareObjectDownload(historyId, "histories", options, true);
+ }
+
+ async function prepareWorkflowInvocationDownload(invocationId, options = DEFAULT_OPTIONS) {
+ return prepareObjectDownload(invocationId, "invocations", options, false);
}
async function downloadWorkflowInvocation(invocationId, options = DEFAULT_OPTIONS) {
- return prepareObjectDownload(invocationId, "invocations", options);
+ return prepareObjectDownload(invocationId, "invocations", options, true);
}
function getDownloadObjectUrl(storageRequestId) {
@@ -40,7 +48,7 @@ export function useShortTermStorage() {
window.location.assign(url);
}
- async function prepareObjectDownload(object_id, object_api, options = DEFAULT_OPTIONS) {
+ async function prepareObjectDownload(object_id, object_api, options = DEFAULT_OPTIONS, downloadWhenReady = true) {
const finalOptions = Object.assign(DEFAULT_OPTIONS, options);
resetTimeout();
isPreparing.value = true;
@@ -54,29 +62,31 @@ export function useShortTermStorage() {
};
const response = await axios.post(url, exportParams).catch(handleError);
- handleInitialize(response);
+ handleInitialize(response, downloadWhenReady);
}
- function handleInitialize(response) {
+ function handleInitialize(response, downloadWhenReady) {
const storageRequestId = response.data.storage_request_id;
- pollStorageRequestId(storageRequestId);
+ pollStorageRequestId(storageRequestId, downloadWhenReady);
}
- function pollStorageRequestId(storageRequestId) {
+ function pollStorageRequestId(storageRequestId, downloadWhenReady) {
const url = withPrefix(`/api/short_term_storage/${storageRequestId}/ready`);
axios
.get(url)
.then((r) => {
- handlePollResponse(r, storageRequestId);
+ handlePollResponse(r, storageRequestId, downloadWhenReady);
})
.catch(handleError);
}
- function handlePollResponse(response, storageRequestId) {
+ function handlePollResponse(response, storageRequestId, downloadWhenReady) {
const ready = response.data;
if (ready) {
isPreparing.value = false;
- downloadObjectByRequestId(storageRequestId);
+ if (downloadWhenReady) {
+ downloadObjectByRequestId(storageRequestId);
+ }
} else {
pollAfterDelay(storageRequestId);
}
@@ -103,13 +113,25 @@ export function useShortTermStorage() {
return {
/**
- * Starts preparing a history download file. When `isPreparing` is false the download will start automatically.
+ * Starts preparing a history download file in the short term storage.
+ * @param {String} historyId The ID of the history to be prepared for download
+ * @param {Object} options Options for the download preparation
+ */
+ prepareHistoryDownload,
+ /**
+ * Prepares a history download file in the short term storage and starts the download when ready.
* @param {String} historyId The ID of the history to be downloaded
* @param {Object} options Options for the download preparation
*/
downloadHistory,
/**
- * Starts preparing a workflow invocation download file. When `isPreparing` is false the download will start automatically.
+ * Starts preparing a workflow invocation download file in the short term storage.
+ * @param {String} invocationId The ID of the workflow invocation to be prepared for download
+ * @param {Object} options Options for the download preparation
+ */
+ prepareWorkflowInvocationDownload,
+ /**
+     * Prepares a workflow invocation download file in the short term storage and starts the download when ready.
* @param {String} invocationId The ID of the workflow invocation to be downloaded
* @param {Object} options Options for the download preparation
*/
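A short usage sketch of the prepare-only versus prepare-and-download split introduced above, assuming a component script-setup context; the history ID is a placeholder and the import path is abbreviated:

import { useShortTermStorage } from "composables/shortTermStorage";

const { prepareHistoryDownload, downloadHistory, isPreparing } = useShortTermStorage();

// New behavior: generate the export in short term storage without auto-downloading it;
// polling still runs until the storage request is ready.
await prepareHistoryDownload("some-history-id");

// Previous (and still default) behavior: prepare, then download as soon as it is ready.
await downloadHistory("some-history-id");

// `isPreparing` is a ref that stays true while the export is being generated.
console.log(isPreparing.value);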
diff --git a/client/src/libs/jquery.custom.js b/client/src/libs/jquery.custom.js
index 0eeb1de96450..0d87a38b5078 100644
--- a/client/src/libs/jquery.custom.js
+++ b/client/src/libs/jquery.custom.js
@@ -23,4 +23,7 @@ require("imports-loader?imports=default|jqueryVendor|jQuery!jquery-migrate");
// require("imports-loader?jQuery=jqueryVendor!../ui/autocom_tagging");
+// Only used in reports
+require("imports-loader?imports=default|jqueryVendor|jQuery!libs/jquery.sparklines");
+
module.exports = jQuery;
diff --git a/client/src/mvc/ui/ui-select-content.js b/client/src/mvc/ui/ui-select-content.js
index 814c562330ef..1d618b98a5c7 100644
--- a/client/src/mvc/ui/ui-select-content.js
+++ b/client/src/mvc/ui/ui-select-content.js
@@ -423,12 +423,12 @@ const View = Backbone.View.extend({
const select_options = { hda: [], hdca: [] };
_.each(options, (items, src) => {
_.each(items, (item) => {
- self._patchValue(item);
+ self._patchValue(item, src);
const current_src = item.src || src;
const addOption = !this.model.attributes.tag || item.tags.includes(this.model.attributes.tag);
if (addOption) {
select_options[current_src].push({
- hid: item.hid,
+ hid: item.hid || Infinity, // if we got no hid we have a "Selected" item
keep: item.keep,
label: `${item.hid || "Selected"}: ${item.name}`,
value: item.id,
@@ -448,13 +448,19 @@ const View = Backbone.View.extend({
_changeValue: function () {
const new_value = this.model.get("value");
if (new_value && new_value.values && new_value.values.length > 0) {
+ // sniff first suitable field type from config list
+ let src = new_value.values[0].src;
+ if (src === "dce") {
+ src =
+ this.cache[`dce${new_value.values[0].id}_hda`]?.src ||
+ this.cache[`dce${new_value.values[0].id}_hdca`]?.src;
+ }
+ this._patchValue(new_value, src);
// create list with content ids
const list = [];
_.each(new_value.values, (value) => {
list.push(value.id);
});
- // sniff first suitable field type from config list
- const src = new_value.values[0].src;
const multiple = new_value.values.length > 1;
for (let i = 0; i < this.config.length; i++) {
const field = this.fields[i];
@@ -479,11 +485,11 @@ const View = Backbone.View.extend({
/** Library datasets are displayed and selected together with history datasets,
Dataset collection elements are displayed together with history dataset collections **/
- _patchValue: function (v) {
- const patchTo = { ldda: "hda", dce: "hdca" };
+ _patchValue: function (v, src) {
+ const patchTo = { ldda: "hda", dce: src };
if (v.values) {
_.each(v.values, (v) => {
- this._patchValue(v);
+ this._patchValue(v, src);
});
} else if (patchTo[v.src]) {
v.origin = v.src;
diff --git a/client/src/reports/run_stats.js b/client/src/reports/run_stats.js
index e7d8dc92d9c3..2e49976cbed6 100644
--- a/client/src/reports/run_stats.js
+++ b/client/src/reports/run_stats.js
@@ -1,5 +1,6 @@
import $ from "jquery";
-import * as d3 from "d3";
+import * as d3 from "d3v3";
+import { event as currentEvent } from "d3v3";
function date_by_subtracting_days(date, days) {
return new Date(
@@ -125,14 +126,14 @@ export function create_chart(inp_data, name, time, title) {
}
var wdth = i * 4 + 10;
- d3.select(d.target.parentElement)
+ d3.select(currentEvent.target.parentElement)
.select(".tool_tip")
.select("text")
.attr("transform", `translate( ${margin.left - 5}, ${height - d * zoom + margin.top + 10} )`)
.attr("visibility", "visible")
.text(d);
- d3.select(d.target.parentElement)
+ d3.select(currentEvent.target.parentElement)
.select(".tool_tip")
.attr("width", `${wdth}px`)
.attr("height", "15px")
@@ -144,9 +145,12 @@ export function create_chart(inp_data, name, time, title) {
})
.on("mouseleave", (d) => {
// Remove tool tip
- d3.select(d.target.parentElement).select(".tool_tip").select("text").attr("visibility", "hidden");
+ d3.select(currentEvent.target.parentElement)
+ .select(".tool_tip")
+ .select("text")
+ .attr("visibility", "hidden");
- d3.select(d.target.parentElement)
+ d3.select(currentEvent.target.parentElement)
.select(".tool_tip")
.select("rect")
.attr("width", "0")
@@ -178,14 +182,14 @@ export function create_chart(inp_data, name, time, title) {
});
// Declare how high the y axis goes
- var y = d3.curveLinear().range([height, 0]);
+ var y = d3.scale.linear().range([height, 0]);
// Create a yAxis object
var yAxis = d3.svg
.axis()
.scale(y)
.orient("left")
- .tickFormat((d) => Math.round(d * d3.max(data), 0));
+ .tickFormat((d) => d3.round(d * d3.max(data), 0));
// Put the y axis on the chart
chart
@@ -438,13 +442,13 @@ export function create_histogram(inp_data, name, title) {
// Cereate x axis metadata
// Used for x axis, histogram creation, and bar initialization
- var x = d3
- .curveLinear()
+ var x = d3.scale
+ .linear()
.domain([0, d3.max(data)])
.range([0, width]);
// Generate a histogram using twenty uniformly-spaced bins.
- data = d3.histogram().bins(x.ticks(20))(data);
+ data = d3.layout.histogram().bins(x.ticks(20))(data);
// Create an array of the sizes of the bars
var lengths = [];
@@ -462,8 +466,8 @@ export function create_histogram(inp_data, name, title) {
// Create y axis metadata
// Used for y axis and bar initialization
- var y = d3
- .curveLinear()
+ var y = d3.scale
+ .linear()
.domain([0, d3.max(data, (d) => d.y)])
.range([height, 0]);
@@ -538,14 +542,14 @@ export function create_histogram(inp_data, name, title) {
i++;
}
var wdth = i * 4 + 10;
- d3.select(d.target.parentElement)
+ d3.select(currentEvent.target.parentElement)
.select(".tool_tip")
.select("text")
.attr("transform", `translate( ${margin.left - 5}, ${height - d.length * zoom + margin.top + 10} )`)
.attr("visibility", "visible")
.text(d.length);
- d3.select(d.target.parentElement)
+ d3.select(currentEvent.target.parentElement)
.select(".tool_tip")
.attr("width", `${wdth}px`)
.attr("height", "15px")
@@ -555,11 +559,14 @@ export function create_histogram(inp_data, name, title) {
.attr("height", "15px")
.attr("fill", "#ebd9b2");
})
- .on("mouseleave", (d) => {
+ .on("mouseleave", () => {
// Remove tool tip
- d3.select(d.target.parentElement).select(".tool_tip").select("text").attr("visibility", "hidden");
+ d3.select(currentEvent.target.parentElement)
+ .select(".tool_tip")
+ .select("text")
+ .attr("visibility", "hidden");
- d3.select(d.target.parentElement)
+ d3.select(currentEvent.target.parentElement)
.select(".tool_tip")
.select("rect")
.attr("width", "0")
diff --git a/client/src/schema/schema.ts b/client/src/schema/schema.ts
index 034563cbe54f..88326139bd19 100644
--- a/client/src/schema/schema.ts
+++ b/client/src/schema/schema.ts
@@ -47,6 +47,10 @@ export interface paths {
*/
put: operations["reload_toolbox_api_configuration_toolbox_put"];
};
+ "/api/dataset_collection_element/{dce_id}": {
+ /** Content */
+ get: operations["content_api_dataset_collection_element__dce_id__get"];
+ };
"/api/dataset_collections": {
/** Create a new dataset collection instance. */
post: operations["create_api_dataset_collections_post"];
@@ -7838,6 +7842,33 @@ export interface operations {
};
};
};
+ content_api_dataset_collection_element__dce_id__get: {
+ /** Content */
+ parameters: {
+ /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */
+ header?: {
+ "run-as"?: string;
+ };
+ /** @description The encoded identifier of the dataset collection element. */
+ path: {
+ dce_id: string;
+ };
+ };
+ responses: {
+ /** @description Successful Response */
+ 200: {
+ content: {
+ "application/json": components["schemas"]["DCESummary"];
+ };
+ };
+ /** @description Validation Error */
+ 422: {
+ content: {
+ "application/json": components["schemas"]["HTTPValidationError"];
+ };
+ };
+ };
+ };
create_api_dataset_collections_post: {
/** Create a new dataset collection instance. */
parameters?: {
diff --git a/client/src/stores/workflowConnectionStore.test.ts b/client/src/stores/workflowConnectionStore.test.ts
index ff15ac9d1592..5b14e1e15ae1 100644
--- a/client/src/stores/workflowConnectionStore.test.ts
+++ b/client/src/stores/workflowConnectionStore.test.ts
@@ -31,7 +31,6 @@ const outputTerminal: OutputTerminal = {
};
const connection: Connection = {
- id: "connection-id",
input: inputTerminal,
output: outputTerminal,
};
diff --git a/client/src/stores/workflowConnectionStore.ts b/client/src/stores/workflowConnectionStore.ts
index 170ddf76d80f..ddd2dee7b42a 100644
--- a/client/src/stores/workflowConnectionStore.ts
+++ b/client/src/stores/workflowConnectionStore.ts
@@ -4,7 +4,7 @@ import { pushOrSet } from "@/utils/pushOrSet";
import Vue from "vue";
interface InvalidConnections {
- [index: string]: string | undefined;
+ [index: ConnectionId]: string | undefined;
}
export interface State {
@@ -15,20 +15,13 @@ export interface State {
stepToConnections: { [index: number]: Connection[] };
}
-export class Connection {
+export interface Connection {
input: InputTerminal;
output: OutputTerminal;
-
- constructor(input: InputTerminal, output: OutputTerminal) {
- this.input = input;
- this.output = output;
- }
-
- get id(): string {
- return `${this.input.stepId}-${this.input.name}-${this.output.stepId}-${this.output.name}`;
- }
}
+export type ConnectionId = `${string}-${string}-${string}-${string}`;
+
export interface BaseTerminal {
stepId: number;
name: string;
@@ -50,7 +43,7 @@ interface TerminalToOutputTerminals {
export const useConnectionStore = defineStore("workflowConnectionStore", {
state: (): State => ({
- connections: [] as Connection[],
+        connections: [] as Array<Connection>,
invalidConnections: {} as InvalidConnections,
inputTerminalToOutputTerminals: {} as TerminalToOutputTerminals,
terminalToConnection: {} as { [index: string]: Connection[] },
@@ -87,13 +80,15 @@ export const useConnectionStore = defineStore("workflowConnectionStore", {
dropFromInvalidConnections(this: State, connectionId: string) {
Vue.delete(this.invalidConnections, connectionId);
},
- removeConnection(this, terminal: InputTerminal | OutputTerminal | Connection["id"]) {
+ removeConnection(this, terminal: InputTerminal | OutputTerminal | ConnectionId) {
const stepStore = useWorkflowStepStore();
this.connections = this.connections.filter((connection) => {
+ const id = getConnectionId(connection);
+
if (typeof terminal === "string") {
- if (connection.id == terminal) {
+ if (id === terminal) {
stepStore.removeConnection(connection);
- Vue.delete(this.invalidConnections, connection.id);
+ Vue.delete(this.invalidConnections, id);
return false;
} else {
return true;
@@ -101,7 +96,7 @@ export const useConnectionStore = defineStore("workflowConnectionStore", {
} else if (terminal.connectorType === "input") {
if (connection.input.stepId == terminal.stepId && connection.input.name == terminal.name) {
stepStore.removeConnection(connection);
- Vue.delete(this.invalidConnections, connection.id);
+ Vue.delete(this.invalidConnections, id);
return false;
} else {
return true;
@@ -109,7 +104,7 @@ export const useConnectionStore = defineStore("workflowConnectionStore", {
} else {
if (connection.output.stepId == terminal.stepId && connection.output.name == terminal.name) {
stepStore.removeConnection(connection);
- Vue.delete(this.invalidConnections, connection.id);
+ Vue.delete(this.invalidConnections, id);
return false;
} else {
return true;
@@ -125,7 +120,7 @@ export const useConnectionStore = defineStore("workflowConnectionStore", {
function updateTerminalToTerminal(connections: Connection[]) {
const inputTerminalToOutputTerminals: TerminalToOutputTerminals = {};
- connections.map((connection) => {
+ connections.forEach((connection) => {
const terminals = getTerminals(connection);
const inputTerminalId = getTerminalId(terminals.input);
pushOrSet(inputTerminalToOutputTerminals, inputTerminalId, terminals.output);
@@ -135,7 +130,7 @@ function updateTerminalToTerminal(connections: Connection[]) {
function updateTerminalToConnection(connections: Connection[]) {
const terminalToConnection: { [index: string]: Connection[] } = {};
- connections.map((connection) => {
+ connections.forEach((connection) => {
const terminals = getTerminals(connection);
const outputTerminalId = getTerminalId(terminals.output);
pushOrSet(terminalToConnection, outputTerminalId, connection);
@@ -147,7 +142,7 @@ function updateTerminalToConnection(connections: Connection[]) {
function updateStepToConnections(connections: Connection[]) {
const stepToConnections: { [index: number]: Connection[] } = {};
- connections.map((connection) => {
+ connections.forEach((connection) => {
pushOrSet(stepToConnections, connection.input.stepId, connection);
pushOrSet(stepToConnections, connection.output.stepId, connection);
});
@@ -164,3 +159,7 @@ export function getTerminals(item: Connection): { input: InputTerminal; output:
output: { stepId: item.output.stepId, name: item.output.name, connectorType: "output" },
};
}
+
+export function getConnectionId(item: Connection): ConnectionId {
+ return `${item.input.stepId}-${item.input.name}-${item.output.stepId}-${item.output.name}`;
+}
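To make the class-to-interface change concrete, a small sketch built only from the types in this file; the step IDs and terminal names are made up:

import { getConnectionId, type Connection, type ConnectionId } from "@/stores/workflowConnectionStore";

// Connections are now plain objects rather than class instances.
const connection: Connection = {
    input: { stepId: 1, name: "input1", connectorType: "input" },
    output: { stepId: 0, name: "output", connectorType: "output" },
};

// The id is derived on demand instead of living on the object as a getter.
const id: ConnectionId = getConnectionId(connection); // "1-input1-0-output"

// The store's removeConnection(id) keeps working with this derived id,
// now typed as ConnectionId instead of a bare string.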
diff --git a/client/src/stores/workflowEditorStateStore.ts b/client/src/stores/workflowEditorStateStore.ts
index 0cfd7d7040c1..8e767a3b8091 100644
--- a/client/src/stores/workflowEditorStateStore.ts
+++ b/client/src/stores/workflowEditorStateStore.ts
@@ -1,4 +1,4 @@
-import Vue from "vue";
+import Vue, { reactive } from "vue";
import type { UnwrapRef } from "vue";
import { defineStore } from "pinia";
import type { OutputTerminals } from "@/components/Workflow/Editor/modules/terminals";
@@ -14,12 +14,7 @@ export interface OutputTerminalPosition {
startY: number;
}
-export interface TerminalPosition {
- startX: number;
- endX: number;
- startY: number;
- endY: number;
-}
+export type TerminalPosition = InputTerminalPosition & OutputTerminalPosition;
export interface XYPosition {
x: number;
@@ -50,10 +45,14 @@ export const useWorkflowStateStore = defineStore("workflowStateStore", {
}),
getters: {
getInputTerminalPosition(state: State) {
- return (stepId: number, inputName: string) => state.inputTerminals[stepId]?.[inputName];
+ return (stepId: number, inputName: string) => {
+ return state.inputTerminals[stepId]?.[inputName] as InputTerminalPosition | undefined;
+ };
},
getOutputTerminalPosition(state: State) {
- return (stepId: number, outputName: string) => state.outputTerminals[stepId]?.[outputName];
+ return (stepId: number, outputName: string) => {
+ return state.outputTerminals[stepId]?.[outputName] as OutputTerminalPosition | undefined;
+ };
},
getStepLoadingState(state: State) {
return (stepId: number) => state.stepLoadingState[stepId];
@@ -61,18 +60,18 @@ export const useWorkflowStateStore = defineStore("workflowStateStore", {
},
actions: {
setInputTerminalPosition(stepId: number, inputName: string, position: InputTerminalPosition) {
- if (this.inputTerminals[stepId]) {
- Vue.set(this.inputTerminals[stepId]!, inputName, position);
- } else {
- Vue.set(this.inputTerminals, stepId, { [inputName]: position });
+ if (!this.inputTerminals[stepId]) {
+ Vue.set(this.inputTerminals, stepId, {});
}
+
+ Vue.set(this.inputTerminals[stepId]!, inputName, position);
},
setOutputTerminalPosition(stepId: number, outputName: string, position: OutputTerminalPosition) {
- if (this.outputTerminals[stepId]) {
- Vue.set(this.outputTerminals[stepId]!, outputName, position);
- } else {
- Vue.set(this.outputTerminals, stepId, { [outputName]: position });
+ if (!this.outputTerminals[stepId]) {
+ Vue.set(this.outputTerminals, stepId, reactive({}));
}
+
+ Vue.set(this.outputTerminals[stepId]!, outputName, position);
},
deleteInputTerminalPosition(stepId: number, inputName: string) {
delete this.inputTerminals[stepId]?.[inputName];
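A brief note on the TerminalPosition change above: the explicit interface is replaced by an intersection type. Assuming InputTerminalPosition carries the end coordinates and OutputTerminalPosition the start coordinates (only partially visible in this hunk), the intersection is structurally identical to the removed interface:

// Assumed field split; only OutputTerminalPosition's startY is visible in the hunk.
interface InputTerminalPosition {
    endX: number;
    endY: number;
}

interface OutputTerminalPosition {
    startX: number;
    startY: number;
}

// Equivalent to the removed { startX; endX; startY; endY } interface.
type TerminalPosition = InputTerminalPosition & OutputTerminalPosition;

const position: TerminalPosition = { startX: 10, startY: 20, endX: 110, endY: 20 };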
diff --git a/client/src/stores/workflowStepStore.ts b/client/src/stores/workflowStepStore.ts
index 31e95c9ee0c3..0fea270984cd 100644
--- a/client/src/stores/workflowStepStore.ts
+++ b/client/src/stores/workflowStepStore.ts
@@ -1,7 +1,6 @@
import Vue from "vue";
import { defineStore } from "pinia";
-import { useConnectionStore } from "@/stores/workflowConnectionStore";
-import { Connection } from "@/stores/workflowConnectionStore";
+import { getConnectionId, useConnectionStore, type Connection } from "@/stores/workflowConnectionStore";
import type { CollectionTypeDescriptor } from "@/components/Workflow/Editor/modules/collectionTypeDescription";
import { assertDefined } from "@/utils/assertions";
@@ -276,7 +275,7 @@ export const useWorkflowStepStore = defineStore("workflowStepStore", {
const connectionStore = useConnectionStore();
connectionStore
.getConnectionsForStep(stepId)
- .forEach((connection) => connectionStore.removeConnection(connection.id));
+ .forEach((connection) => connectionStore.removeConnection(getConnectionId(connection)));
Vue.delete(this.steps, stepId.toString());
Vue.delete(this.stepExtraInputs, stepId);
},
@@ -294,18 +293,18 @@ export function stepToConnections(step: Step): Connection[] {
outputArray = [outputArray];
}
outputArray.forEach((output) => {
- const connection = new Connection(
- {
+ const connection: Connection = {
+ input: {
stepId: step.id,
name: inputName,
connectorType: "input",
},
- {
+ output: {
stepId: output.id,
name: output.output_name,
connectorType: "output",
- }
- );
+ },
+ };
const connectionInput = step.inputs.find((input) => input.name == inputName);
if (connectionInput && "input_subworkflow_step_id" in connectionInput) {
connection.input.input_subworkflow_step_id = connectionInput.input_subworkflow_step_id;
diff --git a/client/src/style/scss/workflow.scss b/client/src/style/scss/workflow.scss
index c16fc0af9af0..c069203f823b 100644
--- a/client/src/style/scss/workflow.scss
+++ b/client/src/style/scss/workflow.scss
@@ -98,32 +98,6 @@
transform-origin: center;
position: relative;
}
- .ribbon {
- .ribbon-outer {
- stroke: $brand-primary;
- }
- .ribbon-outer:active {
- stroke: $brand-success;
- }
- .ribbon-inner {
- stroke: $white;
- }
- .ribbon-inner:active {
- stroke: $brand-success;
- }
- .ribbon-inner-valid {
- stroke: $brand-success;
- }
- .ribbon-inner-invalid {
- stroke: $brand-warning;
- }
- &.dashed {
- stroke-dasharray: 5, 3;
- }
- &:hover .ribbon-outer {
- stroke: $brand-success;
- }
- }
.base-terminal {
@extend .fa;
@extend .fa-circle;
@@ -279,14 +253,6 @@
opacity: 0.8;
z-index: 10;
}
- canvas {
- position: absolute;
- z-index: 10;
- }
- canvas.dragging {
- position: absolute;
- z-index: 1000;
- }
}
}
diff --git a/client/src/utils/navigation/navigation.yml b/client/src/utils/navigation/navigation.yml
index 274ac895ecac..d3cbe1cfafb7 100644
--- a/client/src/utils/navigation/navigation.yml
+++ b/client/src/utils/navigation/navigation.yml
@@ -262,6 +262,7 @@ history_panel:
tag_area_button: '.details .stateless-tags .multiselect button'
tag_area_input: '.details .stateless-tags .multiselect input'
list_items: '.dataset-collection-panel .listing .content-item'
+ back_to_history: svg[data-description="back to history"]
selectors:
_: '#current-history-panel'
@@ -450,7 +451,7 @@ history_export:
history_export_tasks:
selectors:
- direct_download: '.direct-download-btn'
+ direct_download: '.gen-direct-download-btn'
file_source_tab: '.tab-export-to-file'
remote_file_name_input: '#file-source-tab #name'
toggle_options_link: '#toggle-options-link'
@@ -521,6 +522,11 @@ tool_form:
parameter_checkbox: 'div.ui-form-element[id="form-element-${parameter}"] .ui-switch'
parameter_input: 'div.ui-form-element[id="form-element-${parameter}"] .ui-input'
parameter_textarea: 'div.ui-form-element[id="form-element-${parameter}"] textarea'
+ parameter_batch_dataset_collection:
+ type: xpath
+ selector: //div[@id='form-element-${parameter}']//i[contains(@class, 'fa-folder-o')]/parent::label
+ data_option_value: option[value="${item_id}"]
+
repeat_insert: '[data-description="repeat insert"]'
reference: '.formatted-reference'
about: '.tool-footer'
@@ -700,7 +706,7 @@ workflow_editor:
//div[@data-label='Remove Tags']//input
tool_version_button: ".tool-versions"
connector_for: "#connection-${sink_id}-${source_id}"
- connector_invalid_for: "#connection-${sink_id}-${source_id} .ribbon-inner-invalid"
+ connector_invalid_for: "#connection-${sink_id}-${source_id} .connection.invalid"
connector_destroy_callout: '.delete-terminal'
save_button: '.editor-button-save'
save_workflow_confirmation_button: '#save-workflow-confirmation .btn-primary'
diff --git a/client/yarn.lock b/client/yarn.lock
index d12a22776e7c..507ac8f17e3b 100644
--- a/client/yarn.lock
+++ b/client/yarn.lock
@@ -1843,10 +1843,10 @@
consola "^2.15.0"
node-fetch "^2.6.7"
-"@pinia/testing@^0.0.14":
- version "0.0.14"
- resolved "https://registry.yarnpkg.com/@pinia/testing/-/testing-0.0.14.tgz#774791df360a1949c9ea2d8665de35d5a0200251"
- integrity sha512-ZmZwVNd/NnKYLIfjfuKl0zlJ3UdiXFpsHzSlL6wCeezSlyrqGMxsIQKv0J6fleu38gyCNTPBEipfxrt8V4+VIg==
+"@pinia/testing@0.1.0":
+ version "0.1.0"
+ resolved "https://registry.yarnpkg.com/@pinia/testing/-/testing-0.1.0.tgz#35a76c573aae9f2c43ea2aa0b567de4a8877fb6a"
+ integrity sha512-6lcP6QvQNrFtjslppFjfnpiyRnEKrwgro/TjujLlHI8HwWbdfHfMU4BpciuLLLuuSIENIH9zrevl+eAvmUgh7A==
dependencies:
vue-demi "*"
@@ -2630,10 +2630,10 @@
optionalDependencies:
prettier "^1.18.2 || ^2.0.0"
-"@vue/devtools-api@^6.4.5":
- version "6.4.5"
- resolved "https://registry.yarnpkg.com/@vue/devtools-api/-/devtools-api-6.4.5.tgz#d54e844c1adbb1e677c81c665ecef1a2b4bb8380"
- integrity sha512-JD5fcdIuFxU4fQyXUu3w2KpAJHzTVdN+p4iOX2lMWSHMOoQdMAcpFLZzm9Z/2nmsoZ1a96QEhZ26e50xLBsgOQ==
+"@vue/devtools-api@^6.5.0":
+ version "6.5.0"
+ resolved "https://registry.yarnpkg.com/@vue/devtools-api/-/devtools-api-6.5.0.tgz#98b99425edee70b4c992692628fa1ea2c1e57d07"
+ integrity sha512-o9KfBeaBmCKl10usN4crU53fYtC1r7jJwdGKjPT24t348rHxgfpZ0xL3Xm/gLUYnc0oTp8LAmrxOeLyu6tbk2Q==
"@vue/test-utils@^1.3.4":
version "1.3.4"
@@ -8826,12 +8826,12 @@ pinia-plugin-persistedstate@^2.4.0:
resolved "https://registry.yarnpkg.com/pinia-plugin-persistedstate/-/pinia-plugin-persistedstate-2.4.0.tgz#fda569b3c397517a0cf8aba83a628283767da620"
integrity sha512-bQcpv47jk3ISl+InuJWsFaS/K7pRZ97kfoD2WCf/suhnlLy48k3BnFM2tI6YZ1xMsDaPv4yOsaPuPAUuSmEO2Q==
-pinia@^2.0.23:
- version "2.0.25"
- resolved "https://registry.yarnpkg.com/pinia/-/pinia-2.0.25.tgz#1e9d50ab54d106c8fcb39090293d780b3b8152b6"
- integrity sha512-3reAkjJ6bW2D5hZKRMS0c9rUbHVlsVyZd037xO0PJr2AuF/09RRSBnFLlJgmHF4Jx6dEoW/jZBOHTushY7IMlw==
+pinia@^2.0.36:
+ version "2.0.36"
+ resolved "https://registry.yarnpkg.com/pinia/-/pinia-2.0.36.tgz#65130f3b94cc7fe25156308634010fab893dff24"
+ integrity sha512-4UKApwjlmJH+VuHKgA+zQMddcCb3ezYnyewQ9NVrsDqZ/j9dMv5+rh+1r48whKNdpFkZAWVxhBp5ewYaYX9JcQ==
dependencies:
- "@vue/devtools-api" "^6.4.5"
+ "@vue/devtools-api" "^6.5.0"
vue-demi "*"
pinkie-promise@^2.0.0:
@@ -11133,9 +11133,9 @@ vinyl@^2.0.0:
replace-ext "^1.0.0"
vue-demi@*:
- version "0.13.11"
- resolved "https://registry.yarnpkg.com/vue-demi/-/vue-demi-0.13.11.tgz#7d90369bdae8974d87b1973564ad390182410d99"
- integrity sha512-IR8HoEEGM65YY3ZJYAjMlKygDQn25D5ajNFNoKh9RSDMQtlzCxtfQjdQgv9jjK+m3377SsJXY8ysq8kLCZL25A==
+ version "0.14.1"
+ resolved "https://registry.yarnpkg.com/vue-demi/-/vue-demi-0.14.1.tgz#1ed9af03a27642762bfed83d8750805302d0398d"
+ integrity sha512-rt+yuCtXvscYot9SQQj3WKZJVSriPNqVkpVBNEHPzSgBv7QIYzsS410VqVgvx8f9AAPgjg+XPKvmV3vOqqkJQQ==
vue-eslint-parser@^9.0.1:
version "9.0.3"
diff --git a/config/plugins/webhooks/gtn/script.js b/config/plugins/webhooks/gtn/script.js
index 91d748ec6175..7518ef30d821 100644
--- a/config/plugins/webhooks/gtn/script.js
+++ b/config/plugins/webhooks/gtn/script.js
@@ -1,8 +1,9 @@
-(function () {
- var gtnWebhookLoaded = false;
- var lastUpdate = 0;
- var urlParams = new URLSearchParams(document.location.search);
- var autoLoadTutorial = urlParams.get('autoload_gtn_tutorial') === null ? "" : urlParams.get('autoload_gtn_tutorial');
+(() => {
+ let gtnWebhookLoaded = false;
+ let lastUpdate = 0;
+ const urlParams = new URLSearchParams(document.location.search);
+ const autoLoadTutorial =
+ urlParams.get("autoload_gtn_tutorial") === null ? "" : urlParams.get("autoload_gtn_tutorial");
function removeOverlay() {
const container = document.getElementById("gtn-container");
@@ -16,7 +17,7 @@
}
function getIframeUrl() {
- var loc;
+ let loc;
try {
loc = document.getElementById("gtn-embed").contentWindow.location.pathname;
} catch (e) {
@@ -26,7 +27,7 @@
}
function getIframeScroll() {
- var loc;
+ let loc;
try {
loc = parseInt(document.getElementById("gtn-embed").contentWindow.scrollY);
} catch (e) {
@@ -35,11 +36,9 @@
return loc;
}
- function restoreLocation() {}
-
function persistLocation() {
// Don't save every scroll event.
- var time = new Date().getTime();
+ const time = new Date().getTime();
if (time - lastUpdate < 1000) {
return;
}
@@ -48,7 +47,9 @@
}
function addIframe() {
- let url, message, onloadscroll;
+ let url;
+ let message;
+ let onloadscroll;
gtnWebhookLoaded = true;
let storedData = false;
let safe = false;
@@ -59,8 +60,7 @@
fetch("/training-material/")
.then((response) => {
if (!response.ok) {
- url =
- `https://training.galaxyproject.org/training-material/${autoLoadTutorial}?utm_source=webhook&utm_medium=noproxy&utm_campaign=gxy`;
+ url = `https://training.galaxyproject.org/training-material/${autoLoadTutorial}?utm_source=webhook&utm_medium=noproxy&utm_campaign=gxy`;
message = `
Click to run unavailable.
@@ -68,7 +68,7 @@
} else {
safe = true;
- var storedLocation = window.localStorage.getItem("gtn-in-galaxy");
+ const storedLocation = window.localStorage.getItem("gtn-in-galaxy");
if (
storedLocation !== null &&
storedLocation.split(" ")[1] !== undefined &&
@@ -113,6 +113,7 @@
// Depends on the iframe being present
document.getElementById("gtn-embed").addEventListener("load", () => {
// Save our current location when possible
+ const gtnEmbed = document.getElementById("gtn-embed");
if (onloadscroll !== undefined) {
document.getElementById("gtn-embed").contentWindow.scrollTo(0, parseInt(onloadscroll));
onloadscroll = undefined;
@@ -121,26 +122,59 @@
if (safe) {
persistLocation();
}
- var gtn_tools = $("#gtn-embed").contents().find("span[data-tool]");
- // Buttonify
- gtn_tools.addClass("galaxy-proxy-active");
+ // Add the class to the entire GTN page
+ document
+ .getElementById("gtn-embed")
+ .contentDocument.getElementsByTagName("body")[0]
+ .classList.add("galaxy-proxy-active");
- gtn_tools.click((e) => {
- var target = e.target;
+ const gtnToolElements = document
+ .getElementById("gtn-embed")
+ .contentDocument.querySelectorAll("span[data-tool]");
- // Sometimes we get the i or the strong, not the parent.
- if (e.target.tagName.toLowerCase() !== "span") {
- target = e.target.parentElement;
- }
+ // Buttonify
+ gtnToolElements.forEach(function (el) {
+ el.classList.add("galaxy-proxy-active");
+ el.addEventListener("click", function (e) {
+ let target = e.target;
+
+ // Sometimes we get the i or the strong, not the parent.
+ if (e.target.tagName.toLowerCase() !== "span") {
+ target = e.target.parentElement;
+ }
+
+                    const tool_id = target.dataset.tool;
+
+ if (tool_id === "upload1" || tool_id === "upload") {
+ document.getElementById("tool-panel-upload-button").click();
+ } else {
+ Galaxy.router.push({ path: `/?tool_id=${encodeURIComponent(tool_id)}` });
+ }
+ removeOverlay();
+ });
+ });
- tool_id = $(target).data("tool");
+ const gtnWorkflowElements = document
+ .getElementById("gtn-embed")
+ .contentDocument.querySelectorAll("span[data-workflow]");
- if (tool_id === "upload1" || tool_id === "upload") {
- document.getElementById("tool-panel-upload-button").click();
- } else {
- Galaxy.router.push({ path: `/?tool_id=${encodeURIComponent(tool_id)}` });
- }
- removeOverlay();
+ // Buttonify
+ gtnWorkflowElements.forEach(function (el) {
+ el.classList.add("galaxy-proxy-active");
+ el.addEventListener("click", (e) => {
+ let target = e.target;
+
+ // Sometimes we get the i or the strong, not the parent.
+ if (e.target.tagName.toLowerCase() !== "span") {
+ target = e.target.parentElement;
+ }
+
+                    const trs_url = target.dataset.workflow;
+ Galaxy.router.push({
+ path: `/workflows/trs_import?trs_url=${encodeURIComponent(trs_url)}&run_form=true`,
+ });
+ removeOverlay();
+ });
});
});
});
@@ -183,15 +217,15 @@
showOverlay();
}
});
- if(autoLoadTutorial){
+ if (autoLoadTutorial) {
clean.click();
}
});
// Remove the overlay on escape button click
- document.addEventListener("keydown", (e) => {
- // Check for escape button - "27"
- if (e.which === 27 || e.keyCode === 27) {
+ document.addEventListener("keydown", (event) => {
+ // Check for escape button - "Escape" (modern browsers), "27" (old browsers)
+ if (event.key === "Escape" || event.keyCode === 27) {
removeOverlay();
}
});
diff --git a/lib/galaxy/datatypes/data.py b/lib/galaxy/datatypes/data.py
index c98e2a738177..2e5f3ee295a3 100644
--- a/lib/galaxy/datatypes/data.py
+++ b/lib/galaxy/datatypes/data.py
@@ -728,7 +728,7 @@ def as_display_type(self, dataset: DatasetProtocol, type: str, **kwd) -> Union[F
return f"This display type ({type}) is not implemented for this datatype ({dataset.ext})."
def get_display_links(
- self, dataset: DatasetProtocol, type: str, app, base_url: str, target_frame: str = "_blank", **kwd
+ self, dataset: DatasetProtocol, type: str, app, base_url: str, request, target_frame: str = "_blank", **kwd
):
"""
Returns a list of tuples of (name, link) for a particular display type. No check on
@@ -739,7 +739,7 @@ def get_display_links(
try:
if app.config.enable_old_display_applications and type in self.get_display_types():
return target_frame, getattr(self, self.supported_display_apps[type]["links_function"])(
- dataset, type, app, base_url, **kwd
+ dataset, type, app, base_url, request, **kwd
)
except Exception:
log.exception(
diff --git a/lib/galaxy/datatypes/display_applications/application.py b/lib/galaxy/datatypes/display_applications/application.py
index f22523bdc1f3..ce6bc7d0e04f 100644
--- a/lib/galaxy/datatypes/display_applications/application.py
+++ b/lib/galaxy/datatypes/display_applications/application.py
@@ -62,6 +62,7 @@ def get_display_url(self, data, trans):
app_name=quote_plus(self.display_application.id),
link_name=quote_plus(self.id),
app_action=None,
+ environ=trans.request.environ,
)
def get_inital_values(self, data, trans):
diff --git a/lib/galaxy/datatypes/display_applications/parameters.py b/lib/galaxy/datatypes/display_applications/parameters.py
index 29c4af4f57c4..6846d1088bf2 100644
--- a/lib/galaxy/datatypes/display_applications/parameters.py
+++ b/lib/galaxy/datatypes/display_applications/parameters.py
@@ -227,7 +227,8 @@ def url(self):
base_url = f"http{base_url[5:]}"
return "{}{}".format(
base_url,
- self.trans.app.url_for(
+ self.trans.app.legacy_url_for(
+ mapper=self.trans.app.legacy_mapper,
controller="dataset",
action="display_application",
dataset_id=self._dataset_hash,
@@ -236,6 +237,7 @@ def url(self):
link_name=quote_plus(self.parameter.link.id),
app_action=self.action_name,
action_param=self._url,
+ environ=self.trans.request.environ,
),
)
diff --git a/lib/galaxy/datatypes/genetics.py b/lib/galaxy/datatypes/genetics.py
index a4acd4df30e3..3456bfa249e7 100644
--- a/lib/galaxy/datatypes/genetics.py
+++ b/lib/galaxy/datatypes/genetics.py
@@ -90,7 +90,7 @@ def as_ucsc_display_file(self, dataset: DatasetProtocol, **kwd) -> Union[FileObj
"""
return open(dataset.file_name, "rb")
- def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List:
+ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str, request) -> List:
"""
from the ever-helpful angie hinrichs angie@soe.ucsc.edu
a genome graphs call looks like this
@@ -114,12 +114,17 @@ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) ->
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build("ucsc", dataset.dbkey):
if site_name in app.datatypes_registry.get_display_sites("ucsc"):
site_url = site_url.replace("/hgTracks?", "/hgGenome?") # for genome graphs
- internal_url = "%s" % app.url_for(
- controller="dataset", dataset_id=dataset.id, action="display_at", filename=f"ucsc_{site_name}"
+ internal_url = "%s" % app.legacy_url_for(
+ mapper=app.legacy_mapper,
+ environ=request.environ,
+ controller="dataset",
+ dataset_id=dataset.id,
+ action="display_at",
+ filename=f"ucsc_{site_name}",
)
display_url = "%s%s/display_as?id=%i&display_app=%s&authz_method=display_at" % (
base_url,
- app.url_for(controller="root"),
+ app.legacy_url_for(mapper=app.legacy_mapper, environ=request.environ, controller="root"),
dataset.id,
type,
)
diff --git a/lib/galaxy/datatypes/interval.py b/lib/galaxy/datatypes/interval.py
index a92f32f87557..1a60d1e1f3f2 100644
--- a/lib/galaxy/datatypes/interval.py
+++ b/lib/galaxy/datatypes/interval.py
@@ -315,7 +315,7 @@ def display_peek(self, dataset: DatasetProtocol) -> str:
},
)
- def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List:
+ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str, request) -> List:
"""
Generate links to UCSC genome browser sites based on the dbkey
and content of dataset.
@@ -337,12 +337,22 @@ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) ->
# Accumulate links for valid sites
ret_val = []
for site_name, site_url in valid_sites:
- internal_url = app.url_for(
- controller="dataset", dataset_id=dataset.id, action="display_at", filename="ucsc_" + site_name
+ internal_url = app.legacy_url_for(
+ mapper=app.legacy_mapper,
+ environ=request.environ,
+ controller="dataset",
+ dataset_id=dataset.id,
+ action="display_at",
+ filename="ucsc_" + site_name,
)
display_url = quote_plus(
"%s%s/display_as?id=%i&display_app=%s&authz_method=display_at"
- % (base_url, app.url_for(controller="root"), dataset.id, type)
+ % (
+ base_url,
+ app.legacy_url_for(mapper=app.legacy_mapper, environ=request.environ, controller="root"),
+ dataset.id,
+ type,
+ )
)
redirect_url = quote_plus(f"{site_url}db={dataset.dbkey}&position={chrom}:{start}-{stop}&hgt.customText=%s")
link = f"{internal_url}?redirect_url={redirect_url}&display_url={display_url}"
@@ -758,18 +768,23 @@ class Bed12(BedStrict):
class _RemoteCallMixin:
def _get_remote_call_url(
- self, redirect_url: str, site_name: str, dataset: HasId, type: str, app, base_url: str
+ self, redirect_url: str, site_name: str, dataset: HasId, type: str, app, base_url: str, request
) -> str:
"""Retrieve the URL to call out to an external site and retrieve data.
This routes our external URL through a local galaxy instance which makes
the data available, followed by redirecting to the remote site with a
link back to the available information.
"""
- internal_url = f"{app.url_for(controller='dataset', dataset_id=dataset.id, action='display_at', filename=f'{type}_{site_name}')}"
+ internal_url = f"{app.legacy_url_for(mapper=app.legacy_mapper, environ=request.environ, controller='dataset', dataset_id=dataset.id, action='display_at', filename=f'{type}_{site_name}')}"
base_url = app.config.get("display_at_callback", base_url)
display_url = quote_plus(
"%s%s/display_as?id=%i&display_app=%s&authz_method=display_at"
- % (base_url, app.url_for(controller="root"), dataset.id, type)
+ % (
+ base_url,
+ app.legacy_url_for(mapper=app.legacy_mapper, environ=request.environ, controller="root"),
+ dataset.id,
+ type,
+ )
)
link = f"{internal_url}?redirect_url={redirect_url}&display_url={display_url}"
return link
@@ -954,7 +969,7 @@ def get_estimated_display_viewport(
log.exception("Unexpected error")
return (None, None, None) # could not determine viewport
- def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List:
+ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str, request) -> List:
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport(dataset)
if seqid is not None:
@@ -963,11 +978,11 @@ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) ->
redirect_url = quote_plus(
f"{site_url}db={dataset.dbkey}&position={seqid}:{start}-{stop}&hgt.customText=%s"
)
- link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url)
+ link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url, request)
ret_val.append((site_name, link))
return ret_val
- def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List:
+ def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str, request) -> List:
ret_val = []
seqid, start, stop = self.get_estimated_display_viewport(dataset)
if seqid is not None:
@@ -976,7 +991,7 @@ def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str)
if seqid.startswith("chr") and len(seqid) > 3:
seqid = seqid[3:]
redirect_url = quote_plus(f"{site_url}/?q={seqid}:{start}..{stop}&eurl=%s")
- link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url)
+ link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url, request)
ret_val.append((site_name, link))
return ret_val
@@ -1356,7 +1371,7 @@ def get_estimated_display_viewport(
log.exception("Unexpected error")
return (None, None, None) # could not determine viewport
- def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List:
+ def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str, request) -> List:
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
@@ -1365,11 +1380,11 @@ def gbrowse_links(self, dataset: DatasetProtocol, type: str, app, base_url: str)
if chrom.startswith("chr") and len(chrom) > 3:
chrom = chrom[3:]
redirect_url = quote_plus(f"{site_url}/?q={chrom}:{start}..{stop}&eurl=%s")
- link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url)
+ link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url, request)
ret_val.append((site_name, link))
return ret_val
- def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List:
+ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str, request) -> List:
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
@@ -1378,7 +1393,7 @@ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) ->
redirect_url = quote_plus(
f"{site_url}db={dataset.dbkey}&position={chrom}:{start}-{stop}&hgt.customText=%s"
)
- link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url)
+ link = self._get_remote_call_url(redirect_url, site_name, dataset, type, app, base_url, request)
ret_val.append((site_name, link))
return ret_val
@@ -1538,16 +1553,21 @@ def get_estimated_display_viewport(
log.exception("Unexpected error")
return (None, None, None) # could not determine viewport
- def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str) -> List:
+ def ucsc_links(self, dataset: DatasetProtocol, type: str, app, base_url: str, request) -> List:
ret_val = []
chrom, start, stop = self.get_estimated_display_viewport(dataset)
if chrom is not None:
for site_name, site_url in app.datatypes_registry.get_legacy_sites_by_build("ucsc", dataset.dbkey):
if site_name in app.datatypes_registry.get_display_sites("ucsc"):
- internal_url = f"{app.url_for(controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name)}"
+ internal_url = f"{app.legacy_url_for(mapper=app.legacy_mapper, environ=request.environ, controller='dataset', dataset_id=dataset.id, action='display_at', filename='ucsc_' + site_name)}"
display_url = quote_plus(
"%s%s/display_as?id=%i&display_app=%s&authz_method=display_at"
- % (base_url, app.url_for(controller="root"), dataset.id, type)
+ % (
+ base_url,
+ app.legacy_url_for(mapper=app.legacy_mapper, environ=request.environ, controller="root"),
+ dataset.id,
+ type,
+ )
)
redirect_url = quote_plus(
f"{site_url}db={dataset.dbkey}&position={chrom}:{start}-{stop}&hgt.customText=%s"
diff --git a/lib/galaxy/dependencies/__init__.py b/lib/galaxy/dependencies/__init__.py
index 79526e490304..e46c6fee60c9 100644
--- a/lib/galaxy/dependencies/__init__.py
+++ b/lib/galaxy/dependencies/__init__.py
@@ -258,6 +258,9 @@ def check_fs_googledrivefs(self):
def check_fs_gcsfs(self):
return "googlecloudstorage" in self.file_sources
+ def check_google_cloud_storage(self):
+ return "googlecloudstorage" in self.file_sources
+
def check_fs_onedatafs(self):
return "onedata" in self.file_sources
diff --git a/lib/galaxy/dependencies/conditional-requirements.txt b/lib/galaxy/dependencies/conditional-requirements.txt
index 8d8a5d9bd55c..e347e8fb8970 100644
--- a/lib/galaxy/dependencies/conditional-requirements.txt
+++ b/lib/galaxy/dependencies/conditional-requirements.txt
@@ -22,6 +22,8 @@ fs.sshfs # type: ssh
fs.anvilfs # type: anvil
fs.googledrivefs # type: googledrive
fs-gcsfs # type: googlecloudstorage
+# fs-gcsfs doesn't pin google-cloud-storage, and old versions log noisy exceptions and break test discovery
+google-cloud-storage>=2.8.0 # type: googlecloudstorage
fs-onedatafs # type: onedata
fs-basespace # type: basespace
diff --git a/lib/galaxy/dependencies/dev-requirements.txt b/lib/galaxy/dependencies/dev-requirements.txt
index 5b9f9be9087e..3082077def9c 100644
--- a/lib/galaxy/dependencies/dev-requirements.txt
+++ b/lib/galaxy/dependencies/dev-requirements.txt
@@ -88,6 +88,7 @@ pytest-asyncio==0.21.0 ; python_version >= "3.7" and python_version < "3.12"
pytest-celery==0.0.0 ; python_version >= "3.7" and python_version < "3.12"
pytest-cov==4.0.0 ; python_version >= "3.7" and python_version < "3.12"
pytest-html==3.2.0 ; python_version >= "3.7" and python_version < "3.12"
+pytest-httpserver==1.0.6 ; python_version >= "3.7" and python_version < "3.12"
pytest-json-report==1.5.0 ; python_version >= "3.7" and python_version < "3.12"
pytest-metadata==2.0.4 ; python_version >= "3.7" and python_version < "3.12"
pytest-mock==3.10.0 ; python_version >= "3.7" and python_version < "3.12"
diff --git a/lib/galaxy/managers/base.py b/lib/galaxy/managers/base.py
index a0779c4380af..829df35cdca8 100644
--- a/lib/galaxy/managers/base.py
+++ b/lib/galaxy/managers/base.py
@@ -371,9 +371,10 @@ def list(self, filters=None, order_by=None, limit=None, offset=None, **kwargs):
Returns all objects matching the given filters
"""
# list becomes a way of applying both filters generated in the orm (such as .user ==)
- # and functional filters that aren't currently possible using the orm (such as instance calcluated values
+ # and functional filters that aren't currently possible using the orm (such as instance calculated values
# or annotations/tags). List splits those two filters and applies limits/offsets
# only after functional filters (if any) using python.
+ self._handle_filters_case_sensitivity(filters)
orm_filters, fn_filters = self._split_filters(filters)
if not fn_filters:
# if no fn_filtering required, we can use the 'all orm' version with limit offset
@@ -387,6 +388,18 @@ def list(self, filters=None, order_by=None, limit=None, offset=None, **kwargs):
items = self._apply_fn_filters_gen(items, fn_filters)
return list(self._apply_fn_limit_offset_gen(items, limit, offset))
+ def _handle_filters_case_sensitivity(self, filters):
+ """Modifies the filters to make them case insensitive if needed."""
+ if filters is None:
+ return # No filters to handle
+ iterable_filters = filters if isinstance(filters, list) else [filters]
+ for item in iterable_filters:
+            # If the filter has the case_insensitive attribute set to True, it is a parsed orm filter
+            # that needs to compare the column against a lower case version of the value.
+ is_case_insensitive = getattr(item, "case_insensitive", False)
+ if is_case_insensitive and isinstance(item.filter, sqlalchemy.sql.elements.BinaryExpression):
+ item.filter.left = sqlalchemy.func.lower(item.filter.left)
+
def _split_filters(self, filters):
"""
Splits `filters` into a tuple of two lists:
diff --git a/lib/galaxy/managers/hdas.py b/lib/galaxy/managers/hdas.py
index 17d053511b8f..9b17b81638e9 100644
--- a/lib/galaxy/managers/hdas.py
+++ b/lib/galaxy/managers/hdas.py
@@ -640,7 +640,9 @@ def serialize_old_display_applications(self, item, key, trans=None, **context):
display_link_fn = hda.datatype.get_display_links
for display_app in hda.datatype.get_display_types():
- target_frame, display_links = display_link_fn(hda, display_app, self.app, trans.request.base)
+ target_frame, display_links = display_link_fn(
+ hda, display_app, self.app, trans.request.base, request=trans.request
+ )
if len(display_links) > 0:
display_label = hda.datatype.get_display_label(display_app)
diff --git a/lib/galaxy/managers/jobs.py b/lib/galaxy/managers/jobs.py
index 14788e6f688a..8c41f603e6ef 100644
--- a/lib/galaxy/managers/jobs.py
+++ b/lib/galaxy/managers/jobs.py
@@ -903,7 +903,7 @@ def inputs_recursive(input_params, param_values, depth=1, upgrade_messages=None)
value=f"{len(param_values[input.name])} uploaded datasets",
)
)
- elif input.type == "data":
+ elif input.type == "data" or input.type == "data_collection":
value = []
for element in listify(param_values[input.name]):
encoded_id = trans.security.encode_id(element.id)
diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py
index 24b35ba89b74..c9bb5711287b 100644
--- a/lib/galaxy/model/security.py
+++ b/lib/galaxy/model/security.py
@@ -4,6 +4,7 @@
datetime,
timedelta,
)
+from typing import List
from sqlalchemy import (
and_,
@@ -519,6 +520,12 @@ def can_access_datasets(self, user_roles, action_tuples):
return True
+ def can_access_collection(self, user_roles: List[galaxy.model.Role], collection: galaxy.model.DatasetCollection):
+ action_tuples = collection.dataset_action_tuples
+ if not self.can_access_datasets(user_roles, action_tuples):
+ return False
+ return True
+
def can_manage_dataset(self, roles, dataset):
return self.allow_action(roles, self.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset)
diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py
index 97205084ac72..122af7529555 100644
--- a/lib/galaxy/model/store/__init__.py
+++ b/lib/galaxy/model/store/__init__.py
@@ -2128,12 +2128,14 @@ def export_history(
datasets = query.all()
for dataset in datasets:
dataset.annotation = get_item_annotation_str(sa_session, history.user, dataset)
- add_dataset = (dataset.visible or include_hidden) and (not dataset.deleted or include_deleted)
- if dataset.id in self.collection_datasets:
- add_dataset = True
+ should_include_file = (dataset.visible or include_hidden) and (not dataset.deleted or include_deleted)
+ if not dataset.deleted and dataset.id in self.collection_datasets:
+ should_include_file = True
if dataset not in self.included_datasets:
- self.add_dataset(dataset, include_files=add_dataset)
+ if should_include_file:
+ self._ensure_dataset_file_exists(dataset)
+ self.add_dataset(dataset, include_files=should_include_file)
def export_library(
self, library: model.Library, include_hidden: bool = False, include_deleted: bool = False
@@ -2153,8 +2155,8 @@ def export_library_folder_contents(
) -> None:
for library_dataset in library_folder.datasets:
ldda = library_dataset.library_dataset_dataset_association
- add_dataset = (not ldda.visible or not include_hidden) and (not ldda.deleted or include_deleted)
- self.add_dataset(ldda, add_dataset)
+ should_include_file = (not ldda.visible or not include_hidden) and (not ldda.deleted or include_deleted)
+ self.add_dataset(ldda, should_include_file)
for folder in library_folder.folders:
self.export_library_folder_contents(folder, include_hidden=include_hidden, include_deleted=include_deleted)
@@ -2215,6 +2217,16 @@ def add_dataset_collection(
def add_dataset(self, dataset: model.DatasetInstance, include_files: bool = True) -> None:
self.included_datasets[dataset] = (dataset, include_files)
+ def _ensure_dataset_file_exists(self, dataset: model.DatasetInstance) -> None:
+ state = dataset.dataset.state
+ if state in [model.Dataset.states.OK] and not dataset.file_name:
+ log.error(
+                f"Dataset [{dataset.id}] does not exist on object store [{dataset.dataset.object_store_id or 'None'}] while trying to export."
+ )
+ raise Exception(
+ f"Cannot export history dataset [{getattr(dataset, 'hid', '')}: {dataset.name}] with id {self.exported_key(dataset)}"
+ )
+
def _finalize(self) -> None:
export_directory = self.export_directory
@@ -2957,7 +2969,6 @@ def source_to_import_store(
target_path, import_options=import_options, app=app, user=galaxy_user
)
else:
- # TODO: rocrate.zip is not supported here...
raise Exception(f"Unknown model_store_format type encountered {model_store_format}")
return model_import_store
diff --git a/lib/galaxy/selenium/navigates_galaxy.py b/lib/galaxy/selenium/navigates_galaxy.py
index c46db09dcb1f..4625aadc41b7 100644
--- a/lib/galaxy/selenium/navigates_galaxy.py
+++ b/lib/galaxy/selenium/navigates_galaxy.py
@@ -2101,6 +2101,11 @@ def share_with_user(
self.wait_for_xpath_visible(xpath)
self.screenshot_if(screenshot_after_submit)
+ def tutorial_mode_activate(self):
+ search_selector = "#gtn a"
+ self.wait_for_and_click_selector(search_selector)
+ self.wait_for_selector_visible("#gtn-screen")
+
class NotLoggedInException(SeleniumTimeoutException):
def __init__(self, timeout_exception, user_info, dom_message):
diff --git a/lib/galaxy/tool_util/client/staging.py b/lib/galaxy/tool_util/client/staging.py
index ca7d0bf64405..46ea37b5858c 100644
--- a/lib/galaxy/tool_util/client/staging.py
+++ b/lib/galaxy/tool_util/client/staging.py
@@ -40,6 +40,7 @@
DEFAULT_USE_FETCH_API = True
DEFAULT_FILE_TYPE = "auto"
DEFAULT_DBKEY = "?"
+DEFAULT_DECOMPRESS = False
class StagingInterface(metaclass=abc.ABCMeta):
@@ -104,6 +105,7 @@ def _attach_file(upload_payload: Dict[str, Any], uri: str, index: int = 0) -> Di
file_type=file_type,
dbkey=dbkey,
to_posix_lines=to_posix_lines,
+ decompress=upload_target.properties.get("decompress") or DEFAULT_DECOMPRESS,
)
name = _file_path_to_name(file_path)
if file_path is not None:
@@ -333,6 +335,8 @@ def _fetch_payload(history_id, file_type=DEFAULT_FILE_TYPE, dbkey=DEFAULT_DBKEY,
element[arg] = kwd[arg]
if "file_name" in kwd:
element["name"] = kwd["file_name"]
+ if "decompress" in kwd:
+ element["auto_decompress"] = kwd["decompress"]
target = {
"destination": {"type": "hdas"},
"elements": [element],
diff --git a/lib/galaxy/tool_util/cwl/util.py b/lib/galaxy/tool_util/cwl/util.py
index d78650ec41aa..18e40beb1e7c 100644
--- a/lib/galaxy/tool_util/cwl/util.py
+++ b/lib/galaxy/tool_util/cwl/util.py
@@ -232,6 +232,8 @@ def replacement_file(value):
kwd["tags"] = value.get("tags")
if "dbkey" in value:
kwd["dbkey"] = value.get("dbkey")
+ if "decompress" in value:
+ kwd["decompress"] = value["decompress"]
if composite_data_raw:
composite_data = []
for entry in composite_data_raw:
diff --git a/lib/galaxy/tool_util/linters/tests.py b/lib/galaxy/tool_util/linters/tests.py
index f23c9231153a..8e1856fe826d 100644
--- a/lib/galaxy/tool_util/linters/tests.py
+++ b/lib/galaxy/tool_util/linters/tests.py
@@ -185,23 +185,11 @@ def _check_asserts(test_idx, assertions, lint_ctx):
lint_ctx.error(f"Test {test_idx}: unknown assertion '{a.tag}'", node=a)
continue
assert_function_sig = signature(asserts.assertion_functions[assert_function_name])
- # check type of the attributes (int, float ...)
+        # check that the attributes are known
for attrib in a.attrib:
if attrib not in assert_function_sig.parameters:
lint_ctx.error(f"Test {test_idx}: unknown attribute '{attrib}' for '{a.tag}'", node=a)
continue
- annotation = assert_function_sig.parameters[attrib].annotation
- annotation = _handle_optionals(annotation)
- if annotation is not Parameter.empty:
- try:
- annotation(a.attrib[attrib])
- except TypeError:
- raise Exception(f"Faild to instantiate {attrib} for {assert_function_name}")
- except ValueError:
- lint_ctx.error(
- f"Test {test_idx}: attribute '{attrib}' for '{a.tag}' needs to be '{annotation.__name__}' got '{a.attrib[attrib]}'",
- node=a,
- )
# check missing required attributes
for p in assert_function_sig.parameters:
if p in ["output", "output_bytes", "verify_assertions_function", "children"]:
diff --git a/lib/galaxy/tool_util/verify/asserts/__init__.py b/lib/galaxy/tool_util/verify/asserts/__init__.py
index f37bc018e095..4e1555e91e3f 100644
--- a/lib/galaxy/tool_util/verify/asserts/__init__.py
+++ b/lib/galaxy/tool_util/verify/asserts/__init__.py
@@ -60,7 +60,7 @@ def verify_assertion(data: bytes, assertion_description):
# output. children is the parsed version of the child elements of
# the XML element describing this assertion. See
# assert_element_text in test/base/asserts/xml.py as an example of
- # how to use verify_assertions_function and children in conjuction
+ # how to use verify_assertions_function and children in conjunction
# to apply assertion checking to a subset of the input. The parsed
# version of an element's child elements does not need to just define
# assertions; developers of assertion functions can also use the
diff --git a/lib/galaxy/tool_util/verify/asserts/_util.py b/lib/galaxy/tool_util/verify/asserts/_util.py
index c8f6281c760f..2e3b604e98dc 100644
--- a/lib/galaxy/tool_util/verify/asserts/_util.py
+++ b/lib/galaxy/tool_util/verify/asserts/_util.py
@@ -1,10 +1,25 @@
from math import inf
+from typing import (
+ Callable,
+ Optional,
+ TypeVar,
+ Union,
+)
from galaxy.util import asbool
from galaxy.util.bytesize import parse_bytesize
-def _assert_number(count, n, delta, min, max, negate, n_text, min_max_text):
+def _assert_number(
+ count: int,
+ n: Optional[Union[int, str]],
+ delta: Union[int, str],
+ min: Optional[Union[int, str]],
+ max: Optional[Union[int, str]],
+ negate: Union[bool, str],
+ n_text: str,
+ min_max_text: str,
+) -> None:
"""
helper function for asserting that count is in
- [n-delta:n+delta]
@@ -26,12 +41,12 @@ def _assert_number(count, n, delta, min, max, negate, n_text, min_max_text):
)
if min is not None or max is not None:
if min is None:
- min = -inf # also replacing min/max for output
+ min = "-inf" # also replacing min/max for output
min_bytes = -inf
else:
min_bytes = parse_bytesize(min)
if max is None:
- max = inf
+ max = "inf"
max_bytes = inf
else:
max_bytes = parse_bytesize(max)
@@ -41,9 +56,24 @@ def _assert_number(count, n, delta, min, max, negate, n_text, min_max_text):
)
+OutputType = TypeVar("OutputType")
+TextType = TypeVar("TextType")
+
+
def _assert_presence_number(
- output, text, n, delta, min, max, negate, check_presence_foo, count_foo, presence_text, n_text, min_max_text
-):
+ output: OutputType,
+ text: TextType,
+ n: Optional[Union[int, str]],
+ delta: Union[int, str],
+ min: Optional[Union[int, str]],
+ max: Optional[Union[int, str]],
+ negate: Union[bool, str],
+ check_presence_foo: Callable[[OutputType, TextType], bool],
+ count_foo: Callable[[OutputType, TextType], int],
+ presence_text: str,
+ n_text: str,
+ min_max_text: str,
+) -> None:
"""
helper function to assert that
- text is present in output using check_presence_foo
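
The widened Union[int, str] annotations reflect that assertion attributes arrive as raw XML strings. A quick sketch of how string-valued sizes are normalized before the numeric comparison, using parse_bytesize, the helper imported above:

    from galaxy.util.bytesize import parse_bytesize

    # "1k" and "500k" style values come straight from test XML attributes;
    # parse_bytesize turns them into integers so min/max comparisons work.
    assert parse_bytesize("1k") < parse_bytesize("500k")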
diff --git a/lib/galaxy/tool_util/verify/asserts/archive.py b/lib/galaxy/tool_util/verify/asserts/archive.py
index ae00a36168ed..5ddf786a1053 100644
--- a/lib/galaxy/tool_util/verify/asserts/archive.py
+++ b/lib/galaxy/tool_util/verify/asserts/archive.py
@@ -3,14 +3,17 @@
import tarfile
import tempfile
import zipfile
-from typing import Optional
+from typing import (
+ Optional,
+ Union,
+)
from galaxy.util import asbool
from ._util import _assert_presence_number
-def _extract_from_tar(bytes, fn):
- with io.BytesIO(bytes) as temp:
+def _extract_from_tar(output_bytes, fn):
+ with io.BytesIO(output_bytes) as temp:
with tarfile.open(fileobj=temp, mode="r") as tar_temp:
ti = tar_temp.getmember(fn)
# zip treats directories like empty files.
@@ -23,9 +26,9 @@ def _extract_from_tar(bytes, fn):
return member_fh.read()
-def _list_from_tar(bytes, path):
+def _list_from_tar(output_bytes, path):
lst = list()
- with io.BytesIO(bytes) as temp:
+ with io.BytesIO(output_bytes) as temp:
with tarfile.open(fileobj=temp, mode="r") as tar_temp:
for fn in tar_temp.getnames():
if not re.match(path, fn):
@@ -34,16 +37,16 @@ def _list_from_tar(bytes, path):
return sorted(lst)
-def _extract_from_zip(bytes, fn):
- with io.BytesIO(bytes) as temp:
+def _extract_from_zip(output_bytes, fn):
+ with io.BytesIO(output_bytes) as temp:
with zipfile.ZipFile(temp, mode="r") as zip_temp:
with zip_temp.open(fn) as member_fh:
return member_fh.read()
-def _list_from_zip(bytes, path):
+def _list_from_zip(output_bytes, path):
lst = list()
- with io.BytesIO(bytes) as temp:
+ with io.BytesIO(output_bytes) as temp:
with zipfile.ZipFile(temp, mode="r") as zip_temp:
for fn in zip_temp.namelist():
if not re.match(path, fn):
@@ -53,17 +56,17 @@ def _list_from_zip(bytes, path):
def assert_has_archive_member(
- output_bytes,
- path,
+ output_bytes: bytes,
+ path: str,
verify_assertions_function,
children,
- all="false",
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ all: Union[bool, str] = False,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""Recursively checks the specified children assertions against the text of
the first element matching the specified path found within the archive.
Currently supported formats: .zip, .tar, .tar.gz."""
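
A self-contained illustration of the renamed helpers (a sketch that mirrors _extract_from_zip rather than calling it): build a zip archive in memory and read a member back.

    import io
    import zipfile

    # Build an archive in memory...
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, mode="w") as zf:
        zf.writestr("data/readme.txt", "hello")
    output_bytes = buf.getvalue()

    # ...and extract a member the same way _extract_from_zip does.
    with io.BytesIO(output_bytes) as temp:
        with zipfile.ZipFile(temp, mode="r") as zip_temp:
            with zip_temp.open("data/readme.txt") as member_fh:
                assert member_fh.read() == b"hello"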
diff --git a/lib/galaxy/tool_util/verify/asserts/hdf5.py b/lib/galaxy/tool_util/verify/asserts/hdf5.py
index 8eab9f747945..e94c61c3af04 100644
--- a/lib/galaxy/tool_util/verify/asserts/hdf5.py
+++ b/lib/galaxy/tool_util/verify/asserts/hdf5.py
@@ -13,7 +13,7 @@ def _assert_h5py():
raise Exception(IMPORT_MISSING_MESSAGE)
-def assert_has_h5_attribute(output_bytes, key, value):
+def assert_has_h5_attribute(output_bytes: bytes, key: str, value: str) -> None:
"""Asserts the specified HDF5 output has a given key-value pair as HDF5
attribute"""
_assert_h5py()
@@ -25,11 +25,10 @@ def assert_has_h5_attribute(output_bytes, key, value):
# TODO the function actually queries groups. so the function and argument name are misleading
-def assert_has_h5_keys(output_bytes, keys):
+def assert_has_h5_keys(output_bytes: bytes, keys: str) -> None:
"""Asserts the specified HDF5 output has the given keys."""
_assert_h5py()
- keys = [k.strip() for k in keys.strip().split(",")]
- h5_keys = sorted(keys)
+ h5_keys = sorted([k.strip() for k in keys.strip().split(",")])
output_temp = io.BytesIO(output_bytes)
local_keys = []
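
The refactor collapses the key parsing into a single expression; its behavior on a typical comma-separated attribute value:

    # Whitespace around commas is stripped, then the keys are sorted.
    keys = " entry , entry/table "
    h5_keys = sorted([k.strip() for k in keys.strip().split(",")])
    assert h5_keys == ["entry", "entry/table"]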
diff --git a/lib/galaxy/tool_util/verify/asserts/size.py b/lib/galaxy/tool_util/verify/asserts/size.py
index 089ba5492276..de20309f99ed 100644
--- a/lib/galaxy/tool_util/verify/asserts/size.py
+++ b/lib/galaxy/tool_util/verify/asserts/size.py
@@ -1,16 +1,19 @@
-from typing import Optional
+from typing import (
+ Optional,
+ Union,
+)
from ._util import _assert_number
def assert_has_size(
- output_bytes,
- value: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ output_bytes: bytes,
+ value: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""
Asserts the specified output has a size of the specified value,
allowing for absolute (delta) and relative (delta_frac) difference.
diff --git a/lib/galaxy/tool_util/verify/asserts/tabular.py b/lib/galaxy/tool_util/verify/asserts/tabular.py
index 6679fa9c06b5..b7278ec1d979 100644
--- a/lib/galaxy/tool_util/verify/asserts/tabular.py
+++ b/lib/galaxy/tool_util/verify/asserts/tabular.py
@@ -1,10 +1,13 @@
import re
-from typing import Optional
+from typing import (
+ Optional,
+ Union,
+)
from ._util import _assert_number
-def get_first_line(output, comment):
+def get_first_line(output: str, comment: str) -> str:
"""
get the first non-comment and non-empty line
"""
@@ -19,15 +22,15 @@ def get_first_line(output, comment):
def assert_has_n_columns(
- output,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- sep="\t",
- comment="",
- negate: bool = False,
-):
+ output: str,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ sep: str = "\t",
+ comment: str = "",
+ negate: Union[bool, str] = False,
+) -> None:
"""Asserts the tabular output contains n columns. The optional
sep argument specifies the column separator used to determine the
number of columns. The optional comment argument specifies
diff --git a/lib/galaxy/tool_util/verify/asserts/text.py b/lib/galaxy/tool_util/verify/asserts/text.py
index 1d6244ad13a6..e7aa1bc37e4d 100644
--- a/lib/galaxy/tool_util/verify/asserts/text.py
+++ b/lib/galaxy/tool_util/verify/asserts/text.py
@@ -1,5 +1,8 @@
import re
-from typing import Optional
+from typing import (
+ Optional,
+ Union,
+)
from ._util import (
_assert_number,
@@ -8,14 +11,14 @@
def assert_has_text(
- output,
- text,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ output: str,
+ text: str,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""Asserts specified output contains the substring specified by
the argument text. The exact number of occurrences can be
optionally specified by the argument n"""
@@ -36,7 +39,7 @@ def assert_has_text(
)
-def assert_not_has_text(output, text):
+def assert_not_has_text(output: str, text: str) -> None:
"""Asserts specified output does not contain the substring
specified by the argument text"""
assert output is not None, "Checking not_has_text assertion on empty output (None)"
@@ -44,14 +47,14 @@ def assert_not_has_text(output, text):
def assert_has_line(
- output,
- line,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ output: str,
+ line: str,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""Asserts the specified output contains the line specified by the
argument line. The exact number of occurrences can be optionally
specified by the argument n"""
@@ -73,13 +76,13 @@ def assert_has_line(
def assert_has_n_lines(
- output,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ output: str,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""Asserts the specified output contains ``n`` lines allowing
for a difference in the number of lines (delta)
or relative difference in the number of lines"""
@@ -98,14 +101,14 @@ def assert_has_n_lines(
def assert_has_text_matching(
- output,
- expression,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ output: str,
+ expression: str,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""Asserts the specified output contains text matching the
regular expression specified by the argument expression.
If n is given the assertion checks for exactly n (non-overlapping)
@@ -128,14 +131,14 @@ def assert_has_text_matching(
def assert_has_line_matching(
- output,
- expression,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ output: str,
+ expression: str,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""Asserts the specified output contains a line matching the
regular expression specified by the argument expression. If n is given
the assertion checks for exactly n occurrences."""
diff --git a/lib/galaxy/tool_util/verify/asserts/xml.py b/lib/galaxy/tool_util/verify/asserts/xml.py
index 14d858d38bb0..bdb53e3913e8 100644
--- a/lib/galaxy/tool_util/verify/asserts/xml.py
+++ b/lib/galaxy/tool_util/verify/asserts/xml.py
@@ -1,5 +1,8 @@
import re
-from typing import Optional
+from typing import (
+ Optional,
+ Union,
+)
from lxml.etree import XMLSyntaxError
@@ -11,7 +14,7 @@
)
-def assert_is_valid_xml(output):
+def assert_is_valid_xml(output: str) -> None:
"""Simple assertion that just verifies the specified output
is valid XML."""
try:
@@ -20,7 +23,7 @@ def assert_is_valid_xml(output):
raise AssertionError(f"Expected valid XML, but could not parse output. {unicodify(e)}")
-def assert_has_element_with_path(output, path, negate: bool = False):
+def assert_has_element_with_path(output: str, path: str, negate: Union[bool, str] = False) -> None:
"""Asserts the specified output has at least one XML element with a
path matching the specified path argument. Valid paths are the
simplified subsets of XPath implemented by lxml.etree;
@@ -29,64 +32,68 @@ def assert_has_element_with_path(output, path, negate: bool = False):
def assert_has_n_elements_with_path(
- output,
- path,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ output: str,
+ path: str,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""Asserts the specified output has exactly n elements matching the
path specified."""
assert_xml_element(output, path, n=n, delta=delta, min=min, max=max, negate=negate)
-def assert_element_text_matches(output, path, expression, negate: bool = False):
+def assert_element_text_matches(output: str, path: str, expression: str, negate: Union[bool, str] = False) -> None:
"""Asserts the text of the first element matching the specified
path matches the specified regular expression."""
sub = {"tag": "has_text_matching", "attributes": {"expression": expression, "negate": negate}}
assert_xml_element(output, path, asserts.verify_assertions, [sub])
-def assert_element_text_is(output, path, text, negate: bool = False):
+def assert_element_text_is(output: str, path: str, text: str, negate: Union[bool, str] = False) -> None:
"""Asserts the text of the first element matching the specified
path matches exactly the specified text."""
assert_element_text_matches(output, path, re.escape(text) + "$", negate=negate)
-def assert_attribute_matches(output, path, attribute, expression, negate: bool = False):
+def assert_attribute_matches(
+ output: str, path: str, attribute, expression: str, negate: Union[bool, str] = False
+) -> None:
"""Asserts the specified attribute of the first element matching
the specified path matches the specified regular expression."""
sub = {"tag": "has_text_matching", "attributes": {"expression": expression, "negate": negate}}
assert_xml_element(output, path, asserts.verify_assertions, [sub], attribute=attribute)
-def assert_attribute_is(output, path, attribute, text, negate: bool = False):
+def assert_attribute_is(output: str, path: str, attribute: str, text, negate: Union[bool, str] = False) -> None:
"""Asserts the specified attribute of the first element matching
the specified path matches exactly the specified text."""
assert_attribute_matches(output, path, attribute, re.escape(text) + "$", negate=negate)
-def assert_element_text(output, path, verify_assertions_function, children, negate: bool = False):
+def assert_element_text(
+ output: str, path: str, verify_assertions_function, children, negate: Union[bool, str] = False
+) -> None:
"""Recursively checks the specified assertions against the text of
the first element matching the specified path."""
assert_xml_element(output, path, verify_assertions_function, children, negate=negate)
def assert_xml_element(
- output,
- path,
+ output: str,
+ path: str,
verify_assertions_function=None,
children=None,
- attribute=None,
- all=False,
- n: Optional[int] = None,
- delta: int = 0,
- min: Optional[int] = None,
- max: Optional[int] = None,
- negate: bool = False,
-):
+ attribute: Optional[str] = None,
+ all: Union[bool, str] = False,
+ n: Optional[Union[int, str]] = None,
+ delta: Union[int, str] = 0,
+ min: Optional[Union[int, str]] = None,
+ max: Optional[Union[int, str]] = None,
+ negate: Union[bool, str] = False,
+) -> None:
"""
Check if path occurs in the xml. If n and delta or min and max are given
the number of occurrences is also checked.
diff --git a/lib/galaxy/tools/__init__.py b/lib/galaxy/tools/__init__.py
index 94ec48c91974..cb3793e95c0c 100644
--- a/lib/galaxy/tools/__init__.py
+++ b/lib/galaxy/tools/__init__.py
@@ -2560,7 +2560,9 @@ def to_json(self, trans, kwd=None, job=None, workflow_building_mode=False, histo
history = history or trans.get_history()
if history is None and job is not None:
history = self.history_manager.get_owned(job.history.id, trans.user, current_history=trans.history)
- if history is None:
+ # We can show the tool form if the current user is anonymous and doesn't have a history
+ user = trans.get_user()
+ if history is None and user is not None:
raise exceptions.MessageException("History unavailable. Please specify a valid history id")
# build request context
@@ -2582,12 +2584,6 @@ def to_json(self, trans, kwd=None, job=None, workflow_building_mode=False, histo
# create parameter object
params = Params(kwd, sanitize=False)
- # expand incoming parameters (parameters might trigger multiple tool executions,
- # here we select the first execution only in order to resolve dynamic parameters)
- expanded_incomings, _ = expand_meta_parameters(trans, self, params.__dict__)
- if expanded_incomings:
- params.__dict__ = expanded_incomings[0]
-
# do param translation here, used by datasource tools
if self.input_translator:
self.input_translator.translate(params)
diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py
index 15d5df5a6e1f..8c5305be6cf4 100644
--- a/lib/galaxy/tools/parameters/basic.py
+++ b/lib/galaxy/tools/parameters/basic.py
@@ -2092,6 +2092,9 @@ def from_json(self, value, trans, other_values=None):
elif single_value["src"] == "ldda":
decoded_id = trans.security.decode_id(single_value["id"])
rval.append(trans.sa_session.query(LibraryDatasetDatasetAssociation).get(decoded_id))
+ elif single_value["src"] == "dce":
+ decoded_id = trans.security.decode_id(single_value["id"])
+ rval.append(trans.sa_session.query(DatasetCollectionElement).get(decoded_id))
else:
raise ValueError(f"Unknown input source {single_value['src']} passed to job submission API.")
elif isinstance(
@@ -2127,6 +2130,9 @@ def from_json(self, value, trans, other_values=None):
elif value["src"] == "hdca":
decoded_id = trans.security.decode_id(value["id"])
rval.append(trans.sa_session.query(HistoryDatasetCollectionAssociation).get(decoded_id))
+ elif value["src"] == "dce":
+ decoded_id = trans.security.decode_id(value["id"])
+ rval.append(trans.sa_session.query(DatasetCollectionElement).get(decoded_id))
else:
raise ValueError(f"Unknown input source {value['src']} passed to job submission API.")
elif str(value).startswith("__collection_reduce__|"):
@@ -2280,6 +2286,28 @@ def append(list, hda, name, src, keep=False, subcollection_type=None):
value["map_over_type"] = subcollection_type
return list.append(value)
+ def append_dce(dce):
+ if dce.hda:
+ # well this isn't good, but what's the alternative?
+ # we should be precise about what we're (re-)running here.
+ key = "hda"
+ else:
+ key = "hdca"
+ d["options"][key].append(
+ {
+ "id": trans.security.encode_id(dce.id),
+ "name": dce.element_identifier,
+ "src": "dce",
+ "tags": [],
+ "keep": True,
+ }
+ )
+
+ # append DCE
+ if isinstance(other_values.get(self.name), DatasetCollectionElement):
+ dce = other_values[self.name]
+ append_dce(dce)
+
# add datasets
hda_list = util.listify(other_values.get(self.name))
# Prefetch all at once, big list of visible, non-deleted datasets.
@@ -2299,6 +2327,8 @@ def append(list, hda, name, src, keep=False, subcollection_type=None):
else:
hda_state = "unavailable"
append(d["options"]["hda"], hda, f"({hda_state}) {hda.name}", "hda", True)
+ elif isinstance(hda, DatasetCollectionElement):
+ append_dce(hda)
# add dataset collections
dataset_collection_matcher = dataset_matcher_factory.dataset_collection_matcher(dataset_matcher)
@@ -2460,7 +2490,7 @@ def to_dict(self, trans, other_values=None):
# append DCE
if isinstance(other_values.get(self.name), DatasetCollectionElement):
dce = other_values[self.name]
- d["options"]["dce"].append(
+ d["options"]["hdca"].append(
{
"id": trans.security.encode_id(dce.id),
"hid": None,
diff --git a/lib/galaxy/web/framework/__init__.py b/lib/galaxy/web/framework/__init__.py
index 0170f095e436..a62442893e9b 100644
--- a/lib/galaxy/web/framework/__init__.py
+++ b/lib/galaxy/web/framework/__init__.py
@@ -26,7 +26,13 @@ def legacy_url_for(mapper, *args, **kwargs) -> str:
Re-establishes the mapper for legacy WSGI routes.
"""
rc = request_config()
+ environ = kwargs.pop("environ", None)
rc.mapper = mapper
+ if environ:
+ rc.environ = environ
+ if hasattr(rc, "using_request_local"):
+ rc.request_local = lambda: rc
+ rc = request_config()
return base.routes.url_for(*args, **kwargs)
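
A hypothetical call site for the extended signature (not part of the diff): a code path holding only a fallback WSGI environ can now thread it through to the legacy mapper.

    # mapper and trans are assumed to be in scope; environ is the fallback
    # WSGI environ exposed by GalaxyASGIRequest further below.
    url = legacy_url_for(
        mapper,
        controller="dataset",
        action="display",
        environ=trans.request.environ,
    )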
diff --git a/lib/galaxy/web/framework/base.py b/lib/galaxy/web/framework/base.py
index ee1693315dc1..361524d94b10 100644
--- a/lib/galaxy/web/framework/base.py
+++ b/lib/galaxy/web/framework/base.py
@@ -15,6 +15,7 @@
SimpleCookie,
)
from importlib import import_module
+from urllib.parse import urljoin
import routes
import webob.compat
@@ -430,6 +431,10 @@ def cookies(self):
def base(self):
return f"{self.scheme}://{self.host}"
+ @lazy_property
+ def url_path(self):
+ return urljoin(self.base, self.environ.get("SCRIPT_NAME", ""))
+
# @lazy_property
# def params( self ):
# return parse_formvars( self.environ )
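
A sketch of what the new url_path evaluates to behind a proxy prefix, assuming SCRIPT_NAME carries that prefix:

    from urllib.parse import urljoin

    base = "https://usegalaxy.example"
    script_name = "/galaxypf"  # hypothetical proxy prefix
    assert urljoin(base, script_name) == "https://usegalaxy.example/galaxypf"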
diff --git a/lib/galaxy/webapps/galaxy/api/__init__.py b/lib/galaxy/webapps/galaxy/api/__init__.py
index c6723c58b262..78651b18f09e 100644
--- a/lib/galaxy/webapps/galaxy/api/__init__.py
+++ b/lib/galaxy/webapps/galaxy/api/__init__.py
@@ -8,14 +8,19 @@
Any,
AsyncGenerator,
cast,
+ MutableMapping,
NamedTuple,
Optional,
Tuple,
Type,
TypeVar,
)
-from urllib.parse import urlencode
+from urllib.parse import (
+ urlencode,
+ urljoin,
+)
+from a2wsgi.wsgi import build_environ
from fastapi import (
Form,
Header,
@@ -194,17 +199,38 @@ class GalaxyASGIRequest(GalaxyAbstractRequest):
def __init__(self, request: Request):
self.__request = request
+ self.__environ: Optional[MutableMapping[str, Any]] = None
@property
def base(self) -> str:
return str(self.__request.base_url)
+ @property
+ def url_path(self) -> str:
+ scope = self.__request.scope
+ root_path = scope.get("root_path")
+ url = self.base
+ if root_path:
+ url = urljoin(url, root_path)
+ return url
+
@property
def host(self) -> str:
client = self.__request.client
assert client is not None
return str(client.host)
+ @property
+ def environ(self) -> MutableMapping[str, Any]:
+ """
+ Fallback WSGI environ.
+
+ This is not a full environ (there is no body); it is only meant to make routes.url_for work.
+ """
+ if self.__environ is None:
+ self.__environ = build_environ(self.__request.scope, None) # type: ignore[arg-type]
+ return self.__environ
+
class GalaxyASGIResponse(GalaxyAbstractResponse):
"""Wrapper around Starlette/FastAPI Response object.
diff --git a/lib/galaxy/webapps/galaxy/api/dataset_collections.py b/lib/galaxy/webapps/galaxy/api/dataset_collections.py
index 79818f1ef15a..5e9d61d1a314 100644
--- a/lib/galaxy/webapps/galaxy/api/dataset_collections.py
+++ b/lib/galaxy/webapps/galaxy/api/dataset_collections.py
@@ -13,6 +13,7 @@
AnyHDCA,
CreateNewCollectionPayload,
DatasetCollectionInstanceType,
+ DCESummary,
HDCADetailed,
)
from galaxy.webapps.galaxy.api import (
@@ -36,6 +37,11 @@
..., description="The encoded identifier of the dataset collection."
)
+
+DatasetCollectionElementIdPathParam: DecodedDatabaseIdField = Path(
+ ..., description="The encoded identifier of the dataset collection element."
+)
+
InstanceTypeQueryParam: DatasetCollectionInstanceType = Query(
default="history",
description="The type of collection instance. Either `history` (default) or `library`.",
@@ -129,3 +135,11 @@ def contents(
),
) -> DatasetCollectionContentElements:
return self.service.contents(trans, hdca_id, parent_id, instance_type, limit, offset)
+
+ @router.get("/api/dataset_collection_element/{dce_id}")
+ def content(
+ self,
+ trans: ProvidesHistoryContext = DependsOnTrans,
+ dce_id: DecodedDatabaseIdField = DatasetCollectionElementIdPathParam,
+ ) -> DCESummary:
+ return self.service.dce_content(trans, dce_id)
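
A hypothetical client call against the new endpoint; admin users bypass the access check, everyone else needs access to the owning collection (see the service change further below). The base URL, encoded id, and API key are placeholders:

    import requests

    response = requests.get(
        "https://usegalaxy.example/api/dataset_collection_element/ENCODED_ID",
        headers={"x-api-key": "YOUR_API_KEY"},
    )
    response.raise_for_status()
    assert response.json()["model_class"] == "DatasetCollectionElement"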
diff --git a/lib/galaxy/webapps/galaxy/api/workflows.py b/lib/galaxy/webapps/galaxy/api/workflows.py
index 33794507d4c3..7d4fd6e66e8c 100644
--- a/lib/galaxy/webapps/galaxy/api/workflows.py
+++ b/lib/galaxy/webapps/galaxy/api/workflows.py
@@ -1499,7 +1499,7 @@ def _deprecated_generate_bco(
self, trans, invocation_id: DecodedDatabaseIdField, merge_history_metadata: Optional[bool]
):
export_options = BcoExportOptions(
- galaxy_url=trans.request.base,
+ galaxy_url=trans.request.url_path,
galaxy_version=VERSION,
merge_history_metadata=merge_history_metadata or False,
)
diff --git a/lib/galaxy/webapps/galaxy/controllers/async.py b/lib/galaxy/webapps/galaxy/controllers/async.py
index 8a51d246a007..f048db145014 100644
--- a/lib/galaxy/webapps/galaxy/controllers/async.py
+++ b/lib/galaxy/webapps/galaxy/controllers/async.py
@@ -70,7 +70,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd):
data.state = data.blurb = data.states.RUNNING
log.debug(f"executing tool {tool.id}")
trans.log_event(f"Async executing tool {tool.id}", tool_id=tool.id)
- galaxy_url = f"{trans.request.base}/async/{tool_id}/{data.id}/{key}"
+ galaxy_url = f"{trans.request.url_path}/async/{tool_id}/{data.id}/{key}"
galaxy_url = params.get("GALAXY_URL", galaxy_url)
params = dict(
URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext
@@ -163,7 +163,7 @@ def index(self, trans, tool_id=None, data_secret=None, **kwd):
try:
key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
- galaxy_url = f"{trans.request.base}/async/{tool_id}/{data.id}/{key}"
+ galaxy_url = f"{trans.request.url_path}/async/{tool_id}/{data.id}/{key}"
params.update({"GALAXY_URL": galaxy_url})
params.update({"data_id": data.id})
diff --git a/lib/galaxy/webapps/galaxy/controllers/authnz.py b/lib/galaxy/webapps/galaxy/controllers/authnz.py
index 656521e0fea6..71bd0fbcf7e4 100644
--- a/lib/galaxy/webapps/galaxy/controllers/authnz.py
+++ b/lib/galaxy/webapps/galaxy/controllers/authnz.py
@@ -145,7 +145,7 @@ def create_user(self, trans, provider, **kwargs):
)
except exceptions.AuthenticationFailed as e:
return trans.response.send_redirect(
- f"{trans.request.base + url_for('/')}root/login?message={str(e) or 'Duplicate Email'}"
+ f"{trans.request.url_path + url_for('/')}root/login?message={str(e) or 'Duplicate Email'}"
)
if success is False:
diff --git a/lib/galaxy/webapps/galaxy/fast_app.py b/lib/galaxy/webapps/galaxy/fast_app.py
index a527de7eee44..997ec5b2b4c6 100644
--- a/lib/galaxy/webapps/galaxy/fast_app.py
+++ b/lib/galaxy/webapps/galaxy/fast_app.py
@@ -141,13 +141,14 @@ def include_legacy_openapi(app, gx_app):
return app.openapi_schema
-def get_fastapi_instance() -> FastAPI:
+def get_fastapi_instance(root_path="") -> FastAPI:
return FastAPI(
title="Galaxy API",
docs_url="/api/docs",
redoc_url="/api/redoc",
openapi_tags=api_tags_metadata,
license_info={"name": "MIT", "url": "https://github.com/galaxyproject/galaxy/blob/dev/LICENSE.txt"},
+ root_path=root_path,
)
@@ -168,7 +169,8 @@ def get_openapi_schema() -> Dict[str, Any]:
def initialize_fast_app(gx_wsgi_webapp, gx_app):
- app = get_fastapi_instance()
+ root_path = "" if gx_app.config.galaxy_url_prefix == "/" else gx_app.config.galaxy_url_prefix
+ app = get_fastapi_instance(root_path=root_path)
add_exception_handler(app)
add_galaxy_middleware(app, gx_app)
add_request_id_middleware(app)
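
The prefix normalization as a one-line sketch: FastAPI expects root_path="" for "no mount prefix", while galaxy_url_prefix apparently uses "/" for that case.

    def normalize_prefix(galaxy_url_prefix: str) -> str:
        # "/" means "served at the root", which FastAPI expects as "".
        return "" if galaxy_url_prefix == "/" else galaxy_url_prefix

    assert normalize_prefix("/") == ""
    assert normalize_prefix("/galaxypf") == "/galaxypf"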
diff --git a/lib/galaxy/webapps/galaxy/services/dataset_collections.py b/lib/galaxy/webapps/galaxy/services/dataset_collections.py
index 17e17fea5a9d..ac313b2fcb1f 100644
--- a/lib/galaxy/webapps/galaxy/services/dataset_collections.py
+++ b/lib/galaxy/webapps/galaxy/services/dataset_collections.py
@@ -25,6 +25,7 @@
from galaxy.managers.context import ProvidesHistoryContext
from galaxy.managers.hdcas import HDCAManager
from galaxy.managers.histories import HistoryManager
+from galaxy.model import DatasetCollectionElement
from galaxy.schema.fields import (
DecodedDatabaseIdField,
ModelClassField,
@@ -208,6 +209,17 @@ def show(
)
return rval
+ def dce_content(self, trans: ProvidesHistoryContext, dce_id: DecodedDatabaseIdField) -> DCESummary:
+ dce: Optional[DatasetCollectionElement] = trans.model.session.query(DatasetCollectionElement).get(dce_id)
+ if not dce:
+ raise exceptions.ObjectNotFound("No DatasetCollectionElement found")
+ if not trans.user_is_admin:
+ collection = dce.child_collection or dce.collection
+ if not trans.app.security_agent.can_access_collection(trans.get_current_user_roles(), collection):
+ raise exceptions.ItemAccessibilityException("Collection not accessible by user.")
+ serialized_dce = dictify_element_reference(dce, recursive=False, security=trans.security)
+ return trans.security.encode_all_ids(serialized_dce, recursive=True)
+
def contents(
self,
trans: ProvidesHistoryContext,
diff --git a/lib/galaxy/webapps/galaxy/services/invocations.py b/lib/galaxy/webapps/galaxy/services/invocations.py
index 44d0655a3fee..9c6130b00853 100644
--- a/lib/galaxy/webapps/galaxy/services/invocations.py
+++ b/lib/galaxy/webapps/galaxy/services/invocations.py
@@ -167,7 +167,7 @@ def prepare_store_download(
short_term_storage_request_id=short_term_storage_target.request_id,
user=trans.async_request_user,
invocation_id=workflow_invocation.id,
- galaxy_url=trans.request.base,
+ galaxy_url=trans.request.url_path,
**payload.dict(),
)
result = prepare_invocation_download.delay(request=request)
@@ -181,7 +181,7 @@ def write_store(
if not workflow_invocation:
raise ObjectNotFound()
request = WriteInvocationTo(
- galaxy_url=trans.request.base,
+ galaxy_url=trans.request.url_path,
user=trans.async_request_user,
invocation_id=workflow_invocation.id,
**payload.dict(),
diff --git a/lib/galaxy_test/api/test_dataset_collections.py b/lib/galaxy_test/api/test_dataset_collections.py
index 168713509f57..c7601e2242a2 100644
--- a/lib/galaxy_test/api/test_dataset_collections.py
+++ b/lib/galaxy_test/api/test_dataset_collections.py
@@ -198,6 +198,40 @@ def test_hda_security(self):
create_response = self._post("dataset_collections", payload, json=True)
self._assert_status_code_is(create_response, 403)
+ def test_dataset_collection_element_security(self):
+ with self.dataset_populator.test_history(require_new=False) as history_id:
+ dataset_collection = self.dataset_collection_populator.create_list_of_list_in_history(
+ history_id,
+ collection_type="list:list:list",
+ wait=True,
+ ).json()
+ first_element = dataset_collection["elements"][0]
+ assert first_element["model_class"] == "DatasetCollectionElement"
+ assert first_element["element_type"] == "dataset_collection"
+ first_element_url = f"/api/dataset_collection_element/{first_element['id']}"
+ # Make one dataset private to check that access permissions are respected
+ first_dataset_element = first_element["object"]["elements"][0]["object"]["elements"][0]
+ self.dataset_populator.make_private(history_id, first_dataset_element["object"]["id"])
+ with self._different_user():
+ assert self._get(first_element_url).status_code == 403
+ collection_dce_response = self._get(first_element_url)
+ collection_dce_response.raise_for_status()
+ collection_dce = collection_dce_response.json()
+ assert collection_dce["model_class"] == "DatasetCollectionElement"
+ assert collection_dce["element_type"] == "dataset_collection"
+ first_dataset_element = first_element["object"]["elements"][0]["object"]["elements"][0]
+ assert first_dataset_element["model_class"] == "DatasetCollectionElement"
+ assert first_dataset_element["element_type"] == "hda"
+ first_dataset_element_url = f"/api/dataset_collection_element/{first_dataset_element['id']}"
+ with self._different_user():
+ assert self._get(first_dataset_element_url).status_code == 403
+ dataset_dce_response = self._get(first_dataset_element_url)
+ dataset_dce_response.raise_for_status()
+ dataset_dce = dataset_dce_response.json()
+ assert dataset_dce["model_class"] == "DatasetCollectionElement"
+ assert dataset_dce["element_type"] == "hda"
+ assert dataset_dce["object"]["model_class"] == "HistoryDatasetAssociation"
+
def test_enforces_unique_names(self):
with self.dataset_populator.test_history(require_new=False) as history_id:
element_identifiers = self.dataset_collection_populator.list_identifiers(history_id)
diff --git a/lib/galaxy_test/api/test_histories.py b/lib/galaxy_test/api/test_histories.py
index 63750fff875e..096038657b26 100644
--- a/lib/galaxy_test/api/test_histories.py
+++ b/lib/galaxy_test/api/test_histories.py
@@ -140,6 +140,42 @@ def test_index_query(self):
assert len(index_response) == 1
assert index_response[0]["name"] == expected_history_name
+ def test_index_case_insensitive_contains_query(self):
+ # Create the histories with a different user to ensure the test
+ # is not conflicted with the current user's histories.
+ with self._different_user(f"user_{uuid4()}@bx.psu.edu"):
+ unique_id = uuid4()
+ expected_history_name = f"Test History That Match Query_{unique_id}"
+ self._create_history(expected_history_name)
+ self._create_history(expected_history_name.upper())
+ self._create_history(expected_history_name.lower())
+ self._create_history(f"Another history_{uuid4()}")
+
+ name_contains = "history"
+ query = f"?q=name-contains&qv={name_contains}"
+ index_response = self._get(f"histories{query}").json()
+ assert len(index_response) == 4
+
+ name_contains = "history that match query"
+ query = f"?q=name-contains&qv={name_contains}"
+ index_response = self._get(f"histories{query}").json()
+ assert len(index_response) == 3
+
+ name_contains = "ANOTHER"
+ query = f"?q=name-contains&qv={name_contains}"
+ index_response = self._get(f"histories{query}").json()
+ assert len(index_response) == 1
+
+ name_contains = "test"
+ query = f"?q=name-contains&qv={name_contains}"
+ index_response = self._get(f"histories{query}").json()
+ assert len(index_response) == 3
+
+ name_contains = unique_id
+ query = f"?q=name-contains&qv={name_contains}"
+ index_response = self._get(f"histories{query}").json()
+ assert len(index_response) == 3
+
def test_delete(self):
# Setup a history and ensure it is in the index
history_id = self._create_history("TestHistoryForDelete")["id"]
diff --git a/lib/galaxy_test/api/test_tools_upload.py b/lib/galaxy_test/api/test_tools_upload.py
index db0d5fe27305..3a4a60bb52fc 100644
--- a/lib/galaxy_test/api/test_tools_upload.py
+++ b/lib/galaxy_test/api/test_tools_upload.py
@@ -280,6 +280,40 @@ def test_newlines_stage_fetch_configured(self, history_id: str) -> None:
details = self.dataset_populator.get_history_dataset_details(history_id=history_id, dataset=dataset)
assert details["genome_build"] == "hg19"
+ @skip_if_github_down
+ def test_stage_fetch_decompress_true(self, history_id: str) -> None:
+ job = {
+ "input1": {
+ "class": "File",
+ "format": "fasta",
+ "location": "https://github.com/galaxyproject/galaxy/blob/dev/test-data/1.fasta.gz?raw=true",
+ "decompress": True,
+ }
+ }
+ inputs, datasets = stage_inputs(
+ self.galaxy_interactor, history_id, job, use_path_paste=False, to_posix_lines=False
+ )
+ dataset = datasets[0]
+ content = self.dataset_populator.get_history_dataset_content(history_id=history_id, dataset=dataset)
+ assert content.startswith(">hg17")
+
+ @skip_if_github_down
+ def test_stage_fetch_decompress_false(self, history_id: str) -> None:
+ job = {
+ "input1": {
+ "class": "File",
+ "format": "fasta",
+ "location": "https://github.com/galaxyproject/galaxy/blob/dev/test-data/1.fasta.gz?raw=true",
+ "decompress": False,
+ }
+ }
+ inputs, datasets = stage_inputs(
+ self.galaxy_interactor, history_id, job, use_path_paste=False, to_posix_lines=False
+ )
+ dataset = datasets[0]
+ content = self.dataset_populator.get_history_dataset_content(history_id=history_id, dataset=dataset)
+ assert not content.startswith(">hg17")
+
@skip_if_github_down
def test_upload_multiple_mixed_success(self, history_id):
destination = {"type": "hdas"}
diff --git a/lib/galaxy_test/api/test_webhooks.py b/lib/galaxy_test/api/test_webhooks.py
index 8c5cb9b84197..e640baffc172 100644
--- a/lib/galaxy_test/api/test_webhooks.py
+++ b/lib/galaxy_test/api/test_webhooks.py
@@ -11,7 +11,15 @@ def test_get_all(self):
self._assert_status_code_is(response, 200)
webhook_objs = self._assert_are_webhooks(response)
ids = self._get_webhook_ids(webhook_objs)
- for expected_id in ["history_test1", "history_test2", "masthead_test", "phdcomics", "trans_object", "xkcd"]:
+ for expected_id in [
+ "history_test1",
+ "history_test2",
+ "masthead_test",
+ "phdcomics",
+ "trans_object",
+ "xkcd",
+ "gtn",
+ ]:
assert expected_id in ids
def test_get_data(self):
diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py
index ec5e7fefd295..86c27fbe3bb8 100644
--- a/lib/galaxy_test/base/populators.py
+++ b/lib/galaxy_test/base/populators.py
@@ -1444,6 +1444,8 @@ def download_history_to_store(self, history_id: str, extension: str = "tgz", ser
self.wait_for_download_ready(storage_request_id)
if serve_file:
return self._get_to_tempfile(f"short_term_storage/{storage_request_id}")
+ else:
+ return storage_request_id
def get_history_export_tasks(self, history_id: str):
headers = {"accept": "application/vnd.galaxy.task.export+json"}
diff --git a/lib/galaxy_test/selenium/test_tool_form.py b/lib/galaxy_test/selenium/test_tool_form.py
index 376231d00492..e92fd152064d 100644
--- a/lib/galaxy_test/selenium/test_tool_form.py
+++ b/lib/galaxy_test/selenium/test_tool_form.py
@@ -134,6 +134,38 @@ def check_recorded_val():
self.history_panel_wait_for_hid_ok(2)
self._check_dataset_details_for_inttest_value(2)
+ @selenium_test
+ def test_rerun_dataset_collection_element(self):
+ # upload a first dataset that should not become selected on re-run
+ test_path = self.get_filename("1.fasta")
+ self.perform_upload(test_path)
+ self.history_panel_wait_for_hid_ok(1)
+
+ history_id = self.current_history_id()
+ # upload a nested collection
+ collection_id = self.dataset_collection_populator.create_list_of_list_in_history(
+ history_id,
+ collection_type="list:list",
+ wait=True,
+ ).json()["id"]
+ self.tool_open("identifier_multiple")
+ self.components.tool_form.parameter_batch_dataset_collection(parameter="input1").wait_for_and_click()
+ self.sleep_for(self.wait_types.UX_RENDER)
+ self.components.tool_form.data_option_value(item_id=collection_id).wait_for_and_click()
+ self.sleep_for(self.wait_types.UX_RENDER)
+ self.tool_form_execute()
+ self.history_panel_wait_for_hid_ok(7)
+ self.history_panel_expand_collection(7)
+ self.sleep_for(self.wait_types.UX_RENDER)
+ self.history_panel_click_item_title(1)
+ self.sleep_for(self.wait_types.UX_RENDER)
+ self.hda_click_primary_action_button(1, "rerun")
+ self.sleep_for(self.wait_types.UX_RENDER)
+ assert self.driver.find_element(By.CSS_SELECTOR, "option:checked").text == "Selected: test0"
+ self.tool_form_execute()
+ self.components.history_panel.collection_view.back_to_history.wait_for_and_click()
+ self.history_panel_wait_for_hid_ok(9)
+
@selenium_test
@flakey
def test_run_data(self):
diff --git a/lib/galaxy_test/selenium/test_tutorial_mode.py b/lib/galaxy_test/selenium/test_tutorial_mode.py
new file mode 100644
index 000000000000..11dd08d2e4b9
--- /dev/null
+++ b/lib/galaxy_test/selenium/test_tutorial_mode.py
@@ -0,0 +1,33 @@
+from unittest import SkipTest
+
+import pytest
+
+from .framework import (
+ selenium_test,
+ SeleniumTestCase,
+)
+
+
+class TestTutorialMode(SeleniumTestCase):
+ @selenium_test
+ @pytest.mark.gtn_screenshot
+ def test_activate_tutorial_mode(self):
+ self._ensure_tutorial_mode_available()
+ self.home()
+ self.screenshot("tutorial_mode_0_0")
+ self.tutorial_mode_activate()
+ self.screenshot("tutorial_mode_0_1")
+
+ # Access inside the frame
+ self.driver.switch_to.frame("gtn-embed")
+ self.wait_for_selector_visible("#top-navbar")
+ self.screenshot("tutorial_mode_0_2")
+
+ def _ensure_tutorial_mode_available(self):
+ """Skip a test if the webhook GTN doesn't appear."""
+ response = self.api_get("webhooks", raw=True)
+ assert response.status_code == 200
+ data = response.json()
+ webhooks = [x["id"] for x in data]
+ if "gtn" not in webhooks:
+ raise SkipTest('Skipping test, webhook "GTN Tutorial Mode" doesn\'t appear to be configured.')
diff --git a/packages/util/test-requirements.txt b/packages/util/test-requirements.txt
index 208ec64cdf02..779ccfa23c7e 100644
--- a/packages/util/test-requirements.txt
+++ b/packages/util/test-requirements.txt
@@ -1,2 +1,3 @@
pytest
+pytest-httpserver
responses
diff --git a/pyproject.toml b/pyproject.toml
index f12c9a0e5ebf..5f138b2cd9f0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -133,6 +133,7 @@ pytest-asyncio = "*"
pytest-celery = "*"
pytest-cov = "*"
pytest-html = "*"
+pytest-httpserver = "*"
python-irodsclient = "!=1.1.2" # https://github.com/irods/python-irodsclient/issues/356
pytest-json-report = "*"
pytest-mock = "*"
diff --git a/test/functional/webhooks/gtn b/test/functional/webhooks/gtn
new file mode 120000
index 000000000000..ebc09166db54
--- /dev/null
+++ b/test/functional/webhooks/gtn
@@ -0,0 +1 @@
+../../../config/plugins/webhooks/gtn
\ No newline at end of file
diff --git a/test/integration/test_history_import_export.py b/test/integration/test_history_import_export.py
index 8c27338e5014..eb9848609cef 100644
--- a/test/integration/test_history_import_export.py
+++ b/test/integration/test_history_import_export.py
@@ -7,6 +7,7 @@
history_model_store_dict,
one_hda_model_store_dict,
)
+from galaxy.util import unlink
from galaxy_test.api.test_histories import ImportExportTests
from galaxy_test.base.api import UsesCeleryTasks
from galaxy_test.base.api_asserts import assert_has_keys
@@ -198,6 +199,22 @@ def test_export_history_with_discarded_dataset_to_rocrate(self):
self._wait_for_export_task_on_record(last_record)
assert last_record["ready"] is True
+ def test_export_missing_dataset_fails(self):
+ history_name = f"for_export_failure_{uuid4()}"
+ history_id = self.dataset_populator.new_history(history_name)
+ hda = self.dataset_populator.new_dataset(history_id, wait=True)
+
+ # Simulate data loss or some unexpected problem with the dataset file.
+ assert os.path.exists(hda["file_name"])
+ unlink(hda["file_name"])
+ assert not os.path.exists(hda["file_name"])
+
+ storage_request_id = self.dataset_populator.download_history_to_store(history_id)
+
+ result_response = self._get(f"short_term_storage/{storage_request_id}")
+ self._assert_status_code_is(result_response, 500)
+ assert "Cannot export history dataset" in result_response.json()["err_msg"]
+
def _wait_for_export_task_on_record(self, record):
if record["preparing"]:
assert record["task_uuid"]
diff --git a/test/integration/test_prefix_handling.py b/test/integration/test_prefix_handling.py
new file mode 100644
index 000000000000..705952f42fed
--- /dev/null
+++ b/test/integration/test_prefix_handling.py
@@ -0,0 +1,38 @@
+from galaxy_test.base.populators import DatasetPopulator
+from galaxy_test.driver import integration_util
+
+
+class TestPrefixUrlSerializationIntegration(integration_util.IntegrationTestCase):
+ dataset_populator: DatasetPopulator
+ url_prefix = "/galaxypf"
+
+ def setUp(self) -> None:
+ super().setUp()
+ self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
+
+ @classmethod
+ def handle_galaxy_config_kwds(cls, config) -> None:
+ super().handle_galaxy_config_kwds(config)
+ config["galaxy_url_prefix"] = "/galaxypf"
+
+ def test_display_application_prefix_handling(self, history_id: str) -> None:
+ hda = self.dataset_populator.new_dataset(
+ history_id, content="chr1\t1\t2", file_type="interval", dbkey="hg18", wait=True
+ )
+ details_response = self.dataset_populator.get_history_dataset_details(
+ history_id=history_id, dataset_id=hda["id"]
+ )
+ # verify old style display app contains correct link back to galaxy
+ ucsc = details_response["display_types"][0]
+ assert ucsc["label"] == "display at UCSC"
+ assert "galaxypf" in ucsc["links"][0]["href"].split("redirect_url=")[-1]
+ # verify new style display app links contain prefix
+ display_apps = details_response["display_apps"]
+ for display_app in display_apps:
+ href = display_app["links"][0]["href"]
+ # This is a little inconsistent since most other references are generated without prefix
+ # but it's a real pain to work with the reverse lookup in routes and the callback URLs
+ # do need to include the prefix.
+ assert href.startswith(f"{self.url_prefix}/display_application")
+ response = self._get(f"{self.url[:-(len(self.url_prefix) + 1)]}{href}")
+ response.raise_for_status()
diff --git a/test/unit/tool_util/test_tool_linters.py b/test/unit/tool_util/test_tool_linters.py
index 4f4208235197..e27a59536e7f 100644
--- a/test/unit/tool_util/test_tool_linters.py
+++ b/test/unit/tool_util/test_tool_linters.py
@@ -1644,8 +1644,7 @@ def test_tests_asserts(lint_ctx):
assert "Test 1: unknown assertion 'invalid'" in lint_ctx.error_messages
assert "Test 1: unknown attribute 'invalid_attrib' for 'has_text'" in lint_ctx.error_messages
assert "Test 1: missing attribute 'text' for 'has_text'" in lint_ctx.error_messages
- assert "Test 1: attribute 'value' for 'has_size' needs to be 'int' got '500k'" in lint_ctx.error_messages
- assert "Test 1: attribute 'delta' for 'has_size' needs to be 'int' got '1O'" in lint_ctx.error_messages
+ assert "Test 1: attribute 'value' for 'has_size' needs to be 'int' got '500k'" not in lint_ctx.error_messages
assert (
"Test 1: unknown attribute 'invalid_attrib_also_checked_in_nested_asserts' for 'not_has_text'"
in lint_ctx.error_messages
@@ -1654,7 +1653,7 @@ def test_tests_asserts(lint_ctx):
assert "Test 1: 'has_n_columns' needs to specify 'n', 'min', or 'max'" in lint_ctx.error_messages
assert "Test 1: 'has_n_lines' needs to specify 'n', 'min', or 'max'" in lint_ctx.error_messages
assert not lint_ctx.warn_messages
- assert len(lint_ctx.error_messages) == 9
+ assert len(lint_ctx.error_messages) == 7
def test_tests_output_type_mismatch(lint_ctx):
diff --git a/test/unit/util/test_get_url.py b/test/unit/util/test_get_url.py
index c5a18ccd2bd7..499bbbddd2d8 100644
--- a/test/unit/util/test_get_url.py
+++ b/test/unit/util/test_get_url.py
@@ -1,9 +1,9 @@
import pytest
import requests
import responses
+from werkzeug.wrappers.response import Response
from galaxy.util import url_get
-from galaxy.util.unittest_utils import skip_if_site_down
@responses.activate
@@ -23,11 +23,21 @@ def test_get_url_forbidden():
assert "403 Client Error: Forbidden for url: https://toolshed.g2.bx.psu.edu/" in str(excinfo)
-@skip_if_site_down("https://httpbin.org")
-def test_get_url_retry_after():
- # This test is not ideal since it contacts an external resource
- # and doesn't actually verify multiple attempts have been made.
- # responses doesn't mock the right place to fully simulate this.
- url = "https://httpbin.org/status/429"
+def test_get_url_retry_after(httpserver):
+ attempts = []
+
+ def retry_handler(request):
+ attempts.append(request)
+ if len(attempts) < 4:
+ return Response("try again later", status=429, content_type="text/plain")
+ else:
+ return Response("ok", status=200, content_type="text/plain")
+
+ httpserver.expect_request("/429").respond_with_handler(retry_handler)
+ url = httpserver.url_for("/429")
with pytest.raises(requests.exceptions.RetryError):
- url_get(url, max_retries=2, backoff_factor=0.01)
+ url_get(url, max_retries=1, backoff_factor=0.01)
+ assert len(attempts) == 2
+ response = url_get(url, max_retries=2, backoff_factor=0.01)
+ assert len(attempts) == 4
+ assert response == "ok"