Commit

Merge branch 'release_20.09' into dev
mvdbeek committed Nov 1, 2020
2 parents a72cc38 + 31685ac commit c03cc3d
Showing 40 changed files with 987 additions and 240 deletions.
2 changes: 1 addition & 1 deletion client/src/components/JobStates/CollectionJobStates.vue
@@ -36,7 +36,7 @@ export default {
mixins: [mixin],
computed: {
loadingNote() {
return `Loading job data for ${this.collectionTypeDescription}}`;
return `Loading job data for ${this.collectionTypeDescription}`;
},
generatingNote() {
return `${this.jobsStr} generating a ${this.collectionTypeDescription}`;
14 changes: 12 additions & 2 deletions client/src/components/ProgressBar.vue
@@ -1,6 +1,6 @@
<template>
<div class="my-1">
<small class="position-absolute text-center w-100" v-if="note">
<div class="my-1 progressContainer">
<small v-if="note" class="progressNote">
{{ note }}<span v-if="loading">.<span class="blinking">..</span></span>
</small>
<b-progress :max="total">
@@ -46,3 +46,13 @@ export default {
},
};
</script>
<style lang="css" scoped>
.progressNote {
position: absolute;
text-align: center;
width: 100%;
}
.progressContainer {
position: relative;
}
</style>
@@ -226,7 +226,10 @@ export default {
},
created() {
this.rememberIdp = this.getIdpPreference() !== null;
this.getCILogonIdps();
/* Only fetch CILogonIDPs if custos/cilogon configured */
if (this.cilogonListShow) {
this.getCILogonIdps();
}
},
};
</script>
4 changes: 2 additions & 2 deletions client/src/components/Workflow/Editor/Index.vue
@@ -380,14 +380,14 @@ export default {
const markdown = report.markdown || reportDefault;
this.markdownText = markdown;
this.markdownConfig = report;
showUpgradeMessage(data);
const has_changes = showUpgradeMessage(data);
getVersions(this.id).then((versions) => {
this.versions = versions;
});
Vue.nextTick(() => {
this.canvasManager.drawOverview();
this.canvasManager.scrollToNodes();
this.hasChanges = false;
this.hasChanges = has_changes;
});
})
.catch((response) => {
3 changes: 3 additions & 0 deletions client/src/components/Workflow/Editor/modules/utilities.js
@@ -101,12 +101,14 @@ export function showForm(workflow, node, datatypes) {
export function showUpgradeMessage(data) {
// Determine if any parameters were 'upgraded' and provide message
var upgrade_message = "";
let hasToolUpgrade = false;
_.each(data.steps, (step, step_id) => {
var details = "";
if (step.errors) {
details += `<li>${step.errors}</li>`;
}
_.each(data.upgrade_messages[step_id], (m) => {
hasToolUpgrade = true;
details += `<li>${m}</li>`;
});
if (details) {
@@ -122,6 +124,7 @@ export default {
} else {
hide_modal();
}
return hasToolUpgrade;
}

export function getWorkflowParameters(nodes) {
6 changes: 4 additions & 2 deletions client/src/mvc/form/form-section.js
@@ -89,9 +89,11 @@ var View = Backbone.View.extend({
break;
}
}
if (i == selectedCase && nonhidden) {
if (i == selectedCase) {
const selectedView = self.elements[input_def.id + "_" + i];
selectedView.renderOnce();
}
if (i == selectedCase && nonhidden) {
section_row.fadeIn("fast");
} else {
section_row.hide();
@@ -156,7 +158,7 @@

/** Add a customized section */
_addSection: function (input_def) {
const section = new View(this.app, { inputs: input_def.inputs, skip_render: true });
const section = new View(this.app, { inputs: input_def.inputs });
var portlet = new Portlet.View({
title: input_def.title || input_def.name,
cls: "ui-portlet-section",
1 change: 1 addition & 0 deletions lib/galaxy/authnz/managers.py
@@ -56,6 +56,7 @@ def __init__(self, app, oidc_config_file, oidc_backends_config_file):
file (e.g., oidc_backends_config.xml).
"""
self.app = app
self.allowed_idps = None
self._parse_oidc_config(oidc_config_file)
self._parse_oidc_backends_config(oidc_backends_config_file)

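Initializing self.allowed_idps to None before the config files are parsed guarantees the attribute exists even when the backends config never assigns it. A minimal standalone sketch of the pattern (not Galaxy code; the backends_config dict and idp_permitted helper are illustrative assumptions):

class OIDCManagerSketch:
    def __init__(self, backends_config=None):
        # Mirror of the diff: set a default before any parsing, so later
        # reads of allowed_idps can never raise AttributeError.
        self.allowed_idps = None
        if backends_config and "allowed_idps" in backends_config:
            self.allowed_idps = backends_config["allowed_idps"]

    def idp_permitted(self, idp):
        # None is treated as "no restriction configured" in this sketch.
        return self.allowed_idps is None or idp in self.allowed_idps

print(OIDCManagerSketch().idp_permitted("cilogon"))                              # True
print(OIDCManagerSketch({"allowed_idps": ["github"]}).idp_permitted("cilogon"))  # False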
153 changes: 77 additions & 76 deletions lib/galaxy/datatypes/binary.py
@@ -938,6 +938,8 @@ class Anndata(H5):
True
>>> Anndata().sniff(get_test_fname('test.mz5'))
False
>>> Anndata().sniff(get_test_fname('import.loom.krumsiek11.h5ad'))
True
"""
file_ext = 'h5ad'

@@ -953,20 +955,20 @@ class Anndata(H5):
MetadataElement(name="obs_names", desc="obs_names", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None)
MetadataElement(name="obs_layers", desc="obs_layers", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None)
MetadataElement(name="obs_count", default=0, desc="obs_count", readonly=True, visible=True, no_value=0)
MetadataElement(name="obs_size", default=0, desc="obs_size", readonly=True, visible=True, no_value=0)
MetadataElement(name="obs_size", default=-1, desc="obs_size", readonly=True, visible=True, no_value=0)
MetadataElement(name="obsm_layers", desc="obsm_layers", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None)
MetadataElement(name="obsm_count", default=0, desc="obsm_count", readonly=True, visible=True, no_value=0)
MetadataElement(name="raw_var_layers", desc="raw_var_layers", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None)
MetadataElement(name="raw_var_count", default=0, desc="raw_var_count", readonly=True, visible=True, no_value=0)
MetadataElement(name="raw_var_size", default=0, desc="raw_var_size", readonly=True, visible=True, no_value=0)
MetadataElement(name="var_layers", desc="var_layers", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None)
MetadataElement(name="var_count", default=0, desc="var_count", readonly=True, visible=True, no_value=0)
MetadataElement(name="var_size", default=0, desc="var_size", readonly=True, visible=True, no_value=0)
MetadataElement(name="var_size", default=-1, desc="var_size", readonly=True, visible=True, no_value=0)
MetadataElement(name="varm_layers", desc="varm_layers", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None)
MetadataElement(name="varm_count", default=0, desc="varm_count", readonly=True, visible=True, no_value=0)
MetadataElement(name="uns_layers", desc="uns_layers", default=[], param=metadata.SelectParameter, multiple=True, readonly=True, no_value=None)
MetadataElement(name="uns_count", default=0, desc="uns_count", readonly=True, visible=True, no_value=0)
MetadataElement(name="shape", default=(), desc="shape", param=metadata.ListParameter, readonly=True, visible=True, no_value=(0, 0))
MetadataElement(name="shape", default=(-1, -1), desc="shape", param=metadata.ListParameter, readonly=True, visible=True, no_value=(0, 0))

def sniff(self, filename):
if super().sniff(filename):
@@ -979,79 +981,78 @@ def sniff(self, filename):

def set_meta(self, dataset, overwrite=True, **kwd):
super().set_meta(dataset, overwrite=overwrite, **kwd)
try:
with h5py.File(dataset.file_name, 'r') as anndata_file:
dataset.metadata.title = util.unicodify(anndata_file.attrs.get('title'))
dataset.metadata.description = util.unicodify(anndata_file.attrs.get('description'))
dataset.metadata.url = util.unicodify(anndata_file.attrs.get('url'))
dataset.metadata.doi = util.unicodify(anndata_file.attrs.get('doi'))
dataset.creation_date = util.unicodify(anndata_file.attrs.get('creation_date'))
# none of the above appear to work in any dataset tested, but could be useful for future
# AnnData datasets

# all possible keys
dataset.metadata.layers_count = len(anndata_file)
dataset.metadata.layers_names = list(anndata_file.keys())

def _layercountsize(tmp, lennames=0):
"From TMP and LENNAMES, return layers, their number, and the length of one of the layers (all equal)."
if hasattr(tmp, 'dtype'):
layers = [util.unicodify(x) for x in tmp.dtype.names]
count = len(tmp.dtype)
size = int(tmp.size)
else:
layers = [util.unicodify(x) for x in tmp.keys()]
count = len(layers)
size = lennames
return (layers, count, size)

if 'obs' in dataset.metadata.layers_names:
tmp = anndata_file["obs"]
dataset.metadata.obs_names = [util.unicodify(x) for x in tmp["index"]]
dataset.metadata.obs_layers, \
dataset.metadata.obs_count, \
dataset.metadata.obs_size = _layercountsize(tmp, len(dataset.metadata.obs_names))

if 'obsm' in dataset.metadata.layers_names:
tmp = anndata_file["obsm"]
dataset.metadata.obsm_layers, dataset.metadata.obsm_count, _ = _layercountsize(tmp)

if 'raw.var' in dataset.metadata.layers_names:
tmp = anndata_file["raw.var"]
# full set of genes would never need to be previewed
# dataset.metadata.raw_var_names = tmp["index"]
dataset.metadata.raw_var_layers, \
dataset.metadata.raw_var_count, \
dataset.metadata.raw_var_size = _layercountsize(tmp, len(tmp["index"]))

if 'var' in dataset.metadata.layers_names:
tmp = anndata_file["var"]
# row names are never used in preview windows
# dataset.metadata.var_names = tmp["index"]
dataset.metadata.var_layers, \
dataset.metadata.var_count, \
dataset.metadata.var_size = _layercountsize(tmp, len(tmp["index"]))

if 'varm' in dataset.metadata.layers_names:
tmp = anndata_file["varm"]
dataset.metadata.varm_layers, dataset.metadata.varm_count, _ = _layercountsize(tmp)

if 'uns' in dataset.metadata.layers_names:
tmp = anndata_file["uns"]
dataset.metadata.uns_layers, dataset.metadata.uns_count, _ = _layercountsize(tmp)

if 'X' in dataset.metadata.layers_names:
# Shape we determine here due to the non-standard representation of 'X' dimensions
shape = anndata_file['X'].attrs.get("shape")
if shape is not None:
dataset.metadata.shape = tuple(shape)
elif hasattr(anndata_file['X'], 'shape'):
dataset.metadata.shape = tuple(anndata_file['X'].shape)
else:
dataset.metadata.shape = (int(dataset.metadata.obs_size), int(dataset.metadata.var_size))

except Exception as e:
log.warning('%s, set_meta Exception: %s', self, e)
with h5py.File(dataset.file_name, 'r') as anndata_file:
dataset.metadata.title = util.unicodify(anndata_file.attrs.get('title'))
dataset.metadata.description = util.unicodify(anndata_file.attrs.get('description'))
dataset.metadata.url = util.unicodify(anndata_file.attrs.get('url'))
dataset.metadata.doi = util.unicodify(anndata_file.attrs.get('doi'))
dataset.creation_date = util.unicodify(anndata_file.attrs.get('creation_date'))
dataset.metadata.shape = anndata_file.attrs.get('shape') or dataset.metadata.shape
# none of the above appear to work in any dataset tested, but could be useful for future
# AnnData datasets

# all possible keys
dataset.metadata.layers_count = len(anndata_file)
dataset.metadata.layers_names = list(anndata_file.keys())

def _layercountsize(tmp, lennames=0):
"From TMP and LENNAMES, return layers, their number, and the length of one of the layers (all equal)."
if hasattr(tmp, 'dtype'):
layers = [util.unicodify(x) for x in tmp.dtype.names]
count = len(tmp.dtype)
size = int(tmp.size)
else:
layers = [util.unicodify(x) for x in list(tmp.keys())]
count = len(layers)
size = lennames
return (layers, count, size)

if 'obs' in dataset.metadata.layers_names:
tmp = anndata_file["obs"]
dataset.metadata.obs_names = [util.unicodify(x) for x in tmp["index"]]
dataset.metadata.obs_layers, \
dataset.metadata.obs_count, \
dataset.metadata.obs_size = _layercountsize(tmp, len(dataset.metadata.obs_names))

if 'obsm' in dataset.metadata.layers_names:
tmp = anndata_file["obsm"]
dataset.metadata.obsm_layers, dataset.metadata.obsm_count, _ = _layercountsize(tmp)

if 'raw.var' in dataset.metadata.layers_names:
tmp = anndata_file["raw.var"]
# full set of genes would never need to be previewed
# dataset.metadata.raw_var_names = tmp["index"]
dataset.metadata.raw_var_layers, \
dataset.metadata.raw_var_count, \
dataset.metadata.raw_var_size = _layercountsize(tmp, len(tmp["index"]))

if 'var' in dataset.metadata.layers_names:
tmp = anndata_file["var"]
# row names are never used in preview windows
# dataset.metadata.var_names = tmp["index"]
dataset.metadata.var_layers, \
dataset.metadata.var_count, \
dataset.metadata.var_size = _layercountsize(tmp, len(tmp["index"]))

if 'varm' in dataset.metadata.layers_names:
tmp = anndata_file["varm"]
dataset.metadata.varm_layers, dataset.metadata.varm_count, _ = _layercountsize(tmp)

if 'uns' in dataset.metadata.layers_names:
tmp = anndata_file["uns"]
dataset.metadata.uns_layers, dataset.metadata.uns_count, _ = _layercountsize(tmp)

# Resolving the problematic shape parameter
if 'X' in dataset.metadata.layers_names:
# Shape we determine here due to the non-standard representation of 'X' dimensions
shape = anndata_file['X'].attrs.get("shape")
if shape is not None:
dataset.metadata.shape = tuple(shape)
elif hasattr(anndata_file['X'], 'shape'):
dataset.metadata.shape = tuple(anndata_file['X'].shape)

if dataset.metadata.shape is None:
dataset.metadata.shape = (int(dataset.metadata.obs_size), int(dataset.metadata.var_size))

def set_peek(self, dataset, is_multi_byte=False):
if not dataset.dataset.purged:
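In the reworked set_meta above, the shape metadata is resolved with explicit precedence: a shape found under the X node (its 'shape' attribute for sparse layouts, or the dataset's own .shape for dense ones) overrides a file-level 'shape' attribute, and (obs_size, var_size) is the last resort, where the new -1 defaults flag axes that were never determined. A standalone sketch of that resolution (the function name and signature are illustrative, not Galaxy's API):

import h5py

def resolve_anndata_shape(path, obs_size=-1, var_size=-1, default=None):
    # Sketch of the fallback order used in set_meta above.
    shape = default
    with h5py.File(path, "r") as f:
        # File-level 'shape' attribute, when the writer stored one.
        shape = f.attrs.get("shape") or shape
        if "X" in f:
            x = f["X"]
            # Non-standard X: prefer its 'shape' attribute (sparse layouts),
            # otherwise the dataset's own shape (dense X).
            if x.attrs.get("shape") is not None:
                shape = x.attrs["shape"]
            elif hasattr(x, "shape"):
                shape = x.shape
    if shape is None:
        # Last resort: sizes gathered from obs/var; -1 marks an unknown axis.
        shape = (int(obs_size), int(var_size))
    return tuple(shape)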
Binary file not shown.