@@ -850,6 +856,12 @@ var LibraryDatasetView = Backbone.View.extend({
'<td scope="row"><%= _.escape(ldda.get("misc_info")) %></td>',
'</tr>',
'<% } %>',
+ '<% if (item.get("tags")) { %>',
+ '<tr>',
+ '<th scope="row">Tags</th>',
+ '<td scope="row"><%= _.escape(item.get("tags")) %></td>',
+ '</tr>',
+ '<% } %>',
'</table>',
'</div>',
'</div>',
@@ -951,6 +963,13 @@ var LibraryDatasetView = Backbone.View.extend({
'<th scope="row">Miscellaneous blurb</th>',
'<td scope="row"><%= _.escape(item.get("misc_blurb")) %></td>',
'</tr>',
+ //TODO: add functionality to modify tags here
+ '<% if (item.get("tags")) { %>',
+ '<tr>',
+ '<th scope="row">Tags</th>',
+ '<td scope="row"><%= _.escape(item.get("tags")) %></td>',
+ '</tr>',
+ '<% } %>',
'</table>',
'</div>',
'</div>',
diff --git a/client/galaxy/scripts/mvc/library/library-folder-view.js b/client/galaxy/scripts/mvc/library/library-folder-view.js
index ed3c30ed2b9d..a66019ccbd78 100644
--- a/client/galaxy/scripts/mvc/library/library-folder-view.js
+++ b/client/galaxy/scripts/mvc/library/library-folder-view.js
@@ -181,7 +181,7 @@ var FolderView = Backbone.View.extend({
* Extract the role ids from Select2 elements's 'data'
*/
_extractIds: function(roles_list){
- ids_list = [];
+ var ids_list = [];
for (var i = roles_list.length - 1; i >= 0; i--) {
ids_list.push(roles_list[i].id);
};
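
Most hunks in this patch apply the same one-word fix, so it is worth spelling out what the missing `var` costs: these modules run in sloppy mode, where a bare assignment like `ids_list = []` creates (or overwrites) a property on the global object, letting unrelated views share state by accident. A minimal sketch of the failure mode, with illustrative names not taken from this patch:

```js
// Two functions that each "privately" count with `i`, minus the `var`:
function inner() {
    for (i = 0; i < 3; i++) { /* ... */ }   // leaks `i` onto the global object
}
function outer() {
    for (i = 0; i < 3; i++) {               // reads/writes the same global `i`
        inner();                            // returns with i === 3
    }
}
outer();  // outer's body runs once instead of three times
// Under 'use strict', the undeclared assignment throws a ReferenceError instead.
```
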
diff --git a/client/galaxy/scripts/mvc/library/library-folderlist-view.js b/client/galaxy/scripts/mvc/library/library-folderlist-view.js
index 644f9fdcd38d..df4467b5e843 100644
--- a/client/galaxy/scripts/mvc/library/library-folderlist-view.js
+++ b/client/galaxy/scripts/mvc/library/library-folderlist-view.js
@@ -112,7 +112,7 @@ var FolderListView = Backbone.View.extend({
// when dataset_id is present render its details too
if ( this.options.dataset_id ){
- row = _.findWhere( that.rowViews, { id: this.options.dataset_id } );
+ var row = _.findWhere( that.rowViews, { id: this.options.dataset_id } );
if ( row ) {
row.showDatasetDetails();
} else {
@@ -277,7 +277,7 @@ var FolderListView = Backbone.View.extend({
// Iterate each checkbox
$(':checkbox', '#folder_list_body').each(function() {
this.checked = selected;
- $row = $(this.parentElement.parentElement);
+ var $row = $(this.parentElement.parentElement);
// Change color of selected/unselected
if (selected) {
that.makeDarkRow($row);
diff --git a/client/galaxy/scripts/mvc/library/library-folderrow-view.js b/client/galaxy/scripts/mvc/library/library-folderrow-view.js
index ebb2d5e28222..138e487f180a 100644
--- a/client/galaxy/scripts/mvc/library/library-folderrow-view.js
+++ b/client/galaxy/scripts/mvc/library/library-folderrow-view.js
@@ -66,7 +66,7 @@ var FolderRowView = Backbone.View.extend({
* the filling of the row template of a given folder.
*/
prepareButtons: function(folder){
- vis_config = this.options.visibility_config;
+ var vis_config = this.options.visibility_config;
if (this.options.edit_mode === false){
vis_config.save_folder_btn = false;
vis_config.cancel_folder_btn = false;
diff --git a/client/galaxy/scripts/mvc/library/library-foldertoolbar-view.js b/client/galaxy/scripts/mvc/library/library-foldertoolbar-view.js
index d6a13549869d..0c301fa1c1b4 100644
--- a/client/galaxy/scripts/mvc/library/library-foldertoolbar-view.js
+++ b/client/galaxy/scripts/mvc/library/library-foldertoolbar-view.js
@@ -161,8 +161,13 @@ var FolderToolbarView = Backbone.View.extend({
var folderDetails = this.serialize_new_folder();
if (this.validate_new_folder(folderDetails)){
var folder = new mod_library_model.FolderAsModel();
- url_items = Backbone.history.fragment.split('/');
- current_folder_id = url_items[url_items.length-1];
+ var url_items = Backbone.history.fragment.split('/'),
+ current_folder_id;
+ if(url_items.indexOf('page') > -1){
+ current_folder_id = url_items[url_items.length-3];
+ }else {
+ current_folder_id = url_items[url_items.length-1];
+ }
folder.url = folder.urlRoot + current_folder_id ;
folder.save(folderDetails, {
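
The new branch above exists because paginated folder routes put the folder id in the middle of the URL fragment rather than at the end. A sketch of both cases, assuming routes shaped like `folders/<id>` and `folders/<id>/page/<n>` (the ids are illustrative):

```js
// Paginated fragment: the id sits third from the end.
var paginated = 'folders/F123/page/2'.split('/');  // ['folders', 'F123', 'page', '2']
paginated[paginated.length - 3];                   // 'F123'

// Plain fragment: the id is the last element, as before.
var plain = 'folders/F123'.split('/');             // ['folders', 'F123']
plain[plain.length - 1];                           // 'F123'
```
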
@@ -313,7 +318,7 @@ var FolderToolbarView = Backbone.View.extend({
updateProgress: function(){
this.progress += this.progressStep;
$( '.progress-bar-import' ).width( Math.round( this.progress ) + '%' );
- txt_representation = Math.round( this.progress ) + '% Complete';
+ var txt_representation = Math.round( this.progress ) + '% Complete';
$( '.completion_span' ).text( txt_representation );
},
@@ -427,7 +432,7 @@ var FolderToolbarView = Backbone.View.extend({
url : Galaxy.root + "api/datatypes?extension_only=False",
success : function( datatypes ) {
that.list_extensions = [];
- for (key in datatypes) {
+ for (var key in datatypes) {
that.list_extensions.push({
id : datatypes[key].extension,
text : datatypes[key].extension,
@@ -446,7 +451,7 @@ var FolderToolbarView = Backbone.View.extend({
url : Galaxy.root + "api/genomes",
success : function( genomes ) {
that.list_genomes = [];
- for ( key in genomes ) {
+ for (var key in genomes ) {
that.list_genomes.push({
id : genomes[key][1],
text : genomes[key][0]
@@ -611,7 +616,7 @@ var FolderToolbarView = Backbone.View.extend({
this.modal.disableButton('Import');
paths = paths.split('\n');
for (var i = paths.length - 1; i >= 0; i--) {
- trimmed = paths[i].trim();
+ var trimmed = paths[i].trim();
if (trimmed.length!==0){
valid_paths.push(trimmed);
}
@@ -670,11 +675,12 @@ var FolderToolbarView = Backbone.View.extend({
importFromJstreePath: function ( that, options ){
var all_nodes = $( '#jstree_browser' ).jstree().get_selected( true );
// remove the disabled elements that could have been trigerred with the 'select all'
- selected_nodes = _.filter(all_nodes, function(node){ return node.state.disabled == false; })
+ var selected_nodes = _.filter(all_nodes, function(node){ return node.state.disabled == false; })
var preserve_dirs = this.modal.$el.find( '.preserve-checkbox' ).is( ':checked' );
var link_data = this.modal.$el.find( '.link-checkbox' ).is( ':checked' );
var file_type = this.select_extension.value();
var dbkey = this.select_genome.value();
+ var tag_using_filenames = this.modal.$el.find( '.tag-files' ).is( ':checked' );
var selection_type = selected_nodes[0].type;
var paths = [];
if ( selected_nodes.length < 1 ){
@@ -694,13 +700,15 @@ var FolderToolbarView = Backbone.View.extend({
link_data: link_data,
source: full_source,
file_type: file_type,
- dbkey: dbkey } );
+ dbkey: dbkey,
+ tag_using_filenames: tag_using_filenames } );
} else if ( selection_type === 'file' ){
var full_source = options.source + '_file';
this.chainCallImportingUserdirFiles( { paths : paths,
file_type: file_type,
dbkey: dbkey,
- source: full_source } );
+ source: full_source,
+ tag_using_filenames: tag_using_filenames } );
}
}
},
@@ -745,7 +753,7 @@ var FolderToolbarView = Backbone.View.extend({
}
});
for ( var i = history_item_ids.length - 1; i >= 0; i-- ) {
- history_item_id = history_item_ids[i];
+ var history_item_id = history_item_ids[i];
var folder_item = new mod_library_model.Item();
folder_item.url = Galaxy.root + 'api/folders/' + this.options.id + '/contents';
if (history_item_types[i] === 'collection') {
@@ -794,9 +802,10 @@ var FolderToolbarView = Backbone.View.extend({
},
/**
- * Take the array of paths and createa request for each of them
+ * Take the array of paths and create a request for each of them
* calling them in chain. Update the progress bar in between each.
- * @param {array} paths paths relative to user folder on Galaxy
+ * @param {array} paths paths relative to user folder on Galaxy
+ * @param {boolean} tag_using_filenames add tags to datasets using names of files
*/
chainCallImportingUserdirFiles: function( options ){
@@ -815,7 +824,8 @@ var FolderToolbarView = Backbone.View.extend({
'&source=' + options.source +
'&path=' + popped_item +
'&file_type=' + options.file_type +
- '&dbkey=' + options.dbkey ) )
+ '&dbkey=' + options.dbkey +
+ '&tag_using_filenames=' + options.tag_using_filenames ) )
promise.done( function( response ){
that.updateProgress();
that.chainCallImportingUserdirFiles( options );
@@ -830,11 +840,12 @@ var FolderToolbarView = Backbone.View.extend({
/**
* Take the array of paths and createa request for each of them
* calling them in chain. Update the progress bar in between each.
- * @param {array} paths paths relative to Galaxy root folder
- * @param {boolean} preserve_dirs indicates whether to preserve folder structure
- * @param {boolean} link_data copy files to Galaxy or link instead
- * @param {str} source string representing what type of folder
- * is the source of import
+ * @param {array} paths paths relative to Galaxy root folder
+ * @param {boolean} preserve_dirs indicates whether to preserve folder structure
+ * @param {boolean} link_data copy files to Galaxy or link instead
+ * @param {str} source string representing what type of folder
+ * is the source of import
+ * @param {boolean} tag_using_filenames add tags to datasets using names of files
*/
chainCallImportingFolders: function( options ){
// TODO need to check which paths to call
@@ -856,7 +867,8 @@ var FolderToolbarView = Backbone.View.extend({
'&preserve_dirs=' + options.preserve_dirs +
'&link_data=' + options.link_data +
'&file_type=' + options.file_type +
- '&dbkey=' + options.dbkey ) )
+ '&dbkey=' + options.dbkey +
+ '&tag_using_filenames=' + options.tag_using_filenames ) )
promise.done(function(response){
that.updateProgress();
that.chainCallImportingFolders( options );
@@ -1341,7 +1353,13 @@ var FolderToolbarView = Backbone.View.extend({
'<div>',
'Type: <span id="library_extension_select" class="library-extension-select" />',
'Genome: <span id="library_genome_select" class="library-genome-select" />',
- '</div>',
+ '</div>',
+ '<div>',
+ '<label class="checkbox-inline">',
+ 'Tag datasets based on file names.',
+ '<input class="tag-files" type="checkbox" value="tag_using_filenames" checked="checked">',
+ '</label>',
+ '</div>',
'</div>'
].join(''));
},
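
Taken together, the foldertoolbar hunks thread the new flag along the same path that `file_type` and `dbkey` already travel: read once from the modal, carried in the options object, appended to each import request. Condensed from the hunks above (the surrounding calls are abridged):

```js
// 1. Read the checkbox state when the jstree selection is imported:
var tag_using_filenames = this.modal.$el.find( '.tag-files' ).is( ':checked' );

// 2. Thread it through the chain-importing options:
this.chainCallImportingUserdirFiles({
    paths               : paths,
    file_type           : file_type,
    dbkey               : dbkey,
    source              : full_source,
    tag_using_filenames : tag_using_filenames
});

// 3. Each queued request then appends it to the query string:
//    ... + '&dbkey=' + options.dbkey + '&tag_using_filenames=' + options.tag_using_filenames
```
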
diff --git a/client/galaxy/scripts/mvc/library/library-library-view.js b/client/galaxy/scripts/mvc/library/library-library-view.js
index 42650e04cab3..9d6d6b82d191 100644
--- a/client/galaxy/scripts/mvc/library/library-library-view.js
+++ b/client/galaxy/scripts/mvc/library/library-library-view.js
@@ -212,7 +212,7 @@ var LibraryView = Backbone.View.extend({
},
_extractIds: function(roles_list){
- ids_list = [];
+ var ids_list = [];
for (var i = roles_list.length - 1; i >= 0; i--) {
ids_list.push(roles_list[i].id);
};
diff --git a/client/galaxy/scripts/mvc/library/library-libraryrow-view.js b/client/galaxy/scripts/mvc/library/library-libraryrow-view.js
index e39cc2b0da5b..757222170cbe 100644
--- a/client/galaxy/scripts/mvc/library/library-libraryrow-view.js
+++ b/client/galaxy/scripts/mvc/library/library-libraryrow-view.js
@@ -64,7 +64,7 @@ var LibraryRowView = Backbone.View.extend({
* the filling of the row template of given library.
*/
prepareButtons: function(library){
- vis_config = this.element_visibility_config;
+ var vis_config = this.element_visibility_config;
if (this.edit_mode === false){
vis_config.save_library_btn = false;
diff --git a/client/galaxy/scripts/mvc/library/library-model.js b/client/galaxy/scripts/mvc/library/library-model.js
index 91f00cd92bab..be62bef4a132 100644
--- a/client/galaxy/scripts/mvc/library/library-model.js
+++ b/client/galaxy/scripts/mvc/library/library-model.js
@@ -41,7 +41,7 @@ define([], function() {
if (search_term == "") return this;
var lowercase_term = search_term.toLowerCase();
return this.filter(function(data) {
- lowercase_name = data.get("name").toLowerCase();
+ var lowercase_name = data.get("name").toLowerCase();
return lowercase_name.indexOf(lowercase_term) !== -1;
});
},
diff --git a/client/galaxy/scripts/mvc/tool/tool-form-base.js b/client/galaxy/scripts/mvc/tool/tool-form-base.js
index c26c35e96f7e..0dd48016623b 100644
--- a/client/galaxy/scripts/mvc/tool/tool-form-base.js
+++ b/client/galaxy/scripts/mvc/tool/tool-form-base.js
@@ -8,13 +8,10 @@ define( [ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view
var self = this;
this.deferred = new Deferred();
FormBase.prototype.initialize.call( this, options );
- if ( this.model.get( 'inputs' ) ) {
- this._buildForm( this.model.attributes );
- } else {
- this.deferred.execute( function( process ) {
- self._buildModel( process, self.model.attributes, true );
- });
- }
+
+ // optional model update
+ this._update( this.model.get( 'initialmodel' ) );
+
// listen to history panel
if ( this.model.get( 'listen_to_history' ) && parent.Galaxy && parent.Galaxy.currHistoryPanel ) {
this.listenTo( parent.Galaxy.currHistoryPanel.collection, 'change', function() {
@@ -25,6 +22,21 @@ define( [ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view
this.$el.on( 'remove', function() { self._destroy() } );
},
+ /** Allows tool form variation to update tool model */
+ _update: function( callback ) {
+ var self = this;
+ callback = callback || this.model.get( 'buildmodel' );
+ if ( callback ) {
+ this.deferred.reset();
+ this.deferred.execute( function( process ) {
+ callback( process, self );
+ process.then( function() { self._render() } );
+ });
+ } else {
+ this._render();
+ }
+ },
+
/** Wait for deferred build processes before removal */
_destroy: function() {
var self = this;
@@ -36,12 +48,12 @@ define( [ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view
},
/** Build form */
- _buildForm: function( options ) {
+ _render: function() {
var self = this;
- this.model.set( options );
+ var options = this.model.attributes;
this.model.set({
- title : options.title || '<b>' + options.name + '</b> ' + options.description + ' (Galaxy Version ' + options.version + ')',
- operations : !this.model.get( 'hide_operations' ) && this._operations(),
+ title : options.fixed_title || '<b>' + options.name + '</b> ' + options.description + ' (Galaxy Version ' + options.version + ')',
+ operations : !options.hide_operations && this._operations(),
onchange : function() {
self.deferred.reset();
self.deferred.execute( function ( process ) {
@@ -49,85 +61,16 @@ define( [ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view
});
}
});
- this.model.get( 'customize' ) && this.model.get( 'customize' )( this );
this.render();
if ( !this.model.get( 'collapsible' ) ) {
this.$el.append( $( '<div/>' ).addClass( 'ui-margin-top-large' ).append( this._footer() ) );
}
- },
-
- /** Builds a new model through api call and recreates the entire form */
- _buildModel: function( process, new_options, hide_message ) {
- var self = this;
- var options = this.model.attributes;
- options.version = new_options.version;
- options.id = new_options.id;
-
- // build request url
- var build_url = '';
- var build_data = {};
- var job_id = '';
- // When re-running a job the job_id is found in the new_options object.
- // When re-running a job and requesting a new tool_version,
- // the job_id is in the options object.
- if ( new_options.job_id ) {
- job_id = new_options.job_id;
- } else if (options.job_id) {
- job_id = options.job_id;
- }
- if ( job_id ) {
- build_url = Galaxy.root + 'api/jobs/' + job_id + '/build_for_rerun';
- } else {
- build_url = Galaxy.root + 'api/tools/' + options.id + '/build';
- build_data = $.extend( {}, Galaxy.params );
- build_data[ 'tool_id' ] && ( delete build_data[ 'tool_id' ] );
- }
- options.version && ( build_data[ 'tool_version' ] = options.version );
-
- // get initial model
- Utils.get({
- url : build_url,
- data : build_data,
- success : function( data ) {
- if( !data.display ) {
- window.location = Galaxy.root;
- return;
- }
- self._buildForm( data );
- !hide_message && self.message.update({
- status : 'success',
- message : 'Now you are using \'' + options.name + '\' version ' + options.version + ', id \'' + options.id + '\'.',
- persistent : false
- });
- Galaxy.emit.debug('tool-form-base::_buildModel()', 'Initial tool model ready.', data);
- process.resolve();
- },
- error : function( response, status ) {
- var error_message = ( response && response.err_msg ) || 'Uncaught error.';
- if ( status == 401 ) {
- window.location = Galaxy.root + 'user/login?' + $.param({ redirect : Galaxy.root + '?tool_id=' + options.id });
- } else if ( self.$el.is( ':empty' ) ) {
- self.$el.prepend( ( new Ui.Message({
- message : error_message,
- status : 'danger',
- persistent : true,
- large : true
- }) ).$el );
- } else {
- Galaxy.modal && Galaxy.modal.show({
- title : 'Tool request failed',
- body : error_message,
- buttons : {
- 'Close' : function() {
- Galaxy.modal.hide();
- }
- }
- });
- }
- Galaxy.emit.debug( 'tool-form-base::_buildModel()', 'Initial tool model request failed.', response );
- process.reject();
- }
+ this.show_message && this.message.update({
+ status : 'success',
+ message : 'Now you are using \'' + options.name + '\' version ' + options.version + ', id \'' + options.id + '\'.',
+ persistent : false
});
+ this.show_message = true;
},
/** Create tool operation menu */
@@ -151,13 +94,9 @@ define( [ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view
icon : 'fa-cube',
onclick : function() {
// here we update the tool version (some tools encode the version also in the id)
- var id = options.id.replace( options.version, this.version );
- var version = this.version;
- // queue model request
- self.deferred.reset();
- self.deferred.execute( function( process ) {
- self._buildModel( process, { id : id, version : version } )
- });
+ self.model.set( 'id', options.id.replace( options.version, this.version ) );
+ self.model.set( 'version', this.version );
+ self._update();
}
});
}
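
After this refactor, `ToolFormBase` no longer fetches or builds its own model. `_update()` looks for an injected callback (`initialmodel` once at construction, `buildmodel` afterwards, e.g. when the version menu switches tools), runs it against the shared `Deferred`, and renders when the process resolves. A sketch of the contract a form variant now supplies; the model fields set here are placeholders:

```js
var form = new ToolFormBase({
    // Run once from initialize() via _update(); resolve the process
    // when the model is populated so _render() can fire.
    initialmodel : function( process, form ) {
        form.model.set({ name: 'example_tool', version: '1.0' });  // placeholder
        process.resolve();
    },
    // Run for later rebuilds; resolve (or reject) the process when done.
    buildmodel : function( process, form ) {
        process.resolve();
    }
});
```
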
diff --git a/client/galaxy/scripts/mvc/tool/tool-form-composite.js b/client/galaxy/scripts/mvc/tool/tool-form-composite.js
index 608109432c1a..156af19e47b0 100644
--- a/client/galaxy/scripts/mvc/tool/tool-form-composite.js
+++ b/client/galaxy/scripts/mvc/tool/tool-form-composite.js
@@ -45,7 +45,7 @@ define([ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view'
}
step = Utils.merge( {
index : i,
- title : _.escape( title ),
+ fixed_title : _.escape( title ),
icon : icon || '',
help : null,
citations : null,
@@ -112,7 +112,7 @@ define([ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view'
var wp_count = 0;
this.wp_inputs = {};
function _handleWorkflowParameter( value, callback ) {
- var re = /\$\{(.+?)\}/g;
+ var re = /\$\{(.+?)\}/g, match;
while ( match = re.exec( String( value ) ) ) {
var wp_name = match[ 1 ];
callback( self.wp_inputs[ wp_name ] = self.wp_inputs[ wp_name ] || {
@@ -304,7 +304,7 @@ define([ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view'
var is_simple_input = ([ 'data_input', 'data_collection_input' ]).indexOf( step.step_type ) != -1;
_.each( step.inputs, function( input ) { input.flavor = 'module'; input.hide_label = is_simple_input; } );
form = new Form( Utils.merge({
- title : step.title,
+ title : step.fixed_title,
onchange : function() { _.each( self.links[ step.index ], function( link ) { self._refreshStep( link ) } ) },
inputs : step.inputs && step.inputs.length > 0 ? step.inputs : [ { type: 'hidden', name: 'No options available.', ignore: null } ]
}, step ) );
@@ -336,7 +336,7 @@ define([ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view'
new_value = { values: [] };
_.each( input.step_linked, function( source_step ) {
if ( self._isDataStep( source_step ) ) {
- value = self.forms[ source_step.index ].data.create().input;
+ var value = self.forms[ source_step.index ].data.create().input;
value && _.each( value.values, function( v ) { new_value.values.push( v ) } );
}
});
@@ -345,7 +345,7 @@ define([ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view'
}
} else if ( input.wp_linked ) {
new_value = input.value;
- var re = /\$\{(.+?)\}/g;
+ var re = /\$\{(.+?)\}/g, match;
while ( match = re.exec( input.value ) ) {
var wp_field = self.wp_form.field_list[ self.wp_form.data.match( match[ 1 ] ) ];
var wp_value = wp_field && wp_field.value();
@@ -514,7 +514,7 @@ define([ 'utils/utils', 'utils/deferred', 'mvc/ui/ui-misc', 'mvc/form/form-view'
/** Is data input module/step */
_isDataStep: function( steps ) {
- lst = $.isArray( steps ) ? steps : [ steps ] ;
+ var lst = $.isArray( steps ) ? steps : [ steps ] ;
for ( var i = 0; i < lst.length; i++ ) {
var step = lst[ i ];
if ( !step || !step.step_type || !step.step_type.startsWith( 'data' ) ) {
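
Both regex hunks above fix the same leak: `match` was assigned in the `while` condition without ever being declared. The extraction pattern itself, isolated with an illustrative sample string:

```js
// Collect ${...} workflow parameter names, as _handleWorkflowParameter does.
var re = /\$\{(.+?)\}/g, match;
var names = [];
while ( match = re.exec( 'join on ${delimiter}, keep ${column}' ) ) {
    names.push( match[ 1 ] );   // capture group 1 is the bare parameter name
}
// names -> [ 'delimiter', 'column' ]
```
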
diff --git a/client/galaxy/scripts/mvc/tool/tool-form.js b/client/galaxy/scripts/mvc/tool/tool-form.js
index 7a10c5eba74f..1c3117f0fd01 100644
--- a/client/galaxy/scripts/mvc/tool/tool-form.js
+++ b/client/galaxy/scripts/mvc/tool/tool-form.js
@@ -8,42 +8,64 @@ define([ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-modal', 'mvc/tool/tool-form
this.form = new ToolFormBase( Utils.merge({
listen_to_history : true,
always_refresh : false,
- customize : function( form ) {
+ buildmodel: function( process, form ) {
var options = form.model.attributes;
- // build execute button
- options.buttons = {
- execute: execute_btn = new Ui.Button({
- icon : 'fa-check',
- tooltip : 'Execute: ' + options.name + ' (' + options.version + ')',
- title : 'Execute',
- cls : 'ui-button btn btn-primary',
- floating : 'clear',
- onclick : function() {
- execute_btn.wait();
- form.portlet.disable();
- self.submit( options, function() {
- execute_btn.unwait();
- form.portlet.enable();
- } );
- }
- })
- }
- // remap feature
- if ( options.job_id && options.job_remap ) {
- options.inputs.push({
- label : 'Resume dependencies from this job',
- name : 'rerun_remap_job_id',
- type : 'select',
- display : 'radio',
- ignore : '__ignore__',
- value : '__ignore__',
- options : [ [ 'Yes', options.job_id ], [ 'No', '__ignore__' ] ],
- help : 'The previous run of this tool failed and other tools were waiting for it to finish successfully. Use this option to resume those tools using the new output(s) of this tool run.'
- });
+
+ // build request url
+ var build_url = '';
+ var build_data = {};
+ var job_id = options.job_id;
+ if ( job_id ) {
+ build_url = Galaxy.root + 'api/jobs/' + job_id + '/build_for_rerun';
+ } else {
+ build_url = Galaxy.root + 'api/tools/' + options.id + '/build';
+ build_data = $.extend( {}, Galaxy.params );
+ build_data[ 'tool_id' ] && ( delete build_data[ 'tool_id' ] );
}
+ options.version && ( build_data[ 'tool_version' ] = options.version );
+
+ // get initial model
+ Utils.get({
+ url : build_url,
+ data : build_data,
+ success : function( data ) {
+ if( !data.display ) {
+ window.location = Galaxy.root;
+ return;
+ }
+ form.model.set( data );
+ self._customize( form );
+ Galaxy.emit.debug('tool-form-base::_buildModel()', 'Initial tool model ready.', data);
+ process.resolve();
+ },
+ error : function( response, status ) {
+ var error_message = ( response && response.err_msg ) || 'Uncaught error.';
+ if ( status == 401 ) {
+ window.location = Galaxy.root + 'user/login?' + $.param({ redirect : Galaxy.root + '?tool_id=' + options.id });
+ } else if ( form.$el.is( ':empty' ) ) {
+ form.$el.prepend( ( new Ui.Message({
+ message : error_message,
+ status : 'danger',
+ persistent : true,
+ large : true
+ }) ).$el );
+ } else {
+ Galaxy.modal && Galaxy.modal.show({
+ title : 'Tool request failed',
+ body : error_message,
+ buttons : {
+ 'Close' : function() {
+ Galaxy.modal.hide();
+ }
+ }
+ });
+ }
+ Galaxy.emit.debug( 'tool-form-base::_buildModel()', 'Initial tool model request failed.', response );
+ process.reject();
+ }
+ });
},
postchange : function( process, form ) {
- var self = this;
var current_state = {
tool_id : form.model.get( 'id' ),
tool_version : form.model.get( 'version' ),
@@ -73,6 +95,42 @@ define([ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-modal', 'mvc/tool/tool-form
this.$el.append( this.form.$el );
},
+ _customize: function( form ) {
+ var self = this;
+ var options = form.model.attributes;
+ // build execute button
+ var execute_btn;
+ options.buttons = {
+ execute: execute_btn = new Ui.Button({
+ icon : 'fa-check',
+ tooltip : 'Execute: ' + options.name + ' (' + options.version + ')',
+ title : 'Execute',
+ cls : 'btn btn-primary ui-clear-float',
+ wait_cls : 'btn btn-info ui-clear-float',
+ onclick : function() {
+ execute_btn.wait();
+ form.portlet.disable();
+ self.submit( options, function() {
+ execute_btn.unwait();
+ form.portlet.enable();
+ } );
+ }
+ })
+ }
+ // remap feature
+ if ( options.job_id && options.job_remap ) {
+ options.inputs.push({
+ label : 'Resume dependencies from this job',
+ name : 'rerun_remap_job_id',
+ type : 'select',
+ display : 'radio',
+ ignore : '__ignore__',
+ value : '__ignore__',
+ options : [ [ 'Yes', options.job_id ], [ 'No', '__ignore__' ] ],
+ help : 'The previous run of this tool failed and other tools were waiting for it to finish successfully. Use this option to resume those tools using the new output(s) of this tool run.'
+ });
+ }
+ },
+
/** Submit a regular job.
* @param{dict} options - Specifies tool id and version
* @param{function} callback - Called when request has completed
@@ -207,4 +265,4 @@ define([ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/ui/ui-modal', 'mvc/tool/tool-form
return {
View: View
};
-});
+});
\ No newline at end of file
diff --git a/client/galaxy/scripts/mvc/tool/tool-genomespace.js b/client/galaxy/scripts/mvc/tool/tool-genomespace.js
new file mode 100644
index 000000000000..8ca44dff9080
--- /dev/null
+++ b/client/galaxy/scripts/mvc/tool/tool-genomespace.js
@@ -0,0 +1,24 @@
+// Provides support for interacting with the GenomeSpace File Browser popup dialogue
+define([], function() {
+
+// tool form templates
+return {
+ openFileBrowser: function( options ) {
+ var GS_UI_URL = window.Galaxy.config.genomespace_ui_url;
+ var GS_UPLOAD_URL = GS_UI_URL + 'upload/loadUrlToGenomespace.html?getLocation=true';
+
+ var newWin = window.open(GS_UPLOAD_URL, "GenomeSpace File Browser", "height=360px,width=600px");
+
+ var successCallback = options['successCallback'];
+ window.addEventListener( "message", function (e) {
+ successCallback(e.data);
+ }, false);
+
+ newWin.focus();
+
+ if (options['errorCallback'] != null) newWin.setCallbackOnGSUploadError = options['errorCallback'];
+ }
+
+};
+
+});
\ No newline at end of file
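
A usage sketch for the new module. The AMD path matches the file location above; the fields read from the callback payload follow `ui-select-genomespace.js` further down, since this file does not itself document the posted message shape:

```js
define( [ 'mvc/tool/tool-genomespace' ], function( GenomespaceBrowser ) {
    GenomespaceBrowser.openFileBrowser({
        // Invoked with whatever the GenomeSpace popup posts back;
        // the select widget reads `destination` and `token` from it.
        successCallback : function( data ) {
            console.log( data.destination, data.token );
        }
    });
});
```
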
diff --git a/client/galaxy/scripts/mvc/toolshed/categories-view.js b/client/galaxy/scripts/mvc/toolshed/categories-view.js
index b914e55fe3ac..b9226f8d5c4c 100644
--- a/client/galaxy/scripts/mvc/toolshed/categories-view.js
+++ b/client/galaxy/scripts/mvc/toolshed/categories-view.js
@@ -39,7 +39,7 @@ define(['mvc/toolshed/toolshed-model', 'mvc/toolshed/util'], function(toolshed_m
var params = {term: request.term, tool_shed_url: shed_url};
$.post(base_url, params, function(data) {
console.log(data);
- result_list = toolshed_util.shedParser(data);
+ var result_list = toolshed_util.shedParser(data);
response(result_list);
});
},
diff --git a/client/galaxy/scripts/mvc/toolshed/repo-status-view.js b/client/galaxy/scripts/mvc/toolshed/repo-status-view.js
index 183de64a53d9..0d8e381e87c9 100644
--- a/client/galaxy/scripts/mvc/toolshed/repo-status-view.js
+++ b/client/galaxy/scripts/mvc/toolshed/repo-status-view.js
@@ -16,7 +16,7 @@ define(['mvc/toolshed/toolshed-model', 'mvc/toolshed/util'], function(toolshed_m
var all_done = true;
_.some(self.model.models, function(repository) {
repo_id = repository.get('id');
- repo_status = repository.get('status').toLowerCase();
+ var repo_status = repository.get('status').toLowerCase();
if (terminal_states.indexOf(repo_status) === -1) {
all_done = false;
return true;
diff --git a/client/galaxy/scripts/mvc/toolshed/repositories-view.js b/client/galaxy/scripts/mvc/toolshed/repositories-view.js
index 795adfa04715..acf7f2fa7f30 100644
--- a/client/galaxy/scripts/mvc/toolshed/repositories-view.js
+++ b/client/galaxy/scripts/mvc/toolshed/repositories-view.js
@@ -32,7 +32,7 @@ define(['mvc/toolshed/toolshed-model', 'mvc/toolshed/util'], function(toolshed_m
var base_url = Galaxy.root + 'api/tool_shed/search';
var params = {term: request.term, tool_shed_url: shed_url};
$.post(base_url, params, function(data) {
- result_list = toolshed_util.shedParser(data);
+ var result_list = toolshed_util.shedParser(data);
response(result_list);
});
},
diff --git a/client/galaxy/scripts/mvc/toolshed/repository-queue-view.js b/client/galaxy/scripts/mvc/toolshed/repository-queue-view.js
index 24193a63d2bc..509f0a869bbb 100644
--- a/client/galaxy/scripts/mvc/toolshed/repository-queue-view.js
+++ b/client/galaxy/scripts/mvc/toolshed/repository-queue-view.js
@@ -63,7 +63,7 @@ define(['mvc/toolshed/toolshed-model', 'mvc/toolshed/util'], function(toolshed_m
if (queue_key === undefined) {
queue_key = toolshed_util.queueKey(repository_metadata);
}
- repository_queue = JSON.parse(localStorage.repositories);
+ var repository_queue = JSON.parse(localStorage.repositories);
if (repository_queue.hasOwnProperty(queue_key)) {
delete repository_queue[queue_key];
localStorage.repositories = JSON.stringify(repository_queue);
@@ -82,7 +82,7 @@ define(['mvc/toolshed/toolshed-model', 'mvc/toolshed/util'], function(toolshed_m
},
loadFromQueue: function(queue_key) {
- repository_queue = JSON.parse(localStorage.repositories);
+ var repository_queue = JSON.parse(localStorage.repositories);
if (repository_queue.hasOwnProperty(queue_key)) {
return repository_queue[queue_key];
}
diff --git a/client/galaxy/scripts/mvc/toolshed/repository-view.js b/client/galaxy/scripts/mvc/toolshed/repository-view.js
index 27403c54d027..9192988f16cd 100644
--- a/client/galaxy/scripts/mvc/toolshed/repository-view.js
+++ b/client/galaxy/scripts/mvc/toolshed/repository-view.js
@@ -19,7 +19,7 @@ define(['mvc/toolshed/toolshed-model',
this.options = _.defaults(this.options || {}, this.defaults);
this.model = new toolshed_model.RepositoryCollection();
this.listenTo(this.model, 'sync', this.render);
- shed = params.tool_shed.replace(/\//g, '%2f');
+ var shed = params.tool_shed.replace(/\//g, '%2f');
this.model.url += '?tool_shed_url=' + shed + '&repository_id=' + params.repository_id;
this.model.tool_shed_url = params.tool_shed.replace(/%2f/g, '/');
this.model.tool_shed = shed;
@@ -81,7 +81,7 @@ define(['mvc/toolshed/toolshed-model',
params.tool_panel_section = JSON.stringify(that.panelSelect(params));
params.shed_tool_conf = $("select[name='shed_tool_conf']").find('option:selected').val()
params.changeset = $('#changeset').find("option:selected").val();
- url = $('#repository_installation').attr('action');
+ var url = $('#repository_installation').attr('action');
that.prepareInstall(params, url);
});
$('#queue_install').on('click', function(ev) {
@@ -107,8 +107,8 @@ define(['mvc/toolshed/toolshed-model',
that.checkInstalled(repository_metadata);
});
$('.tool_panel_section_picker').on('change', function() {
- new_value = $(this).find('option:selected').val();
- default_tps = $('#tool_panel_section_select').find('option:selected').val();
+ var new_value = $(this).find('option:selected').val();
+ var default_tps = $('#tool_panel_section_select').find('option:selected').val();
if (new_value == default_tps) {
$(this).attr('default', 'active');
}
@@ -191,8 +191,8 @@ define(['mvc/toolshed/toolshed-model',
params.new_tool_panel_section = $("#new_tool_panel_section").val();
}
$('.tool_panel_section_picker').each(function() {
- element_name = $(this).attr('name');
- tool_guid = $(this).attr('data-toolguid');
+ var element_name = $(this).attr('name');
+ var tool_guid = $(this).attr('data-toolguid');
if (element_name === 'tool_panel_section_id') {
tool_panel_section[tool_guid] = { tool_panel_section: $(this).find("option:selected").val(), action: 'append' }
}
@@ -244,13 +244,13 @@ define(['mvc/toolshed/toolshed-model',
prepareInstall: function(params, api_url) {
var that = this;
$.post(api_url, params, function(data) {
- iri_parameters = JSON.parse(data);
+ var iri_parameters = JSON.parse(data);
that.doInstall(iri_parameters);
});
},
doInstall: function(params) {
- controller_url = Galaxy.root + 'admin_toolshed/manage_repositories';
+ var controller_url = Galaxy.root + 'admin_toolshed/manage_repositories';
var repositories = params.repositories;
var new_route = 'status/r/' + repositories.join('|');
$.post(controller_url, params, function(data) {
diff --git a/client/galaxy/scripts/mvc/toolshed/util.js b/client/galaxy/scripts/mvc/toolshed/util.js
index ed052ecb8e81..143309f6df47 100644
--- a/client/galaxy/scripts/mvc/toolshed/util.js
+++ b/client/galaxy/scripts/mvc/toolshed/util.js
@@ -4,7 +4,7 @@ define([], function() {
var shed_url = this.shed_url;
var base_url = Galaxy.root + 'api/tool_shed/search';
$.get(base_url, {term: request.term, tool_shed_url: shed_url}, function(data) {
- result_list = that.shedParser(data);
+ var result_list = that.shedParser(data);
response(result_list);
});
@@ -16,7 +16,7 @@ define([], function() {
$.each(hits, function(hit) {
var record = hits[hit];
var label = record.repository.name + ' by ' + record.repository.repo_owner_username + ': ' + record.repository.description;
- result = {value: record.repository.id, label: label};
+ var result = {value: record.repository.id, label: label};
results.push(result);
});
return results;
@@ -37,8 +37,8 @@ define([], function() {
var queueLength = function() {
if (localStorage.hasOwnProperty('repositories')) {
- repo_queue = JSON.parse(localStorage.repositories);
- queue_length = Object.keys(repo_queue).length;
+ var repo_queue = JSON.parse(localStorage.repositories);
+ var queue_length = Object.keys(repo_queue).length;
return queue_length;
}
else {
diff --git a/client/galaxy/scripts/mvc/toolshed/workflows-view.js b/client/galaxy/scripts/mvc/toolshed/workflows-view.js
index bb47d9ba65a3..45ea8faacb9d 100644
--- a/client/galaxy/scripts/mvc/toolshed/workflows-view.js
+++ b/client/galaxy/scripts/mvc/toolshed/workflows-view.js
@@ -24,7 +24,7 @@ define(['mvc/toolshed/toolshed-model', 'mvc/toolshed/util'], function(toolshed_m
},
bindEvents: function() {
- var that = this;
+ var that = this, repository_id;
$('.show_wf_repo').on('click', function() {
var tool_ids = $(this).attr('data-toolids');
var toolshed = $(this).attr('data-shed');
diff --git a/client/galaxy/scripts/mvc/ui/ui-buttons.js b/client/galaxy/scripts/mvc/ui/ui-buttons.js
index 03c053fb39c1..220b4c8c9c4b 100644
--- a/client/galaxy/scripts/mvc/ui/ui-buttons.js
+++ b/client/galaxy/scripts/mvc/ui/ui-buttons.js
@@ -6,7 +6,6 @@ define( [ 'utils/utils' ], function( Utils ) {
this.model = options && options.model || new Backbone.Model({
id : Utils.uid(),
title : '',
- floating : 'right',
icon : '',
cls : 'btn btn-default',
wait : false,
@@ -31,7 +30,6 @@ define( [ 'utils/utils' ], function( Utils ) {
.addClass( options.disabled && 'disabled' )
.attr( 'id', options.id )
.attr( 'disabled', options.disabled )
- .css( 'float', options.floating )
.off( 'click' ).on( 'click' , function() {
$( '.tooltip' ).hide();
options.onclick && !self.disabled && options.onclick();
@@ -174,7 +172,6 @@ define( [ 'utils/utils' ], function( Utils ) {
this.model = options && options.model || new Backbone.Model({
id : Utils.uid(),
title : '',
- floating : 'right',
icon : '',
cls : 'ui-button-icon',
disabled : false
@@ -193,7 +190,6 @@ define( [ 'utils/utils' ], function( Utils ) {
.addClass( options.disabled && 'disabled' )
.attr( 'disabled', options.disabled )
.attr( 'id', options.id )
- .css( 'float', options.floating )
.off( 'click' ).on( 'click', function() {
$( '.tooltip' ).hide();
!options.disabled && options.onclick && options.onclick();
@@ -212,7 +208,6 @@ define( [ 'utils/utils' ], function( Utils ) {
this.model = options && options.model || new Backbone.Model({
id : '',
title : '',
- floating : 'right',
pull : 'right',
icon : null,
onclick : null,
@@ -239,8 +234,7 @@ define( [ 'utils/utils' ], function( Utils ) {
.addClass( 'dropdown' )
.addClass( options.cls )
.attr( 'id', options.id )
- .css( { float : options.floating,
- display : options.visible && this.collection.where( { visible: true } ).length > 0 ? 'block' : 'none' } );
+ .css( { display : options.visible && this.collection.where( { visible: true } ).length > 0 ? 'block' : 'none' } );
this.$root.addClass( 'root button dropdown-toggle' )
.attr( 'data-toggle', 'dropdown' )
.tooltip( { title: options.tooltip, placement: 'bottom' } )
diff --git a/client/galaxy/scripts/mvc/ui/ui-drilldown.js b/client/galaxy/scripts/mvc/ui/ui-drilldown.js
index 71627f2e6517..88f56957b5b8 100644
--- a/client/galaxy/scripts/mvc/ui/ui-drilldown.js
+++ b/client/galaxy/scripts/mvc/ui/ui-drilldown.js
@@ -53,7 +53,7 @@ var View = Options.BaseIcons.extend({
// recursive function which iterates through options
function iterate ( $tmpl, options, header ) {
header = header || [];
- for ( i in options ) {
+ for (var i in options ) {
var level = options[ i ];
var has_options = level.options && level.options.length > 0;
var new_header = header.slice( 0 );
diff --git a/client/galaxy/scripts/mvc/ui/ui-list.js b/client/galaxy/scripts/mvc/ui/ui-list.js
index bdc7d080d291..368d5ce170c0 100644
--- a/client/galaxy/scripts/mvc/ui/ui-list.js
+++ b/client/galaxy/scripts/mvc/ui/ui-list.js
@@ -25,7 +25,6 @@ var View = Backbone.View.extend({
// create insert new list element button
this.button = new Ui.ButtonIcon({
icon : 'fa fa-sign-in',
- floating : 'left',
tooltip : 'Insert new ' + this.name,
onclick : function() {
self.add({
diff --git a/client/galaxy/scripts/mvc/ui/ui-popover.js b/client/galaxy/scripts/mvc/ui/ui-popover.js
index 3227a1c4408d..7d8f013b4196 100644
--- a/client/galaxy/scripts/mvc/ui/ui-popover.js
+++ b/client/galaxy/scripts/mvc/ui/ui-popover.js
@@ -116,7 +116,8 @@ var View = Backbone.View.extend({
var container_position = $container.position();
// get position
- var top = left = 0;
+ var top, left;
+ top = left = 0;
if ([ 'top', 'bottom' ].indexOf( placement ) != -1) {
left = container_position.left - width + ( container_width + width ) / 2;
switch ( placement ) {
diff --git a/client/galaxy/scripts/mvc/ui/ui-select-content.js b/client/galaxy/scripts/mvc/ui/ui-select-content.js
index 274fbeb774fa..37844e9110ba 100644
--- a/client/galaxy/scripts/mvc/ui/ui-select-content.js
+++ b/client/galaxy/scripts/mvc/ui/ui-select-content.js
@@ -175,7 +175,7 @@ var View = Backbone.View.extend({
optional : self.model.get( 'optional' ),
multiple : c.multiple,
searchable : !c.multiple || ( data && data[ c.src ] && data[ c.src ].length > self.model.get( 'pagelimit' ) ),
- selectall : false,
+ individual : true,
error_text : 'No ' + ( extensions ? extensions + ' ' : '' ) + ( src_labels[ c.src ] || 'content' ) + ' available.',
onchange : function() {
self.trigger( 'change' );
diff --git a/client/galaxy/scripts/mvc/ui/ui-select-default.js b/client/galaxy/scripts/mvc/ui/ui-select-default.js
index 4d0671344a52..4d14bc6936a1 100644
--- a/client/galaxy/scripts/mvc/ui/ui-select-default.js
+++ b/client/galaxy/scripts/mvc/ui/ui-select-default.js
@@ -20,7 +20,7 @@ var View = Backbone.View.extend({
disabled : false,
onchange : function(){},
value : null,
- selectall : true,
+ individual : false,
pagesize : 20
}).set( options );
this.on( 'change', function() { self.model.get( 'onchange' ) && self.model.get( 'onchange' )( self.value() ) } );
@@ -94,7 +94,7 @@ var View = Backbone.View.extend({
});
}
this.all_button = null;
- if ( this.model.get( 'multiple' ) && this.model.get( 'selectall' ) ) {
+ if ( this.model.get( 'multiple' ) && !this.model.get( 'individual' ) ) {
this.all_button = new Buttons.ButtonCheck({
onclick: function() {
var new_value = [];
@@ -315,7 +315,7 @@ var View = Backbone.View.extend({
}
if ( this.model.get( 'searchable' ) ) {
if ( $.isArray( new_value ) ) {
- val = [];
+ var val = [];
_.each( new_value, function( v ) {
var d = _.findWhere( self.data2, { id: v } );
d && val.push( d );
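
Note the inverted polarity of the renamed option: the old default `selectall : true` becomes `individual : false`, so existing multi-selects keep their all/none toggle untouched, while callers that used to pass `selectall : false` (as `ui-select-content.js` above did) now opt out with `individual : true`. A sketch with placeholder option data:

```js
var data = [ { id: 'a', text: 'Dataset A' }, { id: 'b', text: 'Dataset B' } ];

// Default multi-select: the select-all button is rendered.
var withToggle = new View({ multiple: true, data: data });

// Per-instance opt-out: options must be picked individually.
var noToggle = new View({ multiple: true, individual: true, data: data });
```
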
diff --git a/client/galaxy/scripts/mvc/ui/ui-select-genomespace.js b/client/galaxy/scripts/mvc/ui/ui-select-genomespace.js
new file mode 100644
index 000000000000..61e382f67829
--- /dev/null
+++ b/client/galaxy/scripts/mvc/ui/ui-select-genomespace.js
@@ -0,0 +1,96 @@
+// dependencies
+define(['utils/utils', 'mvc/ui/ui-misc', 'mvc/tool/tool-genomespace'],
+ function(Utils, Ui, GenomespaceBrowser) {
+
+/**
+ * GenomeSpace file selector
+ */
+var View = Backbone.View.extend({
+ // initialize
+ initialize : function(options) {
+
+ // link this
+ var self = this;
+
+ // create insert new list element button
+ this.browse_button = new Ui.ButtonIcon({
+ title : 'Browse',
+ icon : 'fa fa-sign-in',
+ tooltip : 'Browse GenomeSpace',
+ onclick : function() {
+ self.browseGenomeSpace();
+ }
+ });
+
+ // create genomespace filepath textbox
+ this.filename_textbox = new Ui.Input();
+
+ // create genomespace token textbox
+ this.token_textbox = new Ui.Input({
+ type : 'password'
+ });
+
+ // create elements
+ this.setElement(this._template(options));
+ this.$('.ui-gs-browse-button').append(this.browse_button.$el);
+ this.$('.ui-gs-filename-textbox').append(this.filename_textbox.$el);
+ this.$('.ui-gs-token-textbox').append(this.token_textbox.$el);
+ },
+
+ /** Browse GenomeSpace */
+ browseGenomeSpace: function(options) {
+ var self = this;
+ GenomespaceBrowser.openFileBrowser({
+ successCallback: function(data) {
+ self.value(data.destination + "^" + data.token);
+ }
+ });
+ },
+
+ /** Main Template */
+ _template: function(options) {
+ return '<div class="ui-select-genomespace">' +
+ '<div class="ui-gs-browse-field">' +
+ '<span class="ui-gs-browse-button" />' +
+ '<span class="ui-gs-filename-textbox" />' +
+ '</div>' +
+ '<div class="ui-gs-token-field">' +
+ '<div class="ui-gs-token-label">Token</div>' +
+ '<span class="ui-gs-token-textbox" />' +
+ '</div>' +
+ '</div>';
+ },
+
+ /** Return/Set currently selected genomespace filename/token */
+ value : function (new_value) {
+ // check if new_value is defined
+ if (new_value !== undefined) {
+ this._setValue(new_value);
+ }
+ else {
+ return this._getValue();
+ }
+ },
+
+ // get value
+ _getValue: function() {
+ return this.filename_textbox.value() +
+ "^" + this.token_textbox.value();
+ },
+
+ // set value
+ _setValue: function(new_value) {
+ if (new_value) {
+ values = new_value.split("^");
+ this.filename_textbox.value(values[0]);
+ this.token_textbox.value(values[1]);
+ }
+ },
+
+});
+
+return {
+ View: View
+}
+
+});
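
As with other Galaxy UI elements, `value()` doubles as getter and setter; here it round-trips the two textboxes through a single `'^'`-delimited string, the same encoding `browseGenomeSpace` builds from the popup's response. A usage sketch (the filename and token are made up):

```js
var gsField = new View({});

// Setter: splits on '^' into the filename and token textboxes.
gsField.value( 'genomespace/datamanager/example.txt^s3cr3t-t0k3n' );

// Getter: joins them back into 'filename^token'.
gsField.value();  // -> 'genomespace/datamanager/example.txt^s3cr3t-t0k3n'
```
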
diff --git a/client/galaxy/scripts/mvc/upload/upload-ftp.js b/client/galaxy/scripts/mvc/upload/upload-ftp.js
index 4e36750f55f5..b3547f82eee7 100644
--- a/client/galaxy/scripts/mvc/upload/upload-ftp.js
+++ b/client/galaxy/scripts/mvc/upload/upload-ftp.js
@@ -27,7 +27,7 @@ define( [ 'utils/utils' ], function( Utils ) {
if ( ftp_files && ftp_files.length > 0 ) {
this.$( '.upload-ftp-content' ).html( $( this._templateTable() ) );
var size = 0;
- for ( index in ftp_files ) {
+ for (var index in ftp_files ) {
this.rows.push( this._add( ftp_files[ index ] ) );
size += ftp_files[ index ].size;
}
@@ -39,7 +39,7 @@ define( [ 'utils/utils' ], function( Utils ) {
this.$select_all = this.$( '.upload-selectall' ).addClass( this.options.class_add );
this.$select_all.on( 'click', function() {
var add = self.$select_all.hasClass( self.options.class_add );
- for ( index in ftp_files ) {
+ for (var index in ftp_files ) {
var ftp_file = ftp_files[ index ];
var model_index = self._find( ftp_file );
if( !model_index && add || model_index && !add ) {
diff --git a/client/galaxy/scripts/mvc/upload/upload-view.js b/client/galaxy/scripts/mvc/upload/upload-view.js
index e6a8a2dc839c..38e6b8329d17 100644
--- a/client/galaxy/scripts/mvc/upload/upload-view.js
+++ b/client/galaxy/scripts/mvc/upload/upload-view.js
@@ -48,7 +48,7 @@ function( Utils, Modal, Tabs, UploadButton, UploadViewDefault, UploadViewComposi
Utils.get({
url : Galaxy.root + 'api/datatypes?extension_only=False',
success : function( datatypes ) {
- for ( key in datatypes ) {
+ for (var key in datatypes ) {
self.list_extensions.push({
id : datatypes[ key ].extension,
text : datatypes[ key ].extension,
@@ -72,7 +72,7 @@ function( Utils, Modal, Tabs, UploadButton, UploadViewDefault, UploadViewComposi
Utils.get({
url : Galaxy.root + 'api/genomes',
success : function( genomes ) {
- for ( key in genomes ) {
+ for (var key in genomes ) {
self.list_genomes.push({
id : genomes[ key ][ 1 ],
text : genomes[ key ][ 0 ]
diff --git a/client/galaxy/scripts/mvc/user/user-custom-builds.js b/client/galaxy/scripts/mvc/user/user-custom-builds.js
index b9b8438c882e..b3b82e8b2873 100644
--- a/client/galaxy/scripts/mvc/user/user-custom-builds.js
+++ b/client/galaxy/scripts/mvc/user/user-custom-builds.js
@@ -126,8 +126,7 @@ define( [ 'utils/utils', 'mvc/ui/ui-misc', 'mvc/form/form-view', 'mvc/ui/ui-tabl
icon : 'fa-save',
tooltip : 'Create new Build',
title : 'Save',
- cls : 'ui-button btn btn-primary',
- floating : 'clear',
+ cls : 'btn btn-primary ui-clear-float',
onclick : function() {
var data = form.data.create();
if ( !data.id || !data.name ) {
diff --git a/client/galaxy/scripts/mvc/user/user-preferences.js b/client/galaxy/scripts/mvc/user/user-preferences.js
index 06a504e48cd0..62edb56cd431 100644
--- a/client/galaxy/scripts/mvc/user/user-preferences.js
+++ b/client/galaxy/scripts/mvc/user/user-preferences.js
@@ -1,5 +1,5 @@
/** User Preferences view */
-define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
+define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc', 'utils/query-string-parsing' ], function( Form, Ui, QueryStringParsing ) {
/** Contains descriptive dictionaries describing user forms */
var Model = Backbone.Model.extend({
@@ -12,7 +12,8 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
title : 'Manage information',
description : 'Edit your email, addresses and custom parameters or change your username.',
url : 'api/users/' + options.user_id + '/information/inputs',
- icon : 'fa-user'
+ icon : 'fa-user',
+ redirect : 'user'
},
'password': {
title : 'Change password',
@@ -20,19 +21,22 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
icon : 'fa-unlock-alt',
url : 'api/users/' + options.user_id + '/password/inputs',
submit_title : 'Save password',
+ redirect : 'user'
},
'communication': {
title : 'Change communication settings',
description : 'Enable or disable the communication feature to chat with other users.',
url : 'api/users/' + options.user_id + '/communication/inputs',
- icon : 'fa-comments-o'
+ icon : 'fa-comments-o',
+ redirect : 'user'
},
'permissions': {
title : 'Set dataset permissions for new histories',
description : 'Grant others default access to newly created histories. Changes made here will only affect histories created after these settings have been stored.',
url : 'api/users/' + options.user_id + '/permissions/inputs',
icon : 'fa-users',
- submit_title : 'Save permissions'
+ submit_title : 'Save permissions',
+ redirect : 'user'
},
'api_key': {
title : 'Manage API key',
@@ -47,7 +51,8 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
description : 'Customize your Toolbox by displaying or omitting sets of Tools.',
url : 'api/users/' + options.user_id + '/toolbox_filters/inputs',
icon : 'fa-filter',
- submit_title : 'Save filters'
+ submit_title : 'Save filters',
+ redirect : 'user'
},
'openids': {
title : 'Manage OpenIDs',
@@ -65,14 +70,6 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
window.location.href = Galaxy.root + 'custom_builds';
}
},
- 'configure_menu': {
- title : 'Configure workflow menu',
- description : 'Configure your workflow items which appear in the Tool panel.',
- icon : 'fa-cog',
- onclick : function() {
- window.location.href = Galaxy.root + 'workflow/configure_menu';
- }
- },
'logout': {
title : 'Sign out',
description : 'Click here to sign out of all sessions.',
@@ -83,7 +80,7 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
body : 'Do you want to continue and sign out of all active sessions?',
buttons : {
'Cancel' : function() { Galaxy.modal.hide(); },
- 'Sign out' : function() { window.location.href = Galaxy.root + 'user/logout'; }
+ 'Sign out' : function() { window.location.href = Galaxy.root + 'user/logout?session_csrf_token=' + Galaxy.session_csrf_token; }
}
});
}
@@ -97,7 +94,6 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
initialize: function() {
this.model = new Model();
- this.message = new Ui.Message();
this.setElement( '<div/>' );
this.render();
},
@@ -107,10 +103,14 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
var config = Galaxy.config;
$.getJSON( Galaxy.root + 'api/users/' + Galaxy.user.id, function( data ) {
self.$preferences = $( '<div/>' ).addClass( 'ui-panel' )
- .append( self.message.$el )
.append( $( '<h2/>' ).append( 'User preferences' ) )
.append( $( '<p/>' ).append( 'You are logged in as <strong>' + _.escape( data.email ) + '</strong>.' ) )
.append( self.$table = $( '<table/>' ).addClass( 'ui-panel-table' ) );
+ var message = QueryStringParsing.get( 'message' );
+ var status = QueryStringParsing.get( 'status' );
+ if( message && status ) {
+ self.$preferences.prepend( ( new Ui.Message( { message: message, status: status } ) ).$el );
+ }
if( !config.use_remote_user ) {
self._addLink( 'information' );
self._addLink( 'password' );
@@ -127,8 +127,9 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
if( config.enable_openid && !config.use_remote_user ) {
self._addLink( 'openids' );
}
- self._addLink( 'configure_menu' );
- self._addLink( 'logout' );
+ if(Galaxy.session_csrf_token) {
+ self._addLink( 'logout' );
+ }
self.$preferences.append( self._templateFooter( data ) );
self.$el.empty().append( self.$preferences );
});
@@ -167,66 +168,8 @@ define( [ 'mvc/form/form-view', 'mvc/ui/ui-misc' ], function( Form, Ui ) {
}
});
- /** View of individual user forms */
- var Forms = Backbone.View.extend({
-
- initialize: function( options ) {
- this.model = new Model( options );
- this.page = this.model.get( options.form_id );
- this.setElement( '<div/>' );
- this.render();
- },
-
- render: function() {
- var self = this;
- $.ajax({
- url : Galaxy.root + this.page.url,
- type : 'GET'
- }).done( function( response ) {
- var options = $.extend( {}, self.page, response );
- var form = new Form({
- title : options.title,
- icon : options.icon,
- inputs : options.inputs,
- operations: {
- 'submit': new Ui.ButtonIcon({
- tooltip : options.submit_tooltip,
- title : options.submit_title || 'Save settings',
- icon : options.submit_icon || 'fa-save',
- onclick : function() { self._submit( form, options ) }
- })
- }
- });
- self.$el.empty().append( form.$el );
- }).fail( function( response ) {
- self.$el.empty().append( new Ui.Message({
- message : 'Failed to load resource ' + self.page.url + '.',
- status : 'danger',
- persistent : true
- }).$el );
- });
- },
-
- _submit: function( form, options ) {
- var self = this;
- $.ajax( {
- url : Galaxy.root + options.url,
- data : JSON.stringify( form.data.create() ),
- type : 'PUT',
- contentType : 'application/json'
- }).done( function( response ) {
- form.data.matchModel( response, function ( input, input_id ) {
- form.field_list[ input_id ].value( input.value );
- });
- form.message.update( { message: response.message, status: 'success' } );
- }).fail( function( response ) {
- form.message.update( { message: response.responseJSON.err_msg, status: 'danger' } );
- });
- }
- });
-
return {
View : View,
- Forms : Forms
+ Model : Model
};
});
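
With the embedded `Forms` view deleted, each settings form now lives on its own page and, via the new `redirect : 'user'` entries, returns here with the outcome encoded in the query string, which `render()` turns back into a banner. The round trip, sketched with an illustrative URL:

```js
// A successful save redirects to something like:
//   Galaxy.root + 'user?message=Saved&status=success'
// and on load the preferences page re-materializes the banner:
var message = QueryStringParsing.get( 'message' );  // 'Saved'
var status  = QueryStringParsing.get( 'status' );   // 'success'
if ( message && status ) {
    self.$preferences.prepend( ( new Ui.Message({ message: message, status: status }) ).$el );
}
```
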
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-configure-menu.js b/client/galaxy/scripts/mvc/workflow/workflow-configure-menu.js
deleted file mode 100644
index 4e262aa56f90..000000000000
--- a/client/galaxy/scripts/mvc/workflow/workflow-configure-menu.js
+++ /dev/null
@@ -1,153 +0,0 @@
-/** Configure Workflow Menu View */
-define( [], function() {
- var View = Backbone.View.extend({
-
- initialize: function( options ) {
- this.setElement( '<div/>' );
- this.render();
- },
-
- render: function() {
- var self = this;
- $.getJSON( Galaxy.root + 'api/workflows/menu/', function( response ) {
- var workflows = response.workflows,
- ids_in_menu = response.ids_in_menu,
- $el_config_worflow = null;
-
- // Add configure workflow header
- self.$el.empty().append( self._templateConfigWorkflowHeader() );
- $el_config_worflow = self.$el.find( '.configure-workflows' );
- $el_config_worflow.append( self._templateActionButtons() );
- if( workflows.length > 0 ) {
- $el_config_worflow.append( self._templateConfigureWorkflow( self, workflows, ids_in_menu ) );
- self.save_workflow_menu( self );
- self.make_checked( self, ids_in_menu );
- self.register_check_uncheck_all( self );
- }
- else {
- $el_config_worflow.append( self._templateNoWorkflow() );
- }
- });
- },
-
- /** Register check and uncheck all callbacks*/
- register_check_uncheck_all: function( self ) {
- var $el_check_all = self.$el.find( '.check-all-wf' ),
- $el_uncheck_all = self.$el.find( '.uncheck-all-wf' );
-
- $el_check_all.click(function( e ) {
- self.check_uncheck_all( self, true );
- });
- $el_uncheck_all.click(function( e ) {
- self.check_uncheck_all( self, false );
- });
- },
-
- /** Check or uncheck all workflows */
- check_uncheck_all: function( self, checked ) {
- $.each(self.$el.find( '.wf-config-item' ), function() {
- var wf_checkbox = $( this )[0];
- wf_checkbox.checked = checked;
- });
- },
-
- /** Make the worflows as checked if present in the menu */
- make_checked: function( self, ids_in_menu ) {
- $.each(self.$el.find( '.wf-config-item' ), function() {
- var wf_checkbox = $( this )[0];
- _.each( ids_in_menu, function( id ) {
- if ( parseInt( wf_checkbox.value ) === id ) {
- wf_checkbox.checked = true;
- }
- });
- });
- },
-
- /** Save the changes for workflow menu */
- save_workflow_menu: function( self ) {
- var $el_save_workflow_menu = self.$el.find( '.wf-save-menu' );
- $el_save_workflow_menu.click( function( e ) {
- var ids = [];
- $.each(self.$el.find( '.wf-config-item' ), function() {
- var wf_checkbox = $( this )[0];
- if( wf_checkbox.checked || wf_checkbox.checked === 'true' ) {
- ids.push( parseInt( wf_checkbox.value ) );
- }
- });
- $.ajax({
- type: 'PUT',
- url: Galaxy.root + 'api/workflows/menu/',
- data: JSON.stringify( { 'workflow_ids': ids } ),
- contentType : 'application/json'
- }).done( function( response ) {
- window.location = Galaxy.root + 'user';
- });
- });
- },
-
- /** Template for actions buttons */
- _templateActionButtons: function() {
- return '
';
- },
-
- /** Template for configure workflow table */
- _templateConfigureWorkflow: function( self, workflows, ids_in_menu ) {
- var tableHtml = "", trHtml = "";
- tableHtml = tableHtml + '<table class="grid"><thead>' +
- '<tr><th>Name</th><th>Owner</th><th>Steps</th><th>Show in menu</th></tr></thead><tbody>';
- _.each( workflows, function( wf ) {
- trHtml = trHtml + '<tr>' +
- '<td>' + _.escape( wf.name ) + '</td>' +
- '<td>' + ( wf.owner === Galaxy.user.attributes.username ? "You" : wf.owner ) + '</td>' +
- '<td>' + wf.number_of_steps + '</td>' +
- '<td>' + self._templateInputCheckbox( self, wf, ids_in_menu ) + '</td>' +
- '</tr>';
- });
- tableHtml = tableHtml + trHtml + '</tbody></table>';
- tableHtml = tableHtml + '<div>' +
- '<a href="' + Galaxy.root + 'user">' +
- 'Back to User Preferences</a>' +
- '</div>';
- return tableHtml;
- },
-
- /** Template for no workflow */
- _templateNoWorkflow: function() {
- return '<div>You do not have any accessible workflows.</div>';
- },
-
- /** Template for checkboxes */
- _templateInputCheckbox: function( self, wf ) {
- return '<input type="checkbox" class="wf-config-item" value="' + wf.id + '" />';
- },
-
- /** Template for main config workflow menu */
- _templateConfigWorkflowHeader: function() {
- return '<div class="page-container">' +
- '<div class="configure-workflows">' +
- '<h2>Configure workflow menu</h2>' +
- '</div>' +
- '</div>';
- }
- });
-
- return {
- View : View
- };
-});
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-forms.js b/client/galaxy/scripts/mvc/workflow/workflow-forms.js
index 1aed0dd189a9..7f70a4087a35 100644
--- a/client/galaxy/scripts/mvc/workflow/workflow-forms.js
+++ b/client/galaxy/scripts/mvc/workflow/workflow-forms.js
@@ -3,119 +3,198 @@ define( [ 'utils/utils', 'mvc/form/form-view', 'mvc/tool/tool-form-base' ], func
/** Default form wrapper for non-tool modules in the workflow editor. */
var Default = Backbone.View.extend({
initialize: function( options ) {
- this.form = new Form( options );
+ var self = this;
+ var node = options.node;
+ this.form = new Form( Utils.merge( options, {
+ onchange: function() {
+ Utils.request({
+ type : 'POST',
+ url : Galaxy.root + 'api/workflows/build_module',
+ data : {
+ id : node.id,
+ type : node.type,
+ content_id : node.content_id,
+ inputs : self.form.data.create()
+ },
+ success : function( data ) {
+ node.update_field_data( data );
+ }
+ } );
+ }
+ } ) );
+ _addLabelAnnotation( this.form );
+ this.form.render();
}
});
/** Tool form wrapper for the workflow editor. */
var Tool = Backbone.View.extend({
initialize: function( options ) {
- var self = this;
- this.workflow = options.workflow;
- this.node = options.node;
- if ( this.node ) {
- this.post_job_actions = this.node.post_job_actions || {};
- Utils.deepeach( options.inputs, function( input ) {
- if ( input.type ) {
- if ( [ 'data', 'data_collection' ].indexOf( input.type ) != -1 ) {
- input.type = 'hidden';
- input.info = 'Data input \'' + input.name + '\' (' + Utils.textify( input.extensions ) + ')';
- input.value = { '__class__': 'RuntimeValue' };
- } else if ( !input.fixed ) {
- input.collapsible_value = { '__class__': 'RuntimeValue' };
- input.is_workflow = ( input.options && input.options.length == 0 ) ||
- ( [ 'integer', 'float' ].indexOf( input.type ) != -1 );
- }
+ var self = this;
+ var node = options.node;
+ this.form = new ToolFormBase( Utils.merge( options, {
+ text_enable : 'Set in Advance',
+ text_disable : 'Set at Runtime',
+ narrow : true,
+ initial_errors : true,
+ cls : 'ui-portlet-narrow',
+ initialmodel : function( process, form ) {
+ self._customize( form );
+ process.resolve();
+ },
+ buildmodel : function( process, form ) {
+ form.model.get( 'postchange' )( process, form );
+ },
+ postchange : function( process, form ) {
+ var options = form.model.attributes;
+ var current_state = {
+ tool_id : options.id,
+ tool_version : options.version,
+ type : 'tool',
+ inputs : $.extend( true, {}, form.data.create() )
}
- });
- Utils.deepeach( options.inputs, function( input ) {
- input.type == 'conditional' && ( input.test_param.collapsible_value = undefined );
- });
- this._makeSections( options );
- this.form = new ToolFormBase( Utils.merge( options, {
- text_enable : 'Set in Advance',
- text_disable : 'Set at Runtime',
- narrow : true,
- initial_errors : true,
- sustain_version : true,
- cls : 'ui-portlet-narrow',
- postchange : function( process, form ) {
- var options = form.model.attributes;
- var current_state = {
- tool_id : options.id,
- tool_version : options.version,
- type : 'tool',
- inputs : $.extend( true, {}, form.data.create() )
+ Galaxy.emit.debug( 'tool-form-workflow::postchange()', 'Sending current state.', current_state );
+ Utils.request({
+ type : 'POST',
+ url : Galaxy.root + 'api/workflows/build_module',
+ data : current_state,
+ success : function( data ) {
+ form.model.set( data.config_form );
+ self._customize( form );
+ form.update( data.config_form );
+ form.errors( data.config_form );
+ // This hasn't modified the workflow, it has just returned
+ // the module information the tool uses to update the
+ // workflow state stored on the client. The user needs to
+ // save for this to take effect.
+ node.update_field_data( data );
+ Galaxy.emit.debug( 'tool-form-workflow::postchange()', 'Received new model.', data );
+ process.resolve();
+ },
+ error : function( response ) {
+ Galaxy.emit.debug( 'tool-form-workflow::postchange()', 'Refresh request failed.', response );
+ process.reject();
}
- Galaxy.emit.debug( 'tool-form-workflow::postchange()', 'Sending current state.', current_state );
- Utils.request({
- type : 'POST',
- url : Galaxy.root + 'api/workflows/build_module',
- data : current_state,
- success : function( data ) {
- form.update( data.config_form );
- form.errors( data.config_form );
- // This hasn't modified the workflow, just returned
- // module information for the tool to update the workflow
- // state stored on the client with. User needs to save
- // for this to take effect.
- self.node.update_field_data( data );
- Galaxy.emit.debug( 'tool-form-workflow::postchange()', 'Received new model.', data );
- process.resolve();
- },
- error : function( response ) {
- Galaxy.emit.debug( 'tool-form-workflow::postchange()', 'Refresh request failed.', response );
- process.reject();
- }
- });
- },
- }));
- } else {
- Galaxy.emit.debug('tool-form-workflow::initialize()', 'Node not found in workflow.');
- }
+ });
+ }
+ }));
},
- /** Builds all sub sections */
- _makeSections: function( options ){
- var inputs = options.inputs;
- var datatypes = options.datatypes;
- var output_id = this.node.output_terminals && Object.keys( this.node.output_terminals )[ 0 ];
- if ( output_id ) {
- inputs.push({
- name : 'pja__' + output_id + '__EmailAction',
- label : 'Email notification',
- type : 'boolean',
- value : String( Boolean( this.post_job_actions[ 'EmailAction' + output_id ] ) ),
- ignore : 'false',
- help : 'An email notification will be sent when the job has completed.',
- payload : {
- 'host' : window.location.host
+ _customize: function( form ) {
+ var options = form.model.attributes;
+ Utils.deepeach( options.inputs, function( input ) {
+ if ( input.type ) {
+ if ( [ 'data', 'data_collection' ].indexOf( input.type ) != -1 ) {
+ input.type = 'hidden';
+ input.info = 'Data input \'' + input.name + '\' (' + Utils.textify( input.extensions ) + ')';
+ input.value = { '__class__': 'RuntimeValue' };
+ } else if ( !input.fixed ) {
+ input.collapsible_value = { '__class__': 'RuntimeValue' };
+ input.is_workflow = ( input.options && input.options.length == 0 ) ||
+ ( [ 'integer', 'float' ].indexOf( input.type ) != -1 );
+ }
+ }
+ });
+ Utils.deepeach( options.inputs, function( input ) {
+ input.type == 'conditional' && ( input.test_param.collapsible_value = undefined );
+ });
+ _addSections( form );
+ _addLabelAnnotation( form );
+ }
+ });
+
+ /** Augments the module form definition by adding label and annotation fields */
+ function _addLabelAnnotation ( form ) {
+ var options = form.model.attributes;
+ var workflow = options.workflow;
+ var node = options.node;
+ options.inputs.unshift({
+ type : 'text',
+ name : '__annotation',
+ label : 'Annotation',
+ fixed : true,
+ value : node.annotation,
+ area : true,
+ help : 'Add an annotation or notes to this step. Annotations are available when a workflow is viewed.'
+ });
+ options.inputs.unshift({
+ type : 'text',
+ name : '__label',
+ label : 'Label',
+ value : node.label,
+ help : 'Add a step label.',
+ fixed : true,
+ onchange: function( new_label ) {
+ var duplicate = false;
+ for ( var i in workflow.nodes ) {
+ var n = workflow.nodes[ i ];
+ if ( n.label && n.label == new_label && n.id != node.id ) {
+ duplicate = true;
+ break;
}
- });
- inputs.push({
- name : 'pja__' + output_id + '__DeleteIntermediatesAction',
- label : 'Output cleanup',
- type : 'boolean',
- value : String( Boolean( this.post_job_actions[ 'DeleteIntermediatesAction' + output_id ] ) ),
- ignore : 'false',
- help : 'Upon completion of this step, delete non-starred outputs from completed workflow steps if they are no longer required as inputs.'
- });
- for ( var i in this.node.output_terminals ) {
- inputs.push( this._makeSection( i, datatypes ) );
}
+ var input_id = form.data.match( '__label' );
+ var input_element = form.element_list[ input_id ];
+ input_element.model.set( 'error_text', duplicate && 'Duplicate label. Please fix this before saving the workflow.' );
+ form.trigger( 'change' );
}
- },
+ });
+ }
+
+ /** Builds all sub sections */
+ function _addSections( form ) {
+ var options = form.model.attributes;
+ var inputs = options.inputs;
+ var datatypes = options.datatypes;
+ var node = options.node;
+ var workflow = options.workflow;
+ var post_job_actions = node.post_job_actions;
+ var output_id = node.output_terminals && Object.keys( node.output_terminals )[ 0 ];
+
+ /** Visit input nodes and enrich by name/value pairs from server data */
+ function visit( head, head_list ) {
+ head_list = head_list || [];
+ head_list.push( head );
+ for ( var i in head.inputs ) {
+ var input = head.inputs[ i ];
+ var action = input.action;
+ if ( action ) {
+ input.name = 'pja__' + output_id + '__' + input.action;
+ if ( input.pja_arg ) {
+ input.name += '__' + input.pja_arg;
+ }
+ if ( input.payload ) {
+ for ( var p_id in input.payload ) {
+ var p = input.payload[ p_id ];
+ input.payload[ input.name + '__' + p_id ] = p;
+ delete input.payload[ p_id ]; // drop the un-prefixed entry
+ }
+ }
+ var d = post_job_actions[ input.action + output_id ];
+ if ( d ) {
+ for ( var j in head_list ) {
+ head_list[ j ].expanded = true;
+ }
+ if ( input.pja_arg ) {
+ input.value = d.action_arguments && d.action_arguments[ input.pja_arg ] || input.value;
+ } else {
+ input.value = 'true';
+ }
+ }
+ }
+ input.inputs && visit( input, head_list.slice( 0 ) );
+ }
+ }
/** Builds sub section with step actions/annotation */
- _makeSection: function( output_id, datatypes ){
- var self = this;
+ function _makeSection( output_id, datatypes ) {
var extensions = [];
var input_terminal_names = [];
- for ( key in datatypes ) {
+ for ( var key in datatypes ) {
extensions.push( { 0 : datatypes[ key ], 1 : datatypes[ key ] } );
}
- for ( key in this.node.input_terminals ){
- input_terminal_names.push( this.node.input_terminals[ key ].name );
+ for ( key in node.input_terminals ){
+ input_terminal_names.push( node.input_terminals[ key ].name );
}
extensions.sort( function( a, b ) {
return a.label > b.label ? 1 : a.label < b.label ? -1 : 0;
@@ -132,6 +211,7 @@ define( [ 'utils/utils', 'mvc/form/form-view', 'mvc/tool/tool-form-base' ], func
0 : 'Leave unchanged',
1 : '__empty__'
});
+ var output;
var input_config = {
title : 'Configure Output: \'' + output_id + '\'',
type : 'section',
@@ -139,10 +219,10 @@ define( [ 'utils/utils', 'mvc/form/form-view', 'mvc/tool/tool-form-base' ], func
inputs : [{
label : 'Label',
type : 'text',
- value : ( output = this.node.getWorkflowOutput( output_id ) ) && output.label || '',
+ value : ( output = node.getWorkflowOutput( output_id ) ) && output.label || '',
help : 'This will provide a short name to describe the output - this must be unique across workflows.',
onchange : function( new_value ) {
- self.workflow.attemptUpdateOutputLabel( self.node, output_id, new_value );
+ workflow.attemptUpdateOutputLabel( node, output_id, new_value );
}
},{
action : 'RenameDatasetAction',
@@ -164,11 +244,19 @@ define( [ 'utils/utils', 'mvc/form/form-view', 'mvc/tool/tool-form-base' ], func
},{
action : 'TagDatasetAction',
pja_arg : 'tags',
- label : 'Tags',
+ label : 'Add Tags',
type : 'text',
value : '',
ignore : '',
help : 'This action will set tags for the dataset.'
+ },{
+ action : 'RemoveTagDatasetAction',
+ pja_arg : 'tags',
+ label : 'Remove Tags',
+ type : 'text',
+ value : '',
+ ignore : '',
+ help : 'This action will remove tags for the dataset.'
},{
title : 'Assign columns',
type : 'section',
@@ -212,45 +300,35 @@ define( [ 'utils/utils', 'mvc/form/form-view', 'mvc/tool/tool-form-base' ], func
help : 'This action will set column assignments in the output dataset. Blank fields are ignored.'
}]
};
+ visit( input_config );
+ return input_config;
+ }
- // visit input nodes and enrich by name/value pairs from server data
- function visit ( head, head_list ) {
- head_list = head_list || [];
- head_list.push( head );
- for ( var i in head.inputs ) {
- var input = head.inputs[ i ];
- var action = input.action;
- if ( action ) {
- input.name = 'pja__' + output_id + '__' + input.action;
- if ( input.pja_arg ) {
- input.name += '__' + input.pja_arg;
- }
- if ( input.payload ) {
- for ( var p_id in input.payload ) {
- var p = input.payload[ p_id ];
- input.payload[ input.name + '__' + p_id ] = p;
- delete p;
- }
- }
- var d = self.post_job_actions[ input.action + output_id ];
- if ( d ) {
- for ( var j in head_list ) {
- head_list[ j ].expanded = true;
- }
- if ( input.pja_arg ) {
- input.value = d.action_arguments && d.action_arguments[ input.pja_arg ] || input.value;
- } else {
- input.value = 'true';
- }
- }
- }
- input.inputs && visit( input, head_list.slice( 0 ) );
+ if ( output_id ) {
+ inputs.push({
+ name : 'pja__' + output_id + '__EmailAction',
+ label : 'Email notification',
+ type : 'boolean',
+ value : String( Boolean( post_job_actions[ 'EmailAction' + output_id ] ) ),
+ ignore : 'false',
+ help : 'An email notification will be sent when the job has completed.',
+ payload : {
+ 'host' : window.location.host
}
+ });
+ inputs.push({
+ name : 'pja__' + output_id + '__DeleteIntermediatesAction',
+ label : 'Output cleanup',
+ type : 'boolean',
+ value : String( Boolean( post_job_actions[ 'DeleteIntermediatesAction' + output_id ] ) ),
+ ignore : 'false',
+ help : 'Upon completion of this step, delete non-starred outputs from completed workflow steps if they are no longer required as inputs.'
+ });
+ for ( var i in node.output_terminals ) {
+ inputs.push( _makeSection( i, datatypes ) );
}
- visit( input_config );
- return input_config;
}
- });
+ }
return {
Default: Default,
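The visit() helper above flattens each post-job action into a single form parameter whose name encodes the output and the action. A minimal sketch of the naming convention, using a hypothetical output name and action for illustration:

    // Name mangling applied by visit(); the output name and action here are hypothetical.
    var output_id = 'out_file1';
    var input = { action: 'RenameDatasetAction', pja_arg: 'newname' };
    var name = 'pja__' + output_id + '__' + input.action;
    if ( input.pja_arg ) {
        name += '__' + input.pja_arg;
    }
    // name -> 'pja__out_file1__RenameDatasetAction__newname'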
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-manager.js b/client/galaxy/scripts/mvc/workflow/workflow-manager.js
index 4e03d37b1319..feb4611e8229 100644
--- a/client/galaxy/scripts/mvc/workflow/workflow-manager.js
+++ b/client/galaxy/scripts/mvc/workflow/workflow-manager.js
@@ -182,15 +182,16 @@ function( Connector, Toastr ) {
id : node.id,
type : node.type,
content_id : node.content_id,
+ tool_version : node.config_form.version,
tool_state : node.tool_state,
errors : node.errors,
input_connections : input_connections,
position : $(node.element).position(),
- annotation: node.annotation,
- post_job_actions: node.post_job_actions,
- uuid: node.uuid,
- label: node.label,
- workflow_outputs: node.workflow_outputs
+ annotation : node.annotation,
+ post_job_actions : node.post_job_actions,
+ uuid : node.uuid,
+ label : node.label,
+ workflow_outputs : node.workflow_outputs
};
nodes[ node.id ] = node_data;
});
@@ -198,7 +199,7 @@ function( Connector, Toastr ) {
},
from_simple : function ( data, initialImport_ ) {
var initialImport = (initialImport_ === undefined) ? true : initialImport_;
- wf = this;
+ var wf = this;
var offset = 0;
if( initialImport ) {
wf.name = data.name;
@@ -263,7 +264,7 @@ function( Connector, Toastr ) {
$.each(node.output_terminals, function(ot_id, ot){
if(node.post_job_actions['HideDatasetAction'+ot.name] === undefined){
node.addWorkflowOutput(ot.name);
- callout = $(node.element).find('.callout.'+ot.name);
+ var callout = $(node.element).find('.callout.'+ot.name);
callout.find('img').attr('src', Galaxy.root + 'static/images/fugue/asterisk-small.png');
wf.has_changes = true;
}
@@ -337,10 +338,10 @@ function( Connector, Toastr ) {
});
});
// Assemble order, tracking levels
- node_ids_by_level = [];
+ var node_ids_by_level = [];
while ( true ) {
// Everything without a predecessor
- level_parents = [];
+ var level_parents = [];
for ( var pred_k in n_pred ) {
if ( n_pred[ pred_k ] == 0 ) {
level_parents.push( pred_k );
@@ -394,7 +395,7 @@ function( Connector, Toastr ) {
ymin = Infinity, ymax = -Infinity,
p;
$.each( this.nodes, function( id, node ) {
- e = $(node.element);
+ var e = $(node.element);
p = e.position();
xmin = Math.min( xmin, p.left );
xmax = Math.max( xmax, p.left + e.width() );
@@ -410,7 +411,7 @@ function( Connector, Toastr ) {
}
function fix_delta( x, n ) {
if ( x < n|| x > 3*n ) {
- new_pos = ( Math.ceil( ( ( x % n ) ) / n ) + 1 ) * n;
+ var new_pos = ( Math.ceil( ( ( x % n ) ) / n ) + 1 ) * n;
return ( - ( x - new_pos ) );
}
return 0;
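Several hunks in this file (and throughout the patch) only add a missing var. Without the declaration, an assignment inside a function creates a property on the global object, so unrelated code paths can clobber each other's state. A minimal sketch of the leak these declarations close:

    // Without 'var', both functions share one accidental global.
    function first()  { leaked = 1; }   // writes window.leaked
    function second() { leaked = 2; }   // silently overwrites it
    first(); second();
    // leaked === 2 everywhere; declaring 'var leaked' in each keeps the state local.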
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-node.js b/client/galaxy/scripts/mvc/workflow/workflow-node.js
index ed0ac71e1e6a..6beff7dcc419 100644
--- a/client/galaxy/scripts/mvc/workflow/workflow-node.js
+++ b/client/galaxy/scripts/mvc/workflow/workflow-node.js
@@ -154,6 +154,7 @@ define(['mvc/workflow/workflow-view-node'], function( NodeView ) {
}
this.name = data.name;
this.config_form = data.config_form;
+ this.tool_version = this.config_form && this.config_form.version;
this.tool_state = data.tool_state;
this.errors = data.errors;
this.tooltip = data.tooltip ? data.tooltip : "";
@@ -183,8 +184,55 @@ define(['mvc/workflow/workflow-view-node'], function( NodeView ) {
update_field_data : function( data ) {
var node = this;
var nodeView = node.nodeView;
+ // Remove output views that no longer appear in data.data_outputs and
+ // skip outputs that already have a view, so that nothing is added twice.
+ var unused_outputs = [];
+ // nodeView.outputViews contains the pre-existing outputs,
+ // while data.data_outputs contains what should be displayed.
+ // First gather the outputs that are no longer used.
+ $.each(nodeView.outputViews, function(i, output_view) {
+ var cur_name = output_view.output.name;
+ var data_names = data.data_outputs;
+ var cur_name_in_data_outputs = false;
+ _.each(data_names, function(data_name) {
+ if (data_name.name == cur_name) {
+ cur_name_in_data_outputs = true;
+ }
+ });
+ if (cur_name_in_data_outputs === false) {
+ unused_outputs.push(cur_name);
+ }
+ });
+
+ // Remove the unused outputs
+ _.each(unused_outputs, function(unused_output) {
+ _.each(nodeView.outputViews[unused_output].terminalElement.terminal.connectors, function(x) {
+ if (x) {
+ x.destroy(); // Removes the noodle connectors
+ }
+ });
+ nodeView.outputViews[unused_output].remove(); // removes the rendered output
+ delete nodeView.outputViews[unused_output]; // removes the reference to the output
+ delete node.output_terminals[unused_output]; // removes the output terminal
+ });
+ $.each( node.workflow_outputs, function(i, wf_output){
+ if (wf_output && !node.output_terminals[wf_output.output_name]) {
+ node.workflow_outputs.splice(i, 1); // removes output from list of workflow outputs
+ }
+ });
+ $.each( data.data_outputs, function( i, output ) {
+ if (!nodeView.outputViews[output.name]) {
+ nodeView.addDataOutput(output); // add data output if it does not yet exist
+ } else {
+ // the output already exists, but the output formats may have changed.
+ // Therefore we update the datatypes and destroy invalid connections.
+ node.output_terminals[ output.name ].datatypes = output.extensions;
+ node.output_terminals[ output.name ].destroyInvalidConnections();
+ }
+ });
this.tool_state = data.tool_state;
this.config_form = data.config_form;
+ this.tool_version = this.config_form && this.config_form.version;
this.errors = data.errors;
this.annotation = data['annotation'];
this.label = data.label;
@@ -237,4 +285,4 @@ define(['mvc/workflow/workflow-view-node'], function( NodeView ) {
}
});
return Node;
-});
\ No newline at end of file
+});
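The new update_field_data() logic reconciles the rendered output views against the data_outputs returned by the server: views whose names are absent from the response are destroyed together with their connectors and terminals, missing ones are added, and surviving ones get refreshed datatypes. A worked example of the set arithmetic, with hypothetical output names:

    // Hypothetical before/after state for the reconciliation above.
    var existing = [ 'out_file1', 'out_file2' ];                  // keys of nodeView.outputViews
    var returned = [ { name: 'out_file1' }, { name: 'report' } ]; // data.data_outputs
    // unused_outputs -> [ 'out_file2' ]   view, terminal and connectors are removed
    // 'report'       -> no view yet       nodeView.addDataOutput() is called
    // 'out_file1'    -> on both sides     datatypes refreshed, invalid connections destroyed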
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-terminals.js b/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
index 8b244b6fa41d..45c2cf8f6296 100644
--- a/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
+++ b/client/galaxy/scripts/mvc/workflow/workflow-terminals.js
@@ -4,6 +4,41 @@ define(['mvc/workflow/workflow-globals'], function( Globals ) {
this.isCollection = true;
this.rank = collectionType.split(":").length;
}
+
+ var NULL_COLLECTION_TYPE_DESCRIPTION = {
+ isCollection: false,
+ canMatch: function( other ) { return false; },
+ canMapOver: function( other ) {
+ return false;
+ },
+ toString: function() {
+ return "NullCollectionType[]";
+ },
+ append: function( otherCollectionType ) {
+ return otherCollectionType;
+ },
+ equal: function( other ) {
+ return other === this;
+ }
+ };
+
+ var ANY_COLLECTION_TYPE_DESCRIPTION = {
+ isCollection: true,
+ canMatch: function( other ) { return NULL_COLLECTION_TYPE_DESCRIPTION !== other; },
+ canMapOver: function( other ) {
+ return false;
+ },
+ toString: function() {
+ return "AnyCollectionType[]";
+ },
+ append: function( otherCollectionType ) {
+ throw "Cannot append to ANY_COLLECTION_TYPE_DESCRIPTION";
+ },
+ equal: function( other ) {
+ return other === this;
+ }
+ };
+
$.extend( CollectionTypeDescription.prototype, {
append: function( otherCollectionTypeDescription ) {
if( otherCollectionTypeDescription === NULL_COLLECTION_TYPE_DESCRIPTION ) {
@@ -53,40 +88,6 @@ define(['mvc/workflow/workflow-globals'], function( Globals ) {
}
} );
- NULL_COLLECTION_TYPE_DESCRIPTION = {
- isCollection: false,
- canMatch: function( other ) { return false; },
- canMapOver: function( other ) {
- return false;
- },
- toString: function() {
- return "NullCollectionType[]";
- },
- append: function( otherCollectionType ) {
- return otherCollectionType;
- },
- equal: function( other ) {
- return other === this;
- }
- };
-
- ANY_COLLECTION_TYPE_DESCRIPTION = {
- isCollection: true,
- canMatch: function( other ) { return NULL_COLLECTION_TYPE_DESCRIPTION !== other; },
- canMapOver: function( other ) {
- return false;
- },
- toString: function() {
- return "AnyCollectionType[]";
- },
- append: function( otherCollectionType ) {
- throw "Cannot append to ANY_COLLECTION_TYPE_DESCRIPTION";
- },
- equal: function( other ) {
- return other === this;
- }
- };
-
var TerminalMapping = Backbone.Model.extend( {
initialize: function( attr ) {
this.mapOver = attr.mapOver || NULL_COLLECTION_TYPE_DESCRIPTION;
@@ -134,7 +135,7 @@ define(['mvc/workflow/workflow-globals'], function( Globals ) {
},
destroyInvalidConnections: function( ) {
_.each( this.connectors, function( connector ) {
- connector.destroyIfInvalid();
+ connector && connector.destroyIfInvalid();
} );
},
setMapOver : function( val ) {
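Moving the two sentinel descriptions above their first use also makes their roles easier to see: NULL matches nothing and ANY matches everything except NULL. Given the definitions in this hunk:

    // Behaviour implied by the definitions above.
    ANY_COLLECTION_TYPE_DESCRIPTION.canMatch( NULL_COLLECTION_TYPE_DESCRIPTION );        // false
    ANY_COLLECTION_TYPE_DESCRIPTION.canMatch( new CollectionTypeDescription( 'list' ) ); // true
    // NULL is the identity for append: NULL_COLLECTION_TYPE_DESCRIPTION.append( t ) returns t.
    new CollectionTypeDescription( 'list:paired' ).rank;  // 2, one per ':'-separated level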
diff --git a/client/galaxy/scripts/mvc/workflow/workflow-view.js b/client/galaxy/scripts/mvc/workflow/workflow-view.js
index c79870999b84..6934a9bc1eb4 100644
--- a/client/galaxy/scripts/mvc/workflow/workflow-view.js
+++ b/client/galaxy/scripts/mvc/workflow/workflow-view.js
@@ -64,7 +64,7 @@ define([
var close_editor = function() {
self.workflow.check_changes_in_active_form();
if ( workflow && self.workflow.has_changes ) {
- do_close = function() {
+ var do_close = function() {
window.onbeforeunload = undefined;
window.document.location = self.urls.workflow_index;
};
@@ -227,7 +227,7 @@ define([
self.scroll_to_nodes();
self.canvas_manager.draw_overview();
// Determine if any parameters were 'upgraded' and provide message
- upgrade_message = "";
+ var upgrade_message = "";
_.each( data.steps, function( step, step_id ) {
var details = "";
if ( step.errors ) {
@@ -256,7 +256,7 @@ define([
"Save" : save_current_workflow,
"Save As": workflow_save_as,
"Run": function() {
- window.location = self.urls.run_workflow;
+ window.location = Galaxy.root + "workflow/run?id=" + self.options.id;
},
"Edit Attributes" : function() { self.workflow.clear_active_node() },
"Auto Re-layout": layout_editor,
@@ -335,7 +335,7 @@ define([
}
// On load, set the size to the pref stored in local storage if it exists
- overview_size = $.jStorage.get("overview-size");
+ var overview_size = $.jStorage.get("overview-size");
if (overview_size !== undefined) {
$("#overview-border").css( {
width: overview_size,
@@ -468,7 +468,7 @@ define([
success: function( data ) {
self.workflow.from_simple( data, false );
// Determine if any parameters were 'upgraded' and provide message
- upgrade_message = "";
+ var upgrade_message = "";
$.each( data.upgrade_messages, function( k, v ) {
upgrade_message += ( "
Step " + ( parseInt(k, 10) + 1 ) + ": " + self.workflow.nodes[k].name + "");
$.each( v, function( i, vv ) {
@@ -538,18 +538,18 @@ define([
// Add a new step to the workflow by tool id
add_node_for_tool: function ( id, title ) {
- node = this.workflow.create_node( 'tool', title, id );
+ var node = this.workflow.create_node( 'tool', title, id );
this._moduleInitAjax(node, { type: "tool", tool_id: id, "_": "true" });
},
// Add a new step to the workflow by tool id
add_node_for_subworkflow: function ( id, title ) {
- node = this.workflow.create_node( 'subworkflow', title, id );
+ var node = this.workflow.create_node( 'subworkflow', title, id );
this._moduleInitAjax(node, { type: "subworkflow", content_id: id, "_": "true" });
},
add_node_for_module: function ( type, title ) {
- node = this.workflow.create_node( type, title );
+ var node = this.workflow.create_node( type, title );
this._moduleInitAjax(node, { type: type, "_": "true" });
},
@@ -559,7 +559,7 @@ define([
var self = this;
$("#pja_container").append( get_pja_form(pja, node) );
$("#pja_container>.toolForm:last>.toolFormTitle>.buttons").click(function (){
- action_to_rem = $(this).closest(".toolForm", ".action_tag").children(".action_tag:first").text();
+ var action_to_rem = $(this).closest(".toolForm", ".action_tag").children(".action_tag:first").text();
$(this).closest(".toolForm").remove();
delete self.workflow.active_node.post_job_actions[action_to_rem];
self.workflow.active_form_has_changes = true;
@@ -571,7 +571,7 @@ define([
},
display_file_list: function (node){
- addlist = "";
+ var addlist = "";
for (var out_terminal in node.output_terminals){
addlist += ""+ out_terminal +" ";
}
@@ -660,65 +660,18 @@ define([
var $container = $( '#' + cls );
if ( content && $container.find( '#' + id ).length == 0 ) {
var $el = $( '
' );
- var form_wrapper = null;
content.node = node;
content.workflow = this.workflow;
content.datatypes = this.datatypes;
content.icon = WorkflowIcons[ node.type ];
content.cls = 'ui-portlet-narrow';
- content.inputs.unshift({
- type : 'text',
- name : '__annotation',
- label : 'Annotation',
- fixed : true,
- value : node.annotation,
- area : true,
- help : 'Add an annotation or notes to this step. Annotations are available when a workflow is viewed.'
- });
- content.inputs.unshift({
- type : 'text',
- name : '__label',
- label : 'Label',
- value : node.label,
- help : 'Add a step label.',
- fixed : true,
- onchange: function( new_label ) {
- var duplicate = false;
- for ( var i in self.workflow.nodes ) {
- var n = self.workflow.nodes[ i ];
- if ( n.label && n.label == new_label && n.id != node.id ) {
- duplicate = true;
- break;
- }
- }
- var input_id = form_wrapper.form.data.match( '__label' );
- var input_element = form_wrapper.form.element_list[ input_id ];
- input_element.model.set( 'error_text', duplicate && 'Duplicate label. Please fix this before saving the workflow.' );
- form_wrapper.form.trigger( 'change' );
- }
- });
- content.onchange = function() {
- Utils.request({
- type : 'POST',
- url : Galaxy.root + 'api/workflows/build_module',
- data : {
- id : node.id,
- type : node.type,
- content_id : node.content_id,
- inputs : form_wrapper.form.data.create()
- },
- success : function( data ) {
- node.update_field_data( data );
- }
- });
- };
- if ( node.type == 'tool' ) {
- form_wrapper = new FormWrappers.Tool( content );
+ if ( node ) {
+ var form_type = ( node.type == 'tool' ? 'Tool' : 'Default' );
+ $el.append( ( new FormWrappers[ form_type ]( content ) ).form.$el );
+ $container.append( $el );
} else {
- form_wrapper = new FormWrappers.Default( content );
+ Galaxy.emit.debug('workflow-view::initialize()', 'Node not found in workflow.');
}
- $el.append( form_wrapper.form.$el );
- $container.append( $el );
}
$( '.' + cls ).hide();
$container.find( '#' + id ).show();
diff --git a/client/galaxy/scripts/mvc/workflow/workflow.js b/client/galaxy/scripts/mvc/workflow/workflow.js
index 35f4b07ff14f..988cabe61441 100644
--- a/client/galaxy/scripts/mvc/workflow/workflow.js
+++ b/client/galaxy/scripts/mvc/workflow/workflow.js
@@ -1,19 +1,17 @@
/** Workflow view */
-define( [ 'utils/utils' ], function( Utils ) {
+define( [ 'utils/utils', 'mvc/ui/ui-misc' ], function( Utils, Ui ) {
/** Build messages after user action */
- function build_messages( self ) {
- var $el_message = self.$el.find( '.response-message' ),
- status = Utils.getQueryString( 'status' ),
- message = Utils.getQueryString( 'message' );
-
- if( message && message !== null && message !== "" ) {
- $el_message.addClass( status + 'message' );
- $el_message.html( '' + _.escape( message ) + '
' );
- }
- else {
- $el_message.html("");
- }
+ function build_messages() {
+ var $el_message = this.$( '.response-message' ),
+ response = {
+ 'status': Utils.getQueryString( 'status' ),
+ 'message': _.escape( Utils.getQueryString( 'message' ) ),
+ 'persistent': true,
+ 'cls': Utils.getQueryString( 'status' ) + 'message'
+ };
+ $el_message.empty().html( new Ui.Message( response ).$el );
}
/** View of the main workflow list page */
@@ -32,8 +30,8 @@ define( [ 'utils/utils' ], function( Utils ) {
// Add workflow header
self.$el.empty().append( self._templateHeader() );
// Add user actions message if any
- build_messages( self );
- $el_workflow = self.$el.find( '.user-workflows' );
+ build_messages.call( self );
+ var $el_workflow = self.$( '.user-workflows' );
// Add the actions buttons
$el_workflow.append( self._templateActionButtons() );
if( workflows.length > 0) {
@@ -41,10 +39,11 @@ define( [ 'utils/utils' ], function( Utils ) {
self.adjust_actiondropdown( $el_workflow );
// Register delete and run workflow events
_.each( workflows, function( wf ) {
- self.confirm_delete( self, wf );
+ self.confirm_delete( wf );
});
+ self.register_show_tool_menu();
// Register search workflow event
- self.search_workflow( self, self.$el.find( '.search-wf' ), self.$el.find( '.workflow-search tr' ), min_query_length );
+ self.search_workflow( self.$( '.search-wf' ), self.$( '.workflow-search tr' ), min_query_length );
}
else {
$el_workflow.append( self._templateNoWorkflow() );
@@ -52,10 +51,34 @@ define( [ 'utils/utils' ], function( Utils ) {
});
},
+ /** Save the workflow as an item in the Tool panel */
+ register_show_tool_menu: function() {
+ var $el_checkboxes = this.$( '.show-in-tool-panel' );
+ $el_checkboxes.on( 'click', function( e ) {
+ var ids = [];
+ // Look for all the checked checkboxes
+ for( var item = 0; item < $el_checkboxes.length; item++ ) {
+ var checkbox = $el_checkboxes[ item ];
+ if( checkbox.checked ) {
+ ids.push( checkbox.value );
+ }
+ }
+ // Save all the checked workflows
+ $.ajax({
+ type: 'PUT',
+ url: Galaxy.root + 'api/workflows/menu/',
+ data: JSON.stringify( { 'workflow_ids': ids } ),
+ contentType : 'application/json'
+ }).done( function( response ) {
+ window.location = Galaxy.root + 'workflow';
+ });
+ });
+ },
+
/** Add confirm box before removing/unsharing workflow */
- confirm_delete: function( self, workflow ) {
- var $el_wf_link = self.$el.find( '.link-confirm-' + workflow.id ),
- $el_shared_wf_link = self.$el.find( '.link-confirm-shared-' + workflow.id );
+ confirm_delete: function( workflow ) {
+ var $el_wf_link = this.$( '.link-confirm-' + workflow.id ),
+ $el_shared_wf_link = this.$( '.link-confirm-shared-' + workflow.id );
$el_wf_link.click( function() {
return confirm( "Are you sure you want to delete workflow '" + workflow.name + "'?" );
});
@@ -65,7 +88,7 @@ define( [ 'utils/utils' ], function( Utils ) {
},
/** Implement client side workflow search/filtering */
- search_workflow: function( self, $el_searchinput, $el_tabletr, min_querylen ) {
+ search_workflow: function( $el_searchinput, $el_tabletr, min_querylen ) {
$el_searchinput.on( 'keyup', function () {
var query = $( this ).val();
// Filter when query is at least 3 characters
@@ -128,8 +151,10 @@ define( [ 'utils/utils' ], function( Utils ) {
'Owner ' +
'# of Steps ' +
'Published ' +
+ 'Show in tools panel ' +
'';
_.each( workflows, function( wf ) {
+ var checkbox_html = ' ';
trHtml = trHtml + '' +
'' +
'' +
@@ -142,6 +167,7 @@ define( [ 'utils/utils' ], function( Utils ) {
'
' + ( wf.owner === Galaxy.user.attributes.username ? "You" : wf.owner ) +' ' +
'' + wf.number_of_steps + ' ' +
'' + ( wf.published ? "Yes" : "No" ) + ' ' +
+ ''+ checkbox_html +' ' +
' ';
});
return tableHtml + '' + trHtml + ' ';
@@ -192,12 +218,12 @@ define( [ 'utils/utils' ], function( Utils ) {
render: function() {
var self = this;
$.getJSON( Galaxy.root + 'workflow/upload_import_workflow', function( options ) {
- self.$el.empty().append( self._mainTemplate( self, options ) );
+ self.$el.empty().append( self._mainTemplate( options ) );
});
},
/** Template for the import workflow page */
- _mainTemplate: function( self, options ) {
+ _mainTemplate: function( options ) {
return "" +
"
Import Galaxy workflow
" +
"
" +
diff --git a/client/galaxy/scripts/nls/fr/locale.js b/client/galaxy/scripts/nls/fr/locale.js
index 79f8eb0e689c..4edef74d85a8 100644
--- a/client/galaxy/scripts/nls/fr/locale.js
+++ b/client/galaxy/scripts/nls/fr/locale.js
@@ -22,7 +22,7 @@ define({
"New Track Browser":
"Nouveau Navigateur de Tracks/Pistes",
"Saved Visualizations":
- "Visualisations sauvegardés",
+ "Visualisations sauvegardées",
"Interactive Environments":
"Environnements interactifs",
"Admin":
@@ -81,7 +81,7 @@ define({
"Chain tools into workflows":
"Relier outils dans un workflow",
"Analysis home view":
- "Accueil analyse de donnée",
+ "Accueil analyse de données",
// ---------------------------------------------------------------------------- histories
// ---- history/options-menu
"History Lists":
@@ -142,7 +142,7 @@ define({
"This history is empty" :
"Cet historique est vide",
"No matching datasets found" :
- "Aucunes données correspondantes n'a été trouvées",
+ "Aucunes données correspondantes n'ont été trouvées",
"An error occurred while getting updates from the server" :
"Une erreur s'est produite lors de la réception des données depuis le serveur",
"Please contact a Galaxy administrator if the problem persists" :
@@ -270,7 +270,7 @@ define({
"Ce traitement est en pause. Utilisez le \"Relancer les traitements en pause\" dans le menu d'historique pour le relancer",
// state: error
"An error occurred with this dataset" :
- "Un erreur est survenue avec ce jeu de données",
+ "Une erreur est survenue avec ce jeu de données",
// state: empty
"No data" :
"Aucune donnée",
diff --git a/client/galaxy/scripts/reports_webapp/run_stats.js b/client/galaxy/scripts/reports_webapp/run_stats.js
index 7542b63733b3..e0138e08d5d2 100644
--- a/client/galaxy/scripts/reports_webapp/run_stats.js
+++ b/client/galaxy/scripts/reports_webapp/run_stats.js
@@ -117,7 +117,7 @@ function create_chart( inp_data, name, time, title ) {
.enter().append("g")
.attr("transform", function(d, i) {
// Place the bar in the correct place
- curr_margin = +margin.left;
+ var curr_margin = +margin.left;
curr_margin += +(i * barWidth);
return "translate(" + curr_margin + "," + margin.top + ")";
})
@@ -503,8 +503,8 @@ function create_histogram( inp_data, name, title ) {
// Formatter for x axis times (converting minutes to HH:MM).
var formatMinutes = function(d) {
- hours = Math.floor( d / 60 )
- minutes = Math.floor(d - (hours * 60))
+ var hours = Math.floor( d / 60 ),
+ minutes = Math.floor(d - (hours * 60))
if(hours < 10) {
hours = "0" + hours
@@ -542,8 +542,8 @@ function create_histogram( inp_data, name, title ) {
return "translate(" + (+x(d.x) + +margin.left) + "," + (+y(d.y) + +margin.top) + ")"; })
.on("mouseenter", function(d) {
// Show tool tip
- i = 0;
- size = d.length;
+ var i = 0,
+ size = d.length;
while( size >= 1) {
size = size / 10;
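The formatter hunk above only shows the variable declarations and the hours padding; the rest is cut off by the diff context. A self-contained sketch of the presumable whole, with the minutes padding and return value assumed by symmetry with the hours branch:

    // Sketch only: the minutes padding and the return are assumed, not shown in the hunk.
    function formatMinutes( d ) {
        var hours = Math.floor( d / 60 ),
            minutes = Math.floor( d - ( hours * 60 ) );
        if ( hours < 10 ) { hours = '0' + hours; }
        if ( minutes < 10 ) { minutes = '0' + minutes; }
        return hours + ':' + minutes;
    }
    formatMinutes( 125 );   // '02:05'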
diff --git a/client/galaxy/scripts/ui/scroll-panel.js b/client/galaxy/scripts/ui/scroll-panel.js
index 3b9aafd0bf18..d8b496a2ebdc 100644
--- a/client/galaxy/scripts/ui/scroll-panel.js
+++ b/client/galaxy/scripts/ui/scroll-panel.js
@@ -11,8 +11,8 @@ $.ui.plugin.add("draggable", "scrollPanel", {
panel = o.panel,
panel_pos = panel.position(),
panel_w = panel.width(),
- panel_h = panel.height()
- viewport = panel.parent();
+ panel_h = panel.height(),
+ viewport = panel.parent(),
viewport_w = viewport.width(),
viewport_h = viewport.height(),
element_w = element.width(),
diff --git a/client/galaxy/scripts/utils/graph.js b/client/galaxy/scripts/utils/graph.js
index b252354736ec..fb1ac3921542 100644
--- a/client/galaxy/scripts/utils/graph.js
+++ b/client/galaxy/scripts/utils/graph.js
@@ -387,8 +387,8 @@ Graph.prototype.createEdge = function( sourceName, targetName, directed, data ){
var isReflexive = sourceName === targetName;
if( !this.allowReflexiveEdges && isReflexive ){ return null; }
- sourceVertex = this.vertices[ sourceName ];
- targetVertex = this.vertices[ targetName ];
+ var sourceVertex = this.vertices[ sourceName ];
+ var targetVertex = this.vertices[ targetName ];
//note: silently ignores edges from/to unknown vertices
if( !( sourceVertex && targetVertex ) ){ return null; }
diff --git a/client/galaxy/scripts/utils/query-string-parsing.js b/client/galaxy/scripts/utils/query-string-parsing.js
index a866ffc6f9a6..d868a2583d6f 100644
--- a/client/galaxy/scripts/utils/query-string-parsing.js
+++ b/client/galaxy/scripts/utils/query-string-parsing.js
@@ -2,14 +2,16 @@ define([], function(){
// ============================================================================
function get( key, queryString ){
queryString = queryString || window.location.search.substr( 1 );
- var keyRegex = new RegExp( key + '=([^$]+)' ),
- matches = queryString.match( keyRegex );
+ var keyRegex = new RegExp( key + '=([^$]+)', 'g' );
+ var matches = queryString.match( keyRegex );
if( !matches || !matches.length ){
return undefined;
}
- matches = matches.splice( 1 );
+ matches = _.map( matches, function( match ) {
+ return decodeURIComponent( match.substr( key.length + 1 ).replace( /\+/g, ' ' ) );
+ } );
if( matches.length === 1 ){
- return matches[0];
+ return matches[ 0 ];
}
return matches;
}
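After this change, get() strips the key from each match, folds '+' into spaces and URI-decodes the value instead of returning the raw tail of the query string. Expected behaviour of the patched function; note the greedy ([^$]+) capture runs to the end of the string, so the key should be the last (or only) parameter:

    get( 'message', 'message=Saved%20ok' );   // 'Saved ok'
    get( 'message', 'message=Saved+ok' );     // 'Saved ok' ('+' folded to a space)
    get( 'missing', 'status=done' );          // undefined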
diff --git a/client/galaxy/scripts/utils/uploadbox.js b/client/galaxy/scripts/utils/uploadbox.js
index b75d855d3c57..1382c3179bec 100755
--- a/client/galaxy/scripts/utils/uploadbox.js
+++ b/client/galaxy/scripts/utils/uploadbox.js
@@ -53,7 +53,7 @@
}
// prepare request
- xhr = new XMLHttpRequest();
+ var xhr = new XMLHttpRequest();
xhr.open('POST', cnf.url, true);
xhr.setRequestHeader('Accept', 'application/json');
xhr.setRequestHeader('Cache-Control', 'no-cache');
diff --git a/client/galaxy/scripts/viz/circster.js b/client/galaxy/scripts/viz/circster.js
index 7efe2ce6e1df..7cdd2589ee8e 100644
--- a/client/galaxy/scripts/viz/circster.js
+++ b/client/galaxy/scripts/viz/circster.js
@@ -634,10 +634,10 @@ var CircsterTrackView = Backbone.View.extend({
track.get('config').set_value('max_value', self.data_bounds[1], {silent: true});
// Merge chroms layout with data.
- layout_and_data = _.zip(chrom_arcs, genome_wide_data),
+ var layout_and_data = _.zip(chrom_arcs, genome_wide_data);
// Render each chromosome's data.
- chroms_data_layout = _.map(layout_and_data, function(chrom_info) {
+ _.each(layout_and_data, function(chrom_info) {
var chrom_arc = chrom_info[0],
data = chrom_info[1];
return self._render_chrom_data(svg, chrom_arc, data);
diff --git a/client/galaxy/scripts/viz/phyloviz.js b/client/galaxy/scripts/viz/phyloviz.js
index c2c6ded626d9..6b6dfdd72697 100644
--- a/client/galaxy/scripts/viz/phyloviz.js
+++ b/client/galaxy/scripts/viz/phyloviz.js
@@ -726,7 +726,7 @@ var HeaderButtons = Backbone.View.extend({
initRightHeaderBtns : function(){
var self = this;
- rightMenu = mod_icon_btn.create_icon_buttons_menu([
+ var rightMenu = mod_icon_btn.create_icon_buttons_menu([
{ icon_class: 'gear', title: 'PhyloViz Settings', on_click: function(){
$("#SettingsMenu").show();
self.settingsMenu.updateUI();
diff --git a/client/galaxy/scripts/viz/trackster/painters.js b/client/galaxy/scripts/viz/trackster/painters.js
index 5108d19737de..722fb1f91e32 100644
--- a/client/galaxy/scripts/viz/trackster/painters.js
+++ b/client/galaxy/scripts/viz/trackster/painters.js
@@ -516,7 +516,7 @@ _.extend(LinkedFeaturePainter.prototype, FeaturePainter.prototype, {
thickness, y_start, thick_start = null, thick_end = null,
// TODO: is there any reason why block, label color cannot be set at the Painter level?
// For now, assume '.' === '+'
- block_color = (!feature_strand || feature_strand === "+" || feature_strand === "." ? this.prefs.block_color : this.prefs.reverse_strand_color);
+ block_color = (!feature_strand || feature_strand === "+" || feature_strand === "." ? this.prefs.block_color : this.prefs.reverse_strand_color),
label_color = this.prefs.label_color;
// Set global alpha.
@@ -1223,7 +1223,7 @@ Color.prototype = {
},
mix: function (color2, weight) {
- color1 = this;
+ var color1 = this;
var p = weight; // .value / 100.0;
var w = p * 2 - 1;
@@ -1543,8 +1543,8 @@ _.extend(VariantPainter.prototype, Painter.prototype, {
// Draw allele fractions onto summary.
for (j = 0; j < alt.length; j++) {
ctx.fillStyle = ( alt[j].type === 'deletion' ? 'black' : this.base_color_fn(alt[j].value) );
- allele_frac = allele_counts / sample_gts.length;
- draw_height = Math.ceil(this.prefs.summary_height * allele_frac);
+ var allele_frac = allele_counts / sample_gts.length;
+ var draw_height = Math.ceil(this.prefs.summary_height * allele_frac);
ctx.fillRect(draw_x_start, draw_y_start - draw_height, base_px, draw_height);
draw_y_start -= draw_height;
}
diff --git a/client/galaxy/scripts/viz/trackster/tracks.js b/client/galaxy/scripts/viz/trackster/tracks.js
index 5e0b520ce7a8..a797c2afb243 100644
--- a/client/galaxy/scripts/viz/trackster/tracks.js
+++ b/client/galaxy/scripts/viz/trackster/tracks.js
@@ -791,7 +791,7 @@ extend(DrawableGroup.prototype, Drawable.prototype, DrawableCollection.prototype
// For all tracks, save current filter manager and set manager to shared (this object's) manager.
this.saved_filters_managers = [];
for (var i = 0; i < this.drawables.length; i++) {
- drawable = this.drawables[i];
+ var drawable = this.drawables[i];
this.saved_filters_managers.push(drawable.filters_manager);
drawable.filters_manager = this.filters_manager;
}
@@ -1699,7 +1699,7 @@ var TracksterToolView = Backbone.View.extend({
* Render tool UI.
*/
render: function() {
- var self = this;
+ var self = this,
tool = this.model,
parent_div = this.$el.addClass("dynamic-tool").hide();
@@ -3959,6 +3959,7 @@ extend(FeatureTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
* Returns appropriate display mode based on data.
*/
get_mode: function(data) {
+ var mode;
// HACK: use no_detail mode track is in overview to prevent overview from being too large.
if (data.extra_info === "no_detail" || this.is_overview) {
mode = "no_detail";
@@ -4030,7 +4031,7 @@ extend(FeatureTrack.prototype, Drawable.prototype, TiledTrack.prototype, {
// Preprocessing: filter features and determine whether all unfiltered features have been slotted.
var
filtered = [],
- slots = this.slotters[w_scale].slots;
+ slots = this.slotters[w_scale].slots,
all_slotted = true;
if ( result.data ) {
var filters = this.filters_manager.filters;
diff --git a/client/galaxy/style/less/base.less b/client/galaxy/style/less/base.less
index d15dcc0074ba..6e510bada7bf 100644
--- a/client/galaxy/style/less/base.less
+++ b/client/galaxy/style/less/base.less
@@ -1812,6 +1812,6 @@ div.toolTitleNoSection
margin-left: 0.5%;
}
-.other-options, .configure-workflows {
+.other-options {
margin-bottom: 2%;
}
diff --git a/client/galaxy/style/less/dataset.less b/client/galaxy/style/less/dataset.less
index e85f75c1fffc..3b88fa3b5ad1 100644
--- a/client/galaxy/style/less/dataset.less
+++ b/client/galaxy/style/less/dataset.less
@@ -172,7 +172,7 @@
background: @state-default-bg;
.state-icon {
&:before {
- content: @fa-var-exclamation-circle;
+ content: @fa-var-clock-o;
}
}
}
diff --git a/client/galaxy/style/less/galaxy_bootstrap/overrides.less b/client/galaxy/style/less/galaxy_bootstrap/overrides.less
index 1d4ac947707c..4b81980b31e7 100644
--- a/client/galaxy/style/less/galaxy_bootstrap/overrides.less
+++ b/client/galaxy/style/less/galaxy_bootstrap/overrides.less
@@ -94,3 +94,11 @@ div.ui-form-help {
}
}
}
+
+// pre styling
+pre.code {
+ white-space: pre-wrap;
+ background: #1d1f21;
+ color: white;
+ padding: 1em;
+}
diff --git a/client/galaxy/style/less/ui.less b/client/galaxy/style/less/ui.less
index bcb0365ce6a6..a85e7c58f12b 100644
--- a/client/galaxy/style/less/ui.less
+++ b/client/galaxy/style/less/ui.less
@@ -34,6 +34,12 @@
@ui-margin-horizontal-small: 4px;
@ui-margin-horizontal-large: 10px;
+// helpers
+.ui-clear-float {
+ clear: both !important;
+ float: none !important;
+}
+
// margins
.ui-margin-top {
padding-top: @ui-margin-vertical !important;
@@ -94,6 +100,7 @@
margin-bottom: 0px;
width: 100%;
display: none;
+ word-wrap: break-word;
}
.ui-modal {
@@ -135,6 +142,7 @@
// buttons
.ui-button-default {
+ float: right;
.icon {
font-size: 1.1em;
}
@@ -154,6 +162,7 @@
&:extend(.no-highlight);
height: auto !important;
width: auto !important;
+ float: right;
ul {
i {
width: 1.2em;
@@ -195,6 +204,7 @@
height: inherit !important;
width: inherit !important;
padding-right: 3px !important;
+ float: right;
}
// side panel
@@ -424,6 +434,7 @@
.ui-form-help {
overflow: auto;
+ word-wrap: break-word;
}
.ui-form-info {
@@ -851,10 +862,12 @@
.ui-button-icon {
margin-top: 3px;
margin-right: 5px;
+ float: left;
}
}
.ui-list-message, .ui-list-portlet {
clear: both;
+ padding-top: 1px;
}
.ui-list-id {
cursor: pointer;
@@ -866,6 +879,34 @@
}
}
+.ui-gs-select-file {
+ .ui-gs-filename-textbox {
+ float: right;
+ width: ~'calc(100% - 76px)';
+ }
+ .ui-gs-token-textbox {
+ float: right;
+ width: ~'calc(100% - 76px)';
+ }
+ .ui-gs-browse-button {
+ float: left;
+ .ui-button-icon {
+ margin-top: 3px;
+ margin-right: 5px;
+ }
+ }
+ .ui-gs-token-label {
+ margin-top: 5px;
+ margin-left: 32px;
+ margin-right: 5px;
+ float: left;
+ }
+ .ui-gs-token-field {
+ clear: both;
+ padding-top: 5px;
+ }
+}
+
.ui-select {
position: relative;
.icon-dropdown {
diff --git a/client/galaxy/style/source_material/circle.py b/client/galaxy/style/source_material/circle.py
index 6f6e977e200f..cbd5d06bade2 100755
--- a/client/galaxy/style/source_material/circle.py
+++ b/client/galaxy/style/source_material/circle.py
@@ -9,47 +9,47 @@
import cairo
-assert sys.version_info[:2] >= ( 2, 4 )
+assert sys.version_info[:2] >= (2, 4)
-def parse_css_color( color ):
- if color.startswith( '#' ):
+def parse_css_color(color):
+ if color.startswith('#'):
color = color[1:]
- if len( color ) == 3:
- r = int( color[0], 16 )
- g = int( color[1], 16 )
- b = int( color[2], 16 )
- elif len( color ) == 6:
- r = int( color[0:2], 16 )
- g = int( color[2:4], 16 )
- b = int( color[4:6], 16 )
+ if len(color) == 3:
+ r = int(color[0], 16)
+ g = int(color[1], 16)
+ b = int(color[2], 16)
+ elif len(color) == 6:
+ r = int(color[0:2], 16)
+ g = int(color[2:4], 16)
+ b = int(color[4:6], 16)
else:
- raise Exception( "Color should be 3 hex numbers" )
+ raise Exception("Color should be 3 hex numbers")
return r / 256, g / 256, b / 256
-size = int( sys.argv[1] )
+size = int(sys.argv[1])
-surface = cairo.ImageSurface( cairo.FORMAT_ARGB32, size, size )
-c = cairo.Context( surface )
+surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, size, size)
+c = cairo.Context(surface)
-c.set_line_width( 1 )
+c.set_line_width(1)
-c.arc( size / 2.0, size / 2.0, ( size - 1 ) / 2.0, 0, 2 * pi )
+c.arc(size / 2.0, size / 2.0, (size - 1) / 2.0, 0, 2 * pi)
-c.set_source_rgb( *parse_css_color( sys.argv[2] ) )
+c.set_source_rgb(*parse_css_color(sys.argv[2]))
c.fill_preserve()
-c.set_source_rgb( *parse_css_color( sys.argv[3] ) )
+c.set_source_rgb(*parse_css_color(sys.argv[3]))
c.stroke()
t = size / 4.0
arrow = sys.argv[4]
if arrow == 'right':
- c.move_to( t + 1, t )
- c.line_to( 3 * t - 1, 2 * t )
- c.line_to( t + 1, 3 * t )
+ c.move_to(t + 1, t)
+ c.line_to(3 * t - 1, 2 * t)
+ c.line_to(t + 1, 3 * t)
c.stroke()
-surface.write_to_png( "/dev/stdout" )
+surface.write_to_png("/dev/stdout")
diff --git a/client/toolshed/scripts/mvc/groups/group-list-view.js b/client/toolshed/scripts/mvc/groups/group-list-view.js
index 75fffd3488af..973e6d767211 100644
--- a/client/toolshed/scripts/mvc/groups/group-list-view.js
+++ b/client/toolshed/scripts/mvc/groups/group-list-view.js
@@ -114,7 +114,7 @@ var GroupListView = Backbone.View.extend({
// MMMMMMMMMMMMMMMMMM
templateGroupsList: function(){
- tmpl_array = [];
+ var tmpl_array = [];
tmpl_array.push('
');
tmpl_array.push('
');
diff --git a/config/datatypes_conf.xml.sample b/config/datatypes_conf.xml.sample
index 056f6a3d3836..0bb4c67df104 100644
--- a/config/datatypes_conf.xml.sample
+++ b/config/datatypes_conf.xml.sample
@@ -240,7 +240,10 @@
+
+
+
@@ -293,6 +296,7 @@
+
@@ -322,7 +326,7 @@
-
+
@@ -381,6 +385,7 @@
+
@@ -548,8 +553,12 @@
+
+
+
+
@@ -683,6 +692,7 @@
+
diff --git a/config/error_report.yml.sample b/config/error_report.yml.sample
new file mode 100644
index 000000000000..a6b4b8640fdc
--- /dev/null
+++ b/config/error_report.yml.sample
@@ -0,0 +1,42 @@
+# Each element in this file corresponds to a destination for an error
+# report.
+
+# If you supply the parameter ``user_submission`` and set it to True, this
+# error plugin will only be activated when the user uses the bug report
+# submission interface. Otherwise, it will be triggered without any user
+# interaction (e.g. the historical behaviour of the sentry plugin).
+
+# If your plugin is ``user_submission=True``, then supplying
+# ``verbose=True`` will cause the plugin to display a message to the end
+# user. E.g. the email plugin simply states "Your error report has been
+# sent", or the JSON plugin informs the user "Wrote error report to ..."
+
+# The plugins below are listed with their default values of
+# verbose/user_submission; providing those keys is optional.
+
+# The default Email bug reporter. By default, the standard
+# configuration is taken from your galaxy.ini
+- type: email
+ verbose: true
+ user_submission: true
+
+# Example JSON bug reporter implementation. This just writes the bug
+# report as a JSON file to a specified directory and serves as an
+# example for others to extend.
+# - type: json
+# verbose: true
+# user_submission: true
+# directory: /tmp/reports/
+
+# Submit error reports to sentry. If a sentry_dsn is configured in your
+# galaxy.ini, then Galaxy will submit the job error to Sentry.
+- type: sentry
+ user_submission: false
+
+# Allow users to submit error reports to biostars. This requires that the
+# biostars integration is configured. This *only* makes sense when
+# user_submission is true, as it only generates the link for the user to click
+# on and submit the bug report, it does not actually submit the bug report on
+# their behalf.
+# - type: biostars
+# user_submission: true
diff --git a/config/galaxy.ini.sample b/config/galaxy.ini.sample
index 8ca5d65f3f09..4896f40797ae 100644
--- a/config/galaxy.ini.sample
+++ b/config/galaxy.ini.sample
@@ -209,8 +209,7 @@ paste.app_factory = galaxy.web.buildapp:app_factory
# Pass debug flag to conda commands.
#conda_debug = False
# conda channels to enable by default (http://conda.pydata.org/docs/custom-channels.html)
-# the recommended channel order is the one from BioConda (https://github.com/bioconda/bioconda-recipes/blob/master/config.yml#L8)
-#conda_ensure_channels = iuc,bioconda,r,defaults,conda-forge
+#conda_ensure_channels = iuc,bioconda,conda-forge,defaults,r
# Set to True to instruct Galaxy to look for and install missing tool
# dependencies before each job runs.
#conda_auto_install = False
@@ -633,6 +632,10 @@ nglims_config_file = tool-data/nglims.yaml
# The URL linked by the "Videos" link in the "Help" menu.
#screencasts_url = https://vimeo.com/galaxyproject
+# Points to the GenomeSpace UI service which will be used by
+# the GenomeSpace importer and exporter tools
+#genomespace_ui_url = https://gsui.genomespace.org/jsui/
+
# The URL linked by the "Terms and Conditions" link in the "Help" menu, as well
# as on the user registration and login forms and in the activation emails.
#terms_url = None
@@ -908,12 +911,14 @@ use_interactive = True
# sub-directories of files contained in their directory.
#user_library_import_dir = None
-# Add an option to the admin library upload tool allowing admins to paste
+# Allow admins to paste filesystem paths during upload. For libraries this
+# adds an option to the admin library upload tool allowing admins to paste
# filesystem paths to files and directories in a box, and these paths will be
-# added to a library. Set to True to enable. Please note the security
+# added to a library. For history uploads, this allows pasting in paths as URIs
+# (i.e. prefixed with file://). Set to True to enable. Please note the security
# implication that this will give Galaxy Admins access to anything your Galaxy
# user has access to.
-#allow_library_path_paste = False
+#allow_path_paste = False
# Users may choose to download multiple files from a library in an archive. By
# default, Galaxy allows users to select from a few different archive formats
diff --git a/config/object_store_conf.xml.sample b/config/object_store_conf.xml.sample
index 655e7271fbe6..59170e046ca9 100644
--- a/config/object_store_conf.xml.sample
+++ b/config/object_store_conf.xml.sample
@@ -52,6 +52,16 @@
-->
+
+
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/base64.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/base64.js
index 77b3868abf9d..68e51f6e036c 100644
--- a/config/plugins/interactive_environments/rstudio/static/js/crypto/base64.js
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/base64.js
@@ -30,7 +30,7 @@ function b64tohex(s) {
var slop;
for(i = 0; i < s.length; ++i) {
if(s.charAt(i) == b64pad) break;
- v = b64map.indexOf(s.charAt(i));
+ var v = b64map.indexOf(s.charAt(i));
if(v < 0) continue;
if(k == 0) {
ret += int2char(v >> 2);
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.big.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.big.js
index 2389ea472825..db13dbbeec84 100644
--- a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.big.js
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio.big.js
@@ -819,7 +819,7 @@ function b64tohex(s) {
var slop;
for(i = 0; i < s.length; ++i) {
if(s.charAt(i) == b64pad) break;
- v = b64map.indexOf(s.charAt(i));
+ var v = b64map.indexOf(s.charAt(i));
if(v < 0) continue;
if(k == 0) {
ret += int2char(v >> 2);
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/base64.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/base64.js
index 77b3868abf9d..68e51f6e036c 100644
--- a/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/base64.js
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/rstudio/base64.js
@@ -30,7 +30,7 @@ function b64tohex(s) {
var slop;
for(i = 0; i < s.length; ++i) {
if(s.charAt(i) == b64pad) break;
- v = b64map.indexOf(s.charAt(i));
+ var v = b64map.indexOf(s.charAt(i));
if(v < 0) continue;
if(k == 0) {
ret += int2char(v >> 2);
diff --git a/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/base64.js b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/base64.js
index ad53bb8ed06b..1421d8772a48 100644
--- a/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/base64.js
+++ b/config/plugins/interactive_environments/rstudio/static/js/crypto/wu/base64.js
@@ -29,7 +29,7 @@ function b64tohex(s) {
var slop;
for(i = 0; i < s.length; ++i) {
if(s.charAt(i) == b64padchar) break;
- v = b64map.indexOf(s.charAt(i));
+ var v = b64map.indexOf(s.charAt(i));
if(v < 0) continue;
if(k == 0) {
ret += int2char(v >> 2);
diff --git a/config/plugins/visualizations/charts/static/client/components/screenshot.js b/config/plugins/visualizations/charts/static/client/components/screenshot.js
index dc4414c99e86..64f12f27e8c8 100644
--- a/config/plugins/visualizations/charts/static/client/components/screenshot.js
+++ b/config/plugins/visualizations/charts/static/client/components/screenshot.js
@@ -37,7 +37,7 @@ define( [ 'libs/underscore' ], function( _ ) {
}
}
$canvas.children().each( function() { _toImage( $( this ), 0, 0 ) } );
- imgData = newCanvas.toDataURL( 'image/png' );
+ var imgData = newCanvas.toDataURL( 'image/png' );
if ( imgData ) {
window.location.href = imgData.replace( 'image/png', 'image/octet-stream' );
}
@@ -97,7 +97,7 @@ define( [ 'libs/underscore' ], function( _ ) {
$el.append( form );
}
form.empty();
- for ( name in data ) {
+ for ( var name in data ) {
var input = $( '
', {
type : 'hidden',
name : name,
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/optspanel.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/optspanel.js
index 2334d871911b..33568d52f02f 100644
--- a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/optspanel.js
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/optspanel.js
@@ -120,8 +120,8 @@ var Optspanel = Backbone.View.extend({
.lasso()
.on("lassoBegin", function(e, lassoPoints) {
polygon = [];
- canvas = self.vis.el.childNodes[1];
- c2 = canvas.getContext('2d');
+ var canvas = self.vis.el.childNodes[1];
+ var c2 = canvas.getContext('2d');
c2.fillStyle = "rgba(100, 100, 100, 0.02)";
c2.beginPath();
diff --git a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/vispanel.js b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/vispanel.js
index 779477afcaeb..e2c954b22d9b 100644
--- a/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/vispanel.js
+++ b/config/plugins/visualizations/charts/static/repository/plugins/biojs/drawrnajs/views/vispanel.js
@@ -89,7 +89,7 @@ var Vispanel = Backbone.View.extend({
setResidueNodes: function(cy){
//index nodes
for(var i=1; i
0 ) {
- has_result = true;
- self.makeSection( type, data[type] );
- }
+ if( data[ type ] && data[ type ].length > 0 ) {
+ has_result = true;
+ self.makeSection( type, data[type] );
+ }
}
},
@@ -452,119 +453,118 @@ $(document).ready(function() {
localStorageObject = null;
localStorageObject = self.getStorageObject( self, window.Galaxy.user.id, type );
if( localStorageObject ) {
- _.each( localStorageObject, function ( item, item_key ) {
- if( item_key === item_id ) {
- present = true;
- }
- });
- return present;
- }
+ _.each( localStorageObject, function ( item, item_key ) {
+ if( item_key === item_id ) {
+ present = true;
+ }
+ });
+ return present;
+ }
},
/** Create collection of templates of all sections and links for tools */
makeToolSection: function( search_result ) {
- var template_dict = [],
- tool_template = "",
- self = this,
- $el_search_result = $( '.search-results' ),
+ var template_dict = [],
+ tool_template = "",
+ self = this,
+ $el_search_result = $( '.search-results' ),
removed_results_key = "removed_results",
pinned_results_key = "pinned_results",
class_tool_link = "tool-search-link";
- _.each( search_result, function( item ) {
- var all_sections = Galaxy.toolPanel.attributes.layout.models;
- _.each( all_sections, function( section ) {
- if( section.attributes.model_class === "ToolSection" ) {
- var all_tools = section.attributes.elems,
- is_present = false,
- tools_template = "",
- section_header_id = "",
- section_header_name = "";
- _.each( all_tools, function( tool ) {
- if( tool.id === item ) {
- var attrs = tool.attributes;
- if( !self.checkItemPresent( attrs.id, removed_results_key, self ) ) {
- is_present = true;
- tools_template = tools_template + self._buildLinkTemplate( attrs.id,
- attrs.link,
- attrs.name,
- attrs.description,
- attrs.target,
- class_tool_link,
- self.checkItemPresent( attrs.id,
- pinned_results_key, self ),
- attrs.version,
- attrs.min_width,
- attrs.form_style );
+
+ _.each( Galaxy.config.toolbox_in_panel, function( section ) {
+ if( section.model_class === "ToolSection" ) {
+ var all_tools = section.elems,
+ is_present = false,
+ tools_template = "",
+ section_header_id = "",
+ section_header_name = "";
+ _.each( all_tools, function( tool ) {
+ if( _.contains( search_result, tool.id ) ) {
+ var attrs = tool.attributes;
+ if( !self.checkItemPresent( tool.id, removed_results_key, self ) ) {
+ is_present = true;
+ tools_template = tools_template + self._buildLinkTemplate( attrs.id,
+ attrs.link,
+ attrs.name,
+ attrs.description,
+ attrs.target,
+ class_tool_link,
+ self.checkItemPresent( attrs.id, pinned_results_key, self ),
+ attrs.version,
+ attrs.min_width,
+ attrs.form_style );
+ }
}
+ });
+ if( is_present ) {
+ section_header_id = section.id;
+ section_header_name = section.name;
+ template_dict = self.appendTemplate( template_dict,
+ section_header_id,
+ section_header_name,
+ tools_template );
}
- });
- if( is_present ) {
- section_header_id = section.attributes.id;
- section_header_name = section.attributes.name;
- template_dict = self.appendTemplate( template_dict,
- section_header_id,
- section_header_name,
- tools_template );
}
- }
- else if( section.attributes.model_class === "Tool" || section.attributes.model_class === "DataSourceTool" ) {
- var attributes = section.attributes;
- if( item === attributes.id ) {
- if( !self.checkItemPresent( attributes.id, removed_results_key, self ) ) {
- tool_template = tool_template + self._buildLinkTemplate( attributes.id, attributes.link,
+ else if( section.model_class === "Tool" || section.model_class === "DataSourceTool" ) {
+ var attributes = section.attributes;
+ if( item === attributes.id ) {
+ if( !self.checkItemPresent( attributes.id, removed_results_key, self ) ) {
+ tool_template = tool_template + self._buildLinkTemplate( attributes.id, attributes.link,
attributes.name, attributes.description, attributes.target,
class_tool_link, self.checkItemPresent( attributes.id, pinned_results_key, self ),
attributes.version, attributes.min_width, attributes.form_style );
+ }
}
}
- }
});
- });
- // Remove the tool search result section if already present
- $el_search_result.find( '.search-tools' ).remove();
- // Make template for sections and tools
- self.makeToolSearchResultTemplate( template_dict, tool_template );
+ // Remove the tool search result section if already present
+ $el_search_result.find( '.search-tools' ).remove();
+ // Make template for sections and tools
+ self.makeToolSearchResultTemplate( template_dict, tool_template );
},
/** Append the template or creates a new section */
appendTemplate: function( collection, id, name, text ) {
- var is_present = false;
- _.each( collection, function( item ) {
- if( id === item.id ) {
- item.template = item.template + text;
- is_present = true;
+ var is_present = false;
+ _.each( collection, function( item ) {
+ if( id === item.id ) {
+ item.template = item.template + text;
+ is_present = true;
+ }
+ });
+ if(!is_present) {
+ collection.push( { id: id, template: text, name: name } );
}
- });
- if(!is_present) {
- collection.push( { id: id, template: text, name: name } );
- }
- return collection;
+ return collection;
},
/** Register tool search link click */
registerToolLinkClick: function( self ) {
- $( ".tool-search-link" ).click(function( e ) {
+ $( ".tool-search-link" ).click(function( e ) {
e.preventDefault();
self.saveMostUsedToolsCount( this, self );
- self.searchedToolLink( self, e );
- });
+ self.searchedToolLink( self, e );
+ });
$( ".most-used-tools" ).click(function( e ) {
e.preventDefault();
self.saveMostUsedToolsCount( this, self );
- self.searchedToolLink( self, e );
- });
+ self.searchedToolLink( self, e );
+ });
},
/** Save count of the most used tools */
saveMostUsedToolsCount: function( el, self ) {
var item = {};
- item = { 'id': $( el ).attr( 'data-id' ),
- 'desc': $( el )[0].innerText,
- 'link': $( el ).attr( 'href' ),
- 'target': $( el ).attr( 'target' ),
- 'formstyle': $( el ).attr( 'data-formstyle' ),
- 'version': $( el ).attr( 'data-version' ) };
+ item = {
+ 'id': $( el ).attr( 'data-id' ),
+ 'desc': $( el )[0].innerText,
+ 'link': $( el ).attr( 'href' ),
+ 'target': $( el ).attr( 'target' ),
+ 'formstyle': $( el ).attr( 'data-formstyle' ),
+ 'version': $( el ).attr( 'data-version' )
+ };
self.setStorageObject( self, window.Galaxy.user.id, 'most_used_tools', item, 1 );
},
@@ -577,7 +577,7 @@ $(document).ready(function() {
if( $el_removeditems.children().length === 0 ) {
$el_removeditems.append( self._templateNoItems() );
}
- });
+ });
// Register the click of trash icon in elements of favorites section
$( '.remove-fav' ).click(function( e ) {
var $el_favourites = $( '.fav-header' ),
@@ -599,13 +599,13 @@ $(document).ready(function() {
$el_remove_item.removeClass( 'hide' ).addClass( 'show' );
}
});
- });
+ });
},
/** Remove items from data storage for trash icon */
removeItems: function( self, _self, e, type ) {
e.preventDefault();
- e.stopPropagation();
+ e.stopPropagation();
self.removeFromDataStorage( self, $( _self ).parent(), type );
$( _self ).parent().remove();
},
@@ -614,16 +614,16 @@ $(document).ready(function() {
registerLinkActionClickEvent: function( self, $el, $el_parent_section ) {
// Register click of trash icon in search results
// and move item to excluded section
- $el.find( ".remove-item" ).click(function( e ) {
- e.preventDefault();
- e.stopPropagation();
- self.setStorage( self, $( this ).parent() );
- $( this ).parent().remove();
- // If there are not elements left, remove the section
- if( $el_parent_section.find( '.remove-item' ).length === 0 ) {
- $el_parent_section.remove();
- }
- });
+ $el.find( ".remove-item" ).click(function( e ) {
+ e.preventDefault();
+ e.stopPropagation();
+ self.setStorage( self, $( this ).parent() );
+ $( this ).parent().remove();
+ // If there are not elements left, remove the section
+ if( $el_parent_section.find( '.remove-item' ).length === 0 ) {
+ $el_parent_section.remove();
+ }
+ });
// Register click of pin icon to add the element to favorites section
$el.find( ".pin-item" ).click(function( e ) {
var $el_this = $( this ),
@@ -632,7 +632,7 @@ $(document).ready(function() {
titles_added = 'Added to favourites',
class_removeitem = '.remove-item';
e.preventDefault();
- e.stopPropagation();
+ e.stopPropagation();
// Toggle between pin and unpin
// If pinned, then unpin and vice-versa
if( $el_this.hasClass( class_pinned ) ) {
@@ -652,7 +652,7 @@ $(document).ready(function() {
if( self.getActiveFilter() === "all" ) {
self.showPinnedItems( '.fav-header' );
}
- });
+ });
},
/** Set localstorage for pinned items */
@@ -664,17 +664,17 @@ $(document).ready(function() {
/** Build removed links */
showRemovedLinks: function() {
- var self = this,
+ var self = this,
$el_removed_result = $( '.removed-items' ),
html_text = "",
$el_span = null,
removed_results_html = null,
title_restore_search = "Restore to search";
$el_removed_result.html( "" );
- // Build the removed result from web storage
+ // Build the removed result from web storage
removed_results_html = self.getStorageObject( self, window.Galaxy.user.id, 'removed_results' );
- for( item in removed_results_html ) {
- html_text = html_text + removed_results_html[ item ];
+ for(var item in removed_results_html ) {
+ html_text = html_text + removed_results_html[ item ];
}
// Build html if there is an item
if( html_text.length > 0 ) {
@@ -686,7 +686,7 @@ $(document).ready(function() {
$el_span.removeClass( 'remove-item' ).addClass( 'restore-item' );
// Update the title of the delete icon
$el_span.attr( 'title', title_restore_search );
- self.registerRemoveLinkClicks( self );
+ self.registerRemoveLinkClicks( self );
}
else {
$el_removed_result.append( self._templateNoItems() );
@@ -696,9 +696,9 @@ $(document).ready(function() {
/** Display pinned items */
showPinnedItems: function( class_name ) {
var self = this,
- pinned_results = {},
+ pinned_results = {},
$el_search_results = $( '.search-results' ),
- html_text = "",
+ html_text = "",
fav_header = "",
title = 'Remove from favourites';
pinned_results = self.getStorageObject( self, window.Galaxy.user.id, 'pinned_results' );
@@ -770,7 +770,7 @@ $(document).ready(function() {
if( html_text.length > 0 ) {
used_tools_header = self._buildHeaderTemplate( 'used_tools_header', title, class_name );
$el_most_used_tools_result.append( used_tools_header );
-            $el_most_used_tools_result.find( '.used-tools-header' ).append( "" + html_text + "" );
+            $el_most_used_tools_result.find( '.used-tools-header' ).append( "" + html_text + "" );
self.registerToolLinkClick( self );
}
},
@@ -916,10 +916,10 @@ $(document).ready(function() {
/** Remove the delete item from localstorage */
removeFromDataStorage: function( self, $el, type ) {
- var link_id = "",
- elem = $el[0].outerHTML;
+ var link_id = "",
+ elem = $el[0].outerHTML;
// Get the id of the link
- link_id = ( $( elem ).attr( 'id' ) ? $( elem ).attr( 'id' ) : $( elem ).attr( 'data-id' ) );
+ link_id = ( $( elem ).attr( 'id' ) ? $( elem ).attr( 'id' ) : $( elem ).attr( 'data-id' ) );
// Delete it from web storage
self.deleteFromStorage( self, window.Galaxy.user.id, type, link_id );
},
@@ -931,7 +931,7 @@ $(document).ready(function() {
/** Set localstorage for the removed links */
setStorage: function( self, $el ) {
- self.setLocalStorageForRemovedLinks( self, $el[0].outerHTML );
+ self.setLocalStorageForRemovedLinks( self, $el[0].outerHTML );
},
/** Build web storage object based on whether user is logged in */
@@ -949,10 +949,10 @@ $(document).ready(function() {
if( !storageObject[ type ] ) {
storageObject[ type ] = {};
}
- }
- else {
- storageObject[ type ] = {};
- }
+ }
+ else {
+ storageObject[ type ] = {};
+ }
// Check for html strings and set element to web storage
if( isNaN( elem ) ) {
@@ -974,7 +974,7 @@ $(document).ready(function() {
}
}
// Set the object to key
- storageType.setItem( key, JSON.stringify( storageObject ) );
+ storageType.setItem( key, JSON.stringify( storageObject ) );
},
/** Return the web storage object */
@@ -1011,7 +1011,7 @@ $(document).ready(function() {
/** Return links template */
_buildLinkTemplate: function( id, link, name, description, target, cls, isBookmarked, version, min_width, form_style ) {
- var template = "",
+ var template = "",
bookmark_class = (isBookmarked ? "pinned-item" : ""),
bookmark_title = (isBookmarked ? "Added to favourites" : "Add to favourites") ;
template = " " +
- ( ( isBookmarked ) ? " " :
- " " ) +
- name + " " + (description ? description : "") + " ";
- return template;
+ template = template + "'> " +
+ ( ( isBookmarked ) ? " " :
+ " " ) +
+ name + " " + (description ? description : "") + "";
+ return template;
},
/** Build section header template */
_buildHeaderTemplate: function( id, name, cls ) {
- return "";
+ return "";
},
/** Template for no results for any query */
_templateNoResults: function() {
-        return 'No results. Please search with different keywords';
+        return 'No results. Please search with different keywords';
},
/** Template for no items when links are removed */
diff --git a/config/reports.yml.sample b/config/reports.yml.sample
new file mode 100644
index 000000000000..eb56f778143c
--- /dev/null
+++ b/config/reports.yml.sample
@@ -0,0 +1,73 @@
+uwsgi:
+
+ # The address and port on which to listen. By default, only listen to
+ # localhost (reports will not be accessible over the network). Use
+ # '0.0.0.0' to listen on all available network interfaces.
+ http: 127.0.0.1:9001
+
+ threads: 8
+
+ http-raw-body: True
+
+ offload-threads: 8
+
+ module: galaxy.webapps.reports.buildapp:uwsgi_app()
+
+reports:
+
+ # If running behind a proxy server and Galaxy is served from a
+ # subdirectory, enable the proxy-prefix filter and set the prefix in
+ # the [filter:proxy-prefix] section above.
+ #filter-with: proxy-prefix
+
+ # If proxy-prefix is enabled and you're running more than one Galaxy
+ # instance behind one hostname, you will want to set this to the same
+ # path as the prefix in the filter above. This value becomes the
+ # "path" attribute set in the cookie so the cookies from each instance
+ # will not clobber each other.
+ #cookie_path: None
+
+ # Verbosity of console log messages. Acceptable values can be found
+ # here: https://docs.python.org/2/library/logging.html#logging-levels
+ #log_level: DEBUG
+
+  # Database connection. Galaxy reports are intended for production
+  # Galaxy instances, so sqlite (and the default value below) is not
+  # supported. An SQLAlchemy connection string should be used to
+  # specify an external database.
+ #database_connection: sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE
+
+  # Where dataset files are saved.
+ #file_path: database/files
+
+  # Temporary storage for additional datasets; this should be shared
+  # through the cluster.
+ #new_file_path: database/tmp
+
+  # Mako templates are compiled as needed and cached for reuse; this
+  # directory is used for the cache.
+ #template_cache_path: database/compiled_templates/reports
+
+ # Configuration for debugging middleware
+ #debug: False
+
+ # Check for WSGI compliance.
+ #use_lint: False
+
+ # NEVER enable this on a public site (even test or QA)
+ #use_interactive: True
+
+ # Write thread status periodically to 'heartbeat.log' (careful, uses
+ # disk space rapidly!)
+ #use_heartbeat: True
+
+ # Profiling middleware (cProfile based)
+ #use_profile: True
+
+  # Mail: the SMTP server used for outgoing mail
+ #smtp_server: yourserver@yourfacility.edu
+
+  # Mail: the address to which error reports are sent
+ #error_email_to: your_bugs@bx.psu.edu
+
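The comments above note that sqlite is unsupported for Reports. As a minimal
sketch of a pre-flight check (assuming PyYAML and the ``config/reports.yml``
path; neither is part of this patch):

.. code-block:: python

    import yaml

    def check_reports_config(path="config/reports.yml"):
        """Load reports.yml and refuse a sqlite database_connection."""
        with open(path) as fh:
            config = yaml.safe_load(fh) or {}
        db = (config.get("reports") or {}).get("database_connection", "")
        if not db or db.startswith("sqlite"):
            raise ValueError(
                "Reports needs an external database; point database_connection "
                "at the same SQLAlchemy URL your Galaxy instance uses.")
        return config

    if __name__ == "__main__":
        check_reports_config()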
diff --git a/config/tool_conf.xml.main b/config/tool_conf.xml.main
index aa5b7c60cc15..c26c8b2cb889 100644
--- a/config/tool_conf.xml.main
+++ b/config/tool_conf.xml.main
@@ -15,8 +15,8 @@
-
+
diff --git a/config/tool_conf.xml.sample b/config/tool_conf.xml.sample
index b8b98a82d778..ae8bc9b96685 100644
--- a/config/tool_conf.xml.sample
+++ b/config/tool_conf.xml.sample
@@ -24,8 +24,8 @@
-
+
diff --git a/config/tool_shed.yml.sample b/config/tool_shed.yml.sample
new file mode 100644
index 000000000000..3a9d6a46eec1
--- /dev/null
+++ b/config/tool_shed.yml.sample
@@ -0,0 +1,208 @@
+uwsgi:
+
+ # The address and port on which to listen. By default, only listen to
+ # localhost (tool_shed will not be accessible over the network). Use
+ # '0.0.0.0' to listen on all available network interfaces.
+ http: 127.0.0.1:9009
+
+ threads: 8
+
+ http-raw-body: True
+
+ offload-threads: 8
+
+ module: galaxy.webapps.tool_shed.buildapp:uwsgi_app()
+
+tool_shed:
+
+ # Verbosity of console log messages. Acceptable values can be found
+ # here: https://docs.python.org/2/library/logging.html#logging-levels
+ #log_level: DEBUG
+
+ # By default, the Tool Shed uses a SQLite database at
+ # 'database/community.sqlite'. You may use a SQLAlchemy connection
+ # string to specify an external database instead. This string takes
+ # many options which are explained in detail in the config file
+ # documentation.
+ #database_connection: sqlite:///./database/community.sqlite?isolation_level=IMMEDIATE
+
+ # Where the hgweb.config file is stored. The default is the Galaxy
+ # installation directory.
+ #hgweb_config_dir: None
+
+ # Where tool shed repositories are stored.
+ #file_path: database/community_files
+
+ # Temporary storage for additional datasets, this should be shared
+ # through the cluster
+ #new_file_path: database/tmp
+
+ # File containing old-style genome builds
+ #builds_file_path: tool-data/shared/ucsc/builds.txt
+
+ # Format string used when showing date and time information. The
+ # string may contain: - the directives used by Python time.strftime()
+ # function (see
+ # https://docs.python.org/2/library/time.html#time.strftime ), -
+ # $locale (complete format string for the server locale), - $iso8601
+ # (complete format string as specified by ISO 8601 international
+ # standard).
+ #pretty_datetime_format: $locale (UTC)
+
+  # -- Repository and Tool search. Using the script located at
+  # scripts/build_ts_whoosh_index.py you can generate a search index
+  # and allow full-text API searching over the repositories and tools
+  # within the Tool Shed, given that you specify the following two
+  # config options.
+ #toolshed_search_on: True
+
+  # -- Repository and Tool search. Using the script located at
+  # scripts/build_ts_whoosh_index.py you can generate a search index
+  # and allow full-text API searching over the repositories and tools
+  # within the Tool Shed, given that you specify the following two
+  # config options.
+ #whoosh_index_dir: database/toolshed_whoosh_indexes
+
+ # For searching repositories at /api/repositories:
+ #repo_name_boost: 0.9
+
+ # For searching repositories at /api/repositories:
+ #repo_description_boost: 0.6
+
+ # For searching repositories at /api/repositories:
+ #repo_long_description_boost: 0.5
+
+ # For searching repositories at /api/repositories:
+ #repo_homepage_url_boost: 0.3
+
+ # For searching repositories at /api/repositories:
+ #repo_remote_repository_url_boost: 0.2
+
+ # For searching repositories at /api/repositories:
+ #repo_owner_username_boost: 0.3
+
+ # For searching tools at /api/tools
+ #tool_name_boost: 1.2
+
+ # For searching tools at /api/tools
+ #tool_description_boost: 0.6
+
+ # For searching tools at /api/tools
+ #tool_help_boost: 0.4
+
+ # For searching tools at /api/tools
+ #tool_repo_owner_username: 0.3
+
+ # You can enter tracking code here to track visitor's behavior through
+ # your Google Analytics account. Example: UA-XXXXXXXX-Y
+ #ga_code: None
+
+ # The Tool Shed encodes various internal values when these values will
+ # be output in some format (for example, in a URL or cookie). You
+ # should set a key to be used by the algorithm that encodes and
+ # decodes these values. It can be any string. If left unchanged,
+ # anyone could construct a cookie that would grant them access to
+ # others' sessions. One simple way to generate a value for this is
+ # with the shell command: python -c 'import time; print time.time()'
+ # | md5sum | cut -f 1 -d ' '
+ #id_secret: changethisinproductiontoo
+
+ # User authentication can be delegated to an upstream proxy server
+ # (usually Apache). The upstream proxy should set a REMOTE_USER
+ # header in the request. Enabling remote user disables regular logins.
+ # For more information, see:
+ # https://wiki.galaxyproject.org/Admin/Config/ApacheProxy
+ #use_remote_user: False
+
+ # If use_remote_user is enabled, anyone who can log in to the Galaxy
+ # host may impersonate any other user by simply sending the
+ # appropriate header. Thus a secret shared between the upstream proxy
+  # appropriate header. Thus a secret shared between the upstream proxy
+  # server and Galaxy is required. If anyone other than the Galaxy user
+  # is using the server, then apache/nginx should pass a value in the
+  # header 'GX_SECRET' that is identical to the one below.
+
+ # Configuration for debugging middleware
+ #debug: False
+
+ # Check for WSGI compliance.
+ #use_lint: False
+
+ # NEVER enable this on a public site (even test or QA)
+ #use_interactive: True
+
+ # Administrative users - set this to a comma-separated list of valid
+ # Tool Shed users (email addresses). These users will have access to
+ # the Admin section of the server, and will have access to create
+ # users, groups, roles, libraries, and more.
+ #admin_users: None
+
+ # Force everyone to log in (disable anonymous access)
+ #require_login: False
+
+ # For use by email messages sent from the tool shed
+ #smtp_server: smtp.your_tool_shed_server
+
+ # For use by email messages sent from the tool shed
+ #email_from: your_tool_shed_email@server
+
+ # If your SMTP server requires a username and password, you can
+ # provide them here (password in cleartext here, but if your server
+ # supports STARTTLS it will be sent over the network encrypted).
+ #smtp_username: None
+
+ # If your SMTP server requires a username and password, you can
+ # provide them here (password in cleartext here, but if your server
+ # supports STARTTLS it will be sent over the network encrypted).
+ #smtp_password: None
+
+ # If your SMTP server requires SSL from the beginning of the
+ # connection
+ #smtp_ssl: False
+
+ # The URL linked by the "Support" link in the "Help" menu.
+ #support_url: https://wiki.galaxyproject.org/Support
+
+ # Address to join mailing list
+ #mailing_join_addr: galaxy-announce-join@bx.psu.edu
+
+ # Write thread status periodically to 'heartbeat.log' (careful, uses
+ # disk space rapidly!)
+ #use_heartbeat: True
+
+ # Profiling middleware (cProfile based)
+ #use_profile: True
+
+ # Enable creation of Galaxy flavor Docker Image
+ #enable_galaxy_flavor_docker_image: False
+
+ # Show a message box under the masthead.
+ #message_box_visible: False
+
+ # Show a message box under the masthead.
+ #message_box_content: None
+
+ # Show a message box under the masthead.
+ #message_box_class: info
+
+ # Serving static files (needed if running standalone)
+ #static_enabled: True
+
+ # Serving static files (needed if running standalone)
+ #static_cache_time: 360
+
+ # Serving static files (needed if running standalone)
+ #static_dir: static/
+
+ # Serving static files (needed if running standalone)
+ #static_images_dir: static/images
+
+ # Serving static files (needed if running standalone)
+ #static_favicon_dir: static/favicon.ico
+
+ # Serving static files (needed if running standalone)
+ #static_scripts_dir: static/scripts/
+
+ # Serving static files (needed if running standalone)
+ #static_style_dir: static/style/blue
+
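The ``id_secret`` comment above suggests a shell one-liner. A rough Python
equivalent (illustrative only; any unpredictable string works, and a
urandom-based token is preferable):

.. code-block:: python

    import hashlib
    import os
    import time

    # md5-of-the-clock, mirroring the one-liner quoted in the sample above
    print(hashlib.md5(str(time.time()).encode("utf-8")).hexdigest())
    # a stronger alternative based on os.urandom
    print(hashlib.sha256(os.urandom(32)).hexdigest())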
diff --git a/config/user_preferences_extra_conf.yml.sample b/config/user_preferences_extra_conf.yml.sample
new file mode 100644
index 000000000000..8f8ef07670ae
--- /dev/null
+++ b/config/user_preferences_extra_conf.yml.sample
@@ -0,0 +1,41 @@
+preferences:
+ # the key you can refer to
+ apollo_url_01:
+ # description that is displayed to the user
+ description: The URL to your personal Apollo instance
+ inputs:
+ # the content can be accessed by apollo_url_01.apollo_url
+ - name: apollo_url
+ label: Apollo URL
+ # type of input field that will be displayed to the user
+    # can be text or password
+    type: text
+    # by default all inputs are required
+ required: True
+ - name: apollo_text
+ label: Apollo Text
+ # type of input field that will be displayed to the user
+    # can be text or password
+    type: text
+    # by default all inputs are required
+ required: True
+
+ openstack_account:
+ description: Your own Open Stack account
+ inputs:
+ - name: username
+ label: Username
+ type: text
+ required: False
+ - name: url
+ label: Open Stack URL
+ type: text
+ required: True
+
+ webhook_overlay_search:
+ description: Configuration option for your search results
+ inputs:
+ - name: max_search_results
+ label: Maximum number of search results
+ type: text
+ required: False
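A small sketch of validating a preferences file against the shape documented
above (PyYAML assumed; the rules are inferred from the comments, not from code
in this patch):

.. code-block:: python

    import yaml

    ALLOWED_TYPES = {"text", "password"}

    def validate_preferences(path="config/user_preferences_extra_conf.yml"):
        with open(path) as fh:
            prefs = (yaml.safe_load(fh) or {}).get("preferences", {})
        for key, section in prefs.items():
            assert "description" in section, "%s: missing description" % key
            for inp in section.get("inputs", []):
                assert inp.get("name") and inp.get("label"), \
                    "%s: every input needs a name and a label" % key
                assert inp.get("type") in ALLOWED_TYPES, \
                    "%s: input type must be text or password" % key
                # per the comments above, inputs are required by default
                inp.setdefault("required", True)
        return prefs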
diff --git a/contrib/nagios/check_galaxy.py b/contrib/nagios/check_galaxy.py
index a46b22154e72..565d1d278bee 100755
--- a/contrib/nagios/check_galaxy.py
+++ b/contrib/nagios/check_galaxy.py
@@ -34,11 +34,11 @@ def usage():
try:
- opts, args = getopt.getopt( sys.argv[1:], 'n' )
+ opts, args = getopt.getopt(sys.argv[1:], 'n')
except getopt.GetoptError as e:
print(str(e))
usage()
-if len( args ) < 1:
+if len(args) < 1:
usage()
server = args[0]
username = args[1]
@@ -56,9 +56,9 @@ def usage():
usage()
# state information
-var_dir = os.path.join( home, ".check_galaxy", server.replace('http://', '').replace('https://', ''), handler )
-if not os.access( var_dir, os.F_OK ):
- os.makedirs( var_dir, 0o700 )
+var_dir = os.path.join(home, ".check_galaxy", server.replace('http://', '').replace('https://', ''), handler)
+if not os.access(var_dir, os.F_OK):
+ os.makedirs(var_dir, 0o700)
# default timeout for twill browser is never
socket.setdefaulttimeout(60)
@@ -79,7 +79,7 @@ def __init__(self):
self._history_id = None
if not self.server.startswith('http'):
self.server = 'http://' + self.server
- self.cookie_jar = os.path.join( var_dir, "cookie_jar" )
+ self.cookie_jar = os.path.join(var_dir, "cookie_jar")
dprint("cookie jar path: %s" % self.cookie_jar)
if not os.access(self.cookie_jar, os.R_OK):
dprint("no cookie jar at above path, creating")
@@ -99,7 +99,7 @@ def req(self, path, data=None, method=None):
req = urllib2.Request(url, headers={'Content-Type': 'application/json'})
if method:
req.get_method = lambda: method
- res = self.opener.open( req )
+ res = self.opener.open(req)
print('==> at %s (%s)' % (url, method or 'GET'))
assert res.getcode() == 200, url
return res
@@ -115,7 +115,7 @@ def reset(self):
def delete_history(self):
# note, this could cause a history to be created and then deleted. i don't care.
- self.req( '/api/histories/%s' % self.history_id, method='DELETE' )
+ self.req('/api/histories/%s' % self.history_id, method='DELETE')
def login(self, user, pw):
self.get("/user/login")
@@ -135,9 +135,9 @@ def login(self, user, pw):
def runtool(self):
path = '/api/tools'
- data = { 'tool_id' : self.tool,
- 'history_id' : self.history_id,
- 'inputs' : { 'echo' : self.handler } }
+ data = {'tool_id' : self.tool,
+ 'history_id' : self.history_id,
+ 'inputs' : {'echo' : self.handler}}
res = self.req(path, data=data)
dprint(json.loads(res.read()))
@@ -210,7 +210,7 @@ def wait(self):
while count < maxiter:
count += 1
if not self.history_state_terminal:
- time.sleep( sleep_amount )
+ time.sleep(sleep_amount)
sleep_amount += 1
else:
break
diff --git a/cron/build_chrom_db.py b/cron/build_chrom_db.py
index ce472d257ee1..f6b2b8efc2f6 100644
--- a/cron/build_chrom_db.py
+++ b/cron/build_chrom_db.py
@@ -38,7 +38,7 @@ def getchrominfo(url, db):
"hgta_doTopSubmit": "get info"})
page = urlopen(URL)
for line in page:
- line = line.rstrip( "\r\n" )
+ line = line.rstrip("\r\n")
if line.startswith("#"):
continue
fields = line.split("\t")
diff --git a/doc/source/admin/dependency_resolvers.rst b/doc/source/admin/dependency_resolvers.rst
index 0d6930d1ef79..12e93a773573 100644
--- a/doc/source/admin/dependency_resolvers.rst
+++ b/doc/source/admin/dependency_resolvers.rst
@@ -220,8 +220,8 @@ debug
ensure_channels
conda channels to enable by default. See
http://conda.pydata.org/docs/custom-channels.html for more
- information about channels. This defaults to ``iuc,bioconda,r,defaults,conda-forge``.
- This order should be consistent with `Bioconda prescribed order `__
+ information about channels. This defaults to ``iuc,bioconda,conda-forge,defaults,r``.
+ This order should be consistent with the `Bioconda prescribed order `__
if it includes ``bioconda``.
auto_install
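A tiny sketch of the ordering rule described above; the channel list is the
new default from this page, and the check itself is illustrative, not Galaxy
code:

.. code-block:: python

    DEFAULT_CHANNELS = "iuc,bioconda,conda-forge,defaults,r".split(",")

    def channels_consistent(channels):
        """bioconda, when present, should precede conda-forge and defaults."""
        if "bioconda" not in channels:
            return True
        order = {name: i for i, name in enumerate(channels)}
        return all(order["bioconda"] < order[c]
                   for c in ("conda-forge", "defaults") if c in order)

    assert channels_consistent(DEFAULT_CHANNELS)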
diff --git a/doc/source/admin/reports.rst b/doc/source/admin/reports.rst
new file mode 100644
index 000000000000..bac451a7a4a9
--- /dev/null
+++ b/doc/source/admin/reports.rst
@@ -0,0 +1,57 @@
+Galaxy Reports
+========================================
+
+Overview
+----------------------------
+
+Admins wishing to have more information on the usage of a Galaxy instance
+can configure and use the Galaxy Reports application. This is a separate
+application that can run alongside Galaxy and must target the same database.
+
+The Galaxy Reports application's easy-to-use web interface will give you information on the
+following (and more):
+
+- Jobs
+
+ - Today's jobs
+ - Jobs per day this month
+ - Jobs in error per day this month
+ - All unfinished jobs
+ - Jobs per month
+ - Jobs in error per month
+ - Jobs per user
+ - Jobs per tool
+
+- Workflows
+
+ - Workflows per month
+ - Workflows per user
+
+- Users
+
+ - Registered users
+ - Date of last login
+ - User disk usage
+
+- System
+
+ - Disk space maintenance
+
+
+Configuration
+----------------------------
+
+- Configure ``config/reports.yml`` in the same manner as your main Galaxy instance (i.e., same database connection, but a different port). This is a uWSGI YAML configuration file and should contain a ``reports`` section with app-specific configuration (options described below).
+
+ - The default port for the reports application is ``9001``, and like Galaxy it only binds to localhost by default.
+ - ``database_connection`` should match the value used in your Galaxy configuration
+  - ``database_connection`` should point at a Postgres database; experimental support for MySQL is available, but sqlite is not supported at all.
+
+- Run reports in a uWSGI server with ``sh run_reports.sh``
+- Use a web browser and go to the address you configured in ``reports.yml`` (defaults to http://localhost:9001/)
+
+Configuration Options
+----------------------------
+
+.. include:: reports_options.rst
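As a small aside to the steps above, the address to browse can be derived from
the ``uwsgi`` section of ``config/reports.yml``; a sketch (PyYAML assumed, not
part of this patch):

.. code-block:: python

    import yaml

    def reports_url(path="config/reports.yml"):
        with open(path) as fh:
            config = yaml.safe_load(fh) or {}
        bind = (config.get("uwsgi") or {}).get("http", "127.0.0.1:9001")
        host, _, port = bind.partition(":")
        if host == "0.0.0.0":  # bound to all interfaces; browse via localhost
            host = "localhost"
        return "http://%s:%s/" % (host, port or "9001")

    print(reports_url())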
diff --git a/doc/source/admin/reports_options.rst b/doc/source/admin/reports_options.rst
new file mode 100644
index 000000000000..90557f7c844d
--- /dev/null
+++ b/doc/source/admin/reports_options.rst
@@ -0,0 +1,156 @@
+~~~~~~~~~~~~~~~
+``filter-with``
+~~~~~~~~~~~~~~~
+
+:Description:
+ If running behind a proxy server and Galaxy is served from a
+ subdirectory, enable the proxy-prefix filter and set the prefix in
+ the [filter:proxy-prefix] section above.
+:Default: proxy-prefix
+:Type: str
+
+
+~~~~~~~~~~~~~~~
+``cookie_path``
+~~~~~~~~~~~~~~~
+
+:Description:
+ If proxy-prefix is enabled and you're running more than one Galaxy
+ instance behind one hostname, you will want to set this to the
+ same path as the prefix in the filter above. This value becomes
+ the "path" attribute set in the cookie so the cookies from each
+ instance will not clobber each other.
+:Default: None
+:Type: str
+
+
+~~~~~~~~~~~~~
+``log_level``
+~~~~~~~~~~~~~
+
+:Description:
+    Verbosity of console log messages. Acceptable values can be found
+    here: https://docs.python.org/2/library/logging.html#logging-levels
+:Default: DEBUG
+:Type: str
+
+
+~~~~~~~~~~~~~~~~~~~~~~~
+``database_connection``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+:Description:
+    Database connection. Galaxy reports are intended for production
+    Galaxy instances, so sqlite (and the default value below) is not
+    supported. An SQLAlchemy connection string should be used to
+    specify an external database.
+:Default: sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE
+:Type: str
+
+
+~~~~~~~~~~~~~
+``file_path``
+~~~~~~~~~~~~~
+
+:Description:
+    Where dataset files are saved.
+:Default: database/files
+:Type: str
+
+
+~~~~~~~~~~~~~~~~~
+``new_file_path``
+~~~~~~~~~~~~~~~~~
+
+:Description:
+    Temporary storage for additional datasets; this should be shared
+    through the cluster.
+:Default: database/tmp
+:Type: str
+
+
+~~~~~~~~~~~~~~~~~~~~~~~
+``template_cache_path``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+:Description:
+    Mako templates are compiled as needed and cached for reuse; this
+    directory is used for the cache.
+:Default: database/compiled_templates/reports
+:Type: str
+
+
+~~~~~~~~~
+``debug``
+~~~~~~~~~
+
+:Description:
+ Configuration for debugging middleware
+:Default: False
+:Type: bool
+
+
+~~~~~~~~~~~~
+``use_lint``
+~~~~~~~~~~~~
+
+:Description:
+ Check for WSGI compliance.
+:Default: False
+:Type: bool
+
+
+~~~~~~~~~~~~~~~~~~~
+``use_interactive``
+~~~~~~~~~~~~~~~~~~~
+
+:Description:
+ NEVER enable this on a public site (even test or QA)
+:Default: True
+:Type: bool
+
+
+~~~~~~~~~~~~~~~~~
+``use_heartbeat``
+~~~~~~~~~~~~~~~~~
+
+:Description:
+ Write thread status periodically to 'heartbeat.log' (careful, uses
+ disk space rapidly!)
+:Default: True
+:Type: bool
+
+
+~~~~~~~~~~~~~~~
+``use_profile``
+~~~~~~~~~~~~~~~
+
+:Description:
+ Profiling middleware (cProfile based)
+:Default: True
+:Type: bool
+
+
+~~~~~~~~~~~~~~~
+``smtp_server``
+~~~~~~~~~~~~~~~
+
+:Description:
+    Mail: the SMTP server used for outgoing mail.
+:Default: yourserver@yourfacility.edu
+:Type: str
+
+
+~~~~~~~~~~~~~~~~~~
+``error_email_to``
+~~~~~~~~~~~~~~~~~~
+
+:Description:
+    Mail: the address to which error reports are sent.
+:Default: your_bugs@bx.psu.edu
+:Type: str
+
+
+
diff --git a/doc/source/admin/special_topics/bug_reports.rst b/doc/source/admin/special_topics/bug_reports.rst
new file mode 100644
index 000000000000..f2a6f79fe787
--- /dev/null
+++ b/doc/source/admin/special_topics/bug_reports.rst
@@ -0,0 +1,72 @@
+Bug Reports
+===========
+
+In 17.09, bug reports were refactored to a plugin-type infrastructure. This
+gives the administrator more control over how and when bug reports are
+generated. In the past, email reports would be generated only when the user
+clicked the button. Sentry reports would be generated for every failing
+tool, as soon as the tool failed. This disparate behaviour was unified under a
+single configuration file, ``config/error_report.xml.sample``.
+
+Let's look at that briefly:
+
+.. code-block:: xml
+
+
+
+
+
+
+
+
+The actual configuration file contains more documentation which has been
+stripped out here for the sake of brevity. As you can see, there are a couple of
+different plugins that already exist. Additionally, there are some options that
+are common to all plugins:
+
+``user_submission``
+ When true, this action is triggered only when the user is on the job error
+ page and clicks "submit bug report".
+
+ When false, this action is triggered *any* time a job errors, without user
+    involvement.
+
+``verbose``
+    When ``user_submission=True`` and ``verbose=True``, a message is displayed
+    to the end user. For example, the email plugin will inform the user that a
+    mail has been sent, and the JSON plugin will inform the user that a file
+    has been written to a specific directory.
+
+Email
+-----
+
+This is the classic bug-report mechanism that we are all familiar with. It
+generates an email to the admin and the submitting user containing detailed
+information about the job and links to the precise locations within Galaxy.
+
+As a plugin, nothing about this functionality has changed, though future
+updates may add features. It currently takes all of its configuration from your
+``$GALAXY/config/galaxy.ini``, e.g. the variables ``error_email_to`` and
+``email_from``.
+
+JSON
+----
+
+This is a demo-plugin that writes the contents of the error report to a file in
+your temp directory. This just serves as a full-featured reference
+implementation that others can use to build their own bug reporting plugins.
+
+When ``verbose="true" user_submission="true"``, the plugin will inform the user
+that a report has been written to ``/tmp/``.
+
+Sentry
+------
+
+This refactors the existing on-failure submit-to-Sentry behaviour into a bug
+reporting plugin. Now, for example, you can disable the automatic submission
+to Sentry and only submit when the user reports the failure.
+
+When ``verbose="true" user_submission="true"``, the plugin will inform the user
+that ``Submitted bug report to Sentry. Your guru meditation number is
+dc907d44ce294f78b267a56f68e5cd1a``, using the same phrasing that users will
+recognize from Galaxy internal server errors.
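The JSON plugin described above just serializes the report into the temp
directory. A standalone sketch of that behaviour follows; it does not use
Galaxy's actual plugin base class, and every name in it is illustrative:

.. code-block:: python

    import json
    import os
    import tempfile
    import time

    def write_json_report(report, directory=None):
        """Write an error-report dict to a JSON file and return its path."""
        directory = directory or tempfile.gettempdir()
        path = os.path.join(directory,
                            "gx_error_report_%d.json" % int(time.time()))
        with open(path, "w") as fh:
            json.dump(report, fh, indent=2)
        return path

    # hypothetical report payload, for demonstration only
    path = write_json_report({"tool_id": "example_tool",
                              "message": "tool failed"})
    print("A report has been written to %s" % path)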
diff --git a/doc/source/admin/special_topics/grt.rst b/doc/source/admin/special_topics/grt.rst
index cde12d76ba6e..9427b6e0cf83 100644
--- a/doc/source/admin/special_topics/grt.rst
+++ b/doc/source/admin/special_topics/grt.rst
@@ -5,37 +5,111 @@ This is an opt-in service which Galaxy admins can configure to contribute their
job run data back to the community. We hope that by collecting this information
we can build accurate models of tool CPU/memory/time requirements. In turn,
admins will be able to use this analyzed data to optimize their job
-distribution across highly heterogenous clusters.
+distribution across highly heterogeneous clusters.
Registration
------------
You will need to register your Galaxy instance with the Galactic Radio
-Telescope (GRT). This can be done at `https://radio-telescope.galaxyproject.org
-<https://radio-telescope.galaxyproject.org>`__.
+Telescope (GRT). This can be done at `https://telescope.galaxyproject.org
+<https://telescope.galaxyproject.org>`__.
-Submitting Data
----------------
+About the Script
+----------------
Once you've registered your Galaxy instance, you'll receive an instance ID and
-an API key which are used to run ``scripts/grt.py``. The tool itself is very simple
-to run. It collects the last 7 days (by default) of data from your Galaxy
-server, and sends them to the GRT for processing and display. Additionally
-it collects the total number of users, and the number of users who ran
-jobs in the last N days.
-
-Running the tool is simple:
-
-.. code-block:: shell
-
- python scripts/grt.py \
- \
- \
- -c config/galaxy.ini \
- --grt-url https://radio-telescope.galaxyproject.org/api/v1/upload/
- --days 7
-
-The only required parameters are the instance ID and API key. As you can see in
-the example command, the GRT URL is configurable. If you do not wish to
-participate in the public version of this experiment you can host your own
-radio telescope to collect Galactic information.
+an API key which are used to run ``scripts/grt/export.py``. The tool itself is very
+simple to run. GRT produces a directory of reports that can be synced with the
+GRT server. Each time it runs, GRT processes only the jobs recorded since its
+previous run. On the first run, GRT will attempt to export all job data for
+your instance, which may be very slow depending on your instance size. We have
+attempted to optimize this as much as is feasible.
+
+Data Privacy
+------------
+
+All data submitted to the GRT will be released into the public domain. If there
+are certain tools you do not want included, or certain parameters you wish to
+hide (e.g. because they contain API keys), then you can take advantage of the
+built-in sanitization. The ``scripts/grt/grt.yml.sample`` file allows you to
+build up sanitization rules for the job logs.
+
+.. code-block:: yaml
+
+ sanitization:
+ # Blacklist the entire tool from appearing
+ tools:
+ - __SET_METADATA__
+ - upload1
+ # Or you can blacklist individual parameters from being submitted, e.g. if
+ # you have API keys as a tool parameter.
+ tool_params:
+ # Or to blacklist under a specific tool, just specify the ID
+ some_tool_id:
+ - dbkey
+ # If you need to specify a parameter multiple levels deep, you can
+ # do that as well. Currently we only support blacklisting via the
+ # full path, rather than just a path component. So everything under
+ # `path.to.parameter` will be blacklisted.
+ - path.to.parameter
+ # However you could not do "parameter" and have everything under
+ # `path.to.parameter` be removed.
+ # Repeats are rendered as an *, e.g.: repeat_name.*.values
+
+To keep the results from specific tools out of the submitted data, just add the
+tool ID under the ``tools`` list.
+
+Blacklisting tool parameters is more complex. Under the ``tool_params`` key,
+add a key for the tool and supply a list of parameters you wish to blacklist.
+*NB: This will slow down processing of records associated with that tool.*
+Selecting keys is done identically to writing test cases, except that for a
+repeat element you replace the numeric identifier in the path with ``*``, e.g.
+``repeat_name.*.some_subkey``.
+
+Data Collection Process
+-----------------------
+
+.. code-block:: console
+
+ cd $GALAXY; python scripts/grt/export.py -l debug
+
+
+``export.py`` connects to your Galaxy database and makes queries against
+three primary tables:
+
+- job
+- job_parameter
+- job_metric_numeric
+
+These are exported, with very little processing, as tabular files to the GRT
+reports directory, ``$GALAXY/reports/``. We only collect new job data that we
+have not seen since the previous run. The last-seen job ID is stored in
+``$GALAXY/reports/.checkpoint``. Once the files have been exported, they are
+put in a compressed archive, and some metadata about the export process is
+written to a json file with the same name as the report archive.
+
+You may wish to inspect these files to be sure that you're comfortable with the
+information being sent.
+
+Once you're happy with the data, you can submit it with the GRT submission tool...
+
+Data Submission
+---------------
+
+.. code-block:: console
+
+ cd $GALAXY; python scripts/grt/upload.py
+
+``scripts/grt/upload.py`` is a script which will submit your data to the
+configured GRT server. You must first be registered with the server which will
+also walk you through the setup process.
+
+Submitting your reports is very simple: the script will log in to the server,
+determine which reports the server does not yet have, and then upload those.
+
+Administrators running firewalled Galaxy instances without internet access can
+still take advantage of GRT by copying the report files to a machine that does
+have internet access. Alternatively, you can deploy GRT on your own
+infrastructure if you don't want to share your job logs.
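A short sketch of the full-path blacklisting rule described above: parameter
paths are dotted, repeat indices are rendered as ``*``, and only exact
full-path matches are removed. This is illustrative only, not GRT's actual
implementation:

.. code-block:: python

    def flatten(params, prefix=""):
        """Flatten nested params into {dotted.path: value}; list indices
        become '*', so repeat entries collapse onto one path in this sketch."""
        items = {}
        if isinstance(params, dict):
            for key, value in params.items():
                items.update(flatten(value, prefix + key + "."))
        elif isinstance(params, list):
            for value in params:
                items.update(flatten(value, prefix + "*."))
        else:
            items[prefix.rstrip(".")] = params
        return items

    def sanitize(params, blacklist):
        return {path: value for path, value in flatten(params).items()
                if path not in set(blacklist)}

    params = {"path": {"to": {"parameter": "secret"}},
              "repeat_name": [{"values": 1}, {"values": 2}]}
    # drops path.to.parameter, keeps repeat_name.*.values
    print(sanitize(params, ["path.to.parameter"]))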
diff --git a/doc/source/admin/special_topics/index.rst b/doc/source/admin/special_topics/index.rst
index b3cf8f7aa9aa..0abc9e283f5f 100644
--- a/doc/source/admin/special_topics/index.rst
+++ b/doc/source/admin/special_topics/index.rst
@@ -11,3 +11,4 @@ Special Topics
chat
webhooks
performance_tracking
+ bug_reports
diff --git a/lib/galaxy/__init__.py b/lib/galaxy/__init__.py
index 07a3f6c52a0b..da33de39875c 100644
--- a/lib/galaxy/__init__.py
+++ b/lib/galaxy/__init__.py
@@ -8,13 +8,13 @@
# compat: BadZipFile introduced in Python 2.7
import zipfile
-if not hasattr( zipfile, 'BadZipFile' ):
+if not hasattr(zipfile, 'BadZipFile'):
zipfile.BadZipFile = zipfile.error
# compat: patch to add the NullHandler class to logging
import logging
-if not hasattr( logging, 'NullHandler' ):
- class NullHandler( logging.Handler ):
- def emit( self, record ):
+if not hasattr(logging, 'NullHandler'):
+ class NullHandler(logging.Handler):
+ def emit(self, record):
pass
logging.NullHandler = NullHandler
diff --git a/lib/galaxy/actions/admin.py b/lib/galaxy/actions/admin.py
index 13a902c5b5ae..3879f6b794e9 100644
--- a/lib/galaxy/actions/admin.py
+++ b/lib/galaxy/actions/admin.py
@@ -6,195 +6,206 @@
from galaxy import util
from galaxy.exceptions import ActionInputError
-log = logging.getLogger( __name__ )
+log = logging.getLogger(__name__)
-class AdminActions( object ):
+class AdminActions(object):
"""
Mixin for controllers that provide administrative functionality.
"""
- def _create_quota( self, params ):
- if params.amount.lower() in ( 'unlimited', 'none', 'no limit' ):
+
+ def _create_quota(self, params, decode_id=None):
+ if params.amount.lower() in ('unlimited', 'none', 'no limit'):
create_amount = None
else:
try:
- create_amount = util.size_to_bytes( params.amount )
+ create_amount = util.size_to_bytes(params.amount)
except AssertionError:
create_amount = False
if not params.name or not params.description:
- raise ActionInputError( "Enter a valid name and a description." )
- elif self.sa_session.query( self.app.model.Quota ).filter( self.app.model.Quota.table.c.name == params.name ).first():
- raise ActionInputError( "Quota names must be unique and a quota with that name already exists, so choose another name." )
- elif not params.get( 'amount', None ):
- raise ActionInputError( "Enter a valid quota amount." )
+ raise ActionInputError("Enter a valid name and a description.")
+ elif self.sa_session.query(self.app.model.Quota).filter(self.app.model.Quota.table.c.name == params.name).first():
+ raise ActionInputError("Quota names must be unique and a quota with that name already exists, so choose another name.")
+ elif not params.get('amount', None):
+ raise ActionInputError("Enter a valid quota amount.")
elif create_amount is False:
- raise ActionInputError( "Unable to parse the provided amount." )
+ raise ActionInputError("Unable to parse the provided amount.")
elif params.operation not in self.app.model.Quota.valid_operations:
- raise ActionInputError( "Enter a valid operation." )
+ raise ActionInputError("Enter a valid operation.")
elif params.default != 'no' and params.default not in self.app.model.DefaultQuotaAssociation.types.__dict__.values():
- raise ActionInputError( "Enter a valid default type." )
+ raise ActionInputError("Enter a valid default type.")
elif params.default != 'no' and params.operation != '=':
- raise ActionInputError( "Operation for a default quota must be '='." )
+ raise ActionInputError("Operation for a default quota must be '='.")
elif create_amount is None and params.operation != '=':
- raise ActionInputError( "Operation for an unlimited quota must be '='." )
+ raise ActionInputError("Operation for an unlimited quota must be '='.")
else:
# Create the quota
- quota = self.app.model.Quota( name=params.name, description=params.description, amount=create_amount, operation=params.operation )
- self.sa_session.add( quota )
+ quota = self.app.model.Quota(name=params.name, description=params.description, amount=create_amount, operation=params.operation)
+ self.sa_session.add(quota)
# If this is a default quota, create the DefaultQuotaAssociation
if params.default != 'no':
- self.app.quota_agent.set_default_quota( params.default, quota )
+ self.app.quota_agent.set_default_quota(params.default, quota)
+ message = "Default quota '%s' has been created."
else:
# Create the UserQuotaAssociations
- for user in [ self.sa_session.query( self.app.model.User ).get( x ) for x in params.in_users ]:
- uqa = self.app.model.UserQuotaAssociation( user, quota )
- self.sa_session.add( uqa )
+ in_users = [self.sa_session.query(self.app.model.User).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_users)]
+ in_groups = [self.sa_session.query(self.app.model.Group).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_groups)]
+ if None in in_users:
+ raise ActionInputError("One or more invalid user id has been provided.")
+ for user in in_users:
+ uqa = self.app.model.UserQuotaAssociation(user, quota)
+ self.sa_session.add(uqa)
# Create the GroupQuotaAssociations
- for group in [ self.sa_session.query( self.app.model.Group ).get( x ) for x in params.in_groups ]:
- gqa = self.app.model.GroupQuotaAssociation( group, quota )
- self.sa_session.add( gqa )
+ if None in in_groups:
+ raise ActionInputError("One or more invalid group id has been provided.")
+ for group in in_groups:
+ gqa = self.app.model.GroupQuotaAssociation(group, quota)
+ self.sa_session.add(gqa)
+ message = "Quota '%s' has been created with %d associated users and %d associated groups." % (quota.name, len(in_users), len(in_groups))
self.sa_session.flush()
- message = "Quota '%s' has been created with %d associated users and %d associated groups." % \
- ( quota.name, len( params.in_users ), len( params.in_groups ) )
return quota, message
- def _rename_quota( self, quota, params ):
+ def _rename_quota(self, quota, params):
if not params.name:
- raise ActionInputError( 'Enter a valid name' )
- elif params.name != quota.name and self.sa_session.query( self.app.model.Quota ).filter( self.app.model.Quota.table.c.name == params.name ).first():
- raise ActionInputError( 'A quota with that name already exists' )
+ raise ActionInputError('Enter a valid name.')
+ elif params.name != quota.name and self.sa_session.query(self.app.model.Quota).filter(self.app.model.Quota.table.c.name == params.name).first():
+ raise ActionInputError('A quota with that name already exists.')
else:
old_name = quota.name
quota.name = params.name
quota.description = params.description
- self.sa_session.add( quota )
+ self.sa_session.add(quota)
self.sa_session.flush()
- message = "Quota '%s' has been renamed to '%s'" % ( old_name, params.name )
+ message = "Quota '%s' has been renamed to '%s'." % (old_name, params.name)
return message
- def _manage_users_and_groups_for_quota( self, quota, params ):
+ def _manage_users_and_groups_for_quota(self, quota, params, decode_id=None):
if quota.default:
- raise ActionInputError( 'Default quotas cannot be associated with specific users and groups' )
+ raise ActionInputError('Default quotas cannot be associated with specific users and groups.')
else:
- in_users = [ self.sa_session.query( self.app.model.User ).get( x ) for x in util.listify( params.in_users ) ]
- in_groups = [ self.sa_session.query( self.app.model.Group ).get( x ) for x in util.listify( params.in_groups ) ]
- self.app.quota_agent.set_entity_quota_associations( quotas=[ quota ], users=in_users, groups=in_groups )
- self.sa_session.refresh( quota )
- message = "Quota '%s' has been updated with %d associated users and %d associated groups" % ( quota.name, len( in_users ), len( in_groups ) )
+ in_users = [self.sa_session.query(self.app.model.User).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_users)]
+ if None in in_users:
+ raise ActionInputError("One or more invalid user id has been provided.")
+ in_groups = [self.sa_session.query(self.app.model.Group).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_groups)]
+ if None in in_groups:
+ raise ActionInputError("One or more invalid group id has been provided.")
+ self.app.quota_agent.set_entity_quota_associations(quotas=[quota], users=in_users, groups=in_groups)
+ self.sa_session.refresh(quota)
+ message = "Quota '%s' has been updated with %d associated users and %d associated groups." % (quota.name, len(in_users), len(in_groups))
return message
- def _edit_quota( self, quota, params ):
- if params.amount.lower() in ( 'unlimited', 'none', 'no limit' ):
+ def _edit_quota(self, quota, params):
+ if params.amount.lower() in ('unlimited', 'none', 'no limit'):
new_amount = None
else:
try:
- new_amount = util.size_to_bytes( params.amount )
+ new_amount = util.size_to_bytes(params.amount)
except AssertionError:
new_amount = False
if not params.amount:
- raise ActionInputError( 'Enter a valid amount' )
+ raise ActionInputError('Enter a valid amount.')
elif new_amount is False:
- raise ActionInputError( 'Unable to parse the provided amount' )
+ raise ActionInputError('Unable to parse the provided amount.')
elif params.operation not in self.app.model.Quota.valid_operations:
- raise ActionInputError( 'Enter a valid operation' )
+ raise ActionInputError('Enter a valid operation.')
else:
quota.amount = new_amount
quota.operation = params.operation
- self.sa_session.add( quota )
+ self.sa_session.add(quota)
self.sa_session.flush()
- message = "Quota '%s' is now '%s'" % ( quota.name, quota.operation + quota.display_amount )
+ message = "Quota '%s' is now '%s'." % (quota.name, quota.operation + quota.display_amount)
return message
- def _set_quota_default( self, quota, params ):
+ def _set_quota_default(self, quota, params):
if params.default != 'no' and params.default not in self.app.model.DefaultQuotaAssociation.types.__dict__.values():
- raise ActionInputError( 'Enter a valid default type.' )
+ raise ActionInputError('Enter a valid default type.')
else:
if params.default != 'no':
- self.app.quota_agent.set_default_quota( params.default, quota )
- message = "Quota '%s' is now the default for %s users" % ( quota.name, params.default )
+ self.app.quota_agent.set_default_quota(params.default, quota)
+ message = "Quota '%s' is now the default for %s users." % (quota.name, params.default)
else:
if quota.default:
- message = "Quota '%s' is no longer the default for %s users." % ( quota.name, quota.default[0].type )
+ message = "Quota '%s' is no longer the default for %s users." % (quota.name, quota.default[0].type)
for dqa in quota.default:
- self.sa_session.delete( dqa )
+ self.sa_session.delete(dqa)
self.sa_session.flush()
else:
message = "Quota '%s' is not a default." % quota.name
return message
- def _unset_quota_default( self, quota, params ):
+ def _unset_quota_default(self, quota, params=None):
if not quota.default:
- raise ActionInputError( "Quota '%s' is not a default." % quota.name )
+ raise ActionInputError("Quota '%s' is not a default." % quota.name)
else:
- message = "Quota '%s' is no longer the default for %s users." % ( quota.name, quota.default[0].type )
+ message = "Quota '%s' is no longer the default for %s users." % (quota.name, quota.default[0].type)
for dqa in quota.default:
- self.sa_session.delete( dqa )
+ self.sa_session.delete(dqa)
self.sa_session.flush()
return message
- def _mark_quota_deleted( self, quota, params ):
- quotas = util.listify( quota )
+ def _delete_quota(self, quota, params=None):
+ quotas = util.listify(quota)
names = []
for q in quotas:
if q.default:
- names.append( q.name )
- if len( names ) == 1:
- raise ActionInputError( "Quota '%s' is a default, please unset it as a default before deleting it" % ( names[0] ) )
- elif len( names ) > 1:
- raise ActionInputError( "Quotas are defaults, please unset them as defaults before deleting them: " + ', '.join( names ) )
- message = "Deleted %d quotas: " % len( quotas )
+ names.append(q.name)
+ if len(names) == 1:
+ raise ActionInputError("Quota '%s' is a default, please unset it as a default before deleting it." % (names[0]))
+ elif len(names) > 1:
+ raise ActionInputError("Quotas are defaults, please unset them as defaults before deleting them: " + ', '.join(names))
+ message = "Deleted %d quotas: " % len(quotas)
for q in quotas:
q.deleted = True
- self.sa_session.add( q )
- names.append( q.name )
+ self.sa_session.add(q)
+ names.append(q.name)
self.sa_session.flush()
- message += ', '.join( names )
+ message += ', '.join(names)
return message
- def _undelete_quota( self, quota, params=None):
- quotas = util.listify( quota )
+ def _undelete_quota(self, quota, params=None):
+ quotas = util.listify(quota)
names = []
for q in quotas:
if not q.deleted:
- names.append( q.name )
- if len( names ) == 1:
- raise ActionInputError( "Quota '%s' has not been deleted, so it cannot be undeleted." % ( names[0] ) )
- elif len( names ) > 1:
- raise ActionInputError( "Quotas have not been deleted so they cannot be undeleted: " + ', '.join( names ) )
- message = "Undeleted %d quotas: " % len( quotas )
+ names.append(q.name)
+ if len(names) == 1:
+ raise ActionInputError("Quota '%s' has not been deleted, so it cannot be undeleted." % (names[0]))
+ elif len(names) > 1:
+ raise ActionInputError("Quotas have not been deleted so they cannot be undeleted: " + ', '.join(names))
+ message = "Undeleted %d quotas: " % len(quotas)
for q in quotas:
q.deleted = False
- self.sa_session.add( q )
- names.append( q.name )
+ self.sa_session.add(q)
+ names.append(q.name)
self.sa_session.flush()
- message += ', '.join( names )
+ message += ', '.join(names)
return message
- def _purge_quota( self, quota, params ):
+ def _purge_quota(self, quota, params=None):
"""
This method should only be called for a Quota that has previously been deleted.
Purging a deleted Quota deletes all of the following from the database:
- UserQuotaAssociations where quota_id == Quota.id
- GroupQuotaAssociations where quota_id == Quota.id
"""
- quotas = util.listify( quota )
+ quotas = util.listify(quota)
names = []
for q in quotas:
if not q.deleted:
- names.append( q.name )
- if len( names ) == 1:
- raise ActionInputError( "Quota '%s' has not been deleted, so it cannot be purged." % ( names[0] ) )
- elif len( names ) > 1:
- raise ActionInputError( "Quotas have not been deleted so they cannot be undeleted: " + ', '.join( names ) )
- message = "Purged %d quotas: " % len( quotas )
+ names.append(q.name)
+ if len(names) == 1:
+ raise ActionInputError("Quota '%s' has not been deleted, so it cannot be purged." % (names[0]))
+ elif len(names) > 1:
+ raise ActionInputError("Quotas have not been deleted so they cannot be undeleted: " + ', '.join(names))
+ message = "Purged %d quotas: " % len(quotas)
for q in quotas:
# Delete UserQuotaAssociations
for uqa in q.users:
- self.sa_session.delete( uqa )
+ self.sa_session.delete(uqa)
# Delete GroupQuotaAssociations
for gqa in q.groups:
- self.sa_session.delete( gqa )
- names.append( q.name )
+ self.sa_session.delete(gqa)
+ names.append(q.name)
self.sa_session.flush()
- message += ', '.join( names )
+ message += ', '.join(names)
return message
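The quota actions above share one convention for amounts: 'unlimited', 'none'
and 'no limit' mean no cap (stored as ``None``), anything else must parse as a
size, and a parse failure yields ``False``. A self-contained sketch of that
convention; the parser is a simplified stand-in for ``util.size_to_bytes``,
not Galaxy's implementation:

.. code-block:: python

    UNITS = {"": 1, "k": 1024, "m": 1024 ** 2, "g": 1024 ** 3, "t": 1024 ** 4}

    def parse_quota_amount(amount):
        if amount.lower() in ("unlimited", "none", "no limit"):
            return None  # no cap
        text = amount.strip().lower().rstrip("b")
        if text and text[-1] in UNITS:
            number, unit = text[:-1], text[-1]
        else:
            number, unit = text, ""
        try:
            return int(float(number) * UNITS[unit])
        except ValueError:
            return False  # mirrors the failed size_to_bytes branch above

    assert parse_quota_amount("unlimited") is None
    assert parse_quota_amount("100 GB") == 100 * 1024 ** 3
    assert parse_quota_amount("garbage") is False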
diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py
index b735e8de8ba6..803727def09e 100644
--- a/lib/galaxy/app.py
+++ b/lib/galaxy/app.py
@@ -27,6 +27,7 @@
ToolShedRepositoryCache
)
from galaxy.jobs import metrics as job_metrics
+from galaxy.tools.error_reports import ErrorReports
from galaxy.web.proxy import ProxyManager
from galaxy.web.stack import application_stack_instance
from galaxy.queue_worker import GalaxyQueueWorker
@@ -37,27 +38,28 @@
from tool_shed.galaxy_install import update_repository_manager
-log = logging.getLogger( __name__ )
+log = logging.getLogger(__name__)
app = None
-class UniverseApplication( object, config.ConfiguresGalaxyMixin ):
+class UniverseApplication(object, config.ConfiguresGalaxyMixin):
"""Encapsulates the state of a Universe application"""
- def __init__( self, **kwargs ):
+
+ def __init__(self, **kwargs):
if not log.handlers:
# Paste didn't handle it, so we need a temporary basic log
# configured. The handler added here gets dumped and replaced with
# an appropriately configured logger in configure_logging below.
logging.basicConfig(level=logging.DEBUG)
- log.debug( "python path is: %s", ", ".join( sys.path ) )
+ log.debug("python path is: %s", ", ".join(sys.path))
self.name = 'galaxy'
self.startup_timer = ExecutionTimer()
self.new_installation = False
self.application_stack = application_stack_instance()
# Read config file and check for errors
- self.config = config.Configuration( **kwargs )
+ self.config = config.Configuration(**kwargs)
self.config.check()
- config.configure_logging( self.config )
+ config.configure_logging(self.config)
self.configure_fluent_log()
self.config.reload_sanitize_whitelist(explicit='sanitize_whitelist_file' in kwargs)
self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(self.config)
@@ -66,41 +68,44 @@ def __init__( self, **kwargs ):
self.control_worker = GalaxyQueueWorker(self)
self._configure_tool_shed_registry()
- self._configure_object_store( fsmon=True )
+ self._configure_object_store(fsmon=True)
# Setup the database engine and ORM
- config_file = kwargs.get( 'global_conf', {} ).get( '__file__', None )
+ config_file = kwargs.get('global_conf', {}).get('__file__', None)
if config_file:
- log.debug( 'Using "galaxy.ini" config file: %s', config_file )
+ log.debug('Using "galaxy.ini" config file: %s', config_file)
check_migrate_tools = self.config.check_migrate_tools
- self._configure_models( check_migrate_databases=True, check_migrate_tools=check_migrate_tools, config_file=config_file )
+ self._configure_models(check_migrate_databases=True, check_migrate_tools=check_migrate_tools, config_file=config_file)
# Manage installed tool shed repositories.
from tool_shed.galaxy_install import installed_repository_manager
- self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager( self )
+ self.installed_repository_manager = installed_repository_manager.InstalledRepositoryManager(self)
- self._configure_datatypes_registry( self.installed_repository_manager )
- galaxy.model.set_datatypes_registry( self.datatypes_registry )
+ self._configure_datatypes_registry(self.installed_repository_manager)
+ galaxy.model.set_datatypes_registry(self.datatypes_registry)
# Security helper
self._configure_security()
# Tag handler
- self.tag_handler = GalaxyTagManager( self.model.context )
+ self.tag_handler = GalaxyTagManager(self.model.context)
# Dataset Collection Plugins
self.dataset_collections_service = DatasetCollectionManager(self)
# Tool Data Tables
- self._configure_tool_data_tables( from_shed_config=False )
+ self._configure_tool_data_tables(from_shed_config=False)
# Load dbkey / genome build manager
- self._configure_genome_builds( data_table_name="__dbkeys__", load_old_style=True )
+ self._configure_genome_builds(data_table_name="__dbkeys__", load_old_style=True)
# Genomes
- self.genomes = Genomes( self )
+ self.genomes = Genomes(self)
# Data providers registry.
self.data_provider_registry = DataProviderRegistry()
# Initialize job metrics manager, needs to be in place before
# config so per-destination modifications can be made.
- self.job_metrics = job_metrics.JobMetrics( self.config.job_metrics_config_file, app=self )
+ self.job_metrics = job_metrics.JobMetrics(self.config.job_metrics_config_file, app=self)
+
+ # Initialize error report plugins.
+ self.error_reports = ErrorReports(self.config.error_report_file, app=self)
# Initialize the job management configuration
self.job_config = jobs.JobConfiguration(self)
@@ -113,24 +118,24 @@ def __init__( self, **kwargs ):
self._configure_toolbox()
# Load Data Manager
- self.data_managers = DataManagers( self )
+ self.data_managers = DataManagers(self)
# Load the update repository manager.
- self.update_repository_manager = update_repository_manager.UpdateRepositoryManager( self )
+ self.update_repository_manager = update_repository_manager.UpdateRepositoryManager(self)
# Load proprietary datatype converters and display applications.
self.installed_repository_manager.load_proprietary_converters_and_display_applications()
# Load datatype display applications defined in local datatypes_conf.xml
- self.datatypes_registry.load_display_applications( self )
+ self.datatypes_registry.load_display_applications(self)
# Load datatype converters defined in local datatypes_conf.xml
- self.datatypes_registry.load_datatype_converters( self.toolbox )
+ self.datatypes_registry.load_datatype_converters(self.toolbox)
# Load external metadata tool
- self.datatypes_registry.load_external_metadata_tool( self.toolbox )
+ self.datatypes_registry.load_external_metadata_tool(self.toolbox)
# Load history import/export tools.
- load_lib_tools( self.toolbox )
+ load_lib_tools(self.toolbox)
# visualizations registry: associates resources with visualizations, controls how to render
self.visualizations_registry = VisualizationsRegistry(
self,
directories_setting=self.config.visualization_plugins_directory,
- template_cache_dir=self.config.template_cache )
+ template_cache_dir=self.config.template_cache)
# Tours registry
self.tour_registry = ToursRegistry(self.config.tour_config_dir)
# Webhooks registry
@@ -139,23 +144,23 @@ def __init__( self, **kwargs ):
self.security_agent = self.model.security_agent
self.host_security_agent = galaxy.security.HostAgent(
model=self.security_agent.model,
- permitted_actions=self.security_agent.permitted_actions )
+ permitted_actions=self.security_agent.permitted_actions)
# Load quota management.
if self.config.enable_quotas:
- self.quota_agent = galaxy.quota.QuotaAgent( self.model )
+ self.quota_agent = galaxy.quota.QuotaAgent(self.model)
else:
- self.quota_agent = galaxy.quota.NoQuotaAgent( self.model )
+ self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
# Heartbeat for thread profiling
self.heartbeat = None
# Container for OpenID authentication routines
if self.config.enable_openid:
from galaxy.web.framework import openid_manager
- self.openid_manager = openid_manager.OpenIDManager( self.config.openid_consumer_cache_path )
- self.openid_providers = OpenIDProviders.from_file( self.config.openid_config_file )
+ self.openid_manager = openid_manager.OpenIDManager(self.config.openid_consumer_cache_path)
+ self.openid_providers = OpenIDProviders.from_file(self.config.openid_config_file)
else:
self.openid_providers = OpenIDProviders()
from galaxy import auth
- self.auth_manager = auth.AuthManager( self )
+ self.auth_manager = auth.AuthManager(self)
# Start the heartbeat process if configured and available (wait until
# postfork if using uWSGI)
if self.config.use_heartbeat:
@@ -177,37 +182,37 @@ def postfork_sentry_client():
self.application_stack.register_postfork_function(postfork_sentry_client)
# Transfer manager client
- if self.config.get_bool( 'enable_beta_job_managers', False ):
+ if self.config.get_bool('enable_beta_job_managers', False):
from galaxy.jobs import transfer_manager
- self.transfer_manager = transfer_manager.TransferManager( self )
+ self.transfer_manager = transfer_manager.TransferManager(self)
# Start the job manager
from galaxy.jobs import manager
- self.job_manager = manager.JobManager( self )
+ self.job_manager = manager.JobManager(self)
self.job_manager.start()
# FIXME: These are exposed directly for backward compatibility
self.job_queue = self.job_manager.job_queue
self.job_stop_queue = self.job_manager.job_stop_queue
- self.proxy_manager = ProxyManager( self.config )
+ self.proxy_manager = ProxyManager(self.config)
# Initialize the external service types
self.external_service_types = external_service_types.ExternalServiceTypesCollection(
self.config.external_service_type_config_file,
- self.config.external_service_type_path, self )
+ self.config.external_service_type_path, self)
from galaxy.workflow import scheduling_manager
# Must be initialized after job_config.
- self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager( self )
+ self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager(self)
# Configure handling of signals
handlers = {}
if self.heartbeat:
handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
- self._configure_signal_handlers( handlers )
+ self._configure_signal_handlers(handlers)
self.model.engine.dispose()
self.server_starttime = int(time.time()) # used for cachebusting
log.info("Galaxy app startup finished %s" % self.startup_timer)
- def shutdown( self ):
+ def shutdown(self):
self.workflow_scheduling_manager.shutdown()
self.job_manager.shutdown()
self.object_store.shutdown()
@@ -223,16 +228,16 @@ def shutdown( self ):
# If the datatypes registry was persisted, attempt to
# remove the temporary file in which it was written.
if self.datatypes_registry.integrated_datatypes_configs is not None:
- os.unlink( self.datatypes_registry.integrated_datatypes_configs )
+ os.unlink(self.datatypes_registry.integrated_datatypes_configs)
except:
pass
- def configure_fluent_log( self ):
+ def configure_fluent_log(self):
if self.config.fluent_log:
from galaxy.util.log.fluent_log import FluentTraceLogger
- self.trace_logger = FluentTraceLogger( 'galaxy', self.config.fluent_host, self.config.fluent_port )
+ self.trace_logger = FluentTraceLogger('galaxy', self.config.fluent_host, self.config.fluent_port)
else:
self.trace_logger = None
- def is_job_handler( self ):
+ def is_job_handler(self):
return (self.config.track_jobs_in_database and self.job_config.is_handler(self.config.server_name)) or not self.config.track_jobs_in_database
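
The one-line `is_job_handler` above reads more easily as two cases: if jobs are not tracked in the database, every process handles its own jobs; if they are, only processes the job configuration names as handlers qualify. A small sketch of the equivalent logic, with the config lookups replaced by plain parameters:

```python
def is_job_handler(track_jobs_in_database, is_named_handler):
    # Equivalent to the expression above: without database job tracking
    # every server handles its own jobs; with it, only configured handlers do.
    if not track_jobs_in_database:
        return True
    return is_named_handler


# The truth table matches the original boolean expression.
for track in (False, True):
    for named in (False, True):
        assert is_job_handler(track, named) == ((track and named) or not track)
```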
diff --git a/lib/galaxy/auth/__init__.py b/lib/galaxy/auth/__init__.py
index d60a42192626..d8587fe0b189 100644
--- a/lib/galaxy/auth/__init__.py
+++ b/lib/galaxy/auth/__init__.py
@@ -17,7 +17,7 @@ class AuthManager(object):
def __init__(self, app):
self.__app = app
import galaxy.auth.providers
- self.__plugins_dict = plugin_config.plugins_dict(galaxy.auth.providers, 'plugin_type' )
+ self.__plugins_dict = plugin_config.plugins_dict(galaxy.auth.providers, 'plugin_type')
auth_config_file = app.config.auth_config_file
# parse XML
ct = xml.etree.ElementTree.parse(auth_config_file)
@@ -85,26 +85,26 @@ def check_auto_registration(self, trans, login, password):
username = login
for provider, options in self.active_authenticators(email, username, password):
if provider is None:
- log.debug( "Unable to find module: %s" % options )
+ log.debug("Unable to find module: %s" % options)
else:
auth_result, auto_email, auto_username = provider.authenticate(email, username, password, options)
auto_email = str(auto_email).lower()
auto_username = str(auto_username).lower()
if auth_result is True:
# make username unique
- if validate_publicname( trans, auto_username ) != '':
+ if validate_publicname(trans, auto_username) != '':
i = 1
while i <= 10: # stop after 10 tries
- if validate_publicname( trans, "%s-%i" % (auto_username, i) ) == '':
+ if validate_publicname(trans, "%s-%i" % (auto_username, i)) == '':
auto_username = "%s-%i" % (auto_username, i)
break
i += 1
else:
break # end for loop if we can't make a unique username
- log.debug( "Email: %s, auto-register with username: %s" % (auto_email, auto_username) )
+ log.debug("Email: %s, auto-register with username: %s" % (auto_email, auto_username))
return (_get_bool(options, 'auto-register', False), auto_email, auto_username)
elif auth_result is None:
- log.debug( "Email: %s, Username %s, stopping due to failed non-continue" % (auto_email, auto_username) )
+ log.debug("Email: %s, Username %s, stopping due to failed non-continue" % (auto_email, auto_username))
break # end authentication (skip rest)
return (False, '', '')
@@ -112,7 +112,7 @@ def check_password(self, user, password):
"""Checks the username/email and password using auth providers."""
for provider, options in self.active_authenticators(user.email, user.username, password):
if provider is None:
- log.debug( "Unable to find module: %s" % options )
+ log.debug("Unable to find module: %s" % options)
else:
auth_result = provider.authenticate_user(user, password, options)
if auth_result is True:
@@ -127,7 +127,7 @@ def check_change_password(self, user, current_password):
"""
for provider, options in self.active_authenticators(user.email, user.username, current_password):
if provider is None:
- log.debug( "Unable to find module: %s" % options )
+ log.debug("Unable to find module: %s" % options)
else:
auth_result = provider.authenticate_user(user, current_password, options)
if auth_result is True:
@@ -153,7 +153,7 @@ def active_authenticators(self, email, username, password):
continue # skip to next
yield authenticator.plugin, authenticator.options
except Exception:
- log.exception( "Active Authenticators Failure" )
+ log.exception("Active Authenticators Failure")
raise
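
The auto-registration path above derives a unique public name by appending a numeric suffix and giving up after ten attempts. Here is the same loop in isolation; `name_taken` is a hypothetical predicate standing in for the `validate_publicname` check against the database:

```python
def uniquify_username(auto_username, name_taken, max_tries=10):
    # Mirrors the loop in check_auto_registration: try 'name-1' .. 'name-10'
    # until one passes validation; None means the caller should give up.
    if not name_taken(auto_username):
        return auto_username
    for i in range(1, max_tries + 1):
        candidate = "%s-%i" % (auto_username, i)
        if not name_taken(candidate):
            return candidate
    return None


existing = {"jdoe", "jdoe-1"}
print(uniquify_username("jdoe", existing.__contains__))  # -> jdoe-2
```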
diff --git a/lib/galaxy/config.py b/lib/galaxy/config.py
index 672fc662188d..4408dabef246 100644
--- a/lib/galaxy/config.py
+++ b/lib/galaxy/config.py
@@ -31,7 +31,7 @@
from galaxy.web.stack import register_postfork_function
from .version import VERSION_MAJOR
-log = logging.getLogger( __name__ )
+log = logging.getLogger(__name__)
PATH_DEFAULTS = dict(
@@ -43,6 +43,7 @@
job_config_file=['config/job_conf.xml', 'job_conf.xml'],
tool_destinations_config_file=['config/tool_destinations.yml', 'config/tool_destinations.yml.sample'],
job_metrics_config_file=['config/job_metrics_conf.xml', 'job_metrics_conf.xml', 'config/job_metrics_conf.xml.sample'],
+ error_report_file=['config/error_report.yml', 'config/error_report.yml.sample'],
dependency_resolvers_config_file=['config/dependency_resolvers_conf.xml', 'dependency_resolvers_conf.xml'],
job_resource_params_file=['config/job_resource_params_conf.xml', 'job_resource_params_conf.xml'],
migrated_tools_config=['migrated_tools_conf.xml', 'config/migrated_tools_conf.xml'],
@@ -54,6 +55,7 @@
workflow_schedulers_config_file=['config/workflow_schedulers_conf.xml', 'config/workflow_schedulers_conf.xml.sample'],
modules_mapping_files=['config/environment_modules_mapping.yml', 'config/environment_modules_mapping.yml.sample'],
local_conda_mapping_file=['config/local_conda_mapping.yml', 'config/local_conda_mapping.yml.sample'],
+ user_preferences_extra_config_file=['config/user_preferences_extra_conf.yml'],
containers_config_file=['config/containers_conf.yml'],
)
@@ -76,10 +78,10 @@
)
-def resolve_path( path, root ):
+def resolve_path(path, root):
"""If 'path' is relative make absolute by prepending 'root'"""
- if not os.path.isabs( path ):
- path = os.path.join( root, path )
+ if not os.path.isabs(path):
+ path = os.path.join(root, path)
return path
@@ -105,99 +107,99 @@ def find_root(kwargs):
return root
-class Configuration( object ):
- deprecated_options = ( 'database_file', )
+class Configuration(object):
+ deprecated_options = ('database_file', )
- def __init__( self, **kwargs ):
+ def __init__(self, **kwargs):
self.config_dict = kwargs
self.root = find_root(kwargs)
# Resolve paths of other config files
- self.__parse_config_file_options( kwargs )
+ self.__parse_config_file_options(kwargs)
# Collect the umask and primary gid from the environment
- self.umask = os.umask( 0o77 ) # get the current umask
- os.umask( self.umask ) # can't get w/o set, so set it back
+ self.umask = os.umask(0o77) # get the current umask
+ os.umask(self.umask) # can't get w/o set, so set it back
self.gid = os.getgid() # if running under newgrp(1) we'll need to fix the group of data created on the cluster
self.version_major = VERSION_MAJOR
# Database related configuration
- self.database = resolve_path( kwargs.get( "database_file", "database/universe.sqlite" ), self.root )
- self.database_connection = kwargs.get( "database_connection", False )
- self.database_engine_options = get_database_engine_options( kwargs )
- self.database_create_tables = string_as_bool( kwargs.get( "database_create_tables", "True" ) )
- self.database_query_profiling_proxy = string_as_bool( kwargs.get( "database_query_profiling_proxy", "False" ) )
- self.slow_query_log_threshold = float( kwargs.get( "slow_query_log_threshold", 0) )
+ self.database = resolve_path(kwargs.get("database_file", "database/universe.sqlite"), self.root)
+ self.database_connection = kwargs.get("database_connection", False)
+ self.database_engine_options = get_database_engine_options(kwargs)
+ self.database_create_tables = string_as_bool(kwargs.get("database_create_tables", "True"))
+ self.database_query_profiling_proxy = string_as_bool(kwargs.get("database_query_profiling_proxy", "False"))
+ self.slow_query_log_threshold = float(kwargs.get("slow_query_log_threshold", 0))
# Don't set this to true for production databases, but it should probably
# default to True for sqlite databases.
- self.database_auto_migrate = string_as_bool( kwargs.get( "database_auto_migrate", "False" ) )
+ self.database_auto_migrate = string_as_bool(kwargs.get("database_auto_migrate", "False"))
# Install database related configuration (if different).
- self.install_database_connection = kwargs.get( "install_database_connection", None )
- self.install_database_engine_options = get_database_engine_options( kwargs, model_prefix="install_" )
+ self.install_database_connection = kwargs.get("install_database_connection", None)
+ self.install_database_engine_options = get_database_engine_options(kwargs, model_prefix="install_")
# Where dataset files are stored
- self.file_path = resolve_path( kwargs.get( "file_path", "database/files" ), self.root )
- self.new_file_path = resolve_path( kwargs.get( "new_file_path", "database/tmp" ), self.root )
- override_tempdir = string_as_bool( kwargs.get( "override_tempdir", "True" ) )
+ self.file_path = resolve_path(kwargs.get("file_path", "database/files"), self.root)
+ self.new_file_path = resolve_path(kwargs.get("new_file_path", "database/tmp"), self.root)
+ override_tempdir = string_as_bool(kwargs.get("override_tempdir", "True"))
if override_tempdir:
tempfile.tempdir = self.new_file_path
- self.openid_consumer_cache_path = resolve_path( kwargs.get( "openid_consumer_cache_path", "database/openid_consumer_cache" ), self.root )
- self.cookie_path = kwargs.get( "cookie_path", "/" )
+ self.openid_consumer_cache_path = resolve_path(kwargs.get("openid_consumer_cache_path", "database/openid_consumer_cache"), self.root)
+ self.cookie_path = kwargs.get("cookie_path", "/")
# Galaxy OpenID settings
- self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', False ) )
- self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', False ) )
- self.enable_unique_workflow_defaults = string_as_bool( kwargs.get( 'enable_unique_workflow_defaults', False ) )
- self.tool_path = resolve_path( kwargs.get( "tool_path", "tools" ), self.root )
- self.tool_data_path = resolve_path( kwargs.get( "tool_data_path", "tool-data" ), os.getcwd() )
- self.builds_file_path = resolve_path( kwargs.get( "builds_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'builds.txt') ), self.root )
- self.len_file_path = resolve_path( kwargs.get( "len_file_path", os.path.join( self.tool_data_path, 'shared', 'ucsc', 'chrom') ), self.root )
+ self.enable_openid = string_as_bool(kwargs.get('enable_openid', False))
+ self.enable_quotas = string_as_bool(kwargs.get('enable_quotas', False))
+ self.enable_unique_workflow_defaults = string_as_bool(kwargs.get('enable_unique_workflow_defaults', False))
+ self.tool_path = resolve_path(kwargs.get("tool_path", "tools"), self.root)
+ self.tool_data_path = resolve_path(kwargs.get("tool_data_path", "tool-data"), os.getcwd())
+ self.builds_file_path = resolve_path(kwargs.get("builds_file_path", os.path.join(self.tool_data_path, 'shared', 'ucsc', 'builds.txt')), self.root)
+ self.len_file_path = resolve_path(kwargs.get("len_file_path", os.path.join(self.tool_data_path, 'shared', 'ucsc', 'chrom')), self.root)
# The value of migrated_tools_config is the file reserved for containing only those tools that have been eliminated from the distribution
# and moved to the tool shed.
- self.integrated_tool_panel_config = resolve_path( kwargs.get( 'integrated_tool_panel_config', 'integrated_tool_panel.xml' ), self.root )
- integrated_tool_panel_tracking_directory = kwargs.get( 'integrated_tool_panel_tracking_directory', None )
+ self.integrated_tool_panel_config = resolve_path(kwargs.get('integrated_tool_panel_config', 'integrated_tool_panel.xml'), self.root)
+ integrated_tool_panel_tracking_directory = kwargs.get('integrated_tool_panel_tracking_directory', None)
if integrated_tool_panel_tracking_directory:
- self.integrated_tool_panel_tracking_directory = resolve_path( integrated_tool_panel_tracking_directory, self.root )
+ self.integrated_tool_panel_tracking_directory = resolve_path(integrated_tool_panel_tracking_directory, self.root)
else:
self.integrated_tool_panel_tracking_directory = None
- self.toolbox_filter_base_modules = listify( kwargs.get( "toolbox_filter_base_modules", "galaxy.tools.filters,galaxy.tools.toolbox.filters" ) )
- self.tool_filters = listify( kwargs.get( "tool_filters", [] ), do_strip=True )
- self.tool_label_filters = listify( kwargs.get( "tool_label_filters", [] ), do_strip=True )
- self.tool_section_filters = listify( kwargs.get( "tool_section_filters", [] ), do_strip=True )
+ self.toolbox_filter_base_modules = listify(kwargs.get("toolbox_filter_base_modules", "galaxy.tools.filters,galaxy.tools.toolbox.filters"))
+ self.tool_filters = listify(kwargs.get("tool_filters", []), do_strip=True)
+ self.tool_label_filters = listify(kwargs.get("tool_label_filters", []), do_strip=True)
+ self.tool_section_filters = listify(kwargs.get("tool_section_filters", []), do_strip=True)
- self.user_tool_filters = listify( kwargs.get( "user_tool_filters", [] ), do_strip=True )
- self.user_tool_label_filters = listify( kwargs.get( "user_tool_label_filters", [] ), do_strip=True )
- self.user_tool_section_filters = listify( kwargs.get( "user_tool_section_filters", [] ), do_strip=True )
- self.has_user_tool_filters = bool( self.user_tool_filters or self.user_tool_label_filters or self.user_tool_section_filters )
+ self.user_tool_filters = listify(kwargs.get("user_tool_filters", []), do_strip=True)
+ self.user_tool_label_filters = listify(kwargs.get("user_tool_label_filters", []), do_strip=True)
+ self.user_tool_section_filters = listify(kwargs.get("user_tool_section_filters", []), do_strip=True)
+ self.has_user_tool_filters = bool(self.user_tool_filters or self.user_tool_label_filters or self.user_tool_section_filters)
- self.tour_config_dir = resolve_path( kwargs.get("tour_config_dir", "config/plugins/tours"), self.root)
- self.webhooks_dirs = resolve_path( kwargs.get("webhooks_dir", "config/plugins/webhooks"), self.root)
+ self.tour_config_dir = resolve_path(kwargs.get("tour_config_dir", "config/plugins/tours"), self.root)
+ self.webhooks_dirs = resolve_path(kwargs.get("webhooks_dir", "config/plugins/webhooks"), self.root)
- self.expose_user_name = kwargs.get( "expose_user_name", False )
- self.expose_user_email = kwargs.get( "expose_user_email", False )
- self.password_expiration_period = timedelta( days=int( kwargs.get( "password_expiration_period", 0 ) ) )
+ self.expose_user_name = kwargs.get("expose_user_name", False)
+ self.expose_user_email = kwargs.get("expose_user_email", False)
+ self.password_expiration_period = timedelta(days=int(kwargs.get("password_expiration_period", 0)))
# Check for tools defined in the above non-shed tool configs (i.e., tool_conf.xml) that have
# been migrated from the Galaxy code distribution to the Tool Shed.
- self.check_migrate_tools = string_as_bool( kwargs.get( 'check_migrate_tools', True ) )
- self.shed_tool_data_path = kwargs.get( "shed_tool_data_path", None )
- self.x_frame_options = kwargs.get( "x_frame_options", "SAMEORIGIN" )
+ self.check_migrate_tools = string_as_bool(kwargs.get('check_migrate_tools', True))
+ self.shed_tool_data_path = kwargs.get("shed_tool_data_path", None)
+ self.x_frame_options = kwargs.get("x_frame_options", "SAMEORIGIN")
if self.shed_tool_data_path:
- self.shed_tool_data_path = resolve_path( self.shed_tool_data_path, self.root )
+ self.shed_tool_data_path = resolve_path(self.shed_tool_data_path, self.root)
else:
self.shed_tool_data_path = self.tool_data_path
- self.manage_dependency_relationships = string_as_bool( kwargs.get( 'manage_dependency_relationships', False ) )
- self.running_functional_tests = string_as_bool( kwargs.get( 'running_functional_tests', False ) )
- self.hours_between_check = kwargs.get( 'hours_between_check', 12 )
- self.enable_tool_shed_check = string_as_bool( kwargs.get( 'enable_tool_shed_check', False ) )
- if isinstance( self.hours_between_check, string_types ):
- self.hours_between_check = float( self.hours_between_check )
+ self.manage_dependency_relationships = string_as_bool(kwargs.get('manage_dependency_relationships', False))
+ self.running_functional_tests = string_as_bool(kwargs.get('running_functional_tests', False))
+ self.hours_between_check = kwargs.get('hours_between_check', 12)
+ self.enable_tool_shed_check = string_as_bool(kwargs.get('enable_tool_shed_check', False))
+ if isinstance(self.hours_between_check, string_types):
+ self.hours_between_check = float(self.hours_between_check)
try:
- if isinstance( self.hours_between_check, int ):
+ if isinstance(self.hours_between_check, int):
if self.hours_between_check < 1 or self.hours_between_check > 24:
self.hours_between_check = 12
- elif isinstance( self.hours_between_check, float ):
+ elif isinstance(self.hours_between_check, float):
# If we're running functional tests, the minimum hours between check should be reduced to 0.001, or 3.6 seconds.
if self.running_functional_tests:
if self.hours_between_check < 0.001 or self.hours_between_check > 24.0:
@@ -209,205 +211,210 @@ def __init__( self, **kwargs ):
self.hours_between_check = 12
except:
self.hours_between_check = 12
- self.update_integrated_tool_panel = kwargs.get( "update_integrated_tool_panel", True )
- self.enable_data_manager_user_view = string_as_bool( kwargs.get( "enable_data_manager_user_view", "False" ) )
- self.galaxy_data_manager_data_path = kwargs.get( 'galaxy_data_manager_data_path', self.tool_data_path )
- self.tool_secret = kwargs.get( "tool_secret", "" )
- self.id_secret = kwargs.get( "id_secret", "USING THE DEFAULT IS NOT SECURE!" )
- self.retry_metadata_internally = string_as_bool( kwargs.get( "retry_metadata_internally", "True" ) )
- self.max_metadata_value_size = int( kwargs.get( "max_metadata_value_size", 5242880 ) )
- self.single_user = kwargs.get( "single_user", None )
- self.use_remote_user = string_as_bool( kwargs.get( "use_remote_user", "False" ) ) or self.single_user
- self.normalize_remote_user_email = string_as_bool( kwargs.get( "normalize_remote_user_email", "False" ) )
- self.remote_user_maildomain = kwargs.get( "remote_user_maildomain", None )
- self.remote_user_header = kwargs.get( "remote_user_header", 'HTTP_REMOTE_USER' )
- self.remote_user_logout_href = kwargs.get( "remote_user_logout_href", None )
- self.remote_user_secret = kwargs.get( "remote_user_secret", None )
- self.require_login = string_as_bool( kwargs.get( "require_login", "False" ) )
- self.allow_user_creation = string_as_bool( kwargs.get( "allow_user_creation", "True" ) )
- self.allow_user_deletion = string_as_bool( kwargs.get( "allow_user_deletion", "False" ) )
- self.allow_user_dataset_purge = string_as_bool( kwargs.get( "allow_user_dataset_purge", "True" ) )
- self.allow_user_impersonation = string_as_bool( kwargs.get( "allow_user_impersonation", "False" ) )
- self.new_user_dataset_access_role_default_private = string_as_bool( kwargs.get( "new_user_dataset_access_role_default_private", "False" ) )
- self.collect_outputs_from = [ x.strip() for x in kwargs.get( 'collect_outputs_from', 'new_file_path,job_working_directory' ).lower().split(',') ]
- self.template_path = resolve_path( kwargs.get( "template_path", "templates" ), self.root )
- self.template_cache = resolve_path( kwargs.get( "template_cache_path", "database/compiled_templates" ), self.root )
- self.local_job_queue_workers = int( kwargs.get( "local_job_queue_workers", "5" ) )
- self.cluster_job_queue_workers = int( kwargs.get( "cluster_job_queue_workers", "3" ) )
- self.job_queue_cleanup_interval = int( kwargs.get("job_queue_cleanup_interval", "5") )
- self.cluster_files_directory = os.path.abspath( kwargs.get( "cluster_files_directory", "database/pbs" ) )
+ self.update_integrated_tool_panel = kwargs.get("update_integrated_tool_panel", True)
+ self.enable_data_manager_user_view = string_as_bool(kwargs.get("enable_data_manager_user_view", "False"))
+ self.galaxy_data_manager_data_path = kwargs.get('galaxy_data_manager_data_path', self.tool_data_path)
+ self.tool_secret = kwargs.get("tool_secret", "")
+ self.id_secret = kwargs.get("id_secret", "USING THE DEFAULT IS NOT SECURE!")
+ self.retry_metadata_internally = string_as_bool(kwargs.get("retry_metadata_internally", "True"))
+ self.max_metadata_value_size = int(kwargs.get("max_metadata_value_size", 5242880))
+ self.single_user = kwargs.get("single_user", None)
+ self.use_remote_user = string_as_bool(kwargs.get("use_remote_user", "False")) or self.single_user
+ self.normalize_remote_user_email = string_as_bool(kwargs.get("normalize_remote_user_email", "False"))
+ self.remote_user_maildomain = kwargs.get("remote_user_maildomain", None)
+ self.remote_user_header = kwargs.get("remote_user_header", 'HTTP_REMOTE_USER')
+ self.remote_user_logout_href = kwargs.get("remote_user_logout_href", None)
+ self.remote_user_secret = kwargs.get("remote_user_secret", None)
+ self.require_login = string_as_bool(kwargs.get("require_login", "False"))
+ self.allow_user_creation = string_as_bool(kwargs.get("allow_user_creation", "True"))
+ self.allow_user_deletion = string_as_bool(kwargs.get("allow_user_deletion", "False"))
+ self.allow_user_dataset_purge = string_as_bool(kwargs.get("allow_user_dataset_purge", "True"))
+ self.allow_user_impersonation = string_as_bool(kwargs.get("allow_user_impersonation", "False"))
+ self.new_user_dataset_access_role_default_private = string_as_bool(kwargs.get("new_user_dataset_access_role_default_private", "False"))
+ self.collect_outputs_from = [x.strip() for x in kwargs.get('collect_outputs_from', 'new_file_path,job_working_directory').lower().split(',')]
+ self.template_path = resolve_path(kwargs.get("template_path", "templates"), self.root)
+ self.template_cache = resolve_path(kwargs.get("template_cache_path", "database/compiled_templates"), self.root)
+ self.local_job_queue_workers = int(kwargs.get("local_job_queue_workers", "5"))
+ self.cluster_job_queue_workers = int(kwargs.get("cluster_job_queue_workers", "3"))
+ self.job_queue_cleanup_interval = int(kwargs.get("job_queue_cleanup_interval", "5"))
+ self.cluster_files_directory = os.path.abspath(kwargs.get("cluster_files_directory", "database/pbs"))
# Fall back to legacy job_working_directory config variable if set.
- default_jobs_directory = kwargs.get( "job_working_directory", "database/jobs_directory" )
- self.jobs_directory = resolve_path( kwargs.get( "jobs_directory", default_jobs_directory ), self.root )
- self.default_job_shell = kwargs.get( "default_job_shell", "/bin/bash" )
- self.cleanup_job = kwargs.get( "cleanup_job", "always" )
- preserve_python_environment = kwargs.get( "preserve_python_environment", "legacy_only" )
+ default_jobs_directory = kwargs.get("job_working_directory", "database/jobs_directory")
+ self.jobs_directory = resolve_path(kwargs.get("jobs_directory", default_jobs_directory), self.root)
+ self.default_job_shell = kwargs.get("default_job_shell", "/bin/bash")
+ self.cleanup_job = kwargs.get("cleanup_job", "always")
+ preserve_python_environment = kwargs.get("preserve_python_environment", "legacy_only")
if preserve_python_environment not in ["legacy_only", "legacy_and_local", "always"]:
- log.warn("preserve_python_environment set to unknown value [%s], defaulting to legacy_only")
+ log.warning("preserve_python_environment set to unknown value [%s], defaulting to legacy_only")
preserve_python_environment = "legacy_only"
self.preserve_python_environment = preserve_python_environment
# Older default container cache path; I don't think anyone is using it anymore and it wasn't documented - we
# should probably drop the backward compatibility to save the path check.
- self.container_image_cache_path = self.resolve_path( kwargs.get( "container_image_cache_path", "database/container_images" ) )
- if not os.path.exists( self.container_image_cache_path ):
- self.container_image_cache_path = self.resolve_path( kwargs.get( "container_image_cache_path", "database/container_cache" ) )
- self.outputs_to_working_directory = string_as_bool( kwargs.get( 'outputs_to_working_directory', False ) )
- self.output_size_limit = int( kwargs.get( 'output_size_limit', 0 ) )
- self.retry_job_output_collection = int( kwargs.get( 'retry_job_output_collection', 0 ) )
- self.check_job_script_integrity = string_as_bool( kwargs.get( "check_job_script_integrity", True ) )
- self.job_walltime = kwargs.get( 'job_walltime', None )
+ self.container_image_cache_path = self.resolve_path(kwargs.get("container_image_cache_path", "database/container_images"))
+ if not os.path.exists(self.container_image_cache_path):
+ self.container_image_cache_path = self.resolve_path(kwargs.get("container_image_cache_path", "database/container_cache"))
+ self.outputs_to_working_directory = string_as_bool(kwargs.get('outputs_to_working_directory', False))
+ self.output_size_limit = int(kwargs.get('output_size_limit', 0))
+ self.retry_job_output_collection = int(kwargs.get('retry_job_output_collection', 0))
+ self.check_job_script_integrity = string_as_bool(kwargs.get("check_job_script_integrity", True))
+ self.job_walltime = kwargs.get('job_walltime', None)
self.job_walltime_delta = None
if self.job_walltime is not None:
- h, m, s = [ int( v ) for v in self.job_walltime.split( ':' ) ]
- self.job_walltime_delta = timedelta( 0, s, 0, 0, m, h )
- self.admin_users = kwargs.get( "admin_users", "" )
+ h, m, s = [int(v) for v in self.job_walltime.split(':')]
+ self.job_walltime_delta = timedelta(0, s, 0, 0, m, h)
+ self.admin_users = kwargs.get("admin_users", "")
self.admin_users_list = [u.strip() for u in self.admin_users.split(',') if u]
self.mailing_join_addr = kwargs.get('mailing_join_addr', 'galaxy-announce-join@bx.psu.edu')
- self.error_email_to = kwargs.get( 'error_email_to', None )
+ self.error_email_to = kwargs.get('error_email_to', None)
# activation_email was used until release_15.03
- activation_email = kwargs.get( 'activation_email', None )
- self.email_from = kwargs.get( 'email_from', activation_email )
- self.user_activation_on = string_as_bool( kwargs.get( 'user_activation_on', False ) )
- self.activation_grace_period = int( kwargs.get( 'activation_grace_period', 3 ) )
- default_inactivity_box_content = ( "Your account has not been activated yet. Feel free to browse around and see what's available, but"
- " you won't be able to upload data or run jobs until you have verified your email address." )
- self.inactivity_box_content = kwargs.get( 'inactivity_box_content', default_inactivity_box_content )
- self.terms_url = kwargs.get( 'terms_url', None )
- self.instance_resource_url = kwargs.get( 'instance_resource_url', None )
- self.registration_warning_message = kwargs.get( 'registration_warning_message', None )
- self.ga_code = kwargs.get( 'ga_code', None )
- self.session_duration = int(kwargs.get( 'session_duration', 0 ))
+ activation_email = kwargs.get('activation_email', None)
+ self.email_from = kwargs.get('email_from', activation_email)
+ self.user_activation_on = string_as_bool(kwargs.get('user_activation_on', False))
+ self.activation_grace_period = int(kwargs.get('activation_grace_period', 3))
+ default_inactivity_box_content = ("Your account has not been activated yet. Feel free to browse around and see what's available, but"
+ " you won't be able to upload data or run jobs until you have verified your email address.")
+ self.inactivity_box_content = kwargs.get('inactivity_box_content', default_inactivity_box_content)
+ self.terms_url = kwargs.get('terms_url', None)
+ self.instance_resource_url = kwargs.get('instance_resource_url', None)
+ self.registration_warning_message = kwargs.get('registration_warning_message', None)
+ self.ga_code = kwargs.get('ga_code', None)
+ self.session_duration = int(kwargs.get('session_duration', 0))
# Get the disposable email domains blacklist file and its contents
- self.blacklist_location = kwargs.get( 'blacklist_file', None )
+ self.blacklist_location = kwargs.get('blacklist_file', None)
self.blacklist_content = None
if self.blacklist_location is not None:
- self.blacklist_file = resolve_path( kwargs.get( 'blacklist_file', None ), self.root )
+ self.blacklist_file = resolve_path(kwargs.get('blacklist_file', None), self.root)
try:
- with open( self.blacklist_file ) as blacklist:
- self.blacklist_content = [ line.rstrip() for line in blacklist.readlines() ]
+ with open(self.blacklist_file) as blacklist:
+ self.blacklist_content = [line.rstrip() for line in blacklist.readlines()]
except IOError:
- log.error( "CONFIGURATION ERROR: Can't open supplied blacklist file from path: " + str( self.blacklist_file ) )
- self.smtp_server = kwargs.get( 'smtp_server', None )
- self.smtp_username = kwargs.get( 'smtp_username', None )
- self.smtp_password = kwargs.get( 'smtp_password', None )
- self.smtp_ssl = kwargs.get( 'smtp_ssl', None )
- self.track_jobs_in_database = string_as_bool( kwargs.get( 'track_jobs_in_database', 'True') )
- self.start_job_runners = listify(kwargs.get( 'start_job_runners', '' ))
- self.expose_dataset_path = string_as_bool( kwargs.get( 'expose_dataset_path', 'False' ) )
- self.expose_potentially_sensitive_job_metrics = string_as_bool( kwargs.get( 'expose_potentially_sensitive_job_metrics', 'False' ) )
- self.enable_communication_server = string_as_bool( kwargs.get( 'enable_communication_server', 'False' ) )
- self.communication_server_host = kwargs.get( 'communication_server_host', 'http://localhost' )
- self.communication_server_port = int( kwargs.get( 'communication_server_port', '7070' ) )
- self.persistent_communication_rooms = listify( kwargs.get( "persistent_communication_rooms", [] ), do_strip=True )
- self.enable_openid = string_as_bool( kwargs.get( 'enable_openid', 'False' ) )
- self.enable_quotas = string_as_bool( kwargs.get( 'enable_quotas', 'False' ) )
+ log.error("CONFIGURATION ERROR: Can't open supplied blacklist file from path: " + str(self.blacklist_file))
+ self.smtp_server = kwargs.get('smtp_server', None)
+ self.smtp_username = kwargs.get('smtp_username', None)
+ self.smtp_password = kwargs.get('smtp_password', None)
+ self.smtp_ssl = kwargs.get('smtp_ssl', None)
+ self.track_jobs_in_database = string_as_bool(kwargs.get('track_jobs_in_database', 'True'))
+ self.start_job_runners = listify(kwargs.get('start_job_runners', ''))
+ self.expose_dataset_path = string_as_bool(kwargs.get('expose_dataset_path', 'False'))
+ self.expose_potentially_sensitive_job_metrics = string_as_bool(kwargs.get('expose_potentially_sensitive_job_metrics', 'False'))
+ self.enable_communication_server = string_as_bool(kwargs.get('enable_communication_server', 'False'))
+ self.communication_server_host = kwargs.get('communication_server_host', 'http://localhost')
+ self.communication_server_port = int(kwargs.get('communication_server_port', '7070'))
+ self.persistent_communication_rooms = listify(kwargs.get("persistent_communication_rooms", []), do_strip=True)
+ self.enable_openid = string_as_bool(kwargs.get('enable_openid', 'False'))
+ self.enable_quotas = string_as_bool(kwargs.get('enable_quotas', 'False'))
# External Service types used in sample tracking
- self.external_service_type_path = resolve_path( kwargs.get( 'external_service_type_path', 'external_service_types' ), self.root )
+ self.external_service_type_path = resolve_path(kwargs.get('external_service_type_path', 'external_service_types'), self.root)
# Tasked job runner.
- self.use_tasked_jobs = string_as_bool( kwargs.get( 'use_tasked_jobs', False ) )
+ self.use_tasked_jobs = string_as_bool(kwargs.get('use_tasked_jobs', False))
self.local_task_queue_workers = int(kwargs.get("local_task_queue_workers", 2))
- self.tool_submission_burst_threads = int( kwargs.get( 'tool_submission_burst_threads', '1' ) )
- self.tool_submission_burst_at = int( kwargs.get( 'tool_submission_burst_at', '10' ) )
+ self.tool_submission_burst_threads = int(kwargs.get('tool_submission_burst_threads', '1'))
+ self.tool_submission_burst_at = int(kwargs.get('tool_submission_burst_at', '10'))
# Enable new interface for API installations from TS.
# Admin menu will list both if enabled.
- self.enable_beta_ts_api_install = string_as_bool( kwargs.get( 'enable_beta_ts_api_install', 'True' ) )
+ self.enable_beta_ts_api_install = string_as_bool(kwargs.get('enable_beta_ts_api_install', 'True'))
# The transfer manager and deferred job queue
- self.enable_beta_job_managers = string_as_bool( kwargs.get( 'enable_beta_job_managers', 'False' ) )
+ self.enable_beta_job_managers = string_as_bool(kwargs.get('enable_beta_job_managers', 'False'))
# These workflow modules should not be considered part of Galaxy's
# public API yet - the module state definitions may change and
# workflows built using these modules may not function in the
# future.
- self.enable_beta_workflow_modules = string_as_bool( kwargs.get( 'enable_beta_workflow_modules', 'False' ) )
+ self.enable_beta_workflow_modules = string_as_bool(kwargs.get('enable_beta_workflow_modules', 'False'))
# These are not even beta - just experiments - don't use them unless
# you want your tools to be broken in the future.
- self.enable_beta_tool_formats = string_as_bool( kwargs.get( 'enable_beta_tool_formats', 'False' ) )
+ self.enable_beta_tool_formats = string_as_bool(kwargs.get('enable_beta_tool_formats', 'False'))
# Beta containers interface used by GIEs
- self.enable_beta_containers_interface = string_as_bool( kwargs.get( 'enable_beta_containers_interface', 'False' ) )
+ self.enable_beta_containers_interface = string_as_bool(kwargs.get('enable_beta_containers_interface', 'False'))
# Certain modules such as the pause module will automatically cause
# workflows to be scheduled in job handlers the way all workflows will
# be someday - the following two properties can also be used to force this
# behavior under certain conditions - namely for workflows that have a minimum
# number of steps or that consume collections.
- self.force_beta_workflow_scheduled_min_steps = int( kwargs.get( 'force_beta_workflow_scheduled_min_steps', '250' ) )
- self.force_beta_workflow_scheduled_for_collections = string_as_bool( kwargs.get( 'force_beta_workflow_scheduled_for_collections', 'False' ) )
+ self.force_beta_workflow_scheduled_min_steps = int(kwargs.get('force_beta_workflow_scheduled_min_steps', '250'))
+ self.force_beta_workflow_scheduled_for_collections = string_as_bool(kwargs.get('force_beta_workflow_scheduled_for_collections', 'False'))
- self.history_local_serial_workflow_scheduling = string_as_bool( kwargs.get( 'history_local_serial_workflow_scheduling', 'False' ) )
- self.parallelize_workflow_scheduling_within_histories = string_as_bool( kwargs.get( 'parallelize_workflow_scheduling_within_histories', 'False' ) )
- self.maximum_workflow_invocation_duration = int( kwargs.get( "maximum_workflow_invocation_duration", 2678400 ) )
+ self.history_local_serial_workflow_scheduling = string_as_bool(kwargs.get('history_local_serial_workflow_scheduling', 'False'))
+ self.parallelize_workflow_scheduling_within_histories = string_as_bool(kwargs.get('parallelize_workflow_scheduling_within_histories', 'False'))
+ self.maximum_workflow_invocation_duration = int(kwargs.get("maximum_workflow_invocation_duration", 2678400))
# Per-user Job concurrency limitations
- self.cache_user_job_count = string_as_bool( kwargs.get( 'cache_user_job_count', False ) )
- self.user_job_limit = int( kwargs.get( 'user_job_limit', 0 ) )
- self.registered_user_job_limit = int( kwargs.get( 'registered_user_job_limit', self.user_job_limit ) )
- self.anonymous_user_job_limit = int( kwargs.get( 'anonymous_user_job_limit', self.user_job_limit ) )
- self.default_cluster_job_runner = kwargs.get( 'default_cluster_job_runner', 'local:///' )
- self.pbs_application_server = kwargs.get('pbs_application_server', "" )
- self.pbs_dataset_server = kwargs.get('pbs_dataset_server', "" )
- self.pbs_dataset_path = kwargs.get('pbs_dataset_path', "" )
- self.pbs_stage_path = kwargs.get('pbs_stage_path', "" )
- self.drmaa_external_runjob_script = kwargs.get('drmaa_external_runjob_script', None )
+ self.cache_user_job_count = string_as_bool(kwargs.get('cache_user_job_count', False))
+ self.user_job_limit = int(kwargs.get('user_job_limit', 0))
+ self.registered_user_job_limit = int(kwargs.get('registered_user_job_limit', self.user_job_limit))
+ self.anonymous_user_job_limit = int(kwargs.get('anonymous_user_job_limit', self.user_job_limit))
+ self.default_cluster_job_runner = kwargs.get('default_cluster_job_runner', 'local:///')
+ self.pbs_application_server = kwargs.get('pbs_application_server', "")
+ self.pbs_dataset_server = kwargs.get('pbs_dataset_server', "")
+ self.pbs_dataset_path = kwargs.get('pbs_dataset_path', "")
+ self.pbs_stage_path = kwargs.get('pbs_stage_path', "")
+ self.drmaa_external_runjob_script = kwargs.get('drmaa_external_runjob_script', None)
self.drmaa_external_killjob_script = kwargs.get('drmaa_external_killjob_script', None)
self.external_chown_script = kwargs.get('external_chown_script', None)
self.real_system_username = kwargs.get('real_system_username', 'user_email')
- self.environment_setup_file = kwargs.get( 'environment_setup_file', None )
- self.use_heartbeat = string_as_bool( kwargs.get( 'use_heartbeat', 'False' ) )
- self.heartbeat_interval = int( kwargs.get( 'heartbeat_interval', 20 ) )
- self.heartbeat_log = kwargs.get( 'heartbeat_log', None )
- self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
- self.log_events = string_as_bool( kwargs.get( 'log_events', 'False' ) )
- self.sanitize_all_html = string_as_bool( kwargs.get( 'sanitize_all_html', True ) )
- self.sanitize_whitelist_file = resolve_path( kwargs.get( 'sanitize_whitelist_file', "config/sanitize_whitelist.txt" ), self.root )
- self.serve_xss_vulnerable_mimetypes = string_as_bool( kwargs.get( 'serve_xss_vulnerable_mimetypes', False ) )
- self.allowed_origin_hostnames = self._parse_allowed_origin_hostnames( kwargs )
+ self.environment_setup_file = kwargs.get('environment_setup_file', None)
+ self.use_heartbeat = string_as_bool(kwargs.get('use_heartbeat', 'False'))
+ self.heartbeat_interval = int(kwargs.get('heartbeat_interval', 20))
+ self.heartbeat_log = kwargs.get('heartbeat_log', None)
+ self.log_actions = string_as_bool(kwargs.get('log_actions', 'False'))
+ self.log_events = string_as_bool(kwargs.get('log_events', 'False'))
+ self.sanitize_all_html = string_as_bool(kwargs.get('sanitize_all_html', True))
+ self.sanitize_whitelist_file = resolve_path(kwargs.get('sanitize_whitelist_file', "config/sanitize_whitelist.txt"), self.root)
+ self.serve_xss_vulnerable_mimetypes = string_as_bool(kwargs.get('serve_xss_vulnerable_mimetypes', False))
+ self.allowed_origin_hostnames = self._parse_allowed_origin_hostnames(kwargs)
if "trust_jupyter_notebook_conversion" in kwargs:
- trust_jupyter_notebook_conversion = string_as_bool( kwargs.get( 'trust_jupyter_notebook_conversion', False ) )
+ trust_jupyter_notebook_conversion = string_as_bool(kwargs.get('trust_jupyter_notebook_conversion', False))
else:
- trust_jupyter_notebook_conversion = string_as_bool( kwargs.get( 'trust_ipython_notebook_conversion', False ) )
+ trust_jupyter_notebook_conversion = string_as_bool(kwargs.get('trust_ipython_notebook_conversion', False))
self.trust_jupyter_notebook_conversion = trust_jupyter_notebook_conversion
- self.enable_old_display_applications = string_as_bool( kwargs.get( "enable_old_display_applications", "True" ) )
- self.brand = kwargs.get( 'brand', None )
- self.welcome_url = kwargs.get( 'welcome_url', '/static/welcome.html' )
- self.show_welcome_with_login = string_as_bool( kwargs.get( "show_welcome_with_login", "False" ) )
+ self.enable_old_display_applications = string_as_bool(kwargs.get("enable_old_display_applications", "True"))
+ self.brand = kwargs.get('brand', None)
+ self.welcome_url = kwargs.get('welcome_url', '/static/welcome.html')
+ self.show_welcome_with_login = string_as_bool(kwargs.get("show_welcome_with_login", "False"))
# Configuration for the message box directly below the masthead.
- self.message_box_visible = string_as_bool( kwargs.get( 'message_box_visible', False ) )
- self.message_box_content = kwargs.get( 'message_box_content', None )
- self.message_box_class = kwargs.get( 'message_box_class', 'info' )
- self.support_url = kwargs.get( 'support_url', 'https://galaxyproject.org/support' )
- self.citation_url = kwargs.get( 'citation_url', 'https://galaxyproject.org/citing-galaxy' )
- self.wiki_url = kwargs.get( 'wiki_url', 'https://galaxyproject.org/' )
- self.blog_url = kwargs.get( 'blog_url', None )
- self.screencasts_url = kwargs.get( 'screencasts_url', None )
- self.library_import_dir = kwargs.get( 'library_import_dir', None )
- self.user_library_import_dir = kwargs.get( 'user_library_import_dir', None )
+ self.message_box_visible = string_as_bool(kwargs.get('message_box_visible', False))
+ self.message_box_content = kwargs.get('message_box_content', None)
+ self.message_box_class = kwargs.get('message_box_class', 'info')
+ self.support_url = kwargs.get('support_url', 'https://galaxyproject.org/support')
+ self.citation_url = kwargs.get('citation_url', 'https://galaxyproject.org/citing-galaxy')
+ self.wiki_url = kwargs.get('wiki_url', 'https://galaxyproject.org/')
+ self.blog_url = kwargs.get('blog_url', None)
+ self.screencasts_url = kwargs.get('screencasts_url', None)
+ self.genomespace_ui_url = kwargs.get('genomespace_ui_url', 'https://gsui.genomespace.org/jsui/')
+ self.library_import_dir = kwargs.get('library_import_dir', None)
+ self.user_library_import_dir = kwargs.get('user_library_import_dir', None)
# Searching data libraries
- self.enable_lucene_library_search = string_as_bool( kwargs.get( 'enable_lucene_library_search', False ) )
- self.enable_whoosh_library_search = string_as_bool( kwargs.get( 'enable_whoosh_library_search', False ) )
- self.whoosh_index_dir = resolve_path( kwargs.get( "whoosh_index_dir", "database/whoosh_indexes" ), self.root )
- self.ftp_upload_dir = kwargs.get( 'ftp_upload_dir', None )
- self.ftp_upload_dir_identifier = kwargs.get( 'ftp_upload_dir_identifier', 'email' ) # attribute on user - email, username, id, etc...
- self.ftp_upload_dir_template = kwargs.get( 'ftp_upload_dir_template', '${ftp_upload_dir}%s${ftp_upload_dir_identifier}' % os.path.sep )
- self.ftp_upload_purge = string_as_bool( kwargs.get( 'ftp_upload_purge', 'True' ) )
- self.ftp_upload_site = kwargs.get( 'ftp_upload_site', None )
- self.allow_library_path_paste = kwargs.get( 'allow_library_path_paste', False )
- self.disable_library_comptypes = kwargs.get( 'disable_library_comptypes', '' ).lower().split( ',' )
- self.watch_tools = kwargs.get( 'watch_tools', 'false' )
- self.watch_tool_data_dir = kwargs.get( 'watch_tool_data_dir', 'false' )
+ self.enable_lucene_library_search = string_as_bool(kwargs.get('enable_lucene_library_search', False))
+ self.enable_whoosh_library_search = string_as_bool(kwargs.get('enable_whoosh_library_search', False))
+ self.whoosh_index_dir = resolve_path(kwargs.get("whoosh_index_dir", "database/whoosh_indexes"), self.root)
+ self.ftp_upload_dir = kwargs.get('ftp_upload_dir', None)
+ self.ftp_upload_dir_identifier = kwargs.get('ftp_upload_dir_identifier', 'email') # attribute on user - email, username, id, etc...
+ self.ftp_upload_dir_template = kwargs.get('ftp_upload_dir_template', '${ftp_upload_dir}%s${ftp_upload_dir_identifier}' % os.path.sep)
+ self.ftp_upload_purge = string_as_bool(kwargs.get('ftp_upload_purge', 'True'))
+ self.ftp_upload_site = kwargs.get('ftp_upload_site', None)
+ self.allow_path_paste = string_as_bool(kwargs.get('allow_path_paste', False))
+ # Support older library-specific path paste option but just default to the new
+ # allow_path_paste value.
+ self.allow_library_path_paste = string_as_bool(kwargs.get('allow_library_path_paste', self.allow_path_paste))
+ self.disable_library_comptypes = kwargs.get('disable_library_comptypes', '').lower().split(',')
+ self.check_upload_content = string_as_bool(kwargs.get('check_upload_content', True))
+ self.watch_tools = kwargs.get('watch_tools', 'false')
+ self.watch_tool_data_dir = kwargs.get('watch_tool_data_dir', 'false')
# One can mildly speed up Galaxy startup time by disabling the indexing of help,
# not needed on production systems but useful if running many functional tests.
- self.index_tool_help = string_as_bool( kwargs.get( "index_tool_help", True ) )
- self.tool_name_boost = kwargs.get( "tool_name_boost", 9 )
- self.tool_section_boost = kwargs.get( "tool_section_boost", 3 )
- self.tool_description_boost = kwargs.get( "tool_description_boost", 2 )
- self.tool_labels_boost = kwargs.get( "tool_labels_boost", 1 )
- self.tool_stub_boost = kwargs.get( "tool_stub_boost", 5 )
- self.tool_help_boost = kwargs.get( "tool_help_boost", 0.5 )
- self.tool_search_limit = kwargs.get( "tool_search_limit", 20 )
- self.tool_enable_ngram_search = kwargs.get( "tool_enable_ngram_search", False )
- self.tool_ngram_minsize = kwargs.get( "tool_ngram_minsize", 3 )
- self.tool_ngram_maxsize = kwargs.get( "tool_ngram_maxsize", 4 )
+ self.index_tool_help = string_as_bool(kwargs.get("index_tool_help", True))
+ self.tool_name_boost = kwargs.get("tool_name_boost", 9)
+ self.tool_section_boost = kwargs.get("tool_section_boost", 3)
+ self.tool_description_boost = kwargs.get("tool_description_boost", 2)
+ self.tool_labels_boost = kwargs.get("tool_labels_boost", 1)
+ self.tool_stub_boost = kwargs.get("tool_stub_boost", 5)
+ self.tool_help_boost = kwargs.get("tool_help_boost", 0.5)
+ self.tool_search_limit = kwargs.get("tool_search_limit", 20)
+ self.tool_enable_ngram_search = kwargs.get("tool_enable_ngram_search", False)
+ self.tool_ngram_minsize = kwargs.get("tool_ngram_minsize", 3)
+ self.tool_ngram_maxsize = kwargs.get("tool_ngram_maxsize", 4)
# Location for tool dependencies.
use_tool_dependencies, tool_dependency_dir, use_cached_dependency_manager, tool_dependency_cache_dir, precache_dependencies = \
parse_dependency_options(kwargs, self.root, self.dependency_resolvers_config_file)
@@ -426,8 +433,8 @@ def __init__( self, **kwargs ):
os.path.join(self.root, "lib", "galaxy", "tools", "deps", "resolvers", "default_conda_mapping.yml"),
]
- self.enable_beta_mulled_containers = string_as_bool( kwargs.get( 'enable_beta_mulled_containers', 'False' ) )
- containers_resolvers_config_file = kwargs.get( 'containers_resolvers_config_file', None )
+ self.enable_beta_mulled_containers = string_as_bool(kwargs.get('enable_beta_mulled_containers', 'False'))
+ containers_resolvers_config_file = kwargs.get('containers_resolvers_config_file', None)
if containers_resolvers_config_file:
containers_resolvers_config_file = resolve_path(containers_resolvers_config_file, self.root)
self.containers_resolvers_config_file = containers_resolvers_config_file
@@ -436,50 +443,50 @@ def __init__( self, **kwargs ):
if involucro_path is None:
involucro_path = os.path.join(tool_dependency_dir, "involucro")
self.involucro_path = resolve_path(involucro_path, self.root)
- self.involucro_auto_init = string_as_bool(kwargs.get( 'involucro_auto_init', True))
+ self.involucro_auto_init = string_as_bool(kwargs.get('involucro_auto_init', True))
- default_job_resubmission_condition = kwargs.get( 'default_job_resubmission_condition', '')
+ default_job_resubmission_condition = kwargs.get('default_job_resubmission_condition', '')
if not default_job_resubmission_condition.strip():
default_job_resubmission_condition = None
self.default_job_resubmission_condition = default_job_resubmission_condition
# Configuration options for taking advantage of nginx features
- self.upstream_gzip = string_as_bool( kwargs.get( 'upstream_gzip', False ) )
- self.apache_xsendfile = string_as_bool( kwargs.get( 'apache_xsendfile', False ) )
- self.nginx_x_accel_redirect_base = kwargs.get( 'nginx_x_accel_redirect_base', False )
- self.nginx_x_archive_files_base = kwargs.get( 'nginx_x_archive_files_base', False )
- self.nginx_upload_store = kwargs.get( 'nginx_upload_store', False )
- self.nginx_upload_path = kwargs.get( 'nginx_upload_path', False )
- self.nginx_upload_job_files_store = kwargs.get( 'nginx_upload_job_files_store', False )
- self.nginx_upload_job_files_path = kwargs.get( 'nginx_upload_job_files_path', False )
+ self.upstream_gzip = string_as_bool(kwargs.get('upstream_gzip', False))
+ self.apache_xsendfile = string_as_bool(kwargs.get('apache_xsendfile', False))
+ self.nginx_x_accel_redirect_base = kwargs.get('nginx_x_accel_redirect_base', False)
+ self.nginx_x_archive_files_base = kwargs.get('nginx_x_archive_files_base', False)
+ self.nginx_upload_store = kwargs.get('nginx_upload_store', False)
+ self.nginx_upload_path = kwargs.get('nginx_upload_path', False)
+ self.nginx_upload_job_files_store = kwargs.get('nginx_upload_job_files_store', False)
+ self.nginx_upload_job_files_path = kwargs.get('nginx_upload_job_files_path', False)
if self.nginx_upload_store:
- self.nginx_upload_store = os.path.abspath( self.nginx_upload_store )
- self.object_store = kwargs.get( 'object_store', 'disk' )
- self.object_store_check_old_style = string_as_bool( kwargs.get( 'object_store_check_old_style', False ) )
- self.object_store_cache_path = resolve_path( kwargs.get( "object_store_cache_path", "database/object_store_cache" ), self.root )
+ self.nginx_upload_store = os.path.abspath(self.nginx_upload_store)
+ self.object_store = kwargs.get('object_store', 'disk')
+ self.object_store_check_old_style = string_as_bool(kwargs.get('object_store_check_old_style', False))
+ self.object_store_cache_path = resolve_path(kwargs.get("object_store_cache_path", "database/object_store_cache"), self.root)
# Handle AWS-specific config options for backward compatibility
- if kwargs.get( 'aws_access_key', None) is not None:
- self.os_access_key = kwargs.get( 'aws_access_key', None )
- self.os_secret_key = kwargs.get( 'aws_secret_key', None )
- self.os_bucket_name = kwargs.get( 's3_bucket', None )
- self.os_use_reduced_redundancy = kwargs.get( 'use_reduced_redundancy', False )
+ if kwargs.get('aws_access_key', None) is not None:
+ self.os_access_key = kwargs.get('aws_access_key', None)
+ self.os_secret_key = kwargs.get('aws_secret_key', None)
+ self.os_bucket_name = kwargs.get('s3_bucket', None)
+ self.os_use_reduced_redundancy = kwargs.get('use_reduced_redundancy', False)
else:
- self.os_access_key = kwargs.get( 'os_access_key', None )
- self.os_secret_key = kwargs.get( 'os_secret_key', None )
- self.os_bucket_name = kwargs.get( 'os_bucket_name', None )
- self.os_use_reduced_redundancy = kwargs.get( 'os_use_reduced_redundancy', False )
- self.os_host = kwargs.get( 'os_host', None )
- self.os_port = kwargs.get( 'os_port', None )
- self.os_is_secure = string_as_bool( kwargs.get( 'os_is_secure', True ) )
- self.os_conn_path = kwargs.get( 'os_conn_path', '/' )
- self.object_store_cache_size = float(kwargs.get( 'object_store_cache_size', -1 ))
- self.distributed_object_store_config_file = kwargs.get( 'distributed_object_store_config_file', None )
+ self.os_access_key = kwargs.get('os_access_key', None)
+ self.os_secret_key = kwargs.get('os_secret_key', None)
+ self.os_bucket_name = kwargs.get('os_bucket_name', None)
+ self.os_use_reduced_redundancy = kwargs.get('os_use_reduced_redundancy', False)
+ self.os_host = kwargs.get('os_host', None)
+ self.os_port = kwargs.get('os_port', None)
+ self.os_is_secure = string_as_bool(kwargs.get('os_is_secure', True))
+ self.os_conn_path = kwargs.get('os_conn_path', '/')
+ self.object_store_cache_size = float(kwargs.get('object_store_cache_size', -1))
+ self.distributed_object_store_config_file = kwargs.get('distributed_object_store_config_file', None)
if self.distributed_object_store_config_file is not None:
- self.distributed_object_store_config_file = resolve_path( self.distributed_object_store_config_file, self.root )
- self.irods_root_collection_path = kwargs.get( 'irods_root_collection_path', None )
- self.irods_default_resource = kwargs.get( 'irods_default_resource', None )
+ self.distributed_object_store_config_file = resolve_path(self.distributed_object_store_config_file, self.root)
+ self.irods_root_collection_path = kwargs.get('irods_root_collection_path', None)
+ self.irods_default_resource = kwargs.get('irods_default_resource', None)
# Parse global_conf and save the parser
- global_conf = kwargs.get( 'global_conf', None )
+ global_conf = kwargs.get('global_conf', None)
global_conf_parser = configparser.ConfigParser()
self.config_file = None
self.global_conf_parser = global_conf_parser
@@ -510,7 +517,7 @@ def __init__( self, **kwargs ):
# local network - used to remotely communicate with the Galaxy API.
web_port = kwargs.get("galaxy_infrastructure_web_port", None)
self.galaxy_infrastructure_web_port = web_port
- galaxy_infrastructure_url = kwargs.get( 'galaxy_infrastructure_url', None )
+ galaxy_infrastructure_url = kwargs.get('galaxy_infrastructure_url', None)
galaxy_infrastructure_url_set = True
if galaxy_infrastructure_url is None:
# Still provide a default but indicate it was not explicitly set
@@ -530,11 +537,11 @@ def __init__( self, **kwargs ):
# Store advanced job management config
self.job_manager = kwargs.get('job_manager', self.server_name).strip()
- self.job_handlers = [ x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',') ]
- self.default_job_handlers = [ x.strip() for x in kwargs.get('default_job_handlers', ','.join( self.job_handlers ) ).split(',') ]
+ self.job_handlers = [x.strip() for x in kwargs.get('job_handlers', self.server_name).split(',')]
+ self.default_job_handlers = [x.strip() for x in kwargs.get('default_job_handlers', ','.join(self.job_handlers)).split(',')]
# Store per-tool runner configs
- self.tool_handlers = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_handlers', 'name' )
- self.tool_runners = self.__read_tool_job_config( global_conf_parser, 'galaxy:tool_runners', 'url' )
+ self.tool_handlers = self.__read_tool_job_config(global_conf_parser, 'galaxy:tool_handlers', 'name')
+ self.tool_runners = self.__read_tool_job_config(global_conf_parser, 'galaxy:tool_runners', 'url')
# Galaxy messaging (AMQP) configuration options
self.amqp = {}
try:
@@ -553,84 +560,84 @@ def __init__( self, **kwargs ):
elif 'database_connection' in kwargs:
self.amqp_internal_connection = "sqlalchemy+" + self.database_connection
else:
- self.amqp_internal_connection = "sqlalchemy+sqlite:///%s?isolation_level=IMMEDIATE" % resolve_path( "database/control.sqlite", self.root )
- self.biostar_url = kwargs.get( 'biostar_url', None )
- self.biostar_key_name = kwargs.get( 'biostar_key_name', None )
- self.biostar_key = kwargs.get( 'biostar_key', None )
- self.biostar_enable_bug_reports = string_as_bool( kwargs.get( 'biostar_enable_bug_reports', True ) )
- self.biostar_never_authenticate = string_as_bool( kwargs.get( 'biostar_never_authenticate', False ) )
- self.pretty_datetime_format = expand_pretty_datetime_format( kwargs.get( 'pretty_datetime_format', '$locale (UTC)' ) )
- self.master_api_key = kwargs.get( 'master_api_key', None )
+ self.amqp_internal_connection = "sqlalchemy+sqlite:///%s?isolation_level=IMMEDIATE" % resolve_path("database/control.sqlite", self.root)
+ self.biostar_url = kwargs.get('biostar_url', None)
+ self.biostar_key_name = kwargs.get('biostar_key_name', None)
+ self.biostar_key = kwargs.get('biostar_key', None)
+ self.biostar_enable_bug_reports = string_as_bool(kwargs.get('biostar_enable_bug_reports', True))
+ self.biostar_never_authenticate = string_as_bool(kwargs.get('biostar_never_authenticate', False))
+ self.pretty_datetime_format = expand_pretty_datetime_format(kwargs.get('pretty_datetime_format', '$locale (UTC)'))
+ self.master_api_key = kwargs.get('master_api_key', None)
if self.master_api_key == "changethis": # default in sample config file
raise ConfigurationError("Insecure configuration, please change master_api_key to something other than default (changethis)")
# Experimental: This will not be enabled by default and will hide
# nonproduction code.
# The api_folders refers to whether the API exposes the /folders section.
- self.api_folders = string_as_bool( kwargs.get( 'api_folders', False ) )
+ self.api_folders = string_as_bool(kwargs.get('api_folders', False))
# This is for testing new library browsing capabilities.
- self.new_lib_browse = string_as_bool( kwargs.get( 'new_lib_browse', False ) )
+ self.new_lib_browse = string_as_bool(kwargs.get('new_lib_browse', False))
# Error logging with sentry
- self.sentry_dsn = kwargs.get( 'sentry_dsn', None )
+ self.sentry_dsn = kwargs.get('sentry_dsn', None)
# Statistics and profiling with statsd
- self.statsd_host = kwargs.get( 'statsd_host', '')
- self.statsd_port = int( kwargs.get( 'statsd_port', 8125 ) )
- self.statsd_prefix = kwargs.get( 'statsd_prefix', 'galaxy' )
+ self.statsd_host = kwargs.get('statsd_host', '')
+ self.statsd_port = int(kwargs.get('statsd_port', 8125))
+ self.statsd_prefix = kwargs.get('statsd_prefix', 'galaxy')
# Statistics and profiling with graphite
- self.graphite_host = kwargs.get( 'graphite_host', '')
- self.graphite_port = int( kwargs.get( 'graphite_port', 2003 ) )
- self.graphite_prefix = kwargs.get( 'graphite_prefix', 'galaxy' )
+ self.graphite_host = kwargs.get('graphite_host', '')
+ self.graphite_port = int(kwargs.get('graphite_port', 2003))
+ self.graphite_prefix = kwargs.get('graphite_prefix', 'galaxy')
# Logging with fluentd
- self.fluent_log = string_as_bool( kwargs.get( 'fluent_log', False ) )
- self.fluent_host = kwargs.get( 'fluent_host', 'localhost' )
- self.fluent_port = int( kwargs.get( 'fluent_port', 24224 ) )
+ self.fluent_log = string_as_bool(kwargs.get('fluent_log', False))
+ self.fluent_host = kwargs.get('fluent_host', 'localhost')
+ self.fluent_port = int(kwargs.get('fluent_port', 24224))
# directory where the visualization registry searches for plugins
self.visualization_plugins_directory = kwargs.get(
- 'visualization_plugins_directory', 'config/plugins/visualizations' )
- ie_dirs = kwargs.get( 'interactive_environment_plugins_directory', None )
+ 'visualization_plugins_directory', 'config/plugins/visualizations')
+ ie_dirs = kwargs.get('interactive_environment_plugins_directory', None)
self.gie_dirs = [d.strip() for d in (ie_dirs.split(",") if ie_dirs else [])]
if ie_dirs and not self.visualization_plugins_directory:
self.visualization_plugins_directory = ie_dirs
elif ie_dirs:
self.visualization_plugins_directory += ",%s" % ie_dirs
- self.gie_swarm_mode = string_as_bool( kwargs.get( 'interactive_environment_swarm_mode', False ) )
+ self.gie_swarm_mode = string_as_bool(kwargs.get('interactive_environment_swarm_mode', False))
- self.proxy_session_map = self.resolve_path( kwargs.get( "dynamic_proxy_session_map", "database/session_map.sqlite" ) )
- self.manage_dynamic_proxy = string_as_bool( kwargs.get( "dynamic_proxy_manage", "True" ) ) # Set to false if being launched externally
- self.dynamic_proxy_debug = string_as_bool( kwargs.get( "dynamic_proxy_debug", "False" ) )
- self.dynamic_proxy_bind_port = int( kwargs.get( "dynamic_proxy_bind_port", "8800" ) )
- self.dynamic_proxy_bind_ip = kwargs.get( "dynamic_proxy_bind_ip", "0.0.0.0" )
- self.dynamic_proxy_external_proxy = string_as_bool( kwargs.get( "dynamic_proxy_external_proxy", "False" ) )
- self.dynamic_proxy_prefix = kwargs.get( "dynamic_proxy_prefix", "gie_proxy" )
+ self.proxy_session_map = self.resolve_path(kwargs.get("dynamic_proxy_session_map", "database/session_map.sqlite"))
+ self.manage_dynamic_proxy = string_as_bool(kwargs.get("dynamic_proxy_manage", "True")) # Set to false if being launched externally
+ self.dynamic_proxy_debug = string_as_bool(kwargs.get("dynamic_proxy_debug", "False"))
+ self.dynamic_proxy_bind_port = int(kwargs.get("dynamic_proxy_bind_port", "8800"))
+ self.dynamic_proxy_bind_ip = kwargs.get("dynamic_proxy_bind_ip", "0.0.0.0")
+ self.dynamic_proxy_external_proxy = string_as_bool(kwargs.get("dynamic_proxy_external_proxy", "False"))
+ self.dynamic_proxy_prefix = kwargs.get("dynamic_proxy_prefix", "gie_proxy")
- self.dynamic_proxy = kwargs.get( "dynamic_proxy", "node" )
- self.dynamic_proxy_golang_noaccess = kwargs.get( "dynamic_proxy_golang_noaccess", 60 )
- self.dynamic_proxy_golang_clean_interval = kwargs.get( "dynamic_proxy_golang_clean_interval", 10 )
- self.dynamic_proxy_golang_docker_address = kwargs.get( "dynamic_proxy_golang_docker_address", "unix:///var/run/docker.sock" )
- self.dynamic_proxy_golang_api_key = kwargs.get( "dynamic_proxy_golang_api_key", None )
+ self.dynamic_proxy = kwargs.get("dynamic_proxy", "node")
+ self.dynamic_proxy_golang_noaccess = kwargs.get("dynamic_proxy_golang_noaccess", 60)
+ self.dynamic_proxy_golang_clean_interval = kwargs.get("dynamic_proxy_golang_clean_interval", 10)
+ self.dynamic_proxy_golang_docker_address = kwargs.get("dynamic_proxy_golang_docker_address", "unix:///var/run/docker.sock")
+ self.dynamic_proxy_golang_api_key = kwargs.get("dynamic_proxy_golang_api_key", None)
# Default chunk size for chunkable datatypes -- 64k
- self.display_chunk_size = int( kwargs.get( 'display_chunk_size', 65536) )
+ self.display_chunk_size = int(kwargs.get('display_chunk_size', 65536))
- self.citation_cache_type = kwargs.get( "citation_cache_type", "file" )
- self.citation_cache_data_dir = self.resolve_path( kwargs.get( "citation_cache_data_dir", "database/citations/data" ) )
- self.citation_cache_lock_dir = self.resolve_path( kwargs.get( "citation_cache_lock_dir", "database/citations/locks" ) )
+ self.citation_cache_type = kwargs.get("citation_cache_type", "file")
+ self.citation_cache_data_dir = self.resolve_path(kwargs.get("citation_cache_data_dir", "database/citations/data"))
+ self.citation_cache_lock_dir = self.resolve_path(kwargs.get("citation_cache_lock_dir", "database/citations/locks"))
self.containers_conf = parse_containers_config(self.containers_config_file)
@property
- def sentry_dsn_public( self ):
+ def sentry_dsn_public(self):
"""
Sentry URL with private key removed for use in client side scripts,
sentry server will need to be configured to accept events
"""
if self.sentry_dsn:
- return re.sub( r"^([^:/?#]+:)?//(\w+):(\w+)", r"\1//\2", self.sentry_dsn )
+ return re.sub(r"^([^:/?#]+:)?//(\w+):(\w+)", r"\1//\2", self.sentry_dsn)
else:
return None
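For reference, a standalone check of what this substitution does (stdlib re only; the DSN value below is made up):

    import re

    dsn = "https://public123:secret456@sentry.example.org/42"
    # Keep the scheme and public key, drop the ":<private key>" part.
    public = re.sub(r"^([^:/?#]+:)?//(\w+):(\w+)", r"\1//\2", dsn)
    assert public == "https://public123@sentry.example.org/42"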
- def reload_sanitize_whitelist( self, explicit=True ):
+ def reload_sanitize_whitelist(self, explicit=True):
self.sanitize_whitelist = []
try:
with open(self.sanitize_whitelist_file, 'rt') as f:
@@ -641,37 +648,37 @@ def reload_sanitize_whitelist( self, explicit=True ):
if explicit:
log.warning("Sanitize log file explicitly specified as '%s' but does not exist, continuing with no tools whitelisted.", self.sanitize_whitelist_file)
- def __parse_config_file_options( self, kwargs ):
+ def __parse_config_file_options(self, kwargs):
"""
Backwards compatibility for config files moved to the config/ dir.
"""
for var in PATH_DEFAULTS:
- setattr( self, var, find_path( kwargs, var, self.root ) )
+ setattr(self, var, find_path(kwargs, var, self.root))
for var, defaults in PATH_LIST_DEFAULTS.items():
paths = []
- if kwargs.get( var, None ) is not None:
- paths = listify( kwargs.get( var ) )
+ if kwargs.get(var, None) is not None:
+ paths = listify(kwargs.get(var))
else:
for default in defaults:
- for path in listify( default ):
- if not os.path.exists( resolve_path( path, self.root ) ):
+ for path in listify(default):
+ if not os.path.exists(resolve_path(path, self.root)):
break
else:
- paths = listify( default )
+ paths = listify(default)
break
else:
- paths = listify( defaults[-1] )
- setattr( self, var, [ resolve_path( x, self.root ) for x in paths ] )
+ paths = listify(defaults[-1])
+ setattr(self, var, [resolve_path(x, self.root) for x in paths])
# Backwards compatibility for names used in too many places to fix
self.datatypes_config = self.datatypes_config_file
self.tool_configs = self.tool_config_file
- def __read_tool_job_config( self, global_conf_parser, section, key ):
+ def __read_tool_job_config(self, global_conf_parser, section, key):
try:
- tool_runners_config = global_conf_parser.items( section )
+ tool_runners_config = global_conf_parser.items(section)
# Process config to group multiple configs for the same tool.
rval = {}
@@ -681,98 +688,98 @@ def __read_tool_job_config( self, global_conf_parser, section, key ):
runner_dict = {}
if tool_config.find("[") != -1:
# Found tool with additional params; put params in dict.
- tool, params = tool_config[:-1].split( "[" )
+ tool, params = tool_config[:-1].split("[")
param_dict = {}
- for param in params.split( "," ):
- name, value = param.split( "@" )
- param_dict[ name ] = value
- runner_dict[ 'params' ] = param_dict
+ for param in params.split(","):
+ name, value = param.split("@")
+ param_dict[name] = value
+ runner_dict['params'] = param_dict
else:
tool = tool_config
# Add runner URL.
- runner_dict[ key ] = val
+ runner_dict[key] = val
# Create tool entry if necessary.
if tool not in rval:
- rval[ tool ] = []
+ rval[tool] = []
# Add entry to runners.
- rval[ tool ].append( runner_dict )
+ rval[tool].append(runner_dict)
return rval
except configparser.NoSectionError:
return {}
- def get( self, key, default ):
- return self.config_dict.get( key, default )
+ def get(self, key, default):
+ return self.config_dict.get(key, default)
- def get_bool( self, key, default ):
+ def get_bool(self, key, default):
if key in self.config_dict:
- return string_as_bool( self.config_dict[key] )
+ return string_as_bool(self.config_dict[key])
else:
return default
- def ensure_tempdir( self ):
- self._ensure_directory( self.new_file_path )
+ def ensure_tempdir(self):
+ self._ensure_directory(self.new_file_path)
- def _ensure_directory( self, path ):
- if path not in [ None, False ] and not os.path.isdir( path ):
+ def _ensure_directory(self, path):
+ if path not in [None, False] and not os.path.isdir(path):
try:
- os.makedirs( path )
+ os.makedirs(path)
except Exception as e:
- raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+ raise ConfigurationError("Unable to create missing directory: %s\n%s" % (path, e))
- def check( self ):
- paths_to_check = [ self.root, self.tool_path, self.tool_data_path, self.template_path ]
+ def check(self):
+ paths_to_check = [self.root, self.tool_path, self.tool_data_path, self.template_path]
# Check that required directories exist
for path in paths_to_check:
- if path not in [ None, False ] and not os.path.isdir( path ):
+ if path not in [None, False] and not os.path.isdir(path):
try:
- os.makedirs( path )
+ os.makedirs(path)
except Exception as e:
- raise ConfigurationError( "Unable to create missing directory: %s\n%s" % ( path, e ) )
+ raise ConfigurationError("Unable to create missing directory: %s\n%s" % (path, e))
# Create the directories that it makes sense to create
for path in (self.new_file_path, self.template_cache, self.ftp_upload_dir,
self.library_import_dir, self.user_library_import_dir,
self.nginx_upload_store, self.whoosh_index_dir,
self.object_store_cache_path):
- self._ensure_directory( path )
+ self._ensure_directory(path)
# Check that required files exist
tool_configs = self.tool_configs
if self.migrated_tools_config not in tool_configs:
- tool_configs.append( self.migrated_tools_config )
+ tool_configs.append(self.migrated_tools_config)
for path in tool_configs:
- if not os.path.exists( path ):
- raise ConfigurationError("Tool config file not found: %s" % path )
- for datatypes_config in listify( self.datatypes_config ):
- if not os.path.isfile( datatypes_config ):
- raise ConfigurationError("Datatypes config file not found: %s" % datatypes_config )
+ if not os.path.exists(path):
+ raise ConfigurationError("Tool config file not found: %s" % path)
+ for datatypes_config in listify(self.datatypes_config):
+ if not os.path.isfile(datatypes_config):
+ raise ConfigurationError("Datatypes config file not found: %s" % datatypes_config)
# Check for deprecated options.
for key in self.config_dict.keys():
if key in self.deprecated_options:
- log.warning( "Config option '%s' is deprecated and will be removed in a future release. Please consult the latest version of the sample configuration file." % key )
+ log.warning("Config option '%s' is deprecated and will be removed in a future release. Please consult the latest version of the sample configuration file." % key)
- def is_admin_user( self, user ):
+ def is_admin_user(self, user):
"""
Determine if the provided user is listed in `admin_users`.
NOTE: This is temporary, admin users will likely be specified in the
database in the future.
"""
- admin_users = [ x.strip() for x in self.get( "admin_users", "" ).split( "," ) ]
+ admin_users = [x.strip() for x in self.get("admin_users", "").split(",")]
return user is not None and user.email in admin_users
- def resolve_path( self, path ):
+ def resolve_path(self, path):
""" Resolve a path relative to Galaxy's root.
"""
- return resolve_path( path, self.root )
+ return resolve_path(path, self.root)
def guess_galaxy_port(self):
# Code derived from Jupyter work ie.mako
config = configparser.SafeConfigParser({'port': '8080'})
if self.config_file:
- config.read( self.config_file )
+ config.read(self.config_file)
try:
port = config.getint('server:%s' % self.server_name, 'port')
@@ -781,23 +788,23 @@ def guess_galaxy_port(self):
port = None
return port
- def _parse_allowed_origin_hostnames( self, kwargs ):
+ def _parse_allowed_origin_hostnames(self, kwargs):
"""
Parse a CSV list of strings/regexp of hostnames that should be allowed
to use CORS and will be sent the Access-Control-Allow-Origin header.
"""
- allowed_origin_hostnames = listify( kwargs.get( 'allowed_origin_hostnames', None ) )
+ allowed_origin_hostnames = listify(kwargs.get('allowed_origin_hostnames', None))
if not allowed_origin_hostnames:
return None
- def parse( string ):
+ def parse(string):
# a string enclosed in fwd slashes will be parsed as a regexp: e.g. //
if string[0] == '/' and string[-1] == '/':
string = string[1:-1]
- return re.compile( string, flags=( re.UNICODE | re.LOCALE ) )
+ return re.compile(string, flags=re.UNICODE)  # re.LOCALE cannot be used with str patterns on Python 3.6+
return string
- return [ parse( v ) for v in allowed_origin_hostnames if v ]
+ return [parse(v) for v in allowed_origin_hostnames if v]
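A standalone sketch of the slash-delimited convention parsed here (hostnames are made up; the sketch compiles with re.UNICODE alone since re.LOCALE is rejected for str patterns on Python 3.6+, matching the amended line above):

    import re

    def parse(string):
        # A value wrapped in forward slashes is treated as a regexp.
        if string[0] == '/' and string[-1] == '/':
            return re.compile(string[1:-1], flags=re.UNICODE)
        return string

    allowed = [parse(v) for v in ["usegalaxy.org", r"/.*\.example\.org/"] if v]
    assert allowed[0] == "usegalaxy.org"
    assert allowed[1].match("test.example.org") is not None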
def parse_dependency_options(kwargs, root, dependency_resolvers_config_file):
@@ -825,7 +832,7 @@ def parse_dependency_options(kwargs, root, dependency_resolvers_config_file):
return use_tool_dependencies, tool_dependency_dir, use_cached_dependency_manager, tool_dependency_cache_dir, precache_dependencies
-def get_database_engine_options( kwargs, model_prefix='' ):
+def get_database_engine_options(kwargs, model_prefix=''):
"""
Allow options for the SQLAlchemy database engine to be passed by using
the prefix "database_engine_option".
@@ -842,18 +849,18 @@ def get_database_engine_options( kwargs, model_prefix='' ):
'server_side_cursors': string_as_bool
}
prefix = "%sdatabase_engine_option_" % model_prefix
- prefix_len = len( prefix )
+ prefix_len = len(prefix)
rval = {}
for key, value in kwargs.items():
- if key.startswith( prefix ):
+ if key.startswith(prefix):
key = key[prefix_len:]
if key in conversions:
value = conversions[key](value)
- rval[ key ] = value
+ rval[key] = value
return rval
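A standalone sketch of the prefix stripping and type conversion above (option values are made up; string_as_bool is stubbed here because the real one lives in galaxy.util, and only a subset of the conversions table is shown):

    def string_as_bool(value):  # stand-in for galaxy.util.string_as_bool
        return str(value).lower() in ('true', 'yes', 'on', '1')

    conversions = {'pool_size': int, 'server_side_cursors': string_as_bool}
    kwargs = {'database_engine_option_pool_size': '10',
              'database_engine_option_server_side_cursors': 'True',
              'unrelated_key': 'ignored'}
    prefix = 'database_engine_option_'
    rval = {}
    for key, value in kwargs.items():
        if key.startswith(prefix):
            key = key[len(prefix):]
            if key in conversions:
                value = conversions[key](value)
            rval[key] = value
    assert rval == {'pool_size': 10, 'server_side_cursors': True}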
-def configure_logging( config ):
+def configure_logging(config):
"""Allow some basic logging configuration to be read from ini file.
This should be able to consume either a galaxy.config.Configuration object
@@ -866,43 +873,43 @@ def configure_logging( config ):
# some simple setup using the 'log_*' values from the config.
parser = getattr(config, "global_conf_parser", None)
if parser:
- paste_configures_logging = config.global_conf_parser.has_section( "loggers" )
+ paste_configures_logging = config.global_conf_parser.has_section("loggers")
else:
paste_configures_logging = False
- auto_configure_logging = not paste_configures_logging and string_as_bool( config.get( "auto_configure_logging", "True" ) )
+ auto_configure_logging = not paste_configures_logging and string_as_bool(config.get("auto_configure_logging", "True"))
if auto_configure_logging:
- format = config.get( "log_format", "%(name)s %(levelname)s %(asctime)s %(message)s" )
- level = logging._levelNames[ config.get( "log_level", "DEBUG" ) ]
- destination = config.get( "log_destination", "stdout" )
- log.info( "Logging at '%s' level to '%s'" % ( level, destination ) )
+ format = config.get("log_format", "%(name)s %(levelname)s %(asctime)s %(message)s")
+ level = logging.getLevelName(config.get("log_level", "DEBUG"))  # _levelNames is gone in Python 3
+ destination = config.get("log_destination", "stdout")
+ log.info("Logging at '%s' level to '%s'" % (level, destination))
# Set level
- root.setLevel( level )
+ root.setLevel(level)
- disable_chatty_loggers = string_as_bool( config.get( "auto_configure_logging_disable_chatty", "True" ) )
+ disable_chatty_loggers = string_as_bool(config.get("auto_configure_logging_disable_chatty", "True"))
if disable_chatty_loggers:
# Turn down paste httpserver logging
if level <= logging.DEBUG:
for chatty_logger in ["paste.httpserver.ThreadPool", "routes.middleware"]:
- logging.getLogger( chatty_logger ).setLevel( logging.WARN )
+ logging.getLogger(chatty_logger).setLevel(logging.WARN)
# Remove old handlers
for h in root.handlers[:]:
root.removeHandler(h)
# Create handler
if destination == "stdout":
- handler = logging.StreamHandler( sys.stdout )
+ handler = logging.StreamHandler(sys.stdout)
else:
- handler = logging.FileHandler( destination )
+ handler = logging.FileHandler(destination)
# Create formatter
- formatter = logging.Formatter( format )
+ formatter = logging.Formatter(format)
# Hook everything up
- handler.setFormatter( formatter )
- root.addHandler( handler )
+ handler.setFormatter(formatter)
+ root.addHandler(handler)
# If sentry is configured, also log to it
if getattr(config, "sentry_dsn", None):
from raven.handlers.logging import SentryHandler
- sentry_handler = SentryHandler( config.sentry_dsn )
- sentry_handler.setLevel( logging.WARN )
+ sentry_handler = SentryHandler(config.sentry_dsn)
+ sentry_handler.setLevel(logging.WARN)
register_postfork_function(root.addHandler, sentry_handler)
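On the level lookup above: logging._levelNames is a private Python 2 table that modern Python 3 no longer provides, so the name-to-number mapping goes through logging.getLevelName, which works on both interpreter lines:

    import logging

    assert logging.getLevelName("DEBUG") == logging.DEBUG   # name -> number
    assert logging.getLevelName(logging.WARN) == "WARNING"  # number -> name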
@@ -910,8 +917,8 @@ class ConfiguresGalaxyMixin:
""" Shared code for configuring Galaxy-like app objects.
"""
- def _configure_genome_builds( self, data_table_name="__dbkeys__", load_old_style=True ):
- self.genome_builds = GenomeBuilds( self, data_table_name=data_table_name, load_old_style=load_old_style )
+ def _configure_genome_builds(self, data_table_name="__dbkeys__", load_old_style=True):
+ self.genome_builds = GenomeBuilds(self, data_table_name=data_table_name, load_old_style=load_old_style)
def wait_for_toolbox_reload(self, old_toolbox):
timer = ExecutionTimer()
@@ -922,20 +929,20 @@ def wait_for_toolbox_reload(self, old_toolbox):
break
time.sleep(0.1)
- def _configure_toolbox( self ):
+ def _configure_toolbox(self):
from galaxy import tools
from galaxy.managers.citations import CitationsManager
from galaxy.tools.deps import containers
import galaxy.tools.search
- self.citations_manager = CitationsManager( self )
+ self.citations_manager = CitationsManager(self)
self._toolbox_lock = threading.RLock()
# Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
tool_configs = self.config.tool_configs
if self.config.migrated_tools_config not in tool_configs:
- tool_configs.append( self.config.migrated_tools_config )
- self.toolbox = tools.ToolBox( tool_configs, self.config.tool_path, self )
+ tool_configs.append(self.config.migrated_tools_config)
+ self.toolbox = tools.ToolBox(tool_configs, self.config.tool_path, self)
galaxy_root_dir = os.path.abspath(self.config.root)
file_path = os.path.abspath(getattr(self.config, "file_path"))
app_info = containers.AppInfo(
@@ -954,26 +961,26 @@ def _configure_toolbox( self ):
self.toolbox_search = galaxy.tools.search.ToolBoxSearch(self.toolbox, index_help)
self.reindex_tool_search()
- def reindex_tool_search( self ):
+ def reindex_tool_search(self):
# Call this when tools are added or removed.
self.toolbox_search.build_index(tool_cache=self.tool_cache)
self.tool_cache.reset_status()
- def _configure_tool_data_tables( self, from_shed_config ):
+ def _configure_tool_data_tables(self, from_shed_config):
from galaxy.tools.data import ToolDataTableManager
# Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
- self.tool_data_tables = ToolDataTableManager( tool_data_path=self.config.tool_data_path,
- config_filename=self.config.tool_data_table_config_path )
+ self.tool_data_tables = ToolDataTableManager(tool_data_path=self.config.tool_data_path,
+ config_filename=self.config.tool_data_table_config_path)
# Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
- self.tool_data_tables.load_from_config_file( config_filename=self.config.shed_tool_data_table_config,
- tool_data_path=self.tool_data_tables.tool_data_path,
- from_shed_config=from_shed_config )
+ self.tool_data_tables.load_from_config_file(config_filename=self.config.shed_tool_data_table_config,
+ tool_data_path=self.tool_data_tables.tool_data_path,
+ from_shed_config=from_shed_config)
- def _configure_datatypes_registry( self, installed_repository_manager=None ):
+ def _configure_datatypes_registry(self, installed_repository_manager=None):
from galaxy.datatypes import registry
# Create an empty datatypes registry.
- self.datatypes_registry = registry.Registry( self.config )
+ self.datatypes_registry = registry.Registry(self.config)
if installed_repository_manager:
# Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
# load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
@@ -984,30 +991,30 @@ def _configure_datatypes_registry( self, installed_repository_manager=None ):
installed_repository_manager.load_proprietary_datatypes()
# Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
datatypes_configs = self.config.datatypes_config
- for datatypes_config in listify( datatypes_configs ):
+ for datatypes_config in listify(datatypes_configs):
# Setting override=False would make earlier files take
# precedence - but then they wouldn't override tool shed
# datatypes.
- self.datatypes_registry.load_datatypes( self.config.root, datatypes_config, override=True )
+ self.datatypes_registry.load_datatypes(self.config.root, datatypes_config, override=True)
- def _configure_object_store( self, **kwds ):
+ def _configure_object_store(self, **kwds):
from galaxy.objectstore import build_object_store_from_config
- self.object_store = build_object_store_from_config( self.config, **kwds )
+ self.object_store = build_object_store_from_config(self.config, **kwds)
- def _configure_security( self ):
+ def _configure_security(self):
from galaxy.web import security
- self.security = security.SecurityHelper( id_secret=self.config.id_secret )
+ self.security = security.SecurityHelper(id_secret=self.config.id_secret)
- def _configure_tool_shed_registry( self ):
+ def _configure_tool_shed_registry(self):
import tool_shed.tool_shed_registry
# Set up the tool sheds registry
- if os.path.isfile( self.config.tool_sheds_config_file ):
- self.tool_shed_registry = tool_shed.tool_shed_registry.Registry( self.config.root, self.config.tool_sheds_config_file )
+ if os.path.isfile(self.config.tool_sheds_config_file):
+ self.tool_shed_registry = tool_shed.tool_shed_registry.Registry(self.config.root, self.config.tool_sheds_config_file)
else:
self.tool_shed_registry = None
- def _configure_models( self, check_migrate_databases=False, check_migrate_tools=False, config_file=None ):
+ def _configure_models(self, check_migrate_databases=False, check_migrate_tools=False, config_file=None):
"""
Preconditions: object_store must be set on self.
"""
@@ -1018,17 +1025,17 @@ def _configure_models( self, check_migrate_databases=False, check_migrate_tools=
install_db_url = self.config.install_database_connection
# TODO: Consider more aggressive check here that this is not the same
# database file under the hood.
- combined_install_database = not( install_db_url and install_db_url != db_url )
+ combined_install_database = not (install_db_url and install_db_url != db_url)
install_db_url = install_db_url or db_url
if check_migrate_databases:
# Initialize database / check for appropriate schema version. # If this
# is a new installation, we'll restrict the tool migration messaging.
from galaxy.model.migrate.check import create_or_verify_database
- create_or_verify_database( db_url, config_file, self.config.database_engine_options, app=self )
+ create_or_verify_database(db_url, config_file, self.config.database_engine_options, app=self)
if not combined_install_database:
from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
- tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )
+ tsi_create_or_verify_database(install_db_url, self.config.install_database_engine_options, app=self)
if check_migrate_tools:
# Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
@@ -1037,18 +1044,18 @@ def _configure_models( self, check_migrate_databases=False, check_migrate_tools=
install_database_options = self.config.database_engine_options
else:
install_database_options = self.config.install_database_engine_options
- verify_tools( self, install_db_url, config_file, install_database_options )
+ verify_tools(self, install_db_url, config_file, install_database_options)
from galaxy.model import mapping
- self.model = mapping.init( self.config.file_path,
- db_url,
- self.config.database_engine_options,
- map_install_models=combined_install_database,
- database_query_profiling_proxy=self.config.database_query_profiling_proxy,
- object_store=self.object_store,
- trace_logger=getattr(self, "trace_logger", None),
- use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ),
- slow_query_log_threshold=self.config.slow_query_log_threshold )
+ self.model = mapping.init(self.config.file_path,
+ db_url,
+ self.config.database_engine_options,
+ map_install_models=combined_install_database,
+ database_query_profiling_proxy=self.config.database_query_profiling_proxy,
+ object_store=self.object_store,
+ trace_logger=getattr(self, "trace_logger", None),
+ use_pbkdf2=self.config.get_bool('use_pbkdf2', True),
+ slow_query_log_threshold=self.config.slow_query_log_threshold)
if combined_install_database:
log.info("Install database targetting Galaxy's database configuration.")
@@ -1058,9 +1065,9 @@ def _configure_models( self, check_migrate_databases=False, check_migrate_tools=
install_db_url = self.config.install_database_connection
log.info("Install database using its own connection %s" % install_db_url)
install_db_engine_options = self.config.install_database_engine_options
- self.install_model = install_mapping.init( install_db_url,
- install_db_engine_options )
+ self.install_model = install_mapping.init(install_db_url,
+ install_db_engine_options)
- def _configure_signal_handlers( self, handlers ):
+ def _configure_signal_handlers(self, handlers):
for sig, handler in handlers.items():
- signal.signal( sig, handler )
+ signal.signal(sig, handler)
diff --git a/lib/galaxy/containers/__init__.py b/lib/galaxy/containers/__init__.py
index 1e479b271a6d..4b33dbe8d5a3 100644
--- a/lib/galaxy/containers/__init__.py
+++ b/lib/galaxy/containers/__init__.py
@@ -121,7 +121,7 @@ def __init__(self, conf, key, containers_config_file):
self._key = key
self._containers_config_file = containers_config_file
mro = reversed(self.__class__.__mro__)
- mro.next()
+ next(mro)
self._conf = ContainerInterfaceConfig()
for c in mro:
self._conf.update(c.conf_defaults)
@@ -310,7 +310,7 @@ def parse_containers_config(containers_config_file):
conf.update(c.get('containers', {}))
except (OSError, IOError) as exc:
if exc.errno == errno.ENOENT:
- log.warning("config file '%s' does not exist, running with default config", containers_config_file)
+ log.debug("config file '%s' does not exist, running with default config", containers_config_file)
else:
raise
return conf
@@ -320,10 +320,8 @@ def _get_interface_modules():
interfaces = []
modules = submodules(sys.modules[__name__])
for module in modules:
- classes = filter(
- lambda x: inspect.isclass(x)
- and not x == ContainerInterface # noqa: E131
- and issubclass(x, ContainerInterface), # noqa: E131
- [getattr(module, x) for x in dir(module)])
+ module_attrs = [getattr(module, _) for _ in dir(module)]
+ classes = [_ for _ in module_attrs if inspect.isclass(_) and
+ _ is not ContainerInterface and issubclass(_, ContainerInterface)]
interfaces.extend(classes)
- return dict([(x.container_type, x) for x in interfaces])
+ return dict((x.container_type, x) for x in interfaces)
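The mro.next() to next(mro) change above is the standard Python 3 fix: reversed() returns an iterator whose .next() method was renamed __next__ in Python 3, while the next() builtin works on both. A minimal illustration with toy classes (not Galaxy's):

    class Base(object):
        pass

    class Child(Base):
        pass

    mro = reversed(Child.__mro__)  # yields object, Base, Child in turn
    assert next(mro) is object     # skip `object`, as the __init__ above does
    assert next(mro) is Base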
diff --git a/lib/galaxy/dataset_collections/builder.py b/lib/galaxy/dataset_collections/builder.py
index 48c160c9cb73..b32b26ae3121 100644
--- a/lib/galaxy/dataset_collections/builder.py
+++ b/lib/galaxy/dataset_collections/builder.py
@@ -2,24 +2,24 @@
from galaxy.util.odict import odict
-def build_collection( type, dataset_instances ):
+def build_collection(type, dataset_instances):
"""
Build DatasetCollection with populated DatasetcollectionElement objects
corresponding to the supplied dataset instances or throw exception if
this is not a valid collection of the specified type.
"""
- dataset_collection = model.DatasetCollection( )
- set_collection_elements( dataset_collection, type, dataset_instances )
+ dataset_collection = model.DatasetCollection()
+ set_collection_elements(dataset_collection, type, dataset_instances)
return dataset_collection
-def set_collection_elements( dataset_collection, type, dataset_instances ):
+def set_collection_elements(dataset_collection, type, dataset_instances):
element_index = 0
elements = []
- for element in type.generate_elements( dataset_instances ):
+ for element in type.generate_elements(dataset_instances):
element.element_index = element_index
element.collection = dataset_collection
- elements.append( element )
+ elements.append(element)
element_index += 1
@@ -48,7 +48,7 @@ def get_level(self, identifier):
return self._current_elements[identifier]
def add_dataset(self, identifier, dataset_instance):
- self._current_elements[ identifier ] = dataset_instance
+ self._current_elements[identifier] = dataset_instance
def build_elements(self):
elements = self._current_elements
@@ -61,7 +61,7 @@ def build_elements(self):
def build(self):
type_plugin = self._collection_type_description.rank_type_plugin()
- collection = build_collection( type_plugin, self.build_elements() )
+ collection = build_collection(type_plugin, self.build_elements())
collection.collection_type = self._collection_type_description.collection_type
return collection
@@ -74,17 +74,17 @@ def _nested_collection(self):
return self._collection_type_description.has_subcollections()
-class BoundCollectionBuilder( CollectionBuilder ):
+class BoundCollectionBuilder(CollectionBuilder):
""" More stateful builder that is bound to a particular model object. """
- def __init__( self, dataset_collection, collection_type_description ):
+ def __init__(self, dataset_collection, collection_type_description):
self.dataset_collection = dataset_collection
if dataset_collection.populated:
raise Exception("Cannot reset elements of an already populated dataset collection.")
- super( BoundCollectionBuilder, self ).__init__( collection_type_description )
+ super(BoundCollectionBuilder, self).__init__(collection_type_description)
- def populate( self ):
+ def populate(self):
elements = self.build_elements()
type_plugin = self._collection_type_description.rank_type_plugin()
- set_collection_elements( self.dataset_collection, type_plugin, elements )
+ set_collection_elements(self.dataset_collection, type_plugin, elements)
self.dataset_collection.mark_as_populated()
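A stand-in sketch of the numbering contract enforced by set_collection_elements (plain stub objects instead of Galaxy's model classes; a generator stands in for the type plugin's generate_elements):

    class StubElement(object):
        def __init__(self, identifier):
            self.element_identifier = identifier
            self.element_index = None
            self.collection = None

    def generate_elements(instances):  # stands in for a type plugin
        for identifier in instances:
            yield StubElement(identifier)

    collection = object()
    elements = []
    for index, element in enumerate(generate_elements(["forward", "reverse"])):
        element.element_index = index
        element.collection = collection
        elements.append(element)
    assert [e.element_index for e in elements] == [0, 1]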
diff --git a/lib/galaxy/dataset_collections/matching.py b/lib/galaxy/dataset_collections/matching.py
index 7230916a5d42..739ef4cc5e49 100644
--- a/lib/galaxy/dataset_collections/matching.py
+++ b/lib/galaxy/dataset_collections/matching.py
@@ -9,29 +9,29 @@
CANNOT_MATCH_ERROR_MESSAGE = "Cannot match collection types."
-class CollectionsToMatch( object ):
+class CollectionsToMatch(object):
""" Structure representing a set of collections that need to be matched up
when running tools (possibly workflows in the future as well).
"""
- def __init__( self ):
+ def __init__(self):
self.collections = {}
- def add( self, input_name, hdca, subcollection_type=None, linked=True ):
- self.collections[ input_name ] = bunch.Bunch(
+ def add(self, input_name, hdca, subcollection_type=None, linked=True):
+ self.collections[input_name] = bunch.Bunch(
hdca=hdca,
subcollection_type=subcollection_type,
linked=linked,
)
- def has_collections( self ):
- return len( self.collections ) > 0
+ def has_collections(self):
+ return len(self.collections) > 0
- def items( self ):
+ def items(self):
return self.collections.items()
-class MatchingCollections( object ):
+class MatchingCollections(object):
""" Structure holding the result of matching a list of collections
together. This class being different than the class above and being
created in the dataset_collections_service layer may seem like
@@ -41,49 +41,49 @@ class MatchingCollections( object ):
service - hence the complexity now.
"""
- def __init__( self ):
+ def __init__(self):
self.linked_structure = None
self.unlinked_structures = []
self.collections = {}
- def __attempt_add_to_linked_match( self, input_name, hdca, collection_type_description, subcollection_type ):
- structure = get_structure( hdca, collection_type_description, leaf_subcollection_type=subcollection_type )
+ def __attempt_add_to_linked_match(self, input_name, hdca, collection_type_description, subcollection_type):
+ structure = get_structure(hdca, collection_type_description, leaf_subcollection_type=subcollection_type)
if not self.linked_structure:
self.linked_structure = structure
- self.collections[ input_name ] = hdca
+ self.collections[input_name] = hdca
else:
- if not self.linked_structure.can_match( structure ):
- raise exceptions.MessageException( CANNOT_MATCH_ERROR_MESSAGE )
- self.collections[ input_name ] = hdca
+ if not self.linked_structure.can_match(structure):
+ raise exceptions.MessageException(CANNOT_MATCH_ERROR_MESSAGE)
+ self.collections[input_name] = hdca
- def slice_collections( self ):
- return self.linked_structure.walk_collections( self.collections )
+ def slice_collections(self):
+ return self.linked_structure.walk_collections(self.collections)
@property
- def structure( self ):
+ def structure(self):
"""Yield cross product of all unlinked datasets to linked dataset."""
effective_structure = leaf
for unlinked_structure in self.unlinked_structures:
- effective_structure = effective_structure.multiply( unlinked_structure )
+ effective_structure = effective_structure.multiply(unlinked_structure)
linked_structure = self.linked_structure or leaf
- effective_structure = effective_structure.multiply( linked_structure )
+ effective_structure = effective_structure.multiply(linked_structure)
return None if effective_structure.is_leaf else effective_structure
@staticmethod
- def for_collections( collections_to_match, collection_type_descriptions ):
+ def for_collections(collections_to_match, collection_type_descriptions):
if not collections_to_match.has_collections():
return None
matching_collections = MatchingCollections()
- for input_key, to_match in collections_to_match.items():
+ for input_key, to_match in sorted(collections_to_match.items()):
hdca = to_match.hdca
- collection_type_description = collection_type_descriptions.for_collection_type( hdca.collection.collection_type )
+ collection_type_description = collection_type_descriptions.for_collection_type(hdca.collection.collection_type)
subcollection_type = to_match.subcollection_type
if to_match.linked:
- matching_collections.__attempt_add_to_linked_match( input_key, hdca, collection_type_description, subcollection_type )
+ matching_collections.__attempt_add_to_linked_match(input_key, hdca, collection_type_description, subcollection_type)
else:
- structure = get_structure( hdca, collection_type_description, leaf_subcollection_type=subcollection_type )
- matching_collections.unlinked_structures.append( structure )
+ structure = get_structure(hdca, collection_type_description, leaf_subcollection_type=subcollection_type)
+ matching_collections.unlinked_structures.append(structure)
return matching_collections
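Note the one behavioural change in this hunk: for_collections now iterates sorted(collections_to_match.items()) rather than raw dict order. Dict iteration order is not guaranteed on the interpreters this code targets, so sorting by input name makes the choice of linked_structure deterministic:

    collections = {"input_b": "hdca2", "input_a": "hdca1"}
    # Raw .items() order depends on hashing; sorted() is stable by key.
    assert [k for k, _ in sorted(collections.items())] == ["input_a", "input_b"]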
diff --git a/lib/galaxy/dataset_collections/registry.py b/lib/galaxy/dataset_collections/registry.py
index 16319fa38e25..43acdc18203b 100644
--- a/lib/galaxy/dataset_collections/registry.py
+++ b/lib/galaxy/dataset_collections/registry.py
@@ -10,18 +10,18 @@
class DatasetCollectionTypesRegistry(object):
- def __init__( self, app ):
- self.__plugins = dict( [ ( p.collection_type, p() ) for p in PLUGIN_CLASSES ] )
+ def __init__(self, app):
+ self.__plugins = dict([(p.collection_type, p()) for p in PLUGIN_CLASSES])
- def get( self, plugin_type ):
- return self.__plugins[ plugin_type ]
+ def get(self, plugin_type):
+ return self.__plugins[plugin_type]
- def prototype( self, plugin_type ):
- plugin_type_object = self.get( plugin_type )
- if not hasattr( plugin_type_object, 'prototype_elements' ):
- raise Exception( "Cannot pre-determine structure for collection of type %s" % plugin_type )
+ def prototype(self, plugin_type):
+ plugin_type_object = self.get(plugin_type)
+ if not hasattr(plugin_type_object, 'prototype_elements'):
+ raise Exception("Cannot pre-determine structure for collection of type %s" % plugin_type)
dataset_collection = model.DatasetCollection()
- elements = [ e for e in plugin_type_object.prototype_elements() ]
+ elements = [e for e in plugin_type_object.prototype_elements()]
dataset_collection.elements = elements
return dataset_collection
diff --git a/lib/galaxy/dataset_collections/structure.py b/lib/galaxy/dataset_collections/structure.py
index 889dc3c8ecbf..3278dd65e40f 100644
--- a/lib/galaxy/dataset_collections/structure.py
+++ b/lib/galaxy/dataset_collections/structure.py
@@ -1,134 +1,134 @@
""" Module for reasoning about structure of and matching hierarchical collections of data.
"""
import logging
-log = logging.getLogger( __name__ )
+log = logging.getLogger(__name__)
from .type_description import map_over_collection_type
-class Leaf( object ):
+class Leaf(object):
- def __len__( self ):
+ def __len__(self):
return 1
@property
- def is_leaf( self ):
+ def is_leaf(self):
return True
- def clone( self ):
+ def clone(self):
return self
- def multiply( self, other_structure ):
+ def multiply(self, other_structure):
return other_structure.clone()
leaf = Leaf()
-class Tree( object ):
+class Tree(object):
- def __init__( self, children, collection_type_description ):
+ def __init__(self, children, collection_type_description):
self.children = children
self.collection_type_description = collection_type_description
@staticmethod
- def for_dataset_collection( dataset_collection, collection_type_description ):
+ def for_dataset_collection(dataset_collection, collection_type_description):
children = []
for element in dataset_collection.elements:
if collection_type_description.has_subcollections():
child_collection = element.child_collection
subcollection_type_description = collection_type_description.subcollection_type_description() # Type description of children
- tree = Tree.for_dataset_collection( child_collection, collection_type_description=subcollection_type_description )
- children.append( ( element.element_identifier, tree ) )
+ tree = Tree.for_dataset_collection(child_collection, collection_type_description=subcollection_type_description)
+ children.append((element.element_identifier, tree))
else:
- children.append( ( element.element_identifier, leaf ) )
- return Tree( children, collection_type_description )
+ children.append((element.element_identifier, leaf))
+ return Tree(children, collection_type_description)
- def walk_collections( self, hdca_dict ):
- return self._walk_collections( dict_map( lambda hdca: hdca.collection, hdca_dict ) )
+ def walk_collections(self, hdca_dict):
+ return self._walk_collections(dict_map(lambda hdca: hdca.collection, hdca_dict))
- def _walk_collections( self, collection_dict ):
- for index, ( identifier, substructure ) in enumerate( self.children ):
- def element( collection ):
- return collection[ index ]
+ def _walk_collections(self, collection_dict):
+ for index, (identifier, substructure) in enumerate(self.children):
+ def element(collection):
+ return collection[index]
if substructure.is_leaf:
- yield dict_map( element, collection_dict )
+ yield dict_map(element, collection_dict)
else:
- sub_collections = dict_map( lambda collection: element( collection ).child_collection, collection_dict )
- for element in substructure._walk_collections( sub_collections ):
+ sub_collections = dict_map(lambda collection: element(collection).child_collection, collection_dict)
+ for element in substructure._walk_collections(sub_collections):
yield element
@property
- def is_leaf( self ):
+ def is_leaf(self):
return False
- def can_match( self, other_structure ):
- if not self.collection_type_description.can_match_type( other_structure.collection_type_description ):
+ def can_match(self, other_structure):
+ if not self.collection_type_description.can_match_type(other_structure.collection_type_description):
return False
- if len( self.children ) != len( other_structure.children ):
+ if len(self.children) != len(other_structure.children):
return False
- for my_child, other_child in zip( self.children, other_structure.children ):
+ for my_child, other_child in zip(self.children, other_structure.children):
# At least one is nested collection...
- if my_child[ 1 ].is_leaf != other_child[ 1 ].is_leaf:
+ if my_child[1].is_leaf != other_child[1].is_leaf:
return False
- if not my_child[ 1 ].is_leaf and not my_child[ 1 ].can_match( other_child[ 1 ]):
+ if not my_child[1].is_leaf and not my_child[1].can_match(other_child[1]):
return False
return True
- def __len__( self ):
- return sum( [ len( c[ 1 ] ) for c in self.children ] )
+ def __len__(self):
+ return sum([len(c[1]) for c in self.children])
- def element_identifiers_for_outputs( self, trans, outputs ):
+ def element_identifiers_for_outputs(self, trans, outputs):
element_identifiers = []
elements_collection_type = None
for identifier, child in self.children:
- if isinstance( child, Tree ):
- child_identifiers = child.element_identifiers_for_outputs( trans, outputs[ 0:len( child ) ] )
- child_identifiers[ "name" ] = identifier
- element_identifiers.append( child_identifiers )
- elements_collection_type = child_identifiers[ "collection_type" ]
+ if isinstance(child, Tree):
+ child_identifiers = child.element_identifiers_for_outputs(trans, outputs[0:len(child)])
+ child_identifiers["name"] = identifier
+ element_identifiers.append(child_identifiers)
+ elements_collection_type = child_identifiers["collection_type"]
else:
- output_object = outputs[ 0 ]
- element_identifiers.append( dict( name=identifier, __object__=output_object ) )
- if hasattr( output_object, "collection_type" ):
+ output_object = outputs[0]
+ element_identifiers.append(dict(name=identifier, __object__=output_object))
+ if hasattr(output_object, "collection_type"):
elements_collection_type = output_object.collection_type
- outputs = outputs[ len( child ): ]
+ outputs = outputs[len(child):]
- collection_type = map_over_collection_type( self.collection_type_description.rank_collection_type(), elements_collection_type )
+ collection_type = map_over_collection_type(self.collection_type_description.rank_collection_type(), elements_collection_type)
return dict(
src="new_collection",
collection_type=collection_type,
element_identifiers=element_identifiers,
)
- def multiply( self, other_structure ):
+ def multiply(self, other_structure):
if other_structure.is_leaf:
return self.clone()
- new_collection_type = self.collection_type_description.multiply( other_structure.collection_type_description )
+ new_collection_type = self.collection_type_description.multiply(other_structure.collection_type_description)
new_children = []
for (identifier, structure) in self.children:
- new_children.append( (identifier, structure.multiply( other_structure ) ) )
+ new_children.append((identifier, structure.multiply(other_structure)))
- return Tree( new_children, new_collection_type )
+ return Tree(new_children, new_collection_type)
- def clone( self ):
+ def clone(self):
cloned_children = [(_[0], _[1].clone()) for _ in self.children]
- return Tree( cloned_children, self.collection_type_description )
+ return Tree(cloned_children, self.collection_type_description)
-def dict_map( func, input_dict ):
- return dict( ( k, func(v) ) for k, v in input_dict.items() )
+def dict_map(func, input_dict):
+ return dict((k, func(v)) for k, v in input_dict.items())
-def get_structure( dataset_collection_instance, collection_type_description, leaf_subcollection_type=None ):
+def get_structure(dataset_collection_instance, collection_type_description, leaf_subcollection_type=None):
if leaf_subcollection_type:
- collection_type_description = collection_type_description.effective_collection_type_description( leaf_subcollection_type )
+ collection_type_description = collection_type_description.effective_collection_type_description(leaf_subcollection_type)
- return Tree.for_dataset_collection( dataset_collection_instance.collection, collection_type_description )
+ return Tree.for_dataset_collection(dataset_collection_instance.collection, collection_type_description)
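dict_map is just a value-wise map over a dict, e.g.:

    def dict_map(func, input_dict):
        return dict((k, func(v)) for k, v in input_dict.items())

    assert dict_map(len, {"a": [1, 2], "b": [3]}) == {"a": 2, "b": 1}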
diff --git a/lib/galaxy/dataset_collections/subcollections.py b/lib/galaxy/dataset_collections/subcollections.py
index 6a18d1a7f31f..3a536a34da49 100644
--- a/lib/galaxy/dataset_collections/subcollections.py
+++ b/lib/galaxy/dataset_collections/subcollections.py
@@ -1,25 +1,25 @@
from galaxy import exceptions
-def split_dataset_collection_instance( dataset_collection_instance, collection_type ):
+def split_dataset_collection_instance(dataset_collection_instance, collection_type):
""" Split up collection into collection.
"""
- return _split_dataset_collection( dataset_collection_instance.collection, collection_type )
+ return _split_dataset_collection(dataset_collection_instance.collection, collection_type)
-def _split_dataset_collection( dataset_collection, collection_type ):
+def _split_dataset_collection(dataset_collection, collection_type):
this_collection_type = dataset_collection.collection_type
- if not this_collection_type.endswith( collection_type ) or this_collection_type == collection_type:
- raise exceptions.MessageException( "Cannot split collection in desired fashion." )
+ if not this_collection_type.endswith(collection_type) or this_collection_type == collection_type:
+ raise exceptions.MessageException("Cannot split collection in desired fashion.")
split_elements = []
for element in dataset_collection.elements:
child_collection = element.child_collection
if child_collection is None:
- raise exceptions.MessageException( "Cannot split collection in desired fashion." )
+ raise exceptions.MessageException("Cannot split collection in desired fashion.")
if child_collection.collection_type == collection_type:
- split_elements.append( element )
+ split_elements.append(element)
else:
- split_elements.extend( _split_dataset_collection( element.child_collection, element.child_collection.collection_type ) )
+ split_elements.extend(_split_dataset_collection(element.child_collection, element.child_collection.collection_type))
return split_elements
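The guard at the top of _split_dataset_collection is plain string logic over the colon-separated type names; a standalone check (the type strings are just examples):

    def can_split(this_collection_type, collection_type):
        # Splitting only makes sense into a proper, trailing subtype.
        return (this_collection_type.endswith(collection_type)
                and this_collection_type != collection_type)

    assert can_split("list:paired", "paired")
    assert not can_split("paired", "paired")     # same type: nothing to split
    assert not can_split("list:paired", "list")  # "list" is a prefix, not a suffix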
diff --git a/lib/galaxy/dataset_collections/type_description.py b/lib/galaxy/dataset_collections/type_description.py
index 84d2636a977e..95a97bd14b96 100644
--- a/lib/galaxy/dataset_collections/type_description.py
+++ b/lib/galaxy/dataset_collections/type_description.py
@@ -1,17 +1,17 @@
-class CollectionTypeDescriptionFactory( object ):
+class CollectionTypeDescriptionFactory(object):
- def __init__( self, type_registry ):
+ def __init__(self, type_registry):
# type_registry is accepted but not used yet; we expect to need it
# someday.
self.type_registry = type_registry
- def for_collection_type( self, collection_type ):
- return CollectionTypeDescription( collection_type, self )
+ def for_collection_type(self, collection_type):
+ return CollectionTypeDescription(collection_type, self)
-class CollectionTypeDescription( object ):
+class CollectionTypeDescription(object):
""" Abstraction over dataset collection type that ties together string
representation in database/model with type registry.
@@ -40,25 +40,25 @@ class CollectionTypeDescription( object ):
'list'
"""
- def __init__( self, collection_type, collection_type_description_factory ):
+ def __init__(self, collection_type, collection_type_description_factory):
self.collection_type = collection_type
self.collection_type_description_factory = collection_type_description_factory
- self.__has_subcollections = self.collection_type.find( ":" ) > 0
+ self.__has_subcollections = self.collection_type.find(":") > 0
- def effective_collection_type_description( self, subcollection_type ):
- effective_collection_type = self.effective_collection_type( subcollection_type )
- return self.collection_type_description_factory.for_collection_type( effective_collection_type )
+ def effective_collection_type_description(self, subcollection_type):
+ effective_collection_type = self.effective_collection_type(subcollection_type)
+ return self.collection_type_description_factory.for_collection_type(effective_collection_type)
- def effective_collection_type( self, subcollection_type ):
- if hasattr( subcollection_type, 'collection_type' ):
+ def effective_collection_type(self, subcollection_type):
+ if hasattr(subcollection_type, 'collection_type'):
subcollection_type = subcollection_type.collection_type
- if not self.has_subcollections_of_type( subcollection_type ):
- raise ValueError( "Cannot compute effective subcollection type of %s over %s" % ( subcollection_type, self ) )
+ if not self.has_subcollections_of_type(subcollection_type):
+ raise ValueError("Cannot compute effective subcollection type of %s over %s" % (subcollection_type, self))
- return self.collection_type[ :-( len( subcollection_type ) + 1 ) ]
+ return self.collection_type[:-(len(subcollection_type) + 1)]
- def has_subcollections_of_type( self, other_collection_type ):
+ def has_subcollections_of_type(self, other_collection_type):
""" Take in another type (either flat string or another
CollectionTypeDescription) and determine if this collection contains
subcollections matching that type.
@@ -67,61 +67,61 @@ def has_subcollections_of_type( self, other_collection_type ):
for this to return True if these subtypes are proper (i.e. a type
is not considered to have subcollections of its own type).
"""
- if hasattr( other_collection_type, 'collection_type' ):
+ if hasattr(other_collection_type, 'collection_type'):
other_collection_type = other_collection_type.collection_type
collection_type = self.collection_type
- return collection_type.endswith( other_collection_type ) and collection_type != other_collection_type
+ return collection_type.endswith(other_collection_type) and collection_type != other_collection_type
- def is_subcollection_of_type( self, other_collection_type ):
- if not hasattr( other_collection_type, 'collection_type' ):
- other_collection_type = self.collection_type_description_factory.for_collection_type( other_collection_type )
- return other_collection_type.has_subcollections_of_type( self )
+ def is_subcollection_of_type(self, other_collection_type):
+ if not hasattr(other_collection_type, 'collection_type'):
+ other_collection_type = self.collection_type_description_factory.for_collection_type(other_collection_type)
+ return other_collection_type.has_subcollections_of_type(self)
- def can_match_type( self, other_collection_type ):
- if hasattr( other_collection_type, 'collection_type' ):
+ def can_match_type(self, other_collection_type):
+ if hasattr(other_collection_type, 'collection_type'):
other_collection_type = other_collection_type.collection_type
collection_type = self.collection_type
return other_collection_type == collection_type
- def subcollection_type_description( self ):
+ def subcollection_type_description(self):
if not self.__has_subcollections:
- raise ValueError( "Cannot generate subcollection type description for flat type %s" % self.collection_type )
- subcollection_type = self.collection_type.split( ":", 1 )[ 1 ]
- return self.collection_type_description_factory.for_collection_type( subcollection_type )
+ raise ValueError("Cannot generate subcollection type description for flat type %s" % self.collection_type)
+ subcollection_type = self.collection_type.split(":", 1)[1]
+ return self.collection_type_description_factory.for_collection_type(subcollection_type)
- def has_subcollections( self ):
+ def has_subcollections(self):
return self.__has_subcollections
- def rank_collection_type( self ):
+ def rank_collection_type(self):
""" Return the top-level collection type corresponding to this
collection type. For instance the "rank" type of a list of paired
data ("list:paired") is "list".
"""
- return self.collection_type.split( ":" )[ 0 ]
+ return self.collection_type.split(":")[0]
- def rank_type_plugin( self ):
- return self.collection_type_description_factory.type_registry.get( self.rank_collection_type() )
+ def rank_type_plugin(self):
+ return self.collection_type_description_factory.type_registry.get(self.rank_collection_type())
@property
- def dimension( self ):
+ def dimension(self):
return len(self.collection_type.split(":")) + 1
- def multiply( self, other_collection_type ):
- collection_type = map_over_collection_type( self, other_collection_type )
- return self.collection_type_description_factory.for_collection_type( collection_type )
+ def multiply(self, other_collection_type):
+ collection_type = map_over_collection_type(self, other_collection_type)
+ return self.collection_type_description_factory.for_collection_type(collection_type)
- def __str__( self ):
+ def __str__(self):
return "CollectionTypeDescription[%s]" % self.collection_type
-def map_over_collection_type( mapped_over_collection_type, target_collection_type ):
- if hasattr( mapped_over_collection_type, 'collection_type' ):
+def map_over_collection_type(mapped_over_collection_type, target_collection_type):
+ if hasattr(mapped_over_collection_type, 'collection_type'):
mapped_over_collection_type = mapped_over_collection_type.collection_type
if not target_collection_type:
return mapped_over_collection_type
else:
- if hasattr( target_collection_type, 'collection_type' ):
+ if hasattr(target_collection_type, 'collection_type'):
target_collection_type = target_collection_type.collection_type
return "%s:%s" % (mapped_over_collection_type, target_collection_type)
diff --git a/lib/galaxy/dataset_collections/types/__init__.py b/lib/galaxy/dataset_collections/types/__init__.py
index cb0a870b9582..3f89cfa3011b 100644
--- a/lib/galaxy/dataset_collections/types/__init__.py
+++ b/lib/galaxy/dataset_collections/types/__init__.py
@@ -8,21 +8,21 @@
from galaxy import exceptions
-log = logging.getLogger( __name__ )
+log = logging.getLogger(__name__)
@six.add_metaclass(ABCMeta)
class DatasetCollectionType(object):
@abstractmethod
- def generate_elements( self, dataset_instances ):
+ def generate_elements(self, dataset_instances):
""" Generate DatasetCollectionElements with corresponding
to the supplied dataset instances or throw exception if
this is not a valid collection of the specified type.
"""
-class BaseDatasetCollectionType( DatasetCollectionType ):
+class BaseDatasetCollectionType(DatasetCollectionType):
- def _validation_failed( self, message ):
- raise exceptions.ObjectAttributeInvalidException( message )
+ def _validation_failed(self, message):
+ raise exceptions.ObjectAttributeInvalidException(message)
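
The `@six.add_metaclass(ABCMeta)` decorator above makes `generate_elements` mandatory for every collection type plugin. A minimal sketch of a conforming (purely hypothetical) type, with dicts standing in for model objects:

```python
from abc import ABCMeta, abstractmethod

import six


@six.add_metaclass(ABCMeta)
class DatasetCollectionType(object):

    @abstractmethod
    def generate_elements(self, dataset_instances):
        """Yield elements for the supplied instances or raise."""


class SingletonDatasetCollectionType(DatasetCollectionType):
    # Hypothetical type for illustration; not a real Galaxy plugin.
    collection_type = "singleton"

    def generate_elements(self, dataset_instances):
        if len(dataset_instances) != 1:
            # The real base class raises ObjectAttributeInvalidException
            # via _validation_failed(); ValueError keeps this standalone.
            raise ValueError("a singleton holds exactly one dataset")
        for identifier, element in dataset_instances.items():
            yield {"element_identifier": identifier, "element": element}


elements = list(SingletonDatasetCollectionType().generate_elements({"only": "hda"}))
assert elements == [{"element_identifier": "only", "element": "hda"}]
```
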
diff --git a/lib/galaxy/dataset_collections/types/list.py b/lib/galaxy/dataset_collections/types/list.py
index 44c0576e29a0..570a48c4084b 100644
--- a/lib/galaxy/dataset_collections/types/list.py
+++ b/lib/galaxy/dataset_collections/types/list.py
@@ -3,15 +3,15 @@
from ..types import BaseDatasetCollectionType
-class ListDatasetCollectionType( BaseDatasetCollectionType ):
+class ListDatasetCollectionType(BaseDatasetCollectionType):
""" A flat list of named elements.
"""
collection_type = "list"
- def __init__( self ):
+ def __init__(self):
pass
- def generate_elements( self, elements ):
+ def generate_elements(self, elements):
for identifier, element in elements.items():
association = DatasetCollectionElement(
element=element,
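
The hunk is truncated here, but the visible loop is the core of the list type: each identifier/element pair becomes one `DatasetCollectionElement`. A dict-based sketch of that mapping (ordering assumes an ordered mapping, hence `OrderedDict`):

```python
from collections import OrderedDict


# Standalone sketch of the list type's generate_elements loop; dicts
# stand in for DatasetCollectionElement.
def generate_list_elements(elements):
    for identifier, element in elements.items():
        yield {"element": element, "element_identifier": identifier}


elements = OrderedDict([("sample1", "hda1"), ("sample2", "hda2")])
identifiers = [e["element_identifier"] for e in generate_list_elements(elements)]
assert identifiers == ["sample1", "sample2"]  # element order follows identifier order
```
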
diff --git a/lib/galaxy/dataset_collections/types/paired.py b/lib/galaxy/dataset_collections/types/paired.py
index 80cdbd5ef0d6..36763d91566a 100644
--- a/lib/galaxy/dataset_collections/types/paired.py
+++ b/lib/galaxy/dataset_collections/types/paired.py
@@ -5,23 +5,23 @@
FORWARD_IDENTIFIER = "forward"
REVERSE_IDENTIFIER = "reverse"
-INVALID_IDENTIFIERS_MESSAGE = "Paired instance must define '%s' and '%s' datasets ." % ( FORWARD_IDENTIFIER, REVERSE_IDENTIFIER )
+INVALID_IDENTIFIERS_MESSAGE = "Paired instance must define '%s' and '%s' datasets." % (FORWARD_IDENTIFIER, REVERSE_IDENTIFIER)
-class PairedDatasetCollectionType( BaseDatasetCollectionType ):
+class PairedDatasetCollectionType(BaseDatasetCollectionType):
"""
Paired (left/right) datasets.
"""
collection_type = "paired"
- def __init__( self ):
+ def __init__(self):
pass
- def generate_elements( self, elements ):
- forward_dataset = elements.get( FORWARD_IDENTIFIER, None )
- reverse_dataset = elements.get( REVERSE_IDENTIFIER, None )
+ def generate_elements(self, elements):
+ forward_dataset = elements.get(FORWARD_IDENTIFIER, None)
+ reverse_dataset = elements.get(REVERSE_IDENTIFIER, None)
if not forward_dataset or not reverse_dataset:
- self._validation_failed( INVALID_IDENTIFIERS_MESSAGE )
+ self._validation_failed(INVALID_IDENTIFIERS_MESSAGE)
left_association = DatasetCollectionElement(
element=forward_dataset,
element_identifier=FORWARD_IDENTIFIER,
@@ -33,7 +33,7 @@ def generate_elements( self, elements ):
yield left_association
yield right_association
- def prototype_elements( self ):
+ def prototype_elements(self):
left_association = DatasetCollectionElement(
element=HistoryDatasetAssociation(),
element_identifier=FORWARD_IDENTIFIER,
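
Unlike the list type, the pair type fixes its identifiers. A hedged sketch of the validation above (dicts replace the model objects; the truncated `prototype_elements` hunk builds placeholder HDAs with the same identifiers):

```python
# Standalone sketch of the paired type's element generation.
FORWARD_IDENTIFIER = "forward"
REVERSE_IDENTIFIER = "reverse"


def generate_paired_elements(elements):
    forward = elements.get(FORWARD_IDENTIFIER)
    reverse = elements.get(REVERSE_IDENTIFIER)
    if not forward or not reverse:
        # Galaxy raises ObjectAttributeInvalidException with the message above.
        raise ValueError("Paired instance must define 'forward' and 'reverse' datasets.")
    yield {"element_identifier": FORWARD_IDENTIFIER, "element": forward}
    yield {"element_identifier": REVERSE_IDENTIFIER, "element": reverse}


pair = list(generate_paired_elements({"forward": "R1.fastq", "reverse": "R2.fastq"}))
assert [e["element_identifier"] for e in pair] == ["forward", "reverse"]
```
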
diff --git a/lib/galaxy/datatypes/assembly.py b/lib/galaxy/datatypes/assembly.py
index 015d5f8ba1ce..669de6e35edf 100644
--- a/lib/galaxy/datatypes/assembly.py
+++ b/lib/galaxy/datatypes/assembly.py
@@ -18,13 +18,13 @@
log = logging.getLogger(__name__)
-class Amos( data.Text ):
+class Amos(data.Text):
"""Class describing the AMOS assembly file """
edam_data = "data_0925"
edam_format = "format_3582"
file_ext = 'afg'
- def sniff( self, filename ):
+ def sniff(self, filename):
# FIXME: this method will read the entire file.
# It should call get_headers() like other sniff methods.
"""
@@ -52,14 +52,14 @@ def sniff( self, filename ):
"""
isAmos = False
try:
- fh = open( filename )
+ fh = open(filename)
while not isAmos:
line = fh.readline()
if not line:
break # EOF
line = line.strip()
if line: # first non-empty line
- if line.startswith( '{' ):
+ if line.startswith('{'):
if re.match(r'{(RED|CTG|TLE)$', line):
isAmos = True
fh.close()
@@ -68,11 +68,11 @@ def sniff( self, filename ):
return isAmos
-class Sequences( sequence.Fasta ):
+class Sequences(sequence.Fasta):
"""Class describing the Sequences file generated by velveth """
edam_data = "data_0925"
- def sniff( self, filename ):
+ def sniff(self, filename):
"""
Determines whether the file is a velveth produced fasta format
The id line has 3 fields separated by tabs: sequence_name sequence_index category::
@@ -84,19 +84,19 @@ def sniff( self, filename ):
"""
try:
- fh = open( filename )
+ fh = open(filename)
while True:
line = fh.readline()
if not line:
break # EOF
line = line.strip()
if line: # first non-empty line
- if line.startswith( '>' ):
+ if line.startswith('>'):
if not re.match(r'>[^\t]+\t\d+\t\d+$', line):
break
                        # The next line.strip() must not be '', nor start with '>'
line = fh.readline().strip()
- if line == '' or line.startswith( '>' ):
+ if line == '' or line.startswith('>'):
break
return True
else:
@@ -107,11 +107,11 @@ def sniff( self, filename ):
return False
-class Roadmaps( data.Text ):
+class Roadmaps(data.Text):
"""Class describing the Sequences file generated by velveth """
edam_format = "format_2561"
- def sniff( self, filename ):
+ def sniff(self, filename):
"""
Determines whether the file is a velveth produced RoadMap::
142858 21 1
@@ -121,7 +121,7 @@ def sniff( self, filename ):
"""
try:
- fh = open( filename )
+ fh = open(filename)
while True:
line = fh.readline()
if not line:
@@ -143,43 +143,43 @@ def sniff( self, filename ):
return False
-class Velvet( Html ):
- MetadataElement( name="base_name", desc="base name for velveth dataset", default="velvet", readonly=True, set_in_upload=True)
- MetadataElement( name="paired_end_reads", desc="has paired-end reads", default="False", readonly=False, set_in_upload=True)
- MetadataElement( name="long_reads", desc="has long reads", default="False", readonly=False, set_in_upload=True)
- MetadataElement( name="short2_reads", desc="has 2nd short reads", default="False", readonly=False, set_in_upload=True)
+class Velvet(Html):
+ MetadataElement(name="base_name", desc="base name for velveth dataset", default="velvet", readonly=True, set_in_upload=True)
+ MetadataElement(name="paired_end_reads", desc="has paired-end reads", default="False", readonly=False, set_in_upload=True)
+ MetadataElement(name="long_reads", desc="has long reads", default="False", readonly=False, set_in_upload=True)
+ MetadataElement(name="short2_reads", desc="has 2nd short reads", default="False", readonly=False, set_in_upload=True)
composite_type = 'auto_primary_file'
allow_datatype_change = False
file_ext = 'velvet'
- def __init__( self, **kwd ):
- Html.__init__( self, **kwd )
- self.add_composite_file( 'Sequences', mimetype='text/html', description='Sequences', substitute_name_with_metadata=None, is_binary=False )
- self.add_composite_file( 'Roadmaps', mimetype='text/html', description='Roadmaps', substitute_name_with_metadata=None, is_binary=False )
- self.add_composite_file( 'Log', mimetype='text/html', description='Log', optional='True', substitute_name_with_metadata=None, is_binary=False )
+ def __init__(self, **kwd):
+ Html.__init__(self, **kwd)
+ self.add_composite_file('Sequences', mimetype='text/html', description='Sequences', substitute_name_with_metadata=None, is_binary=False)
+ self.add_composite_file('Roadmaps', mimetype='text/html', description='Roadmaps', substitute_name_with_metadata=None, is_binary=False)
+ self.add_composite_file('Log', mimetype='text/html', description='Log', optional='True', substitute_name_with_metadata=None, is_binary=False)
- def generate_primary_file( self, dataset=None ):
- log.debug( "Velvet log info %s %s" % ('JJ generate_primary_file', dataset))
+ def generate_primary_file(self, dataset=None):
+ log.debug("Velvet log info %s %s" % ('JJ generate_primary_file', dataset))
         rval = ['<html><head><title>Velvet Galaxy Composite Dataset</title></head><p/>']
         rval.append('<div>This composite dataset is composed of the following files:<p/><ul>')
- for composite_name, composite_file in self.get_composite_files( dataset=dataset ).items():
+ for composite_name, composite_file in self.get_composite_files(dataset=dataset).items():
fn = composite_name
- log.debug( "Velvet log info %s %s %s" % ('JJ generate_primary_file', fn, composite_file))
+ log.debug("Velvet log info %s %s %s" % ('JJ generate_primary_file', fn, composite_file))
opt_text = ''
if composite_file.optional:
opt_text = ' (optional)'
if composite_file.get('description'):
-            rval.append( '<li><a href="%s" type="text/plain">%s (%s)</a>%s</li>' % ( fn, fn, composite_file.get('description'), opt_text ) )
+            rval.append('<li><a href="%s" type="text/plain">%s (%s)</a>%s</li>' % (fn, fn, composite_file.get('description'), opt_text))
             else:
-            rval.append( '<li><a href="%s" type="text/plain">%s</a>%s</li>' % ( fn, fn, opt_text ) )
-        rval.append( '</ul></div></html>' )
-        return "\n".join( rval )
+            rval.append('<li><a href="%s" type="text/plain">%s</a>%s</li>' % (fn, fn, opt_text))
+        rval.append('</ul></div></html>')